[
  {
    "path": ".gitignore",
    "content": "# rviz file\n*.rviz\n\n# document buid file\nbuild\n_build\n\n# vscode file\n.vscode\n"
  },
  {
    "path": "CHANGELOG.rst",
    "content": "changelog for ros2_grasp_library\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n0.5.0 (2019-11-06)\n------------------\n* Added examples for advanced industrial robot applications\n  * draw X\n  * fixed position pick and place\n  * random picking with OpenVINO grasp planning\n  * recognition picking with OpenVINO grasp planning and OpenVINO mask-rcnn object segmentation\n* Support ROS2 hand-eye calibration\n* Support robot interface for manipulation\n* Added tutorials on how to\n  * Build and launch example applications\n  * Operate hand-eye calibration and publish the transformation\n  * Quickly enable robot interface on a new industrial robot\n\n0.4.0 (2019-03-13)\n------------------\n* Support \"service-driven\" grasp detection mechanism (via configure auto_mode) to optimize CPU load for real-time processing.\n* Support grasp transformation from camera frame to a specified target frame expected in the visual manipulation.\n* Support launch option \"grasp_approach\" to specify expected approach direction in the target frame specified by 'grasp_frame_id'. Grasp Planner will return grasp poses with approach direction approximate to this parameter.\n* Support launch option \"device\" to configure device for grasp pose inference to execute, 0 for CPU, 1 for GPU, 2 for VPU, 3 for FPGA. In case OpenVINO plug-ins are installed (tutorial), this configure deploy the CNN based deep learning inference on to the target device.\n* Add tutorials for introduction to Intel DLDT toolkit and Intel OpenVINO toolkit.\n* Add tutorials for launch options and customization notes.\n\n0.3.0 (2018-12-28)\n------------------\n* Support grasp pose detection from RGBD point cloud.\n* Support MoveIt! grasp planning service.\n"
  },
  {
    "path": "LICENSE",
    "content": "\n                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. 
For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. 
Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. (Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright 2018 Intel Corporation\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "README.md",
    "content": "# DISCONTINUATION OF PROJECT #  \nThis project will no longer be maintained by Intel.  \nIntel has ceased development and contributions including, but not limited to, maintenance, bug fixes, new releases, or updates, to this project.  \nIntel no longer accepts patches to this project.  \n If you have an ongoing need to use this project, are interested in independently developing it, or would like to maintain patches for the open source software community, please create your own fork of this project.  \n  \nContact: webadmin@linux.intel.com\n# ROS2 Grasp Library\n\nA ROS2 intelligent visual grasp solution for advanced industrial usages, with OpenVINO™ grasp detection and MoveIt Grasp Planning.\n\n## Overview\nROS2 Grasp Library enables state-of-the-art CNN based deep learning grasp detection algorithms on ROS2 for intelligent visual grasp in industrial robot usage scenarios. This package provides ROS2 interfaces compliant with the open source [MoveIt](http://moveit.ros.org/) motion planning framework supported by most of the [robot models](https://moveit.ros.org/robots) in ROS industrial. This package delivers\n* A ROS2 Grasp Planner providing grasp planning service, as an extensible capability of MoveIt ([moveit_msgs::srv::GraspPlanning](http://docs.ros.org/api/moveit_msgs/html/srv/GraspPlanning.html)), translating grasp detection results into MoveIt Interfaces ([moveit_msgs::msg::Grasp](http://docs.ros.org/api/moveit_msgs/html/msg/Grasp.html))\n* A ROS2 Grasp Detctor abstracting interfaces for grasp detection results\n* A ROS2 hand-eye calibration module generating transformation from camera frame to robot frame\n* ROS2 example applications demonstrating how to use this ROS2 Grasp Library in advanced industrial usages for intelligent visual grasp\n\n## Grasp Detection Algorithms\nGrasp detection back-end algorithms enabled by this ROS2 Grasp Library:\n- [Grasp Pose Detection](https://github.com/atenpas/gpd) detects 6-DOF grasp poses for a 2-finger grasp (e.g. a parallel jaw gripper) in 3D point clouds from RGBD sensor or PCD file. 
The grasp detection is enabled with the Intel® [DLDT](https://github.com/opencv/dldt) toolkit and the Intel® [OpenVINO™](https://software.intel.com/en-us/openvino-toolkit) toolkit.\n\n  <img src=\"grasp_tutorials/doc/grasp_ros2/img/ros2_grasp_library.png\" width = 50% height = 50% alt=\"ROS2 Grasp Library\" align=center />\n\n## Tutorials\nRefer to ROS2 Grasp Library [Tutorials](http://intel.github.io/ros2_grasp_library) for how to\n* Install, build, and launch the ROS2 Grasp Planner and Detector\n* Use launch options to customize in a new workspace\n* Bring up the intelligent visual grasp solution on a new robot\n* Do hand-eye calibration for a new camera setup\n* Launch the example applications\n\n## Example Applications\n\n### Random Picking (OpenVINO Grasp Detection)\n\n[<img src=\"grasp_tutorials/_static/images/random_pick.png\" width = 50% height = 50% alt=\"Random Pick with OpenVINO Grasp Detection - Link to Youtube video demo\" align=center>](https://www.youtube.com/embed/b4EPvHdidOA)\n\n### Recognition Picking (OpenVINO Grasp Detection + OpenVINO Mask-rcnn Object Segmentation)\n\n[<img src=\"grasp_tutorials/_static/images/recognize_pick.png\" width = 50% height = 50% alt=\"Recognition Pick with OpenVINO Grasp Detection - Link to Youtube video demo\" align=center>](https://www.youtube.com/embed/trIt0uKRXBs)\n\n## Known Issues\n  * Cloud camera fails with \"Invalid sizes when resizing a matrix or array\" when dealing with an XYZRGBA pointcloud from ROS2 Realsense, tracked as [#6](https://github.com/atenpas/gpg/issues/6) of gpg, [patch](https://github.com/atenpas/gpg/pull/7) under review.\n  * 'colcon test' sometimes fails with test suite \"tgrasp_ros2\", due to a ROS2 service request failure (reported in ros2 examples issue [#228](https://github.com/ros2/examples/issues/228) and discussed in detail in ros2 demos issue [#304](https://github.com/ros2/demos/issues/304))\n  * Rviz2 fails to receive Static TF from the camera due to transient_local QoS (expected in the coming ROS2 Eloquent, discussed in geometry2 issue [#183](https://github.com/ros2/geometry2/issues/183)), workaround [patch](https://github.com/intel/ros2_intel_realsense/pull/88) available until the adaptation to Eloquent\n\n## Contribute to This Project\n  Contributions to this project are welcome. Here are some recommended practices:\n  * When adding a new feature, add tests covering the new functionality\n    ```bash\n    colcon test --packages-select <names_of_affected_packages>\n    ```\n  * Before submitting a patch, make sure all existing tests pass to avoid regressions\n    ```bash\n    colcon test --packages-select <names_of_existing_packages>\n    ```\n\n###### *Any security issue should be reported using the process at https://01.org/security*\n"
  },
  {
    "path": "docker/Dockerfile",
    "content": "########################################################\n# Based on Ubuntu 18.04\n########################################################\n\n# Set the base image to ubuntu 18.04\n\nFROM ubuntu:bionic\n\nMAINTAINER Liu Cong \"congx.liu@intel.com\"\n\nARG DEPS_DIR=/root/deps\nWORKDIR $DEPS_DIR\n\n# install ros2 grasp library deps\nCOPY ./script/ $DEPS_DIR/script/\nRUN bash script/install_ros2_grasp_library_deps.sh /root/deps\n\nWORKDIR /root\nENTRYPOINT [\"/root/script/ros_entrypoint.sh\"]\nCMD [\"bash\"]\n"
  },
  {
    "path": "docker/README.md",
    "content": "# Precondition\n## add docker group\n```\nsudo groupadd docker\nsudo usermod -aG docker $USER\n```\n## Build docker image\n```\ncd ros2_grasp_library/docker\ndocker build -t intel/ros2:ros2_grasp_library_deps .\n\n```\nIf your use proxy\n```\ndocker build -t intel/ros2:ros2_grasp_library_deps --build-arg http_proxy=<proxy>:<port> --build-arg https_proxy=<proxy>:<port> .\n```\n## OPTION:Please refer below command to verify image creating success\n```\ndocker images\n\nREPOSITORY            TAG                           IMAGE ID            CREATED             SIZE\nintel/ros2            ros2_grasp_library_deps       b6d619a01f33        1 hours ago         6.92GB\n```\n# Run OpenVINO Grasp Library with RGBD Camera\n## Terminal 1: Build ros2_grasp_library and launch Rviz2 to illustrate detection results.\nAfter the project runs, there will be a pop-up x window, you need to set the operating environment first.\n```\n./setup_docker_display.sh\n\ndocker run -it --rm --privileged -v /tmp/.X11-unix:/tmp/.X11-unix:rw -v /tmp/.docker.xauth:/tmp/.docker.xauth:rw -v /dev/bus/usb:/dev/bus/usb  \\\n    -v /dev:/dev:rw -e XAUTHORITY=/tmp/.docker.xauth -e DISPLAY --name ros2_grasp_library intel/ros2:ros2_grasp_library_deps bash\n\n# cd /root/\n# mkdir -p ros2_ws/src\n# cd ros2_ws/src\n# git clone https://github.com/intel/ros2_grasp_library.git\n# git clone https://github.com/intel/ros2_intel_realsense.git -b refactor\n# git clone https://github.com/intel/ros2_openvino_toolkit.git\n# cd ..\n# colcon build --symlink-install --packages-select grasp_msgs moveit_msgs people_msgs grasp_ros2 realsense_msgs realsense_ros realsense_node\n# source ./install/local_setup.bash\n# ros2 run rviz2 rviz2 -d src/ros2_grasp_library/grasp_ros2/rviz2/grasp.rviz\n```\n## Terminal 2: launch RGBD camera\n```\ndocker exec -t -i ros2_grasp_library bash\n\n# source /root/ros2_ws/install/setup.bash\n# ros2 run realsense_node realsense_node\n```\n## Terminal 3: launch Grasp Library\n```\ndocker exec -t -i ros2_grasp_library bash\n\n# source /root/ros2_ws/install/setup.bash\n# ros2 run grasp_ros2 grasp_ros2 __params:=src/ros2_grasp_library/grasp_ros2/cfg/grasp_ros2_params.yaml\n```\nNote: If you haven't already installed or want more information on how to use docker, please see the article here for more information:\nhttps://docs.docker.com/install/\n\n"
  },
  {
    "path": "docker/script/00_ros2_install.sh",
    "content": "#!/bin/bash\n\nDEPS_DIR=${DEPS_PATH}\nros2_version=dashing\nSUDO=$1\nif [ \"$SUDO\" == \"sudo\" ];then\n\tSUDO=\"sudo\"\nelse\n\tSUDO=\"\"\nfi\n\n# fix popup caused by libssl\n$SUDO apt-get install -y debconf-utils \\\n    echo 'libssl1.0.0:amd64 libraries/restart-without-asking boolean true' | $SUDO debconf-set-selections\n    \n# Authorize gpg key with apt\n$SUDO apt-get update && $SUDO apt-get install -y curl gnupg2 lsb-release &&\\\n    curl http://repo.ros2.org/repos.key | $SUDO apt-key add -\n\n# Add the repository to sources list\n$SUDO sh -c 'echo \"deb [arch=amd64,arm64] http://packages.ros.org/ros2/ubuntu `lsb_release -cs` main\" > /etc/apt/sources.list.d/ros2-latest.list'\n\n# Install development tools and ROS tools\n$SUDO apt-get update && $SUDO apt-get install -y \\\n    python-rosdep \\\n    python3-vcstool \\\n    python3-colcon-common-extensions\n    \n# Install ROS 2 packages\necho \"install $ros2_version\"\n$SUDO apt-get update && $SUDO apt-get install -y ros-${ros2_version}-desktop\n"
  },
  {
    "path": "docker/script/10_eigen_install.sh",
    "content": "#!/bin/bash \n\nset -e\n\nDEPS_DIR=${DEPS_PATH}\neigen_version=https://gitlab.com/libeigen/eigen/-/archive/3.2/eigen-3.2.tar.gz\nSUDO=$1\nif [ \"$SUDO\" == \"sudo\" ];then\n        SUDO=\"sudo\"\nelse\n        SUDO=\"\"\nfi\n\ncd $DEPS_DIR\n\n$SUDO apt-get install -y gfortran\nwget -t 3 -c $eigen_version\ntar -xvf eigen-3.2.tar.gz\ncd eigen-3.2 &&mkdir -p build && cd build\ncmake -DCMAKE_BUILD_TYPE=Release ..\nmake -j4\n$SUDO make install\n$SUDO rm -rf /usr/include/eigen3/\n$SUDO ln -sf /usr/local/include/eigen3 /usr/include/\n$SUDO make install\n"
  },
  {
    "path": "docker/script/11_libpcl_install.sh",
    "content": "#!/bin/bash \n\nset -e\n\nDEPS_DIR=${DEPS_PATH}\npcl_version=https://github.com/PointCloudLibrary/pcl/archive/pcl-1.8.1.tar.gz\nSUDO=$1\nif [ \"$SUDO\" == \"sudo\" ];then\n        SUDO=\"sudo\"\nelse\n        SUDO=\"\"\nfi\n\ncd $DEPS_DIR\n\n$SUDO apt-get install -y libhdf5-dev python3-h5py python3-pip\nwget -t 3 -c  $pcl_version\ntar -xvf pcl-1.8.1.tar.gz\ncd pcl-pcl-1.8.1 &&mkdir -p build && cd build\ncmake -DCMAKE_BUILD_TYPE=Release ..\nmake -j4\n$SUDO make install\n"
  },
  {
    "path": "docker/script/12_opencv_install.sh",
    "content": "#!/bin/bash\n\nDEPS_DIR=${DEPS_PATH}\nopencv_version=4.1.2\nSUDO=$1\nif [ \"$SUDO\" == \"sudo\" ];then\n        SUDO=\"sudo\"\nelse\n        SUDO=\"\"\nfi\n\n#install opencv\ncd $DEPS_DIR\n$SUDO apt-get update && $SUDO apt-get install -y build-essential \\\n        libgtk2.0-dev \\\n        pkg-config \\\n        libavcodec-dev \\\n        libavformat-dev \\\n        libswscale-dev \\\n        python-dev \\\n        python-numpy \\\n        libtbb2 \\\n        libtbb-dev \\\n        libjpeg-dev \\\n        libpng-dev \\\n        libtiff-dev \\\n        libdc1394-22-dev\ngit clone --depth 1 https://github.com/opencv/opencv.git -b $opencv_version\ngit clone --depth 1 https://github.com/opencv/opencv_contrib.git -b $opencv_version\ncd $DEPS_DIR/opencv\nmkdir -p build && cd build\ncd $DEPS_DIR/opencv/build\ncmake -D OPENCV_EXTRA_MODULES_PATH=$DEPS_DIR/opencv_contrib/modules \\\n    -D CMAKE_BUILD_TYPE=Release \\\n    -D CMAKE_INSTALL_PREFIX=/usr/local \\\n    -D BUILD_EXAMPLES=ON \\\n    -D BUILD_opencv_xfeatures2d=OFF \\\n    ..\nmake -j4\n$SUDO make install\necho \"/usr/local/lib\" | $SUDO tee /etc/ld.so.conf.d/opencv.conf\n$SUDO ldconfig\n"
  },
  {
    "path": "docker/script/13_openvino_install.sh",
    "content": "#!/bin/bash\n\nDEPS_DIR=${DEPS_PATH}\nMKL_URL=https://github.com/intel/mkl-dnn/releases/download/v0.19/mklml_lnx_2019.0.5.20190502.tgz\nMKL_VERSION=mklml_lnx_2019.0.5.20190502\nOPENVINO_VERSION=2019_R3.1\n\nSUDO=$1\nif [ \"$SUDO\" == \"\" ];then\n        SUDO=\"sudo\"\nfi\n\n# install mkl 2019.0.5.20190502\n$SUDO apt-get update && $SUDO apt-get install -y wget\ncd $DEPS_DIR\nwget -t 3 -c ${MKL_URL} &&\\\n  tar -xvf ${MKL_VERSION}.tgz &&\\\n  cd ${MKL_VERSION} &&\\\n  $SUDO mkdir -p /usr/local/lib/mklml &&\\\n  $SUDO cp -rf ./lib /usr/local/lib/mklml &&\\\n  $SUDO cp -rf ./include /usr/local/lib/mklml &&\\\n  $SUDO touch /usr/local/lib/mklml/version.info\n\n#install opencl 19.41.14441\ncd $DEPS_DIR\nmkdir -p opencl && cd opencl &&\\\n  wget -t 3 -c https://github.com/intel/compute-runtime/releases/download/19.41.14441/intel-gmmlib_19.3.2_amd64.deb &&\\\n  wget -t 3 -c https://github.com/intel/compute-runtime/releases/download/19.41.14441/intel-igc-core_1.0.2597_amd64.deb &&\\\n  wget -t 3 -c https://github.com/intel/compute-runtime/releases/download/19.41.14441/intel-igc-opencl_1.0.2597_amd64.deb &&\\\n  wget -t 3 -c https://github.com/intel/compute-runtime/releases/download/19.41.14441/intel-opencl_19.41.14441_amd64.deb &&\\\n  wget -t 3 -c https://github.com/intel/compute-runtime/releases/download/19.41.14441/intel-ocloc_19.41.14441_amd64.deb &&\\\n  $SUDO dpkg -i *.deb\n\n#install cmake 3.11\nif [ $(cmake --version|grep \"version\"|awk '{print $3}') != \"3.14.3\"  ];then\n  cd $DEPS_DIR\n  wget -t 3 -c https://www.cmake.org/files/v3.14/cmake-3.14.3.tar.gz && \\\n    tar xf cmake-3.14.3.tar.gz && \\\n    (cd cmake-3.14.3 && ./bootstrap --parallel=$(nproc --all) && make --jobs=$(nproc --all) && $SUDO make install) && \\\n    rm -rf cmake-3.14.3 cmake-3.14.3.tar.gz\nfi\n#install openvino 2019_R3.1\ncd $DEPS_DIR\n$SUDO apt-get update && $SUDO apt-get install -y git\ngit clone --depth 1 https://github.com/openvinotoolkit/openvino -b ${OPENVINO_VERSION}\ncd $DEPS_DIR/openvino/inference-engine\ngit submodule update --init --recursive &&\\\n  chmod +x install_dependencies.sh &&\\\n  $SUDO ./install_dependencies.sh\nmkdir -p build && cd build &&\\\n  cmake -DCMAKE_BUILD_TYPE=Release \\\n  -DCMAKE_INSTALL_PREFIX=/usr/local \\\n  -DGEMM=MKL -DMKLROOT=/usr/local/lib/mklml \\\n  -DTHREADING=OMP \\\n  -DENABLE_MKL_DNN=ON \\\n  -DENABLE_CLDNN=ON \\\n  -DENABLE_OPENCV=OFF \\\n  ..\ncd $DEPS_DIR/openvino/inference-engine/build\nmake -j8\n\ncd $DEPS_DIR/openvino/inference-engine/build\n$SUDO mkdir -p /usr/share/InferenceEngine &&\\\n  $SUDO cp InferenceEngineConfig*.cmake /usr/share/InferenceEngine &&\\\n  $SUDO cp targets.cmake /usr/share/InferenceEngine &&\\\n  echo `pwd`/../bin/intel64/Release/lib | $SUDO tee -a /etc/ld.so.conf.d/openvino.conf &&\\\n  $SUDO ldconfig\n$SUDO ln -sf $DEPS_DIR/openvino /opt/openvino_toolkit/openvino\n"
  },
  {
    "path": "docker/script/20_librealsense_install.sh",
    "content": "#!/bin/bash\n\nDEPS_DIR=${DEPS_PATH}\nlibrealsense_version=2.31.0-0~realsense0.1791\nSUDO=$1\nif [ \"$SUDO\" == \"sudo\" ];then\n        SUDO=\"sudo\"\nelse\n        SUDO=\"\"\nfi\n\n# install librealsense v2.34.0-0~realsense0.2251\necho \"install librealsense 2.34.0-0~realsense0.2251\"\ncd $DEPS_DIR\nif [ $http_proxy == \"\" ];then\n\t$SUDO apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-key F6E65AC044F831AC80A06380C8B3A55A6F3EFCDE\nelse\n\t$SUDO apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --keyserver-options http-proxy=$http_proxy --recv-key F6E65AC044F831AC80A06380C8B3A55A6F3EFCDE\nfi\n$SUDO sh -c 'echo \"deb http://realsense-hw-public.s3.amazonaws.com/Debian/apt-repo `lsb_release -cs` main\" > /etc/apt/sources.list.d/librealsense.list'\n$SUDO apt-get update && $SUDO apt-get install -y librealsense2=${librealsense_version} \\\n       \tlibrealsense2-dev=${librealsense_version} \\\n       \tlibrealsense2-udev-rules=${librealsense_version} \\\n\tlibrealsense2-gl=${librealsense_version} \\\n        librealsense2-utils=${librealsense_version} \\\n\tlibrealsense2-dbg=${librealsense_version} \\\n\tlibrealsense2-dkms\n"
  },
  {
    "path": "docker/script/30_gpg_install.sh",
    "content": "#!/bin/bash\n\nDEPS_DIR=${DEPS_PATH}\nSUDO=$1\nif [ \"$SUDO\" == \"sudo\" ];then\n        SUDO=\"sudo\"\nelse\n        SUDO=\"\"\nfi\n\n# install gpg\ncd $DEPS_DIR\nwget -t 3 -c https://github.com/atenpas/gpg/archive/3dcd656d70f095ad1bda3d2fb597a994198466ab.zip\nunzip 3dcd656d70f095ad1bda3d2fb597a994198466ab.zip \ncd gpg-3dcd656d70f095ad1bda3d2fb597a994198466ab\nmkdir -p build && cd build\ncmake .. && make\n$SUDO make install\nls /usr/local/lib/libgrasp_candidates_generator.so\n"
  },
  {
    "path": "docker/script/31_gpd_install.sh",
    "content": "#!/bin/bash\n\nDEPS_DIR=${DEPS_PATH}\nSUDO=$1\nif [ \"$SUDO\" == \"sudo\" ];then\n        SUDO=\"sudo\"\nelse\n        SUDO=\"\"\nfi\n\n# install gpd\ncd $DEPS_DIR\ngit clone --depth 1 https://github.com/sharronliu/gpd.git -b libgpd\ncd gpd/src/gpd\nmkdir -p build && cd build\ncmake -DUSE_OPENVINO=ON .. && make\n$SUDO make install\n"
  },
  {
    "path": "docker/script/32_ur_modern_driver_install.sh",
    "content": "#!/bin/bash\n\nDEPS_DIR=${DEPS_PATH}\nSUDO=$1\nif [ \"$SUDO\" == \"sudo\" ];then\n        SUDO=\"sudo\"\nelse\n        SUDO=\"\"\nfi\n\n# install ur_modern_driver\ncd $DEPS_DIR\ngit clone --depth 1 https://github.com/RoboticsYY/ur_modern_driver.git -b libur_modern_driver\ncd ur_modern_driver/libur_modern_driver\nmkdir -p build && cd build\ncmake -DCMAKE_BUILD_TYPE=Release .. && make\n$SUDO make install\n"
  },
  {
    "path": "docker/script/50_ros2_deps.sh",
    "content": "#!/bin/bash\n\nSUDO=$1\nif [ \"$SUDO\" == \"sudo\" ];then\n        SUDO=\"sudo\"\nelse\n        SUDO=\"\"\nfi\n\n$SUDO apt-get install -y ros-dashing-object-msgs \\\n        python3-scipy \\\n        ros-dashing-eigen3-cmake-module\n\nWORK_DIR=${DEPS_PATH}/../ros2_ws\nmkdir -p $WORK_DIR/src &&cd $WORK_DIR/src\n\ngit clone --depth 1 https://github.com/RoboticsYY/ros2_ur_description.git\ngit clone --depth 1 https://github.com/RoboticsYY/handeye\ngit clone --depth 1 https://github.com/RoboticsYY/criutils.git\ngit clone --depth 1 https://github.com/RoboticsYY/baldor.git\ngit clone --depth 1 https://github.com/intel/ros2_intel_realsense.git -b refactor\ngit clone --depth 1 https://github.com/intel/ros2_grasp_library.git\n\ncd $WORK_DIR\nsource /opt/ros/dashing/setup.sh\nexport InferenceEngine_DIR=/opt/openvino_toolkit/openvino/inference-engine/build/\nexport export CPU_EXTENSION_LIB=/opt/openvino_toolkit/openvino/inference-engine/bin/intel64/Release/lib/libcpu_extension.so\nexport GFLAGS_LIB=/opt/openvino_toolkit/openvino/inference-engine/bin/intel64/Release/lib/libgflags_nothreads.a\nexport LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$InferenceEngine_DIR/../bin/intel64/Release/lib:/usr/local/lib/mklml/lib\n\ncolcon build --symlink-install\n"
  },
  {
    "path": "docker/script/install_ros2_grasp_library.sh",
    "content": "#!/bin/bash\n\nset -e\n\ndeps_path=$1\nif [ -z \"$deps_path\" ]; then\n  echo -e \"warring:\\n    install_ros2_grasp_library_deps.sh <your-install-deps-path>\"\n  echo -e \"If you want to use'sudo' : install_ros2_grasp_library_deps.sh <your-install-deps-path> sudo\"\n  exit 0\nfi\n\nshift\n\n# mkdir deps-path\necho \"DEPS_PATH = $deps_path\"\nmkdir -p $deps_path\nexport DEPS_PATH=$deps_path\n\nCURRENT_DIR=$(dirname \"$(readlink -f \"${BASH_SOURCE[0]}\")\")\necho \"CURRENT_DIR = ${CURRENT_DIR}\"\n\n# install ros2 dashing\nbash ${CURRENT_DIR}/00_ros2_install.sh $@\n\n# instal eigen 3.2\nbash ${CURRENT_DIR}/10_eigen_install.sh $@\n\n# install libpcl 1.8.1\nbash ${CURRENT_DIR}/11_libpcl_install.sh $@\n\n# install opencv 4.1.2 \nbash ${CURRENT_DIR}/12_opencv_install.sh $@\n\n# install openvino 2019_R3.1\nbash ${CURRENT_DIR}/13_openvino_install.sh $@\n\n# install librealsense 2.31\nbash ${CURRENT_DIR}/20_librealsense_install.sh $@\n\n# install gpg\nbash ${CURRENT_DIR}/30_gpg_install.sh $@\n\n# install gpd\nbash ${CURRENT_DIR}/31_gpd_install.sh $@\n\n# install ur_modern_driver\nbash ${CURRENT_DIR}/32_ur_modern_driver_install.sh $@\n\n# build ros2 other deps\nbash ${CURRENT_DIR}/50_ros2_deps.sh $@\n"
  },
  {
    "path": "docker/script/ros_entrypoint.sh",
    "content": "#!/bin/bash\n\nset -e\n\n# setup ros2 environment\nsource /opt/ros/dashing/setup.bash\n\nsource /root/ros2_ws/install/setup.bash\n\nexec \"$@\"\n"
  },
  {
    "path": "docker/script/ros_env.sh",
    "content": "#!/bin/bash\n\nROS_PATH=$(pwd)\n\n# setup ros2 environment\nsource /opt/ros/dashing/setup.bash\nsource ${ROS_PATH}/install/setup.bash\nexport ROS_DOMAIN_ID=100  # robot_group_id\n\nexport InferenceEngine_DIR=/opt/openvino_toolkit/openvino/inference-engine/build/\nexport export CPU_EXTENSION_LIB=/opt/openvino_toolkit/openvino/inference-engine/bin/intel64/Release/lib/libcpu_extension.so\nexport GFLAGS_LIB=/opt/openvino_toolkit/openvino/inference-engine/bin/intel64/Release/lib/libgflags_nothreads.a\nexport LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$InferenceEngine_DIR/../bin/intel64/Release/lib:/usr/local/lib/mklml/lib\n"
  },
  {
    "path": "docker/setup_docker_display.sh",
    "content": "#!/bin/bash\nset -e\n\n# setup docker display\nXSOCK=/tmp/.X11-unix\nXAUTH=/tmp/.docker.xauth\ntouch $XAUTH\nxauth nlist $DISPLAY | sed -e 's/^..../ffff/' | xauth -f $XAUTH nmerge -\n"
  },
  {
    "path": "grasp_apps/draw_x/CMakeLists.txt",
    "content": "# Copyright (c) 2019 Intel Corporation\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#      http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\ncmake_minimum_required(VERSION 3.5)\nproject(draw_x)\n\nif(NOT CMAKE_CXX_STANDARD)\n  set(CMAKE_CXX_STANDARD 14)\nendif()\n\nif(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES \"Clang\")\n  add_compile_options(-Wall -Wextra -Wpedantic)\nendif()\n\nif(CMAKE_BUILD_TYPE EQUAL \"RELEASE\")\n  message(STATUS \"Create Release Build.\")\n  set(CMAKE_CXX_FLAGS \"-O2 ${CMAKE_CXX_FLAGS}\")\nelse()\n  message(STATUS \"Create Debug Build.\")\nendif()\n\nfind_package(ament_cmake REQUIRED)\nfind_package(rclcpp REQUIRED)\nfind_package(robot_interface REQUIRED)\n\ninclude_directories(\n  include\n  ${rclcpp_INCLUDE_DIRS}\n  ${robot_interface_INCLUDE_DIRS}\n)\n\n# draw_x app\nadd_executable(${PROJECT_NAME}\n  src/draw_x.cpp\n)\n\nament_target_dependencies(${PROJECT_NAME}\n  \"rclcpp\"\n  \"robot_interface\"\n)\n\ntarget_link_libraries(${PROJECT_NAME}\n  ${ament_LIBRARIES}\n  ${robot_interface_LIBRARIES}\n)\n\n# Install binaries\ninstall(TARGETS ${PROJECT_NAME}\n  RUNTIME DESTINATION bin\n)\ninstall(TARGETS ${PROJECT_NAME}\n  DESTINATION lib/${PROJECT_NAME}\n)\n\n# Install launch files.\ninstall(DIRECTORY\n  launch\n  DESTINATION share/${PROJECT_NAME}/\n)\n\n# Flags\nif(UNIX OR APPLE)\n  # Linker flags.\n  if(${CMAKE_CXX_COMPILER_ID} STREQUAL \"GNU\" OR ${CMAKE_CXX_COMPILER_ID} STREQUAL \"Intel\")\n    # GCC specific flags. ICC is compatible with them.\n    set(CMAKE_SHARED_LINKER_FLAGS \"${CMAKE_SHARED_LINKER_FLAGS} -z noexecstack -z relro -z now\")\n    set(CMAKE_EXE_LINKER_FLAGS \"${CMAKE_EXE_LINKER_FLAGS} -z noexecstack -z relro -z now\")\n  elseif(${CMAKE_CXX_COMPILER_ID} STREQUAL \"Clang\")\n    # In Clang, -z flags are not compatible, they need to be passed to linker via -Wl.\n    set(CMAKE_SHARED_LINKER_FLAGS \"${CMAKE_SHARED_LINKER_FLAGS} \\\n      -Wl,-z,noexecstack -Wl,-z,relro -Wl,-z,now\")\n    set(CMAKE_EXE_LINKER_FLAGS \"${CMAKE_EXE_LINKER_FLAGS} \\\n      -Wl,-z,noexecstack -Wl,-z,relro -Wl,-z,now\")\n  endif()\n\n  # Compiler flags.\n  if(${CMAKE_CXX_COMPILER_ID} STREQUAL \"GNU\")\n    # GCC specific flags.\n    if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER 4.9 OR\n        CMAKE_CXX_COMPILER_VERSION VERSION_EQUAL 4.9)\n      set(CMAKE_CXX_FLAGS \"${CMAKE_CXX_FLAGS} -fPIE -fstack-protector-strong\")\n    else()\n      set(CMAKE_CXX_FLAGS \"${CMAKE_CXX_FLAGS} -fPIE -fstack-protector\")\n    endif()\n  elseif(${CMAKE_CXX_COMPILER_ID} STREQUAL \"Clang\")\n    # Clang is compatbile with some of the flags.\n    set(CMAKE_CXX_FLAGS \"${CMAKE_CXX_FLAGS} -fPIE -fstack-protector\")\n  elseif(${CMAKE_CXX_COMPILER_ID} STREQUAL \"Intel\")\n    # Same as above, with exception that ICC compilation crashes with -fPIE option, even\n    # though it uses -pie linker option that require -fPIE during compilation. 
Checksec\n    # shows that it generates correct PIE anyway if only -pie is provided.\n    set(CMAKE_CXX_FLAGS \"${CMAKE_CXX_FLAGS} -fstack-protector\")\n  endif()\n\n  # Generic flags.\n  set(CMAKE_CXX_FLAGS \"${CMAKE_CXX_FLAGS} -fPIC -fno-operator-names -Wformat -Wformat-security \\\n    -Wall -fopenmp\")\n  set( CUDA_PROPAGATE_HOST_FLAGS OFF )\n  set(CMAKE_CXX_FLAGS_RELEASE \"${CMAKE_CXX_FLAGS_RELEASE} -D_FORTIFY_SOURCE=2\")\n  set(CMAKE_EXE_LINKER_FLAGS \"${CMAKE_EXE_LINKER_FLAGS} -pie\")\nendif()\n\nament_package()\n"
  },
  {
    "path": "grasp_apps/draw_x/launch/draw_x.launch.py",
    "content": "# Copyright (c) 2019 Intel Corporation. All Rights Reserved\n# \n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n# \n#     http://www.apache.org/licenses/LICENSE-2.0\n# \n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport os\n\nimport launch\nimport launch.actions\nimport launch.substitutions\nimport launch_ros.actions\nfrom ament_index_python.packages import get_package_share_directory\n\ndef generate_launch_description():\n\n    # .yaml file for configuring the parameters\n    yaml = os.path.join(\n        get_package_share_directory('draw_x'), \n            'launch', 'draw_x.yaml'\n    )\n\n    return launch.LaunchDescription([\n\n        launch_ros.actions.Node(\n            package='draw_x', \n            node_executable='draw_x', \n            output='screen', arguments=['__params:='+yaml]),\n\n    ])"
  },
  {
    "path": "grasp_apps/draw_x/launch/draw_x.yaml",
    "content": "robot_control:\n    ros__parameters:\n        host: \"192.168.0.5\"\n        shutdown_on_disconnect: true\n        joint_names: [\"shoulder_pan_joint\", \"shoulder_lift_joint\", \"elbow_joint\", \"wrist_1_joint\", \"wrist_2_joint\", \"wrist_3_joint\"]\n"
  },
  {
    "path": "grasp_apps/draw_x/package.xml",
    "content": "<?xml version=\"1.0\"?>\n<?xml-model href=\"http://download.ros.org/schema/package_format2.xsd\" schematypens=\"http://www.w3.org/2001/XMLSchema\"?>\n<package format=\"2\">\n  <name>draw_x</name>\n  <version>0.5.0</version>\n  <description>A demo app for draw_x</description>\n  <author email=\"yu.yan@intel.com\">Yu Yan</author>\n  <maintainer email=\"yu.yan@intel.com\">Yu Yan</maintainer>\n  <license>Apache License 2.0</license>\n\n  <buildtool_depend>ament_cmake</buildtool_depend>\n  <build_depend>rclcpp</build_depend>\n  <build_depend>robot_interface</build_depend>\n\n  <exec_depend>rclcpp</exec_depend>\n  <exec_depend>robot_interface</exec_depend>\n\n  <test_depend>ament_lint_auto</test_depend>\n  <test_depend>ament_lint_common</test_depend>\n\n  <export>\n    <build_type>ament_cmake</build_type>\n  </export>\n</package>\n"
  },
  {
    "path": "grasp_apps/draw_x/src/draw_x.cpp",
    "content": "#include <geometry_msgs/msg/pose_stamped.hpp>\n#include <rclcpp/rclcpp.hpp>\n#include <robot_interface/control_ur.hpp>\n\n/* pose in joint values*/\nstatic const std::vector<double> HOME = {0.87, -1.44, 1.68, -1.81, -1.56, 0};\n/* pose in [x, y, z, R, P, Y]*/\nstatic const std::vector<double> CORNER1_POSE = { 0.1, -0.65, 0.15, 3.14, 0, -3.14};\nstatic const std::vector<double> CORNER2_POSE = {-0.1, -0.45, 0.15, 3.14, 0, -3.14};\nstatic const std::vector<double> CORNER3_POSE = {-0.1, -0.65, 0.15, 3.14, 0, -3.14};\nstatic const std::vector<double> CORNER4_POSE = { 0.1, -0.45, 0.15, 3.14, 0, -3.14};\n\nint main(int argc, char **argv)\n{\n  rclcpp::init(argc, argv);\n\n  // init robot control\n  auto robot = std::make_shared<URControl>(\"robot_control\",\n       rclcpp::NodeOptions().automatically_declare_parameters_from_overrides(true));\n  robot->parseArgs();\n  robot->startLoop();\n  rclcpp::sleep_for(2s);\n\n  // Move to home\n  robot->moveToJointValues(HOME, 1.05, 1.4);\n\n  // Move to the first corner\n  robot->moveToTcpPose(CORNER1_POSE[0], CORNER1_POSE[1], CORNER1_POSE[2],\n                       CORNER1_POSE[3], CORNER1_POSE[4], CORNER1_POSE[5], 1.05, 1.4);\n\n  robot->moveToTcpPose(CORNER1_POSE[0], CORNER1_POSE[1], CORNER1_POSE[2] - 0.05,\n                       CORNER1_POSE[3], CORNER1_POSE[4], CORNER1_POSE[5], 1.05, 1.4);\n\n  // Move to the second corner\n  robot->moveToTcpPose(CORNER2_POSE[0], CORNER2_POSE[1], CORNER2_POSE[2] - 0.05,\n                       CORNER2_POSE[3], CORNER2_POSE[4], CORNER2_POSE[5], 1.05, 1.4);\n\n  robot->moveToTcpPose(CORNER2_POSE[0], CORNER2_POSE[1], CORNER2_POSE[2],\n                       CORNER2_POSE[3], CORNER2_POSE[4], CORNER2_POSE[5], 1.05, 1.4);\n\n  // Move to the third corner\n  robot->moveToTcpPose(CORNER3_POSE[0], CORNER3_POSE[1], CORNER3_POSE[2],\n                       CORNER3_POSE[3], CORNER3_POSE[4], CORNER3_POSE[5], 1.05, 1.4);\n\n  robot->moveToTcpPose(CORNER3_POSE[0], CORNER3_POSE[1], CORNER3_POSE[2] - 0.05,\n                       CORNER3_POSE[3], CORNER3_POSE[4], CORNER3_POSE[5], 1.05, 1.4);\n\n  // Move to the fourth corner\n  robot->moveToTcpPose(CORNER4_POSE[0], CORNER4_POSE[1], CORNER4_POSE[2] - 0.05,\n                       CORNER4_POSE[3], CORNER4_POSE[4], CORNER4_POSE[5], 1.05, 1.4);\n\n  robot->moveToTcpPose(CORNER4_POSE[0], CORNER4_POSE[1], CORNER4_POSE[2],\n                       CORNER4_POSE[3], CORNER4_POSE[4], CORNER4_POSE[5], 1.05, 1.4);\n\n  // Move back to home\n  robot->moveToJointValues(HOME, 1.05, 1.4);\n\n  rclcpp::shutdown();\n  return 0;\n\n}\n"
  },
  {
    "path": "grasp_apps/fixed_position_pick/CMakeLists.txt",
    "content": "# Copyright (c) 2019 Intel Corporation\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#      http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\ncmake_minimum_required(VERSION 3.5)\nproject(fixed_position_pick)\n\nif(NOT CMAKE_CXX_STANDARD)\n  set(CMAKE_CXX_STANDARD 14)\nendif()\n\nif(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES \"Clang\")\n  add_compile_options(-Wall -Wextra -Wpedantic)\nendif()\n\nif(CMAKE_BUILD_TYPE EQUAL \"RELEASE\")\n  message(STATUS \"Create Release Build.\")\n  set(CMAKE_CXX_FLAGS \"-O2 ${CMAKE_CXX_FLAGS}\")\nelse()\n  message(STATUS \"Create Debug Build.\")\nendif()\n\nfind_package(ament_cmake REQUIRED)\nfind_package(rclcpp REQUIRED)\nfind_package(robot_interface REQUIRED)\n\ninclude_directories(\n  include\n  ${rclcpp_INCLUDE_DIRS}\n  ${robot_interface_INCLUDE_DIRS}\n)\n\n# draw_x app\nadd_executable(${PROJECT_NAME}\n  src/fixed_position_pick.cpp\n)\n\nament_target_dependencies(${PROJECT_NAME}\n  \"rclcpp\"\n  \"robot_interface\"\n)\n\ntarget_link_libraries(${PROJECT_NAME}\n  ${ament_LIBRARIES}\n  ${robot_interface_LIBRARIES}\n)\n\n# Install binaries\ninstall(TARGETS ${PROJECT_NAME}\n  RUNTIME DESTINATION bin\n)\ninstall(TARGETS ${PROJECT_NAME}\n  DESTINATION lib/${PROJECT_NAME}\n)\n\n# Install launch files.\ninstall(DIRECTORY\n  launch\n  DESTINATION share/${PROJECT_NAME}/\n)\n\n# Flags\nif(UNIX OR APPLE)\n  # Linker flags.\n  if(${CMAKE_CXX_COMPILER_ID} STREQUAL \"GNU\" OR ${CMAKE_CXX_COMPILER_ID} STREQUAL \"Intel\")\n    # GCC specific flags. ICC is compatible with them.\n    set(CMAKE_SHARED_LINKER_FLAGS \"${CMAKE_SHARED_LINKER_FLAGS} -z noexecstack -z relro -z now\")\n    set(CMAKE_EXE_LINKER_FLAGS \"${CMAKE_EXE_LINKER_FLAGS} -z noexecstack -z relro -z now\")\n  elseif(${CMAKE_CXX_COMPILER_ID} STREQUAL \"Clang\")\n    # In Clang, -z flags are not compatible, they need to be passed to linker via -Wl.\n    set(CMAKE_SHARED_LINKER_FLAGS \"${CMAKE_SHARED_LINKER_FLAGS} \\\n      -Wl,-z,noexecstack -Wl,-z,relro -Wl,-z,now\")\n    set(CMAKE_EXE_LINKER_FLAGS \"${CMAKE_EXE_LINKER_FLAGS} \\\n      -Wl,-z,noexecstack -Wl,-z,relro -Wl,-z,now\")\n  endif()\n\n  # Compiler flags.\n  if(${CMAKE_CXX_COMPILER_ID} STREQUAL \"GNU\")\n    # GCC specific flags.\n    if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER 4.9 OR\n        CMAKE_CXX_COMPILER_VERSION VERSION_EQUAL 4.9)\n      set(CMAKE_CXX_FLAGS \"${CMAKE_CXX_FLAGS} -fPIE -fstack-protector-strong\")\n    else()\n      set(CMAKE_CXX_FLAGS \"${CMAKE_CXX_FLAGS} -fPIE -fstack-protector\")\n    endif()\n  elseif(${CMAKE_CXX_COMPILER_ID} STREQUAL \"Clang\")\n    # Clang is compatbile with some of the flags.\n    set(CMAKE_CXX_FLAGS \"${CMAKE_CXX_FLAGS} -fPIE -fstack-protector\")\n  elseif(${CMAKE_CXX_COMPILER_ID} STREQUAL \"Intel\")\n    # Same as above, with exception that ICC compilation crashes with -fPIE option, even\n    # though it uses -pie linker option that require -fPIE during compilation. 
Checksec\n    # shows that it generates correct PIE anyway if only -pie is provided.\n    set(CMAKE_CXX_FLAGS \"${CMAKE_CXX_FLAGS} -fstack-protector\")\n  endif()\n\n  # Generic flags.\n  set(CMAKE_CXX_FLAGS \"${CMAKE_CXX_FLAGS} -fPIC -fno-operator-names -Wformat -Wformat-security \\\n    -Wall -fopenmp\")\n  set( CUDA_PROPAGATE_HOST_FLAGS OFF )\n  set(CMAKE_CXX_FLAGS_RELEASE \"${CMAKE_CXX_FLAGS_RELEASE} -D_FORTIFY_SOURCE=2\")\n  set(CMAKE_EXE_LINKER_FLAGS \"${CMAKE_EXE_LINKER_FLAGS} -pie\")\nendif()\n\nament_package()"
  },
  {
    "path": "grasp_apps/fixed_position_pick/launch/fixed_position_pick.launch.py",
    "content": "# Copyright (c) 2019 Intel Corporation. All Rights Reserved\n# \n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n# \n#     http://www.apache.org/licenses/LICENSE-2.0\n# \n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport os\n\nimport launch\nimport launch.actions\nimport launch.substitutions\nimport launch_ros.actions\nfrom ament_index_python.packages import get_package_share_directory\n\ndef generate_launch_description():\n\n    # .yaml file for configuring the parameters\n    yaml = os.path.join(\n        get_package_share_directory('fixed_position_pick'), \n            'launch', 'fixed_position_pick.yaml'\n    )\n\n    return launch.LaunchDescription([\n\n        launch_ros.actions.Node(\n            package='fixed_position_pick', \n            node_executable='fixed_position_pick', \n            output='screen', arguments=['__params:='+yaml]),\n\n    ])"
  },
  {
    "path": "grasp_apps/fixed_position_pick/launch/fixed_position_pick.yaml",
    "content": "robot_control:\n    ros__parameters:\n        host: \"192.168.0.5\"\n        shutdown_on_disconnect: true\n        joint_names: [\"shoulder_pan_joint\", \"shoulder_lift_joint\", \"elbow_joint\", \"wrist_1_joint\", \"wrist_2_joint\", \"wrist_3_joint\"]\n"
  },
  {
    "path": "grasp_apps/fixed_position_pick/package.xml",
    "content": "<?xml version=\"1.0\"?>\n<?xml-model href=\"http://download.ros.org/schema/package_format2.xsd\" schematypens=\"http://www.w3.org/2001/XMLSchema\"?>\n<package format=\"2\">\n  <name>fixed_position_pick</name>\n  <version>0.5.0</version>\n  <description>A demo app for draw_x</description>\n  <author email=\"yu.yan@intel.com\">Yu Yan</author>\n  <maintainer email=\"yu.yan@intel.com\">Yu Yan</maintainer>\n  <license>Apache License 2.0</license>\n\n  <buildtool_depend>ament_cmake</buildtool_depend>\n  <build_depend>rclcpp</build_depend>\n  <build_depend>robot_interface</build_depend>\n\n  <exec_depend>rclcpp</exec_depend>\n  <exec_depend>robot_interface</exec_depend>\n\n  <test_depend>ament_lint_auto</test_depend>\n  <test_depend>ament_lint_common</test_depend>\n\n  <export>\n    <build_type>ament_cmake</build_type>\n  </export>\n</package>"
  },
  {
    "path": "grasp_apps/fixed_position_pick/src/fixed_position_pick.cpp",
    "content": "#include <geometry_msgs/msg/pose_stamped.hpp>\n#include <rclcpp/rclcpp.hpp>\n#include <robot_interface/control_ur.hpp>\n\n/* pose in joint values*/\nstatic const std::vector<double> HOME = {0.87, -1.44, 1.68, -1.81, -1.56, 0};\n/* pose in [x, y, z, qx, qy, qz, qw]*/\nstatic const std::vector<double> PICK_POSE = { -0.157402, -0.679509, 0.094437, 0.190600, 0.948295, 0.239947, 0.082662};\nstatic const std::vector<double> PLACE_POSE = {-0.350, -0.296, 0.145, -0.311507, 0.950216, -0.004305, 0.005879};\n\nint main(int argc, char **argv)\n{\n  rclcpp::init(argc, argv);\n\n  // init robot control\n  auto robot = std::make_shared<URControl>(\"robot_control\",\n       rclcpp::NodeOptions().automatically_declare_parameters_from_overrides(true));\n  robot->parseArgs();\n  robot->startLoop();\n  rclcpp::sleep_for(2s);\n\n  // Move to home\n  robot->moveToJointValues(HOME, 1.05, 1.4);\n\n  // Pick\n  geometry_msgs::msg::PoseStamped pose_pick;\n  pose_pick.header.frame_id = \"base\";\n  pose_pick.header.stamp = robot->now();\n  pose_pick.pose.position.x = PICK_POSE[0];\n  pose_pick.pose.position.y = PICK_POSE[1];\n  pose_pick.pose.position.z = PICK_POSE[2];\n  pose_pick.pose.orientation.x = PICK_POSE[3];\n  pose_pick.pose.orientation.y = PICK_POSE[4];\n  pose_pick.pose.orientation.z = PICK_POSE[5];\n  pose_pick.pose.orientation.w = PICK_POSE[6];\n\n  robot->pick(pose_pick, 1.05, 1.4, 0.5, 0.1);\n\n  // Place\n  geometry_msgs::msg::PoseStamped pose_place;\n  pose_place.header.frame_id = \"base\";\n  pose_place.header.stamp = robot->now();\n  pose_place.pose.position.x = PLACE_POSE[0];\n  pose_place.pose.position.y = PLACE_POSE[1];\n  pose_place.pose.position.z = PLACE_POSE[2];\n  pose_place.pose.orientation.x = PLACE_POSE[3];\n  pose_place.pose.orientation.y = PLACE_POSE[4];\n  pose_place.pose.orientation.z = PLACE_POSE[5];\n  pose_place.pose.orientation.w = PLACE_POSE[6];\n\n  robot->place(pose_place, 1.05, 1.4, 0.5, 0.1);\n\n  // Move back to home\n  robot->moveToJointValues(HOME, 1.05, 1.4);\n\n  rclcpp::shutdown();\n  return 0;\n\n}"
  },
  {
    "path": "grasp_apps/random_pick/CMakeLists.txt",
    "content": "# Copyright (c) 2019 Intel Corporation\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#      http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\noption(BUILD_RANDOM_PICK \"build random_pick app\" ON)\nif(NOT BUILD_RANDOM_PICK STREQUAL \"ON\")\n  return()\nendif()\n\ncmake_minimum_required(VERSION 3.5)\nproject(random_pick)\n\nif(NOT CMAKE_CXX_STANDARD)\n  set(CMAKE_CXX_STANDARD 14)\nendif()\n\nif(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES \"Clang\")\n  add_compile_options(-Wall -Wextra -Wpedantic)\nendif()\n\nif(CMAKE_BUILD_TYPE EQUAL \"RELEASE\")\n  message(STATUS \"Create Release Build.\")\n  set(CMAKE_CXX_FLAGS \"-O2 ${CMAKE_CXX_FLAGS}\")\nelse()\n  message(STATUS \"Create Debug Build.\")\nendif()\n\nfind_package(ament_cmake REQUIRED)\nfind_package(rclcpp REQUIRED)\nfind_package(moveit_msgs REQUIRED)\nfind_package(robot_interface REQUIRED)\nfind_package(tf2_ros REQUIRED)\n\ninclude_directories(\n  include\n  ${rclcpp_INCLUDE_DIRS}\n  ${moveit_msgs_INCLUDE_DIRS}\n  ${robot_interface_INCLUDE_DIRS}\n  ${tf2_ros_INCLUDE_DIRS}\n)\n\n# random_pick app\nadd_executable(${PROJECT_NAME}\n  src/random_pick.cpp\n)\n\nament_target_dependencies(${PROJECT_NAME}\n  \"rclcpp\"\n  \"moveit_msgs\"\n  \"robot_interface\"\n  \"tf2_ros\"\n)\n\ntarget_link_libraries(${PROJECT_NAME}\n  ${ament_LIBRARIES}\n  ${robot_interface_LIBRARIES}\n)\n\n# Install binaries\ninstall(TARGETS ${PROJECT_NAME}\n  RUNTIME DESTINATION bin\n)\ninstall(TARGETS ${PROJECT_NAME}\n  DESTINATION lib/${PROJECT_NAME}\n)\n\n# Flags\nif(UNIX OR APPLE)\n  # Linker flags.\n  if(${CMAKE_CXX_COMPILER_ID} STREQUAL \"GNU\" OR ${CMAKE_CXX_COMPILER_ID} STREQUAL \"Intel\")\n    # GCC specific flags. 
ICC is compatible with them.\n    set(CMAKE_SHARED_LINKER_FLAGS \"${CMAKE_SHARED_LINKER_FLAGS} -z noexecstack -z relro -z now\")\n    set(CMAKE_EXE_LINKER_FLAGS \"${CMAKE_EXE_LINKER_FLAGS} -z noexecstack -z relro -z now\")\n  elseif(${CMAKE_CXX_COMPILER_ID} STREQUAL \"Clang\")\n    # In Clang, -z flags are not compatible, they need to be passed to linker via -Wl.\n    set(CMAKE_SHARED_LINKER_FLAGS \"${CMAKE_SHARED_LINKER_FLAGS} \\\n      -Wl,-z,noexecstack -Wl,-z,relro -Wl,-z,now\")\n    set(CMAKE_EXE_LINKER_FLAGS \"${CMAKE_EXE_LINKER_FLAGS} \\\n      -Wl,-z,noexecstack -Wl,-z,relro -Wl,-z,now\")\n  endif()\n\n  # Compiler flags.\n  if(${CMAKE_CXX_COMPILER_ID} STREQUAL \"GNU\")\n    # GCC specific flags.\n    if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER 4.9 OR\n        CMAKE_CXX_COMPILER_VERSION VERSION_EQUAL 4.9)\n      set(CMAKE_CXX_FLAGS \"${CMAKE_CXX_FLAGS} -fPIE -fstack-protector-strong\")\n    else()\n      set(CMAKE_CXX_FLAGS \"${CMAKE_CXX_FLAGS} -fPIE -fstack-protector\")\n    endif()\n  elseif(${CMAKE_CXX_COMPILER_ID} STREQUAL \"Clang\")\n    # Clang is compatbile with some of the flags.\n    set(CMAKE_CXX_FLAGS \"${CMAKE_CXX_FLAGS} -fPIE -fstack-protector\")\n  elseif(${CMAKE_CXX_COMPILER_ID} STREQUAL \"Intel\")\n    # Same as above, with exception that ICC compilation crashes with -fPIE option, even\n    # though it uses -pie linker option that require -fPIE during compilation. Checksec\n    # shows that it generates correct PIE anyway if only -pie is provided.\n    set(CMAKE_CXX_FLAGS \"${CMAKE_CXX_FLAGS} -fstack-protector\")\n  endif()\n\n  # Generic flags.\n  set(CMAKE_CXX_FLAGS \"${CMAKE_CXX_FLAGS} -fPIC -fno-operator-names -Wformat -Wformat-security \\\n    -Wall -fopenmp\")\n  set( CUDA_PROPAGATE_HOST_FLAGS OFF )\n  set(CMAKE_CXX_FLAGS_RELEASE \"${CMAKE_CXX_FLAGS_RELEASE} -D_FORTIFY_SOURCE=2\")\n  set(CMAKE_EXE_LINKER_FLAGS \"${CMAKE_EXE_LINKER_FLAGS} -pie\")\nendif()\n\nament_package()\n"
  },
  {
    "path": "grasp_apps/random_pick/cfg/random_pick.yaml",
    "content": "robot_control:\n    ros__parameters:\n        host: \"192.168.1.5\"\n"
  },
  {
    "path": "grasp_apps/random_pick/package.xml",
    "content": "<?xml version=\"1.0\"?>\n<?xml-model href=\"http://download.ros.org/schema/package_format2.xsd\" schematypens=\"http://www.w3.org/2001/XMLSchema\"?>\n<package format=\"2\">\n  <name>random_pick</name>\n  <version>0.5.0</version>\n  <description>A demo app for grasp detection, and random picking</description>\n  <author email=\"sharron.liu@intel.com\">Sharron LIU</author>\n  <maintainer email=\"sharron.liu@intel.com\">Sharron LIU</maintainer>\n  <license>Apache License 2.0</license>\n\n  <buildtool_depend>ament_cmake</buildtool_depend>\n  <build_depend>rclcpp</build_depend>\n  <build_depend>moveit_msgs</build_depend>\n  <build_depend>people_msgs</build_depend>\n  <build_depend>robot_interface</build_depend>\n  <build_depend>tf2_ros</build_depend>\n\n  <exec_depend>rclcpp</exec_depend>\n  <exec_depend>moveit_msgs</exec_depend>\n  <exec_depend>people_msgs</exec_depend>\n  <exec_depend>robot_interface</exec_depend>\n  <exec_depend>tf2_ros</exec_depend>\n\n  <test_depend>ament_lint_auto</test_depend>\n  <test_depend>ament_lint_common</test_depend>\n\n  <export>\n    <build_type>ament_cmake</build_type>\n  </export>\n</package>\n"
  },
  {
    "path": "grasp_apps/random_pick/src/random_pick.cpp",
    "content": "#include <geometry_msgs/msg/pose_stamped.hpp>\n#include <moveit_msgs/msg/move_it_error_codes.hpp>\n#include <moveit_msgs/msg/place_location.hpp>\n#include <moveit_msgs/srv/grasp_planning.hpp>\n#include <rclcpp/logger.hpp>\n#include <rclcpp/rclcpp.hpp>\n#include <robot_interface/control_ur.hpp>\n#include <tf2_ros/static_transform_broadcaster.h>\n\n#define robot_enable\n\nusing GraspPlanning = moveit_msgs::srv::GraspPlanning;\n/* pick position in [x, y, z, R, P, Y]*/\nstatic std::vector<double> pick_ = {0.0, -0.54, 0.145, 3.14, 0.0, 1.956};\n/* place position in [x, y, z, R, P, Y]*/\nstatic std::vector<double> place_ = {-0.50, -0.30, 0.20, 3.14, 0.0, 1.956};\n/* pre-pick position in joint values*/\nstatic std::vector<double> joint_values_pick = {1.065, -1.470, 1.477, -1.577, -1.556, 0};\n/* place position in joint values*/\nstatic std::vector<double> joint_values_place = {0.385, -1.470, 1.477, -1.577, -1.556, 0};\nstatic double vel_ = 0.9, acc_ = 0.9, vscale_ = 0.9, appr_ = 0.1;\nstatic std::shared_ptr<URControl> robot_ = nullptr;\nstatic rclcpp::Node::SharedPtr node_ = nullptr;\nstatic std::shared_ptr<GraspPlanning::Response> result_ = nullptr;\n\nint main(int argc, char **argv)\n{\n  rclcpp::init(argc, argv);\n\n  // init robot control\n  robot_ = std::make_shared<URControl>(\"robot_control\",\n    rclcpp::NodeOptions().automatically_declare_parameters_from_overrides(true));\n  robot_->parseArgs();\n  robot_->startLoop();\n  rclcpp::sleep_for(2s);\n\n#ifdef robot_enable\n  // reset joint\n  robot_->moveToJointValues(joint_values_place, vel_, acc_);\n#endif\n\n  // init random pick node\n  node_ = rclcpp::Node::make_shared(\"random_pick\");\n  tf2_ros::StaticTransformBroadcaster tfb(node_);\n  // create client for grasp planning\n  auto client = node_->create_client<GraspPlanning>(\"plan_grasps\");\n  // wait for service\n  while (!client->wait_for_service(5s)) {\n    RCLCPP_INFO(node_->get_logger(), \"Wait for service\");\n  }\n  RCLCPP_INFO(node_->get_logger(), \"Got service\");\n\n  while(rclcpp::ok())\n  {\n      // request grasp poses\n      auto request = std::make_shared<GraspPlanning::Request>();\n      auto result_future = client->async_send_request(request);\n      RCLCPP_INFO(node_->get_logger(), \"Request sent\");\n      // wait for response\n      if (rclcpp::spin_until_future_complete(node_, result_future) !=\n        rclcpp::executor::FutureReturnCode::SUCCESS)\n      {\n        continue;\n      }\n      // get response\n      if (moveit_msgs::msg::MoveItErrorCodes::SUCCESS == result_future.get()->error_code.val) {\n        result_ = result_future.get();\n\tRCLCPP_INFO(node_->get_logger(), \"Response received %d\", result_->error_code.val);\n      } else continue;\n\n      geometry_msgs::msg::PoseStamped p = result_->grasps[0].grasp_pose;\n      // publish grasp pose\n      tf2::Quaternion q(p.pose.orientation.x, p.pose.orientation.y, p.pose.orientation.z, p.pose.orientation.w);\n      double roll, pitch, yaw;\n      tf2::Matrix3x3 r;\n      r.setRotation(q);\n      r.getRPY(roll, pitch, yaw);\n      RCLCPP_INFO(node_->get_logger(), \"**********pick pose [position %f %f %f, quat %f %f %f %f, RPY %f %f %f]\",\n        p.pose.position.x, p.pose.position.y, p.pose.position.z,\n        p.pose.orientation.x, p.pose.orientation.y, p.pose.orientation.z, p.pose.orientation.w,\n        roll, pitch, yaw);\n      geometry_msgs::msg::TransformStamped tf_msg;\n      tf_msg.header = p.header;\n      tf_msg.child_frame_id = \"grasp_pose\";\n      
tf_msg.transform.translation.x = p.pose.position.x;\n      tf_msg.transform.translation.y = p.pose.position.y;\n      tf_msg.transform.translation.z = p.pose.position.z;\n      tf_msg.transform.rotation = p.pose.orientation;\n      tfb.sendTransform(tf_msg);\n\n#ifdef robot_enable\n      // pick\n      robot_->moveToJointValues(joint_values_pick, vel_, acc_);\n      robot_->pick(p, vel_, acc_, vscale_, appr_);\n      // place\n      robot_->moveToJointValues(joint_values_place, vel_, acc_);\n      robot_->place(place_[0], place_[1], place_[2], place_[3], place_[4], place_[5], vel_, acc_, vscale_, appr_);\n#endif\n  }\n\n  rclcpp::shutdown();\n  return 0;\n\n}\n"
  },
  {
    "path": "grasp_apps/recognize_pick/CMakeLists.txt",
    "content": "# Copyright (c) 2019 Intel Corporation\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#      http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\noption(BUILD_RECOGNIZE_PICK \"build recognize_pick app\" OFF)\nif(NOT BUILD_RECOGNIZE_PICK STREQUAL \"ON\")\n  return()\nendif()\n\ncmake_minimum_required(VERSION 3.5)\nproject(recognize_pick)\n\nif(NOT CMAKE_CXX_STANDARD)\n  set(CMAKE_CXX_STANDARD 14)\nendif()\n\nif(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES \"Clang\")\n  add_compile_options(-Wall -Wextra -Wpedantic)\nendif()\n\nif(CMAKE_BUILD_TYPE EQUAL \"RELEASE\")\n  message(STATUS \"Create Release Build.\")\n  set(CMAKE_CXX_FLAGS \"-O2 ${CMAKE_CXX_FLAGS}\")\nelse()\n  message(STATUS \"Create Debug Build.\")\nendif()\n\nfind_package(ament_cmake REQUIRED)\nfind_package(rclcpp REQUIRED)\nfind_package(moveit_msgs REQUIRED)\nfind_package(people_msgs REQUIRED)\nfind_package(robot_interface REQUIRED)\nfind_package(tf2_ros REQUIRED)\n\ninclude_directories(\n  include\n  ${rclcpp_INCLUDE_DIRS}\n  ${moveit_msgs_INCLUDE_DIRS}\n  ${people_msgs_INCLUDE_DIRS}\n  ${robot_interface_INCLUDE_DIRS}\n  ${tf2_ros_INCLUDE_DIRS}\n)\n\n# recognize_pick app\nadd_executable(${PROJECT_NAME}\n  src/recognize_pick.cpp\n)\n\nament_target_dependencies(${PROJECT_NAME}\n  \"rclcpp\"\n  \"moveit_msgs\"\n  \"people_msgs\"\n  \"robot_interface\"\n  \"tf2_ros\"\n)\n\ntarget_link_libraries(${PROJECT_NAME}\n  ${ament_LIBRARIES}\n  ${robot_interface_LIBRARIES}\n)\n\n# place publisher app\nset(PLACE_PUBLISHER place_publisher)\nadd_executable(${PLACE_PUBLISHER}\n  src/place_publisher.cpp\n)\n\nament_target_dependencies(${PLACE_PUBLISHER}\n  \"rclcpp\"\n  \"moveit_msgs\"\n)\n\ntarget_link_libraries(${PLACE_PUBLISHER}\n  ${ament_LIBRARIES}\n)\n\n# Install binaries\ninstall(TARGETS ${PROJECT_NAME} ${PLACE_PUBLISHER}\n  RUNTIME DESTINATION bin\n)\ninstall(TARGETS ${PROJECT_NAME} ${PLACE_PUBLISHER}\n  DESTINATION lib/${PROJECT_NAME}\n)\n\n# Flags\nif(UNIX OR APPLE)\n  # Linker flags.\n  if(${CMAKE_CXX_COMPILER_ID} STREQUAL \"GNU\" OR ${CMAKE_CXX_COMPILER_ID} STREQUAL \"Intel\")\n    # GCC specific flags. 
ICC is compatible with them.\n    set(CMAKE_SHARED_LINKER_FLAGS \"${CMAKE_SHARED_LINKER_FLAGS} -z noexecstack -z relro -z now\")\n    set(CMAKE_EXE_LINKER_FLAGS \"${CMAKE_EXE_LINKER_FLAGS} -z noexecstack -z relro -z now\")\n  elseif(${CMAKE_CXX_COMPILER_ID} STREQUAL \"Clang\")\n    # In Clang, -z flags are not compatible, they need to be passed to linker via -Wl.\n    set(CMAKE_SHARED_LINKER_FLAGS \"${CMAKE_SHARED_LINKER_FLAGS} \\\n      -Wl,-z,noexecstack -Wl,-z,relro -Wl,-z,now\")\n    set(CMAKE_EXE_LINKER_FLAGS \"${CMAKE_EXE_LINKER_FLAGS} \\\n      -Wl,-z,noexecstack -Wl,-z,relro -Wl,-z,now\")\n  endif()\n\n  # Compiler flags.\n  if(${CMAKE_CXX_COMPILER_ID} STREQUAL \"GNU\")\n    # GCC specific flags.\n    if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER 4.9 OR\n        CMAKE_CXX_COMPILER_VERSION VERSION_EQUAL 4.9)\n      set(CMAKE_CXX_FLAGS \"${CMAKE_CXX_FLAGS} -fPIE -fstack-protector-strong\")\n    else()\n      set(CMAKE_CXX_FLAGS \"${CMAKE_CXX_FLAGS} -fPIE -fstack-protector\")\n    endif()\n  elseif(${CMAKE_CXX_COMPILER_ID} STREQUAL \"Clang\")\n    # Clang is compatbile with some of the flags.\n    set(CMAKE_CXX_FLAGS \"${CMAKE_CXX_FLAGS} -fPIE -fstack-protector\")\n  elseif(${CMAKE_CXX_COMPILER_ID} STREQUAL \"Intel\")\n    # Same as above, with exception that ICC compilation crashes with -fPIE option, even\n    # though it uses -pie linker option that require -fPIE during compilation. Checksec\n    # shows that it generates correct PIE anyway if only -pie is provided.\n    set(CMAKE_CXX_FLAGS \"${CMAKE_CXX_FLAGS} -fstack-protector\")\n  endif()\n\n  # Generic flags.\n  set(CMAKE_CXX_FLAGS \"${CMAKE_CXX_FLAGS} -fPIC -fno-operator-names -Wformat -Wformat-security \\\n    -Wall -fopenmp\")\n  set( CUDA_PROPAGATE_HOST_FLAGS OFF )\n  set(CMAKE_CXX_FLAGS_RELEASE \"${CMAKE_CXX_FLAGS_RELEASE} -D_FORTIFY_SOURCE=2\")\n  set(CMAKE_EXE_LINKER_FLAGS \"${CMAKE_EXE_LINKER_FLAGS} -pie\")\nendif()\n\nament_package()\n"
  },
  {
    "path": "grasp_apps/recognize_pick/cfg/recognize_pick.yaml",
    "content": "robot_control:\n    ros__parameters:\n        host: \"192.168.1.5\"\n"
  },
  {
    "path": "grasp_apps/recognize_pick/package.xml",
    "content": "<?xml version=\"1.0\"?>\n<?xml-model href=\"http://download.ros.org/schema/package_format2.xsd\" schematypens=\"http://www.w3.org/2001/XMLSchema\"?>\n<package format=\"2\">\n  <name>recognize_pick</name>\n  <version>0.5.0</version>\n  <description>A demo app for object segmentation, grasp detection, and picking</description>\n  <author email=\"sharron.liu@intel.com\">Sharron LIU</author>\n  <maintainer email=\"sharron.liu@intel.com\">Sharron LIU</maintainer>\n  <license>Apache License 2.0</license>\n\n  <buildtool_depend>ament_cmake</buildtool_depend>\n  <build_depend>rclcpp</build_depend>\n  <build_depend>moveit_msgs</build_depend>\n  <build_depend>people_msgs</build_depend>\n  <build_depend>robot_interface</build_depend>\n  <build_depend>tf2_ros</build_depend>\n\n  <exec_depend>rclcpp</exec_depend>\n  <exec_depend>moveit_msgs</exec_depend>\n  <exec_depend>people_msgs</exec_depend>\n  <exec_depend>robot_interface</exec_depend>\n  <exec_depend>tf2_ros</exec_depend>\n\n  <test_depend>ament_lint_auto</test_depend>\n  <test_depend>ament_lint_common</test_depend>\n\n  <export>\n    <build_type>ament_cmake</build_type>\n  </export>\n</package>\n"
  },
  {
    "path": "grasp_apps/recognize_pick/src/place_publisher.cpp",
    "content": "// Copyright (c) 2019 Intel Corporation\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n//     http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n\n#include <rclcpp/logger.hpp>\n#include <rclcpp/rclcpp.hpp>\n#include <moveit_msgs/msg/place_location.hpp>\n\nint main(int argc, char ** argv) {\n  std::vector<std::string> args = rclcpp::init_and_remove_ros_arguments(argc, argv);\n  auto node = rclcpp::Node::make_shared(\"PlacePublisher\");\n  auto pub = node->create_publisher<moveit_msgs::msg::PlaceLocation>(\"/recognize_pick/place\", 10);\n\n  rclcpp::Clock clock(RCL_ROS_TIME);\n\n  moveit_msgs::msg::PlaceLocation p;\n  if (args.size() < 5) {\n    p.place_pose.pose.position.x = -0.45;\n    p.place_pose.pose.position.y = -0.30;\n    p.place_pose.pose.position.z = 0.125;\n  } else {\n    p.place_pose.pose.position.x = atof(args[2].c_str());\n    p.place_pose.pose.position.y = atof(args[3].c_str());\n    p.place_pose.pose.position.z = atof(args[4].c_str());\n  }\n  if (args.size() < 2) {\n    RCLCPP_INFO(node->get_logger(), \"Place publisher specifying object name and place position.\");\n    RCLCPP_INFO(node->get_logger(), \"Usage: place_publisher object_name [x y z]\");\n    RCLCPP_INFO(node->get_logger(), \"Example: place_publisher sports_ball\");\n    RCLCPP_INFO(node->get_logger(), \"Example: place_publisher sports_ball -0.45 -0.30 0.125\");\n    rclcpp::shutdown();\n    return 0;\n  } else {\n    p.id = args[1];\n  }\n\n  RCLCPP_INFO(node->get_logger(), \"place publisher %s [%f %f %f]\",\n    p.id.c_str(), p.place_pose.pose.position.x, p.place_pose.pose.position.y, p.place_pose.pose.position.z);\n\n  while (rclcpp::ok()) {\n    p.place_pose.header.stamp = clock.now();\n    p.place_pose.header.frame_id = \"base\";\n    pub->publish(p);\n    rclcpp::Rate(0.5).sleep();\n  }\n  rclcpp::spin(node);\n  rclcpp::shutdown();\n  return 0;\n}\n"
  },
  {
    "path": "grasp_apps/recognize_pick/src/recognize_pick.cpp",
    "content": "#include <geometry_msgs/msg/pose_stamped.hpp>\n#include <moveit_msgs/msg/move_it_error_codes.hpp>\n#include <moveit_msgs/msg/place_location.hpp>\n#include <moveit_msgs/srv/grasp_planning.hpp>\n#include <rclcpp/logger.hpp>\n#include <rclcpp/rclcpp.hpp>\n#include <robot_interface/control_ur.hpp>\n#include <tf2_ros/static_transform_broadcaster.h>\n\n#define robot_enable\n\nusing GraspPlanning = moveit_msgs::srv::GraspPlanning;\n/* pick position in [x, y, z, R, P, Y]*/\nstatic std::vector<double> pick_ = {0.0, -0.54, 0.145, 3.14, 0.0, 1.956};\n/* place position in [x, y, z, R, P, Y]*/\nstatic std::vector<double> place_ = {-0.45, -0.30, 0.125, 3.14, 0.0, 1.956};\n/* pre-pick position in joint values*/\nstatic std::vector<double> joint_values_pick = {1.065, -1.470, 1.477, -1.577, -1.556, 0};\n/* place position in joint values*/\nstatic std::vector<double> joint_values_place = {0.385, -1.470, 1.477, -1.577, -1.556, 0};\nstatic double vel_ = 0.4, acc_ = 0.4, vscale_ = 0.5, appr_ = 0.1;\nstatic std::shared_ptr<URControl> robot_ = nullptr;\nstatic rclcpp::Node::SharedPtr node_ = nullptr;\nstatic std::shared_ptr<GraspPlanning::Response> result_ = nullptr;\nstatic moveit_msgs::msg::PlaceLocation::SharedPtr place_pose_ = nullptr;\n\nstatic void place_callback(const moveit_msgs::msg::PlaceLocation::SharedPtr msg) {\n  place_pose_ = msg;\n}\n\nint main(int argc, char **argv)\n{\n  rclcpp::init(argc, argv);\n\n  // init robot control\n  robot_ = std::make_shared<URControl>(\"robot_control\",\n    rclcpp::NodeOptions().automatically_declare_parameters_from_overrides(true));\n  robot_->parseArgs();\n  robot_->startLoop();\n  rclcpp::sleep_for(2s);\n\n#ifdef robot_enable\n  // reset joint\n  robot_->moveToJointValues(joint_values_place, vel_, acc_);\n#endif\n\n  // init random pick node\n  node_ = rclcpp::Node::make_shared(\"random_pick\");\n  tf2_ros::StaticTransformBroadcaster tfb(node_);\n  // subscribe place callback\n  auto sub = node_->create_subscription<moveit_msgs::msg::PlaceLocation>(\n    \"/recognize_pick/place\", rclcpp::QoS(rclcpp::KeepLast(0)), place_callback);\n  // create client\n  auto client = node_->create_client<GraspPlanning>(\"plan_grasps\");\n  // wait for service\n  while (!client->wait_for_service(5s)) {\n    RCLCPP_INFO(node_->get_logger(), \"Wait for service\");\n  }\n  RCLCPP_INFO(node_->get_logger(), \"Got service\");\n\n  while(rclcpp::ok())\n  {\n    if (place_pose_ == nullptr) {\n      RCLCPP_INFO(node_->get_logger(), \"Wait for place mission\");\n      rclcpp::spin_some(node_);\n      rclcpp::sleep_for(std::chrono::seconds(2));\n      continue;\n    }\n      moveit_msgs::msg::PlaceLocation::SharedPtr place = place_pose_;\n      RCLCPP_INFO(node_->get_logger(), \"Place %s\", place->id.c_str());\n      // get grasp poses\n      auto request = std::make_shared<GraspPlanning::Request>();\n      request->target.id = place->id;\n\n      auto result_future = client->async_send_request(request);\n      RCLCPP_INFO(node_->get_logger(), \"Request sent\");\n      // wait for response\n      if (rclcpp::spin_until_future_complete(node_, result_future) !=\n        rclcpp::executor::FutureReturnCode::SUCCESS)\n      {\n        continue;\n      }\n      // get response\n      if (moveit_msgs::msg::MoveItErrorCodes::SUCCESS == result_future.get()->error_code.val) {\n        result_ = result_future.get();\n\tRCLCPP_INFO(node_->get_logger(), \"Response received %d\", result_->error_code.val);\n      } else continue;\n\n      geometry_msgs::msg::PoseStamped p = 
result_->grasps[0].grasp_pose;\n      // publish grasp pose\n      tf2::Quaternion q(p.pose.orientation.x, p.pose.orientation.y, p.pose.orientation.z, p.pose.orientation.w);\n      double roll, pitch, yaw;\n      tf2::Matrix3x3 r;\n      r.setRotation(q);\n      r.getRPY(roll, pitch, yaw);\n      RCLCPP_INFO(node_->get_logger(), \"**********pick pose [position %f %f %f, quat %f %f %f %f, RPY %f %f %f]\",\n        p.pose.position.x, p.pose.position.y, p.pose.position.z,\n        p.pose.orientation.x, p.pose.orientation.y, p.pose.orientation.z, p.pose.orientation.w,\n        roll, pitch, yaw);\n      geometry_msgs::msg::TransformStamped tf_msg;\n      tf_msg.header = p.header;\n      tf_msg.child_frame_id = \"grasp_pose\";\n      tf_msg.transform.translation.x = p.pose.position.x;\n      tf_msg.transform.translation.y = p.pose.position.y;\n      tf_msg.transform.translation.z = p.pose.position.z;\n      tf_msg.transform.rotation = p.pose.orientation;\n      tfb.sendTransform(tf_msg);\n\n#ifdef robot_enable\n      // pick\n      robot_->moveToJointValues(joint_values_pick, vel_, acc_);\n      robot_->pick(p, vel_, acc_, vscale_, appr_);\n      // place\n      robot_->moveToJointValues(joint_values_place, vel_, acc_);\n      robot_->place(place_[0], place_[1], place_[2], place_[3], place_[4], place_[5], vel_, acc_, vscale_, appr_);\n#endif\n    rclcpp::spin_some(node_);\n    place_pose_ = nullptr;\n  }\n\n  rclcpp::shutdown();\n  return 0;\n\n}\n"
  },
  {
    "path": "grasp_msgs/CMakeLists.txt",
    "content": "cmake_minimum_required(VERSION 3.5)\n\nproject(grasp_msgs)\n\nif(NOT CMAKE_CXX_STANDARD)\n  set(CMAKE_CXX_STANDARD 14)\nendif()\n\nif(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES \"Clang\")\n  add_compile_options(-Wall -Wextra -Wpedantic)\nendif()\n\nfind_package(ament_cmake REQUIRED)\nfind_package(rosidl_default_generators REQUIRED)\nfind_package(builtin_interfaces REQUIRED)\nfind_package(std_msgs REQUIRED)\nfind_package(geometry_msgs REQUIRED)\n\nset(msg_files\n  \"msg/GraspConfig.msg\"\n  \"msg/GraspConfigList.msg\"\n  \"msg/SamplesMsg.msg\"\n)\nrosidl_generate_interfaces(${PROJECT_NAME}\n  ${msg_files}\n  DEPENDENCIES builtin_interfaces std_msgs geometry_msgs\n  ADD_LINTER_TESTS\n)\n\nament_export_dependencies(rosidl_default_runtime)\n\nament_package()\n"
  },
  {
    "path": "grasp_msgs/msg/CloudIndexed.msg",
    "content": "# This message holds a point cloud and a list of indices into the point cloud \n# at which to sample grasp candidates.\n\n# The point cloud.\ngpd/CloudSources cloud_sources\n\n# The indices into the point cloud at which to sample grasp candidates.\nstd_msgs/Int64[] indices\n"
  },
  {
    "path": "grasp_msgs/msg/CloudSamples.msg",
    "content": "# This message holds a point cloud and a list of samples at which the grasp \n# detector should search for grasp candidates.\n\n# The point cloud.\ngpd/CloudSources cloud_sources\n\n# The samples, as (x,y,z) points, at which to search for grasp candidates. \ngeometry_msgs/Point[] samples\n"
  },
  {
    "path": "grasp_msgs/msg/CloudSources.msg",
    "content": "# This message holds a point cloud that can be a combination of point clouds \n# from different camera sources (at least one). For each point in the cloud, \n# this message also stores the index of the camera that produced the point.\n\n# The point cloud.\nsensor_msgs/PointCloud2 cloud\n\n# For each point in the cloud, the index of the camera that acquired the point.\nstd_msgs/Int64[] camera_source\n\n# A list of camera positions at which the point cloud was acquired.\ngeometry_msgs/Point[] view_points"
  },
  {
    "path": "grasp_msgs/msg/GraspConfig.msg",
    "content": "# This message describes a 2-finger grasp configuration by its 6-DOF pose, \n# consisting of a 3-DOF position and 3-DOF orientation, and the opening \n# width of the robot hand.\n\n# Position\ngeometry_msgs/Point bottom # centered bottom/base of the robot hand)\ngeometry_msgs/Point top # centered top/fingertip of the robot hand)\ngeometry_msgs/Point surface # centered position on object surface\n\n# Orientation represented as three axis (R = [approach binormal axis])\ngeometry_msgs/Vector3 approach # The grasp approach direction\ngeometry_msgs/Vector3 binormal # The binormal\ngeometry_msgs/Vector3 axis # The hand axis\n\ngeometry_msgs/Point sample # Point at which the grasp was found\n\nstd_msgs/Float32 width # Required aperture (opening width) of the robot hand \n\nstd_msgs/Float32 score # Score assigned to the grasp by the classifier\n"
  },
  {
    "path": "grasp_msgs/msg/GraspConfigList.msg",
    "content": "# This message stores a list of grasp configurations.\n\n# The time of acquisition, and the coordinate frame ID.\nstd_msgs/Header header\n\n# The list of grasp configurations.\ngrasp_msgs/GraspConfig[] grasps\n\n# Name of the known object these grasps associated to.\nstring object_name\n"
  },
  {
    "path": "grasp_msgs/msg/SamplesMsg.msg",
    "content": "# This message describes a set of point samples at which to detect grasps.\n\n# Header\nstd_msgs/Header header\n\n# The samples, as (x,y,z) points, at which to search for grasp candidates. \ngeometry_msgs/Point[] samples\n"
  },
  {
    "path": "grasp_msgs/package.xml",
    "content": "<?xml version=\"1.0\"?>\n<?xml-model href=\"http://download.ros.org/schema/package_format2.xsd\" schematypens=\"http://www.w3.org/2001/XMLSchema\"?>\n<package format=\"3\">\n  <name>grasp_msgs</name>\n  <version>0.5.0</version>\n  <description>ROS2 messages definitions for grasp library</description>\n  <maintainer email=\"sharron.liu@intel.com\">Sharron LIU</maintainer>\n  <license>Apache License 2.0</license>\n\n  <buildtool_depend>ament_cmake</buildtool_depend>\n  <buildtool_depend>rosidl_default_generators</buildtool_depend>\n  <build_depend>builtin_interfaces</build_depend>\n  <build_depend>std_msgs</build_depend>\n  <build_depend>geometry_msgs</build_depend>\n\n  <exec_depend>rosidl_default_runtime</exec_depend>\n  <exec_depend>builtin_interfaces</exec_depend>\n  <exec_depend>std_msgs</exec_depend>\n  <exec_depend>geometry_msgs</exec_depend>\n\n  <test_depend>ament_lint_common</test_depend>\n\n  <member_of_group>rosidl_interface_packages</member_of_group>\n\n  <export>\n    <build_type>ament_cmake</build_type>\n  </export>\n</package>\n"
  },
  {
    "path": "grasp_ros2/CMakeLists.txt",
    "content": "# Copyright (c) 2018 Intel Corporation\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#      http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\ncmake_minimum_required(VERSION 3.5)\nproject(grasp_ros2)\n\nif(NOT CMAKE_CXX_STANDARD)\n  set(CMAKE_CXX_STANDARD 14)\nendif()\n\nif(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES \"Clang\")\n  add_compile_options(-Wall -Wextra -Wpedantic)\nendif()\n\nif(CMAKE_BUILD_TYPE EQUAL \"RELEASE\")\n  message(STATUS \"Create Release Build.\")\n  set(CMAKE_CXX_FLAGS \"-O2 ${CMAKE_CXX_FLAGS}\")\nelse()\n  message(STATUS \"Create Debug Build.\")\nendif()\n\nif(BUILD_RECOGNIZE_PICK EQUAL \"ON\")\n  add_definitions(\"-DRECOGNIZE_PICK\")\nendif()\nfind_package(ament_cmake REQUIRED)\nfind_package(builtin_interfaces REQUIRED)\nfind_package(rclcpp REQUIRED)\nfind_package(rclcpp_components REQUIRED)\nfind_package(grasp_msgs REQUIRED)\nfind_package(sensor_msgs REQUIRED)\nfind_package(moveit_msgs)\nif(BUILD_RECOGNIZE_PICK STREQUAL \"ON\")\nfind_package(people_msgs)\nendif()\nfind_package(visualization_msgs)\nfind_package(tf2)\nfind_package(tf2_ros REQUIRED)\nfind_package(tf2_geometry_msgs REQUIRED)\nfind_package(trajectory_msgs REQUIRED)\nfind_package(pcl_conversions REQUIRED)\nfind_package(Eigen3 REQUIRED)\n\n# GPG\nfind_library(gpg_LIBRARIES grasp_candidates_generator)\nfind_path(gpg_INCLUDE_DIRS gpg/grasp.h)\n# GPD\nfind_library(gpd_LIBRARIES grasp_pose_detection)\nfind_path(gpd_INCLUDE_DIRS gpd/grasp_detector.h)\n\n# PCL\nfind_package(PCL 1.8.1 EXACT)\ninclude_directories(${PCL_INCLUDE_DIRS})\nlink_directories(${PCL_LIBRARY_DIRS})\nadd_definitions(${PCL_DEFINITIONS})\n\ninclude_directories(\n  include\n  ${rclcpp_INCLUDE_DIRS}\n  ${builtin_interfaces_INCLUDE_DIRS}\n  ${grasp_msgs_INCLUDE_DIRS}\n  ${sensor_msgs_INCLUDE_DIRS}\n  ${moveit_msgs_INCLUDE_DIRS}\nif(BUILD_RECOGNIZE_PICK STREQUAL \"ON\")\n  ${people_msgs_INCLUDE_DIRS}\nendif\n  ${tf2_geometry_msgs_INCLUDE_DIRS}\n  ${trajectory_msgs_INCLUDE_DIRS}\n  ${visualization_msgs_INCLUDE_DIRS}\n  ${pcl_conversions_INCLUDE_DIRS}\n  ${tf2_INCLUDE_DIRS}\n  ${gpg_INCLUDE_DIRS}\n  ${gpd_INCLUDE_DIRS}\n)\n\n# create ament index resource which references the libraries in the binary dir\nset(node_plugins \"\")\n\n# grasp detect\nset(libgrasp_detect \"grasp_detect\")\nadd_library(${libgrasp_detect} SHARED\n  src/consts.cpp\n  src/ros_params.cpp\n  src/grasp_detector_gpd.cpp)\ntarget_compile_definitions(${libgrasp_detect}\n  PRIVATE \"GRASP_ROS2_BUILDING_DLL\")\nament_target_dependencies(${libgrasp_detect}\n  \"class_loader\"\n  \"rclcpp\"\n  \"rclcpp_components\"\n  \"grasp_msgs\"\n  \"sensor_msgs\"\n  \"moveit_msgs\"\nif(BUILD_RECOGNIZE_PICK STREQUAL \"ON\")\n  \"people_msgs\"\nendif()\n  \"visualization_msgs\")\ntarget_link_libraries(${libgrasp_detect}\n  ${gpg_LIBRARIES}\n  ${gpd_LIBRARIES}\n  ${PCL_LIBRARIES}\n)\nrclcpp_components_register_nodes(${libgrasp_detect} \"grasp_ros2::GraspDetectorGPD\")\nset(node_plugins \"${node_plugins}grasp_ros2::GraspDetectorGPD;$<TARGET_FILE:${libgrasp_detect}>\\n\")\n\n# grasp plan\nset(libgrasp_plan 
\"grasp_plan\")\nadd_library(${libgrasp_plan} SHARED\n  src/consts.cpp\n  src/ros_params.cpp\n  src/grasp_planner.cpp)\ntarget_compile_definitions(${libgrasp_plan}\n  PRIVATE \"GRASP_ROS2_BUILDING_DLL\")\nament_target_dependencies(${libgrasp_plan}\n  \"class_loader\"\n  \"rclcpp\"\n  \"rclcpp_components\"\n  \"grasp_msgs\"\n  \"moveit_msgs\"\n  \"tf2\"\n  \"tf2_ros\"\n  \"tf2_geometry_msgs\"\n  \"trajectory_msgs\")\ntarget_link_libraries(${libgrasp_plan}\n)\nrclcpp_components_register_nodes(${libgrasp_plan} \"grasp_ros2::GraspPlanner\")\nset(node_plugins \"${node_plugins}grasp_ros2::GraspPlanner;$<TARGET_FILE:${libgrasp_plan}>\\n\")\n\nadd_executable(${PROJECT_NAME}\n  src/grasp_composition.cpp\n)\n\nament_target_dependencies(${PROJECT_NAME}\n  \"rclcpp\"\n  \"builtin_interfaces\"\n  \"grasp_msgs\"\n  \"sensor_msgs\"\n  \"moveit_msgs\"\n  \"visualization_msgs\"\n  \"tf2\"\n  \"tf2_ros\"\n  \"tf2_geometry_msgs\"\n  \"trajectory_msgs\"\n  \"pcl_conversions\"\n)\n\ntarget_link_libraries(${PROJECT_NAME}\n  ${ament_LIBRARIES}\n  ${gpg_LIBRARIES}\n  ${gpd_LIBRARIES}\n  ${PCL_LIBRARIES}\n  ${libgrasp_detect}\n  ${libgrasp_plan}\n)\n\n# Install libs\ninstall(TARGETS\n  ${libgrasp_detect}\n  ${libgrasp_plan}\n  ARCHIVE DESTINATION lib\n  LIBRARY DESTINATION lib\n  RUNTIME DESTINATION bin)\n\n# Install binaries\ninstall(TARGETS ${PROJECT_NAME}\n  RUNTIME DESTINATION bin\n)\ninstall(TARGETS ${PROJECT_NAME}\n  DESTINATION lib/${PROJECT_NAME}\n)\n\n# Install header files\ninstall(\n  DIRECTORY include/\n  DESTINATION include\n)\n\n# Flags\nif(UNIX OR APPLE)\n  # Linker flags.\n  if(${CMAKE_CXX_COMPILER_ID} STREQUAL \"GNU\" OR ${CMAKE_CXX_COMPILER_ID} STREQUAL \"Intel\")\n    # GCC specific flags. ICC is compatible with them.\n    set(CMAKE_SHARED_LINKER_FLAGS \"${CMAKE_SHARED_LINKER_FLAGS} -z noexecstack -z relro -z now\")\n    set(CMAKE_EXE_LINKER_FLAGS \"${CMAKE_EXE_LINKER_FLAGS} -z noexecstack -z relro -z now\")\n  elseif(${CMAKE_CXX_COMPILER_ID} STREQUAL \"Clang\")\n    # In Clang, -z flags are not compatible, they need to be passed to linker via -Wl.\n    set(CMAKE_SHARED_LINKER_FLAGS \"${CMAKE_SHARED_LINKER_FLAGS} \\\n      -Wl,-z,noexecstack -Wl,-z,relro -Wl,-z,now\")\n    set(CMAKE_EXE_LINKER_FLAGS \"${CMAKE_EXE_LINKER_FLAGS} \\\n      -Wl,-z,noexecstack -Wl,-z,relro -Wl,-z,now\")\n  endif()\n\n  # Compiler flags.\n  if(${CMAKE_CXX_COMPILER_ID} STREQUAL \"GNU\")\n    # GCC specific flags.\n    if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER 4.9 OR\n        CMAKE_CXX_COMPILER_VERSION VERSION_EQUAL 4.9)\n      set(CMAKE_CXX_FLAGS \"${CMAKE_CXX_FLAGS} -fPIE -fstack-protector-strong\")\n    else()\n      set(CMAKE_CXX_FLAGS \"${CMAKE_CXX_FLAGS} -fPIE -fstack-protector\")\n    endif()\n  elseif(${CMAKE_CXX_COMPILER_ID} STREQUAL \"Clang\")\n    # Clang is compatbile with some of the flags.\n    set(CMAKE_CXX_FLAGS \"${CMAKE_CXX_FLAGS} -fPIE -fstack-protector\")\n  elseif(${CMAKE_CXX_COMPILER_ID} STREQUAL \"Intel\")\n    # Same as above, with exception that ICC compilation crashes with -fPIE option, even\n    # though it uses -pie linker option that require -fPIE during compilation. 
Checksec\n    # shows that it generates correct PIE anyway if only -pie is provided.\n    set(CMAKE_CXX_FLAGS \"${CMAKE_CXX_FLAGS} -fstack-protector\")\n  endif()\n\n  # Generic flags.\n  set(CMAKE_CXX_FLAGS \"${CMAKE_CXX_FLAGS} -fPIC -fno-operator-names -Wformat -Wformat-security \\\n    -Wall -fopenmp\")\n  set( CUDA_PROPAGATE_HOST_FLAGS OFF )\n  set(CMAKE_CXX_FLAGS_RELEASE \"${CMAKE_CXX_FLAGS_RELEASE} -D_FORTIFY_SOURCE=2\")\n  set(CMAKE_EXE_LINKER_FLAGS \"${CMAKE_EXE_LINKER_FLAGS} -pie\")\nendif()\n\nif(BUILD_TESTING)\n  find_package(ament_lint_auto REQUIRED)\n  ament_lint_auto_find_test_dependencies()\n  add_subdirectory(tests)\nendif()\n\nament_package()\n"
  },
  {
    "path": "grasp_ros2/cfg/grasp_ros2_params.yaml",
    "content": "GraspDetectorGPD:\n  ros__parameters:\n    cloud_topic: /camera/pointcloud\n    #cloud_topic: /mechmind/color_point_cloud\n    rviz: true\n    device: 0 # 0:CPU, 1:GPU, 2:VPU\n    auto_mode: true\n    plane_remove: true\n    # grasp workspace in camera frames\n    workspace: [-0.21, 0.29, -0.22, 0.15, 0.0, 1.0] # Realsense\n    #workspace: [-0.16, 0.34, -0.26, 0.14, 1.4, 1.8] # Mechmind\n    # gripper geometry parameters in metre\n    # finger_width: the finger thickness\n    # hand_outer_diameter: the maximum robot hand aperture \n    # hand_depth: the hand depth (the finger length)\n    # hand_height: the finger breadth\n    finger_width: 0.005\n    hand_outer_diameter: 0.100\n    hand_depth: 0.038\n    hand_height: 0.020\nGraspPlanner:\n  ros__parameters:\n    grasp_score_threshold: 20\n    grasp_frame_id: \"camera_color_optical_frame\" # Realsense\n    #grasp_frame_id: \"mechmind_camera\" # Mechmind\n    grasp_offset: [0.000, 0.000, 0.0]\n    eef_offset: 0.174\n    eef_yaw_offset: 0.7854 # M_PI/4\n    finger_joint_names: [\"panda_finger_joint1\", \"panda_finger_joint2\"]\n"
  },
  {
    "path": "grasp_ros2/cfg/random_pick.yaml",
    "content": "GraspDetectorGPD:\n  ros__parameters:\n    #cloud_topic: /camera/pointcloud\n    cloud_topic: /mechmind/color_point_cloud\n    rviz: true\n    device: 0 # 0:CPU, 1:GPU, 2:VPU\n    auto_mode: true\n    plane_remove: true\n    # grasp workspace in camera frames\n    #workspace: [-0.21, 0.29, -0.22, 0.15, 0.0, 1.0] # Realsense\n    workspace: [-0.16, 0.28, -0.26, 0.14, 1.4, 1.65] # Mechmind\n    # gripper geometry parameters in metre\n    # finger_width: the finger thickness\n    # hand_outer_diameter: the maximum robot hand aperture \n    # hand_depth: the hand depth (the finger length)\n    # hand_height: the finger breadth\n    finger_width: 0.005\n    hand_outer_diameter: 0.100\n    hand_depth: 0.038\n    hand_height: 0.020\n    num_samples: 200\nGraspPlanner:\n  ros__parameters:\n    grasp_score_threshold: 0\n    grasp_frame_id: \"base\"\n    grasp_approach: [0.0, 0.0, -1.0]\n    grasp_approach_angle: 0.523 # 1.047=PI/3 # 0.785=PI/4 # 0.523=PI/6 # 0.345=PI/9 # acceptable approaching angle\n    grasp_offset: [0.004, 0.000, 0.0]\n    # grasp boundry in grasp_frame_id\n    grasp_boundry: [-0.2, 0.2, -0.65, -0.30, -0.15, 0.15]\n    eef_offset: 0.162\n    eef_yaw_offset: -0.7854 # M_PI/4\n    finger_joint_names: [\"panda_finger_joint1\", \"panda_finger_joint2\"]\n"
  },
  {
    "path": "grasp_ros2/cfg/recognize_pick.yaml",
    "content": "GraspDetectorGPD:\n  ros__parameters:\n    cloud_topic: /camera/pointcloud\n    # cloud_topic: \"/camera/aligned_depth_to_color/color/points\"\n    object_topic: \"/ros2_openvino_toolkit/segmented_obejcts\"\n    rviz: true\n    device: 1 # 0:CPU, 1:GPU, 2:VPU\n    auto_mode: false\n    plane_remove: true\n    object_detect: true\n    # grasp workspace in camera frames\n    workspace: [-0.23, 0.23, -0.33, 0.05, 0.0, 1.0]\n    # gripper geometry parameters in metre\n    # finger_width: the finger thickness\n    # hand_outer_diameter: the maximum robot hand aperture\n    # hand_depth: the hand depth (the finger length)\n    # hand_height: the finger breadth\n    finger_width: 0.005\n    hand_outer_diameter: 0.100\n    hand_depth: 0.038\n    hand_height: 0.020\nGraspPlanner:\n  ros__parameters:\n    grasp_score_threshold: 1\n    grasp_frame_id: \"base\"\n    grasp_approach: [0.0, 0.0, -1.0] # expect approaching in -z axis\n    grasp_approach_angle: 0.7 # 1.047=PI/3 # 0.785=PI/4 # 0.523=PI/6 # 0.345=PI/9 # acceptable approaching angle\n    grasp_offset: [0.006, -0.003, 0.000]\n    # grasp boundry in grasp_frame_id\n    grasp_boundry: [-0.2, 0.2, -0.65, -0.30, -0.15, 0.15]\n    eef_offset: 0.154\n    eef_yaw_offset: 0.7854 # M_PI/4\n    finger_joint_names: [\"panda_finger_joint1\", \"panda_finger_joint2\"]\n"
  },
  {
    "path": "grasp_ros2/cfg/test_grasp_ros2.yaml",
    "content": "GraspDetectorGPD:\n  ros__parameters:\n    cloud_topic: /camera/pointcloud\n    rviz: false\n    device: 0\n    auto_mode: false\nGraspPlanner:\n  ros__parameters:\n    grasp_score_threshold: 0\n    grasp_frame_id: \"camera_color_optical_frame\"\n"
  },
  {
    "path": "grasp_ros2/include/grasp_library/ros2/consts.hpp",
    "content": "// Copyright (c) 2019 Intel Corporation. All Rights Reserved\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n//     http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n\n#ifndef GRASP_LIBRARY__ROS2__CONSTS_HPP_\n#define GRASP_LIBRARY__ROS2__CONSTS_HPP_\n\n#include <string>\n\nnamespace grasp_ros2\n{\n\n/** Consts class\n *\n * \\brief A class contains global constatnts definition for grasp library.\n *\n */\nclass Consts\n{\npublic:\n  /** Topic name of \"PointCloud2\" message published by an RGBD sensor.*/\n  static const char kTopicPointCloud2[];\n  /** Topic name of \"detected objects\" message published by Object Detector.*/\n  static const char kTopicDetectedObjects[];\n  /** Topic name of \"detected grasps\" message published by this Grasp Detector.*/\n  static const char kTopicDetectedGrasps[];\n  /** Topic name of \"rviz grasps\" message published by this Grasp Detector.*/\n  static const char kTopicVisualGrasps[];\n  /** Topic name of \"tabletop pointcloud\" message published by this Grasp Detector.*/\n  static const char kTopicTabletop[];\n};\n\n}  // namespace grasp_ros2\n\n#endif  // GRASP_LIBRARY__ROS2__CONSTS_HPP_\n"
  },
  {
    "path": "grasp_ros2/include/grasp_library/ros2/grasp_detector_base.hpp",
    "content": "// Copyright (c) 2019 Intel Corporation. All Rights Reserved\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n//     http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n\n#ifndef GRASP_LIBRARY__ROS2__GRASP_DETECTOR_BASE_HPP_\n#define GRASP_LIBRARY__ROS2__GRASP_DETECTOR_BASE_HPP_\n\n#include <grasp_msgs/msg/grasp_config_list.hpp>\n#include <string>\n\nnamespace grasp_ros2\n{\n\n/** GraspCallback class\n *\n * \\brief Abstract base class for grasp callback.\n *\n * A grasp planner inherits from this class get called back for grasp detection resutls.\n */\nclass GraspCallback\n{\npublic:\n  /**\n   * \\brief Callback for grasp detection results.\n   *\n   * \\param msg Pointer to grasp detection results.\n   */\n  virtual void grasp_callback(const grasp_msgs::msg::GraspConfigList::SharedPtr msg) = 0;\n};\n\n/** GraspDetectorBase class\n *\n * \\brief A base class for detecting grasp poses from visual input.\n *\n * This class defines uniform interface for grasp library, regardless whichever algorithm\n * is used for grasp detection.\n */\nclass GraspDetectorBase\n{\npublic:\n  /**\n   * \\brief Constructor.\n   */\n  GraspDetectorBase()\n  : object_name_(\"\"), grasp_cb_(nullptr)\n  {\n  }\n\n  /**\n   * \\brief Destructor.\n   */\n  ~GraspDetectorBase()\n  {\n  }\n\n  /**\n   * \\brief Start grasp detection.\n   * When this function is called, GraspDetector starts processing visual input.\n   * \\param name Name of the object for which to detect grasps\n   */\n  void start(std::string name = \"\")\n  {\n    started_ = true;\n    object_name_ = name;\n  }\n\n  /**\n   * \\brief Stop grasp detection.\n   * When this function is called, GraspDetector stops processing visual input.\n   */\n  void stop()\n  {\n    started_ = false;\n  }\n\n  /**\n   * \\brief Register grasp callback function.\n   *\n   * \\param cb Callback function to be registered.\n   */\n  void add_callback(GraspCallback * cb)\n  {\n    grasp_cb_ = cb;\n  }\n\nprotected:\n  bool started_ = false;\n  std::string object_name_;\n  GraspCallback * grasp_cb_;\n};\n\n}  // namespace grasp_ros2\n\n#endif  // GRASP_LIBRARY__ROS2__GRASP_DETECTOR_BASE_HPP_\n"
  },
  {
    "path": "grasp_ros2/include/grasp_library/ros2/grasp_detector_gpd.hpp",
    "content": "// Copyright (c) 2018 Intel Corporation. All Rights Reserved\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n//     http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n\n#ifndef GRASP_LIBRARY__ROS2__GRASP_DETECTOR_GPD_HPP_\n#define GRASP_LIBRARY__ROS2__GRASP_DETECTOR_GPD_HPP_\n\n// ROS2\n#include <geometry_msgs/msg/point.hpp>\n#include <geometry_msgs/msg/vector3.hpp>\n#ifdef RECOGNIZE_PICK\n#include <people_msgs/msg/objects_in_masks.hpp>\n#endif\n#include <rclcpp/logger.hpp>\n#include <rclcpp/rclcpp.hpp>\n#include <sensor_msgs/msg/point_cloud2.hpp>\n#include <visualization_msgs/msg/marker_array.hpp>\n\n// PCL\n#include <pcl/common/common.h>\n#include <pcl/point_cloud.h>\n#include <pcl/point_types.h>\n\n// eigen\n#include <Eigen/Geometry>\n\n// GPG\n#include <gpg/cloud_camera.h>\n\n// this project (messages)\n#include <gpd/grasp_detector.h>\n#include <grasp_msgs/msg/grasp_config.hpp>\n#include <grasp_msgs/msg/grasp_config_list.hpp>\n\n// system\n#include <algorithm>\n#include <map>\n#include <string>\n#include <tuple>\n#include <vector>\n\n#include \"grasp_library/ros2/consts.hpp\"\n#include \"grasp_library/ros2/grasp_detector_base.hpp\"\n#include \"grasp_library/ros2/grasp_planner.hpp\"\n\nnamespace grasp_ros2\n{\n\ntypedef pcl::PointCloud<pcl::PointXYZRGBA> PointCloudRGBA;\ntypedef pcl::PointCloud<pcl::PointNormal> PointCloudPointNormal;\n\n\n/** GraspDetectorGPD class\n *\n * \\brief A ROS node that can detect grasp poses in a point cloud.\n *\n * This class is a ROS node that handles all the ROS topics.\n *\n*/\nclass GraspDetectorGPD : public rclcpp::Node, public GraspDetectorBase\n{\npublic:\n  /**\n   * \\brief Constructor.\n  */\n  explicit GraspDetectorGPD(const rclcpp::NodeOptions & options);\n\n  /**\n   * \\brief Destructor.\n  */\n  ~GraspDetectorGPD()\n  {\n    delete cloud_camera_;\n\n    // todo stop and delete threads\n  }\n\nprivate:\n  /**\n   * \\brief Run the ROS node. 
Loops while waiting for incoming ROS messages.\n   */\n  void onInit();\n\n  /**\n   * \\brief Detect grasp poses in a point cloud received from a ROS topic.\n   * \\return the list of grasp poses\n   */\n  std::vector<Grasp> detectGraspPosesInTopic();\n\n  /**\n   * \\brief Callback function for the ROS topic that contains the input point cloud.\n   * \\param msg the incoming ROS message\n   */\n  void cloud_callback(const sensor_msgs::msg::PointCloud2::SharedPtr msg);\n#ifdef RECOGNIZE_PICK\n  /**\n   * \\brief Callback function for the ROS topic that contains the detected and segmented objects\n   * \\param msg The detected objects message\n   */\n  void object_callback(const people_msgs::msg::ObjectsInMasks::SharedPtr msg);\n#endif\n  /**\n   * \\brief Create a ROS message that contains a list of grasp poses from a list of handles.\n   * \\param hands the list of grasps\n   * \\return the ROS message that contains the grasp poses\n   */\n  grasp_msgs::msg::GraspConfigList createGraspListMsg(const std::vector<Grasp> & hands);\n\n  /**\n   * \\brief Convert GPD Grasp into grasp message.\n   * \\param hand A GPD grasp\n   * \\return The Grasp message converted\n   */\n  grasp_msgs::msg::GraspConfig convertToGraspMsg(const Grasp & hand);\n\n  /**\n   * \\brief Convert GPD Grasps into visual grasp messages.\n   */\n  visualization_msgs::msg::MarkerArray convertToVisualGraspMsg(\n    const std::vector<Grasp> & hands,\n    double outer_diameter, double hand_depth, double finger_width, double hand_height,\n    const std::string & frame_id);\n\n  /**\n   * \\brief Create finger marker for visual grasp messages\n   */\n  visualization_msgs::msg::Marker createFingerMarker(\n    const Eigen::Vector3d & center,\n    const Eigen::Matrix3d & frame, double length, double width, double height, int id,\n    const std::string & frame_id);\n\n  /**\n   * \\brief Create hand base marker for visual grasp messages\n   */\n  visualization_msgs::msg::Marker createHandBaseMarker(\n    const Eigen::Vector3d & start,\n    const Eigen::Vector3d & end, const Eigen::Matrix3d & frame, double length, double height,\n    int id,\n    const std::string & frame_id);\n\n  /** Converts an Eigen Vector into a Point message. Todo ROS2 eigen_conversions*/\n  void pointEigenToMsg(const Eigen::Vector3d & e, geometry_msgs::msg::Point & m)\n  {\n    m.x = e(0);\n    m.y = e(1);\n    m.z = e(2);\n  }\n\n  /** Converts an Eigen Vector into a Vector message. 
Todo ROS2 eigen_conversions*/\n  void vectorEigenToMsg(const Eigen::Vector3d & e, geometry_msgs::msg::Vector3 & m)\n  {\n    m.x = e(0);\n    m.y = e(1);\n    m.z = e(2);\n  }\n\n  Eigen::Vector3d view_point_; /**< (input) view point of the camera onto the point cloud*/\n  /** stores point cloud with (optional) camera information and surface normals*/\n  CloudCamera * cloud_camera_;\n  std_msgs::msg::Header cloud_camera_header_; /**< stores header of the point cloud*/\n  /** status variables for received (input) messages*/\n  bool has_cloud_;\n  std::string frame_; /**< point cloud frame*/\n  bool auto_mode_; /**< grasp detection mode*/\n  bool plane_remove_; /**< whether enable object detection>*/\n#ifdef RECOGNIZE_PICK\n  /** the latest message on detected objects*/\n  people_msgs::msg::ObjectsInMasks::SharedPtr object_msg_;\n#endif\n  std::vector<double> grasp_ws_;\n\n  rclcpp::callback_group::CallbackGroup::SharedPtr callback_group_subscriber1_;\n  rclcpp::callback_group::CallbackGroup::SharedPtr callback_group_subscriber2_;\n  /** ROS2 subscriber for point cloud messages*/\n  rclcpp::Subscription<sensor_msgs::msg::PointCloud2>::SharedPtr cloud_sub_;\n#ifdef RECOGNIZE_PICK\n  /** ROS2 subscriber for object  messages*/\n  rclcpp::Subscription<people_msgs::msg::ObjectsInMasks>::SharedPtr object_sub_;\n#endif\n  /** ROS2 publisher for grasp list messages*/\n  rclcpp::Publisher<grasp_msgs::msg::GraspConfigList>::SharedPtr grasps_pub_;\n  /** ROS2 publisher for filtered point clouds*/\n  rclcpp::Publisher<sensor_msgs::msg::PointCloud2>::SharedPtr filtered_pub_;\n  /** ROS2 publisher for grasps in rviz (visualization)*/\n  rclcpp::Publisher<visualization_msgs::msg::MarkerArray>::SharedPtr grasps_rviz_pub_;\n\n  std::shared_ptr<GraspDetector> grasp_detector_; /**< used to run the grasp pose detection*/\n  GraspDetector::GraspDetectionParameters detection_param_; /**< grasp detector parameters*/\n  rclcpp::Logger logger_ = rclcpp::get_logger(\"GraspDetectorGPD\");\n  std::thread * detector_thread_; /**< thread for grasp detection*/\n};\n\n}  // namespace grasp_ros2\n\n#endif  // GRASP_LIBRARY__ROS2__GRASP_DETECTOR_GPD_HPP_\n"
  },
  {
    "path": "grasp_ros2/include/grasp_library/ros2/grasp_planner.hpp",
    "content": "// Copyright (c) 2018 Intel Corporation. All Rights Reserved\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n//     http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n\n#ifndef GRASP_LIBRARY__ROS2__GRASP_PLANNER_HPP_\n#define GRASP_LIBRARY__ROS2__GRASP_PLANNER_HPP_\n\n#include <rclcpp/logger.hpp>\n#include <rclcpp/rclcpp.hpp>\n#include <grasp_msgs/msg/grasp_config_list.hpp>\n#include <moveit_msgs/msg/grasp.h>\n#include <moveit_msgs/srv/grasp_planning.hpp>\n#include <tf2_geometry_msgs/tf2_geometry_msgs.h>\n#include <tf2_ros/transform_listener.h>\n#include <tf2_ros/static_transform_broadcaster.h>\n#include <trajectory_msgs/msg/joint_trajectory_point.hpp>\n\n#include <condition_variable>\n#include <deque>\n#include <map>\n#include <memory>\n#include <mutex>\n#include <string>\n#include <utility>\n#include <vector>\n\n#include \"grasp_library/ros2/grasp_detector_base.hpp\"\n\nnamespace grasp_ros2\n{\n\n/** GraspPlanner class\n *\n * \\brief A MoveIt grasp planner\n *\n * This class provide ROS service for MoveIt grasp planning. Grasp Planner drives grasp detection\n * and takes the results from Grasp Detector.\n*/\nclass GraspPlanner : public rclcpp::Node, public GraspCallback\n{\npublic:\n  struct GraspPlanningParameters\n  {\n    /** timeout in seconds for a service request waiting for grasp detection result*/\n    int grasp_service_timeout_;\n    /** minimum score expected for grasps returned from this service*/\n    int grasp_score_threshold_;\n    /** frame id expected for grasps returned from this service*/\n    std::string grasp_frame_id_;\n    /** approach direction in grasp_frame_id_ expected for grasps*/\n    tf2::Vector3 grasp_approach_;\n    /** maxmimum angle in radian acceptable between the expected 'approach_' and\n     * the real approach returned from this service*/\n    double grasp_approach_angle_;\n    /** offset [x, y, z] in metres applied to the grasps detected*/\n    std::vector<double> grasp_offset_;\n    /** boundry cube in grasp_frame_id_ expected for grasps returned from this service*/\n    std::vector<double> grasp_boundry_;\n    /** offset in metres from the gripper base (finger root) to the parent link of gripper*/\n    double eef_offset;\n    /** gripper yaw offset to its parent link, in radian (e.g. 
0.0, or M_PI/4)*/\n    double eef_yaw_offset;\n    /** minimum distance in metres for a grasp to approach and retreat*/\n    double grasp_min_distance_;\n    /** desired distance in metres for a grasp to approach and retreat*/\n    double grasp_desired_distance_;\n    /** joint names of gripper fingers*/\n    std::vector<std::string> finger_joint_names_;\n    /** trajectory points in 'open' status, for joints in the same order as 'finger_joint_names_'*/\n    trajectory_msgs::msg::JointTrajectoryPoint finger_points_open_;\n    /** trajectory points in 'close' status, for joints in the same order as 'finger_joint_names_'*/\n    trajectory_msgs::msg::JointTrajectoryPoint finger_points_close_;\n  };\n\n  /**\n   * \\brief Constructor.\n   * \\param grasp_detector Grasp Detector used by this planner.\n  */\n  explicit GraspPlanner(\n    const rclcpp::NodeOptions & options,\n    GraspDetectorBase * grasp_detector = nullptr);\n\n  /**\n   * \\brief Destructor.\n  */\n  ~GraspPlanner()\n  {\n    delete tfBuffer_;\n  }\n\n  void grasp_callback(const grasp_msgs::msg::GraspConfigList::SharedPtr msg);\n\n  /**\n   * \\brief Grasp planning service handler.\n   * When a grasp service request comes, Grasp Planner tells the Grasp Detector to start grasp\n   * detection, waits for grasp callback arrival or till a configurable timeout period, then stops\n   * grasp detection, skips grasps with low scores, transforms grasps into the specified frame_id\n   * (if TF available), applies the configured offset, skips grasps out of boundry, and returns the\n   * results via grasp service response.\n  */\n  void grasp_service(\n    const std::shared_ptr<rmw_request_id_t> request_header,\n    const std::shared_ptr<moveit_msgs::srv::GraspPlanning::Request> req,\n    const std::shared_ptr<moveit_msgs::srv::GraspPlanning::Response> res);\n\nprivate:\n  /**\n   * \\brief Transform a grasp from original frame to the 'grasp_frame_id_' frame.\n   * Keep 'to' grasp identical to 'from' grasp, in case of transform missing or failure.\n   * \\param from The grasp to transform.\n   * \\param to The transformed output.\n   * \\param header Message header for the frame of the 'from' grasp.\n   * \\return true if transformation success, otherwise false.\n   */\n  bool transform(\n    grasp_msgs::msg::GraspConfig & from, grasp_msgs::msg::GraspConfig & to,\n    const std_msgs::msg::Header & header);\n\n  /**\n   * \\brief Check if the grasp position is in boundary.\n   * \\param p Grasp position.\n   * \\return True if the grasp position in boundary, otherwise False.\n   */\n  bool check_boundry(const geometry_msgs::msg::Point & p);\n\n  /**\n   * \\brief Translate a grasp message to MoveIt message.\n   * 'Grasp.grasp_pose.pose.position' was translated from 'GraspConfig.bottom', which is the\n   * position closest to the 'parent_link' of the end-effector.\n   * \\param grasp Grasp message to be translated.\n   * \\header Message header for the frame where the 'grasp' was detected.\n   * \\return MoveIt message\n   */\n  moveit_msgs::msg::Grasp toMoveIt(\n    grasp_msgs::msg::GraspConfig & grasp,\n    const std_msgs::msg::Header & header);\n\n  std::mutex m_;\n  std::condition_variable cv_;\n  GraspPlanningParameters param_;\n  rclcpp::Logger logger_ = rclcpp::get_logger(\"GraspPlanner\");\n  /*buffer for grasps to be returned from this service*/\n  std::vector<moveit_msgs::msg::Grasp> moveit_grasps_;\n  rclcpp::callback_group::CallbackGroup::SharedPtr callback_group_subscriber3_;\n  
rclcpp::Service<moveit_msgs::srv::GraspPlanning>::SharedPtr grasp_srv_; /*grasp service*/\n  tf2_ros::Buffer * tfBuffer_; /*buffer for transformation listener*/\n  std::shared_ptr<tf2_ros::TransformListener> tfListener_; /*Transform listener*/\n  tf2_ros::StaticTransformBroadcaster tfBroadcaster_; /*grasp pose transformation broadcaster*/\n  GraspDetectorBase * grasp_detector_; /*grasp detector node*/\n};\n\n}  // namespace grasp_ros2\n\n#endif  // GRASP_LIBRARY__ROS2__GRASP_PLANNER_HPP_\n"
  },
  {
    "path": "grasp_ros2/include/grasp_library/ros2/ros_params.hpp",
    "content": "// Copyright (c) 2018 Intel Corporation. All Rights Reserved\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n//     http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n\n#ifndef GRASP_LIBRARY__ROS2__ROS_PARAMS_HPP_\n#define GRASP_LIBRARY__ROS2__ROS_PARAMS_HPP_\n\n// ROS2 core\n#include <rclcpp/rclcpp.hpp>\n\n// ROS2 projects\n#include <gpd/grasp_detector.h>\n#include \"grasp_library/ros2/grasp_planner.hpp\"\n\nnamespace grasp_ros2\n{\n\n/** ROSParameters class\n *\n * \\brief A class to bridge parameters passed from ROS.\n *\n*/\nclass ROSParameters\n{\npublic:\n  static void getDetectionParams(\n    rclcpp::Node * node,\n    GraspDetector::GraspDetectionParameters & param);\n  static void getPlanningParams(rclcpp::Node * Node, GraspPlanner::GraspPlanningParameters & param);\n};\n\n}  // namespace grasp_ros2\n\n#endif  // GRASP_LIBRARY__ROS2__ROS_PARAMS_HPP_\n"
  },
  {
    "path": "grasp_ros2/package.xml",
    "content": "<?xml version=\"1.0\"?>\n<?xml-model href=\"http://download.ros.org/schema/package_format2.xsd\" schematypens=\"http://www.w3.org/2001/XMLSchema\"?>\n<package format=\"2\">\n  <name>grasp_ros2</name>\n  <version>0.5.0</version>\n  <description>ROS2 grasp library as MoveIt plug-in</description>\n  <maintainer email=\"sharron.liu@intel.com\">Sharron LIU</maintainer>\n  <license>Apache License 2.0</license>\n\n  <buildtool_depend>ament_cmake</buildtool_depend>\n  <buildtool_depend>rosidl_default_generators</buildtool_depend>\n  <build_depend>builtin_interfaces</build_depend>\n  <build_depend>rclcpp</build_depend>\n  <build_depend>rclcpp_components</build_depend>\n  <build_depend>std_msgs</build_depend>\n  <build_depend>sensor_msgs</build_depend>\n  <build_depend>grasp_msgs</build_depend>\n  <build_depend>moveit_msgs</build_depend>\n  <build_depend>people_msgs</build_depend>\n  <build_depend>visualization_msgs</build_depend>\n  <build_depend>pcl_conversions</build_depend>\n  <build_depend>tf2</build_depend>\n  <build_depend>tf2_ros</build_depend>\n  <build_depend>tf2_geometry_msgs</build_depend>\n  <build_depend>trajectory_msgs</build_depend>\n\n  <exec_depend>rosidl_default_runtime</exec_depend>\n  <exec_depend>builtin_interfaces</exec_depend>\n  <exec_depend>rclcpp</exec_depend>\n  <exec_depend>rclcpp_components</exec_depend>\n  <exec_depend>std_msgs</exec_depend>\n  <exec_depend>sensor_msgs</exec_depend>\n  <exec_depend>grasp_msgs</exec_depend>\n  <exec_depend>moveit_msgs</exec_depend>\n  <exec_depend>people_msgs</exec_depend>\n  <exec_depend>visualization_msgs</exec_depend>\n  <exec_depend>pcl_conversions</exec_depend>\n  <exec_depend>tf2</exec_depend>\n  <exec_depend>tf2_ros</exec_depend>\n  <exec_depend>tf2_geometry_msgs</exec_depend>\n  <exec_depend>trajectory_msgs</exec_depend>\n\n  <test_depend>ament_lint_auto</test_depend>\n  <test_depend>ament_lint_common</test_depend>\n\n  <export>\n    <build_type>ament_cmake</build_type>\n  </export>\n</package>\n"
  },
  {
    "path": "grasp_ros2/src/consts.cpp",
    "content": "// Copyright (c) 2019 Intel Corporation. All Rights Reserved\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n//     http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n\n#include \"grasp_library/ros2/consts.hpp\"\n\nnamespace grasp_ros2\n{\n\nconst char Consts::kTopicPointCloud2[] = \"/camera/pointcloud\";\nconst char Consts::kTopicDetectedObjects[] = \"/ros2_openvino_toolkit/segmented_obejcts\";\nconst char Consts::kTopicDetectedGrasps[] = \"/grasp_library/clustered_grasps\";\nconst char Consts::kTopicVisualGrasps[] = \"/grasp_library/grasps_rviz\";\nconst char Consts::kTopicTabletop[] = \"/grasp_library/tabletop_points\";\n\n}  // namespace grasp_ros2\n"
  },
  {
    "path": "grasp_ros2/src/grasp_composition.cpp",
    "content": "// Copyright (c) 2019 Intel Corporation\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n//     http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n\n#include <rclcpp/rclcpp.hpp>\n\n#include <memory>\n\n#include \"grasp_library/ros2/grasp_detector_gpd.hpp\"\n#include \"grasp_library/ros2/grasp_planner.hpp\"\n\nusing GraspDetectorGPD = grasp_ros2::GraspDetectorGPD;\nusing GraspDetectorBase = grasp_ros2::GraspDetectorBase;\nusing GraspPlanner = grasp_ros2::GraspPlanner;\n\nint main(int argc, char ** argv)\n{\n  rclcpp::init(argc, argv);\n  rclcpp::executors::MultiThreadedExecutor exec;\n\n  auto detect_node = std::make_shared<GraspDetectorGPD>(\n    rclcpp::NodeOptions().automatically_declare_parameters_from_overrides(true));\n  exec.add_node(detect_node);\n  GraspDetectorBase * grasp_detector = dynamic_cast<GraspDetectorBase *>(detect_node.get());\n  auto plan_node = std::make_shared<GraspPlanner>(\n    rclcpp::NodeOptions().automatically_declare_parameters_from_overrides(true), grasp_detector);\n  exec.add_node(plan_node);\n  exec.spin();\n\n  detect_node = nullptr;\n  plan_node = nullptr;\n  rclcpp::shutdown();\n  return 0;\n}\n"
  },
  {
    "path": "grasp_ros2/src/grasp_detector_gpd.cpp",
    "content": "// Copyright (c) 2019 Intel Corporation. All Rights Reserved\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n//     http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n\n#include <pcl_conversions/pcl_conversions.h>\n#include <pcl/filters/passthrough.h>\n#include <pcl/filters/crop_box.h>\n#include <pcl/filters/extract_indices.h>\n#include <limits>\n#include <memory>\n#include <string>\n#include <thread>\n#include <vector>\n#include \"grasp_library/ros2/grasp_detector_gpd.hpp\"\n#include \"grasp_library/ros2/ros_params.hpp\"\n\nnamespace grasp_ros2\n{\n\nGraspDetectorGPD::GraspDetectorGPD(const rclcpp::NodeOptions & options)\n: Node(\"GraspDetectorGPD\", options),\n  GraspDetectorBase(), cloud_camera_(NULL), has_cloud_(false), frame_(\"\"),\n#ifdef RECOGNIZE_PICK\n  object_msg_(nullptr), object_sub_(nullptr),\n#endif\n  filtered_pub_(nullptr), grasps_rviz_pub_(nullptr)\n{\n  std::vector<double> camera_position;\n  this->get_parameter_or(\"camera_position\", camera_position,\n    std::vector<double>(std::initializer_list<double>({0, 0, 0})));\n  view_point_ << camera_position[0], camera_position[1], camera_position[2];\n  this->get_parameter_or(\"auto_mode\", auto_mode_, true);\n  std::string cloud_topic, grasp_topic, rviz_topic, tabletop_topic, object_topic;\n  this->get_parameter_or(\"cloud_topic\", cloud_topic,\n    std::string(Consts::kTopicPointCloud2));\n  bool rviz, object_detect;\n  this->get_parameter_or(\"rviz\", rviz, false);\n  this->get_parameter_or(\"plane_remove\", plane_remove_, false);\n  this->get_parameter_or(\"object_detect\", object_detect, false);\n\n  callback_group_subscriber1_ = this->create_callback_group(\n    rclcpp::callback_group::CallbackGroupType::MutuallyExclusive);\n  auto sub1_opt = rclcpp::SubscriptionOptions();\n  sub1_opt.callback_group = callback_group_subscriber1_;\n\n  auto callback = [this](const sensor_msgs::msg::PointCloud2::SharedPtr msg) -> void {\n      this->cloud_callback(msg);\n    };\n  cloud_sub_ =\n    this->create_subscription<sensor_msgs::msg::PointCloud2>(cloud_topic,\n      rclcpp::QoS(10), callback, sub1_opt);\n\n  grasps_pub_ = this->create_publisher<grasp_msgs::msg::GraspConfigList>(\n    Consts::kTopicDetectedGrasps, 10);\n  if (rviz) {\n    grasps_rviz_pub_ = this->create_publisher<visualization_msgs::msg::MarkerArray>(\n      Consts::kTopicVisualGrasps, 10);\n    filtered_pub_ = this->create_publisher<sensor_msgs::msg::PointCloud2>(\n      Consts::kTopicTabletop, 10);\n  }\n#ifdef RECOGNIZE_PICK\n  if (object_detect) {\n    callback_group_subscriber2_ = this->create_callback_group(\n      rclcpp::callback_group::CallbackGroupType::MutuallyExclusive);\n    auto sub2_opt = rclcpp::SubscriptionOptions();\n    sub2_opt.callback_group = callback_group_subscriber2_;\n\n    this->get_parameter_or(\"object_topic\", object_topic,\n      std::string(Consts::kTopicDetectedObjects));\n    auto callback = [this](const people_msgs::msg::ObjectsInMasks::SharedPtr msg) -> void {\n        this->object_callback(msg);\n      };\n    object_sub_ =\n      
this->create_subscription<people_msgs::msg::ObjectsInMasks>(object_topic,\n        rclcpp::QoS(10), callback, sub2_opt);\n  }\n#endif\n  // GraspDetector::GraspDetectionParameters detection_param;\n  ROSParameters::getDetectionParams(this, detection_param_);\n  grasp_detector_ = std::make_shared<GraspDetector>(detection_param_);\n  RCLCPP_INFO(logger_, \"ROS2 Grasp Library node up...\");\n\n  detector_thread_ = new std::thread(&GraspDetectorGPD::onInit, this);\n  detector_thread_->detach();\n}\n\nvoid GraspDetectorGPD::onInit()\n{\n  rclcpp::Rate rate(100);\n  RCLCPP_INFO(logger_, \"Waiting for point cloud to arrive ...\");\n\n  while (rclcpp::ok()) {\n    if (has_cloud_) {\n      // detect grasps in point cloud\n      std::vector<Grasp> grasps = detectGraspPosesInTopic();\n      // visualize grasps in rviz\n      if (grasps_rviz_pub_) {\n        const HandSearch::Parameters & params = grasp_detector_->getHandSearchParameters();\n        grasps_rviz_pub_->publish(convertToVisualGraspMsg(grasps, params.hand_outer_diameter_,\n          params.hand_depth_,\n          params.finger_width_, params.hand_height_, frame_));\n      }\n\n      // reset the system\n      has_cloud_ = false;\n      RCLCPP_INFO(logger_, \"Waiting for point cloud to arrive ...\");\n    }\n\n    // rclcpp::spin(shared_from_this());\n    rate.sleep();\n  }\n}\n\nstd::vector<Grasp> GraspDetectorGPD::detectGraspPosesInTopic()\n{\n  // detect grasp poses\n  std::vector<Grasp> grasps;\n\n  {\n    // preprocess the point cloud\n    grasp_detector_->preprocessPointCloud(*cloud_camera_);\n    // detect grasps in the point cloud\n    grasps = grasp_detector_->detectGrasps(*cloud_camera_);\n  }\n\n  // Publish the selected grasps.\n  grasp_msgs::msg::GraspConfigList selected_grasps_msg = createGraspListMsg(grasps);\n  if (grasp_cb_) {\n    grasp_cb_->grasp_callback(\n      std::make_shared<grasp_msgs::msg::GraspConfigList>(selected_grasps_msg));\n  }\n  grasps_pub_->publish(selected_grasps_msg);\n  RCLCPP_INFO(logger_, \"Published %d highest-scoring grasps.\", selected_grasps_msg.grasps.size());\n\n  return grasps;\n}\n\nvoid GraspDetectorGPD::cloud_callback(const sensor_msgs::msg::PointCloud2::SharedPtr msg)\n{\n  if (!auto_mode_ && !started_) {return;}\n#ifdef RECOGNIZE_PICK\n  people_msgs::msg::ObjectsInMasks::SharedPtr object_msg;\n  if (object_sub_) {\n    if (object_name_.empty()) {\n      RCLCPP_INFO(logger_, \"Waiting for object name...\");\n      return;\n    }\n    object_msg = object_msg_;\n    object_msg_ = nullptr;\n    if (nullptr == object_msg || object_msg->objects_vector.empty()) {\n      RCLCPP_INFO(logger_, \"Waiting for object callback...\");\n      return;\n    }\n  }\n#endif\n  RCLCPP_DEBUG(logger_, \"PCD callback...\");\n  if (!has_cloud_) {\n    delete cloud_camera_;\n    cloud_camera_ = NULL;\n    Eigen::Matrix3Xd view_points(3, 1);\n    view_points.col(0) = view_point_;\n\n    if (msg->fields.size() == 6 && msg->fields[3].name == \"normal_x\" &&\n      msg->fields[4].name == \"normal_y\" &&\n      msg->fields[5].name == \"normal_z\")\n    {\n      PointCloudPointNormal::Ptr cloud(new PointCloudPointNormal);\n      pcl::fromROSMsg(*msg, *cloud);\n      cloud_camera_ = new CloudCamera(cloud, 0, view_points);\n      cloud_camera_header_ = msg->header;\n    } else {\n      PointCloudRGBA::Ptr cloud(new PointCloudRGBA);\n      pcl::fromROSMsg(*msg, *cloud);\n\n      // filter workspace\n      for (uint32_t i = 0; i < cloud->size(); i++) {\n        if (cloud->points[i].x > detection_param_.workspace_[0] && 
cloud->points[i].x < detection_param_.workspace_[1] &&\n            cloud->points[i].y > detection_param_.workspace_[2] && cloud->points[i].y < detection_param_.workspace_[3] &&\n            cloud->points[i].z > detection_param_.workspace_[4] && cloud->points[i].z < detection_param_.workspace_[5]) {\n          continue;\n        } else {\n          cloud->points[i].x = std::numeric_limits<float>::quiet_NaN();\n          cloud->points[i].y = std::numeric_limits<float>::quiet_NaN();\n          cloud->points[i].z = std::numeric_limits<float>::quiet_NaN();\n        }\n      }\n\n      // remove table plane\n      if (plane_remove_) {\n        pcl::ModelCoefficients::Ptr coefficients(new pcl::ModelCoefficients);\n        pcl::PointIndices::Ptr inliers(new pcl::PointIndices);\n        pcl::SACSegmentation<pcl::PointXYZRGBA> seg;\n        seg.setOptimizeCoefficients(true);\n        seg.setModelType(pcl::SACMODEL_PLANE);\n        seg.setMethodType(pcl::SAC_RANSAC);\n        seg.setDistanceThreshold(0.025);\n        seg.setInputCloud(cloud);\n        seg.segment(*inliers, *coefficients);\n        for (size_t i = 0; i < inliers->indices.size(); ++i) {\n          cloud->points[inliers->indices[i]].x = std::numeric_limits<float>::quiet_NaN();\n          cloud->points[inliers->indices[i]].y = std::numeric_limits<float>::quiet_NaN();\n          cloud->points[inliers->indices[i]].z = std::numeric_limits<float>::quiet_NaN();\n        }\n      }\n#ifdef RECOGNIZE_PICK\n      // filter object location\n      if (object_sub_) {\n        bool found = false;\n        for (auto obj : object_msg->objects_vector) {\n          if (0 == obj.object_name.compare(object_name_)) {\n            RCLCPP_INFO(logger_, \"obj name %s prob %f roi [%d %d %d %d] %d %d\",\n              obj.object_name.c_str(), obj.probability, obj.roi.x_offset, obj.roi.y_offset,\n              obj.roi.width, obj.roi.height, msg->width, msg->height);\n            std::vector<int> indices;\n            for (size_t i = 0; i < obj.roi.height; i++) {  // rows\n              int idx = (i + obj.roi.y_offset) * msg->width + obj.roi.x_offset;\n              for (size_t j = 0; j < obj.roi.width; j++) {  // columns\n                // todo use mask_array from from object msg\n                if (!isnan(cloud->points[idx + j].x) &&\n                  !isnan(cloud->points[idx + j].y) &&\n                  !isnan(cloud->points[idx + j].z))\n                {\n                  indices.push_back(idx + j);\n                }\n              }\n            }\n            pcl::ExtractIndices<pcl::PointXYZRGBA> filter;\n            filter.setInputCloud(cloud);\n            filter.setIndices(boost::make_shared<std::vector<int>>(indices));\n            filter.filter(*cloud);\n            Eigen::Matrix3Xf xyz =\n              cloud->getMatrixXfMap(3, sizeof(pcl::PointXYZRGBA) / sizeof(float), 0);\n            RCLCPP_INFO(logger_, \"*************** %f %f, %f %f, %f %f\",\n              xyz.row(0).minCoeff(), xyz.row(0).maxCoeff(),\n              xyz.row(1).minCoeff(), xyz.row(1).maxCoeff(),\n              xyz.row(2).minCoeff(), xyz.row(2).maxCoeff());\n            grasp_ws_ = {xyz.row(0).minCoeff(), xyz.row(0).maxCoeff(),\n              xyz.row(1).minCoeff(), xyz.row(1).maxCoeff(),\n              xyz.row(2).minCoeff(), xyz.row(2).maxCoeff()};\n            found = true;\n            break;\n          }\n        }\n        if (!found) {return;}\n      }\n#endif\n      if (filtered_pub_) {\n        sensor_msgs::msg::PointCloud2 msg2;\n        pcl::toROSMsg(*cloud, 
msg2);\n        // workaround rviz rgba\n        msg2.fields[3].name = \"rgb\";\n        msg2.fields[3].datatype = 7;\n        filtered_pub_->publish(msg2);\n      }\n      cloud_camera_ = new CloudCamera(cloud, 0, view_points);\n      cloud_camera_header_ = msg->header;\n    }\n    RCLCPP_INFO(logger_, \"Received cloud with %d points and normals.\",\n      cloud_camera_->getCloudProcessed()->size());\n\n    has_cloud_ = true;\n    frame_ = msg->header.frame_id;\n  }\n}\n#ifdef RECOGNIZE_PICK\nvoid GraspDetectorGPD::object_callback(const people_msgs::msg::ObjectsInMasks::SharedPtr msg)\n{\n  RCLCPP_INFO(logger_, \"Object callback *************************[%d]\", msg->objects_vector.size());\n  for (auto obj : msg->objects_vector) {\n    RCLCPP_INFO(logger_, \"obj name %s prob %f roi[%d %d %d %d]\",\n      obj.object_name.c_str(), obj.probability,\n      obj.roi.x_offset, obj.roi.y_offset, obj.roi.width, obj.roi.height);\n    if (0 == obj.object_name.compare(\"orange\")) {\n      for (size_t i = 0; i < obj.roi.height; i++) {       // rows\n        // std::cout << \"\\n\";\n        for (size_t j = 0; j < obj.roi.width; j++) {       // columns\n          // int a = obj.mask_array[i * obj.roi.width + j] * 10;\n          // if (a>5) std::cout << a; else std::cout << \"*\";\n        }\n      }\n    }\n  }\n  if (msg->objects_vector.size() > 0) {\n    object_msg_ = msg;\n  }\n}\n#endif\ngrasp_msgs::msg::GraspConfigList GraspDetectorGPD::createGraspListMsg(\n  const std::vector<Grasp> & hands)\n{\n  grasp_msgs::msg::GraspConfigList msg;\n\n  for (uint32_t i = 0; i < hands.size(); i++) {\n    msg.grasps.push_back(convertToGraspMsg(hands[i]));\n  }\n\n  msg.header = cloud_camera_header_;\n  msg.object_name = object_name_;\n\n  return msg;\n}\n\ngrasp_msgs::msg::GraspConfig GraspDetectorGPD::convertToGraspMsg(const Grasp & hand)\n{\n  grasp_msgs::msg::GraspConfig msg;\n  pointEigenToMsg(hand.getGraspBottom(), msg.bottom);\n  pointEigenToMsg(hand.getGraspTop(), msg.top);\n  pointEigenToMsg(hand.getGraspSurface(), msg.surface);\n  vectorEigenToMsg(hand.getApproach(), msg.approach);\n  vectorEigenToMsg(hand.getBinormal(), msg.binormal);\n  vectorEigenToMsg(hand.getAxis(), msg.axis);\n  msg.width.data = hand.getGraspWidth();\n  msg.score.data = hand.getScore();\n  pointEigenToMsg(hand.getSample(), msg.sample);\n\n  return msg;\n}\n\nvisualization_msgs::msg::MarkerArray GraspDetectorGPD::convertToVisualGraspMsg(\n  const std::vector<Grasp> & hands,\n  double outer_diameter, double hand_depth, double finger_width, double hand_height,\n  const std::string & frame_id)\n{\n  double width = outer_diameter;\n  double hw = 0.5 * width;\n\n  visualization_msgs::msg::MarkerArray marker_array;\n  visualization_msgs::msg::Marker left_finger, right_finger, base, approach;\n  Eigen::Vector3d left_bottom, right_bottom, left_top, right_top, left_center, right_center,\n    approach_center,\n    base_center;\n\n  for (uint32_t i = 0; i < hands.size(); i++) {\n    left_bottom = hands[i].getGraspBottom() - (hw - 0.5 * finger_width) * hands[i].getBinormal();\n    right_bottom = hands[i].getGraspBottom() + (hw - 0.5 * finger_width) * hands[i].getBinormal();\n    left_top = left_bottom + hand_depth * hands[i].getApproach();\n    right_top = right_bottom + hand_depth * hands[i].getApproach();\n    left_center = left_bottom + 0.5 * (left_top - left_bottom);\n    right_center = right_bottom + 0.5 * (right_top - right_bottom);\n    base_center = left_bottom + 0.5 * (right_bottom - left_bottom) - 0.01 * hands[i].getApproach();\n    
approach_center = base_center - 0.04 * hands[i].getApproach();\n\n    base = createHandBaseMarker(left_bottom, right_bottom,\n        hands[i].getFrame(), 0.02, hand_height, i, frame_id);\n    left_finger = createFingerMarker(left_center,\n        hands[i].getFrame(), hand_depth, finger_width, hand_height, i * 3, frame_id);\n    right_finger = createFingerMarker(right_center,\n        hands[i].getFrame(), hand_depth, finger_width, hand_height, i * 3 + 1, frame_id);\n    approach = createFingerMarker(approach_center,\n        hands[i].getFrame(), 0.08, finger_width, hand_height, i * 3 + 2, frame_id);\n\n    marker_array.markers.push_back(left_finger);\n    marker_array.markers.push_back(right_finger);\n    marker_array.markers.push_back(approach);\n    marker_array.markers.push_back(base);\n  }\n\n  return marker_array;\n}\n\nvisualization_msgs::msg::Marker GraspDetectorGPD::createFingerMarker(\n  const Eigen::Vector3d & center,\n  const Eigen::Matrix3d & frame, double length, double width, double height, int id,\n  const std::string & frame_id)\n{\n  visualization_msgs::msg::Marker marker;\n  marker.header.frame_id = frame_id;\n  marker.header.stamp = rclcpp::Clock(RCL_ROS_TIME).now();\n  marker.ns = \"finger\";\n  marker.id = id;\n  marker.type = visualization_msgs::msg::Marker::CUBE;\n  marker.action = visualization_msgs::msg::Marker::ADD;\n  marker.pose.position.x = center(0);\n  marker.pose.position.y = center(1);\n  marker.pose.position.z = center(2);\n  marker.lifetime = rclcpp::Duration(20.0, 0);\n\n  // use orientation of hand frame\n  Eigen::Quaterniond quat(frame);\n  marker.pose.orientation.x = quat.x();\n  marker.pose.orientation.y = quat.y();\n  marker.pose.orientation.z = quat.z();\n  marker.pose.orientation.w = quat.w();\n\n  // these scales are relative to the hand frame (unit: meters)\n  marker.scale.x = length;  // forward direction\n  marker.scale.y = width;  // hand closing direction\n  marker.scale.z = height;  // hand vertical direction\n\n  marker.color.a = 0.5;\n  marker.color.r = 0.0;\n  marker.color.g = 0.0;\n  marker.color.b = 0.5;\n\n  return marker;\n}\n\nvisualization_msgs::msg::Marker GraspDetectorGPD::createHandBaseMarker(\n  const Eigen::Vector3d & start,\n  const Eigen::Vector3d & end, const Eigen::Matrix3d & frame, double length, double height, int id,\n  const std::string & frame_id)\n{\n  Eigen::Vector3d center = start + 0.5 * (end - start);\n\n  visualization_msgs::msg::Marker marker;\n  marker.header.frame_id = frame_id;\n  marker.header.stamp = rclcpp::Clock(RCL_ROS_TIME).now();\n  marker.ns = \"hand_base\";\n  marker.id = id;\n  marker.type = visualization_msgs::msg::Marker::CUBE;\n  marker.action = visualization_msgs::msg::Marker::ADD;\n  marker.pose.position.x = center(0);\n  marker.pose.position.y = center(1);\n  marker.pose.position.z = center(2);\n  marker.lifetime = rclcpp::Duration(20.0, 0);\n\n  // use orientation of hand frame\n  Eigen::Quaterniond quat(frame);\n  marker.pose.orientation.x = quat.x();\n  marker.pose.orientation.y = quat.y();\n  marker.pose.orientation.z = quat.z();\n  marker.pose.orientation.w = quat.w();\n\n  // these scales are relative to the hand frame (unit: meters)\n  marker.scale.x = length;  // forward direction\n  marker.scale.y = (end - start).norm();  // hand closing direction\n  marker.scale.z = height;  // hand vertical direction\n\n  marker.color.a = 0.5;\n  marker.color.r = 0.0;\n  marker.color.g = 0.0;\n  marker.color.b = 1.0;\n\n  return marker;\n}\n\n}  // namespace grasp_ros2\n\n#include 
\"rclcpp_components/register_node_macro.hpp\"\nRCLCPP_COMPONENTS_REGISTER_NODE(grasp_ros2::GraspDetectorGPD)\n"
  },
  {
    "path": "grasp_ros2/src/grasp_planner.cpp",
    "content": "// Copyright (c) 2019 Intel Corporation\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n//     http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n\n#include <tf2/LinearMath/Matrix3x3.h>\n#include <tf2/LinearMath/Quaternion.h>\n#include <tf2_ros/transform_listener.h>\n\n#include <algorithm>\n#include <condition_variable>\n#include <map>\n#include <memory>\n#include <mutex>\n#include <string>\n#include <utility>\n\n#include \"grasp_library/ros2/consts.hpp\"\n#include \"grasp_library/ros2/grasp_planner.hpp\"\n#include \"grasp_library/ros2/ros_params.hpp\"\n\nnamespace grasp_ros2\n{\n\nusing GraspPlanning = moveit_msgs::srv::GraspPlanning;\n\nGraspPlanner::GraspPlanner(const rclcpp::NodeOptions & options, GraspDetectorBase * grasp_detector)\n: Node(\"GraspPlanner\", options),\n  GraspCallback(), tfBroadcaster_(this), grasp_detector_(grasp_detector)\n{\n  ROSParameters::getPlanningParams(this, param_);\n  callback_group_subscriber3_ = this->create_callback_group(\n    rclcpp::callback_group::CallbackGroupType::MutuallyExclusive);\n  auto service = [this](const std::shared_ptr<rmw_request_id_t> request_header,\n      const std::shared_ptr<GraspPlanning::Request> req,\n      const std::shared_ptr<GraspPlanning::Response> res) -> void {\n      this->grasp_service(request_header, req, res);\n    };\n  grasp_srv_ = this->create_service<GraspPlanning>(\"plan_grasps\", service, rmw_qos_profile_default,\n      callback_group_subscriber3_);\n\n  grasp_detector_->add_callback(this);\n\n  tfBuffer_ = new tf2_ros::Buffer(std::make_shared<rclcpp::Clock>(RCL_ROS_TIME));\n  tfListener_ = std::make_shared<tf2_ros::TransformListener>(*tfBuffer_);\n  RCLCPP_INFO(logger_, \"ROS2 Grasp Planning Service up...\");\n}\n\nvoid GraspPlanner::grasp_callback(const grasp_msgs::msg::GraspConfigList::SharedPtr msg)\n{\n  RCLCPP_INFO(logger_, \"Received grasp callback\");\n  rclcpp::Time rclcpp_time = std::make_shared<rclcpp::Clock>(RCL_ROS_TIME)->now() +\n    rclcpp::Duration(6, 0);\n  static bool tf_needed = (param_.grasp_frame_id_ != msg->header.frame_id);\n  RCLCPP_INFO(logger_, \"tf_needed %d\", tf_needed);\n  std_msgs::msg::Header header;\n  header.frame_id = tf_needed ? 
param_.grasp_frame_id_ : msg->header.frame_id;\n  header.stamp = msg->header.stamp;\n  grasp_msgs::msg::GraspConfig to_grasp;\n\n  for (auto from_grasp : msg->grasps) {\n    // skip low score grasp\n    if (from_grasp.score.data < param_.grasp_score_threshold_) {\n      RCLCPP_INFO(logger_, \"skip low score grasps %f\", from_grasp.score.data);\n      continue;\n    }\n\n    // transform grasp to grasp_frame_id\n    if (tf_needed) {\n      if (!transform(from_grasp, to_grasp, msg->header)) {\n        // skip transformation failure\n        continue;\n      }\n    }\n\n    if (param_.grasp_approach_angle_ != M_PI) {\n      // skip unacceptable approach\n      tf2::Vector3 approach(to_grasp.approach.x, to_grasp.approach.y, to_grasp.approach.z);\n      double ang = tf2::tf2Angle(param_.grasp_approach_, approach);\n      if (std::isnan(ang) ||\n        ang < -param_.grasp_approach_angle_ || ang > param_.grasp_approach_angle_)\n      {\n        RCLCPP_INFO(logger_, \"skip unacceptable approach\");\n        continue;\n      }\n    }\n    // apply grasp offset\n    to_grasp.bottom.x += param_.grasp_offset_[0];\n    to_grasp.bottom.y += param_.grasp_offset_[1];\n    to_grasp.bottom.z += param_.grasp_offset_[2];\n    // skip out of boundary grasps\n    if (!tf_needed || check_boundry(to_grasp.bottom)) {\n      // translate into moveit grasp\n      moveit_msgs::msg::Grasp moveit_msg = toMoveIt(to_grasp, header);\n      std::unique_lock<std::mutex> lock(m_);\n      moveit_grasps_.push_back(moveit_msg);\n    }\n  }\n}\n\nbool GraspPlanner::transform(\n  grasp_msgs::msg::GraspConfig & from, grasp_msgs::msg::GraspConfig & to,\n  const std_msgs::msg::Header & header)\n{\n  geometry_msgs::msg::PointStamped from_top, to_top, from_surface, to_surface,\n    from_bottom, to_bottom;\n  geometry_msgs::msg::Vector3Stamped from_approach, to_approach, from_binormal, to_binormal,\n    from_axis, to_axis;\n\n  to = from;\n\n  from_top.point = from.top;\n  from_top.header = header;\n  from_surface.point = from.surface;\n  from_surface.header = header;\n  from_bottom.point = from.bottom;\n  from_bottom.header = header;\n  from_approach.vector = from.approach;\n  from_approach.header = header;\n  from_binormal.vector = from.binormal;\n  from_binormal.header = header;\n  from_axis.vector = from.axis;\n  from_axis.header = header;\n  while (rclcpp::ok()) {\n    try {\n      tfBuffer_->transform(from_top, to_top, param_.grasp_frame_id_);\n      tfBuffer_->transform(from_surface, to_surface, param_.grasp_frame_id_);\n      tfBuffer_->transform(from_bottom, to_bottom, param_.grasp_frame_id_);\n      tfBuffer_->transform(from_approach, to_approach, param_.grasp_frame_id_);\n      tfBuffer_->transform(from_binormal, to_binormal, param_.grasp_frame_id_);\n      tfBuffer_->transform(from_axis, to_axis, param_.grasp_frame_id_);\n    } catch (tf2::TransformException & ex) {\n      RCLCPP_WARN(logger_, \"transform exception\");\n      rclcpp::Rate(1).sleep();\n      continue;\n    }\n    break;\n  }\n\n  to.top = to_top.point;\n  to.surface = to_surface.point;\n  to.bottom = to_bottom.point;\n  to.approach = to_approach.vector;\n  to.binormal = to_binormal.vector;\n  to.axis = to_axis.vector;\n  return true;\n}\n\nbool GraspPlanner::check_boundry(const geometry_msgs::msg::Point & p)\n{\n  RCLCPP_INFO(logger_, \"point [%f %f %f]\", p.x, p.y, p.z);\n  return p.x >= param_.grasp_boundry_[0] && p.x <= param_.grasp_boundry_[1] &&\n         p.y >= param_.grasp_boundry_[2] && p.y <= param_.grasp_boundry_[3] &&\n         p.z >= 
param_.grasp_boundry_[4] && p.z <= param_.grasp_boundry_[5];\n}\n\nmoveit_msgs::msg::Grasp GraspPlanner::toMoveIt(\n  grasp_msgs::msg::GraspConfig & grasp,\n  const std_msgs::msg::Header & header)\n{\n  moveit_msgs::msg::Grasp msg;\n  msg.grasp_pose.header = header;\n  msg.grasp_quality = grasp.score.data;\n\n  double offset = param_.eef_offset;\n  // set grasp position, translation from hand-base to the parent-link of EEF\n  msg.grasp_pose.pose.position.x = grasp.bottom.x - grasp.approach.x * offset;\n  msg.grasp_pose.pose.position.y = grasp.bottom.y - grasp.approach.y * offset;\n  msg.grasp_pose.pose.position.z = grasp.bottom.z - grasp.approach.z * offset;\n\n  // rotation matrix https://github.com/atenpas/gpd/blob/master/tutorials/hand_frame.png\n  tf2::Matrix3x3 r(\n    grasp.binormal.x, grasp.axis.x, grasp.approach.x,\n    grasp.binormal.y, grasp.axis.y, grasp.approach.y,\n    grasp.binormal.z, grasp.axis.z, grasp.approach.z);\n  tf2::Quaternion quat;\n  r.getRotation(quat);\n  // EEF yaw-offset to its parent-link (last link of arm)\n  quat *= tf2::Quaternion(tf2::Vector3(0, 0, 1), param_.eef_yaw_offset);\n  quat.normalize();\n  // set grasp orientation\n  msg.grasp_pose.pose.orientation = tf2::toMsg(quat);\n  RCLCPP_INFO(logger_, \"==============offset is %f quat [%f %f %f %f]\", offset,\n    msg.grasp_pose.pose.orientation.x, msg.grasp_pose.pose.orientation.y,\n    msg.grasp_pose.pose.orientation.z, msg.grasp_pose.pose.orientation.w);\n\n  // set pre-grasp approach\n  msg.pre_grasp_approach.direction.header = header;\n  msg.pre_grasp_approach.direction.vector = grasp.approach;\n  msg.pre_grasp_approach.min_distance = param_.grasp_min_distance_;\n  msg.pre_grasp_approach.desired_distance = param_.grasp_desired_distance_;\n\n  // set post-grasp retreat\n  msg.post_grasp_retreat.direction.header = header;\n  msg.post_grasp_retreat.direction.vector.x = -grasp.approach.x;\n  msg.post_grasp_retreat.direction.vector.y = -grasp.approach.y;\n  msg.post_grasp_retreat.direction.vector.z = -grasp.approach.z;\n  msg.post_grasp_retreat.min_distance = param_.grasp_min_distance_;\n  msg.post_grasp_retreat.desired_distance = param_.grasp_desired_distance_;\n\n  // set pre-grasp posture\n  msg.pre_grasp_posture.joint_names = param_.finger_joint_names_;\n  msg.pre_grasp_posture.points.push_back(param_.finger_points_open_);\n\n  // set grasp posture\n  msg.grasp_posture.joint_names = param_.finger_joint_names_;\n  msg.grasp_posture.points.push_back(param_.finger_points_close_);\n\n  return msg;\n}\n\nvoid GraspPlanner::grasp_service(\n  const std::shared_ptr<rmw_request_id_t> request_header,\n  const std::shared_ptr<GraspPlanning::Request> req,\n  const std::shared_ptr<GraspPlanning::Response> res)\n{\n  (void)request_header;\n  (void)req;\n  RCLCPP_INFO(logger_, \"Received Grasp Planning request\");\n\n  {\n    std::unique_lock<std::mutex> lock(m_);\n    moveit_grasps_.clear();\n    grasp_detector_->start(req->target.id);\n  }\n  // blocking till grasps found\n  while (moveit_grasps_.empty()) {\n    rclcpp::Rate(20).sleep();\n  }\n  grasp_detector_->stop();\n  res->grasps = moveit_grasps_;\n\n  if (res->grasps.empty()) {\n    RCLCPP_INFO(logger_, \"No expected grasp found.\");\n    res->error_code.val = moveit_msgs::msg::MoveItErrorCodes::FAILURE;\n  } else {\n    RCLCPP_INFO(logger_, \"%ld grasps found.\", res->grasps.size());\n    res->error_code.val = moveit_msgs::msg::MoveItErrorCodes::SUCCESS;\n  }\n}\n\n}  // namespace grasp_ros2\n\n#include 
\"rclcpp_components/register_node_macro.hpp\"\nRCLCPP_COMPONENTS_REGISTER_NODE(grasp_ros2::GraspPlanner)\n"
  },
  {
    "path": "grasp_ros2/src/ros_params.cpp",
    "content": "// Copyright (c) 2018 Intel Corporation. All Rights Reserved\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n//     http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n\n#include <string>\n#include <vector>\n\n#include \"grasp_library/ros2/ros_params.hpp\"\n\nnamespace grasp_ros2\n{\n\nvoid ROSParameters::getDetectionParams(\n  rclcpp::Node * node,\n  GraspDetector::GraspDetectionParameters & param)\n{\n  // Read hand geometry parameters.\n  node->get_parameter_or(\"finger_width\", param.hand_search_params.finger_width_, 0.005);\n  node->get_parameter_or(\"hand_outer_diameter\", param.hand_search_params.hand_outer_diameter_,\n    0.12);\n  node->get_parameter_or(\"hand_depth\", param.hand_search_params.hand_depth_, 0.06);\n  node->get_parameter_or(\"hand_height\", param.hand_search_params.hand_height_, 0.02);\n  node->get_parameter_or(\"init_bite\", param.hand_search_params.init_bite_, 0.01);\n\n  // Read local hand search parameters.\n  node->get_parameter_or(\"nn_radius\", param.hand_search_params.nn_radius_frames_, 0.01);\n  node->get_parameter_or(\"num_orientations\", param.hand_search_params.num_orientations_, 8);\n  node->get_parameter_or(\"num_samples\", param.hand_search_params.num_samples_, 100);\n  node->get_parameter_or(\"num_threads\", param.hand_search_params.num_threads_, 4);\n  node->get_parameter_or(\"rotation_axis\", param.hand_search_params.rotation_axis_, 2);\n\n  // Read plotting parameters.\n  node->get_parameter_or(\"plot_samples\", param.plot_samples_, false);\n  node->get_parameter_or(\"plot_normals\", param.plot_normals_, false);\n  param.generator_params.plot_normals_ = param.plot_normals_;\n  node->get_parameter_or(\"plot_filtered_grasps\", param.plot_filtered_grasps_, false);\n  node->get_parameter_or(\"plot_valid_grasps\", param.plot_valid_grasps_, false);\n  node->get_parameter_or(\"plot_clusters\", param.plot_clusters_, false);\n  node->get_parameter_or(\"plot_selected_grasps\", param.plot_selected_grasps_, false);\n\n  // Read general parameters.\n  param.generator_params.num_samples_ = param.hand_search_params.num_samples_;\n  param.generator_params.num_threads_ = param.hand_search_params.num_threads_;\n  node->get_parameter_or(\"plot_candidates\", param.generator_params.plot_grasps_, false);\n\n  // Read preprocessing parameters.\n  node->get_parameter_or(\"remove_outliers\", param.generator_params.remove_statistical_outliers_,\n    false);\n  node->get_parameter_or(\"voxelize\", param.generator_params.voxelize_, true);\n  node->get_parameter_or(\"workspace\", param.generator_params.workspace_,\n    std::vector<double>(std::initializer_list<double>({-1.0, 1.0, -1.0, 1.0, -1.0, 1.0})));\n  param.workspace_ = param.generator_params.workspace_;\n\n  // Read classification parameters and create classifier.\n  node->get_parameter_or(\"model_file\", param.model_file_, std::string(\"\"));\n  node->get_parameter_or(\"trained_file\", param.weights_file_, std::string(\"\"));\n  node->get_parameter_or(\"min_score_diff\", param.min_score_diff_, 500.0);\n  
node->get_parameter_or(\"create_image_batches\", param.create_image_batches_, false);\n  node->get_parameter_or(\"device\", param.device_, 0);\n\n  // Read grasp image parameters.\n  node->get_parameter_or(\"image_outer_diameter\", param.image_params.outer_diameter_,\n    param.hand_search_params.hand_outer_diameter_);\n  node->get_parameter_or(\"image_depth\", param.image_params.depth_,\n    param.hand_search_params.hand_depth_);\n  node->get_parameter_or(\"image_height\", param.image_params.height_,\n    param.hand_search_params.hand_height_);\n  node->get_parameter_or(\"image_size\", param.image_params.size_, 60);\n  node->get_parameter_or(\"image_num_channels\", param.image_params.num_channels_, 15);\n\n  // Read learning parameters.\n  node->get_parameter_or(\"remove_plane_before_image_calculation\", param.remove_plane_, false);\n\n  // Read grasp filtering parameters\n  node->get_parameter_or(\"filter_grasps\", param.filter_grasps_, false);\n  node->get_parameter_or(\"filter_half_antipodal\", param.filter_half_antipodal_, false);\n  param.gripper_width_range_.push_back(0.03);\n  param.gripper_width_range_.push_back(0.10);\n  // node->get_parameter(\"gripper_width_range\", param.gripper_width_range_);\n\n  // Read clustering parameters\n  node->get_parameter_or(\"min_inliers\", param.min_inliers_, 1);\n\n  // Read grasp selection parameters\n  node->get_parameter_or(\"num_selected\", param.num_selected_, 5);\n}\n\nvoid ROSParameters::getPlanningParams(\n  rclcpp::Node * node,\n  GraspPlanner::GraspPlanningParameters & param)\n{\n  node->get_parameter_or(\"grasp_service_timeout\", param.grasp_service_timeout_, 0);\n  node->get_parameter_or(\"grasp_score_threshold\", param.grasp_score_threshold_, 200);\n  node->get_parameter_or(\"grasp_frame_id\", param.grasp_frame_id_, std::string(\"base\"));\n  std::vector<double> approach;\n  node->get_parameter_or(\"grasp_approach\", approach,\n    std::vector<double>(std::initializer_list<double>({0.0, 0.0, -1.0})));\n  param.grasp_approach_ = tf2::Vector3(approach[0], approach[1], approach[2]);\n  node->get_parameter_or(\"grasp_approach_angle\", param.grasp_approach_angle_, M_PI);\n  node->get_parameter_or(\"grasp_offset\", param.grasp_offset_,\n    std::vector<double>(std::initializer_list<double>({0.0, 0.0, 0.0})));\n  node->get_parameter_or(\"grasp_boundry\", param.grasp_boundry_,\n    std::vector<double>(std::initializer_list<double>({-1.0, 1.0, -1.0, 1.0, -1.0, 1.0})));\n  node->get_parameter_or(\"eef_offset\", param.eef_offset, 0.154);\n  node->get_parameter_or(\"eef_yaw_offset\", param.eef_yaw_offset, 0.0);\n  node->get_parameter_or(\"grasp_min_distance\", param.grasp_min_distance_, 0.06);\n  node->get_parameter_or(\"grasp_desired_distance\", param.grasp_desired_distance_, 0.1);\n\n  // gripper parameters\n  std::vector<double> finger_opens, finger_closes;\n  node->get_parameter_or(\"finger_joint_names\", param.finger_joint_names_,\n    std::vector<std::string>(std::initializer_list<std::string>({std::string(\"panda_finger_joint1\"),\n      std::string(\"panda_finger_joint2\")})));\n  node->get_parameter_or(\"finger_positions_open\", param.finger_points_open_.positions,\n    std::vector<double>(std::initializer_list<double>({-0.01, 0.01})));\n  node->get_parameter_or(\"finger_positions_close\", param.finger_points_close_.positions,\n    std::vector<double>(std::initializer_list<double>({-0.0, 0.0})));\n}\n\n}  // namespace grasp_ros2\n"
  },
  {
    "path": "grasp_ros2/tests/CMakeLists.txt",
    "content": "find_package(ament_cmake REQUIRED)\nfind_package(ament_cmake_gtest REQUIRED)\nfind_package(rclcpp REQUIRED)\nfind_package(grasp_msgs REQUIRED)\nfind_package(moveit_msgs REQUIRED)\nfind_package(pcl_conversions REQUIRED)\nset(TEST_NAME tgrasp_ros2)\n\nament_add_gtest(${TEST_NAME} tgrasp_ros2.cpp ${CMAKE_CURRENT_SOURCE_DIR}/../src/consts.cpp)\n\nif(TARGET ${TEST_NAME})\n  get_filename_component(RESOURCE_DIR \"resource\" ABSOLUTE)\n  configure_file(tgrasp_ros2.h.in tgrasp_ros2.h)\n  include_directories(${CMAKE_CURRENT_BINARY_DIR} ${PCL_INCLUDE_DIRS})\n  link_directories(${PCL_LIBRARY_DIRS})\n  target_include_directories(${TEST_NAME} PUBLIC\n    ${grasp_library_INCLUDE_DIRS}\n  )\n  ament_target_dependencies(${TEST_NAME}\n    pcl_conversions\n    rclcpp\n    grasp_msgs\n    moveit_msgs\n    sensor_msgs\n  )\n  target_link_libraries(${TEST_NAME} ${GTEST_LIBRARIES} ${PCL_LIBRARIES})\n\n  # Install binaries\n  install(TARGETS ${TEST_NAME}\n    RUNTIME DESTINATION bin\n  )\n\n  install(TARGETS ${TEST_NAME}\n    DESTINATION lib/${PROJECT_NAME}\n  )\n\nendif()\n"
  },
  {
    "path": "grasp_ros2/tests/tgrasp_ros2.cpp",
    "content": "// Copyright (c) 2019 Intel Corporation\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n//     http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n\n#include <gtest/gtest.h>\n\n#include <rclcpp/rclcpp.hpp>\n#include <sensor_msgs/msg/point_cloud2.hpp>\n\n#include <grasp_msgs/msg/grasp_config_list.hpp>\n#include <moveit_msgs/msg/move_it_error_codes.hpp>\n#include <moveit_msgs/srv/grasp_planning.hpp>\n#include <pcl_conversions/pcl_conversions.h>\n\n#include <cassert>\n#include <memory>\n#include <string>\n#include <thread>\n\n#include \"grasp_library/ros2/consts.hpp\"\n#include \"./tgrasp_ros2.h\"\n\nusing Consts = grasp_ros2::Consts;\nusing GraspPlanning = moveit_msgs::srv::GraspPlanning;\n\nstatic bool received_topic = false;\nstatic int num_grasps = 0;\nstatic bool pcd_stop = false;\nstatic rclcpp::Node::SharedPtr node = nullptr;\nstatic std::shared_ptr<GraspPlanning::Response> result = nullptr;\n\nstatic void pcd_publisher()\n{\n  char path[512];\n  snprintf(path, sizeof(path), \"%s/table_top.pcd\", RESOURCE_DIR);\n  pcl::PointCloud<pcl::PointXYZRGBA> cloud;\n  if (0 != pcl::io::loadPCDFile<pcl::PointXYZRGBA>(path, cloud)) {\n    return;\n  }\n  sensor_msgs::msg::PointCloud2 msg;\n  pcl::toROSMsg(cloud, msg);\n  msg.header.frame_id = \"camera_color_optical_frame\";\n  auto pcd_node = rclcpp::Node::make_shared(\"PCDPublisher\");\n  auto pcd_pub = pcd_node->create_publisher<sensor_msgs::msg::PointCloud2>(\n    Consts::kTopicPointCloud2, 10);\n  rclcpp::Rate loop_rate(30);\n  while (!pcd_stop && rclcpp::ok()) {\n    pcd_pub->publish(msg);\n    loop_rate.sleep();\n  }\n}\n\nstatic void topic_cb(const grasp_msgs::msg::GraspConfigList::SharedPtr msg)\n{\n  RCLCPP_INFO(node->get_logger(), \"Topic received\");\n  received_topic = true;\n  num_grasps = msg->grasps.size();\n}\n\nTEST(GraspLibraryTests, TestGraspService) {\n  EXPECT_TRUE(result->error_code.val == moveit_msgs::msg::MoveItErrorCodes::SUCCESS);\n  EXPECT_GT(result->grasps.size(), uint32_t(0));\n}\n\nTEST(GraspLibraryTests, TestGraspTopic) {\n  rclcpp::Rate(1).sleep();\n  EXPECT_TRUE(received_topic);\n  EXPECT_GT(num_grasps, 0);\n}\n\nint main(int argc, char * argv[])\n{\n  rclcpp::init(argc, argv);\n\n  std::thread pcd_thread(pcd_publisher);\n  pcd_thread.detach();\n\n  node = rclcpp::Node::make_shared(\"GraspLibraryTest\");\n  auto sub = node->create_subscription<grasp_msgs::msg::GraspConfigList>(\n    Consts::kTopicDetectedGrasps, rclcpp::QoS(rclcpp::KeepLast(1)), topic_cb);\n\n  auto client = node->create_client<GraspPlanning>(\"plan_grasps\");\n  while (!client->wait_for_service(std::chrono::seconds(1))) {\n    if (!rclcpp::ok()) {\n      RCLCPP_ERROR(node->get_logger(), \"Client interrupted\");\n      return 1;\n    }\n    RCLCPP_INFO(node->get_logger(), \"Wait for service\");\n  }\n  auto request = std::make_shared<GraspPlanning::Request>();\n  auto result_future = client->async_send_request(request);\n  RCLCPP_INFO(node->get_logger(), \"Request sent\");\n\n  if (rclcpp::spin_until_future_complete(node, result_future) !=\n    
rclcpp::executor::FutureReturnCode::SUCCESS)\n  {\n    RCLCPP_ERROR(node->get_logger(), \"Request failed\");\n    return 1;\n  }\n  result = result_future.get();\n  RCLCPP_INFO(node->get_logger(), \"Response received %d\", result->error_code.val);\n\n  testing::InitGoogleTest(&argc, argv);\n  int ret = RUN_ALL_TESTS();\n\n  pcd_stop = true;\n  rclcpp::Rate(3).sleep();\n  // pcd_thread.join() disabled. It causes runtest exit abnormally\n\n  node = nullptr;\n  rclcpp::shutdown();\n  return ret;\n}\n"
  },
  {
    "path": "grasp_ros2/tests/tgrasp_ros2.h.in",
    "content": "// Copyright (c) 2018 Intel Corporation. All Rights Reserved\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n//     http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n\n#ifndef GRASP_LIBRARY__TGRASP_LIBRARY_H_\n#define GRASP_LIBRARY__TGRASP_LIBRARY_H_\n\n#include <pcl/io/pcd_io.h>\n#include <pcl/point_types.h>\n\n#define RESOURCE_DIR \"@RESOURCE_DIR@\"\n\n#endif  // GRASP_LIBRARY__TGRASP_LIBRARY_H_\n"
  },
  {
    "path": "grasp_tutorials/CMakeLists.txt",
    "content": "# Copyright (c) 2019 Intel Corporation\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#      http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\ncmake_minimum_required(VERSION 3.5)\nproject(grasp_tutorials)\n\nfind_package(ament_cmake REQUIRED)\nament_package()\n"
  },
  {
    "path": "grasp_tutorials/README.md",
    "content": "# ROS2 Grasp Library Tutorials\n\nThis tutorials aim to introduce how to\n* Install, build, and launch the ROS2 Grasp Planner and Detector\n* Use launch options to customize in a new workspace\n* Bring up the intelligent visual grasp solution on a new robot\n* Do hand-eye calibration for a new camera setup\n* Launch the example applications\n\n## Build and test this tutorial\n\n```bash\ncd ros2_grasp_library/grasp_tutorials\nsphinx-build . build # check the outputs in the ./build/ folder\ncd ros2_grasp_library/grasp_utils/robot_interface\ndoxygen Doxyfile # check the outputs in the ./build/ folder\n```\n"
  },
  {
    "path": "grasp_tutorials/conf.py",
    "content": "# -*- coding: utf-8 -*-\n#\n# app_tutorials documentation build configuration file, created by\n# sphinx-quickstart on Thu Oct 18 17:31:36 2018.\n#\n# This file is execfile()d with the current directory set to its\n# containing dir.\n#\n# Note that not all possible configuration values are present in this\n# autogenerated file.\n#\n# All configuration values have a default; values that are commented out\n# serve to show the default.\n\nimport sys\nimport os\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n#sys.path.insert(0, os.path.abspath('.'))\n\n# -- General configuration ------------------------------------------------\n\n# If your documentation needs a minimal Sphinx version, state it here.\n#needs_sphinx = '1.0'\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\n    'sphinx.ext.autodoc',\n    'sphinx.ext.doctest',\n    'sphinx.ext.intersphinx',\n    'sphinx.ext.todo',\n    'sphinx.ext.coverage',\n    'sphinx.ext.mathjax',\n    'sphinx.ext.ifconfig',\n    'sphinx.ext.viewcode',\n]\n\n# Add any paths that contain templates here, relative to this directory.\n# templates_path = []\n\n# The suffix(es) of source filenames.\n# You can specify multiple suffix as a list of string:\n# source_suffix = ['.rst', '.md']\nsource_suffix = '.rst'\n\n# The encoding of source files.\n#source_encoding = 'utf-8-sig'\n\n# The master toctree document.\nmaster_doc = 'index'\n\n# General information about the project.\nproject = u'ROS2 Grasp Library Tutorials'\ncopyright = u'2019, Intel Corporation'\nauthor = u'sharron, liu; yu, yan'\n\n# The version info for the project you're documenting, acts as replacement for\n# |version| and |release|, also used in various other places throughout the\n# built documents.\n#\n# The short X.Y version.\nversion = u'0.5.0'\n# The full version, including alpha/beta/rc tags.\nrelease = u'0.5.0'\n\n# The language for content autogenerated by Sphinx. Refer to documentation\n# for a list of supported languages.\n#\n# This is also used if you do content translation via gettext catalogs.\n# Usually you set \"language\" from the command line for these cases.\nlanguage = None\n\n# There are two options for replacing |today|: either, you set today to some\n# non-false value, then it is used:\n#today = ''\n# Else, today_fmt is used as the format for a strftime call.\n#today_fmt = '%B %d, %Y'\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\n# exclude_patterns = ['doc/ur5_setup_with_moveit.rst', 'doc/franka_setup_with_moveit.rst']\n\n# The reST default role (used for this markup: `text`) to use for all\n# documents.\n#default_role = None\n\n# If true, '()' will be appended to :func: etc. cross-reference text.\n#add_function_parentheses = True\n\n# If true, the current module name will be prepended to all description\n# unit titles (such as .. function::).\n#add_module_names = True\n\n# If true, sectionauthor and moduleauthor directives will be shown in the\n# output. 
They are ignored by default.\n#show_authors = False\n\n# The name of the Pygments (syntax highlighting) style to use.\npygments_style = 'sphinx'\n\n# A list of ignored prefixes for module index sorting.\n#modindex_common_prefix = []\n\n# If true, keep warnings as \"system message\" paragraphs in the built documents.\n#keep_warnings = False\n\n# If true, `todo` and `todoList` produce output, else they produce nothing.\ntodo_include_todos = True\n\n\n# -- Options for HTML output ----------------------------------------------\n\n# The theme to use for HTML and HTML Help pages.  See the documentation for\n# a list of builtin themes.\nhtml_theme = 'sphinx_rtd_theme'\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further.  For a list of options available for each theme, see the\n# documentation.\n#html_theme_options = {}\n\n# Add any paths that contain custom themes here, relative to this directory.\n#html_theme_path = []\n\n# The name for this set of Sphinx documents.  If None, it defaults to\n# \"<project> v<release> documentation\".\n#html_title = None\n\n# A shorter title for the navigation bar.  Default is the same as html_title.\n#html_short_title = None\n\n# The name of an image file (relative to this directory) to place at the top\n# of the sidebar.\n#html_logo = None\n\n# The name of an image file (relative to this directory) to use as a favicon of\n# the docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32\n# pixels large.\n#html_favicon = None\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = ['_static']\n\n# Add any extra paths that contain custom files (such as robots.txt or\n# .htaccess) here, relative to this directory. These files are copied\n# directly to the root of the documentation.\n#html_extra_path = []\n\n# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,\n# using the given strftime format.\n#html_last_updated_fmt = '%b %d, %Y'\n\n# If true, SmartyPants will be used to convert quotes and dashes to\n# typographically correct entities.\n#html_use_smartypants = True\n\n# Custom sidebar templates, maps document names to template names.\n#html_sidebars = {}\n\n# Additional templates that should be rendered to pages, maps page names to\n# template names.\n#html_additional_pages = {}\n\n# If false, no module index is generated.\n#html_domain_indices = True\n\n# If false, no index is generated.\n#html_use_index = True\n\n# If true, the index is split into individual pages for each letter.\n#html_split_index = False\n\n# If true, links to the reST sources are added to the pages.\n#html_show_sourcelink = True\n\n# If true, \"Created using Sphinx\" is shown in the HTML footer. Default is True.\n#html_show_sphinx = True\n\n# If true, \"(C) Copyright ...\" is shown in the HTML footer. Default is True.\n#html_show_copyright = True\n\n# If true, an OpenSearch description file will be output, and all pages will\n# contain a <link> tag referring to it.  The value of this option must be the\n# base URL from which the finished HTML is served.\n#html_use_opensearch = ''\n\n# This is the file name suffix for HTML files (e.g. 
\".xhtml\").\n#html_file_suffix = None\n\n# Language to be used for generating the HTML full-text search index.\n# Sphinx supports the following languages:\n#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'\n#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'\n#html_search_language = 'en'\n\n# A dictionary with options for the search language support, empty by default.\n# Now only 'ja' uses this config value\n#html_search_options = {'type': 'default'}\n\n# The name of a javascript file (relative to the configuration directory) that\n# implements a search results scorer. If empty, the default will be used.\n#html_search_scorer = 'scorer.js'\n\n# Output file base name for HTML help builder.\nhtmlhelp_basename = 'grasp_tutorialsdoc'\n\n# -- Options for LaTeX output ---------------------------------------------\n\nlatex_elements = {\n# The paper size ('letterpaper' or 'a4paper').\n#'papersize': 'letterpaper',\n\n# The font size ('10pt', '11pt' or '12pt').\n#'pointsize': '10pt',\n\n# Additional stuff for the LaTeX preamble.\n#'preamble': '',\n\n# Latex figure (float) alignment\n#'figure_align': 'htbp',\n}\n\n# Grouping the document tree into LaTeX files. List of tuples\n# (source start file, target name, title,\n#  author, documentclass [howto, manual, or own class]).\nlatex_documents = [\n    (master_doc, 'grasp_tutorials.tex', u'grasp\\\\_tutorials Documentation',\n     u'Intel Corporation', 'manual'),\n]\n\n# The name of an image file (relative to this directory) to place at the top of\n# the title page.\n#latex_logo = None\n\n# For \"manual\" documents, if this is true, then toplevel headings are parts,\n# not chapters.\n#latex_use_parts = False\n\n# If true, show page references after internal links.\n#latex_show_pagerefs = False\n\n# If true, show URL addresses after external links.\n#latex_show_urls = False\n\n# Documents to append as an appendix to all manuals.\n#latex_appendices = []\n\n# If false, no module index is generated.\n#latex_domain_indices = True\n\n\n# -- Options for manual page output ---------------------------------------\n\n# One entry per manual page. List of tuples\n# (source start file, name, description, authors, manual section).\nman_pages = [\n    (master_doc, 'grasp_tutorials', u'grasp_tutorials Documentation',\n     [author], 1)\n]\n\n# If true, show URL addresses after external links.\n#man_show_urls = False\n\n\n# -- Options for Texinfo output -------------------------------------------\n\n# Grouping the document tree into Texinfo files. List of tuples\n# (source start file, target name, title, author,\n#  dir menu entry, description, category)\ntexinfo_documents = [\n    (master_doc, 'grasp_tutorials', u'grasp_tutorials Documentation',\n     author, 'grasp_tutorials', 'One line description of project.',\n     'Miscellaneous'),\n]\n\n# Documents to append as an appendix to all manuals.\n#texinfo_appendices = []\n\n# If false, no module index is generated.\n#texinfo_domain_indices = True\n\n# How to display URL addresses: 'footnote', 'no', or 'inline'.\n#texinfo_show_urls = 'footnote'\n\n# If true, do not generate a @detailmenu in the \"Top\" node's menu.\n#texinfo_no_detailmenu = False\n\n\n# Example configuration for intersphinx: refer to the Python standard library.\nintersphinx_mapping = {'https://docs.python.org/': None}\n"
  },
  {
    "path": "grasp_tutorials/doc/bringup_robot.rst",
    "content": "Bring up a New Robot\n====================\n\nThis tutorial explains what is expected to do when bringing up this ROS2 Grasp Library on a new robot.\n\n.. _GraspPlanning: http://docs.ros.org/api/moveit_msgs/html/srv/GraspPlanning.html\n.. _GPD: https://github.com/atenpas/gpd\n.. _OpenVINO™: https://software.intel.com/en-us/openvino-toolkit\n.. _Grasp: http://docs.ros.org/api/moveit_msgs/html/msg/Grasp.html\n\nMinimum APIs to Implement\n-------------------------\n\n- `moveToTcpPose`\n\n  - Move the TCP (tool center point, usually the end effector of the robot arm, not the hand) to a pose specified with position [x, y, z] and orientation [alpha, betta, gamma] (also called [roll, pitch, yaw]). This function returns when the robot `moved` to the specified pose.\n\n- `moveToJointValues`\n\n  - Move each joint of the robot to the specified values (usually angles). This function differs from the `moveToTcpPose` since a same TCP pose may be reached with various solutions of joint values. This function is used when the application expect the joints of the robot in specific state, that is proper to performe any successive picking or place action. This function returns when the robot `moved` to the specified joint values.\n\n- `open`\n\n  - Open the gripper.\n\n- `close`\n\n  - Close the gripper.\n\n- `startLoop`\n\n  - Start a loop to read and publish the robot state. Robot states are subsribed by Rviz for visualization.\n\nOptional implementation and possible extentions\n-----------------------------------------------\n\n- Optionally you may implement the `pick` and `place` interface to customize the pick and placed pipeline, or even plug-in the collision avoidance motion planning.\n\n- Python extention is not supported. It's possible to implement the Robot Interface in python and bind to C++.\n\n\nRefer to `Robot Interface API <../api/html/index.html>`_ for more detailed definition.\n\nExample UR5 Implementation\n--------------------------\n\nRefer to the UR5 `example <https://github.com/intel/ros2_grasp_library/tree/master/grasp_utils/robot_interface/src>`_ implementatino for Robot Interface.\n\nTest Your Implementation\n------------------------\n\nIt's important to test your implementation before integrating this part with other components in ROS2 Grasp Library.\n\nRefer to `UR5 tests <https://github.com/intel/ros2_grasp_library/blob/master/grasp_utils/robot_interface/test/ur_test_move_command.cpp>`_, adapt it to your robot tests.\n\nBring up Robot Control Applications\n-----------------------------------\n\nOnce finished the testing, you may start to bring up the `Draw X <draw_x.html>`_ app or the `fixed position pick and place <fixed_position_pick.html>`_ app on your new robot. These application does not require camera, instead they control the robot only.\n"
  },
  {
    "path": "grasp_tutorials/doc/draw_x.rst",
    "content": "Draw X\n======\n\nOverview\n--------------\n\nThis demo shows how to use the robot interface to draw letter ``X``\nat the fixed positions with an UR5 robot arm.\n\nRequirement\n------------\n\nBefore running the code, make sure you have\nfollowed the instructions below to setup the robot correctly.\n\n- Hardware\n\n  - Host running ROS2\n\n  - `UR5`_\n\n- Software\n\n  - `ROS2 Dashing`_ Desktop\n\n  - `robot_interface`_\n\n.. _UR5: https://www.universal-robots.com/products/ur5-robot\n\n.. _ROS2 Dashing: https://index.ros.org/doc/ros2/Installation/Dashing/Linux-Install-Debians/\n\n.. _robot_interface: https://github.com/intel/ros2_grasp_library/tree/master/grasp_utils/robot_interface\n\nDownload and Build the Example Code\n------------------------------------\n\nWithin your ROS2 workspace, download and compile the example code:\n\n::\n\n  cd <path_of_your_ros2_workspace>/src\n\n  git clone https://github.com/intel/ros2_grasp_library.git\n\n  cd ..\n\n  colcon build --base-paths src/ros2_grasp_library/grasp_apps/draw_x\n\nLaunch the Application\n----------------------\n\n- Launch the application\n\n::\n\n  ros2 launch draw_x draw_x.launch.py\n\n.. note:: Please make sure the emergency button on the teach pendant is in your hand,\n          in case there is any accident.\n\n- Expected Outputs:\n\n  1. The robot moves its arm to the home pose\n  2. The robot moves its arm to the pose above the first corner of X\n  3. The robot moves its arm down to the first corner of X\n  4. The robot moves its arm to the second corner of X\n  5. The robot moves its arm up to the pose above the second corner of X\n  6. The robot moves its arm to the pose above the third corner of X\n  7. The robot moves its arm down to the third corner of X\n  8. The robot moves its arm to the fourth corner of X\n  9. The robot moves its arm up to the pose above the fourth corner of X\n  10. The robot moves its arm to the home pose again\n"
  },
  {
    "path": "grasp_tutorials/doc/fixed_position_pick.rst",
    "content": "Fixed Position Pick\n====================\n\nOverview\n--------------\n\nThis demo shows how to use the robot interface to pick and place a\nobject at a predefined location with an UR5 robot arm.\n\nRequirement\n------------\n\nBefore running the code, make sure you have\nfollowed the instructions below to setup the robot correctly.\n\n- Hardware\n\n  - Host running ROS2\n\n  - `UR5`_\n\n  - `Robot Gripper`_\n\n- Software\n\n  - `ROS2 Dashing`_ Desktop\n\n  - `robot_interface`_\n\n.. _UR5: https://www.universal-robots.com/products/ur5-robot\n\n.. _ROS2 Dashing: https://index.ros.org/doc/ros2/Installation/Dashing/Linux-Install-Debians/\n\n.. _robot_interface: https://github.com/intel/ros2_grasp_library/tree/master/grasp_utils/robot_interface\n\n.. _Robot Gripper: https://www.universal-robots.com/plus/end-effectors/hitbot-electric-gripper\n\nDownload and Build the Example Code\n------------------------------------\n\nWithin your ROS2 workspace, download and compile the example code:\n\n::\n\n  cd <path_of_your_ros2_workspace>/src\n\n  git clone https://github.com/intel/ros2_grasp_library.git\n\n  cd ..\n\n  colcon build --base-paths src/ros2_grasp_library/grasp_apps/fixed_position_pick\n\nLaunch the Application\n----------------------\n\n- Launch the application\n\n::\n\n  ros2 launch fixed_position_pick fixed_position_pick.launch.py\n\n.. note:: Please make sure the emergency button on the teach pendant is in your hand,\n          in case there is any accident.\n\n- Expected Outputs:\n\n  1. The robot moves to the home pose\n  2. The robot picks up an object from the predefined location\n  3. The robot places the object to another location\n  4. The robot moves back to the home pose\n"
  },
  {
    "path": "grasp_tutorials/doc/getting_start.rst",
    "content": "Getting Start\n=============\n\nThis tutorial introduces getting start to use this ROS2 Grasp Library.\n\n.. _GraspPlanning: http://docs.ros.org/api/moveit_msgs/html/srv/GraspPlanning.html\n.. _GPD: https://github.com/atenpas/gpd\n.. _OpenVINO™: https://software.intel.com/en-us/openvino-toolkit\n.. _Grasp: http://docs.ros.org/api/moveit_msgs/html/msg/Grasp.html\n\nROS2 Grasp Planner and Detector\n-------------------------------\n\nIn this section, you will start with an RGBD sensor connecting to a Ubuntu host machine.\n\nThe grasp detection relys on OpenVINO™ toolkit. Follow this `grasp_planner <grasp_planner.html>`_ instruction to install the toolkit, then build and install the ROS2 Grasp Planner and Detector with your camera.\n\nAfter launch the grasp planner, from rviz you will see grasp detection results highlighted as blue markers.\n\n.. image:: ./grasp_ros2/img/ros2_grasp_library.png\n   :width: 643 px\n   :height: 435 px\n   :align: center\n\nUse Launch Options for Customization\n------------------------------------\n\nROS2 parameters are supported to customize the Grasp Detector and Grasp Planner for your local workspace. For example, the topic name of point cloud from RGBD sensor, the camera workspace (in the frame_id of the point cloud image), the grasp approach direction and angle, the grasp boundary (in the frame_id of the robot base).\n\nRobot Interface\n---------------\n\nIn this section, you will bring up your robot by implementing the Robot Interface. Currently the robot interface is defined in C++, python vesion is still working in progress.\n\nRobot Interface are the minimum APIs a robot should provide to enable this solution. Follow this `robot_interface <bringup_robot.html>`_ insturction to implement the required `move`, `open`, `close`, `startLoop` interfaces.\n\nThen make sure your implementation passed the Robot Interface tests, to garantee later integration with the example applications. Also you may try the \"Robot Control Applications\" (like Draw X, fixed position pick and place) to verify your implemntation working well.\n\nHand-eye Calibration\n--------------------\n\nNow start to generate transformation between the camera and the robot. Follow this `handeye_calibration <handeye_calibration.html>`_ insturtion to finish the procedure of hand-eye calibration. The calibration procedure need to be done at the time when camera is setup. The resulting transformation will be remembered in your local environment for later publishing when launching the applications.\n\nLaunch Intelligent Visual Grasp Applications\n--------------------------------------------\n\nTo this step, you may start to launch the applications.\n\n`Random Picking <random_pick.html>`_ runs OpenVINO grasp detection on GPU, and sends request to ROS2 MoveIt Grasp Planner for grasp planning and detection. The most likely successful grasps are returned by the Grasp Pose Detection from CNN inference, taking 3D point cloud inputs from the camera. The picking order is not pre-defined, so called random picking.\n\n`Recognition Picking <recognize_pick.html>`_ runs OpenVINO grasp detection on GPU, and runs OpenVINO object segmentation on CPU or Movidius VPU. The masks of recognized objects are returned from the `mask_rcnn` model. The `place_publisher` publishing the name of the object to pick and the position to place, so called recognition picking.\n"
  },
  {
    "path": "grasp_tutorials/doc/grasp_api.rst",
    "content": "ROS2 Grasp Library APIs\n=======================\n\n.. _GraspPlanning: http://docs.ros.org/api/moveit_msgs/html/srv/GraspPlanning.html\n.. _GPD: https://github.com/atenpas/gpd\n.. _OpenVINO™: https://software.intel.com/en-us/openvino-toolkit\n.. _Grasp: http://docs.ros.org/api/moveit_msgs/html/msg/Grasp.html\n.. _PointCloud2: https://github.com/ros2/common_interfaces/blob/master/sensor_msgs/msg/PointCloud2.msg\n.. _ObjectsInMasks: https://github.com/intel/ros2_openvino_toolkit/blob/master/people_msgs/msg/ObjectsInMasks.msg\n.. _Image: https://github.com/ros2/common_interfaces/blob/master/sensor_msgs/msg/Image.msg\n.. _TransformStamped: https://github.com/ros2/common_interfaces/blob/master/geometry_msgs/msg/TransformStamped.msg\n\nGrasp Planning ROS2 Interfaces\n------------------------------\n\n- Subscribed Topics\n\n  - PointCloud2 topic from RGBD sensor (sensor_msgs::msg::`PointCloud2`_)\n\n  - Segmented object topic (people_msgs::msg::`ObjectsInMasks`_)\n\n- Delivered Services\n\n  - plan_grasps (moveit_msgs::srv::`GraspPlanning`_)\n\nHand-Eye Calibration ROS2 Interfaces\n------------------------------------\n\n- Subscribed Topics\n\n  - RGB image from sensor (sensor_msgs::msg::`Image`_)\n\n- Broadcasted Transforms\n\n  - Static transform btw camera and robot (geometry_msgs::msg::`TransformStamped`_)\n\nRobot Interface API\n-------------------\n\n- `API <../api/html/index.html>`_\n"
  },
  {
    "path": "grasp_tutorials/doc/grasp_planner.rst",
    "content": "Grasp Planner\n=============\n\nTutorials\n---------\n\n- `Install OpenVINO™ toolkit`_\n\n.. _Install OpenVINO™ toolkit: https://github.com/intel/ros2_grasp_library/tree/master/grasp_tutorials/doc/grasp_ros2/install_openvino.md\n\n- `Launch ROS2 Grasp Planner and Detector`_\n\n.. _Launch ROS2 Grasp Planner and Detector: https://github.com/intel/ros2_grasp_library/tree/master/grasp_tutorials/doc/grasp_ros2/tutorials_1_grasp_ros2_with_camera.md\n\n- `Launch tests`_\n\n.. _Launch tests: https://github.com/intel/ros2_grasp_library/tree/master/grasp_tutorials/doc/grasp_ros2/tutorials_2_grasp_ros2_test.md\n\n- `Use launch options`_\n\n.. _Use launch options: https://github.com/intel/ros2_grasp_library/tree/master/grasp_tutorials/doc/grasp_ros2/tutorials_3_grasp_ros2_launch_options.md\n\n"
  },
  {
    "path": "grasp_tutorials/doc/grasp_ros2/install_gpd.md",
    "content": "Installation guide for Grasp Pose Detection\n\n### Install [GPG](https://github.com/atenpas/gpg)\n1. Get the code\n```bash\ngit clone https://github.com/atenpas/gpg.git\ncd gpg\n```\n2. Build the library\n```bash\nmkdir build && cd build\ncmake ..\nmake\nsudo make install\n# by default, \"libgrasp_candidates_generator.so\" shall be installed to \"/usr/local/lib\"\n```\n\n### Install [GPD](https://github.com/sharronliu/gpd)\n1. Get the code, originally derived from [GPD](https://github.com/atenpas/gpd) tag 1.5.0\n```bash\ngit clone https://github.com/sharronliu/gpd.git\ngit checkout libgpd\ncd gpd/src/gpd\n```\n2. Build the library\n```bash\nmkdir build && cd build\ncmake -DUSE_OPENVINO=ON ..\nmake\nsudo make install\n# by default, \"libgrasp_pose_detection.so\" shall be installed to \"/usr/local/lib\"\n# and header files installed to \"/usr/local/include/gpd\"\n```\n"
  },
  {
    "path": "grasp_tutorials/doc/grasp_ros2/install_openvino.md",
    "content": "# Intel® DLDT toolkit and Intel® OpenVINO™ toolkit\n\nThis tutorial introduces the DLDT toolkit and OpenVINO toolkit.\n\nIntel® [DLDT](https://github.com/opencv/dldt) is a Deep Learning Deployment Toolkit common to all architectures. The toolkit allows developers to convert pre-trained deep learning models into optimized Intermediate Representation (IR) models, then deploy the IR models through a high-level C++ Inference Engine API integrated with application logic. Additionally, [Open Model Zoo](https://github.com/opencv/open_model_zoo) provides more than 100 pre-trained optimized deep learning models and a set of demos to expedite development of high-performance deep learning inference applications. Online tutorials are availble for\n* [Inference Engine Build Instructions](https://github.com/opencv/dldt/blob/2019/inference-engine/README.md)\n\nIntel® [OpenVINO™](https://software.intel.com/en-us/openvino-toolkit) (Open Visual Inference & Neural Network Optimization) toolkit enables CNN-based deep learning inference at the edge computation, extends workloads across Intel® hardware (including accelerators) and maximizes performance. The toolkit supports heterogeneous execution across various compution vision devices -- CPU, GPU, Intel® Movidius™ NCS, and FPGA -- using a common API. Online tutorials are available for\n* [Model Optimize Developer Guide](https://software.intel.com/en-us/articles/OpenVINO-ModelOptimizer)\n* [Inference Engine Developer Guide](https://software.intel.com/en-us/articles/OpenVINO-InferEngine)\n* [Intel® Neural Compute Stick 2](https://software.intel.com/en-us/neural-compute-stick/get-started)\n\n\n## Install DLDT and OpenVINO\nIt's recommended to refer to the online documents of the toolkits for the latest installation instruction. Below is detailed steps we verified with Ubuntu 18.04 on Intel NUC6i7KYK for your ref.\n1. Build and install Inference Engine\n   ```bash\n   git clone https://github.com/opencv/dldt.git\n   git checkout 2019_R3\n   # follow the instructions below to install all dependents, including mklml, opencl, etc.\n   # https://github.com/opencv/dldt/blob/2019_R3/inference-engine/README.md#build-on-linux-systems\n   # build\n   mkdir build && cd build\n   cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr/local -DGEMM=MKL -DMKLROOT=/usr/local/lib/mklml -DENABLE_MKL_DNN=ON -DENABLE_CLDNN=ON ..\n   make -j8\n   sudo make install\n   ```\n2. Share the CMake configures for Inference Engine to be found by other packages\n   ```bash\n   sudo mkdir /usr/share/InferenceEngine\n   sudo cp InferenceEngineConfig*.cmake /usr/share/InferenceEngine\n   sudo cp targets.cmake /usr/share/InferenceEngine\n   ```\n   Then Inference Engine will be found when adding \"find_package(InferenceEngine)\" into the CMakeLists.txt\n3. Configure library path for dynamic loading\n   ```bash\n   echo `pwd`/../bin/intel64/Release/lib | sudo tee -a /etc/ld.so.conf.d/openvino.conf\n   sudo ldconfig\n   ```\n4. Optionally install plug-ins for InferenceEngine deployment on heterogeneous devices\n   * Install [plug-in](https://github.com/opencv/dldt/blob/2019_R3/inference-engine/README.md#optional-additional-installation-steps-for-the-intel-movidius-neural-compute-stick-and-neural-compute-stick-2) for deployment on Intel Movidius Neural Computation Sticks Myriad X.\n"
  },
  {
    "path": "grasp_tutorials/doc/grasp_ros2/tutorials_1_grasp_ros2_with_camera.md",
    "content": "# OpenVINO Grasp Library with RGBD Camera\n\nThis tutorial introduce the OpenVINO environment setup, how to build and launch Grasp Library with RGBD camera.\n\n## Requirements\n### Hardware\n* Host running ROS2/ROS\n* RGBD sensor\n### Software\nWe verified the software with Ubuntu 18.04 Bionic and ROS2 Dashing release. Verification with ROS2 MoveIt is still work in progress. Before this, we have verified the grasp detection with MoveIt Melodic branch (tag 0.10.8) and our visual pick & place application to be shared as [MoveIt Example Apps](https://github.com/ros-planning/moveit_example_apps).\n* Install ROS2 packages\n  [ros-dashing-desktop](https://index.ros.org/doc/ros2/Installation/Dashing/Linux-Install-Debians)\n\n* Install non ROS packages\n  ```bash\n  sudo apt-get install libpcl-dev libeigen3-dev\n  ```\n\n* Install [Intel OpenVINO Toolkit](install_openvino.md)\n* Install [GPD](install_gpd.md)\n\n## Build Grasp Library\n```bash\n# get the source codes\nmkdir ~/ros2_ws/src -p\ncd ~/ros2_ws/src\ngit clone https://github.com/intel/ros2_grasp_library.git\n# copy GPD models\ncp -a <path-to-gpd-repo>/models ros2_grasp_library/gpd\n# build\ncd ..\nsource /opt/ros/dashing/setup.bash\ncolcon build --symlink-install --packages-select grasp_msgs moveit_msgs grasp_ros2\nsource ./install/local_setup.bash\n```\n\n## Launch Grasp Library\n```bash\n# Terminal 1, Optionally, launch Rviz2 to illustrate detection results.\nros2 run rviz2 rviz2 -d src/ros2_grasp_library/grasp_ros2/rviz2/grasp.rviz\n# Note You may customize the \".rviz\" file for your own camera, for example:\n# To change to fixed frame: \"Global Options -> Fixed Frame\"\n# To change the point cloud topic: \"Point Cloud 2 -> Topic\"\n\n# Terminal 2, launch RGBD camera\n# e.g. launch [ROS2 Realsenes](https://github.com/intel/ros2_intel_realsense/tree/refactor)\n# or, with a ros-bridge, launch any ROS OpenNI RGBD cameras, like [ROS Realsense](https://github.com/intel-ros/realsense)\nros2 run realsense_node realsense_node\n\n# Terminal 3, launch Grasp Library\nros2 run grasp_ros2 grasp_ros2 __params:=src/ros2_grasp_library/grasp_ros2/cfg/grasp_ros2_params.yaml\n```\n"
  },
  {
    "path": "grasp_tutorials/doc/grasp_ros2/tutorials_2_grasp_ros2_test.md",
    "content": "# Grasp Library Tests and Exampels\n\nThis tutorial documents Grasp Library tests which also serve as example codes for the usage of Grasp Library.\n\n## Grasp Library Tests\nTest Suites enabled:\n* ROS2 built-in tests for static code scanning like, copyright tests, cppcheck tests, cpplint tests, lint_cmake tests, uncrustify tests, xmllint tests.\n* Grasp ROS2 basic functional tests: tgrasp_ros2, basic tests cover ROS2 topic and ROS2 service of Grasp Library.\n\nBefore test, make sure you have setup the environment to build the Grasp Library, following tutorials [Grasp Library with RGBD Camera](tutorials_1_grasp_library_with_camera.md). The tests take inputs from a pre-stored PointCloud file (.pcd). Thus it's unnecessary to launch an RGBD camera.\n```bash\n# Terminal 1, launch Grasp Library\nros2 run grasp_ros2 grasp_ros2 __params:=src/ros2_grasp_library/grasp_ros2/cfg/test_grasp_ros2.yaml\n# Terminal 2, run tests\ncolcon test --packages-select grasp_msgs grasp_ros2\n```\nFor failed cases check detailed logs at \"log/latest_test/grasp_ros2/stdout.log\".\n\n## Grasp Library Examples\nThe [grasp test codes](../grasp_ros2/tests/tgrasp_ros2.cpp) also demonstrate how to use this Grasp Library for grasp detection and grasp planning.\n\n### Grasp Detection Example (Non-MoveIt App)\nThis example creats ROS2 subscription to the \"Detected Grasps\" topic and get the detection results from callback. Grasp Library is expected to work in 'auto_mode=true', sensor-driven grasp detection, see example launch options [here](../grasp_ros2/cfg/grasp_ros2_params.yaml).\n\n```bash\n#include <rclcpp/rclcpp.hpp>\n#include <grasp_msgs/msg/grasp_config_list.hpp>\n#include \"grasp_ros2/consts.hpp\"\n\nstatic rclcpp::Node::SharedPtr node = nullptr;\n\nstatic void topic_cb(const grasp_msgs::msg::GraspConfigList::SharedPtr msg)\n{\n  RCLCPP_INFO(node->get_logger(), \"Grasp Callback Received\");\n}\n\nint main(int argc, char * argv[])\n{\n  // init ROS2\n  rclcpp::init(argc, argv);\n  // create ROS2 node\n  node = rclcpp::Node::make_shared(\"GraspDetectionExample\");\n  // subscribe to the \"Detected Grasps\" topic\n  auto sub = node->create_subscription<grasp_msgs::msg::GraspConfigList>\n    (Consts::kTopicDetectedGrasps, rclcpp::QoS(rclcpp::KeepLast(1)), topic_cb);\n  // create ROS2 executor to process any pending in/out messages\n  rclcpp::spin(node);\n\n  node = nullptr;\n  rclcpp::shutdown();\n  return 0;\n}\n```\n\n### Grasp Planning Example (MoveIt App)\nThis example creates ROS2 client for the \"plan_grasps\" service and get the palnning results from async service response. 
Grasp Library is expected to work in 'auto_mode=false', service-driven grasp detection, see launch option example [here](../grasp_ros2/cfg/test_grasp_ros2.yaml).\n\n```cpp\n#include <rclcpp/rclcpp.hpp>\n#include <moveit_msgs/msg/move_it_error_codes.hpp>\n#include <moveit_msgs/srv/grasp_planning.hpp>\n#include \"grasp_ros2/consts.hpp\"\n\n// alias for the MoveIt grasp planning service type\nusing GraspPlanning = moveit_msgs::srv::GraspPlanning;\n\nstatic rclcpp::Node::SharedPtr node = nullptr;\nstatic std::shared_ptr<GraspPlanning::Response> result = nullptr;\n\nint main(int argc, char * argv[])\n{\n  // init ROS2\n  rclcpp::init(argc, argv);\n  // create ROS2 node\n  node = rclcpp::Node::make_shared(\"GraspPlanningExample\");\n  // create ROS2 client for MoveIt \"plan_grasps\" service\n  auto client = node->create_client<GraspPlanning>(\"plan_grasps\");\n  // wait for ROS2 service ready\n  while (!client->wait_for_service(std::chrono::seconds(1))) {\n    if (!rclcpp::ok()) {\n      RCLCPP_ERROR(node->get_logger(), \"Client interrupted\");\n      return 1;\n    }\n    RCLCPP_INFO(node->get_logger(), \"Wait for service\");\n  }\n  // fill in a request\n  auto request = std::make_shared<GraspPlanning::Request>();\n  // send async request\n  auto result_future = client->async_send_request(request);\n  RCLCPP_INFO(node->get_logger(), \"Request sent\");\n  // wait for response\n  if (rclcpp::spin_until_future_complete(node, result_future) !=\n    rclcpp::executor::FutureReturnCode::SUCCESS)\n  {\n    RCLCPP_ERROR(node->get_logger(), \"Request failed\");\n    return 1;\n  }\n  // get grasp planning results from response\n  result = result_future.get();\n  RCLCPP_INFO(node->get_logger(), \"Response received %d\", result->error_code.val);\n\n  node = nullptr;\n  rclcpp::shutdown();\n  return 0;\n}\n```\n"
  },
  {
    "path": "grasp_tutorials/doc/grasp_ros2/tutorials_3_grasp_ros2_launch_options.md",
    "content": "# Grasp Library Launch Options and Customization Notes\nThis tutorial documents the launch options which are used for customization. Each option will be introduced in the following format:\n* **option_name** [**default_value**|other_values]: Description of this option. Customization Notes.\n\n## GraspDetectorGPD Launch Options\n* **cloud_topic** [**\"/camera/depth_registered/points\"**|\"string\"]: Name of point cloud topic as input to the grasp detection, default value compliant with an RGBD OpenNI camera.\n* **device** [**0**|1|2|3]: Configure device for grasp pose inference to execute, 0 for CPU, 1 for GPU, 2 for VPU, 3 for FPGA. In case OpenVINO plug-ins are installed ([tutorial](install_openvino.md)), this configure deploy the CNN based deep learning inference on to the target device. Deploying the inference onto **GPU or VPU** will save CPU loads for other computation tasks.\n* **auto_mode** [false|**true**]: Configure grasp detection mode. When auto_mode is true, Grasp Library works in sensor-driven mode, processing grasp detection when point cloud message arrives. When auto_mode is false, Grasp Library works in service-driven mode, processing grasp detection when a service request arrives. Configure to **service-driven** mode will save most CPU loads against that of the sensor-driven mode.\n* **plane_remove** [**false**|true]: Configure whether or not remove the planes (like the table plane) from point cloud input. Enabling this helps to avoid generating grasp poses across the table.\n* **workspace** [**[-1.0, 1.0, -1.0, 1.0, -1.0, 1,0]**|[1*6 double]]: Configure a boundry cube in camera frame for grasp generation and detection. *This need to be customized according to user's setup.*\n* **finger_width** [**0.005**|double]: The finger thickness in metres. *This need to be customized according to user's robot hand.*\n* **hand_outer_diameter** [**0.12**|double]: The maximum robot hand aperture in metres. *This need to be customized according to user's robot hand.*\n* **hand_depth** [**0.06**|double]: The hand depth (the finger length) in metres. Tuning this parameter will affect the \"GraspConfig::bottom\" field (the hand base) in the grasp detection results. *This need to be customized according to user's robot hand.*\n* **hand_height** [**0.02**|double]: The finger breadth in metres. *This need to be customized according to user's robot hand.*\n\n## GraspPlanner Launch Options\n* **grasp_service_timeout** [**5**|double]: Timeout in seconds for a service request waiting for grasp detection result. Grasp Planner will not take point could inputs from the history buffer. Indeed after receiving a service request, Grasp Planner will start grasp detection on the coming point cloud input. This parameter configures the timeout period for Grasp Planner to wait for the grasp detection result. Usually this's an amount of max latencies in RGBD sensor, Grasp Detector, Grasp Planner, any other nodes in the pipeline, additionally with an estimated worst delay in the system.\n* **grasp_score_threshold** [**200**|integer]: Minimum score expected for grasps returned from this service.\n* **grasp_frame_id** [**\"base\"**|\"string\"]: Frame id expected for grasps returned from this service. 
When this parameter is specified, Grasp Planner tries to transform the grasp from the original frame (usually a camera's color frame) to this target frame, given the TF is available.\n* **grasp_approach** [**[0.0, 0.0, -1.0]**|[1*3 double]]: Specify the expected approach direction in the target frame specified by 'grasp_frame_id'. Grasp Planner will return grasp poses with an approach direction approximate to this parameter. This is useful when a MoveIt application wants to constrain the approach direction. *This needs to be customized according to the user's setup.*\n* **grasp_approach_angle** [**M_PI**|3.14|1.57|double]: Maximum angle in radians acceptable between the expected 'grasp_approach' and the real approach returned from this service. The default is [-M_PI, M_PI], which implies any approach direction is acceptable. *This needs to be customized according to the user's setup.*\n* **grasp_offset** [**[0.0, 0.0, 0.0]**|[1*3 double]]: Offset [x, y, z] in metres applied to the grasps detected. This offset allows adjustment of the final grasp position, to overcome errors that might be accumulated from camera calibration, hand-eye calibration, grasp pose detection, etc. *This needs to be customized according to the user's setup.*\n* **grasp_boundry** [**[-1.0, 1.0, -1.0, 1.0, -1.0, 1.0]**|[1*6 double]]: Boundary cube in grasp_frame_id expected for grasps returned from this service. This parameter takes effect only after transformation into the target frame specified by \"grasp_frame_id\". When the transformation is unavailable, boundary checking will be skipped, and in such a case the \"GraspDetectorGPD::workspace\" parameter still takes effect. *This needs to be customized according to the user's setup.*\n* **eef_offset** [**0.16**|double]: Offset in metres from the gripper base (finger root) to the parent link of the gripper. The parent link is usually the end of the robot arm. *This needs to be customized according to the gripper geometry.*\n* **eef_yaw_offset** [**0.0**|double]: Gripper yaw offset to its parent link, in radians. *This needs to be customized if the gripper has a yaw offset to the TCP (tool center point) of the robot arm.*\n* **finger_joint_names** [**[\"panda_finger_joint1\", \"panda_finger_joint2\"]**|[1*2 \"string\"]]: Joint names of the gripper fingers. Joint names are filled into MoveIt's grasp interface, to control the posture of the hand for 'pre_grasp_posture' and 'grasp_posture' (see [moveit_msgs::msg::Grasp](http://docs.ros.org/api/moveit_msgs/html/msg/Grasp.html)). Joint names are usually defined in the URDF of the robot hand. *This needs to be customized according to the user's setup.*\n\nFor other ROS parameters not mentioned here, refer to the code in [ros_params.cpp](../grasp_ros2/src/ros_params.cpp) for details.\n\n## Customization Notes\n* **Model training for grasp detection**: It depends on which back-end grasp detection algorithm is used. For [Grasp Pose Detection](https://github.com/atenpas/gpd), the model was trained with 185K labeled grasps and 55 object models from [bigBIRD](http://rll.berkeley.edu/bigbird). In case of any necessity to re-train, please refer to the discussion [#49](https://github.com/atenpas/gpd/issues/49) in the upstream project.\n"
  },
  {
    "path": "grasp_tutorials/doc/handeye_calibration.rst",
    "content": "Hand-eye Calibration\n=====================\n\nHand-eye calibration is used to get the camera pose with respect to the robot.\n\n- `handeye_target_detection <https://github.com/intel/ros2_grasp_library/tree/master/grasp_utils/handeye_target_detection>`_\n\n- `handeye_dashboard <https://github.com/intel/ros2_grasp_library/tree/master/grasp_utils/handeye_dashboard>`_\n\n"
  },
  {
    "path": "grasp_tutorials/doc/overview.rst",
    "content": "Overview\n========\n\n.. _GraspPlanning: http://docs.ros.org/api/moveit_msgs/html/srv/GraspPlanning.html\n.. _GPD: https://github.com/atenpas/gpd\n.. _OpenVINO™: https://software.intel.com/en-us/openvino-toolkit\n.. _Grasp: http://docs.ros.org/api/moveit_msgs/html/msg/Grasp.html\n\nROS2 Grasp Library consists of\n\n.. image:: ../_static/images/ros2_grasp_library.png\n   :width: 523 px\n   :height: 311 px\n   :align: center\n\n- A ROS2 Grasp Planner providing grasp planning service, as an extensible capability of MoveIt (moveit_msgs::srv::`GraspPlanning`_), translating grasp detection results into MoveIt Interfaces (moveit_msgs::msg::`Grasp`_). A ROS2 Grasp Detctor abstracting interfaces for grasp detection results\n\n- A ROS2 hand-eye calibration module generating transformation from camera frame to robot frame\n\n- Robot interfaces controlling the phsical robot to move, pick, place, as well as to feedback robot states\n\n- ROS2 example applications demonstrating how to use this ROS2 Grasp Library in advanced industrial usages for intelligent visual grasp\n"
  },
  {
    "path": "grasp_tutorials/doc/random_pick.rst",
    "content": "Random Pick (OpenVINO Grasp Detection)\n======================================\n\nOverview\n--------\n\nA simple application demonstrating how to pick up objects from clutter scenarios with an industrial robot arm.\nThe application interact with Grasp Planner and Robot Interface from this Grasp Library.\n\nThe Grasp Planner takes grasp detection results from `OpenVINO GPD <https://github.com/sharronliu/gpd>`_,\ntransforms the grasp pose from camera view\nto the robot view with the `Hand-Eye Calibration`_,\ntranslates the Grasp Pose into `moveit_msgs Grasp <http://docs.ros.org/api/moveit_msgs/html/msg/Grasp.html>`_.\n\nThe Robot Interface takes the grasp poses and place poses, to pick and place the object.\n\nWatch this `demo_video <https://www.youtube.com/embed/b4EPvHdidOA?rel=0>`_ to see the output of this application.\n\n.. raw:: html\n\n   <iframe width=\"700px\" height=\"394px\" src=\"https://www.youtube.com/embed/b4EPvHdidOA?rel=0\" frameborder=\"0\" allow=\"autoplay; encrypted-media\" allowfullscreen></iframe>\n\n\nRequirement\n-----------\n\nBefore running the code, make sure you have followed the instructions below\nto setup the environment.\n\n- Hardware\n\n  - Host running ROS2\n\n  - RGBD sensor\n\n  - `Robot Arm <https://www.universal-robots.com/products/ur5-robot>`_\n\n  - `Robot Gripper`_\n\n- Software\n\n  - `ROS2 <https://index.ros.org/doc/ros2/Installation/Dashing/Linux-Install-Debians>`_\n\n  - `Grasp Planner <grasp_planner.html>`_\n\n  - `Robot Interface <robot_interface.html>`_\n\n  - `Hand-Eye Calibration <handeye_calibration.html>`_\n\n  - RGBD Sensor\n\n    - `realsense <https://github.com/intel/ros2_intel_realsense/tree/refactor>`_\n\nDownload and Build the Application\n----------------------------------\n\nWithin your catkin workspace, download and compile the example code\n\n::\n\n  cd <path_of_your_ros2_workspace>/src\n\n  git clone https://github.com/intel/ros2_grasp_library.git\n\n  cd ..\n\n  colcon build --symlink-install\n\n- Build Options\n\n  - BUILD_RANDOM_PICK (**ON** | OFF)\n    Switch on/off building of this application\n\n\nLaunch the Application with Real Robot and Camera\n-------------------------------------------------\n\n- Publish handeye transform, refer to `Hand-Eye Calibration`_\n\n- Launch UR description\n\n::\n\n  ros2 launch ur_description view_ur5_ros2.launch.py\n\n  #load rviz2 configure file \"src/ros2_grasp_library/grasp_apps/random_pick/rviz2/random_pick.rviz\"\n\n- Launch RGBD sensor\n\n::\n\n  ros2 run realsense_node realsense_node\n\n- Launch random pick app\n\n::\n\n  ros2 run random_pick random_pick\n\n- Launch grasp planner\n\n::\n\n  ros2 run grasp_ros2 grasp_ros2 __params:=src/ros2_grasp_library/grasp_ros2/cfg/random_pick.yaml\n\n"
  },
  {
    "path": "grasp_tutorials/doc/recognize_pick.rst",
    "content": "Recognize Pick (OpenVINO Grasp Detection + OpenVINO Object Segmentation)\n========================================================================\n\nOverview\n--------\n\nA simple application demonstrating how to pick up recognized objects with an industrial robot arm.\nThe application interact with Grasp Planner and Robot Interface from this Grasp Library.\n\nComparing against the `random picking <random_pick.html>`_ application, this recognition picking takes the place commands published from the `place_publisher` which specifying the name the object to pick and the position to place.\n\nThe Grasp Detector then takes the object segmentation results from the `OpenVINO Mask-rcnn <https://github.com/intel/ros2_openvino_toolkit>`_ to identify the location of the object in the point cloud image and generates grasp poses for that specific object.\n\nWatch this `demo_video <https://www.youtube.com/embed/trIt0uKRXBs?rel=0>`_ to see the output of this application.\n\n.. raw:: html\n\n  <iframe width=\"700\" height=\"389\" src=\"https://www.youtube.com/embed/trIt0uKRXBs?list=PLxCmGJeiLgoxq3uqcCVSYnSJ9iQk1L9yP\" frameborder=\"0\" allow=\"accelerometer; autoplay; encrypted-media; gyroscope; picture-in-picture\" allowfullscreen></iframe>\n\nRequirement\n-----------\n\nBefore running the code, make sure you have followed the instructions below\nto setup the environment.\n\n- Hardware\n\n  - Host running ROS2\n\n  - RGBD sensor\n\n  - `Robot Arm <https://www.universal-robots.com/products/ur5-robot>`_\n\n  - `Robot Gripper`_\n\n- Software\n\n  - `ROS2 <https://index.ros.org/doc/ros2/Installation/Dashing/Linux-Install-Debians>`_\n\n  - `Grasp Planner <grasp_planner.html>`_\n\n  - `Robot Interface <robot_interface.html>`_\n\n  - `Hand-Eye Calibration <handeye_calibration.html>`_\n\n  - `ROS2 OpenVINO <https://github.com/intel/ros2_openvino_toolkit>`_\n\n  - RGBD Sensor\n\n    - `realsense <https://github.com/intel/ros2_intel_realsense/tree/refactor>`_\n\nDownload and Build the Application\n----------------------------------\n\nWithin your catkin workspace, download and compile the example code\n\n::\n\n  cd <path_of_your_ros2_workspace>/src\n\n  git clone https://github.com/intel/ros2_grasp_library.git\n\n  cd ..\n\n  colcon build --symlink-install\n\n- Build Options\n\n  - BUILD_RECOGNIZE_PICK (**ON** | OFF)\n    Switch on/off building of this application\n\n\nLaunch the Application with Real Robot and Camera\n-------------------------------------------------\n\n- Publish handeye transform, refer to `Hand-Eye Calibration`_\n\n- Publish place object\n\n::\n\n  ros2 run recognize_pick place_publisher sports_ball\n\n- Launch UR description\n\n::\n\n  ros2 launch ur_description view_ur5_ros2.launch.py\n\n  #load rviz2 configure file \"src/ros2_grasp_library/grasp_apps/recognize_pick/rviz2/recognize_pick.rviz\"\n\n- Launch RGBD sensor\n\n::\n\n  ros2 run realsense_node realsense_node\n\n- Launch object segmentation\n\n::\n\n  ros2 launch dynamic_vino_sample pipeline_segmentation.launch.py\n\n  # close the rviz2 window\n\n- Launch recognize pick app\n\n::\n\n  ros2 run recognize_pick recognize_pick\n\n- Launch grasp planner\n\n::\n\n  ros2 run grasp_ros2 grasp_ros2 __params:=src/ros2_grasp_library/grasp_ros2/cfg/recognize_pick.yaml\n\n"
  },
  {
    "path": "grasp_tutorials/doc/robot_interface.rst",
    "content": "Robot Interface\n===============\n\n- `Robot Interface <https://github.com/intel/ros2_grasp_library/tree/master/grasp_utils/robot_interface>`_\n\nRobot Control Apps\n-------------------\n.. toctree::\n   :maxdepth: 2\n\n   ./draw_x\n\n   ./fixed_position_pick\n"
  },
  {
    "path": "grasp_tutorials/doc/template.rst",
    "content": "[App Tutorial Template]\n=======================\n\nOverview\n--------\n*(Describe what this application is in one topic sentence,\nfollowed by a paragraph telling what this application does in details. E.g.)*\n\nA template of application tutorial.\nThe application tutorial contains an overview of the application,\nrequirements on hardware and software,\nguidance to download/build/launch the application,\nexpected output from the application, and customization notes.\n\nRequirements\n------------\n*(Describe the hardware and software requred\nto setup the environment for this application.\nProvide hyperlinkage to the procurement info or installation guides. E.g.)*\n\n- Hardware\n\n  - Host running ROS2\n\n  - `Robot Arm <https://www.universal-robots.com/products/ur5-robot>`_ (optional)\n\n- Software\n\n  - `ROS2 Dashing <https://index.ros.org/doc/ros2/Installation/Dashing/Linux-Install-Debians/>`_ Desktop\n\n  - `robot_interface`_\n\n.. _robot_interface: https://github.com/intel/ros2_grasp_library/tree/master/grasp_utils/robot_interface\n\nDownload and Build the Application\n----------------------------------\n*(Describe how to download and build the application.\nList build options specific to this application. E.g.)*\n\n::\n\n  cd <path_of_your_ros2_workspace>/src\n\n  git clone https://github.com/intel/ros2_grasp_library.git\n\n  cd ..\n\n  colcon build --symlink-install --ament-cmake-args -DBUILD_RANDOM_PICK=ON\n\n- Build Options\n\n  - BUILD_RANDOM_PICK (ON | **OFF** )\n    Switch on/off building of this application\n\nLaunch the Application\n----------------------\n*(Describe how to launch the application.\nProvide hyperlinkage to launch robot contollers.\nList launch options specific to this application. E.g.)*\n\n- Launch this application\n\n::\n\n  ros2 launch template template\n\n- Launch Options\n\n  - grasp_xyz (double | **\"0.545 0.107 0.15\"**)\n    Specify pick position in the \"base\" frame\n\n  - place_xyz (double | **\"-0.107 -0.545 -0.10\"**)\n    Specify place position in the \"base\" frame\n\nExpected Outputs\n----------------\n*(Describe expected outputs from this application.\nIllustrate with screen snapshot when necessary. E.g.)*\n\nYou should see Rviz output like this:\n\n.. image:: ../_static/images/pick_place.png\n\nCustomization Notes\n-------------------\n*(List possible customization items.\nGuide how to customize the application\non new environment and new robots. E.g.)*\n\n- **Change the pick position**\n  Use launch option \"grasp_xyz\" to change the pick position.\n\n- **Change the place position**\n  Use launch option \"place_xyz\" to change to place position.\n"
  },
  {
    "path": "grasp_tutorials/index.rst",
    "content": "Welcome to ROS2 Grasp Library Tutorials\n=======================================\n\nROS2 Grasp Library is a ROS2 intelligent visual grasp solution for advanced industrial usages, with OpenVINO™ grasp detection and MoveIt Grasp Planning. These tutorials aim to help quickly bringup the solution in a new working environment.\n\nThe tutorials introduce how to\n\n- Install, build, and launch the ROS2 Grasp Planner and Detector\n\n- Use launch options to customize in a new workspace\n\n- Bring up the intelligent visual grasp solution on a new robot\n\n- Do hand-eye calibration for a new camera setup\n\n- Launch the example applications\n\nContents:\n---------\n.. toctree::\n   :maxdepth: 2\n\n   doc/overview\n\n   doc/getting_start\n\n   doc/grasp_planner\n\n   doc/robot_interface\n\n   doc/bringup_robot\n\n   doc/handeye_calibration\n\n   doc/random_pick\n\n   doc/recognize_pick\n\n   doc/grasp_api\n\n   doc/template\n"
  },
  {
    "path": "grasp_tutorials/package.xml",
    "content": "<?xml version=\"1.0\"?>\n<?xml-model href=\"http://download.ros.org/schema/package_format2.xsd\" schematypens=\"http://www.w3.org/2001/XMLSchema\"?>\n<package format=\"2\">\n  <name>grasp_tutorials</name>\n  <version>0.5.0</version>\n  <description>Instructions and demo code for developing intelligent manipulation app with this ROS2 Grasp Library</description>\n  <license>Apache License 2.0</license>\n  <maintainer email=\"sharron.liu@intel.com\">Sharron Liu</maintainer>\n  <maintainer email=\"yu.yan@intel.com\">Yu Yan</maintainer>\n  <author email=\"sharron.liu@intel.com\">Sharron Liu</author>\n  <author email=\"yu.yan@intel.com\">Yu Yan</author>\n\n  <buildtool_depend>ament_cmake</buildtool_depend>\n  <build_depend>rclcpp</build_depend>\n  <exec_depend>rclcpp</exec_depend>\n  <test_depend>ament_lint_auto</test_depend>\n  <test_depend>ament_lint_common</test_depend>\n\n  <export>\n    <build_type>ament_cmake</build_type>\n  </export>\n\n</package>\n"
  },
  {
    "path": "grasp_utils/handeye_dashboard/README.md",
    "content": "# handeye_dashboard\n\n## 1.Prerequisite\n\n* System install\n  * Install [ROS2 Dashing](https://index.ros.org/doc/ros2/Installation/Dashing/Linux-Install-Debians/)\n* Install [handeye](https://github.com/RoboticsYY/handeye)\n* Install [criutils](https://github.com/RoboticsYY/criutils)\n* Install [baldor](https://github.com/RoboticsYY/baldor)\n* Install [handeye_tf_service](https://github.com/intel/ros2_grasp_library/tree/master/grasp_utils/handeye_tf_service)\n* Install [handeye_target_detection](https://github.com/intel/ros2_grasp_library/tree/master/grasp_utils/handeye_target_detection)\n\n## 2. Build and install\n\n```shell\nsudo apt install python3-numpy python3-scipy\n```\n\nBuild with the `ros2_grasp_library` package. Installation instructions refer to [here](https://github.com/intel/ros2_grasp_library/blob/master/grasp_tutorials/doc/grasp_ros2/tutorials_1_grasp_ros2_with_camera.md).\n\n## 3.Run\n\n### 3.1 Bring up realsense camera\n\n```shell\nros2 run realsense_node realsense_node __params:=`ros2 pkg prefix realsense_examples`/share/realsense_examples/config/d435.yaml\n```\n\n> Note: other cameras can be used, only need to check that the image topic and camera info topic are published\n\n### 3.2 Bring up calibration board detection\n\n```shell\nros2 launch handeye_target_detection pose_estimation.launch.py\n```\n\nFor detailed information, please refer to the package [handeye_target_detection](https://github.com/intel/ros2_grasp_library/tree/master/grasp_utils/handeye_target_detection)\n\nIf runing successfully, you should see something similar to: \n<img src=\"doc/images/handeye_target_detection.png\" width=\"640\"/>\n\n\nThe detection result is displayed on the left panel of the Rviz2.\n\n### 3.3 Bring up UR5 robot\n\n```shell\n# Terminal 1 (robot frames tf update)\nros2 launch robot_interface ur_test.launch.py move:=false\n\n# Terminal 2 (robot state display in Rviz2)\nros2 launch ur_description view_ur5_ros2.launch.py\n```\n\nThe realtime robot state is displayed:\n\n<img src=\"doc/images/robot_state.png\" width=\"640\"/>\n\n> Note: any robot can be used, only ensure that the robot ROS2 driver is publishing the joint states and link TFs at rate of at least 125Hz\n\n### 3.4 Bring up calibration dashboard\n\n```shell\nros2 launch handeye_dashboard handeye_dashboard.launch.py\n```\n\nIf running successfully, a rqt dashboard similar to the below photo should show up:\n<img src=\"doc/images/handeye_dashboard.png\" width=\"640\"/>\n\nOn the panel of the dashboard, user can input the names of `Camera-Frame`, `Object-Frame`, `Robot-Base-Frame` and `End-Effector-Frame`. The calibration will lookup the TF transforms:\n\n* From `Camera-Frame` to `Object-Frame` \n* From `Robot-Base-Frame` to `End-Effector-Frame`\n\nThe calibration process is controlled by the four buttons on the left panel of the dashboard:\n\n* Step 0: Select camera mount type, `attached on robot` or `fixed beside robot`.\n* Step 1: Use the first button to take snapshots of the two transforms.\n* Step 2: After enough samples are taken, use the second button to save the snapshots and make the AX=XB calculation.\n* Step 3: Use the fourth button to publish the static TF transform between `Camera-Frame` and `Robot-Base-Frame`. 
Please check TF or the point cloud in Rviz2 to make sure the camera pose is published.\n\n> Note: Be careful with the third button; it is used to clear the snapshots and the calculation result.\n\n### 3.5 Publish the calibration result\n\nPlease check the result at `/tmp/camera-robot.txt`:\n\n```yaml\ncamera-robot pose:\n  Translation: [-0.032727495589941216, -0.09304065368400717, 0.0003508296697299189]\n  Rotation: in Quaternion [0.9997471812284859, 0.01090594636560865, -0.009141740972837598, -0.0174086912647742]\n```\n\nThe result can be published on the command line, without launching the whole GUI:\n\n```shell\n# ros2 run tf2_ros static_transform_publisher <Translation x y z> <Rotation x y z w> <from_frame> <to_frame>\n# NOTE the quaternion stored in \"camera-robot.txt\" is <w, x, y, z>, so it is reordered below\n\nros2 run tf2_ros static_transform_publisher -0.032727495589941216 -0.09304065368400717 0.0003508296697299189 0.01090594636560865 -0.009141740972837598 -0.0174086912647742 0.9997471812284859 base camera_link\n```\n\n## 4. Result\n\nA result with the camera mounted on the robot end-effector looks like this:\n\n<img src=\"doc/images/handeye_calibration_result.png\" width=\"640\"/>\n\nA video of the calibration process can be found at:\n[handeye_calibration_demo](https://videoportal.intel.com/media/Industrial-robot-hand-eye-calibration/0_8ddlp0p1)\n\n###### *Any security issue should be reported using the process at https://01.org/security*\n"
  },
  {
    "path": "grasp_utils/handeye_dashboard/config/Default.perspective",
    "content": "{\n  \"keys\": {},\n  \"groups\": {\n    \"mainwindow\": {\n      \"keys\": {\n        \"geometry\": {\n          \"repr(QByteArray.hex)\": \"QtCore.QByteArray(b'01d9d0cb000200000000044a0000004e0000070b000003620000044a0000006c0000070b0000036200000000000000000780')\",\n          \"type\": \"repr(QByteArray.hex)\",\n          \"pretty-print\": \"     J N   b J l   b     \"\n        },\n        \"state\": {\n          \"repr(QByteArray.hex)\": \"QtCore.QByteArray(b'000000ff00000000fd0000000100000003000002c2000002cdfc010000000afb0000006a006300720069006e00730070006500630074005f00640061007300680062006f006100720064005f005f00450064006700650044006500740065006300740069006f006e005f005f0031005f005f004500640067006500200044006500740065006300740069006f006e0100000000000002580000000000000000fb0000003c007200710074005f00700079005f0063006f006e0073006f006c0065005f005f005000790043006f006e0073006f006c0065005f005f0031005f005f0100000000000002580000000000000000fb0000006c007200710074005f007200650063006f006e006600690067007500720065005f005f0050006100720061006d005f005f0031005f005f005f0070006c007500670069006e0063006f006e007400610069006e00650072005f0074006f0070005f00770069006400670065007401000000000000073f0000000000000000fb00000044007200710074005f00670072006100700068005f005f0052006f007300470072006100700068005f005f0031005f005f0052006f007300470072006100700068005500690100000000000007800000000000000000fb0000005a007200710074005f0069006d006100670065005f0076006900650077005f005f0049006d0061006700650056006900650077005f005f0031005f005f0049006d00610067006500560069006500770057006900640067006500740100000000000007800000000000000000fb0000008c00680061006e0064006500790065005f00640061007300680062006f006100720064005f005f00480061006e006400450079006500430061006c006900620072006100740069006f006e005f005f0031005f005f0020004300520049002000470072006f00750070003a00200042006f007200640065007200200044006500740065006300740069006f006e01000000000000073f0000000000000000fb000000a600680061006e0064006500790065005f00640061007300680062006f006100720064005f005f00480061006e006400450079006500430061006c006900620072006100740069006f006e005f005f0031005f005f00200049006e00740065006c0020004f0054004300200052006f0062006f0074006900630073003a002000480061006e0064002d004500790065002000430061006c006900620072006100740069006f006e0100000000000002c20000017200fffffffb00000026007200710074005f007200760069007a005f005f005200560069007a005f005f0031005f005f0100000000000007800000000000000000fb0000004c007200710074005f00740066005f0074007200650065005f005f0052006f0073005400660054007200650065005f005f0031005f005f0052006f0073005400660054007200650065005500690100000000000007800000000000000000fb00000058007200710074005f007000750062006c00690073006800650072005f005f005000750062006c00690073006800650072005f005f0031005f005f005000750062006c006900730068006500720057006900640067006500740100000000000007800000000000000000000002c20000000000000004000000040000000800000008fc00000001000000030000000100000036004d0069006e0069006d0069007a006500640044006f0063006b00570069006400670065007400730054006f006f006c0062006100720000000000ffffffff0000000000000000')\",\n          \"type\": \"repr(QByteArray.hex)\",\n          \"pretty-print\": \"                 jcrinspect_dashboard__EdgeDetection__1__Edge Detection          <rqt_py_console__PyConsole__1__          lrqt_reconfigure__Param__1___plugincontainer_top_widget          Drqt_graph__RosGraph__1__RosGraphUi          Zrqt_image_view__ImageView__1__ImageViewWidget           handeye_dashboard__HandEyeCalibration__1__ CRI Group: Border Detection           
handeye_dashboard__HandEyeCalibration__1__ Intel OTC Robotics: Hand-Eye Calibration          &rqt_rviz__RViz__1__          Lrqt_tf_tree__RosTfTree__1__RosTfTreeUi          Xrqt_publisher__Publisher__1__PublisherWidget                            6MinimizedDockWidgetsToolbar        \"\n        }\n      },\n      \"groups\": {\n        \"toolbar_areas\": {\n          \"keys\": {\n            \"MinimizedDockWidgetsToolbar\": {\n              \"repr\": \"8\",\n              \"type\": \"repr\"\n            }\n          },\n          \"groups\": {}\n        }\n      }\n    },\n    \"pluginmanager\": {\n      \"keys\": {\n        \"running-plugins\": {\n          \"repr\": \"{'handeye_dashboard/HandEyeCalibration': [1]}\",\n          \"type\": \"repr\"\n        }\n      },\n      \"groups\": {\n        \"plugin__crinspect_dashboard__EdgeDetection__1\": {\n          \"keys\": {},\n          \"groups\": {\n            \"dock_widget__Edge Detection\": {\n              \"keys\": {\n                \"dock_widget_title\": {\n                  \"repr\": \"'Edge Detection'\",\n                  \"type\": \"repr\"\n                },\n                \"dockable\": {\n                  \"repr\": \"'true'\",\n                  \"type\": \"repr\"\n                },\n                \"parent\": {\n                  \"repr\": \"None\",\n                  \"type\": \"repr\"\n                }\n              },\n              \"groups\": {}\n            }\n          }\n        },\n        \"plugin__handeye_dashboard__HandEyeCalibration__1\": {\n          \"keys\": {},\n          \"groups\": {\n            \"dock_widget__ CRI Group: Border Detection\": {\n              \"keys\": {\n                \"dock_widget_title\": {\n                  \"repr\": \"' CRI Group: Border Detection'\",\n                  \"type\": \"repr\"\n                },\n                \"dockable\": {\n                  \"repr\": \"'true'\",\n                  \"type\": \"repr\"\n                },\n                \"parent\": {\n                  \"repr\": \"None\",\n                  \"type\": \"repr\"\n                }\n              },\n              \"groups\": {}\n            },\n            \"dock_widget__ Intel OTC Robotics: Hand-Eye Calibration\": {\n              \"keys\": {\n                \"dock_widget_title\": {\n                  \"repr\": \"' Intel OTC Robotics: Hand-Eye Calibration'\",\n                  \"type\": \"repr\"\n                },\n                \"dockable\": {\n                  \"repr\": \"True\",\n                  \"type\": \"repr\"\n                },\n                \"parent\": {\n                  \"repr\": \"None\",\n                  \"type\": \"repr\"\n                }\n              },\n              \"groups\": {}\n            }\n          }\n        },\n        \"plugin__rqt_graph__RosGraph__1\": {\n          \"keys\": {},\n          \"groups\": {\n            \"dock_widget__RosGraphUi\": {\n              \"keys\": {\n                \"dock_widget_title\": {\n                  \"repr\": \"'Node Graph'\",\n                  \"type\": \"repr\"\n                },\n                \"dockable\": {\n                  \"repr\": \"'true'\",\n                  \"type\": \"repr\"\n                },\n                \"parent\": {\n                  \"repr\": \"None\",\n                  \"type\": \"repr\"\n                }\n              },\n              \"groups\": {}\n            },\n            \"plugin\": {\n              \"keys\": {\n                \"actionlib_check_box_state\": {\n             
     \"repr\": \"'true'\",\n                  \"type\": \"repr\"\n                },\n                \"auto_fit_graph_check_box_state\": {\n                  \"repr\": \"'true'\",\n                  \"type\": \"repr\"\n                },\n                \"dead_sinks_check_box_state\": {\n                  \"repr\": \"'true'\",\n                  \"type\": \"repr\"\n                },\n                \"filter_line_edit_text\": {\n                  \"repr\": \"'/'\",\n                  \"type\": \"repr\"\n                },\n                \"graph_type_combo_box_index\": {\n                  \"repr\": \"'2'\",\n                  \"type\": \"repr\"\n                },\n                \"group_image_check_box_state\": {\n                  \"repr\": \"'true'\",\n                  \"type\": \"repr\"\n                },\n                \"group_tf_check_box_state\": {\n                  \"repr\": \"'true'\",\n                  \"type\": \"repr\"\n                },\n                \"hide_dynamic_reconfigure_check_box_state\": {\n                  \"repr\": \"'true'\",\n                  \"type\": \"repr\"\n                },\n                \"hide_tf_nodes_check_box_state\": {\n                  \"repr\": \"'false'\",\n                  \"type\": \"repr\"\n                },\n                \"highlight_connections_check_box_state\": {\n                  \"repr\": \"'true'\",\n                  \"type\": \"repr\"\n                },\n                \"leaf_topics_check_box_state\": {\n                  \"repr\": \"'true'\",\n                  \"type\": \"repr\"\n                },\n                \"namespace_cluster_check_box_state\": {\n                  \"repr\": \"'true'\",\n                  \"type\": \"repr\"\n                },\n                \"namespace_cluster_spin_box_value\": {\n                  \"repr\": \"'0'\",\n                  \"type\": \"repr\"\n                },\n                \"quiet_check_box_state\": {\n                  \"repr\": \"'true'\",\n                  \"type\": \"repr\"\n                },\n                \"topic_filter_line_edit_text\": {\n                  \"repr\": \"'/'\",\n                  \"type\": \"repr\"\n                },\n                \"unreachable_check_box_state\": {\n                  \"repr\": \"'true'\",\n                  \"type\": \"repr\"\n                }\n              },\n              \"groups\": {}\n            }\n          }\n        },\n        \"plugin__rqt_image_view__ImageView__1\": {\n          \"keys\": {},\n          \"groups\": {\n            \"dock_widget__ImageViewWidget\": {\n              \"keys\": {\n                \"dock_widget_title\": {\n                  \"repr\": \"'Image View'\",\n                  \"type\": \"repr\"\n                },\n                \"dockable\": {\n                  \"repr\": \"'true'\",\n                  \"type\": \"repr\"\n                },\n                \"parent\": {\n                  \"repr\": \"None\",\n                  \"type\": \"repr\"\n                }\n              },\n              \"groups\": {}\n            },\n            \"plugin\": {\n              \"keys\": {\n                \"dynamic_range\": {\n                  \"repr\": \"'false'\",\n                  \"type\": \"repr\"\n                },\n                \"max_range\": {\n                  \"repr\": \"'10'\",\n                  \"type\": \"repr\"\n                },\n                \"mouse_pub_topic\": {\n                  \"repr\": \"'/camera/color/image_raw_mouse_left'\",\n                  
\"type\": \"repr\"\n                },\n                \"num_gridlines\": {\n                  \"repr\": \"'0'\",\n                  \"type\": \"repr\"\n                },\n                \"publish_click_location\": {\n                  \"repr\": \"'false'\",\n                  \"type\": \"repr\"\n                },\n                \"rotate\": {\n                  \"repr\": \"'0'\",\n                  \"type\": \"repr\"\n                },\n                \"smooth_image\": {\n                  \"repr\": \"'false'\",\n                  \"type\": \"repr\"\n                },\n                \"toolbar_hidden\": {\n                  \"repr\": \"'false'\",\n                  \"type\": \"repr\"\n                },\n                \"topic\": {\n                  \"repr\": \"'/camera/color/image_raw'\",\n                  \"type\": \"repr\"\n                },\n                \"zoom1\": {\n                  \"repr\": \"'false'\",\n                  \"type\": \"repr\"\n                }\n              },\n              \"groups\": {}\n            }\n          }\n        },\n        \"plugin__rqt_publisher__Publisher__1\": {\n          \"keys\": {},\n          \"groups\": {\n            \"plugin\": {\n              \"keys\": {\n                \"publishers\": {\n                  \"repr\": \"'[]'\",\n                  \"type\": \"repr\"\n                }\n              },\n              \"groups\": {}\n            }\n          }\n        },\n        \"plugin__rqt_py_console__PyConsole__1\": {\n          \"keys\": {},\n          \"groups\": {\n            \"plugin\": {\n              \"keys\": {\n                \"use_spyderlib\": {\n                  \"repr\": \"'false'\",\n                  \"type\": \"repr\"\n                }\n              },\n              \"groups\": {}\n            }\n          }\n        },\n        \"plugin__rqt_reconfigure__Param__1\": {\n          \"keys\": {},\n          \"groups\": {\n            \"plugin\": {\n              \"keys\": {\n                \"_splitter\": {\n                  \"repr(QByteArray.hex)\": \"QtCore.QByteArray(b'000000ff00000001000000020000012c000000640100000009010000000200')\",\n                  \"type\": \"repr(QByteArray.hex)\",\n                  \"pretty-print\": \"       , d     \"\n                },\n                \"splitter\": {\n                  \"repr(QByteArray.hex)\": \"QtCore.QByteArray(b'000000ff0000000100000002000000ae0000006401ffffffff010000000100')\",\n                  \"type\": \"repr(QByteArray.hex)\",\n                  \"pretty-print\": \"         d     \"\n                }\n              },\n              \"groups\": {}\n            }\n          }\n        },\n        \"plugin__rqt_rviz__RViz__1\": {\n          \"keys\": {},\n          \"groups\": {\n            \"dock_widget__\": {\n              \"keys\": {\n                \"dock_widget_title\": {\n                  \"repr\": \"'RViz[*]'\",\n                  \"type\": \"repr\"\n                },\n                \"dockable\": {\n                  \"repr\": \"'true'\",\n                  \"type\": \"repr\"\n                },\n                \"parent\": {\n                  \"repr\": \"None\",\n                  \"type\": \"repr\"\n                }\n              },\n              \"groups\": {}\n            },\n            \"plugin\": {\n              \"keys\": {\n                \"hide_menu\": {\n                  \"repr\": \"'false'\",\n                  \"type\": \"repr\"\n                },\n                \"rviz_config_file\": {\n                  
\"repr\": \"''\",\n                  \"type\": \"repr\"\n                }\n              },\n              \"groups\": {}\n            }\n          }\n        },\n        \"plugin__rqt_tf_tree__RosTfTree__1\": {\n          \"keys\": {},\n          \"groups\": {\n            \"dock_widget__RosTfTreeUi\": {\n              \"keys\": {\n                \"dock_widget_title\": {\n                  \"repr\": \"'TF Tree'\",\n                  \"type\": \"repr\"\n                },\n                \"dockable\": {\n                  \"repr\": \"'true'\",\n                  \"type\": \"repr\"\n                },\n                \"parent\": {\n                  \"repr\": \"None\",\n                  \"type\": \"repr\"\n                }\n              },\n              \"groups\": {}\n            },\n            \"plugin\": {\n              \"keys\": {\n                \"auto_fit_graph_check_box_state\": {\n                  \"repr\": \"'true'\",\n                  \"type\": \"repr\"\n                },\n                \"highlight_connections_check_box_state\": {\n                  \"repr\": \"'true'\",\n                  \"type\": \"repr\"\n                }\n              },\n              \"groups\": {}\n            }\n          }\n        }\n      }\n    }\n  }\n}"
  },
  {
    "path": "grasp_utils/handeye_dashboard/data/camera-robot.json",
    "content": "[[-0.036301454848076786, 0.9992976851533406, -0.009291976274627967, 0.04561821843056508], [-0.6780505333941169, -0.0314598013057813, -0.7343417154606937, 0.9203069217128195], [-0.7341183000987421, -0.020357243157558547, 0.6787163649943599, 0.17330773243084327], [0.0, 0.0, 0.0, 1.0]]"
  },
  {
    "path": "grasp_utils/handeye_dashboard/data/dataset.json",
    "content": "[[[[-0.010438622230043895, -0.9979323269914826, -0.06342007498657491, 0.0033080439705093223], [-0.9998615024531576, 0.00959459608795786, 0.013598516022826457, 0.5008113139582124], [-0.012961908734925993, 0.06355324123342021, -0.9978942701762915, -0.060372301548597254], [0.0, 0.0, 0.0, 1.0]], [[-0.6875586751047162, -0.03632578244501072, -0.725219625918938, 0.41460271073562827], [-0.0031314092930839615, -0.998590264027733, 0.052987534995138044, 0.03330071578016419], [-0.7261220713930171, 0.03870299883445136, 0.6864756480146507, 0.09952444636656357], [0.0, 0.0, 0.0, 1.0]]], [[[-0.008961748248317598, -0.9976655439724454, -0.06769896187164245, 0.0028182733709074537], [-0.9998684260250371, 0.008024937238872248, 0.014097199031256578, 0.6464087551366062], [-0.013521009819849696, 0.0678163899988527, -0.9976061946183847, -0.059843427479115904], [0.0, 0.0, 0.0, 1.0]], [[-0.6924936824719508, -0.030749517597935542, -0.7207683170783316, 0.31350517562335173], [-0.007532279185535794, -0.998728577263327, 0.04984469608534958, 0.03153966556375045], [-0.7213846162115652, 0.03994616533616177, 0.6913816163076862, -0.007850266387121965], [0.0, 0.0, 0.0, 1.0]]], [[[0.8153072932423987, -0.5766200979996318, -0.05275680210786331, 0.002662818758949026], [-0.5786681123413987, -0.8146249108547272, -0.03910844377074442, 0.6463802399431477], [-0.020426290534398903, 0.06241407852257416, -0.9978412947243572, -0.059865494315304565], [0.0, 0.0, 0.0, 1.0]], [[-0.4013627744063597, 0.5230139564457451, -0.7519071250385897, 0.299941779110818], [-0.8239569152509737, -0.5646975644299052, 0.047028316332101694, -0.04789781589522607], [-0.40000365639694946, 0.638414490820737, 0.6575895473463446, -0.023315978687881544], [0.0, 0.0, 0.0, 1.0]]], [[[-0.8970241516797143, -0.4412984087619038, -0.024563911079048845, 0.002953784838201412], [-0.4419614617851944, 0.8950674765067383, 0.05936563649581739, 0.6463138544761147], [-0.004211603018081662, 0.06410871176424487, -0.9979340336294508, -0.05999949008282422], [0.0, 0.0, 0.0, 1.0]], [[-0.33076955748461745, -0.648125834293247, -0.685947813439997, 0.3719432878657163], [0.8884903484340645, -0.45886664830224433, 0.0051283345619362875, 0.05767167682226739], [-0.3180823801799544, -0.6077617148172029, 0.7276326665437087, 0.057105059984915746], [0.0, 0.0, 0.0, 1.0]]], [[[-0.018504063963196066, -0.7074024329759976, -0.7065687492639943, 0.0032372935495781907], [-0.9997353522992329, 0.003429870389126538, 0.02274777686374513, 0.6470038183905598], [-0.013668393467225648, 0.7068026837871748, -0.7072786870896592, -0.059357288931555244], [0.0, 0.0, 0.0, 1.0]], [[-0.6853658446821367, -0.5029286718491626, -0.5266273919719515, 0.3421076429504047], [-0.005078548612430511, -0.7198729124525078, 0.6940873131394464, 0.015975601735145794], [-0.7281812049806924, 0.4783782404637381, 0.4908221589982311, -0.0343332739710048], [0.0, 0.0, 0.0, 1.0]]], [[[-0.0027504657357910467, -0.7332322601613177, 0.6799727109207854, 0.002845472758815526], [-0.9999765337069577, 0.006283090410640596, 0.0027303498511312974, 0.6457053198611876], [-0.006274310611859846, -0.6799492447481779, -0.733232335343122, -0.060626899365343226], [0.0, 0.0, 0.0, 1.0]], [[-0.6915333591046056, 0.506773295308891, -0.5147450246551869, 0.28224904063777717], [-0.013673564674840738, -0.7216592689850192, -0.6921133816919678, 0.013756602316886647], [-0.7222150973737563, -0.471581092337019, 0.5059808558390722, 0.02266467777148267], [0.0, 0.0, 0.0, 1.0]]], [[[-0.012332063454638623, -0.9994619378966334, -0.030393336553540984, 0.004097993218297791], 
[-0.8407591429834274, -0.006089176491371709, 0.5413750875496854, 0.64615193085309], [-0.5412688645218044, 0.03222974752558527, -0.8402316702396508, -0.06021230754793962], [0.0, 0.0, 0.0, 1.0]], [[-0.9661547910294987, -0.012223939964042576, -0.25767323311221146, 0.29676729316642564], [0.0036632151669977134, -0.9994260516729855, 0.033677115256302015, 0.03040455216058514], [-0.25793700902620564, 0.03159339375726285, 0.9656450470258281, 0.02490697506899888], [0.0, 0.0, 0.0, 1.0]]], [[[-0.003357174128429726, -0.9988910351698911, -0.04696199781839158, 0.001835885317386543], [-0.9262102644554243, 0.02081055203932836, -0.3764325529775965, 0.6464912842041449], [0.3769924076149067, 0.04223293479077231, -0.9252529944937621, -0.05980008656208931], [0.0, 0.0, 0.0, 1.0]], [[-0.36670078822199215, -0.010448989784316194, -0.9302802537568218, 0.3345830485083729], [-0.0032679176553805146, -0.9999162903431543, 0.012519305914542166, 0.03181000787689677], [-0.9303331944156175, 0.007630918612558647, 0.3666359453866483, -0.020043688788973814], [0.0, 0.0, 0.0, 1.0]]], [[[0.7446574437823862, -0.6666613964297933, -0.03237088089256192, 0.0031643597102820886], [-0.6673335562736452, -0.7445484406193338, -0.017707180534699947, 0.5072233820073454], [-0.012296995187933639, 0.03478795885930682, -0.9993190590736032, -0.060367525210451176], [0.0, 0.0, 0.0, 1.0]], [[-0.443910069891965, 0.49443533092939984, -0.7473135576029954, 0.3933280171180422], [-0.7558807924534723, -0.6545146846422305, 0.01596105220817956, -0.03640303307207322], [-0.4812359893528553, 0.571965235933458, 0.6642798291647597, 0.07890536429810306], [0.0, 0.0, 0.0, 1.0]]], [[[-0.7429141111748753, -0.6690501291956219, -0.021226116945095193, 0.0032741050943635164], [-0.6693717116236195, 0.7423112635608914, 0.0302572250660696, 0.5071620568017613], [-0.004487214649550014, 0.03668668169723873, -0.9993167427251155, -0.06044055523705681], [0.0, 0.0, 0.0, 1.0]], [[-0.48046806705501044, -0.5087015322537862, -0.7144040786719175, 0.45071476151966217], [0.7282227011695961, -0.6853383186275743, -0.0017568500179715452, 0.06319180175542652], [-0.48871477780161143, -0.5210893782292808, 0.699731181136687, 0.1433650271735377], [0.0, 0.0, 0.0, 1.0]]], [[[-0.039261876638999, -0.998736322425603, -0.031372971019117554, 0.003856882507325049], [-0.9588480663373058, 0.028821203940427136, 0.2824530472213641, 0.5070693530645017], [-0.28119191084381123, 0.041171549293251716, -0.9587679660923178, -0.060559579456594806], [0.0, 0.0, 0.0, 1.0]], [[-0.8544709851442949, -0.03024868497042943, -0.5186177326356064, 0.3978059700725798], [0.018841084449487407, -0.999451044021017, 0.027251131023605622, 0.03521484300132904], [-0.5191573452078907, 0.013513980274447418, 0.8545718362161469, 0.11068576191621118], [0.0, 0.0, 0.0, 1.0]]], [[[-0.03371402648449002, -0.9984542314943421, -0.04418723830752613, 0.0021974138904805596], [-0.9205773739257974, 0.04823658198065089, -0.3875700333806871, 0.5074380087534432], [0.3891023811724699, 0.02761122543218472, -0.9207806238165785, -0.06020099510968434], [0.0, 0.0, 0.0, 1.0]], [[-0.3412676886674044, -0.02648267536232143, -0.9395930143297476, 0.43320086856952345], [0.025158596988047847, -0.9995022629021179, 0.019033429831184757, 0.035470220091972655], [-0.9396294001728557, -0.017143347374405545, 0.3417640940350521, 0.08355517621534359], [0.0, 0.0, 0.0, 1.0]]], [[[-0.010924608787282353, -0.718077482864764, -0.6958774184620802, 0.0033929814685185467], [-0.999244377918227, 0.03379938645038755, -0.0191904839541035, 0.5078913175369048], [0.03730048420141707, 
0.6951419495888459, -0.7179041327365081, -0.060032186022762044], [0.0, 0.0, 0.0, 1.0]], [[-0.6473314707912954, -0.5215211110164681, -0.5558576235757925, 0.4399146659327372], [-0.006595991919554844, -0.7254120569538427, 0.6882832560193459, 0.017375583692277607], [-0.7621800704648317, 0.44921384483355364, 0.4661421047235277, 0.06714870404233222], [0.0, 0.0, 0.0, 1.0]]], [[[-0.054476894557100475, -0.6909102023886033, 0.7208851227447782, 0.003091379838226882], [-0.998510371996232, 0.035488904088340156, -0.04144363283491165, 0.5064882402389643], [0.0030504057698869103, -0.7220689924944486, -0.691814328489004, -0.060936184205575464], [0.0, 0.0, 0.0, 1.0]], [[-0.6824250729774157, 0.5204622025114654, -0.513239822625527, 0.3764990713841739], [0.03508316360536168, -0.6780181945843057, -0.7342073953891212, 0.016561499342498287], [-0.7301131360297577, -0.5190476120449378, 0.44443715531809447, 0.12755141440341428], [0.0, 0.0, 0.0, 1.0]]], [[[-0.03611549644578361, -0.9965677551323592, -0.07448745093587986, 0.14067916893485943], [-0.9989185115044544, 0.03818361471393105, -0.02652958614647847, 0.5664887732677953], [0.029282730238145076, 0.07344876444044893, -0.9968689987716401, -0.060098125779635955], [0.0, 0.0, 0.0, 1.0]], [[-0.6656576121433021, -0.03959213593519416, -0.745206284305068, 0.3630872418655754], [0.010153142587813335, -0.9989796914865925, 0.04400556434070075, 0.1725790157469556], [-0.7461882182742288, 0.021726453218165402, 0.6653804206161243, 0.048761560426611526], [0.0, 0.0, 0.0, 1.0]]], [[[-0.03826431227549668, -0.9970577590588037, -0.06642038472276518, 0.14165793848614358], [-0.926873788642762, 0.010574769968521545, 0.3752241385720347, 0.5661310710413413], [-0.3734177584597389, 0.07592100724272371, -0.9245513389348127, -0.060246270576778824], [0.0, 0.0, 0.0, 1.0]], [[-0.9017314552814689, -0.03388080714930842, -0.4309669052988589, 0.3473364929041862], [0.014987583292944813, -0.9987748892619973, 0.047160289722627244, 0.17263893952759132], [-0.4320367517966513, 0.03606676429341106, 0.9011345258119912, 0.07021285991521141], [0.0, 0.0, 0.0, 1.0]]], [[[-0.03131341038225733, -0.9963162830027295, -0.07983316700380039, 0.13985293854207959], [-0.9242367671070555, 0.05927171985290302, -0.37719127979471745, 0.5667848032987314], [0.3805336629757348, 0.06197360284268921, -0.9226881401063771, -0.05996498124871509], [0.0, 0.0, 0.0, 1.0]], [[-0.3715635774586947, -0.0318872674533443, -0.927859746987903, 0.3844279869271573], [0.013509608488782864, -0.9994898825955614, 0.028938988710572794, 0.17305306153493416], [-0.9283092148549298, -0.0017823477409704865, 0.37180482091004946, 0.038024200047753226], [0.0, 0.0, 0.0, 1.0]]], [[[-0.041764950839894066, -0.687909585704095, -0.7245937418842103, 0.14079309296149242], [-0.9991258552509127, 0.027454208451602735, 0.03152446363426603, 0.5671687846777369], [-0.001792833085775769, 0.7252769597434598, -0.6884548768182344, -0.05959482765555195], [0.0, 0.0, 0.0, 1.0]], [[-0.6768683837634881, -0.5240899175840876, -0.5168935570773808, 0.39551063814763227], [0.011732563529726125, -0.709793663728169, 0.7043119350716441, 0.157349985049956], [-0.7360105556406268, 0.4706619946607621, 0.4865858082265948, 0.023481168482895665], [0.0, 0.0, 0.0, 1.0]]], [[[-0.011889960232215974, -0.5698374123162134, 0.8216714382102123, 0.1410571916023701], [-0.9994898444615775, 0.03113271031196954, 0.007127774323236524, 0.565406017091648], [-0.029642531333345268, -0.8211675090219981, -0.5699168732918506, -0.06105273048504627], [0.0, 0.0, 0.0, 1.0]], [[-0.7109896030077978, 0.5840936948193297, 
-0.39157162829694125, 0.323487128076281], [-0.013402652763522038, -0.5679951129342324, -0.8229227913855157, 0.1443421284719379], [-0.7030747850077648, -0.5798414501871458, 0.4116670248285053, 0.08787329051094274], [0.0, 0.0, 0.0, 1.0]]], [[[-0.5926807620727431, -0.8053906017970772, 0.008688659609805593, 0.14080552753283712], [-0.8052485470337444, 0.5927408998517485, 0.01526444047396619, 0.5663837665360686], [-0.017443960815044998, 0.002050409686257567, -0.9998457401275472, -0.06013563731085096], [0.0, 0.0, 0.0, 1.0]], [[-0.5936125740359616, -0.36825853225473804, -0.7155485765257211, 0.3866486436732393], [0.5700724418039728, -0.8200159711259019, -0.05090400960729838, 0.19821423602229826], [-0.5680154250036312, -0.43813178442111445, 0.6967058320682512, 0.08768169919493388], [0.0, 0.0, 0.0, 1.0]]], [[[0.5904135395946819, -0.8069907876823487, -0.013331198712099084, 0.14086913839838577], [-0.8066986805581643, -0.5905602007659532, 0.021814858628796653, 0.566377711509475], [-0.025477265335899738, -0.0021255274874763946, -0.9996731421239169, -0.06008400026518902], [0.0, 0.0, 0.0, 1.0]], [[-0.5520826841546155, 0.42482297215760806, -0.7174469682030938, 0.3470797820053276], [-0.6037185104175938, -0.7971626830818019, -0.007457672625161144, 0.12000638080085005], [-0.5750901407916819, 0.42901876302674535, 0.6965732057258626, 0.036671327037826315], [0.0, 0.0, 0.0, 1.0]]], [[[-0.06910400916789647, -0.9975986358643576, 0.004647325951215392, 0.16695215433978486], [-0.997402259782493, 0.0691838470547941, 0.020058102788545713, 0.4013932023664068], [-0.020331455867702925, -0.0032491580866977864, -0.9997880149681873, -0.060704050657466604], [0.0, 0.0, 0.0, 1.0]], [[-0.6945838941329506, -0.01034432410198819, -0.7193373401749553, 0.47063974636816786], [0.04273434369502267, -0.9987242210596499, -0.02690178688770893, 0.20332516249045282], [-0.7181413439428633, -0.04942595702329959, 0.6941398165299763, 0.17640235105536622], [0.0, 0.0, 0.0, 1.0]]], [[[-0.06544057489966218, -0.9978509903581679, -0.0033063874584731145, 0.16598002818390512], [-0.9129200433136738, 0.06120775106230966, -0.4035227449922181, 0.40189419421410755], [0.40285794726300495, -0.023388293035579405, -0.9149636397562039, -0.06054856358516285], [0.0, 0.0, 0.0, 1.0]], [[-0.3243981634281017, 0.029364515769605068, -0.9454647305839023, 0.49161918788463776], [0.04761756281065477, -0.9977438510433253, -0.04732626561654288, 0.20318805663192746], [-0.9447213341914364, -0.06037327984175479, 0.3222680061760729, 0.16181438799045894], [0.0, 0.0, 0.0, 1.0]]], [[[0.5150672221269694, -0.8569930970413782, 0.016388664186999594, 0.16686080092817498], [-0.8569041915685984, -0.5152831832026478, -0.014087142389903938, 0.40141046276737347], [0.02051738683590304, -0.006787689737575228, -0.9997664547810415, -0.060691567962575674], [0.0, 0.0, 0.0, 1.0]], [[-0.5470564073038441, 0.3786249981897753, -0.7465737726263917, 0.4567603943108598], [-0.5296230830037351, -0.8472133745540127, -0.04157989810498228, 0.15579847591284002], [-0.6482504741050554, 0.37265615347474756, 0.6640020437468367, 0.15644401375118097], [0.0, 0.0, 0.0, 1.0]]], [[[-0.36872324641829385, -0.9295320056555764, -0.003663060561329992, 0.16687672301312448], [-0.92927367462801, 0.3687092368459874, -0.02244852573789588, 0.401449242974963], [0.0222172274172458, -0.0048733075391695305, -0.9997412893741656, -0.0606535596920081], [0.0, 0.0, 0.0, 1.0]], [[-0.625396977219262, -0.21746019222082125, -0.7493928780581607, 0.48958628218584116], [0.3452821829603899, -0.9383649835888994, -0.015854706061997126, 
0.22022420944612042], [-0.6997562682928375, -0.26866749407676394, 0.6619357541418913, 0.191566371711758], [0.0, 0.0, 0.0, 1.0]]], [[[-0.020145289475022476, -0.7001146346674012, 0.7137462193500563, 0.16682877591027745], [-0.9993878836768177, -0.00632114296701225, -0.03440786409473806, 0.40066139254837385], [0.02860114109504941, -0.7140024800209883, -0.699558741816539, -0.061194186789959484], [0.0, 0.0, 0.0, 1.0]], [[-0.6660584198488024, 0.5430230499500395, -0.5113630301180105, 0.4364005622313199], [-0.0055891263598602525, -0.689179675185805, -0.7245689318327977, 0.17991237396861318], [-0.7458786382616476, -0.47974716521699734, 0.46206894988865593, 0.19884999589320737], [0.0, 0.0, 0.0, 1.0]]], [[[0.013119569913197682, -0.8013508432771825, -0.5980507527494978, 0.16667098165143263], [-0.9999023367055055, -0.007633448132540355, -0.011706729708918395, 0.40228511083214635], [0.004816008322519771, 0.5981459324015805, -0.8013727282702356, -0.060271084169215325], [0.0, 0.0, 0.0, 1.0]], [[-0.6653229648611432, -0.40426148880719187, -0.6276288721017946, 0.4985391244256794], [-0.05448219419835332, -0.8121655939683161, 0.580877558948033, 0.18897061016509656], [-0.7445650024972374, 0.4206657778349521, 0.5183273679971023, 0.14774918078911253], [0.0, 0.0, 0.0, 1.0]]], [[[-0.060516778716361276, -0.9969475263606629, -0.04932898921679127, -0.06004544198952544], [-0.9981498015655461, 0.06073340137995555, -0.0029030314297456734, 0.4101656280446876], [0.0058900873046030915, 0.04906203868752862, -0.9987783654201605, -0.060765445662680204], [0.0, 0.0, 0.0, 1.0]], [[-0.6718595923841904, -0.06277390701464716, -0.7380136345078354, 0.47833640090544916], [0.04658058605172378, -0.9980107210557729, 0.04248352222700752, -0.026195011027421757], [-0.73921237619809, -0.00583414568305568, 0.6734471216210833, 0.16854323855985312], [0.0, 0.0, 0.0, 1.0]]], [[[-0.06095863133829868, -0.9970563110232863, -0.04650546112014572, -0.05972243332063343], [-0.9907736434088855, 0.05479282152678802, 0.12395698542404313, 0.41014390224084607], [-0.12104392918128656, 0.05363263332868812, -0.9911972093638011, -0.06084894822603819], [0.0, 0.0, 0.0, 1.0]], [[-0.7578676398099422, -0.06328204260288371, -0.6493319825889635, 0.47347486899478786], [0.0474550116429623, -0.9979952575582975, 0.04187466729555339, -0.026603762232844427], [-0.6506801536844916, 0.0009213984772005641, 0.7593513604557329, 0.17481107177664937], [0.0, 0.0, 0.0, 1.0]]], [[[-0.05725495636770017, -0.9967273840357855, -0.05706480425369509, -0.06101921094540899], [-0.9228931779596155, 0.07464085000337628, -0.3777524660228906, 0.4103755462531268], [0.38077559276683476, 0.031036517587404333, -0.9241464616223516, -0.06049278483587785], [0.0, 0.0, 0.0, 1.0]], [[-0.33918893316295773, -0.05905216227938552, -0.9388629877409714, 0.4993981022651383], [0.04891235316275143, -0.9977849037778822, 0.0450873319353047, -0.02580616391739493], [-0.9394458203259285, -0.030628874009522206, 0.34132597725495795, 0.1564549671398973], [0.0, 0.0, 0.0, 1.0]]], [[[0.8038012427773459, -0.5943850101232284, -0.024698620414990345, -0.06012642304271454], [-0.5948801649388258, -0.8027632248424758, -0.04109494132937003, 0.4102127016724454], [0.004599072944573228, 0.04772488429864065, -0.9988499306436038, -0.06069829906489253], [0.0, 0.0, 0.0, 1.0]], [[-0.3651596611046982, 0.5223776733579412, -0.7705712090904031, 0.4677734993632923], [-0.8174709207089709, -0.5759617147883771, -0.0030654352479083657, -0.1094292061841596], [-0.4454208298868936, 0.6288001824706922, 0.6373465421791312, 0.15244043127263024], [0.0, 0.0, 0.0, 
1.0]]], [[[-0.5789557874014296, -0.8141602765100068, -0.044195479265880855, -0.06000308839225335], [-0.8153226384166065, 0.5785903850510218, 0.021958178701663063, 0.41016553259555266], [0.007693602522558327, 0.04874638940126703, -0.9987815566982432, -0.06078008336887876], [0.0, 0.0, 0.0, 1.0]], [[-0.5553864865580668, -0.4076375015032961, -0.72482930260637, 0.5070587365224505], [0.5621637262298026, -0.8263279182187712, 0.03397228993835441, -0.0019736055840804123], [-0.6127950680775017, -0.3886049908845516, 0.6880903760404644, 0.2004030009910496], [0.0, 0.0, 0.0, 1.0]]], [[[-0.05548015843481903, -0.9154334277290223, 0.3986271333167634, -0.060279142339482394], [-0.9983832235656428, 0.05580704227852527, -0.010794115748812061, 0.40978396215472845], [-0.01236490690313713, -0.3985815016134293, -0.9170495600832382, -0.06091882914680531], [0.0, 0.0, 0.0, 1.0]], [[-0.6821584783959954, 0.2738064467780218, -0.6780043068117304, 0.45830512557471137], [0.04259544013201322, -0.9107882173638581, -0.41067073378940233, -0.03253017695163941], [-0.7299626283806732, -0.3090224147435917, 0.6096389983864376, 0.18849989808722625], [0.0, 0.0, 0.0, 1.0]]], [[[-0.047755778671378216, -0.8355972426085118, -0.5472628561747473, -0.05970591143467616], [-0.9985162109810372, 0.05428995270688457, 0.004239981504305934, 0.41062597215904684], [0.026167957726252467, 0.5466533171765594, -0.8369500515612032, -0.0606626175655252], [0.0, 0.0, 0.0, 1.0]], [[-0.6521414418692221, -0.4293615923730387, -0.6247880943100791, 0.5001225869387651], [0.03061137135551542, -0.8383948621268031, 0.5442030862675378, -0.0359163471473094], [-0.7574790318817192, 0.33577176497476213, 0.559895381393048, 0.14651960289628999], [0.0, 0.0, 0.0, 1.0]]], [[[-0.02984705722208417, -0.9994866766090333, -0.01164201254987434, -0.06106877074617639], [-0.9992167564784373, 0.02953220483498117, 0.02633861139650925, 0.6456551147381409], [-0.025981276871879525, 0.012419024060468544, -0.9995852845522951, -0.05996242167698325], [0.0, 0.0, 0.0, 1.0]], [[-0.7042729636329874, -0.008489918569162, -0.7098785205782029, 0.3123423674249464], [0.013422583578266227, -0.9999089908983165, -0.0013580025018818257, -0.031026200698577006], [-0.7098023858410828, -0.010484808219498234, 0.7043228250247942, -0.0032220431982972766], [0.0, 0.0, 0.0, 1.0]]], [[[-0.03242021069844363, -0.9994695986530537, -0.003074297736361392, -0.060091701443056866], [-0.8970819222883799, 0.027742557413987748, 0.44099248883798053, 0.6427779508917342], [-0.4406732969464491, 0.017054966327580412, -0.897505528385152, -0.060201558358486826], [0.0, 0.0, 0.0, 1.0]], [[-0.9329701348975827, -0.016997095379370226, -0.35955225786782474, 0.29992594971704295], [0.01217785477720072, -0.9998031436540401, 0.015664411655855127, -0.03122465687936459], [-0.3597477272231355, 0.01023585307452543, 0.9329934619651014, 0.021695121777657214], [0.0, 0.0, 0.0, 1.0]]], [[[-0.024291079771462755, -0.9995625068830656, -0.016874189674718182, -0.062063596531239884], [-0.9432600622453404, 0.028507747026382556, -0.330828903411411, 0.6429771677830001], [0.33116521317380765, 0.007880557919442283, -0.9435398764177515, -0.05974779029682793], [0.0, 0.0, 0.0, 1.0]], [[-0.3985522768748595, 0.0037410587001441434, -0.917137986934173, 0.33328796123481663], [0.015621878241735453, -0.9998189164819052, -0.010866975897154886, -0.03108869965929458], [-0.917012562355645, -0.01865847594931131, 0.3984216632629095, -0.01405169904591413], [0.0, 0.0, 0.0, 1.0]]], [[[0.47733888478976966, -0.8786444625711006, -0.011467234226291723, -0.06118702983077878], 
[-0.87870656692248, -0.47736154981237194, -0.0008485328748359328, 0.6428828016915717], [-0.004728457990539963, 0.010481371755262401, -0.999933889080253, -0.059962602140958735], [0.0, 0.0, 0.0, 1.0]], [[-0.5978796981215705, 0.3284409534614118, -0.731208866647156, 0.30417883183544087], [-0.4907430379289749, -0.8712479054554771, 0.009917558352971173, -0.07317282710392953], [-0.633806861195349, 0.3647651673732055, 0.6820815459847137, -0.018984823279784662], [0.0, 0.0, 0.0, 1.0]]], [[[-0.6255964921493804, -0.7800577095588817, -0.011789773879973405, -0.061140521094972144], [-0.7801260228127542, 0.6256186839139082, 0.002156587161083757, 0.6428880876376254], [0.005693640377092446, 0.010546662769833738, -0.99992817260225, -0.05993548707643025], [0.0, 0.0, 0.0, 1.0]], [[-0.5460856400221046, -0.41441184194625086, -0.7280475939225158, 0.35002174173092954], [0.6109600055640879, -0.7916243884748231, -0.007661538552576147, -0.004399626258372629], [-0.5731651990157623, -0.4489918182178586, 0.6854837720986857, 0.03402520582210886], [0.0, 0.0, 0.0, 1.0]]], [[[-0.008494574257628063, -0.7121365660111683, 0.7019895679837436, -0.06119286889216918], [-0.999946488025018, 0.0019042028654941423, -0.010168338009727163, 0.6422295451847669], [0.005904514765386226, -0.702038378837844, -0.7121146335697066, -0.060678952695883956], [0.0, 0.0, 0.0, 1.0]], [[-0.6827841578603238, 0.5217209564370249, -0.5114812189996862, 0.2856903523952288], [-0.004701410613518753, -0.7031873738116079, -0.710989039331956, -0.05161206961559375], [-0.7306050167588585, -0.4830473692365694, 0.4825780232878591, 0.025689450920418414], [0.0, 0.0, 0.0, 1.0]]], [[[-0.008296345731882954, -0.6653838457142012, -0.7464552957211017, -0.06062356766374927], [-0.9999415329121362, 0.0003429014119338547, 0.010808014431381352, 0.6435943647878134], [-0.006935517632038868, 0.7465013196781383, -0.6653477874870959, -0.05943393259219164], [0.0, 0.0, 0.0, 1.0]], [[-0.681412920756174, -0.5337538375834752, -0.500782659735192, 0.3477231236537544], [-0.015493739495026748, -0.6735504705847524, 0.7389788275800049, -0.05179225059524463], [-0.7317351812390874, 0.511308717351828, 0.4506961505238626, -0.03378420910915049], [0.0, 0.0, 0.0, 1.0]]], [[[-0.005694374783102559, -0.9982839656948906, -0.05828119707342751, 0.03520388546320374], [-0.999981559425858, 0.005561666509579647, 0.002438990337272493, 0.5619349328028862], [-0.0021106643642824184, 0.058294010859764586, -0.9982972269789306, -0.0036834314504738813], [0.0, 0.0, 0.0, 1.0]], [[-0.6756673478820645, -0.023604293542909792, -0.7368286587344152, 0.32938449678996623], [-0.009433633400198092, -0.9991286012981544, 0.04065765154142855, 0.06456041589973871], [-0.7371462823394607, 0.0344220190333703, 0.6748558979802861, 0.09307986714909239], [0.0, 0.0, 0.0, 1.0]]], [[[-0.007882379781756536, -0.9985781246397234, -0.05272188426060943, 0.03591689264179926], [-0.959394780944516, -0.0073141761419293, 0.2819719083947704, 0.5618179471810841], [-0.28195659663395023, 0.05280371027094377, -0.9579729880305717, -0.003746872800107437], [0.0, 0.0, 0.0, 1.0]], [[-0.8523038682726439, -0.013598823989009886, -0.5228701445995966, 0.31613462732873293], [-0.01193882450534972, -0.9988957175471367, 0.045440179746792486, 0.06457626394608207], [-0.5229106812801948, 0.04497129586865692, 0.8512003300933196, 0.10736352693260104], [0.0, 0.0, 0.0, 1.0]]], [[[0.006424891707583938, -0.9978270375397073, -0.0655738051456341, 0.0334619868198171], [-0.7483038757610688, 0.03870101633469754, -0.66222620067136, 0.5622962922586965], [0.6633249809011483, 
0.05332386416411744, -0.7464291896912246, -0.003470429341321496], [0.0, 0.0, 0.0, 1.0]], [[-0.004221861658502468, -0.025931037753249384, -0.9996548190076294, 0.37304763276071695], [-0.004164373074288108, -0.9996546039183081, 0.025948619651711267, 0.06499479919098389], [-0.9999824167859327, 0.004272487094256346, 0.0041124169286200305, 0.07656691273518543], [0.0, 0.0, 0.0, 1.0]]], [[[0.6665560988952687, -0.7447517197990933, -0.03237040132302399, 0.03504267133355227], [-0.7453476369903814, -0.6650950517876802, -0.04588542383369505, 0.5619729083998567], [0.012643854569530068, 0.054712411241290004, -0.9984220976109197, -0.00358146531092024], [0.0, 0.0, 0.0, 1.0]], [[-0.4778512844076114, 0.42492988036738105, -0.7688255632853006, 0.3162696707973113], [-0.6762051310047099, -0.7365956978273491, 0.013168095729613305, 0.004691479713373695], [-0.5607180849525898, 0.5261761821987047, 0.6393229657175598, 0.07544099009142623], [0.0, 0.0, 0.0, 1.0]]], [[[-0.6428413729860223, -0.7636833318864917, -0.05952090201091115, 0.03528706897810291], [-0.7656348315817361, 0.6429896036270537, 0.019174834975330648, 0.5619848297259824], [0.023627819329186322, 0.05789765302903892, -0.9980428788020466, -0.003603832373609686], [0.0, 0.0, 0.0, 1.0]], [[-0.5270459323283994, -0.44540976626814877, -0.7237628930313029, 0.36292194039801123], [0.6231543314999116, -0.78162452109098, 0.027235769902731438, 0.09392116619557224], [-0.577841902555523, -0.4366614800302968, 0.6895110495914942, 0.13092945347577267], [0.0, 0.0, 0.0, 1.0]]], [[[-0.014298168099962982, -0.6941128844508238, 0.7197241596808759, 0.03531084861779471], [-0.999468408287904, -0.01117060580109841, -0.03062871855759386, 0.5612205011747926], [0.029299543058365796, -0.7197794948492171, -0.693584180594668, -0.0044490281699129525], [0.0, 0.0, 0.0, 1.0]], [[-0.6629691586114739, 0.5509534507817015, -0.5068749252051524, 0.2956303807523064], [3.1656705226534854e-05, -0.6770342718533342, -0.7359514887096011, 0.04509414648419719], [-0.7486467082194689, -0.4879291852387601, 0.44883540019075235, 0.12286289618143711], [0.0, 0.0, 0.0, 1.0]]], [[[0.01716956026124125, -0.7709485641976306, -0.6366659387481376, 0.03531726821969566], [-0.9998524685289161, -0.013555549706626785, -0.010549324565298168, 0.5625274374679576], [-0.000497370152345189, 0.6367531377494444, -0.7710675678493866, -0.003177378654997162], [0.0, 0.0, 0.0, 1.0]], [[-0.6714029115469229, -0.44397765750005336, -0.5933818079509015, 0.3544326691861283], [-0.038345087121615745, -0.7788009096857008, 0.6260980732811442, 0.050197192689039036], [-0.7400998477637668, 0.44311734643713796, 0.5058648363214809, 0.06801930177354622], [0.0, 0.0, 0.0, 1.0]]]]"
  },
  {
    "path": "grasp_utils/handeye_dashboard/launch/handeye_dashboard.launch.py",
    "content": "import os\n\nfrom ament_index_python.packages import get_package_share_directory\nfrom launch import LaunchDescription\nfrom launch_ros.actions import Node\n\ndef generate_launch_description():\n\n    # URDF file to be loaded by Robot State Publisher\n    rqt_config = os.path.join(\n        get_package_share_directory('handeye_dashboard'), \n            'config', 'Default.perspective'\n    )\n    \n    return LaunchDescription( [\n        # Robot State Publisher\n        Node(package='handeye_tf_service', node_executable='handeye_tf_server',\n             output='screen'),\n\n        # Rviz2\n        Node(package='rqt_gui', node_executable='rqt_gui',\n             output='screen', arguments=['--perspective-file', rqt_config]),\n    ])"
  },
  {
    "path": "grasp_utils/handeye_dashboard/package.xml",
    "content": "<?xml version=\"1.0\"?>\n<?xml-model href=\"http://download.ros.org/schema/package_format3.xsd\" schematypens=\"http://www.w3.org/2001/XMLSchema\"?>\n<package format=\"3\">\n  <name>handeye_dashboard</name>\n  <version>0.1.0</version>\n  <description>The handeye_dashboard package</description>\n  <maintainer email=\"yu.yan@intel.com\">Yu Yan</maintainer>\n  <license>Apache License 2.0</license>\n  <author email=\"yu.yan@intel.com\">Yu Yan</author>\n\n  <exec_depend>rclpy</exec_depend>\n  <exec_depend>rqt_gui</exec_depend>\n  <exec_depend>rqt_gui_py</exec_depend>\n  <exec_depend>python_qt_binding</exec_depend>\n  <exec_depend>tf2</exec_depend>\n  <exec_depend>tf2_ros</exec_depend>\n  <exec_depend>handeye</exec_depend>\n\n  <export>\n    <build_type>ament_python</build_type>\n    <rqt_gui plugin=\"${prefix}/plugin.xml\"/>\n  </export>\n</package>\n"
  },
  {
    "path": "grasp_utils/handeye_dashboard/plugin.xml",
    "content": "<library path=\"src\">\n  <class name=\"HandEyeCalibration\" type=\"handeye_dashboard.plugin.HandEyeCalibration\" base_class_type=\"rqt_gui_py::Plugin\">\n    <description>\n      Dashboard for the Hand-Eye Calibration\n    </description>\n    <qtgui>\n      <group>\n        <label>Intel</label>\n        <icon type=\"file\">images/Intel.png</icon>\n        <statustip>Plugins created by Intel OTC-Robotics</statustip>\n      </group>\n      <label>Hand-Eye calibration</label>\n      <icon type=\"file\">images/tool-calibration.png</icon>\n      <statustip>Dashboard for the hand-eye calibration</statustip>\n    </qtgui>\n  </class>\n</library>\n"
  },
  {
    "path": "grasp_utils/handeye_dashboard/resource/handeye_dashboard",
    "content": ""
  },
  {
    "path": "grasp_utils/handeye_dashboard/setup.py",
    "content": "from glob import glob\nfrom setuptools import setup\nfrom setuptools import find_packages\n\npackage_name = 'handeye_dashboard'\n\nsetup(\n    name=package_name,\n    version='0.1.0',\n    packages=find_packages('src', exclude=['test']),\n    data_files=[\n        ('share/ament_index/resource_index/packages',\n            ['resource/' + package_name]),\n        ('share/' + package_name, ['package.xml']),\n        ('share/' + package_name, ['plugin.xml']),\n        ('share/' + package_name + '/images', glob('images/*.png')),\n        ('share/' + package_name + '/data', glob('data/*.json')),\n        ('share/' + package_name + '/launch', glob('launch/*.launch.py')),\n        ('share/' + package_name + '/config', glob('config/*.perspective')),\n    ],\n    install_requires=['setuptools'],\n    zip_safe=True,\n    author='Yu Yan',\n    author_email='yu.yan@intel.com',\n    maintainer='Yu Yan',\n    maintainer_email='yu.yan@intel.com',\n    keywords=['ROS'],\n    classifiers=[\n        'Intended Audience :: Developers',\n        'License :: OSI Approved :: BSD 3-Clause License',\n        'Programming Language :: Python',\n        'Topic :: Software Development',\n    ],\n    description=(\n        'The handeye_dashboard package.'\n    ),\n    license='Apache License 2.0',\n    tests_require=['pytest'],\n    package_dir={'':'src'},\n    entry_points={\n        'console_scripts': [\n            'handeye_dashboard = handeye_dashboard.main:main',\n        ],\n    },\n)\n"
  },
  {
    "path": "grasp_utils/handeye_dashboard/src/handeye_dashboard/__init__.py",
    "content": ""
  },
  {
    "path": "grasp_utils/handeye_dashboard/src/handeye_dashboard/handeye_calibration.py",
    "content": "#!/usr/bin/env python\n\nimport json\nimport rclpy\nimport numpy as np\nimport baldor as br\nfrom rclpy.clock import ROSClock\nfrom geometry_msgs.msg import TransformStamped\nfrom handeye_tf_service.srv import HandeyeTF\nfrom ament_index_python.resources import get_resource\n\n# Rqt widgets\nfrom rqt_gui_py.plugin import Plugin\nfrom python_qt_binding import QtCore\nfrom python_qt_binding.QtGui import QIcon, QImage, QPixmap, QStandardItem, \\\n                                                QIntValidator, QStandardItemModel\nfrom python_qt_binding.QtWidgets import (QComboBox, QAction, QToolBar, QStatusBar,\n                                                 QLineEdit, QWidget, QVBoxLayout,\n                                                       QLabel, QTextEdit, QFrame,\n                                                          QHBoxLayout, QTreeView)\n\nclass bcolors:\n  HEADER = '\\033[95m'\n  OKBLUE = '\\033[94m'\n  OKGREEN = '\\033[92m'\n  WARNING = '\\033[93m'\n  FAIL = '\\033[91m'\n  ENDC = '\\033[0m'\n  BOLD = '\\033[1m'\n  UNDERLINE = '\\033[4m'\n\ndef save_samples_to_file(samples, file_name='dataset.json', pkg='handeye_dashboard'):\n  \"\"\"\n  Saving transform samples to a disc file\n  Parameters\n  -------------\n  samples: list\n    A list of transforms.\n  file_name: string\n    The destination file.\n  Returns\n  --------\n  Success: bool\n    Execution status\n  \"\"\"\n  success = False\n  samples_list = []\n  for sample in samples:\n    samples_list +=[[sample[0].tolist(), sample[1].tolist()]]\n\n  _, path_pkg = get_resource('packages', pkg)\n\n  import json\n  # If the file name exists, write a JSON string into the file.\n  if file_name != None:\n    filename = '/tmp/' + file_name\n    # Writing JSON data\n    with open(filename, 'w') as f:\n      json.dump(samples_list, f)\n      success = True\n\n  return success\n\nclass HandEyeCalibration(Plugin):\n  PLUGIN_TITLE = ' Intel OTC Robotics: Hand-Eye Calibration'\n  def __init__(self, context):\n    super(HandEyeCalibration, self).__init__(context)\n    self.context = context\n    self.node = context.node\n    self.widget = QWidget()\n    self.widget.setObjectName(self.PLUGIN_TITLE)\n    self.widget.setWindowTitle(self.PLUGIN_TITLE)\n\n    # Data\n    self.Tsamples = []\n\n    # Toolbar\n    _, path_pkg = get_resource('packages', 'handeye_dashboard')\n    print(\"{}\".format(path_pkg))\n\n    self.snapshot_action = QAction(QIcon.fromTheme('camera-photo'),\n                                                 'Take a snapshot', self.widget)\n    path = path_pkg + '/share/handeye_dashboard/images/capture.png'\n    self.calibrate_action = QAction(QIcon(QPixmap.fromImage(QImage(path))),\n                                         'Get the camera/robot transform', self.widget)\n    self.clear_action = QAction(QIcon.fromTheme('edit-clear'),\n                              'Clear the record data.', self.widget)\n    path = path_pkg + '/share/handeye_dashboard/images/UR5.png'\n    self.execut_action = QAction(QIcon(QPixmap.fromImage(QImage(path))),\n                                          'EStart the publishing the TF.', self.widget)\n    self.toolbar = QToolBar()\n    self.toolbar.addAction(self.snapshot_action)\n    self.toolbar.addAction(self.calibrate_action)\n    self.toolbar.addAction(self.clear_action)\n    self.toolbar.addAction(self.execut_action)\n\n    # Toolbar0\n    self.l0 = QLabel(self.widget)\n    self.l0.setText(\"Camera-Mount-Type: \")\n    self.l0.setFixedWidth(150)\n    
self.l0.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)\n    self.combobox = QComboBox(self.widget)\n    self.combobox.addItem('attached on robot')\n    self.combobox.addItem('fixed beside robot')\n    self.toolbar0 = QToolBar()\n    self.toolbar0.addWidget(self.l0)\n    self.toolbar0.addWidget(self.combobox)\n\n    # Toolbar1\n    self.l1 = QLabel(self.widget)\n    self.l1.setText(\"Camera-Frame: \")\n    self.l1.setFixedWidth(150)\n    self.l1.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)\n    self.camera_frame = QLineEdit(self.widget)\n    self.camera_frame.setText(\"camera_link\")\n    self.toolbar1 = QToolBar()\n    self.toolbar1.addWidget(self.l1)\n    self.toolbar1.addWidget(self.camera_frame)\n\n    # Toolbar2\n    self.l2 = QLabel(self.widget)\n    self.l2.setText(\"Object-Frame: \")\n    self.l2.setFixedWidth(150)\n    self.l2.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)\n    self.object_frame = QLineEdit(self.widget)\n    self.object_frame.setText(\"calib_board\")\n    self.toolbar2 = QToolBar()\n    self.toolbar2.addWidget(self.l2)\n    self.toolbar2.addWidget(self.object_frame)\n\n\n    # Toolbar3\n    self.l3 = QLabel(self.widget)\n    self.l3.setText(\"Robot-Base-Frame: \")\n    self.l3.setFixedWidth(150)\n    self.l3.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)\n    self.base_frame = QLineEdit(self.widget)\n    self.base_frame.setText(\"base\")\n    self.toolbar3 = QToolBar()\n    self.toolbar3.addWidget(self.l3)\n    self.toolbar3.addWidget(self.base_frame)\n\n    # Toolbar4\n    self.l4 = QLabel(self.widget)\n    self.l4.setText(\"End-Effector-Frame: \")\n    self.l4.setFixedWidth(150)\n    self.l4.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)\n    self.endeffector_frame = QLineEdit(self.widget)\n    self.endeffector_frame.setText(\"tool0\")\n    self.toolbar4 = QToolBar()\n    self.toolbar4.addWidget(self.l4)\n    self.toolbar4.addWidget(self.endeffector_frame)\n\n    # Toolbar5\n    self.l5 = QLabel(self.widget)\n    self.l5.setText(\"Sample-Number: \")\n    self.l5.setFixedWidth(150)\n    self.l5.setAlignment(QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter)\n    self.le5 = QLineEdit(self.widget)\n    self.le5.setValidator(QIntValidator())\n    self.le5.setText('10')\n    self.le5.setReadOnly(True)\n    self.toolbar5 = QToolBar()\n    self.toolbar5.addWidget(self.l5)\n    self.toolbar5.addWidget(self.le5)\n\n    # TreeView\n    self.treeview = QTreeView()\n    self.treeview.setAlternatingRowColors(True)\n    self.model = QStandardItemModel(self.treeview)\n    self.treeview.setModel(self.model)\n    self.treeview.setHeaderHidden(True)\n\n    # TextEdit\n    self.textedit = QTextEdit(self.widget)\n    self.textedit.setReadOnly(True)\n\n    # Layout\n    self.layout = QVBoxLayout()\n    self.layout.addWidget(self.toolbar0)\n    self.layout.addWidget(self.toolbar1)\n    self.layout.addWidget(self.toolbar2)\n    self.layout.addWidget(self.toolbar3)\n    self.layout.addWidget(self.toolbar4)\n    self.layout.addWidget(self.toolbar5)\n    self.layout.addWidget(self.toolbar)\n    self.layoutH = QHBoxLayout()\n    self.layoutH.addWidget(self.treeview)\n    self.layoutH.addWidget(self.textedit)\n    self.layout.addLayout(self.layoutH)\n    self.widget.setLayout(self.layout)\n    # Add the widget to the user interface\n    if context.serial_number() > 1:\n      self.widget.setWindowTitle(self.widget.windowTitle() +\n                                            (' (%d)' % context.serial_number()))\n    
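# Hand the assembled widget over to the enclosing rqt context.\n    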
context.add_widget(self.widget)\n    # Make the connections\n    self.snapshot_action.triggered.connect(self.take_snapshot)\n    self.calibrate_action.triggered.connect(self.calibration)\n    self.clear_action.triggered.connect(self.clear)\n    self.execut_action.triggered.connect(self.execution)\n\n    # Package path\n    self.path_pkg = path_pkg\n\n    # Set up TF\n    self.cli = self.node.create_client(HandeyeTF, 'handeye_tf_service')\n    while not self.cli.wait_for_service(timeout_sec=1.0):\n        self.node.get_logger().info('service not available, waiting again...')\n    self.req = HandeyeTF.Request()\n\n  def clear(self):\n    # >>> Clear the recorded samples\n    self.textedit.append('Clearing the recorded data ...')\n    self.textedit.clear()\n    self.Tsamples = []\n    self.model.clear()\n\n  def get_tf_transform(self, frame_id, child_frame_id):\n    self.req.transform.header.frame_id = frame_id\n    self.req.transform.child_frame_id = child_frame_id\n    self.req.publish.data = False\n\n    future = self.cli.call_async(self.req)\n    rclpy.spin_until_future_complete(self.node, future)\n\n    transform = TransformStamped()\n\n    try:\n      result = future.result()\n    except Exception as e:\n      self.node.get_logger().info('Service call failed %r' % (e,))\n    else:\n      transform = result.tf_lookup_result\n\n    return transform\n\n  def publish_tf_transform(self, transform_to_publish):\n    self.req.publish.data = True\n    self.req.transform = transform_to_publish\n\n    future = self.cli.call_async(self.req)\n    rclpy.spin_until_future_complete(self.node, future)\n\n    try:\n      future.result()\n    except Exception as e:\n      self.node.get_logger().info('Service call failed %r' % (e,))\n    else:\n      self.node.get_logger().info('Send the camera-robot transform :\\n\\tfrom `{}` to `{}`.'.\n                                                            format(self.req.transform.header.frame_id,\n                                                                   self.req.transform.child_frame_id))\n    \n  def take_snapshot(self):\n    # >>> Take the snapshot\n    self.textedit.append('Taking snapshot ...')\n\n    # Get the transform from `tool0` to `base_link`\n    T = self.get_tf_transform(self.base_frame.text(), self.endeffector_frame.text())\n    bTe = np.zeros((4,4))\n    q = [T.transform.rotation.w, T.transform.rotation.x, T.transform.rotation.y,\n          T.transform.rotation.z]\n    bTe = br.quaternion.to_transform(q)\n    bTe[:3, 3] = np.array([T.transform.translation.x, T.transform.translation.y,\n                                                      T.transform.translation.z])\n    self.textedit.append('Lookup transform: from `{}` to `{}`.'.\n                          format(self.base_frame.text(), self.endeffector_frame.text()))\n    self.node.get_logger().info(bcolors.OKGREEN + 'bTe:' + bcolors.ENDC + '\\n{}'.format(bTe))\n\n    # Get the transform from `calib_board` to `camera_link`\n    T = self.get_tf_transform(self.camera_frame.text(), self.object_frame.text())\n    cTo = np.zeros((4,4))\n    q = [T.transform.rotation.w, T.transform.rotation.x, T.transform.rotation.y,\n          T.transform.rotation.z]\n    cTo = br.quaternion.to_transform(q)\n    cTo[:3, 3] = np.array([T.transform.translation.x, T.transform.translation.y,\n                                                      T.transform.translation.z])\n    self.textedit.append('Lookup transform: from `{}` to `{}`.'.\n                          format(self.camera_frame.text(), 
self.object_frame.text()))\n    self.node.get_logger().info(bcolors.OKGREEN + 'cTo:' + bcolors.ENDC + '\\n{}'.format(cTo))\n  \n    parent = QStandardItem('Snapshot {}'.format(len(self.Tsamples)))\n    child_1 = QStandardItem('bTe:\\n{}\\n{}\\n{}\\n{}'.format(bTe[0, :], bTe[1, :], bTe[2, :], bTe[3, :]))\n    child_2 = QStandardItem('cTo:\\n{}\\n{}\\n{}\\n{}'.format(cTo[0, :], cTo[1, :], cTo[2, :], cTo[3, :]))\n    parent.appendRow(child_1)\n    parent.appendRow(child_2)\n    self.model.appendRow(parent)\n    self.Tsamples.append((bTe, cTo))\n    self.le5.setText(str(len(self.Tsamples)))\n\n  def calibration(self):\n    # >>> Compute the calibration \n    self.textedit.append('Making the calibration ...')\n    if len(self.Tsamples) == 0:\n      self.textedit.append('No transform recorded, please take snapshots.')\n      return\n    # save samples to `dataset.json` file\n    save_samples_to_file(self.Tsamples)\n    import handeye\n    if self.combobox.currentIndex() == 0:\n      solver_cri = handeye.calibrator.HandEyeCalibrator(setup='Moving')\n    if self.combobox.currentIndex() == 1:\n      solver_cri = handeye.calibrator.HandEyeCalibrator(setup='Fixed')\n    for sample in self.Tsamples:\n      solver_cri.add_sample(sample[0], sample[1])\n    try:\n      bTc = solver_cri.solve(method=handeye.solver.Daniilidis1999)\n      # save the calibration result to 'camera-robot.json' file\n      file_output = '/tmp/' + 'camera-robot.json'\n      with open(file_output, 'w') as f:\n        json.dump(bTc.tolist(), f)\n    except Exception:\n      self.textedit.append(\"Failed to solve the hand-eye calibration.\")\n\n  def execution(self):\n    # >>> Publish the camera-robot transform\n    self.textedit.append('Publishing the camera TF ...')\n    file_input = '/tmp/' + 'camera-robot.json'\n    with open(file_input, 'r') as f:\n      datastore = json.load(f)\n\n    to_frame = self.camera_frame.text()\n    if self.combobox.currentIndex() == 0:\n      from_frame = self.endeffector_frame.text()\n    if self.combobox.currentIndex() == 1:\n      from_frame = self.base_frame.text()\n\n    bTc = np.array(datastore)\n    static_transformStamped = TransformStamped()\n    static_transformStamped.header.stamp = ROSClock().now().to_msg()\n    static_transformStamped.header.frame_id = from_frame\n    static_transformStamped.child_frame_id = to_frame\n\n    static_transformStamped.transform.translation.x = bTc[0,3]\n    static_transformStamped.transform.translation.y = bTc[1,3]\n    static_transformStamped.transform.translation.z = bTc[2,3]\n\n    q = br.transform.to_quaternion(bTc)\n    static_transformStamped.transform.rotation.x = q[1]\n    static_transformStamped.transform.rotation.y = q[2]\n    static_transformStamped.transform.rotation.z = q[3]\n    static_transformStamped.transform.rotation.w = q[0]\n\n    self.publish_tf_transform(static_transformStamped)\n\n    output_string = \"camera-robot pose:\\n\"\n    output_string += \"  Translation: [{}, {}, {}]\\n\".format(bTc[0,3], bTc[1,3], bTc[2,3])\n    output_string += \"  Rotation: in Quaternion [{}, {}, {}, {}]\".format(q[0], q[1], q[2], q[3])\n    file_path = '/tmp/' + 'camera-robot.txt'\n    with open(file_path, 'w') as f:\n      f.write(output_string)\n\n  def shutdown_plugin(self):\n    \"\"\"\n    Unregister subscribers when the plugin shutdown\n    \"\"\"\n    pass\n\n  def save_settings(self, plugin_settings, instance_settings):\n    # Nothing to be done here\n    pass\n\n  def restore_settings(self, plugin_settings, instance_settings):\n    # Nothing to 
be done here\n    pass\n"
  },
  {
    "path": "grasp_utils/handeye_dashboard/src/handeye_dashboard/main.py",
    "content": "#!/usr/bin/env python3\n\nimport sys\n\nfrom rqt_gui.main import Main\n\ndef main():\n    sys.exit(Main().main(sys.argv, standalone='handeye_dashboard.plugin.HandEyeCalibration'))\n\nif __name__ == '__main__':\n    main()"
  },
  {
    "path": "grasp_utils/handeye_dashboard/src/handeye_dashboard/plugin.py",
    "content": "#!/usr/bin/env python\nfrom .handeye_calibration import HandEyeCalibration\n"
  },
  {
    "path": "grasp_utils/handeye_target_detection/.clang-format",
    "content": "---\nBasedOnStyle:  Google\nAccessModifierOffset: -2\nConstructorInitializerIndentWidth: 2\nAlignEscapedNewlinesLeft: false\nAlignTrailingComments: true\nAllowAllParametersOfDeclarationOnNextLine: false\nAllowShortIfStatementsOnASingleLine: false\nAllowShortLoopsOnASingleLine: false\nAllowShortFunctionsOnASingleLine: None\nAllowShortLoopsOnASingleLine: false\nAlwaysBreakTemplateDeclarations: true\nAlwaysBreakBeforeMultilineStrings: false\nBreakBeforeBinaryOperators: false\nBreakBeforeTernaryOperators: false\nBreakConstructorInitializersBeforeComma: true\nBinPackParameters: true\nColumnLimit:    120\nConstructorInitializerAllOnOneLineOrOnePerLine: true\nDerivePointerBinding: false\nPointerBindsToType: true\nExperimentalAutoDetectBinPacking: false\nIndentCaseLabels: true\nMaxEmptyLinesToKeep: 1\nNamespaceIndentation: None\nObjCSpaceBeforeProtocolList: true\nPenaltyBreakBeforeFirstCallParameter: 19\nPenaltyBreakComment: 60\nPenaltyBreakString: 100\nPenaltyBreakFirstLessLess: 1000\nPenaltyExcessCharacter: 1000\nPenaltyReturnTypeOnItsOwnLine: 70\nSpacesBeforeTrailingComments: 2\nCpp11BracedListStyle: false\nStandard:        Auto\nIndentWidth:     2\nTabWidth:        2\nUseTab:          Never\nIndentFunctionDeclarationAfterType: false\nSpacesInParentheses: false\nSpacesInAngles:  false\nSpaceInEmptyParentheses: false\nSpacesInCStyleCastParentheses: false\nSpaceAfterControlStatementKeyword: true\nSpaceBeforeAssignmentOperators: true\nContinuationIndentWidth: 4\nSortIncludes: false\nSpaceAfterCStyleCast: false\n\n# Configure each individual brace in BraceWrapping\nBreakBeforeBraces: Custom\n\n# Control of individual brace wrapping cases\nBraceWrapping:\n    AfterClass: 'true'\n    AfterControlStatement: 'true'\n    AfterEnum : 'true'\n    AfterFunction : 'true'\n    AfterNamespace : 'true'\n    AfterStruct : 'true'\n    AfterUnion : 'true'\n    BeforeCatch : 'true'\n    BeforeElse : 'true'\n    IndentBraces : 'false'\n..."
  },
  {
    "path": "grasp_utils/handeye_target_detection/CMakeLists.txt",
    "content": "# Copyright (c) 2019 Intel Corporation\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#      http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\ncmake_minimum_required(VERSION 3.5)\nproject(handeye_target_detection)\n\n# Default to C99\nif(NOT CMAKE_C_STANDARD)\n  set(CMAKE_C_STANDARD 99)\nendif()\n\n# Default to C++14\nif(NOT CMAKE_CXX_STANDARD)\n  set(CMAKE_CXX_STANDARD 14)\nendif()\n\nif(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES \"Clang\")\n  add_compile_options(-Wall -Wextra -Wpedantic)\nendif()\n\nset (WITH_OPENCL OFF)\n\n# find dependencies\nfind_package(ament_cmake REQUIRED)\nfind_package(tf2 REQUIRED)\nfind_package(tf2_msgs REQUIRED)\nfind_package(tf2_ros REQUIRED)\nfind_package(std_msgs REQUIRED)\nfind_package(cv_bridge REQUIRED)\nfind_package(sensor_msgs REQUIRED)\nfind_package(image_transport REQUIRED)\n\nfind_package(OpenCV REQUIRED)\nif((${OpenCV_VERSION} LESS 3.3))\n  message(WARNING \"handeye_target_detection works better with OpenCV version >= 3.3\")\nendif()\n\n# Set include directory path\ninclude_directories(\n  include \n  ${rclcpp_INCLUDE_DIRS}\n  ${Boost_INCLUDE_DIRS}\n)\n\nset(SOURCES_POSEESTIMATION\n  src/pose_estimation_node.cpp \n  src/pose_estimator.cpp\n \t)\n\nadd_executable(pose_estimation ${SOURCES_POSEESTIMATION})\nament_target_dependencies(pose_estimation rclcpp tf2 tf2_msgs tf2_ros std_msgs sensor_msgs cv_bridge image_transport)\ntarget_link_libraries(pose_estimation\n  ${rclcpp_LIBRARIES}\n  ${OpenCV_LIBS}\n)\n\n# Install target files\ninstall(\n  TARGETS pose_estimation\n  DESTINATION lib/${PROJECT_NAME}/\n)\n\n# Install header files  \ninstall(\n  DIRECTORY include/\n  DESTINATION include\n)\n\n# Install launch files.\ninstall(DIRECTORY\n  launch cfg data\n  DESTINATION share/${PROJECT_NAME}/\n)\n\nif(BUILD_TESTING)\n    find_package(ament_lint_auto REQUIRED)\n    ament_lint_auto_find_test_dependencies()\nendif()\n\nament_package()\n"
  },
  {
    "path": "grasp_utils/handeye_target_detection/README.md",
    "content": "# handeye_target_detection\n\n## 1. Introduction\n\nThis package is used to estimate the pose of\ncalibration patterns. Currently four kinds of OpenCV calibration patterns are supported: CHESSBOARD, ASYMMETRIC_CIRCLES_GRID,\nCHARUCO, ARUCO.\n\n## 2. Prerequisite\n\n* Download and print the calibration pattern on an A4 paper without the border shrink:\n  * [Chessboard](./data/pattern/chessboard_9X6.png)\n    * Width * Height: 9X6\n    * Square size: 0.026\n  * [Asymmetric circles grid 1](./data/pattern/asymmetric_circles_grid_4X11.png)\n    * Width * Height: 4X11\n    * Cricles seperation: 0.035\n  * [Asymmetric circles grid 2](./data/pattern/asymmetric_circles_grid_3X5.png)\n    * Width * Height: 3X5\n    * Cricles seperation: 0.035\n  * [Aruco board 1](./data/pattern/aruco_5X7_DICT_6X6_250.png)\n    * Width * Height: 5X7\n    * Dictionary: 6X6_250\n    * Marker size: 0.035\n    * Marker seperation: 0.007\n  * [Aruco board 2](./data/pattern/aruco_3X4_DICT_4X4_50.png)\n    * Width * Height: 3X4\n    * Dictionary: 4X4_50\n    * Marker size: 0.0256\n    * Marker seperation: 0.0066\n  * [Charuco board](./data/pattern/charuco_5X7_DICT_6X6_250.jpg)\n    * Width * Height: 5X7\n    * Dictionary: 6X6_250\n    * Square size: 0.035\n    * Marker size: 0.022\n* RGB camera:\n  * Video/Image file\n  * Intel<sup>®</sup>RealSense<sup>TM</sup> (Tested with D435)\n  * Standard USB camera\n* ROS Dashing (Ubuntu 18.04, 64 bits)\n\n## 3. Environment Setup\n\n* Install [ROS Dashing](https://index.ros.org/doc/ros2/Installation/Dashing/Linux-Install-Debians/)\n* Install [Intel<sup>®</sup>RealSense<sup>TM</sup> SDK 2.0](https://github.com/IntelRealSense/librealsense)\n* Install [Intel<sup>®</sup>RealSense<sup>TM</sup> ROS2 Wrapper](https://github.com/intel/ros2_intel_realsense)\n\n## 4. Build and install\n\n* Install dependencies\n\n```shell\nsudo apt install ros-dashing-cv-bridge \\\n                 ros-dashing-image-transport\n```\n\n* Build with ros2_grasp_library\n\n## 5. Run\n\nBefore running the code, a camera should be launched and\nguarantee that the topics of the RGB image and camera info are being published.\n\nIf a RealSense D435 camera is used, run the following command to bring up the camera:\n\n```shell\nros2 run realsense_node realsense_node __params:=`ros2 pkg prefix realsense_examples`/share/realsense_examples/config/d435.yaml\n```\n\n> Note: other cameras can be used, only if it can publish RGB image topic and camera_info topic.\n\nRun the following command to bring up the pose estimation of a calibration pattern:\n\n```shell\nros2 launch handeye_target_detection pose_estimation.launch.py\n```\n\nThe launch file loads parameters from `./launch/pose_estimation.yaml` file. 
\nFor the meaning of these parameters, refer to the list below:\n\n```shell\nLaunch options:\n\n  --pattern (string, default: ARUCO)\n    The pattern of the calibration plate; it should be one of {CHESSBOARD, ASYMMETRIC_CIRCLES_GRID, CHARUCO, ARUCO}\n\n  --image_topic (string, default: /camera/color/image_raw)\n    The RGB image topic, to which the pose estimation node subscribes\n\n  --camera_info_topic (string, default: /camera/color/camera_info)\n    The camera info topic, to which the pose estimation node subscribes\n\n  --publish_image_topic (string, default: /image/detected)\n    The image topic published by the pose estimation node\n\n  --width (int, default: 3)\n    Usually the number of squares or markers along the X direction\n\n  --height (int, default: 4)\n    Usually the number of squares or markers along the Y direction\n\n  --dictionary (string, default: DICT_6X6_250)\n    If the ARUCO or CHARUCO pattern is used, this parameter indicates which marker dictionary the board markers belong to. For more information, refer to https://docs.opencv.org/3.4.0/d5/dae/tutorial_aruco_detection.html\n\n  --chessboard_square_size (double, default: 0.026)\n    If a CHESSBOARD is used, this indicates the square length\n\n  --circle_grid_seperation (double, default: 0.035)\n    If an ASYMMETRIC_CIRCLES_GRID is used, this indicates the separation distance between circles\n\n  --aruco_board_marker_size (double, default: 0.035)\n    The size of an aruco marker\n\n  --aruco_board_marker_seperation (double, default: 0.007)\n    The separation distance between aruco markers\n\n  --charuco_board_marker_size (double, default: 0.022)\n    The length of a charuco marker\n\n  --charuco_board_square_size (double, default: 0.037)\n    The length of a charuco square\n```\n\nTo run properly, the user has to customize these parameters.\n\nFor example, to estimate the pose of the calibration patterns listed in the `Prerequisite` section, the parameters should be:\n\n* Chessboard\n\n```yml\npose_estimation:\n    ros__parameters:\n        pattern: \"CHESSBOARD\"\n        image_topic: \"/camera/color/image_raw\"\n        camera_info_topic: \"/camera/color/camera_info\"\n        publish_image_topic: \"/image/detected\"\n        width: 9\n        height: 6\n        chessboard_square_size: 0.026\n```\n\n* Asymmetric circles grid\n\n```yml\n# 4X11 0.035\npose_estimation:\n    ros__parameters:\n        pattern: \"ASYMMETRIC_CIRCLES_GRID\"\n        image_topic: \"/camera/color/image_raw\"\n        camera_info_topic: \"/camera/color/camera_info\"\n        publish_image_topic: \"/image/detected\"\n        width: 4\n        height: 11\n        circle_grid_seperation: 0.035\n```\n\n```yml\n# 3X5 0.035\npose_estimation:\n    ros__parameters:\n        pattern: \"ASYMMETRIC_CIRCLES_GRID\"\n        image_topic: \"/camera/color/image_raw\"\n        camera_info_topic: \"/camera/color/camera_info\"\n        publish_image_topic: \"/image/detected\"\n        width: 3\n        height: 5\n        circle_grid_seperation: 0.035\n```\n\n* Aruco board\n\n```yml\n# 5x7 DICT_6X6_250 0.035 0.007\npose_estimation:\n    ros__parameters:\n        pattern: \"ARUCO\"\n        image_topic: \"/camera/color/image_raw\"\n        camera_info_topic: \"/camera/color/camera_info\"\n        publish_image_topic: \"/image/detected\"\n        width: 5\n        height: 7\n        dictionary: \"DICT_6X6_250\"\n        aruco_board_marker_size: 0.035\n        aruco_board_marker_seperation: 0.007\n```\n\n```yml\n# 3x4 DICT_4X4_50 0.0256 0.0066\npose_estimation:
## 6. Results\n\nIf the detection works well, the (red, green, blue) arrows indicating the coordinate system origin at the corner of the calibration board should show up in the picture. For the patterns listed above, the result should look like:\n\nCHESSBOARD|ASYMMETRIC CIRCLES GRID 4X11|ASYMMETRIC CIRCLES GRID 3X5|CHARUCO|ARUCO 5X7|ARUCO 3X4\n----------|----------------------------|----|-------|-----|----\n![CHESSBOARD][image1]|![ASYMMETRIC CIRCLES GRID][image2_1]|![ASYMMETRIC CIRCLES GRID][image2_2]|![CHARUCO][image3]|![ARUCO][image4_1]|![ARUCO][image4_2]\n\n[image1]:data/detected/chessboard/chessboard.png\n[image2_1]:data/detected/circlegrid/4X11_circles_grid.png\n[image2_2]:data/detected/circlegrid/3X5_circles_grid.png\n[image3]:data/detected/charuco/charuco.png\n[image4_1]:data/detected/aruco/5X7_aruco.png\n[image4_2]:data/detected/aruco/3X4_aruco.png\n\n###### *Any security issue should be reported using the process at https://01.org/security*\n"
  },
  {
    "path": "grasp_utils/handeye_target_detection/include/PoseEstimator.h",
    "content": "/** Copyright (c) 2019 Intel Corporation\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *      http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n#ifndef POSEESTIMATOR_H\n#define POSEESTIMATOR_H\n\n// ROS include\n#include <cv_bridge/cv_bridge.h>\n#include <image_transport/image_transport.h>\n#include <tf2/LinearMath/Matrix3x3.h>\n#include <tf2/LinearMath/Quaternion.h>\n#include <tf2/LinearMath/Transform.h>\n#include <tf2/buffer_core.h>\n#include <tf2_ros/transform_broadcaster.h>\n#include <rclcpp/rclcpp.hpp>\n#include <rclcpp/time.hpp>\n#include <sensor_msgs/msg/camera_info.hpp>\n\n// OpenCV include\n#include <opencv2/aruco/charuco.hpp>\n#include <opencv2/calib3d.hpp>\n#include <opencv2/core.hpp>\n#include <opencv2/imgcodecs.hpp>\n#include <opencv2/imgproc/imgproc.hpp>\n\n// System include\n#include <iostream>\n#include <string>\n\nnamespace tf2\n{\nvoid transformTF2ToMsg(const tf2::Transform& tf2, geometry_msgs::msg::TransformStamped& msg,\n                       builtin_interfaces::msg::Time stamp, const std::string& frame_id,\n                       const std::string& child_frame_id);\n}\n\nclass PoseEstimator\n{\npublic:\n  PoseEstimator(std::shared_ptr<rclcpp::Node>& node, std::string pattern, std::string image_topic,\n                std::string camera_info_topic, std::string publish_image_topic, int width, int height,\n                std::string dictionary, double chessboard_square_size, double circle_grid_seperation,\n                double aruco_board_marker_size, double aruco_board_marker_seperation, double charuco_board_marker_size,\n                double charuco_board_square_size);\n\n  ~PoseEstimator()\n  {\n  }\n\n  void imageCB_CHESSBOARD(const sensor_msgs::msg::Image::ConstSharedPtr& msg);\n  void imageCB_ASYMMETRIC_CIRCLES_GRID(const sensor_msgs::msg::Image::ConstSharedPtr& msg);\n  void imageCB_ARUCO(const sensor_msgs::msg::Image::ConstSharedPtr& msg);\n  void imageCB_CHARUCO(const sensor_msgs::msg::Image::ConstSharedPtr& msg);\n  void caminfoCB(const sensor_msgs::msg::CameraInfo::SharedPtr msg);\n  void draw(cv::Mat img, std::vector<cv::Point2f> corners, cv::Mat imgpts);\n  void rotationVectorToTF2Quaternion(tf2::Quaternion&, cv::Vec3d&);\n\nprivate:\n  // ROS variables\n  std::shared_ptr<rclcpp::Node> node_;\n  image_transport::ImageTransport it_;\n  image_transport::Subscriber image_sub_;\n  image_transport::Publisher image_pub_;\n  rclcpp::Subscription<sensor_msgs::msg::CameraInfo>::SharedPtr camerainfo_sub_;\n  tf2_ros::TransformBroadcaster broadcaster_;\n\n  // Native variables\n  cv::Mat camera_matrix_;\n  cv::Mat dist_coeffs_;\n  bool run_;\n  std::string image_topic_;\n  std::string camera_info_topic_;\n  std::string publish_image_topic_;\n  int width_;\n  int height_;\n  double chessboard_square_size_;\n  double circle_grid_seperation_;\n  double aruco_board_marker_size_;\n  double aruco_board_marker_seperation_;\n  double charuco_board_marker_size_;\n  double charuco_board_square_size_;\n  enum Patterns\n  {\n    NOT_EXISTING,\n    CHESSBOARD,\n    
ASYMMETRIC_CIRCLES_GRID,\n    CHARUCO,\n    ARUCO\n  };\n  Patterns calibration_pattern_;\n  std::map<std::string, Patterns> pattern_map_;\n  cv::aruco::PREDEFINED_DICTIONARY_NAME dictionary_;\n  std::map<std::string, cv::aruco::PREDEFINED_DICTIONARY_NAME> disctionary_map_;\n  std::string path_;\n};\n\n#endif"
  },
  {
    "path": "grasp_utils/handeye_target_detection/launch/pose_estimation.launch.py",
    "content": "# Copyright (c) 2019 Intel Corporation. All Rights Reserved\n# \n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n# \n#     http://www.apache.org/licenses/LICENSE-2.0\n# \n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport os\n\nimport launch\nimport launch.actions\nimport launch.substitutions\nimport launch_ros.actions\nfrom ament_index_python.packages import get_package_share_directory\n\ndef generate_launch_description():\n\n    # .yaml file for configuring the parameters\n    yaml = os.path.join(\n        get_package_share_directory('handeye_target_detection'), \n            'launch', 'pose_estimation.yaml'\n    )\n\n    rviz = os.path.join(\n        get_package_share_directory('handeye_target_detection'), \n            'cfg', 'handeye.rviz'\n    )\n\n    return launch.LaunchDescription([\n\n        launch_ros.actions.Node(\n            package='handeye_target_detection', node_executable='pose_estimation', \n            output='screen', arguments=['__params:='+yaml]),\n\n        launch_ros.actions.Node(\n            package='rviz2', node_executable='rviz2', \n            output='screen', arguments=['-d', rviz]),\n    ])"
  },
  {
    "path": "grasp_utils/handeye_target_detection/launch/pose_estimation.yaml",
    "content": "pose_estimation:\n    ros__parameters:\n        pattern: \"ARUCO\"\n        image_topic: \"/camera/color/image_raw\"\n        camera_info_topic: \"/camera/color/camera_info\"\n        publish_image_topic: \"/image/detected\"\n        width: 3\n        height: 4\n        dictionary: \"DICT_4X4_50\"\n        chessboard_square_size: 0.026\n        circle_grid_seperation: 0.035\n        aruco_board_marker_size: 0.0256\n        aruco_board_marker_seperation: 0.0066\n        charuco_board_marker_size: 0.022\n        charuco_board_square_size: 0.03"
  },
  {
    "path": "grasp_utils/handeye_target_detection/package.xml",
    "content": "<?xml version=\"1.0\"?>\n<?xml-model href=\"http://download.ros.org/schema/package_format3.xsd\" schematypens=\"http://www.w3.org/2001/XMLSchema\"?>\n<package format=\"3\">\n  <name>handeye_target_detection</name>\n  <version>0.1.0</version>\n  <description>Recognize the calibration pattern pose with RGBD camera.</description>\n  <author email=\"yu.yan@intel.com\">yanyu</author>\n  <maintainer email=\"yu.yan@intel.com\">yanyu</maintainer>\n  <license>Apache License 2.0</license>\n\n  <buildtool_depend>ament_cmake</buildtool_depend>\n  <build_depend>rclcpp</build_depend>\n  <build_depend>std_msgs</build_depend>\n  <build_depend>sensor_msgs</build_depend>\n  <build_depend>cv_bridge</build_depend>\n  <build_depend>image_transport</build_depend>\n  <build_depend>rospy</build_depend>\n  <build_depend>tf</build_depend>\n\n  <exec_depend>rclcpp</exec_depend>\n  <test_depend>ament_cmake_clang_format</test_depend>\n  \n  <export>\n    <build_type>ament_cmake</build_type>\n  </export>\n\n</package>"
  },
  {
    "path": "grasp_utils/handeye_target_detection/src/pose_estimation_node.cpp",
    "content": "/** Copyright (c) 2019 Intel Corporation\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *      http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n#include <chrono>\n#include <rclcpp/rclcpp.hpp>\n#include \"PoseEstimator.h\"\n\nusing namespace std::chrono_literals;\n\nint main(int argc, char** argv)\n{\n  // Start the ros node\n  rclcpp::init(argc, argv);\n  auto node = std::make_shared<rclcpp::Node>(\n      \"pose_estimation\",\n      rclcpp::NodeOptions().allow_undeclared_parameters(true).automatically_declare_parameters_from_overrides(true));\n\n  // Initialize parameter client\n  auto parameters_client = std::make_shared<rclcpp::SyncParametersClient>(node);\n  while (!parameters_client->wait_for_service(1s))\n  {\n    if (!rclcpp::ok())\n    {\n      RCLCPP_ERROR(node->get_logger(), \"Interrupted while waiting for the service. Exiting.\");\n      rclcpp::shutdown();\n    }\n    RCLCPP_INFO(node->get_logger(), \"service not available, waiting again...\");\n  }\n\n  // Get parameters\n  std::string pattern = parameters_client->get_parameter<std::string>(\"pattern\", \"ARUCO\");\n  std::string image_topic = parameters_client->get_parameter<std::string>(\"image_topic\", \"/camera/color/image_raw\");\n  std::string camera_info_topic =\n      parameters_client->get_parameter<std::string>(\"camera_info_topic\", \"/camera/color/camera_info\");\n  std::string publish_image_topic =\n      parameters_client->get_parameter<std::string>(\"publish_image_topic\", \"/image/detected\");\n  int width = parameters_client->get_parameter(\"width\", 5);\n  int height = parameters_client->get_parameter(\"height\", 7);\n  std::string dictionary = parameters_client->get_parameter<std::string>(\"dictionary\", \"DICT_4X4_50\");\n  double chessboard_square_size = parameters_client->get_parameter(\"chessboard_square_size\", 0.026);\n  double circle_grid_seperation = parameters_client->get_parameter(\"circle_grid_seperation\", 0.035);\n  double aruco_board_marker_size = parameters_client->get_parameter(\"aruco_board_marker_size\", 0.035);\n  double aruco_board_marker_seperation = parameters_client->get_parameter(\"aruco_board_marker_seperation\", 0.007);\n  double charuco_board_marker_size = parameters_client->get_parameter(\"charuco_board_marker_size\", 0.022);\n  double charuco_board_square_size = parameters_client->get_parameter(\"charuco_board_square_size\", 0.037);\n\n  PoseEstimator pe(node, pattern, image_topic, camera_info_topic, publish_image_topic, width, height, dictionary,\n                   chessboard_square_size, circle_grid_seperation, aruco_board_marker_size,\n                   aruco_board_marker_seperation, charuco_board_marker_size, charuco_board_square_size);\n\n  rclcpp::spin(node);\n  RCLCPP_INFO(node->get_logger(), \"Node calibration_pattern_pose_estimation exited.\");\n  return 0;\n}"
  },
  {
    "path": "grasp_utils/handeye_target_detection/src/pose_estimator.cpp",
    "content": "/** Copyright (c) 2019 Intel Corporation\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *      http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n#include \"PoseEstimator.h\"\nusing std::placeholders::_1;\n\nPoseEstimator::PoseEstimator(std::shared_ptr<rclcpp::Node>& node, std::string pattern, std::string image_topic,\n                             std::string camera_info_topic, std::string publish_image_topic, int width, int height,\n                             std::string dictionary, double chessboard_square_size, double circle_grid_seperation,\n                             double aruco_board_marker_size, double aruco_board_marker_seperation,\n                             double charuco_board_marker_size, double charuco_board_square_size)\n  : node_(node)\n  , it_(node_)\n  , broadcaster_(node_)\n  , run_(false)\n  , width_(width)\n  , height_(height)\n  , chessboard_square_size_(chessboard_square_size)\n  , circle_grid_seperation_(circle_grid_seperation)\n  , aruco_board_marker_size_(aruco_board_marker_size)\n  , aruco_board_marker_seperation_(aruco_board_marker_seperation)\n  , charuco_board_marker_size_(charuco_board_marker_size)\n  , charuco_board_square_size_(charuco_board_square_size)\n{\n  // Parse dictionary parameter\n  disctionary_map_[\"DICT_4X4_50\"] = cv::aruco::DICT_4X4_50;\n  disctionary_map_[\"DICT_4X4_100\"] = cv::aruco::DICT_4X4_100;\n  disctionary_map_[\"DICT_4X4_250\"] = cv::aruco::DICT_4X4_250;\n  disctionary_map_[\"DICT_4X4_1000\"] = cv::aruco::DICT_4X4_1000;\n  disctionary_map_[\"DICT_5X5_50\"] = cv::aruco::DICT_5X5_50;\n  disctionary_map_[\"DICT_5X5_100\"] = cv::aruco::DICT_5X5_100;\n  disctionary_map_[\"DICT_5X5_250\"] = cv::aruco::DICT_5X5_250;\n  disctionary_map_[\"DICT_5X5_1000\"] = cv::aruco::DICT_5X5_1000;\n  disctionary_map_[\"DICT_6X6_50\"] = cv::aruco::DICT_6X6_50;\n  disctionary_map_[\"DICT_6X6_100\"] = cv::aruco::DICT_6X6_100;\n  disctionary_map_[\"DICT_6X6_250\"] = cv::aruco::DICT_6X6_250;\n  disctionary_map_[\"DICT_6X6_1000\"] = cv::aruco::DICT_6X6_1000;\n  disctionary_map_[\"DICT_7X7_50\"] = cv::aruco::DICT_7X7_50;\n  disctionary_map_[\"DICT_7X7_100\"] = cv::aruco::DICT_7X7_100;\n  disctionary_map_[\"DICT_7X7_250\"] = cv::aruco::DICT_7X7_250;\n  disctionary_map_[\"DICT_7X7_1000\"] = cv::aruco::DICT_7X7_1000;\n  std::map<std::string, cv::aruco::PREDEFINED_DICTIONARY_NAME>::iterator it = disctionary_map_.find(dictionary);\n  if (it != disctionary_map_.end())\n    dictionary_ = disctionary_map_[dictionary];\n  else\n  {\n    RCLCPP_ERROR(node_->get_logger(), \"Invalid dictionary input: %s, default dictionary DICT_6X6_250 used.\",\n                 dictionary);\n    dictionary_ = cv::aruco::DICT_6X6_250;\n  }\n\n  // Initialize subscribers and publishers\n  image_pub_ = it_.advertise(publish_image_topic, 1);\n  camerainfo_sub_ = node_->create_subscription<sensor_msgs::msg::CameraInfo>(\n      camera_info_topic, 1, std::bind(&PoseEstimator::caminfoCB, this, _1));\n\n  calibration_pattern_ = NOT_EXISTING;\n  pattern_map_ = { { \"CHESSBOARD\", CHESSBOARD },\n               
    { \"ASYMMETRIC_CIRCLES_GRID\", ASYMMETRIC_CIRCLES_GRID },\n                   { \"ARUCO\", ARUCO },\n                   { \"CHARUCO\", CHARUCO } };\n  std::map<std::string, Patterns>::iterator it_pattern = pattern_map_.find(pattern);\n  if (it_pattern != pattern_map_.end())\n    calibration_pattern_ = pattern_map_[pattern];\n  else\n  {\n    RCLCPP_ERROR(node_->get_logger(), \"Invalid pattern input: %s.\", pattern);\n    calibration_pattern_ = NOT_EXISTING;\n  }\n\n  switch (calibration_pattern_)\n  {\n    case CHESSBOARD:\n      image_sub_ = it_.subscribe<PoseEstimator>(image_topic, 1, &PoseEstimator::imageCB_CHESSBOARD, this);\n      break;\n    case ASYMMETRIC_CIRCLES_GRID:\n      image_sub_ = it_.subscribe(image_topic, 1, &PoseEstimator::imageCB_ASYMMETRIC_CIRCLES_GRID, this);\n      break;\n    case ARUCO:\n      image_sub_ = it_.subscribe(image_topic, 1, &PoseEstimator::imageCB_ARUCO, this);\n      break;\n    case CHARUCO:\n      image_sub_ = it_.subscribe(image_topic, 1, &PoseEstimator::imageCB_CHARUCO, this);\n      break;\n    default:\n      break;\n  }\n\n  // Initialize camera intrinsic parameters\n  camera_matrix_ = cv::Mat::eye(3, 3, CV_64F);\n  dist_coeffs_ = cv::Mat::zeros(5, 1, CV_64F);\n}\n\nvoid PoseEstimator::imageCB_CHESSBOARD(const sensor_msgs::msg::Image::ConstSharedPtr& msg)\n{\n  if (run_)\n  {\n    if (!msg)\n    {\n      RCLCPP_ERROR(node_->get_logger(), \"The pointer to image message is NULL.\");\n      return;\n    }\n\n    try\n    {\n      cv_bridge::CvImagePtr cv_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::MONO8);\n\n      // Find the chessboard pattern\n      std::vector<cv::Point2f> pointBuf;  // corners\n      cv::Size patternsize;\n      patternsize.width = width_;\n      patternsize.height = height_;\n      int chessBoardFlags;\n      chessBoardFlags = cv::CALIB_CB_ADAPTIVE_THRESH | cv::CALIB_CB_NORMALIZE_IMAGE;\n      bool found = cv::findChessboardCorners(cv_ptr->image, patternsize, pointBuf, chessBoardFlags);\n\n      if (found)\n      {\n        // Correct corner points\n        cv::cornerSubPix(cv_ptr->image, pointBuf, cv::Size(11, 11), cv::Size(-1, -1),\n                         cv::TermCriteria(cv::TermCriteria::EPS + cv::TermCriteria::COUNT, 30, 0.1));\n\n        // Find the parameters of transform between the calibration plate and\n        // camera plane\n        std::vector<int> sizeOjbPnts = { static_cast<int>(pointBuf.size()), 3 };\n        cv::Mat objectPoints(sizeOjbPnts, CV_64F);\n        for (int i = 0; i < sizeOjbPnts[0]; i++)\n        {\n          objectPoints.at<double>(i, 0) = i % patternsize.width * chessboard_square_size_;\n          objectPoints.at<double>(i, 1) = i / patternsize.width * chessboard_square_size_;\n          objectPoints.at<double>(i, 2) = 0;\n        }\n        cv::Vec3d tvect, rvect;\n        bool solved = cv::solvePnPRansac(objectPoints, pointBuf, camera_matrix_, dist_coeffs_, rvect, tvect);\n        if (solved)\n        {\n          tf2::Quaternion q;\n          rotationVectorToTF2Quaternion(q, rvect);\n          geometry_msgs::msg::TransformStamped transform_stamped;\n          tf2::transformTF2ToMsg(tf2::Transform(q, tf2::Vector3(tvect[0], tvect[1], tvect[2])), transform_stamped,\n                                 node_->now(), msg->header.frame_id, \"calib_board\");\n          broadcaster_.sendTransform(transform_stamped);\n        }\n\n        // Project the axis points\n        cv::Mat axis = cv::Mat::zeros(3, 3, CV_64F);\n        axis.at<double>(0, 0) = 3 * chessboard_square_size_;\n  
      axis.at<double>(1, 1) = 3 * chessboard_square_size_;\n        axis.at<double>(2, 2) = -3 * chessboard_square_size_;\n        cv::Mat imageAxisPoints;\n        cv::projectPoints(axis, rvect, tvect, camera_matrix_, dist_coeffs_, imageAxisPoints);\n\n        // Draw axis to image\n        cv_bridge::CvImage cv_image_color(msg->header, sensor_msgs::image_encodings::RGB8);\n        cv::cvtColor(cv_ptr->image, cv_image_color.image, cv::COLOR_GRAY2RGB);\n        cv::drawChessboardCorners(cv_image_color.image, patternsize, cv::Mat(pointBuf), found);\n        draw(cv_image_color.image, pointBuf, imageAxisPoints);\n\n        // Output stream\n        image_pub_.publish(cv_image_color.toImageMsg());\n      }\n      else\n        // Output stream\n        image_pub_.publish(cv_ptr->toImageMsg());\n    }\n    catch (cv_bridge::Exception& e)\n    {\n      RCLCPP_ERROR(node_->get_logger(), \"cv_bridge exeption: %s\", e.what());\n    }\n  }\n}\n\nvoid PoseEstimator::imageCB_ASYMMETRIC_CIRCLES_GRID(const sensor_msgs::msg::Image::ConstSharedPtr& msg)\n{\n  if (run_)\n  {\n    if (!msg)\n    {\n      RCLCPP_INFO(node_->get_logger(), \"The pointer to image message is NULL.\");\n      return;\n    }\n    try\n    {\n      cv_bridge::CvImagePtr cv_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::MONO8);\n\n      // Find the circlesgrid pattern\n      std::vector<cv::Point2f> pointBuf;  // corners\n      cv::Size patternsize;\n      patternsize.width = width_;\n      patternsize.height = height_;\n      int chessBoardFlags;\n      chessBoardFlags = cv::CALIB_CB_ASYMMETRIC_GRID;\n      bool found = cv::findCirclesGrid(cv_ptr->image, patternsize, pointBuf, chessBoardFlags);\n\n      if (found)\n      {\n        // Correct corner points\n        std::vector<cv::Point2f> corners2;\n        patternsize.height = (patternsize.height + 1) / 2;\n        for (int i = 0; i < patternsize.height; i++)\n        {\n          for (int j = 0; j < patternsize.width; j++)\n            corners2.push_back(pointBuf[i * patternsize.width * 2 + j]);\n        }\n        pointBuf.clear();\n        for (size_t i = 0; i < corners2.size(); i++)\n          pointBuf.push_back(corners2[i]);\n\n        // Find the parameters of transform between the calibration plate and\n        // camera plane\n        std::vector<int> sizeOjbPnts = { static_cast<int>(pointBuf.size()), 3 };\n        cv::Mat objectPoints(sizeOjbPnts, CV_64F);\n        for (int i = 0; i < sizeOjbPnts[0]; i++)\n        {\n          objectPoints.at<double>(i, 0) = i % patternsize.width * circle_grid_seperation_;\n          objectPoints.at<double>(i, 1) = i / patternsize.width * circle_grid_seperation_;\n          objectPoints.at<double>(i, 2) = 0;\n        }\n        cv::Vec3d tvect, rvect;\n        bool solved = cv::solvePnPRansac(objectPoints, pointBuf, camera_matrix_, dist_coeffs_, rvect, tvect);\n        if (solved)\n        {\n          tf2::Quaternion q;\n          rotationVectorToTF2Quaternion(q, rvect);\n          geometry_msgs::msg::TransformStamped transform_stamped;\n          tf2::transformTF2ToMsg(tf2::Transform(q, tf2::Vector3(tvect[0], tvect[1], tvect[2])), transform_stamped,\n                                 node_->now(), msg->header.frame_id, \"calib_board\");\n          broadcaster_.sendTransform(transform_stamped);\n        }\n\n        // Project the axis points\n        cv::Mat axis = cv::Mat::zeros(3, 3, CV_64F);\n        axis.at<double>(0, 0) = 3 * circle_grid_seperation_;\n        axis.at<double>(1, 1) = 3 * circle_grid_seperation_;\n     
   axis.at<double>(2, 2) = -3 * circle_grid_seperation_;\n        cv::Mat imageAxisPoints;\n        cv::projectPoints(axis, rvect, tvect, camera_matrix_, dist_coeffs_, imageAxisPoints);\n\n        // Draw axis to image\n        cv_bridge::CvImage cv_image_color(msg->header, sensor_msgs::image_encodings::RGB8);\n        cv::cvtColor(cv_ptr->image, cv_image_color.image, cv::COLOR_GRAY2RGB);\n        cv::drawChessboardCorners(cv_image_color.image, patternsize, cv::Mat(pointBuf), found);\n        draw(cv_image_color.image, pointBuf, imageAxisPoints);\n\n        // Output stream\n        image_pub_.publish(cv_image_color.toImageMsg());\n      }\n      else\n        // Output stream\n        image_pub_.publish(cv_ptr->toImageMsg());\n    }\n    catch (cv_bridge::Exception& e)\n    {\n      RCLCPP_ERROR(node_->get_logger(), \"cv_bridge exeption: %s\", e.what());\n    }\n  }\n}\n\nvoid PoseEstimator::imageCB_ARUCO(const sensor_msgs::msg::Image::ConstSharedPtr& msg)\n{\n  if (run_)\n  {\n    if (!msg)\n    {\n      RCLCPP_INFO(node_->get_logger(), \"The pointer to image message is NULL.\");\n      return;\n    }\n    try\n    {\n      cv_bridge::CvImagePtr cv_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::MONO8);\n\n      // Detect aruco board\n      cv::Ptr<cv::aruco::Dictionary> dictionary = cv::aruco::getPredefinedDictionary(dictionary_);\n      cv::Ptr<cv::aruco::GridBoard> board = cv::aruco::GridBoard::create(width_, height_, aruco_board_marker_size_,\n                                                                         aruco_board_marker_seperation_, dictionary);\n      std::vector<int> ids;\n      std::vector<std::vector<cv::Point2f>> corners;\n      cv::aruco::detectMarkers(cv_ptr->image, dictionary, corners, ids);\n\n      if (ids.size() > 0)\n      {\n        cv::Mat imageColor;\n        cv::cvtColor(cv_ptr->image, imageColor, cv::COLOR_GRAY2RGB);\n        std::vector<std::vector<cv::Point2f>> rejectedCorners;\n        cv::aruco::refineDetectedMarkers(cv_ptr->image, board, corners, ids, rejectedCorners, camera_matrix_,\n                                         dist_coeffs_);\n        cv::aruco::drawDetectedMarkers(imageColor, corners, ids);\n\n        // Estimate the pose of aruco board\n        cv::Vec3d rvect, tvect;\n        int valid = cv::aruco::estimatePoseBoard(corners, ids, board, camera_matrix_, dist_coeffs_, rvect, tvect);\n\n        // If at least one board marker detected\n        if (valid > 0)\n        {\n          cv::aruco::drawAxis(imageColor, camera_matrix_, dist_coeffs_, rvect, tvect, 0.1);\n          tf2::Quaternion q;\n          rotationVectorToTF2Quaternion(q, rvect);\n          geometry_msgs::msg::TransformStamped transform_stamped;\n          tf2::transformTF2ToMsg(tf2::Transform(q, tf2::Vector3(tvect[0], tvect[1], tvect[2])), transform_stamped,\n                                 node_->now(), msg->header.frame_id, \"calib_board\");\n          broadcaster_.sendTransform(transform_stamped);\n        }\n\n        // Output stream\n        cv_bridge::CvImage cv_image_color(msg->header, sensor_msgs::image_encodings::RGB8, imageColor);\n        image_pub_.publish(cv_image_color.toImageMsg());\n      }\n      else\n        // Output stream\n        image_pub_.publish(cv_ptr->toImageMsg());\n    }\n    catch (cv_bridge::Exception& e)\n    {\n      RCLCPP_ERROR(node_->get_logger(), \"cv_bridge exeption: %s\", e.what());\n    }\n  }\n}\n\nvoid PoseEstimator::imageCB_CHARUCO(const sensor_msgs::msg::Image::ConstSharedPtr& msg)\n{\n  if (run_)\n  {\n    if 
(!msg)\n    {\n      RCLCPP_ERROR(node_->get_logger(), \"The pointer to image message is NULL.\");\n      return;\n    }\n    try\n    {\n      cv_bridge::CvImagePtr cv_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::MONO8);\n\n      // Detect ChArUco\n      cv::Ptr<cv::aruco::Dictionary> dictionary = cv::aruco::getPredefinedDictionary(dictionary_);\n      cv::Ptr<cv::aruco::CharucoBoard> board = cv::aruco::CharucoBoard::create(\n          width_, height_, charuco_board_square_size_, charuco_board_marker_size_, dictionary);\n      cv::Ptr<cv::aruco::DetectorParameters> params_ptr(new cv::aruco::DetectorParameters());\n#if CV_MINOR_VERSION == 2\n      params_ptr->doCornerRefinement = true;\n#else\n      params_ptr->cornerRefinementMethod = cv::aruco::CORNER_REFINE_SUBPIX;\n#endif\n      std::vector<int> ids;\n      std::vector<std::vector<cv::Point2f>> corners;\n      cv::aruco::detectMarkers(cv_ptr->image, dictionary, corners, ids, params_ptr);\n      if (ids.size() > 0)\n      {\n        cv::Mat imageColor;\n        cv::cvtColor(cv_ptr->image, imageColor, cv::COLOR_GRAY2RGB);\n\n        std::vector<cv::Point2f> charucoCorners;\n        std::vector<int> charucoIds;\n        cv::aruco::interpolateCornersCharuco(corners, ids, cv_ptr->image, board, charucoCorners, charucoIds);\n        if (charucoIds.size() > 0)\n        {\n          cv::aruco::drawDetectedCornersCharuco(imageColor, charucoCorners, charucoIds, cv::Scalar(255, 0, 0));\n          // Estimate charuco pose\n          cv::Vec3d rvect, tvect;\n          bool valid = cv::aruco::estimatePoseCharucoBoard(charucoCorners, charucoIds, board, camera_matrix_,\n                                                           dist_coeffs_, rvect, tvect);\n          if (valid)\n          {\n            cv::aruco::drawAxis(imageColor, camera_matrix_, dist_coeffs_, rvect, tvect, 0.1);\n            tf2::Quaternion q;\n            rotationVectorToTF2Quaternion(q, rvect);\n            geometry_msgs::msg::TransformStamped transform_stamped;\n            tf2::transformTF2ToMsg(tf2::Transform(q, tf2::Vector3(tvect[0], tvect[1], tvect[2])), transform_stamped,\n                                   node_->now(), msg->header.frame_id, \"calib_board\");\n            broadcaster_.sendTransform(transform_stamped);\n          }\n        }\n\n        // Output stream\n        cv_bridge::CvImage cv_image_color(msg->header, sensor_msgs::image_encodings::RGB8, imageColor);\n        image_pub_.publish(cv_image_color.toImageMsg());\n      }\n      else\n        // Output stream\n        image_pub_.publish(cv_ptr->toImageMsg());\n    }\n    catch (cv_bridge::Exception& e)\n    {\n      RCLCPP_ERROR(node_->get_logger(), \"cv_bridge exeption: %s\", e.what());\n    }\n  }\n}\n\nvoid PoseEstimator::caminfoCB(const sensor_msgs::msg::CameraInfo::SharedPtr msg)\n{\n  if (!run_)\n  {\n    if (msg->k.size() == 9 && msg->d.size() == 5)\n    {\n      // Store camera matrix info\n      for (size_t i = 0; i < 3; i++)\n        for (size_t j = 0; j < 3; j++)\n          camera_matrix_.at<double>(i, j) = msg->k[i * 3 + j];\n\n      // Store camera distortion info\n      for (size_t i = 0; i < 5; i++)\n        dist_coeffs_.at<double>(i, 0) = msg->d[i];\n\n      // Set the flag to start processing the image\n      run_ = true;\n    }\n    else\n    {\n      RCLCPP_ERROR(node_->get_logger(), \"Got invalid camera info.\");\n      run_ = false;\n    }\n  }\n}\n\nvoid PoseEstimator::draw(cv::Mat img, std::vector<cv::Point2f> corners, cv::Mat imgpts)\n{\n  cv::Point 
corner(corners[0]);\n  cv::Point axis_point_x(imgpts.ptr<double>(0)[0], imgpts.ptr<double>(0)[1]);\n  cv::Point axis_point_y(imgpts.ptr<double>(1)[0], imgpts.ptr<double>(1)[1]);\n  cv::Point axis_point_z(imgpts.ptr<double>(2)[0], imgpts.ptr<double>(2)[1]);\n  cv::line(img, corner, axis_point_x, cv::Scalar(255, 0, 0), 6);\n  cv::line(img, corner, axis_point_y, cv::Scalar(0, 255, 0), 6);\n  cv::line(img, corner, axis_point_z, cv::Scalar(0, 0, 255), 6);\n}\n\nvoid PoseEstimator::rotationVectorToTF2Quaternion(tf2::Quaternion& q, cv::Vec3d& rvect)\n{\n  q.setRPY(0.0, 0.0, 0.0);\n  cv::Mat rm;\n  cv::Rodrigues(rvect, rm);\n  tf2::Matrix3x3 m(rm.ptr<double>(0)[0], rm.ptr<double>(0)[1], rm.ptr<double>(0)[2], rm.ptr<double>(1)[0],\n                   rm.ptr<double>(1)[1], rm.ptr<double>(1)[2], rm.ptr<double>(2)[0], rm.ptr<double>(2)[1],\n                   rm.ptr<double>(2)[2]);\n  m.getRotation(q);\n}\n"
  },
  {
    "path": "grasp_utils/handeye_tf_service/CMakeLists.txt",
    "content": "cmake_minimum_required(VERSION 3.5)\n\nproject(handeye_tf_service)\n\n# Default to C++14\nif(NOT CMAKE_CXX_STANDARD)\n  set(CMAKE_CXX_STANDARD 14)\nendif()\n\nif(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES \"Clang\")\n  add_compile_options(-Wall -Wextra -Wpedantic)\nendif()\n\nfind_package(rclcpp REQUIRED)\nfind_package(tf2_ros REQUIRED)\nfind_package(ament_cmake REQUIRED)\nfind_package(geometry_msgs REQUIRED)\nfind_package(builtin_interfaces REQUIRED)\nfind_package(std_msgs REQUIRED)\nfind_package(rosidl_default_generators REQUIRED)\n\nset(srv_files\n  \"srv/HandeyeTF.srv\")\n\nrosidl_generate_interfaces(${PROJECT_NAME}\n  ${srv_files}\n  DEPENDENCIES geometry_msgs builtin_interfaces std_msgs\n)\n\nament_export_dependencies(rosidl_default_runtime)\n\nadd_executable(handeye_tf_server \n  src/handeye_tf_server.cpp\n)\n\nament_target_dependencies(handeye_tf_server\n  rclcpp tf2_ros\n)\n\nget_default_rmw_implementation(rmw_implementation)\nfind_package(\"${rmw_implementation}\" REQUIRED)\nget_rmw_typesupport(typesupport_impls \"${rmw_implementation}\" LANGUAGE \"cpp\")\n\nforeach(typesupport_impl ${typesupport_impls})\n  rosidl_target_interfaces(handeye_tf_server\n    ${PROJECT_NAME} ${typesupport_impl}\n  )\nendforeach()\n\ninstall(TARGETS handeye_tf_server\n    DESTINATION lib/${PROJECT_NAME})\n\nament_package()"
  },
  {
    "path": "grasp_utils/handeye_tf_service/README.md",
    "content": "# handeye_tf_service"
  },
  {
    "path": "grasp_utils/handeye_tf_service/package.xml",
    "content": "<?xml version=\"1.0\"?>\n<?xml-model href=\"http://download.ros.org/schema/package_format3.xsd\" schematypens=\"http://www.w3.org/2001/XMLSchema\"?>\n<package format=\"3\">\n  <name>handeye_tf_service</name>\n  <version>0.1.0</version>\n  <description>Provide TF get function for handeye.</description>\n  <maintainer email=\"yu.yan@intel.com\">Yu Yan</maintainer>\n  <license>Apache License 2.0</license>\n\n  <buildtool_depend>ament_cmake</buildtool_depend>\n\n  <buildtool_depend>rosidl_default_generators</buildtool_depend>\n\n  <build_depend>rclcpp</build_depend>\n  <build_depend>geometry_msgs</build_depend>\n  <build_depend>std_msgs</build_depend>\n  <build_depend>tf2_ros</build_depend>\n  <build_depend>builtin_interfaces</build_depend>\n\n  <exec_depend>rosidl_default_runtime</exec_depend>\n\n  <member_of_group>rosidl_interface_packages</member_of_group>\n\n  <export>\n    <build_type>ament_cmake</build_type>\n  </export>\n</package>"
  },
  {
    "path": "grasp_utils/handeye_tf_service/src/handeye_tf_server.cpp",
    "content": "/** Copyright (c) 2019 Intel Corporation. All Rights Reserved\n  *\n  * Licensed under the Apache License, Version 2.0 (the \"License\");\n  * you may not use this file except in compliance with the License.\n  * You may obtain a copy of the License at\n  *\n  *     http://www.apache.org/licenses/LICENSE-2.0\n  *\n  * Unless required by applicable law or agreed to in writing, software\n  * distributed under the License is distributed on an \"AS IS\" BASIS,\n  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n  * See the License for the specific language governing permissions and\n  * limitations under the License.\n  */\n\n#include <chrono>\n#include <tf2_ros/buffer.h>\n#include <rclcpp/rclcpp.hpp>\n#include <tf2_ros/transform_listener.h>\n#include <handeye_tf_service/srv/handeye_tf.hpp>\n#include <geometry_msgs/msg/transform_stamped.h>\n#include <tf2_ros/static_transform_broadcaster.h>\n\nusing HandeyeTF = handeye_tf_service::srv::HandeyeTF;\nusing namespace std::chrono_literals;\n\nclass ServerNode : public rclcpp::Node\n{\npublic:\n  explicit ServerNode(const rclcpp::NodeOptions & options)\n  : Node(\"handeye_tf_server\", options), broadcaster_(this)\n  {\n    // Init tf message\n    tf_msg_.header.frame_id = \"base\"; // Used to void TF_NO_FRAME_ID error, updated by user later\n    tf_msg_.child_frame_id = \"camera_link\";\n    // Initialize rotation to avoid TF_DENORMALIZED_QUATERNION error\n    tf_msg_.transform.rotation.x = 0.0;\n    tf_msg_.transform.rotation.y = 0.0;\n    tf_msg_.transform.rotation.z = 0.0;\n    tf_msg_.transform.rotation.w = 1.0;\n    \n    // Init timer\n    timer_ = this->create_wall_timer(\n      100ms, std::bind(&ServerNode::timer_callback, this));\n    \n    // Init tf listener\n    clock_ = this->get_clock();\n    rclcpp::Clock::SharedPtr clock = std::make_shared<rclcpp::Clock>(RCL_SYSTEM_TIME);\n    tf_buffer_ = std::make_shared<tf2_ros::Buffer>(clock_);\n    tf_listener_ = std::make_shared<tf2_ros::TransformListener>(*tf_buffer_);\n\n    // Service handler\n    auto handle_service =\n      [this](const std::shared_ptr<rmw_request_id_t> request_header,\n        const std::shared_ptr<HandeyeTF::Request> request,\n        std::shared_ptr<HandeyeTF::Response> response) -> void\n      {\n        if (request->publish.data) // Publish the camera-robot transform\n        {\n          RCLCPP_INFO(this->get_logger(), \"Incoming publish request\\nframe_id: %s child_frame_id: %s\",\n            request->transform.header.frame_id.data(), request->transform.child_frame_id.data());\n          tf_msg_ = request->transform;\n        }\n        else // Lookup the requested transform\n        {\n          (void)request_header;\n          RCLCPP_INFO(this->get_logger(), \"Incoming lookup request\\nframe_id: %s child_frame_id: %s\",\n            request->transform.header.frame_id.data(), request->transform.child_frame_id.data());\n\n          try\n          {\n            response->tf_lookup_result = tf_buffer_->lookupTransform(request->transform.header.frame_id, \n                                    request->transform.child_frame_id, tf2::TimePoint());\n          }\n          catch (tf2::TransformException &ex)\n          {\n            std::string temp = ex.what();\n            RCLCPP_WARN(this->get_logger(), \"%s\", temp.c_str());\n          }\n        }\n      };\n\n    // Create a service that will use the callback function to handle requests.\n    srv_ = create_service<HandeyeTF>(\"handeye_tf_service\", handle_service);\n    
RCLCPP_INFO(this->get_logger(), \"Handeye TF service created.\");\n  }\n\nprivate:\n  void timer_callback()\n  {\n    broadcaster_.sendTransform(tf_msg_);\n  }\n\n  // Handeye service\n  rclcpp::Service<HandeyeTF>::SharedPtr srv_;\n\n  // Variables used for looking up tf transforms\n  std::shared_ptr<tf2_ros::Buffer> tf_buffer_;\n  std::shared_ptr<tf2_ros::TransformListener> tf_listener_;\n\n  // Timer used for static transform publish \n  rclcpp::TimerBase::SharedPtr timer_;\n  // TF message for camera w.r.t robot transform\n  geometry_msgs::msg::TransformStamped tf_msg_;\n  // TF broadcaster\n  tf2_ros::StaticTransformBroadcaster broadcaster_;\n  rclcpp::Clock::SharedPtr clock_;\n};\n\nint main(int argc, char ** argv)\n{\n  rclcpp::init(argc, argv);\n  auto node = std::make_shared<ServerNode>(rclcpp::NodeOptions());\n  rclcpp::spin(node);\n  rclcpp::shutdown();\n  return 0;\n}"
  },
  {
    "path": "grasp_utils/handeye_tf_service/srv/HandeyeTF.srv",
    "content": "geometry_msgs/TransformStamped transform\nstd_msgs/Bool publish\n---\ngeometry_msgs/TransformStamped tf_lookup_result"
  },
  {
    "path": "grasp_utils/robot_interface/CMakeLists.txt",
    "content": "# Copyright (c) 2018 Intel Corporation\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#      http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\ncmake_minimum_required(VERSION 3.5)\nproject(robot_interface)\n\n# Default to C99\nif(NOT CMAKE_C_STANDARD)\n  set(CMAKE_C_STANDARD 99)\nendif()\n\n# Default to C++14\nif(NOT CMAKE_CXX_STANDARD)\n  set(CMAKE_CXX_STANDARD 14)\nendif()\n\nif(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES \"Clang\")\n  add_compile_options(-Wall -Wextra -Wpedantic -Wno-unused-parameter)\nendif()\n\n# find dependencies\nfind_package(ament_cmake REQUIRED)\nfind_package(tf2 REQUIRED)\nfind_package(tf2_eigen REQUIRED)\nfind_package(tf2_ros REQUIRED)\nfind_package(eigen3_cmake_module REQUIRED)\nfind_package(Eigen3 REQUIRED)\nfind_package(rclcpp REQUIRED)\nfind_package(sensor_msgs REQUIRED)\nfind_package(geometry_msgs REQUIRED)\nfind_library(\n  ur_modern_driver_LIBRARIES \n  NAMES ur_driver_lib\n  HINTS /usr/local/lib)\nfind_path(ur_modern_driver_INCLUDE_DIRS ur_modern_driver/tcp_socket.h)\n\n# Eigen 3.2 (Wily) only provides EIGEN3_INCLUDE_DIR, not EIGEN3_INCLUDE_DIRS\nif(NOT EIGEN3_INCLUDE_DIRS)\n  set(EIGEN3_INCLUDE_DIRS ${EIGEN3_INCLUDE_DIR})\nendif()\n\n# Set include directory path\ninclude_directories(\n  include\n  ${rclcpp_INCLUDE_DIRS}\n  ${tf2_INCLUDE_DIRS}\n  ${tf2_eigen_INCLUDE_DIRS}\n  ${tf2_ros_INCLUDE_DIRS}\n  ${Eigen3_INCLUDE_DIRS}\n  ${sensor_msgs_INCLUDE_DIRS}\n  ${geometry_msgs_INCLUDE_DIRS}\n  ${ur_modern_driver_INCLUDE_DIRS})\n\ninclude_directories(SYSTEM ${EIGEN3_INCLUDE_DIRS})\n\n# Add robot interface library\nset(${PROJECT_NAME}_SOURCES\n  src/control_base.cpp\n  src/control_ur.cpp\n) \nadd_library(${PROJECT_NAME} ${${PROJECT_NAME}_SOURCES})\nament_target_dependencies(${PROJECT_NAME} rclcpp sensor_msgs geometry_msgs tf2_ros)\ntarget_link_libraries(${PROJECT_NAME} ${ur_modern_driver_LIBRARIES})\n\n# Add test of UR robot interface library\nset(TEST_SOURCE\n  test/ur_test.cpp)\n\nadd_executable(ur_test_move_command test/ur_test_move_command.cpp)\nament_target_dependencies(ur_test_move_command rclcpp sensor_msgs geometry_msgs)\ntarget_link_libraries(ur_test_move_command ${PROJECT_NAME} ${ur_modern_driver_LIBRARIES})\n\nadd_executable(ur_test_state_publish test/ur_test_state_publish.cpp)\nament_target_dependencies(ur_test_state_publish rclcpp sensor_msgs geometry_msgs)\ntarget_link_libraries(ur_test_state_publish ${PROJECT_NAME} ${ur_modern_driver_LIBRARIES})\n\nament_export_include_directories(include ${Eigen3_INCLUDE_DIRS})\nament_export_interfaces(${PROJECT_NAME} HAS_LIBRARY_TARGET)\nament_export_libraries(${PROJECT_NAME} ${ur_modern_driver_LIBRARIES})\nament_export_dependencies(rclcpp)\nament_export_dependencies(sensor_msgs)\nament_export_dependencies(geometry_msgs)\nament_export_dependencies(tf2_ros)\nament_export_dependencies(eigen3_cmake_module)\nament_export_dependencies(Eigen3)\n\n# Install library\ninstall(\n  TARGETS ${PROJECT_NAME}\n  EXPORT ${PROJECT_NAME}\n  ARCHIVE DESTINATION lib\n  LIBRARY DESTINATION lib\n  RUNTIME DESTINATION bin\n  INCLUDES DESTINATION 
include\n)\n\n# Install executables\ninstall(TARGETS ur_test_move_command ur_test_state_publish\n  DESTINATION lib/${PROJECT_NAME})\n\n# Install header files  \ninstall(\n  DIRECTORY include/\n  DESTINATION include\n)\n\n# Install launch files.\ninstall(DIRECTORY\n  launch\n  DESTINATION share/${PROJECT_NAME}/\n)\n\nif(BUILD_TESTING)\n  find_package(ament_lint_auto REQUIRED)\n  # the following line skips the linter which checks for copyrights\n  # uncomment the line when a copyright and license is not present in all source files\n  #set(ament_cmake_copyright_FOUND TRUE)\n  # the following line skips cpplint (only works in a git repo)\n  # uncomment the line when this package is not in a git repo\n  #set(ament_cmake_cpplint_FOUND TRUE)\n  ament_lint_auto_find_test_dependencies()\nendif()\n\nament_package()\n"
  },
  {
    "path": "grasp_utils/robot_interface/Doxyfile",
    "content": "# Doxyfile 1.8.13\n\n# This file describes the settings to be used by the documentation system\n# doxygen (www.doxygen.org) for a project.\n#\n# All text after a double hash (##) is considered a comment and is placed in\n# front of the TAG it is preceding.\n#\n# All text after a single hash (#) is considered a comment and will be ignored.\n# The format is:\n# TAG = value [value, ...]\n# For lists, items can also be appended using:\n# TAG += value [value, ...]\n# Values that contain spaces should be placed between quotes (\\\" \\\").\n\n#---------------------------------------------------------------------------\n# Project related configuration options\n#---------------------------------------------------------------------------\n\n# This tag specifies the encoding used for all characters in the config file\n# that follow. The default is UTF-8 which is also the encoding used for all text\n# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv\n# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv\n# for the list of possible encodings.\n# The default value is: UTF-8.\n\nDOXYFILE_ENCODING      = UTF-8\n\n# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by\n# double-quotes, unless you are using Doxywizard) that should identify the\n# project for which the documentation is generated. This name is used in the\n# title of most generated pages and in a few other places.\n# The default value is: My Project.\n\nPROJECT_NAME           = \"robot_interface\"\n\n# The PROJECT_NUMBER tag can be used to enter a project or revision number. This\n# could be handy for archiving the generated documentation or if some version\n# control system is used.\n\nPROJECT_NUMBER         =\n\n# Using the PROJECT_BRIEF tag one can provide an optional one line description\n# for a project that appears at the top of each page and should give viewer a\n# quick idea about the purpose of the project. Keep the description short.\n\nPROJECT_BRIEF          = \"Native robot interface for making the visual manipulation\"\n\n# With the PROJECT_LOGO tag one can specify a logo or an icon that is included\n# in the documentation. The maximum height of the logo should not exceed 55\n# pixels and the maximum width should not exceed 200 pixels. Doxygen will copy\n# the logo to the output directory.\n\nPROJECT_LOGO           =\n\n# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path\n# into which the generated documentation will be written. If a relative path is\n# entered, it will be relative to the location where doxygen was started. If\n# left blank the current directory will be used.\n\nOUTPUT_DIRECTORY       = ./build\n\n# If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub-\n# directories (in 2 levels) under the output directory of each output format and\n# will distribute the generated files over these directories. Enabling this\n# option can be useful when feeding doxygen a huge amount of source files, where\n# putting all generated files in the same directory would otherwise causes\n# performance problems for the file system.\n# The default value is: NO.\n\nCREATE_SUBDIRS         = NO\n\n# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII\n# characters to appear in the names of generated files. 
If set to NO, non-ASCII\n# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode\n# U+3044.\n# The default value is: NO.\n\nALLOW_UNICODE_NAMES    = NO\n\n# The OUTPUT_LANGUAGE tag is used to specify the language in which all\n# documentation generated by doxygen is written. Doxygen will use this\n# information to generate all constant output in the proper language.\n# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese,\n# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States),\n# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian,\n# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages),\n# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian,\n# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian,\n# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish,\n# Ukrainian and Vietnamese.\n# The default value is: English.\n\nOUTPUT_LANGUAGE        = English\n\n# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member\n# descriptions after the members that are listed in the file and class\n# documentation (similar to Javadoc). Set to NO to disable this.\n# The default value is: YES.\n\nBRIEF_MEMBER_DESC      = YES\n\n# If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief\n# description of a member or function before the detailed description\n#\n# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the\n# brief descriptions will be completely suppressed.\n# The default value is: YES.\n\nREPEAT_BRIEF           = YES\n\n# This tag implements a quasi-intelligent brief description abbreviator that is\n# used to form the text in various listings. Each string in this list, if found\n# as the leading text of the brief description, will be stripped from the text\n# and the result, after processing the whole list, is used as the annotated\n# text. Otherwise, the brief description is used as-is. If left blank, the\n# following values are used ($name is automatically replaced with the name of\n# the entity):The $name class, The $name widget, The $name file, is, provides,\n# specifies, contains, represents, a, an and the.\n\nABBREVIATE_BRIEF       = \"The $name class\" \\\n                         \"The $name widget\" \\\n                         \"The $name file\" \\\n                         is \\\n                         provides \\\n                         specifies \\\n                         contains \\\n                         represents \\\n                         a \\\n                         an \\\n                         the\n\n# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then\n# doxygen will generate a detailed section even if there is only a brief\n# description.\n# The default value is: NO.\n\nALWAYS_DETAILED_SEC    = NO\n\n# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all\n# inherited members of a class in the documentation of that class as if those\n# members were ordinary class members. Constructors, destructors and assignment\n# operators of the base classes will not be shown.\n# The default value is: NO.\n\nINLINE_INHERITED_MEMB  = NO\n\n# If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path\n# before files name in the file list and in the header files. 
If set to NO the\n# shortest path that makes the file name unique will be used\n# The default value is: YES.\n\nFULL_PATH_NAMES        = YES\n\n# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path.\n# Stripping is only done if one of the specified strings matches the left-hand\n# part of the path. The tag can be used to show relative paths in the file list.\n# If left blank the directory from which doxygen is run is used as the path to\n# strip.\n#\n# Note that you can specify absolute paths here, but also relative paths, which\n# will be relative from the directory where doxygen is started.\n# This tag requires that the tag FULL_PATH_NAMES is set to YES.\n\nSTRIP_FROM_PATH        =\n\n# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the\n# path mentioned in the documentation of a class, which tells the reader which\n# header file to include in order to use a class. If left blank only the name of\n# the header file containing the class definition is used. Otherwise one should\n# specify the list of include paths that are normally passed to the compiler\n# using the -I flag.\n\nSTRIP_FROM_INC_PATH    =\n\n# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but\n# less readable) file names. This can be useful is your file systems doesn't\n# support long names like on DOS, Mac, or CD-ROM.\n# The default value is: NO.\n\nSHORT_NAMES            = NO\n\n# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the\n# first line (until the first dot) of a Javadoc-style comment as the brief\n# description. If set to NO, the Javadoc-style will behave just like regular Qt-\n# style comments (thus requiring an explicit @brief command for a brief\n# description.)\n# The default value is: NO.\n\nJAVADOC_AUTOBRIEF      = NO\n\n# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first\n# line (until the first dot) of a Qt-style comment as the brief description. If\n# set to NO, the Qt-style will behave just like regular Qt-style comments (thus\n# requiring an explicit \\brief command for a brief description.)\n# The default value is: NO.\n\nQT_AUTOBRIEF           = NO\n\n# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a\n# multi-line C++ special comment block (i.e. a block of //! or /// comments) as\n# a brief description. This used to be the default behavior. The new default is\n# to treat a multi-line C++ comment block as a detailed description. Set this\n# tag to YES if you prefer the old behavior instead.\n#\n# Note that setting this tag to YES also means that rational rose comments are\n# not recognized any more.\n# The default value is: NO.\n\nMULTILINE_CPP_IS_BRIEF = NO\n\n# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the\n# documentation from any documented member that it re-implements.\n# The default value is: YES.\n\nINHERIT_DOCS           = YES\n\n# If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new\n# page for each member. If set to NO, the documentation of a member will be part\n# of the file/class/namespace that contains it.\n# The default value is: NO.\n\nSEPARATE_MEMBER_PAGES  = NO\n\n# The TAB_SIZE tag can be used to set the number of spaces in a tab. 
Doxygen\n# uses this value to replace tabs by spaces in code fragments.\n# Minimum value: 1, maximum value: 16, default value: 4.\n\nTAB_SIZE               = 4\n\n# This tag can be used to specify a number of aliases that act as commands in\n# the documentation. An alias has the form:\n# name=value\n# For example adding\n# \"sideeffect=@par Side Effects:\\n\"\n# will allow you to put the command \\sideeffect (or @sideeffect) in the\n# documentation, which will result in a user-defined paragraph with heading\n# \"Side Effects:\". You can put \\n's in the value part of an alias to insert\n# newlines.\n\nALIASES                =\n\n# This tag can be used to specify a number of word-keyword mappings (TCL only).\n# A mapping has the form \"name=value\". For example adding \"class=itcl::class\"\n# will allow you to use the command class in the itcl::class meaning.\n\nTCL_SUBST              =\n\n# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources\n# only. Doxygen will then generate output that is more tailored for C. For\n# instance, some of the names that are used will be different. The list of all\n# members will be omitted, etc.\n# The default value is: NO.\n\nOPTIMIZE_OUTPUT_FOR_C  = NO\n\n# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or\n# Python sources only. Doxygen will then generate output that is more tailored\n# for that language. For instance, namespaces will be presented as packages,\n# qualified scopes will look different, etc.\n# The default value is: NO.\n\nOPTIMIZE_OUTPUT_JAVA   = NO\n\n# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran\n# sources. Doxygen will then generate output that is tailored for Fortran.\n# The default value is: NO.\n\nOPTIMIZE_FOR_FORTRAN   = NO\n\n# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL\n# sources. Doxygen will then generate output that is tailored for VHDL.\n# The default value is: NO.\n\nOPTIMIZE_OUTPUT_VHDL   = NO\n\n# Doxygen selects the parser to use depending on the extension of the files it\n# parses. With this tag you can assign which parser to use for a given\n# extension. Doxygen has a built-in mapping, but you can override or extend it\n# using this tag. The format is ext=language, where ext is a file extension, and\n# language is one of the parsers supported by doxygen: IDL, Java, Javascript,\n# C#, C, C++, D, PHP, Objective-C, Python, Fortran (fixed format Fortran:\n# FortranFixed, free formatted Fortran: FortranFree, unknown formatted Fortran:\n# Fortran. In the later case the parser tries to guess whether the code is fixed\n# or free formatted code, this is the default for Fortran type files), VHDL. For\n# instance to make doxygen treat .inc files as Fortran files (default is PHP),\n# and .f files as C (default is Fortran), use: inc=Fortran f=C.\n#\n# Note: For files without extension you can use no_extension as a placeholder.\n#\n# Note that for custom extensions you also need to set FILE_PATTERNS otherwise\n# the files are not read by doxygen.\n\nEXTENSION_MAPPING      =\n\n# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments\n# according to the Markdown format, which allows for more readable\n# documentation. See http://daringfireball.net/projects/markdown/ for details.\n# The output of markdown processing is further processed by doxygen, so you can\n# mix doxygen, HTML, and XML commands with Markdown formatting. 
Disable only in\n# case of backward compatibilities issues.\n# The default value is: YES.\n\nMARKDOWN_SUPPORT       = YES\n\n# When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up\n# to that level are automatically included in the table of contents, even if\n# they do not have an id attribute.\n# Note: This feature currently applies only to Markdown headings.\n# Minimum value: 0, maximum value: 99, default value: 0.\n# This tag requires that the tag MARKDOWN_SUPPORT is set to YES.\n\nTOC_INCLUDE_HEADINGS   = 0\n\n# When enabled doxygen tries to link words that correspond to documented\n# classes, or namespaces to their corresponding documentation. Such a link can\n# be prevented in individual cases by putting a % sign in front of the word or\n# globally by setting AUTOLINK_SUPPORT to NO.\n# The default value is: YES.\n\nAUTOLINK_SUPPORT       = YES\n\n# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want\n# to include (a tag file for) the STL sources as input, then you should set this\n# tag to YES in order to let doxygen match functions declarations and\n# definitions whose arguments contain STL classes (e.g. func(std::string);\n# versus func(std::string) {}). This also make the inheritance and collaboration\n# diagrams that involve STL classes more complete and accurate.\n# The default value is: NO.\n\nBUILTIN_STL_SUPPORT    = NO\n\n# If you use Microsoft's C++/CLI language, you should set this option to YES to\n# enable parsing support.\n# The default value is: NO.\n\nCPP_CLI_SUPPORT        = NO\n\n# Set the SIP_SUPPORT tag to YES if your project consists of sip (see:\n# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen\n# will parse them like normal C++ but will assume all classes use public instead\n# of private inheritance when no explicit protection keyword is present.\n# The default value is: NO.\n\nSIP_SUPPORT            = NO\n\n# For Microsoft's IDL there are propget and propput attributes to indicate\n# getter and setter methods for a property. Setting this option to YES will make\n# doxygen to replace the get and set methods by a property in the documentation.\n# This will only work if the methods are indeed getting or setting a simple\n# type. If this is not the case, or you want to show the methods anyway, you\n# should set this option to NO.\n# The default value is: YES.\n\nIDL_PROPERTY_SUPPORT   = YES\n\n# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC\n# tag is set to YES then doxygen will reuse the documentation of the first\n# member in the group (if any) for the other members of the group. By default\n# all members of a group must be documented explicitly.\n# The default value is: NO.\n\nDISTRIBUTE_GROUP_DOC   = NO\n\n# If one adds a struct or class to a group and this option is enabled, then also\n# any nested class or struct is added to the same group. By default this option\n# is disabled and one has to add nested compounds explicitly via \\ingroup.\n# The default value is: NO.\n\nGROUP_NESTED_COMPOUNDS = NO\n\n# Set the SUBGROUPING tag to YES to allow class member groups of the same type\n# (for instance a group of public functions) to be put as a subgroup of that\n# type (e.g. under the Public Functions section). Set it to NO to prevent\n# subgrouping. 
Alternatively, this can be done per class using the\n# \\nosubgrouping command.\n# The default value is: YES.\n\nSUBGROUPING            = YES\n\n# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions\n# are shown inside the group in which they are included (e.g. using \\ingroup)\n# instead of on a separate page (for HTML and Man pages) or section (for LaTeX\n# and RTF).\n#\n# Note that this feature does not work in combination with\n# SEPARATE_MEMBER_PAGES.\n# The default value is: NO.\n\nINLINE_GROUPED_CLASSES = NO\n\n# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions\n# with only public data fields or simple typedef fields will be shown inline in\n# the documentation of the scope in which they are defined (i.e. file,\n# namespace, or group documentation), provided this scope is documented. If set\n# to NO, structs, classes, and unions are shown on a separate page (for HTML and\n# Man pages) or section (for LaTeX and RTF).\n# The default value is: NO.\n\nINLINE_SIMPLE_STRUCTS  = NO\n\n# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or\n# enum is documented as struct, union, or enum with the name of the typedef. So\n# typedef struct TypeS {} TypeT, will appear in the documentation as a struct\n# with name TypeT. When disabled the typedef will appear as a member of a file,\n# namespace, or class. And the struct will be named TypeS. This can typically be\n# useful for C code in case the coding convention dictates that all compound\n# types are typedef'ed and only the typedef is referenced, never the tag name.\n# The default value is: NO.\n\nTYPEDEF_HIDES_STRUCT   = NO\n\n# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This\n# cache is used to resolve symbols given their name and scope. Since this can be\n# an expensive process and often the same symbol appears multiple times in the\n# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small\n# doxygen will become slower. If the cache is too large, memory is wasted. The\n# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range\n# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536\n# symbols. At the end of a run doxygen will report the cache usage and suggest\n# the optimal cache size from a speed point of view.\n# Minimum value: 0, maximum value: 9, default value: 0.\n\nLOOKUP_CACHE_SIZE      = 0\n\n#---------------------------------------------------------------------------\n# Build related configuration options\n#---------------------------------------------------------------------------\n\n# If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in\n# documentation are documented, even if no documentation was available. 
Private\n# class members and static file members will be hidden unless the\n# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES.\n# Note: This will also disable the warnings about undocumented members that are\n# normally produced when WARNINGS is set to YES.\n# The default value is: NO.\n\nEXTRACT_ALL            = NO\n\n# If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will\n# be included in the documentation.\n# The default value is: NO.\n\nEXTRACT_PRIVATE        = NO\n\n# If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal\n# scope will be included in the documentation.\n# The default value is: NO.\n\nEXTRACT_PACKAGE        = NO\n\n# If the EXTRACT_STATIC tag is set to YES, all static members of a file will be\n# included in the documentation.\n# The default value is: NO.\n\nEXTRACT_STATIC         = NO\n\n# If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined\n# locally in source files will be included in the documentation. If set to NO,\n# only classes defined in header files are included. Does not have any effect\n# for Java sources.\n# The default value is: YES.\n\nEXTRACT_LOCAL_CLASSES  = YES\n\n# This flag is only useful for Objective-C code. If set to YES, local methods,\n# which are defined in the implementation section but not in the interface are\n# included in the documentation. If set to NO, only methods in the interface are\n# included.\n# The default value is: NO.\n\nEXTRACT_LOCAL_METHODS  = NO\n\n# If this flag is set to YES, the members of anonymous namespaces will be\n# extracted and appear in the documentation as a namespace called\n# 'anonymous_namespace{file}', where file will be replaced with the base name of\n# the file that contains the anonymous namespace. By default anonymous namespace\n# are hidden.\n# The default value is: NO.\n\nEXTRACT_ANON_NSPACES   = NO\n\n# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all\n# undocumented members inside documented classes or files. If set to NO these\n# members will be included in the various overviews, but no documentation\n# section is generated. This option has no effect if EXTRACT_ALL is enabled.\n# The default value is: NO.\n\nHIDE_UNDOC_MEMBERS     = NO\n\n# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all\n# undocumented classes that are normally visible in the class hierarchy. If set\n# to NO, these classes will be included in the various overviews. This option\n# has no effect if EXTRACT_ALL is enabled.\n# The default value is: NO.\n\nHIDE_UNDOC_CLASSES     = NO\n\n# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend\n# (class|struct|union) declarations. If set to NO, these declarations will be\n# included in the documentation.\n# The default value is: NO.\n\nHIDE_FRIEND_COMPOUNDS  = NO\n\n# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any\n# documentation blocks found inside the body of a function. If set to NO, these\n# blocks will be appended to the function's detailed documentation block.\n# The default value is: NO.\n\nHIDE_IN_BODY_DOCS      = NO\n\n# The INTERNAL_DOCS tag determines if documentation that is typed after a\n# \\internal command is included. If the tag is set to NO then the documentation\n# will be excluded. 
Set it to YES to include the internal documentation.\n# The default value is: NO.\n\nINTERNAL_DOCS          = NO\n\n# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file\n# names in lower-case letters. If set to YES, upper-case letters are also\n# allowed. This is useful if you have classes or files whose names only differ\n# in case and if your file system supports case sensitive file names. Windows\n# and Mac users are advised to set this option to NO.\n# The default value is: system dependent.\n\nCASE_SENSE_NAMES       = YES\n\n# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with\n# their full class and namespace scopes in the documentation. If set to YES, the\n# scope will be hidden.\n# The default value is: NO.\n\nHIDE_SCOPE_NAMES       = NO\n\n# If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will\n# append additional text to a page's title, such as Class Reference. If set to\n# YES the compound reference will be hidden.\n# The default value is: NO.\n\nHIDE_COMPOUND_REFERENCE= NO\n\n# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of\n# the files that are included by a file in the documentation of that file.\n# The default value is: YES.\n\nSHOW_INCLUDE_FILES     = YES\n\n# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each\n# grouped member an include statement to the documentation, telling the reader\n# which file to include in order to use the member.\n# The default value is: NO.\n\nSHOW_GROUPED_MEMB_INC  = NO\n\n# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include\n# files with double quotes in the documentation rather than with sharp brackets.\n# The default value is: NO.\n\nFORCE_LOCAL_INCLUDES   = NO\n\n# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the\n# documentation for inline members.\n# The default value is: YES.\n\nINLINE_INFO            = YES\n\n# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the\n# (detailed) documentation of file and class members alphabetically by member\n# name. If set to NO, the members will appear in declaration order.\n# The default value is: YES.\n\nSORT_MEMBER_DOCS       = YES\n\n# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief\n# descriptions of file, namespace and class members alphabetically by member\n# name. If set to NO, the members will appear in declaration order. Note that\n# this will also influence the order of the classes in the class list.\n# The default value is: NO.\n\nSORT_BRIEF_DOCS        = NO\n\n# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the\n# (brief and detailed) documentation of class members so that constructors and\n# destructors are listed first. If set to NO the constructors will appear in the\n# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS.\n# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief\n# member documentation.\n# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting\n# detailed member documentation.\n# The default value is: NO.\n\nSORT_MEMBERS_CTORS_1ST = NO\n\n# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy\n# of group names into alphabetical order. 
If set to NO the group names will\n# appear in their defined order.\n# The default value is: NO.\n\nSORT_GROUP_NAMES       = NO\n\n# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by\n# fully-qualified names, including namespaces. If set to NO, the class list will\n# be sorted only by class name, not including the namespace part.\n# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.\n# Note: This option applies only to the class list, not to the alphabetical\n# list.\n# The default value is: NO.\n\nSORT_BY_SCOPE_NAME     = NO\n\n# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper\n# type resolution of all parameters of a function it will reject a match between\n# the prototype and the implementation of a member function even if there is\n# only one candidate or it is obvious which candidate to choose by doing a\n# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still\n# accept a match between prototype and implementation in such cases.\n# The default value is: NO.\n\nSTRICT_PROTO_MATCHING  = NO\n\n# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo\n# list. This list is created by putting \\todo commands in the documentation.\n# The default value is: YES.\n\nGENERATE_TODOLIST      = YES\n\n# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test\n# list. This list is created by putting \\test commands in the documentation.\n# The default value is: YES.\n\nGENERATE_TESTLIST      = YES\n\n# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug\n# list. This list is created by putting \\bug commands in the documentation.\n# The default value is: YES.\n\nGENERATE_BUGLIST       = YES\n\n# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO)\n# the deprecated list. This list is created by putting \\deprecated commands in\n# the documentation.\n# The default value is: YES.\n\nGENERATE_DEPRECATEDLIST= YES\n\n# The ENABLED_SECTIONS tag can be used to enable conditional documentation\n# sections, marked by \\if <section_label> ... \\endif and \\cond <section_label>\n# ... \\endcond blocks.\n\nENABLED_SECTIONS       =\n\n# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the\n# initial value of a variable or macro / define can have for it to appear in the\n# documentation. If the initializer consists of more lines than specified here\n# it will be hidden. Use a value of 0 to hide initializers completely. The\n# appearance of the value of individual variables and macros / defines can be\n# controlled using \\showinitializer or \\hideinitializer command in the\n# documentation regardless of this setting.\n# Minimum value: 0, maximum value: 10000, default value: 30.\n\nMAX_INITIALIZER_LINES  = 30\n\n# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at\n# the bottom of the documentation of classes and structs. If set to YES, the\n# list will mention the files that were used to generate the documentation.\n# The default value is: YES.\n\nSHOW_USED_FILES        = YES\n\n# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This\n# will remove the Files entry from the Quick Index and from the Folder Tree View\n# (if specified).\n# The default value is: YES.\n\nSHOW_FILES             = YES\n\n# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces\n# page. 
This will remove the Namespaces entry from the Quick Index and from the\n# Folder Tree View (if specified).\n# The default value is: YES.\n\nSHOW_NAMESPACES        = YES\n\n# The FILE_VERSION_FILTER tag can be used to specify a program or script that\n# doxygen should invoke to get the current version for each file (typically from\n# the version control system). Doxygen will invoke the program by executing (via\n# popen()) the command command input-file, where command is the value of the\n# FILE_VERSION_FILTER tag, and input-file is the name of an input file provided\n# by doxygen. Whatever the program writes to standard output is used as the file\n# version. For an example see the documentation.\n\nFILE_VERSION_FILTER    =\n\n# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed\n# by doxygen. The layout file controls the global structure of the generated\n# output files in an output format independent way. To create the layout file\n# that represents doxygen's defaults, run doxygen with the -l option. You can\n# optionally specify a file name after the option, if omitted DoxygenLayout.xml\n# will be used as the name of the layout file.\n#\n# Note that if you run doxygen from a directory containing a file called\n# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE\n# tag is left empty.\n\nLAYOUT_FILE            =\n\n# The CITE_BIB_FILES tag can be used to specify one or more bib files containing\n# the reference definitions. This must be a list of .bib files. The .bib\n# extension is automatically appended if omitted. This requires the bibtex tool\n# to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info.\n# For LaTeX the style of the bibliography can be controlled using\n# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the\n# search path. See also \\cite for info how to create references.\n\nCITE_BIB_FILES         =\n\n#---------------------------------------------------------------------------\n# Configuration options related to warning and progress messages\n#---------------------------------------------------------------------------\n\n# The QUIET tag can be used to turn on/off the messages that are generated to\n# standard output by doxygen. If QUIET is set to YES this implies that the\n# messages are off.\n# The default value is: NO.\n\nQUIET                  = NO\n\n# The WARNINGS tag can be used to turn on/off the warning messages that are\n# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES\n# this implies that the warnings are on.\n#\n# Tip: Turn warnings on while writing the documentation.\n# The default value is: YES.\n\nWARNINGS               = YES\n\n# If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate\n# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag\n# will automatically be disabled.\n# The default value is: YES.\n\nWARN_IF_UNDOCUMENTED   = YES\n\n# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for\n# potential errors in the documentation, such as not documenting some parameters\n# in a documented function, or documenting parameters that don't exist or using\n# markup commands wrongly.\n# The default value is: YES.\n\nWARN_IF_DOC_ERROR      = YES\n\n# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that\n# are documented, but have no documentation for their parameters or return\n# value. 
If set to NO, doxygen will only warn about wrong or incomplete\n# parameter documentation, but not about the absence of documentation.\n# The default value is: NO.\n\nWARN_NO_PARAMDOC       = NO\n\n# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when\n# a warning is encountered.\n# The default value is: NO.\n\nWARN_AS_ERROR          = NO\n\n# The WARN_FORMAT tag determines the format of the warning messages that doxygen\n# can produce. The string should contain the $file, $line, and $text tags, which\n# will be replaced by the file and line number from which the warning originated\n# and the warning text. Optionally the format may contain $version, which will\n# be replaced by the version of the file (if it could be obtained via\n# FILE_VERSION_FILTER)\n# The default value is: $file:$line: $text.\n\nWARN_FORMAT            = \"$file:$line: $text\"\n\n# The WARN_LOGFILE tag can be used to specify a file to which warning and error\n# messages should be written. If left blank the output is written to standard\n# error (stderr).\n\nWARN_LOGFILE           =\n\n#---------------------------------------------------------------------------\n# Configuration options related to the input files\n#---------------------------------------------------------------------------\n\n# The INPUT tag is used to specify the files and/or directories that contain\n# documented source files. You may enter file names like myfile.cpp or\n# directories like /usr/src/myproject. Separate the files or directories with\n# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING\n# Note: If this tag is empty the current directory is searched.\n\nINPUT                  = ./README.md ./include/robot_interface/control_base.hpp ./src/control_base.cpp\n\n# This tag can be used to specify the character encoding of the source files\n# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses\n# libiconv (or the iconv built into libc) for the transcoding. 
See the libiconv\n# documentation (see: http://www.gnu.org/software/libiconv) for the list of\n# possible encodings.\n# The default value is: UTF-8.\n\nINPUT_ENCODING         = UTF-8\n\n# If the value of the INPUT tag contains directories, you can use the\n# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and\n# *.h) to filter out the source-files in the directories.\n#\n# Note that for custom extensions or not directly supported extensions you also\n# need to set EXTENSION_MAPPING for the extension otherwise the files are not\n# read by doxygen.\n#\n# If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp,\n# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h,\n# *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc,\n# *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f95, *.f03, *.f08,\n# *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf and *.qsf.\n\nFILE_PATTERNS          = *.c \\\n                         *.cc \\\n                         *.cxx \\\n                         *.cpp \\\n                         *.c++ \\\n                         *.java \\\n                         *.ii \\\n                         *.ixx \\\n                         *.ipp \\\n                         *.i++ \\\n                         *.inl \\\n                         *.idl \\\n                         *.ddl \\\n                         *.odl \\\n                         *.h \\\n                         *.hh \\\n                         *.hxx \\\n                         *.hpp \\\n                         *.h++ \\\n                         *.cs \\\n                         *.d \\\n                         *.php \\\n                         *.php4 \\\n                         *.php5 \\\n                         *.phtml \\\n                         *.inc \\\n                         *.m \\\n                         *.markdown \\\n                         *.md \\\n                         *.mm \\\n                         *.dox \\\n                         *.py \\\n                         *.pyw \\\n                         *.f90 \\\n                         *.f95 \\\n                         *.f03 \\\n                         *.f08 \\\n                         *.f \\\n                         *.for \\\n                         *.tcl \\\n                         *.vhd \\\n                         *.vhdl \\\n                         *.ucf \\\n                         *.qsf\n\n# The RECURSIVE tag can be used to specify whether or not subdirectories should\n# be searched for input files as well.\n# The default value is: NO.\n\nRECURSIVE              = NO\n\n# The EXCLUDE tag can be used to specify files and/or directories that should be\n# excluded from the INPUT source files. 
This way you can easily exclude a\n# subdirectory from a directory tree whose root is specified with the INPUT tag.\n#\n# Note that relative paths are relative to the directory from which doxygen is\n# run.\n\nEXCLUDE                =\n\n# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or\n# directories that are symbolic links (a Unix file system feature) are excluded\n# from the input.\n# The default value is: NO.\n\nEXCLUDE_SYMLINKS       = NO\n\n# If the value of the INPUT tag contains directories, you can use the\n# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude\n# certain files from those directories.\n#\n# Note that the wildcards are matched against the file with absolute path, so to\n# exclude all test directories for example use the pattern */test/*\n\nEXCLUDE_PATTERNS       =\n\n# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names\n# (namespaces, classes, functions, etc.) that should be excluded from the\n# output. The symbol name can be a fully qualified name, a word, or if the\n# wildcard * is used, a substring. Examples: ANamespace, AClass,\n# AClass::ANamespace, ANamespace::*Test\n#\n# Note that the wildcards are matched against the file with absolute path, so to\n# exclude all test directories use the pattern */test/*\n\nEXCLUDE_SYMBOLS        =\n\n# The EXAMPLE_PATH tag can be used to specify one or more files or directories\n# that contain example code fragments that are included (see the \\include\n# command).\n\nEXAMPLE_PATH           =\n\n# If the value of the EXAMPLE_PATH tag contains directories, you can use the\n# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and\n# *.h) to filter out the source-files in the directories. If left blank all\n# files are included.\n\nEXAMPLE_PATTERNS       = *\n\n# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be\n# searched for input files to be used with the \\include or \\dontinclude commands\n# irrespective of the value of the RECURSIVE tag.\n# The default value is: NO.\n\nEXAMPLE_RECURSIVE      = NO\n\n# The IMAGE_PATH tag can be used to specify one or more files or directories\n# that contain images that are to be included in the documentation (see the\n# \\image command).\n\nIMAGE_PATH             =\n\n# The INPUT_FILTER tag can be used to specify a program that doxygen should\n# invoke to filter for each input file. Doxygen will invoke the filter program\n# by executing (via popen()) the command:\n#\n# <filter> <input-file>\n#\n# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the\n# name of an input file. Doxygen will then use the output that the filter\n# program writes to standard output. If FILTER_PATTERNS is specified, this tag\n# will be ignored.\n#\n# Note that the filter must not add or remove lines; it is applied before the\n# code is scanned, but not when the output code is generated. If lines are added\n# or removed, the anchors will not be placed correctly.\n#\n# Note that for custom extensions or not directly supported extensions you also\n# need to set EXTENSION_MAPPING for the extension otherwise the files are not\n# properly processed by doxygen.\n\nINPUT_FILTER           =\n\n# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern\n# basis. Doxygen will compare the file name with each pattern and apply the\n# filter if there is a match. The filters are a list of the form: pattern=filter\n# (like *.cpp=my_cpp_filter). 
See INPUT_FILTER for further information on how\n# filters are used. If the FILTER_PATTERNS tag is empty or if none of the\n# patterns match the file name, INPUT_FILTER is applied.\n#\n# Note that for custom extensions or not directly supported extensions you also\n# need to set EXTENSION_MAPPING for the extension otherwise the files are not\n# properly processed by doxygen.\n\nFILTER_PATTERNS        =\n\n# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using\n# INPUT_FILTER) will also be used to filter the input files that are used for\n# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES).\n# The default value is: NO.\n\nFILTER_SOURCE_FILES    = NO\n\n# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file\n# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and\n# it is also possible to disable source filtering for a specific pattern using\n# *.ext= (so without naming a filter).\n# This tag requires that the tag FILTER_SOURCE_FILES is set to YES.\n\nFILTER_SOURCE_PATTERNS =\n\n# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that\n# is part of the input, its contents will be placed on the main page\n# (index.html). This can be useful if you have a project on for instance GitHub\n# and want to reuse the introduction page also for the doxygen output.\n\nUSE_MDFILE_AS_MAINPAGE = ./README.md\n\n#---------------------------------------------------------------------------\n# Configuration options related to source browsing\n#---------------------------------------------------------------------------\n\n# If the SOURCE_BROWSER tag is set to YES then a list of source files will be\n# generated. Documented entities will be cross-referenced with these sources.\n#\n# Note: To get rid of all source code in the generated output, make sure that\n# also VERBATIM_HEADERS is set to NO.\n# The default value is: NO.\n\nSOURCE_BROWSER         = NO\n\n# Setting the INLINE_SOURCES tag to YES will include the body of functions,\n# classes and enums directly into the documentation.\n# The default value is: NO.\n\nINLINE_SOURCES         = NO\n\n# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any\n# special comment blocks from generated source code fragments. Normal C, C++ and\n# Fortran comments will always remain visible.\n# The default value is: YES.\n\nSTRIP_CODE_COMMENTS    = YES\n\n# If the REFERENCED_BY_RELATION tag is set to YES then for each documented\n# function all documented functions referencing it will be listed.\n# The default value is: NO.\n\nREFERENCED_BY_RELATION = NO\n\n# If the REFERENCES_RELATION tag is set to YES then for each documented function\n# all documented entities called/used by that function will be listed.\n# The default value is: NO.\n\nREFERENCES_RELATION    = NO\n\n# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set\n# to YES then the hyperlinks from functions in REFERENCES_RELATION and\n# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will\n# link to the documentation.\n# The default value is: YES.\n\nREFERENCES_LINK_SOURCE = YES\n\n# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the\n# source code will show a tooltip with additional information such as prototype,\n# brief description and links to the definition and documentation. 
Since this\n# will make the HTML file larger and loading of large files a bit slower, you\n# can opt to disable this feature.\n# The default value is: YES.\n# This tag requires that the tag SOURCE_BROWSER is set to YES.\n\nSOURCE_TOOLTIPS        = YES\n\n# If the USE_HTAGS tag is set to YES then the references to source code will\n# point to the HTML generated by the htags(1) tool instead of doxygen built-in\n# source browser. The htags tool is part of GNU's global source tagging system\n# (see http://www.gnu.org/software/global/global.html). You will need version\n# 4.8.6 or higher.\n#\n# To use it do the following:\n# - Install the latest version of global\n# - Enable SOURCE_BROWSER and USE_HTAGS in the config file\n# - Make sure the INPUT points to the root of the source tree\n# - Run doxygen as normal\n#\n# Doxygen will invoke htags (and that will in turn invoke gtags), so these\n# tools must be available from the command line (i.e. in the search path).\n#\n# The result: instead of the source browser generated by doxygen, the links to\n# source code will now point to the output of htags.\n# The default value is: NO.\n# This tag requires that the tag SOURCE_BROWSER is set to YES.\n\nUSE_HTAGS              = NO\n\n# If the VERBATIM_HEADERS tag is set the YES then doxygen will generate a\n# verbatim copy of the header file for each class for which an include is\n# specified. Set to NO to disable this.\n# See also: Section \\class.\n# The default value is: YES.\n\nVERBATIM_HEADERS       = YES\n\n# If the CLANG_ASSISTED_PARSING tag is set to YES then doxygen will use the\n# clang parser (see: http://clang.llvm.org/) for more accurate parsing at the\n# cost of reduced performance. This can be particularly helpful with template\n# rich C++ code for which doxygen's built-in parser lacks the necessary type\n# information.\n# Note: The availability of this option depends on whether or not doxygen was\n# generated with the -Duse-libclang=ON option for CMake.\n# The default value is: NO.\n\nCLANG_ASSISTED_PARSING = NO\n\n# If clang assisted parsing is enabled you can provide the compiler with command\n# line options that you would normally use when invoking the compiler. Note that\n# the include paths will already be set by doxygen for the files and directories\n# specified with INPUT and INCLUDE_PATH.\n# This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES.\n\nCLANG_OPTIONS          =\n\n#---------------------------------------------------------------------------\n# Configuration options related to the alphabetical class index\n#---------------------------------------------------------------------------\n\n# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all\n# compounds will be generated. Enable this if the project contains a lot of\n# classes, structs, unions or interfaces.\n# The default value is: YES.\n\nALPHABETICAL_INDEX     = YES\n\n# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in\n# which the alphabetical index list will be split.\n# Minimum value: 1, maximum value: 20, default value: 5.\n# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.\n\nCOLS_IN_ALPHA_INDEX    = 5\n\n# In case all classes in a project start with a common prefix, all classes will\n# be put under the same header in the alphabetical index. 
The IGNORE_PREFIX tag\n# can be used to specify a prefix (or a list of prefixes) that should be ignored\n# while generating the index headers.\n# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.\n\nIGNORE_PREFIX          =\n\n#---------------------------------------------------------------------------\n# Configuration options related to the HTML output\n#---------------------------------------------------------------------------\n\n# If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output\n# The default value is: YES.\n\nGENERATE_HTML          = YES\n\n# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a\n# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of\n# it.\n# The default directory is: html.\n# This tag requires that the tag GENERATE_HTML is set to YES.\n\nHTML_OUTPUT            = html\n\n# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each\n# generated HTML page (for example: .htm, .php, .asp).\n# The default value is: .html.\n# This tag requires that the tag GENERATE_HTML is set to YES.\n\nHTML_FILE_EXTENSION    = .html\n\n# The HTML_HEADER tag can be used to specify a user-defined HTML header file for\n# each generated HTML page. If the tag is left blank doxygen will generate a\n# standard header.\n#\n# To get valid HTML the header file that includes any scripts and style sheets\n# that doxygen needs, which is dependent on the configuration options used (e.g.\n# the setting GENERATE_TREEVIEW). It is highly recommended to start with a\n# default header using\n# doxygen -w html new_header.html new_footer.html new_stylesheet.css\n# YourConfigFile\n# and then modify the file new_header.html. See also section \"Doxygen usage\"\n# for information on how to generate the default header that doxygen normally\n# uses.\n# Note: The header is subject to change so you typically have to regenerate the\n# default header when upgrading to a newer version of doxygen. For a description\n# of the possible markers and block names see the documentation.\n# This tag requires that the tag GENERATE_HTML is set to YES.\n\nHTML_HEADER            =\n\n# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each\n# generated HTML page. If the tag is left blank doxygen will generate a standard\n# footer. See HTML_HEADER for more information on how to generate a default\n# footer and what special commands can be used inside the footer. See also\n# section \"Doxygen usage\" for information on how to generate the default footer\n# that doxygen normally uses.\n# This tag requires that the tag GENERATE_HTML is set to YES.\n\nHTML_FOOTER            =\n\n# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style\n# sheet that is used by each HTML page. It can be used to fine-tune the look of\n# the HTML output. If left blank doxygen will generate a default style sheet.\n# See also section \"Doxygen usage\" for information on how to generate the style\n# sheet that doxygen normally uses.\n# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as\n# it is more robust and this tag (HTML_STYLESHEET) will in the future become\n# obsolete.\n# This tag requires that the tag GENERATE_HTML is set to YES.\n\nHTML_STYLESHEET        =\n\n# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined\n# cascading style sheets that are included after the standard style sheets\n# created by doxygen. 
Using this option one can overrule certain style aspects.\n# This is preferred over using HTML_STYLESHEET since it does not replace the\n# standard style sheet and is therefore more robust against future updates.\n# Doxygen will copy the style sheet files to the output directory.\n# Note: The order of the extra style sheet files is of importance (e.g. the last\n# style sheet in the list overrules the setting of the previous ones in the\n# list). For an example see the documentation.\n# This tag requires that the tag GENERATE_HTML is set to YES.\n\nHTML_EXTRA_STYLESHEET  =\n\n# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or\n# other source files which should be copied to the HTML output directory. Note\n# that these files will be copied to the base HTML output directory. Use the\n# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these\n# files. In the HTML_STYLESHEET file, use the file name only. Also note that the\n# files will be copied as-is; there are no commands or markers available.\n# This tag requires that the tag GENERATE_HTML is set to YES.\n\nHTML_EXTRA_FILES       =\n\n# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen\n# will adjust the colors in the style sheet and background images according to\n# this color. Hue is specified as an angle on a colorwheel, see\n# http://en.wikipedia.org/wiki/Hue for more information. For instance the value\n# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300\n# purple, and 360 is red again.\n# Minimum value: 0, maximum value: 359, default value: 220.\n# This tag requires that the tag GENERATE_HTML is set to YES.\n\nHTML_COLORSTYLE_HUE    = 220\n\n# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors\n# in the HTML output. For a value of 0 the output will use grayscales only. A\n# value of 255 will produce the most vivid colors.\n# Minimum value: 0, maximum value: 255, default value: 100.\n# This tag requires that the tag GENERATE_HTML is set to YES.\n\nHTML_COLORSTYLE_SAT    = 100\n\n# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the\n# luminance component of the colors in the HTML output. Values below 100\n# gradually make the output lighter, whereas values above 100 make the output\n# darker. The value divided by 100 is the actual gamma applied, so 80 represents\n# a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not\n# change the gamma.\n# Minimum value: 40, maximum value: 240, default value: 80.\n# This tag requires that the tag GENERATE_HTML is set to YES.\n\nHTML_COLORSTYLE_GAMMA  = 80\n\n# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML\n# page will contain the date and time when the page was generated. Setting this\n# to YES can help to show when doxygen was last run and thus if the\n# documentation is up to date.\n# The default value is: NO.\n# This tag requires that the tag GENERATE_HTML is set to YES.\n\nHTML_TIMESTAMP         = NO\n\n# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML\n# documentation will contain sections that can be hidden and shown after the\n# page has loaded.\n# The default value is: NO.\n# This tag requires that the tag GENERATE_HTML is set to YES.\n\nHTML_DYNAMIC_SECTIONS  = NO\n\n# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries\n# shown in the various tree structured indices initially; the user can expand\n# and collapse entries dynamically later on. 
Doxygen will expand the tree to\n# such a level that at most the specified number of entries are visible (unless\n# a fully collapsed tree already exceeds this amount). So setting the number of\n# entries 1 will produce a full collapsed tree by default. 0 is a special value\n# representing an infinite number of entries and will result in a full expanded\n# tree by default.\n# Minimum value: 0, maximum value: 9999, default value: 100.\n# This tag requires that the tag GENERATE_HTML is set to YES.\n\nHTML_INDEX_NUM_ENTRIES = 100\n\n# If the GENERATE_DOCSET tag is set to YES, additional index files will be\n# generated that can be used as input for Apple's Xcode 3 integrated development\n# environment (see: http://developer.apple.com/tools/xcode/), introduced with\n# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a\n# Makefile in the HTML output directory. Running make will produce the docset in\n# that directory and running make install will install the docset in\n# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at\n# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html\n# for more information.\n# The default value is: NO.\n# This tag requires that the tag GENERATE_HTML is set to YES.\n\nGENERATE_DOCSET        = NO\n\n# This tag determines the name of the docset feed. A documentation feed provides\n# an umbrella under which multiple documentation sets from a single provider\n# (such as a company or product suite) can be grouped.\n# The default value is: Doxygen generated docs.\n# This tag requires that the tag GENERATE_DOCSET is set to YES.\n\nDOCSET_FEEDNAME        = \"Doxygen generated docs\"\n\n# This tag specifies a string that should uniquely identify the documentation\n# set bundle. This should be a reverse domain-name style string, e.g.\n# com.mycompany.MyDocSet. Doxygen will append .docset to the name.\n# The default value is: org.doxygen.Project.\n# This tag requires that the tag GENERATE_DOCSET is set to YES.\n\nDOCSET_BUNDLE_ID       = org.doxygen.Project\n\n# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify\n# the documentation publisher. This should be a reverse domain-name style\n# string, e.g. com.mycompany.MyDocSet.documentation.\n# The default value is: org.doxygen.Publisher.\n# This tag requires that the tag GENERATE_DOCSET is set to YES.\n\nDOCSET_PUBLISHER_ID    = org.doxygen.Publisher\n\n# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher.\n# The default value is: Publisher.\n# This tag requires that the tag GENERATE_DOCSET is set to YES.\n\nDOCSET_PUBLISHER_NAME  = Publisher\n\n# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three\n# additional HTML index files: index.hhp, index.hhc, and index.hhk. The\n# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop\n# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on\n# Windows.\n#\n# The HTML Help Workshop contains a compiler that can convert all HTML output\n# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML\n# files are now used as the Windows 98 help format, and will replace the old\n# Windows help format (.hlp) on all Windows platforms in the future. Compressed\n# HTML files also contain an index, a table of contents, and you can search for\n# words in the documentation. 
The HTML workshop also contains a viewer for\n# compressed HTML files.\n# The default value is: NO.\n# This tag requires that the tag GENERATE_HTML is set to YES.\n\nGENERATE_HTMLHELP      = NO\n\n# The CHM_FILE tag can be used to specify the file name of the resulting .chm\n# file. You can add a path in front of the file if the result should not be\n# written to the html output directory.\n# This tag requires that the tag GENERATE_HTMLHELP is set to YES.\n\nCHM_FILE               =\n\n# The HHC_LOCATION tag can be used to specify the location (absolute path\n# including file name) of the HTML help compiler (hhc.exe). If non-empty,\n# doxygen will try to run the HTML help compiler on the generated index.hhp.\n# The file has to be specified with full path.\n# This tag requires that the tag GENERATE_HTMLHELP is set to YES.\n\nHHC_LOCATION           =\n\n# The GENERATE_CHI flag controls if a separate .chi index file is generated\n# (YES) or that it should be included in the master .chm file (NO).\n# The default value is: NO.\n# This tag requires that the tag GENERATE_HTMLHELP is set to YES.\n\nGENERATE_CHI           = NO\n\n# The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc)\n# and project file content.\n# This tag requires that the tag GENERATE_HTMLHELP is set to YES.\n\nCHM_INDEX_ENCODING     =\n\n# The BINARY_TOC flag controls whether a binary table of contents is generated\n# (YES) or a normal table of contents (NO) in the .chm file. Furthermore it\n# enables the Previous and Next buttons.\n# The default value is: NO.\n# This tag requires that the tag GENERATE_HTMLHELP is set to YES.\n\nBINARY_TOC             = NO\n\n# The TOC_EXPAND flag can be set to YES to add extra items for group members to\n# the table of contents of the HTML help documentation and to the tree view.\n# The default value is: NO.\n# This tag requires that the tag GENERATE_HTMLHELP is set to YES.\n\nTOC_EXPAND             = NO\n\n# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and\n# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that\n# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help\n# (.qch) of the generated HTML documentation.\n# The default value is: NO.\n# This tag requires that the tag GENERATE_HTML is set to YES.\n\nGENERATE_QHP           = NO\n\n# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify\n# the file name of the resulting .qch file. The path specified is relative to\n# the HTML output folder.\n# This tag requires that the tag GENERATE_QHP is set to YES.\n\nQCH_FILE               =\n\n# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help\n# Project output. For more information please see Qt Help Project / Namespace\n# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace).\n# The default value is: org.doxygen.Project.\n# This tag requires that the tag GENERATE_QHP is set to YES.\n\nQHP_NAMESPACE          = org.doxygen.Project\n\n# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt\n# Help Project output. For more information please see Qt Help Project / Virtual\n# Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual-\n# folders).\n# The default value is: doc.\n# This tag requires that the tag GENERATE_QHP is set to YES.\n\nQHP_VIRTUAL_FOLDER     = doc\n\n# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom\n# filter to add. 
For more information please see Qt Help Project / Custom\n# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-\n# filters).\n# This tag requires that the tag GENERATE_QHP is set to YES.\n\nQHP_CUST_FILTER_NAME   =\n\n# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the\n# custom filter to add. For more information please see Qt Help Project / Custom\n# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-\n# filters).\n# This tag requires that the tag GENERATE_QHP is set to YES.\n\nQHP_CUST_FILTER_ATTRS  =\n\n# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this\n# project's filter section matches. Qt Help Project / Filter Attributes (see:\n# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes).\n# This tag requires that the tag GENERATE_QHP is set to YES.\n\nQHP_SECT_FILTER_ATTRS  =\n\n# The QHG_LOCATION tag can be used to specify the location of Qt's\n# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the\n# generated .qhp file.\n# This tag requires that the tag GENERATE_QHP is set to YES.\n\nQHG_LOCATION           =\n\n# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be\n# generated, together with the HTML files, they form an Eclipse help plugin. To\n# install this plugin and make it available under the help contents menu in\n# Eclipse, the contents of the directory containing the HTML and XML files needs\n# to be copied into the plugins directory of eclipse. The name of the directory\n# within the plugins directory should be the same as the ECLIPSE_DOC_ID value.\n# After copying Eclipse needs to be restarted before the help appears.\n# The default value is: NO.\n# This tag requires that the tag GENERATE_HTML is set to YES.\n\nGENERATE_ECLIPSEHELP   = NO\n\n# A unique identifier for the Eclipse help plugin. When installing the plugin\n# the directory name containing the HTML and XML files should also have this\n# name. Each documentation set should have its own identifier.\n# The default value is: org.doxygen.Project.\n# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES.\n\nECLIPSE_DOC_ID         = org.doxygen.Project\n\n# If you want full control over the layout of the generated HTML pages it might\n# be necessary to disable the index and replace it with your own. The\n# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top\n# of each HTML page. A value of NO enables the index and the value YES disables\n# it. Since the tabs in the index contain the same information as the navigation\n# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES.\n# The default value is: NO.\n# This tag requires that the tag GENERATE_HTML is set to YES.\n\nDISABLE_INDEX          = NO\n\n# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index\n# structure should be generated to display hierarchical information. If the tag\n# value is set to YES, a side panel will be generated containing a tree-like\n# index structure (just like the one that is generated for HTML Help). For this\n# to work a browser that supports JavaScript, DHTML, CSS and frames is required\n# (i.e. any modern browser). Windows users are probably better off using the\n# HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can\n# further fine-tune the look of the index. 
As an example, the default style\n# sheet generated by doxygen has an example that shows how to put an image at\n# the root of the tree instead of the PROJECT_NAME. Since the tree basically has\n# the same information as the tab index, you could consider setting\n# DISABLE_INDEX to YES when enabling this option.\n# The default value is: NO.\n# This tag requires that the tag GENERATE_HTML is set to YES.\n\nGENERATE_TREEVIEW      = NO\n\n# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that\n# doxygen will group on one line in the generated HTML documentation.\n#\n# Note that a value of 0 will completely suppress the enum values from appearing\n# in the overview section.\n# Minimum value: 0, maximum value: 20, default value: 4.\n# This tag requires that the tag GENERATE_HTML is set to YES.\n\nENUM_VALUES_PER_LINE   = 4\n\n# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used\n# to set the initial width (in pixels) of the frame in which the tree is shown.\n# Minimum value: 0, maximum value: 1500, default value: 250.\n# This tag requires that the tag GENERATE_HTML is set to YES.\n\nTREEVIEW_WIDTH         = 250\n\n# If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to\n# external symbols imported via tag files in a separate window.\n# The default value is: NO.\n# This tag requires that the tag GENERATE_HTML is set to YES.\n\nEXT_LINKS_IN_WINDOW    = NO\n\n# Use this tag to change the font size of LaTeX formulas included as images in\n# the HTML documentation. When you change the font size after a successful\n# doxygen run you need to manually remove any form_*.png images from the HTML\n# output directory to force them to be regenerated.\n# Minimum value: 8, maximum value: 50, default value: 10.\n# This tag requires that the tag GENERATE_HTML is set to YES.\n\nFORMULA_FONTSIZE       = 10\n\n# Use the FORMULA_TRANPARENT tag to determine whether or not the images\n# generated for formulas are transparent PNGs. Transparent PNGs are not\n# supported properly for IE 6.0, but are supported on all modern browsers.\n#\n# Note that when changing this option you need to delete any form_*.png files in\n# the HTML output directory before the changes have effect.\n# The default value is: YES.\n# This tag requires that the tag GENERATE_HTML is set to YES.\n\nFORMULA_TRANSPARENT    = YES\n\n# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see\n# http://www.mathjax.org) which uses client side Javascript for the rendering\n# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX\n# installed or if you want to formulas look prettier in the HTML output. When\n# enabled you may also need to install MathJax separately and configure the path\n# to it using the MATHJAX_RELPATH option.\n# The default value is: NO.\n# This tag requires that the tag GENERATE_HTML is set to YES.\n\nUSE_MATHJAX            = NO\n\n# When MathJax is enabled you can set the default output format to be used for\n# the MathJax output. See the MathJax site (see:\n# http://docs.mathjax.org/en/latest/output.html) for more details.\n# Possible values are: HTML-CSS (which is slower, but has the best\n# compatibility), NativeMML (i.e. MathML) and SVG.\n# The default value is: HTML-CSS.\n# This tag requires that the tag USE_MATHJAX is set to YES.\n\nMATHJAX_FORMAT         = HTML-CSS\n\n# When MathJax is enabled you need to specify the location relative to the HTML\n# output directory using the MATHJAX_RELPATH option. 
The destination directory\n# should contain the MathJax.js script. For instance, if the mathjax directory\n# is located at the same level as the HTML output directory, then\n# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax\n# Content Delivery Network so you can quickly see the result without installing\n# MathJax. However, it is strongly recommended to install a local copy of\n# MathJax from http://www.mathjax.org before deployment.\n# The default value is: http://cdn.mathjax.org/mathjax/latest.\n# This tag requires that the tag USE_MATHJAX is set to YES.\n\nMATHJAX_RELPATH        = http://cdn.mathjax.org/mathjax/latest\n\n# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax\n# extension names that should be enabled during MathJax rendering. For example\n# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols\n# This tag requires that the tag USE_MATHJAX is set to YES.\n\nMATHJAX_EXTENSIONS     =\n\n# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces\n# of code that will be used on startup of the MathJax code. See the MathJax site\n# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an\n# example see the documentation.\n# This tag requires that the tag USE_MATHJAX is set to YES.\n\nMATHJAX_CODEFILE       =\n\n# When the SEARCHENGINE tag is enabled doxygen will generate a search box for\n# the HTML output. The underlying search engine uses javascript and DHTML and\n# should work on any modern browser. Note that when using HTML help\n# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET)\n# there is already a search function so this one should typically be disabled.\n# For large projects the javascript based search engine can be slow, then\n# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to\n# search using the keyboard; to jump to the search box use <access key> + S\n# (what the <access key> is depends on the OS and browser, but it is typically\n# <CTRL>, <ALT>/<option>, or both). Inside the search box use the <cursor down\n# key> to jump into the search results window, the results can be navigated\n# using the <cursor keys>. Press <Enter> to select an item or <escape> to cancel\n# the search. The filter options can be selected when the cursor is inside the\n# search box by pressing <Shift>+<cursor down>. Also here use the <cursor keys>\n# to select a filter and <Enter> or <escape> to activate or cancel the filter\n# option.\n# The default value is: YES.\n# This tag requires that the tag GENERATE_HTML is set to YES.\n\nSEARCHENGINE           = YES\n\n# When the SERVER_BASED_SEARCH tag is enabled the search engine will be\n# implemented using a web server instead of a web client using Javascript. There\n# are two flavors of web server based searching depending on the EXTERNAL_SEARCH\n# setting. When disabled, doxygen will generate a PHP script for searching and\n# an index file used by the script. When EXTERNAL_SEARCH is enabled the indexing\n# and searching needs to be provided by external tools. See the section\n# \"External Indexing and Searching\" for details.\n# The default value is: NO.\n# This tag requires that the tag SEARCHENGINE is set to YES.\n\nSERVER_BASED_SEARCH    = NO\n\n# When EXTERNAL_SEARCH tag is enabled doxygen will no longer generate the PHP\n# script for searching. Instead the search results are written to an XML file\n# which needs to be processed by an external indexer. 
Doxygen will invoke an\n# external search engine pointed to by the SEARCHENGINE_URL option to obtain the\n# search results.\n#\n# Doxygen ships with an example indexer (doxyindexer) and search engine\n# (doxysearch.cgi) which are based on the open source search engine library\n# Xapian (see: http://xapian.org/).\n#\n# See the section \"External Indexing and Searching\" for details.\n# The default value is: NO.\n# This tag requires that the tag SEARCHENGINE is set to YES.\n\nEXTERNAL_SEARCH        = NO\n\n# The SEARCHENGINE_URL should point to a search engine hosted by a web server\n# which will return the search results when EXTERNAL_SEARCH is enabled.\n#\n# Doxygen ships with an example indexer (doxyindexer) and search engine\n# (doxysearch.cgi) which are based on the open source search engine library\n# Xapian (see: http://xapian.org/). See the section \"External Indexing and\n# Searching\" for details.\n# This tag requires that the tag SEARCHENGINE is set to YES.\n\nSEARCHENGINE_URL       =\n\n# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the unindexed\n# search data is written to a file for indexing by an external tool. With the\n# SEARCHDATA_FILE tag the name of this file can be specified.\n# The default file is: searchdata.xml.\n# This tag requires that the tag SEARCHENGINE is set to YES.\n\nSEARCHDATA_FILE        = searchdata.xml\n\n# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the\n# EXTERNAL_SEARCH_ID tag can be used as an identifier for the project. This is\n# useful in combination with EXTRA_SEARCH_MAPPINGS to search through multiple\n# projects and redirect the results back to the right project.\n# This tag requires that the tag SEARCHENGINE is set to YES.\n\nEXTERNAL_SEARCH_ID     =\n\n# The EXTRA_SEARCH_MAPPINGS tag can be used to enable searching through doxygen\n# projects other than the one defined by this configuration file, but that are\n# all added to the same external search index. Each project needs to have a\n# unique id set via EXTERNAL_SEARCH_ID. The search mapping then maps the id of\n# to a relative location where the documentation can be found. The format is:\n# EXTRA_SEARCH_MAPPINGS = tagname1=loc1 tagname2=loc2 ...\n# This tag requires that the tag SEARCHENGINE is set to YES.\n\nEXTRA_SEARCH_MAPPINGS  =\n\n#---------------------------------------------------------------------------\n# Configuration options related to the LaTeX output\n#---------------------------------------------------------------------------\n\n# If the GENERATE_LATEX tag is set to YES, doxygen will generate LaTeX output.\n# The default value is: YES.\n\nGENERATE_LATEX         = YES\n\n# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. 
If a\n# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of\n# it.\n# The default directory is: latex.\n# This tag requires that the tag GENERATE_LATEX is set to YES.\n\nLATEX_OUTPUT           = latex\n\n# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be\n# invoked.\n#\n# Note that when enabling USE_PDFLATEX this option is only used for generating\n# bitmaps for formulas in the HTML output, but not in the Makefile that is\n# written to the output directory.\n# The default file is: latex.\n# This tag requires that the tag GENERATE_LATEX is set to YES.\n\nLATEX_CMD_NAME         = latex\n\n# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to generate\n# index for LaTeX.\n# The default file is: makeindex.\n# This tag requires that the tag GENERATE_LATEX is set to YES.\n\nMAKEINDEX_CMD_NAME     = makeindex\n\n# If the COMPACT_LATEX tag is set to YES, doxygen generates more compact LaTeX\n# documents. This may be useful for small projects and may help to save some\n# trees in general.\n# The default value is: NO.\n# This tag requires that the tag GENERATE_LATEX is set to YES.\n\nCOMPACT_LATEX          = NO\n\n# The PAPER_TYPE tag can be used to set the paper type that is used by the\n# printer.\n# Possible values are: a4 (210 x 297 mm), letter (8.5 x 11 inches), legal (8.5 x\n# 14 inches) and executive (7.25 x 10.5 inches).\n# The default value is: a4.\n# This tag requires that the tag GENERATE_LATEX is set to YES.\n\nPAPER_TYPE             = a4\n\n# The EXTRA_PACKAGES tag can be used to specify one or more LaTeX package names\n# that should be included in the LaTeX output. The package can be specified just\n# by its name or with the correct syntax as to be used with the LaTeX\n# \\usepackage command. To get the times font for instance you can specify :\n# EXTRA_PACKAGES=times or EXTRA_PACKAGES={times}\n# To use the option intlimits with the amsmath package you can specify:\n# EXTRA_PACKAGES=[intlimits]{amsmath}\n# If left blank no extra packages will be included.\n# This tag requires that the tag GENERATE_LATEX is set to YES.\n\nEXTRA_PACKAGES         =\n\n# The LATEX_HEADER tag can be used to specify a personal LaTeX header for the\n# generated LaTeX document. The header should contain everything until the first\n# chapter. If it is left blank doxygen will generate a standard header. See\n# section \"Doxygen usage\" for information on how to let doxygen write the\n# default header to a separate file.\n#\n# Note: Only use a user-defined header if you know what you are doing! The\n# following commands have a special meaning inside the header: $title,\n# $datetime, $date, $doxygenversion, $projectname, $projectnumber,\n# $projectbrief, $projectlogo. Doxygen will replace $title with the empty\n# string, for the replacement values of the other commands the user is referred\n# to HTML_HEADER.\n# This tag requires that the tag GENERATE_LATEX is set to YES.\n\nLATEX_HEADER           =\n\n# The LATEX_FOOTER tag can be used to specify a personal LaTeX footer for the\n# generated LaTeX document. The footer should contain everything after the last\n# chapter. If it is left blank doxygen will generate a standard footer. 
See\n# LATEX_HEADER for more information on how to generate a default footer and what\n# special commands can be used inside the footer.\n#\n# Note: Only use a user-defined footer if you know what you are doing!\n# This tag requires that the tag GENERATE_LATEX is set to YES.\n\nLATEX_FOOTER           =\n\n# The LATEX_EXTRA_STYLESHEET tag can be used to specify additional user-defined\n# LaTeX style sheets that are included after the standard style sheets created\n# by doxygen. Using this option one can overrule certain style aspects. Doxygen\n# will copy the style sheet files to the output directory.\n# Note: The order of the extra style sheet files is of importance (e.g. the last\n# style sheet in the list overrules the setting of the previous ones in the\n# list).\n# This tag requires that the tag GENERATE_LATEX is set to YES.\n\nLATEX_EXTRA_STYLESHEET =\n\n# The LATEX_EXTRA_FILES tag can be used to specify one or more extra images or\n# other source files which should be copied to the LATEX_OUTPUT output\n# directory. Note that the files will be copied as-is; there are no commands or\n# markers available.\n# This tag requires that the tag GENERATE_LATEX is set to YES.\n\nLATEX_EXTRA_FILES      =\n\n# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated is\n# prepared for conversion to PDF (using ps2pdf or pdflatex). The PDF file will\n# contain links (just like the HTML output) instead of page references. This\n# makes the output suitable for online browsing using a PDF viewer.\n# The default value is: YES.\n# This tag requires that the tag GENERATE_LATEX is set to YES.\n\nPDF_HYPERLINKS         = YES\n\n# If the USE_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate\n# the PDF file directly from the LaTeX files. Set this option to YES, to get a\n# higher quality PDF documentation.\n# The default value is: YES.\n# This tag requires that the tag GENERATE_LATEX is set to YES.\n\nUSE_PDFLATEX           = YES\n\n# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode\n# command to the generated LaTeX files. This will instruct LaTeX to keep running\n# if errors occur, instead of asking the user for help. This option is also used\n# when generating formulas in HTML.\n# The default value is: NO.\n# This tag requires that the tag GENERATE_LATEX is set to YES.\n\nLATEX_BATCHMODE        = NO\n\n# If the LATEX_HIDE_INDICES tag is set to YES then doxygen will not include the\n# index chapters (such as File Index, Compound Index, etc.) in the output.\n# The default value is: NO.\n# This tag requires that the tag GENERATE_LATEX is set to YES.\n\nLATEX_HIDE_INDICES     = NO\n\n# If the LATEX_SOURCE_CODE tag is set to YES then doxygen will include source\n# code with syntax highlighting in the LaTeX output.\n#\n# Note that which sources are shown also depends on other settings such as\n# SOURCE_BROWSER.\n# The default value is: NO.\n# This tag requires that the tag GENERATE_LATEX is set to YES.\n\nLATEX_SOURCE_CODE      = NO\n\n# The LATEX_BIB_STYLE tag can be used to specify the style to use for the\n# bibliography, e.g. plainnat, or ieeetr. See\n# http://en.wikipedia.org/wiki/BibTeX and \\cite for more info.\n# The default value is: plain.\n# This tag requires that the tag GENERATE_LATEX is set to YES.\n\nLATEX_BIB_STYLE        = plain\n\n# If the LATEX_TIMESTAMP tag is set to YES then the footer of each generated\n# page will contain the date and time when the page was generated. 
Setting this\n# to NO can help when comparing the output of multiple runs.\n# The default value is: NO.\n# This tag requires that the tag GENERATE_LATEX is set to YES.\n\nLATEX_TIMESTAMP        = NO\n\n#---------------------------------------------------------------------------\n# Configuration options related to the RTF output\n#---------------------------------------------------------------------------\n\n# If the GENERATE_RTF tag is set to YES, doxygen will generate RTF output. The\n# RTF output is optimized for Word 97 and may not look too pretty with other RTF\n# readers/editors.\n# The default value is: NO.\n\nGENERATE_RTF           = NO\n\n# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. If a\n# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of\n# it.\n# The default directory is: rtf.\n# This tag requires that the tag GENERATE_RTF is set to YES.\n\nRTF_OUTPUT             = rtf\n\n# If the COMPACT_RTF tag is set to YES, doxygen generates more compact RTF\n# documents. This may be useful for small projects and may help to save some\n# trees in general.\n# The default value is: NO.\n# This tag requires that the tag GENERATE_RTF is set to YES.\n\nCOMPACT_RTF            = NO\n\n# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated will\n# contain hyperlink fields. The RTF file will contain links (just like the HTML\n# output) instead of page references. This makes the output suitable for online\n# browsing using Word or some other Word compatible readers that support those\n# fields.\n#\n# Note: WordPad (write) and others do not support links.\n# The default value is: NO.\n# This tag requires that the tag GENERATE_RTF is set to YES.\n\nRTF_HYPERLINKS         = NO\n\n# Load stylesheet definitions from file. Syntax is similar to doxygen's config\n# file, i.e. a series of assignments. You only have to provide replacements,\n# missing definitions are set to their default value.\n#\n# See also section \"Doxygen usage\" for information on how to generate the\n# default style sheet that doxygen normally uses.\n# This tag requires that the tag GENERATE_RTF is set to YES.\n\nRTF_STYLESHEET_FILE    =\n\n# Set optional variables used in the generation of an RTF document. Syntax is\n# similar to doxygen's config file. A template extensions file can be generated\n# using doxygen -e rtf extensionFile.\n# This tag requires that the tag GENERATE_RTF is set to YES.\n\nRTF_EXTENSIONS_FILE    =\n\n# If the RTF_SOURCE_CODE tag is set to YES then doxygen will include source code\n# with syntax highlighting in the RTF output.\n#\n# Note that which sources are shown also depends on other settings such as\n# SOURCE_BROWSER.\n# The default value is: NO.\n# This tag requires that the tag GENERATE_RTF is set to YES.\n\nRTF_SOURCE_CODE        = NO\n\n#---------------------------------------------------------------------------\n# Configuration options related to the man page output\n#---------------------------------------------------------------------------\n\n# If the GENERATE_MAN tag is set to YES, doxygen will generate man pages for\n# classes and files.\n# The default value is: NO.\n\nGENERATE_MAN           = NO\n\n# The MAN_OUTPUT tag is used to specify where the man pages will be put. If a\n# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of\n# it. 
A directory man3 will be created inside the directory specified by\n# MAN_OUTPUT.\n# The default directory is: man.\n# This tag requires that the tag GENERATE_MAN is set to YES.\n\nMAN_OUTPUT             = man\n\n# The MAN_EXTENSION tag determines the extension that is added to the generated\n# man pages. In case the manual section does not start with a number, the number\n# 3 is prepended. The dot (.) at the beginning of the MAN_EXTENSION tag is\n# optional.\n# The default value is: .3.\n# This tag requires that the tag GENERATE_MAN is set to YES.\n\nMAN_EXTENSION          = .3\n\n# The MAN_SUBDIR tag determines the name of the directory created within\n# MAN_OUTPUT in which the man pages are placed. If defaults to man followed by\n# MAN_EXTENSION with the initial . removed.\n# This tag requires that the tag GENERATE_MAN is set to YES.\n\nMAN_SUBDIR             =\n\n# If the MAN_LINKS tag is set to YES and doxygen generates man output, then it\n# will generate one additional man file for each entity documented in the real\n# man page(s). These additional files only source the real man page, but without\n# them the man command would be unable to find the correct page.\n# The default value is: NO.\n# This tag requires that the tag GENERATE_MAN is set to YES.\n\nMAN_LINKS              = NO\n\n#---------------------------------------------------------------------------\n# Configuration options related to the XML output\n#---------------------------------------------------------------------------\n\n# If the GENERATE_XML tag is set to YES, doxygen will generate an XML file that\n# captures the structure of the code including all documentation.\n# The default value is: NO.\n\nGENERATE_XML           = NO\n\n# The XML_OUTPUT tag is used to specify where the XML pages will be put. If a\n# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of\n# it.\n# The default directory is: xml.\n# This tag requires that the tag GENERATE_XML is set to YES.\n\nXML_OUTPUT             = xml\n\n# If the XML_PROGRAMLISTING tag is set to YES, doxygen will dump the program\n# listings (including syntax highlighting and cross-referencing information) to\n# the XML output. Note that enabling this will significantly increase the size\n# of the XML output.\n# The default value is: YES.\n# This tag requires that the tag GENERATE_XML is set to YES.\n\nXML_PROGRAMLISTING     = YES\n\n#---------------------------------------------------------------------------\n# Configuration options related to the DOCBOOK output\n#---------------------------------------------------------------------------\n\n# If the GENERATE_DOCBOOK tag is set to YES, doxygen will generate Docbook files\n# that can be used to generate PDF.\n# The default value is: NO.\n\nGENERATE_DOCBOOK       = NO\n\n# The DOCBOOK_OUTPUT tag is used to specify where the Docbook pages will be put.\n# If a relative path is entered the value of OUTPUT_DIRECTORY will be put in\n# front of it.\n# The default directory is: docbook.\n# This tag requires that the tag GENERATE_DOCBOOK is set to YES.\n\nDOCBOOK_OUTPUT         = docbook\n\n# If the DOCBOOK_PROGRAMLISTING tag is set to YES, doxygen will include the\n# program listings (including syntax highlighting and cross-referencing\n# information) to the DOCBOOK output. 
Note that enabling this will significantly\n# increase the size of the DOCBOOK output.\n# The default value is: NO.\n# This tag requires that the tag GENERATE_DOCBOOK is set to YES.\n\nDOCBOOK_PROGRAMLISTING = NO\n\n#---------------------------------------------------------------------------\n# Configuration options for the AutoGen Definitions output\n#---------------------------------------------------------------------------\n\n# If the GENERATE_AUTOGEN_DEF tag is set to YES, doxygen will generate an\n# AutoGen Definitions (see http://autogen.sf.net) file that captures the\n# structure of the code including all documentation. Note that this feature is\n# still experimental and incomplete at the moment.\n# The default value is: NO.\n\nGENERATE_AUTOGEN_DEF   = NO\n\n#---------------------------------------------------------------------------\n# Configuration options related to the Perl module output\n#---------------------------------------------------------------------------\n\n# If the GENERATE_PERLMOD tag is set to YES, doxygen will generate a Perl module\n# file that captures the structure of the code including all documentation.\n#\n# Note that this feature is still experimental and incomplete at the moment.\n# The default value is: NO.\n\nGENERATE_PERLMOD       = NO\n\n# If the PERLMOD_LATEX tag is set to YES, doxygen will generate the necessary\n# Makefile rules, Perl scripts and LaTeX code to be able to generate PDF and DVI\n# output from the Perl module output.\n# The default value is: NO.\n# This tag requires that the tag GENERATE_PERLMOD is set to YES.\n\nPERLMOD_LATEX          = NO\n\n# If the PERLMOD_PRETTY tag is set to YES, the Perl module output will be nicely\n# formatted so it can be parsed by a human reader. This is useful if you want to\n# understand what is going on. On the other hand, if this tag is set to NO, the\n# size of the Perl module output will be much smaller and Perl will parse it\n# just the same.\n# The default value is: YES.\n# This tag requires that the tag GENERATE_PERLMOD is set to YES.\n\nPERLMOD_PRETTY         = YES\n\n# The names of the make variables in the generated doxyrules.make file are\n# prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. This is useful\n# so different doxyrules.make files included by the same Makefile don't\n# overwrite each other's variables.\n# This tag requires that the tag GENERATE_PERLMOD is set to YES.\n\nPERLMOD_MAKEVAR_PREFIX =\n\n#---------------------------------------------------------------------------\n# Configuration options related to the preprocessor\n#---------------------------------------------------------------------------\n\n# If the ENABLE_PREPROCESSING tag is set to YES, doxygen will evaluate all\n# C-preprocessor directives found in the sources and include files.\n# The default value is: YES.\n\nENABLE_PREPROCESSING   = YES\n\n# If the MACRO_EXPANSION tag is set to YES, doxygen will expand all macro names\n# in the source code. If set to NO, only conditional compilation will be\n# performed. 
Macro expansion can be done in a controlled way by setting\n# EXPAND_ONLY_PREDEF to YES.\n# The default value is: NO.\n# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.\n\nMACRO_EXPANSION        = NO\n\n# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES then\n# the macro expansion is limited to the macros specified with the PREDEFINED and\n# EXPAND_AS_DEFINED tags.\n# The default value is: NO.\n# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.\n\nEXPAND_ONLY_PREDEF     = NO\n\n# If the SEARCH_INCLUDES tag is set to YES, the include files in the\n# INCLUDE_PATH will be searched if a #include is found.\n# The default value is: YES.\n# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.\n\nSEARCH_INCLUDES        = YES\n\n# The INCLUDE_PATH tag can be used to specify one or more directories that\n# contain include files that are not input files but should be processed by the\n# preprocessor.\n# This tag requires that the tag SEARCH_INCLUDES is set to YES.\n\nINCLUDE_PATH           =\n\n# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard\n# patterns (like *.h and *.hpp) to filter out the header-files in the\n# directories. If left blank, the patterns specified with FILE_PATTERNS will be\n# used.\n# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.\n\nINCLUDE_FILE_PATTERNS  =\n\n# The PREDEFINED tag can be used to specify one or more macro names that are\n# defined before the preprocessor is started (similar to the -D option of e.g.\n# gcc). The argument of the tag is a list of macros of the form: name or\n# name=definition (no spaces). If the definition and the \"=\" are omitted, \"=1\"\n# is assumed. To prevent a macro definition from being undefined via #undef or\n# recursively expanded use the := operator instead of the = operator.\n# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.\n\nPREDEFINED             =\n\n# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this\n# tag can be used to specify a list of macro names that should be expanded. The\n# macro definition that is found in the sources will be used. Use the PREDEFINED\n# tag if you want to use a different macro definition that overrules the\n# definition found in the source code.\n# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.\n\nEXPAND_AS_DEFINED      =\n\n# If the SKIP_FUNCTION_MACROS tag is set to YES then doxygen's preprocessor will\n# remove all references to function-like macros that are alone on a line, have\n# an all uppercase name, and do not end with a semicolon. Such function macros\n# are typically used for boiler-plate code, and will confuse the parser if not\n# removed.\n# The default value is: YES.\n# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.\n\nSKIP_FUNCTION_MACROS   = YES\n\n#---------------------------------------------------------------------------\n# Configuration options related to external references\n#---------------------------------------------------------------------------\n\n# The TAGFILES tag can be used to specify one or more tag files. For each tag\n# file the location of the external documentation should be added. The format of\n# a tag file without this location is as follows:\n# TAGFILES = file1 file2 ...\n# Adding location for the tag files is done as follows:\n# TAGFILES = file1=loc1 \"file2 = loc2\" ...\n# where loc1 and loc2 can be relative or absolute paths or URLs. 
See the\n# section \"Linking to external documentation\" for more information about the use\n# of tag files.\n# Note: Each tag file must have a unique name (where the name does NOT include\n# the path). If a tag file is not located in the directory in which doxygen is\n# run, you must also specify the path to the tagfile here.\n\nTAGFILES               =\n\n# When a file name is specified after GENERATE_TAGFILE, doxygen will create a\n# tag file that is based on the input files it reads. See section \"Linking to\n# external documentation\" for more information about the usage of tag files.\n\nGENERATE_TAGFILE       =\n\n# If the ALLEXTERNALS tag is set to YES, all external class will be listed in\n# the class index. If set to NO, only the inherited external classes will be\n# listed.\n# The default value is: NO.\n\nALLEXTERNALS           = NO\n\n# If the EXTERNAL_GROUPS tag is set to YES, all external groups will be listed\n# in the modules index. If set to NO, only the current project's groups will be\n# listed.\n# The default value is: YES.\n\nEXTERNAL_GROUPS        = YES\n\n# If the EXTERNAL_PAGES tag is set to YES, all external pages will be listed in\n# the related pages index. If set to NO, only the current project's pages will\n# be listed.\n# The default value is: YES.\n\nEXTERNAL_PAGES         = YES\n\n# The PERL_PATH should be the absolute path and name of the perl script\n# interpreter (i.e. the result of 'which perl').\n# The default file (with absolute path) is: /usr/bin/perl.\n\nPERL_PATH              = /usr/bin/perl\n\n#---------------------------------------------------------------------------\n# Configuration options related to the dot tool\n#---------------------------------------------------------------------------\n\n# If the CLASS_DIAGRAMS tag is set to YES, doxygen will generate a class diagram\n# (in HTML and LaTeX) for classes with base or super classes. Setting the tag to\n# NO turns the diagrams off. Note that this option also works with HAVE_DOT\n# disabled, but it is recommended to install and use dot, since it yields more\n# powerful graphs.\n# The default value is: YES.\n\nCLASS_DIAGRAMS         = YES\n\n# You can define message sequence charts within doxygen comments using the \\msc\n# command. Doxygen will then run the mscgen tool (see:\n# http://www.mcternan.me.uk/mscgen/)) to produce the chart and insert it in the\n# documentation. The MSCGEN_PATH tag allows you to specify the directory where\n# the mscgen tool resides. If left empty the tool is assumed to be found in the\n# default search path.\n\nMSCGEN_PATH            =\n\n# You can include diagrams made with dia in doxygen documentation. Doxygen will\n# then run dia to produce the diagram and insert it in the documentation. The\n# DIA_PATH tag allows you to specify the directory where the dia binary resides.\n# If left empty dia is assumed to be found in the default search path.\n\nDIA_PATH               =\n\n# If set to YES the inheritance and collaboration graphs will hide inheritance\n# and usage relations if the target is undocumented or is not a class.\n# The default value is: YES.\n\nHIDE_UNDOC_RELATIONS   = YES\n\n# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is\n# available from the path. This tool is part of Graphviz (see:\n# http://www.graphviz.org/), a graph visualization toolkit from AT&T and Lucent\n# Bell Labs. 
The other options in this section have no effect if this option is\n# set to NO\n# The default value is: YES.\n\nHAVE_DOT               = YES\n\n# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is allowed\n# to run in parallel. When set to 0 doxygen will base this on the number of\n# processors available in the system. You can set it explicitly to a value\n# larger than 0 to get control over the balance between CPU load and processing\n# speed.\n# Minimum value: 0, maximum value: 32, default value: 0.\n# This tag requires that the tag HAVE_DOT is set to YES.\n\nDOT_NUM_THREADS        = 0\n\n# When you want a differently looking font in the dot files that doxygen\n# generates you can specify the font name using DOT_FONTNAME. You need to make\n# sure dot is able to find the font, which can be done by putting it in a\n# standard location or by setting the DOTFONTPATH environment variable or by\n# setting DOT_FONTPATH to the directory containing the font.\n# The default value is: Helvetica.\n# This tag requires that the tag HAVE_DOT is set to YES.\n\nDOT_FONTNAME           = Helvetica\n\n# The DOT_FONTSIZE tag can be used to set the size (in points) of the font of\n# dot graphs.\n# Minimum value: 4, maximum value: 24, default value: 10.\n# This tag requires that the tag HAVE_DOT is set to YES.\n\nDOT_FONTSIZE           = 10\n\n# By default doxygen will tell dot to use the default font as specified with\n# DOT_FONTNAME. If you specify a different font using DOT_FONTNAME you can set\n# the path where dot can find it using this tag.\n# This tag requires that the tag HAVE_DOT is set to YES.\n\nDOT_FONTPATH           =\n\n# If the CLASS_GRAPH tag is set to YES then doxygen will generate a graph for\n# each documented class showing the direct and indirect inheritance relations.\n# Setting this tag to YES will force the CLASS_DIAGRAMS tag to NO.\n# The default value is: YES.\n# This tag requires that the tag HAVE_DOT is set to YES.\n\nCLASS_GRAPH            = YES\n\n# If the COLLABORATION_GRAPH tag is set to YES then doxygen will generate a\n# graph for each documented class showing the direct and indirect implementation\n# dependencies (inheritance, containment, and class references variables) of the\n# class with other documented classes.\n# The default value is: YES.\n# This tag requires that the tag HAVE_DOT is set to YES.\n\nCOLLABORATION_GRAPH    = YES\n\n# If the GROUP_GRAPHS tag is set to YES then doxygen will generate a graph for\n# groups, showing the direct groups dependencies.\n# The default value is: YES.\n# This tag requires that the tag HAVE_DOT is set to YES.\n\nGROUP_GRAPHS           = YES\n\n# If the UML_LOOK tag is set to YES, doxygen will generate inheritance and\n# collaboration diagrams in a style similar to the OMG's Unified Modeling\n# Language.\n# The default value is: NO.\n# This tag requires that the tag HAVE_DOT is set to YES.\n\nUML_LOOK               = NO\n\n# If the UML_LOOK tag is enabled, the fields and methods are shown inside the\n# class node. If there are many fields or methods and many nodes the graph may\n# become too big to be useful. The UML_LIMIT_NUM_FIELDS threshold limits the\n# number of items for each type to make the size more manageable. Set this to 0\n# for no limit. Note that the threshold may be exceeded by 50% before the limit\n# is enforced. 
So when you set the threshold to 10, up to 15 fields may appear,\n# but if the number exceeds 15, the total amount of fields shown is limited to\n# 10.\n# Minimum value: 0, maximum value: 100, default value: 10.\n# This tag requires that the tag HAVE_DOT is set to YES.\n\nUML_LIMIT_NUM_FIELDS   = 10\n\n# If the TEMPLATE_RELATIONS tag is set to YES then the inheritance and\n# collaboration graphs will show the relations between templates and their\n# instances.\n# The default value is: NO.\n# This tag requires that the tag HAVE_DOT is set to YES.\n\nTEMPLATE_RELATIONS     = NO\n\n# If the INCLUDE_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are set to\n# YES then doxygen will generate a graph for each documented file showing the\n# direct and indirect include dependencies of the file with other documented\n# files.\n# The default value is: YES.\n# This tag requires that the tag HAVE_DOT is set to YES.\n\nINCLUDE_GRAPH          = YES\n\n# If the INCLUDED_BY_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are\n# set to YES then doxygen will generate a graph for each documented file showing\n# the direct and indirect include dependencies of the file with other documented\n# files.\n# The default value is: YES.\n# This tag requires that the tag HAVE_DOT is set to YES.\n\nINCLUDED_BY_GRAPH      = YES\n\n# If the CALL_GRAPH tag is set to YES then doxygen will generate a call\n# dependency graph for every global function or class method.\n#\n# Note that enabling this option will significantly increase the time of a run.\n# So in most cases it will be better to enable call graphs for selected\n# functions only using the \\callgraph command. Disabling a call graph can be\n# accomplished by means of the command \\hidecallgraph.\n# The default value is: NO.\n# This tag requires that the tag HAVE_DOT is set to YES.\n\nCALL_GRAPH             = NO\n\n# If the CALLER_GRAPH tag is set to YES then doxygen will generate a caller\n# dependency graph for every global function or class method.\n#\n# Note that enabling this option will significantly increase the time of a run.\n# So in most cases it will be better to enable caller graphs for selected\n# functions only using the \\callergraph command. Disabling a caller graph can be\n# accomplished by means of the command \\hidecallergraph.\n# The default value is: NO.\n# This tag requires that the tag HAVE_DOT is set to YES.\n\nCALLER_GRAPH           = NO\n\n# If the GRAPHICAL_HIERARCHY tag is set to YES then doxygen will graphical\n# hierarchy of all classes instead of a textual one.\n# The default value is: YES.\n# This tag requires that the tag HAVE_DOT is set to YES.\n\nGRAPHICAL_HIERARCHY    = YES\n\n# If the DIRECTORY_GRAPH tag is set to YES then doxygen will show the\n# dependencies a directory has on other directories in a graphical way. The\n# dependency relations are determined by the #include relations between the\n# files in the directories.\n# The default value is: YES.\n# This tag requires that the tag HAVE_DOT is set to YES.\n\nDIRECTORY_GRAPH        = YES\n\n# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images\n# generated by dot. 
For an explanation of the image formats see the section\n# output formats in the documentation of the dot tool (Graphviz (see:\n# http://www.graphviz.org/)).\n# Note: If you choose svg you need to set HTML_FILE_EXTENSION to xhtml in order\n# to make the SVG files visible in IE 9+ (other browsers do not have this\n# requirement).\n# Possible values are: png, png:cairo, png:cairo:cairo, png:cairo:gd, png:gd,\n# png:gd:gd, jpg, jpg:cairo, jpg:cairo:gd, jpg:gd, jpg:gd:gd, gif, gif:cairo,\n# gif:cairo:gd, gif:gd, gif:gd:gd, svg, png:gd, png:gd:gd, png:cairo,\n# png:cairo:gd, png:cairo:cairo, png:cairo:gdiplus, png:gdiplus and\n# png:gdiplus:gdiplus.\n# The default value is: png.\n# This tag requires that the tag HAVE_DOT is set to YES.\n\nDOT_IMAGE_FORMAT       = png\n\n# If DOT_IMAGE_FORMAT is set to svg, then this option can be set to YES to\n# enable generation of interactive SVG images that allow zooming and panning.\n#\n# Note that this requires a modern browser other than Internet Explorer. Tested\n# and working are Firefox, Chrome, Safari, and Opera.\n# Note: For IE 9+ you need to set HTML_FILE_EXTENSION to xhtml in order to make\n# the SVG files visible. Older versions of IE do not have SVG support.\n# The default value is: NO.\n# This tag requires that the tag HAVE_DOT is set to YES.\n\nINTERACTIVE_SVG        = NO\n\n# The DOT_PATH tag can be used to specify the path where the dot tool can be\n# found. If left blank, it is assumed the dot tool can be found in the path.\n# This tag requires that the tag HAVE_DOT is set to YES.\n\nDOT_PATH               =\n\n# The DOTFILE_DIRS tag can be used to specify one or more directories that\n# contain dot files that are included in the documentation (see the \\dotfile\n# command).\n# This tag requires that the tag HAVE_DOT is set to YES.\n\nDOTFILE_DIRS           =\n\n# The MSCFILE_DIRS tag can be used to specify one or more directories that\n# contain msc files that are included in the documentation (see the \\mscfile\n# command).\n\nMSCFILE_DIRS           =\n\n# The DIAFILE_DIRS tag can be used to specify one or more directories that\n# contain dia files that are included in the documentation (see the \\diafile\n# command).\n\nDIAFILE_DIRS           =\n\n# When using plantuml, the PLANTUML_JAR_PATH tag should be used to specify the\n# path where java can find the plantuml.jar file. If left blank, it is assumed\n# PlantUML is not used or called during a preprocessing step. Doxygen will\n# generate a warning when it encounters a \\startuml command in this case and\n# will not generate output for the diagram.\n\nPLANTUML_JAR_PATH      =\n\n# When using plantuml, the PLANTUML_CFG_FILE tag can be used to specify a\n# configuration file for plantuml.\n\nPLANTUML_CFG_FILE      =\n\n# When using plantuml, the specified paths are searched for files specified by\n# the !include statement in a plantuml block.\n\nPLANTUML_INCLUDE_PATH  =\n\n# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of nodes\n# that will be shown in the graph. If the number of nodes in a graph becomes\n# larger than this value, doxygen will truncate the graph, which is visualized\n# by representing a node as a red box. Note that doxygen if the number of direct\n# children of the root node in a graph is already larger than\n# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. 
Also note that\n# the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH.\n# Minimum value: 0, maximum value: 10000, default value: 50.\n# This tag requires that the tag HAVE_DOT is set to YES.\n\nDOT_GRAPH_MAX_NODES    = 50\n\n# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the graphs\n# generated by dot. A depth value of 3 means that only nodes reachable from the\n# root by following a path via at most 3 edges will be shown. Nodes that lay\n# further from the root node will be omitted. Note that setting this option to 1\n# or 2 may greatly reduce the computation time needed for large code bases. Also\n# note that the size of a graph can be further restricted by\n# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction.\n# Minimum value: 0, maximum value: 1000, default value: 0.\n# This tag requires that the tag HAVE_DOT is set to YES.\n\nMAX_DOT_GRAPH_DEPTH    = 0\n\n# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent\n# background. This is disabled by default, because dot on Windows does not seem\n# to support this out of the box.\n#\n# Warning: Depending on the platform used, enabling this option may lead to\n# badly anti-aliased labels on the edges of a graph (i.e. they become hard to\n# read).\n# The default value is: NO.\n# This tag requires that the tag HAVE_DOT is set to YES.\n\nDOT_TRANSPARENT        = NO\n\n# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output\n# files in one run (i.e. multiple -o and -T options on the command line). This\n# makes dot run faster, but since only newer versions of dot (>1.8.10) support\n# this, this feature is disabled by default.\n# The default value is: NO.\n# This tag requires that the tag HAVE_DOT is set to YES.\n\nDOT_MULTI_TARGETS      = NO\n\n# If the GENERATE_LEGEND tag is set to YES doxygen will generate a legend page\n# explaining the meaning of the various boxes and arrows in the dot generated\n# graphs.\n# The default value is: YES.\n# This tag requires that the tag HAVE_DOT is set to YES.\n\nGENERATE_LEGEND        = YES\n\n# If the DOT_CLEANUP tag is set to YES, doxygen will remove the intermediate dot\n# files that are used to generate the various graphs.\n# The default value is: YES.\n# This tag requires that the tag HAVE_DOT is set to YES.\n\nDOT_CLEANUP            = YES\n"
  },
  {
    "path": "grasp_utils/robot_interface/README.md",
    "content": "# robot_interface\n\nROS2 package to use robot native interface\n\n## Install\n\nInstall dependency **ur_modern_driver**:\n\n```shell\ngit clone -b libur_modern_driver https://github.com/RoboticsYY/ur_modern_driver.git\ncd ur_modern_driver/libur_modern_driver\nmkdir build && cd build\ncmake .. && sudo make install\n```\n\nInstall dependency **ros2_ur_description**:\n\n```shell\nmkdir -p ~/ros2_ws/src && cd ~/ros2_ws/src\ngit clone https://github.com/RoboticsYY/ros2_ur_description.git\ncd .. && colcon build\n```\n\nInstall dependence **eigen3-cmake-module**:\n```shell\nsudo apt install ros-dashing-eigen3-cmake-module\n```\n\nInstall **robot_interface**:\n\nThe installation should refer to the installation of **ros2_grasp_library**.\n\n## Launch\n\nLaunch the UR robot control test executable:\n\n```shell\nros2 launch robot_interface ur_test.launch.py move:=true\n```\n\nLaunch the Rivz2 display:\n\n```shell\nros2 launch ur_description view_ur5_ros2.launch.py\n```\n\n## Generate Document\n\n```shell\ncd <path to root of ros2_grasp_library>/grasp_utils/robot_interface\n\ndoxygen Doxyfile\n```\n"
  },
  {
    "path": "grasp_utils/robot_interface/include/robot_interface/control_base.hpp",
    "content": "// Copyright (c) 2019 Intel Corporation. All Rights Reserved\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n//     http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n\n/**\n * @file control_base.hpp\n * @author Yu Yan\n * @date 29 Sep 2019\n * @brief Native robot control interface for visual manipulation.\n *\n * This file contains the control interface template to make the visual grasping. The interface is used \n * for the control between a PC and an industrial robot controller. Collision detection is not considered in this\n * interface. The specific behaviors are supposed to be filled with the communication protocal of an \n * industrial robot, which are usually specified by the robot manufacturors. \n */\n\n#pragma once\n\n#include <mutex>\n#include <thread>\n#include <Eigen/Geometry>\n#include <rclcpp/rclcpp.hpp>\n#include <tf2_eigen/tf2_eigen.h>\n#include <sensor_msgs/msg/joint_state.hpp>\n#include <geometry_msgs/msg/pose_stamped.hpp>\n#include <tf2_ros/static_transform_broadcaster.h>\n\n/**\n * @brief Data type to represent robot arm's end-effector pose in 3D cartesian space.\n * \n * @note <b>TCP</b> stands for Tool Center Point. Usually it refers to The location on the end effector \n * or tool of a robot manipulator whose position and orientation define the coordinates of the controlled object.\n */\nstruct TcpPose\n{\n  double x; /**< Translation along X axis. */\n  double y; /**< Translation along Y axis. */\n  double z; /**< Translation along Z axis. */\n  double alpha; /**< Euler angle of the rotation along X axis. */\n  double beta;  /**< Euler angle of the rotation along Y axis. */\n  double gamma; /**< Euler angle of the rotation along Z axis. 
*/\n};\n\n/**\n * @brief Robot arm control interface.\n */\nclass ArmControlBase: public rclcpp::Node\n{\npublic:\n\n  /**\n   * @brief Constructor of class #ArmControlBase.\n   * @param node_name The name of the ROS2 node.\n   * @param options ROS2 node options.\n   */\n  ArmControlBase(const std::string node_name, const rclcpp::NodeOptions & options)\n  : Node(node_name, options), broadcaster_(this)\n  {\n    joint_pub_ = this->create_publisher<sensor_msgs::msg::JointState>(\"/joint_states\", 1);\n    time_out_ = 15.0;\n\n    tf_msg_.header.frame_id = \"base\"; // Used to void TF_NO_FRAME_ID error, updated by user later\n    tf_msg_.child_frame_id = \"pose_goal\";\n    // Initialize rotation to avoid TF_DENORMALIZED_QUATERNION error\n    tf_msg_.transform.rotation.x = 0.0;\n    tf_msg_.transform.rotation.y = 0.0;\n    tf_msg_.transform.rotation.z = 0.0;\n    tf_msg_.transform.rotation.w = 1.0;\n    tf_thread_ = std::thread(&ArmControlBase::publishTFGoal, this);\n  }\n\n  /**\n   * @brief Default destructor of class #ArmControlBase.\n   */\n  virtual ~ArmControlBase()\n  {\n    rclcpp::shutdown();\n    tf_thread_.join();\n  }\n\n  /**\n   * @brief Move the robot end-effector to a goal pose (position and orientation) w.r.t the robot base in 3D Cartesian space.\n   * @param x Goal position on X dimension.\n   * @param y Goal position on Y dimension.\n   * @param z Goal position on Z dimension.\n   * @param alpha Goal rotation euler angle along X axis.\n   * @param beta Goal rotation euler angle along Y axis.\n   * @param gamma Goal rotation euler angle along Z axis.\n   * @param vel Max joint velocity. \n   * @param acc Max joint acceleration.\n   * @return If the robot successfully receives the \"move\" command, return True. Otherwise, return false.\n   */\n  virtual bool moveToTcpPose(double x, double y, double z, \n                             double alpha, double beta, double gamma, \n                             double vel, double acc) = 0;\n\n  /**\n   * @brief Move the robot end-effector to a goal pose (position and orientation) w.r.t the robot base in 3D Cartesian space.\n   * @param pose Goal pose as a Eigen transform (Isometry3d).\n   * @param vel Max joint velocity. \n   * @param acc Max joint acceleration.\n   * @return If the robot successfully receives the \"move\" command, return True. Otherwise, return false.\n   */\n  virtual bool moveToTcpPose(const Eigen::Isometry3d& pose, double vel, double acc);\n\n  /**\n   * @brief Move the robot end-effector to a goal pose (position and orientation) w.r.t the robot base in 3D Cartesian space.\n   * @param pose_stamped Goal pose as geometry_msgs/PoseStamped.\n   * @param vel Max joint velocity. \n   * @param acc Max joint acceleration.\n   * @return If the robot successfully receives the \"move\" command, return True. Otherwise, return false.\n   */\n  virtual bool moveToTcpPose(const geometry_msgs::msg::PoseStamped& pose_stamped, double vel, double acc);\n\n  /**\n   * @brief Move the robot to a joint value goal.\n   * @param joint_values Goal joint values, the number of joints depends on the robot arm model.\n   * @param vel Max joint velocity. \n   * @param acc Max joint acceleration.\n   * @return If the robot successfully receives the \"move\" command, return True. 
Otherwise, return false.\n   */\n  virtual bool moveToJointValues(const std::vector<double>& joint_values, double vel, double acc) = 0;\n\n  /**\n   * @brief Open the robot gripper and make it ready for grasping.\n   * @param distance How large the fingers of the gripper open.\n   * @return If the robot successfully receives the \"open\" command, return true. Otherwise, return false.\n   */\n  virtual bool open(const double distance = 0) = 0;\n\n  /**\n   * @brief Close the robot gripper and let it grasp an object.\n   * @param distance How large the fingers of the gripper close.\n   * @return If the robot successfully receives the \"close\" command, return true. Otherwise, return false.\n   */\n  virtual bool close(const double distance = 0) = 0;\n\n  /**\n   * @brief Make the robot arm to pick an object from a grasp pose w.r.t the robot base.\n   * \n   * This function defines a sequence of motions: \n   * -# Move the end-effector to a pose above the object.\n   * -# Open gripper.\n   * -# Stretch the end-effector along its Z axis to the grasp pose that gripper can grasp the object.\n   * -# Close gripper.\n   * -# Move the end-effector back to the pose above the object.\n   * \n   * @param x Position of grasp pose on X dimension.\n   * @param y Position of grasp pose on Y dimension.\n   * @param z Position of grasp pose on Z dimension.\n   * @param alpha Rotation euler angle of grasp pose along X axis.\n   * @param beta Rotation euler angle of grasp pose along Y axis.\n   * @param gamma Rotation euler angle of grasp pose along Z axis.\n   * @param vel Max joint velocity. \n   * @param acc Max joint acceleration.\n   * @param vel_scale Scale factor to slow down the end-effector velocity, when it stretch to or move back from the grasp pose.\n   * @param approach The stretch distance.\n   * @return If the robot successfully finishes the \"pick\" motions, return True. Otherwise, return false.\n   * @note The grasp pose should have Z axis point out from the end-effector link.\n   */\n  virtual bool pick(double x, double y, double z, \n                    double alpha, double beta, double gamma, \n                    double vel, double acc, double vel_scale, double approach);\n  \n    /**\n   * @brief Make the robot arm to pick an object from a grasp pose w.r.t the robot base.\n   * \n   * This function defines a sequence of motions: \n   * -# Move the end-effector to a pose above the object.\n   * -# Open gripper.\n   * -# Stretch the end-effector along its Z axis to the grasp pose that gripper can grasp the object.\n   * -# Close gripper.\n   * -# Move the end-effector back to the pose above the object.\n   * \n   * @param pose_stamped Pose received from the grasp planning algorithm. See also https://github.com/intel/ros2_grasp_library.\n   * @param vel Max joint velocity. \n   * @param acc Max joint acceleration.\n   * @param vel_scale Scale factor to slow down the end-effector velocity, when it stretch to or move back from the grasp pose.\n   * @param approach The stretch distance.\n   * @return If the robot successfully finishes the \"pick\" motions, return True. 
Otherwise, return false.\n   * @note The grasp pose should have Z axis point out from the end-effector link.\n   */\n  virtual bool pick(const geometry_msgs::msg::PoseStamped& pose_stamped, \n                    double vel, double acc, double vel_scale, double approach);\n\n  /**\n   * @brief Make the robot arm to place an object from a place pose w.r.t the robot base.\n   * \n   * This function defines a sequence of motions: \n   * -# Move the end-effector to a pre-place pose.\n   * -# Stretch the end-effector along its Z axis to the place pose.\n   * -# Open gripper.\n   * -# Move the end-effector back to the pre-place pose.\n   * \n   * @param x Position of place pose on X dimension.\n   * @param y Position of place pose on Y dimension.\n   * @param z Position of place pose on Z dimension.\n   * @param alpha Rotation euler angle of place pose along X axis.\n   * @param beta Rotation euler angle of place pose along Y axis.\n   * @param gamma Rotation euler angle of place pose along Z axis.\n   * @param vel Max joint velocity. \n   * @param acc Max joint acceleration.\n   * @param vel_scale Scale factor to slow down the end-effector velocity, when it stretch to or move back from the place pose.\n   * @param retract The retract distance from the place pose.\n   * @return If the robot successfully finishes the \"place\" motions, return True. Otherwise, return false.\n   * @note The place pose should have Z axis point out from the end-effector link.\n   */\n  virtual bool place(double x, double y, double z, \n                     double alpha, double beta, double gamma,\n                     double vel, double acc, double vel_scale, double retract);\n\n  /**\n   * @brief Make the robot arm to place an object from a place pose w.r.t the robot base.\n   * \n   * This function defines a sequence of motions: \n   * -# Move the end-effector to a pre-place pose.\n   * -# Stretch the end-effector along its Z axis to the place pose.\n   * -# Open gripper.\n   * -# Move the end-effector back to the pre-place pose.\n   * \n   * @param pose_stamped Pose of the end-effector to place an object.\n   * @param vel Max joint velocity. \n   * @param acc Max joint acceleration.\n   * @param vel_scale Scale factor to slow down the end-effector velocity, when it stretch to or retract back from the place pose.\n   * @param retract The retract distance from the place pose.\n   * @return If the robot successfully finishes the \"place\" motions, return True. Otherwise, return false.\n   * @note The place pose should have Z axis point out from the end-effector link.\n   */\n  virtual bool place(const geometry_msgs::msg::PoseStamped& pose_stamped,\n                     double vel, double acc, double vel_scale, double retract);\n\n  /**\n   * @brief Convert <b>geometry_msgs::msg::PoseStamped</b> to #TcpPose.\n   * \n   * @param pose_stamped Pose of the end-effector.\n   * @param tcp_pose Variable to store the converted result.\n   */\n  void toTcpPose(const geometry_msgs::msg::PoseStamped& pose_stamped, TcpPose& tcp_pose);\n\n  /**\n   * @brief Convert <b>Eigen::Isometry3d</b> to #TcpPose.\n   * \n   * @param pose Pose of the end-effector.\n   * @param tcp_pose Variable to store the converted result.\n   */\n  void toTcpPose(const Eigen::Isometry3d& pose, TcpPose& tcp_pose);\n\n  /**\n   * @brief Function to check if the end-effector arrived the goal pose.\n   * \n   * @param tcp_goal Goal pose of the end-effector.\n   * @return If the end-effector arrived the goal pose within a <b>time_out_</b> duration, return true. 
Otherwise, return false.\n   */\n  virtual bool checkTcpGoalArrived(Eigen::Isometry3d& tcp_goal);\n\n  /**\n   * @brief Function to check if the robot arm arrived at the joint value goal.\n   * \n   * @param joint_goal Joint value goal of the robot arm.\n   * @return If the robot arrived at the joint value goal within a <b>time_out_</b> duration, return true. Otherwise, return false.\n   */\n  virtual bool checkJointValueGoalArrived(const std::vector<double>& joint_goal);\n\n  /** \n   * @brief Parse arguments.\n   * \n   * This function is used to parse the communication or control configuration parameters. A common method is \n   * defining the configuration parameters in a .yaml file, then loading them as ROS2 node parameters and parsing them\n   * with the ROS2 node parameter client.\n   */\n  virtual void parseArgs() = 0;\n\n  /**\n   * @brief Start the control loop.\n   * \n   * This function is used to initialize the communication process and start the thread that reads and publishes the robot state.\n   */\n  virtual bool startLoop() = 0;\n\n  /**\n   * @brief Publish <b>tf_msg_</b>.\n   * \n   */\n  virtual void publishTFGoal();\n\n  /**\n   * @brief Update <b>tf_msg_</b>.\n   * @param pose_stamped Pose goal input to the move or pick/place commands.\n   */\n  void updateTFGoal(const geometry_msgs::msg::PoseStamped& pose_stamped);\n\n  /**\n   * @brief Rotate the unit vector along the Z axis, i.e. (0, 0, 1), by the assigned RPY Euler angles.\n   * @param alpha Rotation euler angle around X axis.\n   * @param beta Rotation euler angle around Y axis.\n   * @param gamma Rotation euler angle around Z axis.\n   * @return The rotated unit approach vector.\n   */\n  Eigen::Vector3d getUnitApproachVector(const double& alpha, const double& beta, const double& gamma);\n\nprotected:\n  /// Joint state publisher\n  rclcpp::Publisher<sensor_msgs::msg::JointState>::SharedPtr joint_pub_;\n  /// Joint names\n  std::vector<std::string> joint_names_;\n  /// Current end-effector pose\n  TcpPose tcp_pose_;\n  /// Current joint values\n  std::vector<double> joint_values_;\n  /// Mutex to guard the tcp_pose_ usage\n  std::mutex m_;\n  /// Time duration to finish a pick or place task\n  double time_out_;\n  /// Thread to publish tf pose\n  std::thread tf_thread_;\n  /// TF message converted from the pose stamped input to the move or pick/place commands.\n  geometry_msgs::msg::TransformStamped tf_msg_;\n  /// TF broadcaster\n  tf2_ros::StaticTransformBroadcaster broadcaster_;\n};"
  },
  {
    "path": "grasp_utils/robot_interface/include/robot_interface/control_ur.hpp",
    "content": "// Copyright (c) 2019 Intel Corporation. All Rights Reserved\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n//     http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n\n/**\n * @file ur_control.hpp\n */\n\n#pragma once\n\n#include <rclcpp/rclcpp.hpp>\n#include <robot_interface/control_base.hpp>\n#include <tf2/LinearMath/Quaternion.h>\n\n#include \"ur_modern_driver/log.h\"\n#include \"ur_modern_driver/pipeline.h\"\n#include \"ur_modern_driver/ur/commander.h\"\n#include \"ur_modern_driver/ur/factory.h\"\n#include \"ur_modern_driver/ur/messages.h\"\n#include \"ur_modern_driver/ur/parser.h\"\n#include \"ur_modern_driver/ur/producer.h\"\n#include \"ur_modern_driver/ur/rt_state.h\"\n#include \"ur_modern_driver/ur/state.h\"\n\nstatic const std::vector<std::string> JOINTS = { \"shoulder_pan_joint\", \"shoulder_lift_joint\", \"elbow_joint\",\n                                                 \"wrist_1_joint\",      \"wrist_2_joint\",       \"wrist_3_joint\" };\nstatic const std::string HOST = \"192.168.0.5\";\nstatic const bool SHUTDOWN_ON_DISCONNECT = true;\nstatic const int UR_SECONDARY_PORT = 30002;\nstatic const int UR_RT_PORT = 30003;\n\nstruct ProgArgs\n{\npublic:\n  std::string host;\n  std::vector<std::string> joint_names;\n  bool shutdown_on_disconnect;\n};\n\nclass IgnorePipelineStoppedNotifier : public INotifier\n{\npublic:\n  void started(std::string name)\n  {\n    LOG_INFO(\"Starting pipeline %s\", name.c_str());\n  }\n  void stopped(std::string name)\n  {\n    LOG_INFO(\"Stopping pipeline %s\", name.c_str());\n  }\n};\n\nclass ShutdownOnPipelineStoppedNotifier : public INotifier\n{\npublic:\n  void started(std::string name)\n  {\n    LOG_INFO(\"Starting pipeline %s\", name.c_str());\n  }\n  void stopped(std::string name)\n  {\n    LOG_INFO(\"Shutting down on stopped pipeline %s\", name.c_str());\n    rclcpp::shutdown();\n    exit(1);\n  }\n};\n\nclass URControl: public ArmControlBase, public URRTPacketConsumer\n{\npublic:\n  URControl(const std::string node_name, const rclcpp::NodeOptions & options = rclcpp::NodeOptions())\n  : ArmControlBase(node_name, options), gripper_powered_up_(false)\n  {\n    for (auto const& joint : JOINTS)\n    {\n      joint_names_.push_back(joint);\n    }\n  }\n\n  ~URControl()\n  {\n    rt_pl_->stop();\n    state_pl_->stop();\n    factory_.reset(nullptr);\n    notifier_ = nullptr;\n    LOG_INFO(\"UR control interface shut down.\");\n  }\n\n  // Overload ArmControlBase functions\n  virtual bool moveToTcpPose(double x, double y, double z, \n                             double alpha, double beta, double gamma, \n                             double vel, double acc);\n  \n  virtual bool moveToJointValues(const std::vector<double>& joint_values, double vel, double acc);\n\n  virtual bool open(const double distance = 0);\n\n  virtual bool close(const double distance = 0);\n\n  // Send URScript to ur robot controller\n  bool urscriptInterface(const std::string command_script);\n\n  // Start socket communication loop\n  bool startLoop();\n\n  // Overload URRTPacketConsumer functions\n  
virtual bool consume(RTState_V1_6__7& state);\n  virtual bool consume(RTState_V1_8& state);\n  virtual bool consume(RTState_V3_0__1& state);\n  virtual bool consume(RTState_V3_2__3& state);\n\n  virtual void setupConsumer()\n  {\n  }\n  virtual void teardownConsumer()\n  {\n  }\n  virtual void stopConsumer()\n  {\n  }\n\n  // Functions to publish joint states\n  bool publishJoints(RTShared& packet, rclcpp::Time t);\n  bool publish(RTShared& packet);\n\n  // Function to get tool pose\n  bool getTcpPose(RTShared& packet);\n\n  // Function to get current joint values\n  bool getJointValues(RTShared& packet);\n\n  // Parse parameters\n  void parseArgs();\n\nprivate:\n\n  ProgArgs args_;\n  std::string local_ip_;\n  std::unique_ptr<URFactory> factory_;\n\n  // Robot rt message\n  std::unique_ptr<URParser<RTPacket>> rt_parser_;\n  std::unique_ptr<URStream> rt_stream_;\n  std::unique_ptr<URProducer<RTPacket>> rt_prod_;\n  std::unique_ptr<URCommander> rt_commander_;\n  vector<IConsumer<RTPacket> *> rt_vec_;\n  std::unique_ptr<MultiConsumer<RTPacket>> rt_cons_;\n  std::unique_ptr<Pipeline<RTPacket>> rt_pl_;\n\n  INotifier *notifier_;\n\n  // Robot state message\n  std::unique_ptr<URParser<StatePacket>> state_parser_;\n  std::unique_ptr<URStream> state_stream_;\n  std::unique_ptr<URProducer<StatePacket>> state_prod_;\n  vector<IConsumer<StatePacket> *> state_vec_;\n  std::unique_ptr<MultiConsumer<StatePacket>> state_cons_;\n  std::unique_ptr<Pipeline<StatePacket>> state_pl_;\n\n  bool gripper_powered_up_;\n};"
  },
  {
    "path": "grasp_utils/robot_interface/launch/ur_test.launch.py",
    "content": "# Copyright (c) 2019 Intel Corporation. All Rights Reserved\n# \n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n# \n#     http://www.apache.org/licenses/LICENSE-2.0\n# \n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport os\n\nimport launch\nimport launch.actions\nimport launch.substitutions\nimport launch_ros.actions\nfrom ament_index_python.packages import get_package_share_directory\n\ndef generate_launch_description():\n\n    # .yaml file for configuring the parameters\n    yaml = os.path.join(\n        get_package_share_directory('robot_interface'), \n            'launch', 'ur_test.yaml'\n    )\n\n    return launch.LaunchDescription([\n\n        launch.actions.DeclareLaunchArgument(\n            \"move\",\n            default_value=[\"true\"],\n            description=\"If using the move command test\"\n        ),\n\n        launch_ros.actions.Node(\n            package='robot_interface', \n            node_executable='ur_test_state_publish', \n            output='screen', arguments=['__params:='+yaml],\n            condition=launch.conditions.UnlessCondition(launch.substitutions.LaunchConfiguration(\"move\"))),\n\n        launch_ros.actions.Node(\n            package='robot_interface', \n            node_executable='ur_test_move_command', \n            output='screen', arguments=['__params:='+yaml],\n            condition=launch.conditions.IfCondition(launch.substitutions.LaunchConfiguration(\"move\"))),        \n    ])"
  },
  {
    "path": "grasp_utils/robot_interface/launch/ur_test.yaml",
    "content": "ur_test:\n    ros__parameters:\n        host: \"192.168.0.5\"\n        shutdown_on_disconnect: true\n        joint_names: [\"shoulder_pan_joint\", \"shoulder_lift_joint\", \"elbow_joint\", \"wrist_1_joint\", \"wrist_2_joint\", \"wrist_3_joint\"]"
  },
  {
    "path": "grasp_utils/robot_interface/package.xml",
    "content": "<?xml version=\"1.0\"?>\n<?xml-model href=\"http://download.ros.org/schema/package_format3.xsd\" schematypens=\"http://www.w3.org/2001/XMLSchema\"?>\n<package format=\"3\">\n  <name>robot_interface</name>\n  <version>0.5.0</version>\n  <description>Native robot motion control interface</description>\n  <maintainer email=\"yu.yan@intel.com\">Yu Yan</maintainer>\n  <license>Apache License 2.0</license>\n\n  <buildtool_depend>ament_cmake</buildtool_depend>\n  <buildtool_depend>eigen3_cmake_module</buildtool_depend>\n\n  <buildtool_export_depend>eigen3_cmake_module</buildtool_export_depend>\n\n  <build_depend>rclcpp</build_depend>\n  <build_depend>tf2</build_depend>\n  <build_depend>tf2_eigen</build_depend>\n  <build_depend>eigen</build_depend>\n\n  <exec_depend>rclcpp</exec_depend>\n\n  <test_depend>ament_lint_auto</test_depend>\n  <test_depend>ament_lint_common</test_depend>\n\n  <build_export_depend>eigen</build_export_depend>\n\n  <export>\n    <build_type>ament_cmake</build_type>\n  </export>\n</package>\n"
  },
  {
    "path": "grasp_utils/robot_interface/src/control_base.cpp",
    "content": "// Copyright (c) 2019 Intel Corporation. All Rights Reserved\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n//     http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n\n/**\n * @file control_base.cpp \n */\n\n#include <robot_interface/control_base.hpp>\n#include <chrono>\n#include <thread>\n\nvoid ArmControlBase::publishTFGoal()\n{\n  while (rclcpp::ok())\n  {\n    broadcaster_.sendTransform(tf_msg_);\n    std::this_thread::sleep_for(std::chrono::milliseconds(100));\n  }\n}\n\nvoid ArmControlBase::updateTFGoal(const geometry_msgs::msg::PoseStamped& pose_stamped)\n{\n  std::unique_lock<std::mutex> lock(m_);\n  tf_msg_.transform.translation.x = pose_stamped.pose.position.x;\n  tf_msg_.transform.translation.y = pose_stamped.pose.position.y;\n  tf_msg_.transform.translation.z = pose_stamped.pose.position.z;\n  tf_msg_.transform.rotation.x = pose_stamped.pose.orientation.x;\n  tf_msg_.transform.rotation.y = pose_stamped.pose.orientation.y;\n  tf_msg_.transform.rotation.z = pose_stamped.pose.orientation.z;\n  tf_msg_.transform.rotation.w = pose_stamped.pose.orientation.w;\n  tf_msg_.header.stamp = this->now();\n  tf_msg_.header.frame_id = pose_stamped.header.frame_id;\n}\n\nbool ArmControlBase::moveToTcpPose(const Eigen::Isometry3d& pose, double vel, double acc)\n{\n  TcpPose tcp_pose;\n  toTcpPose(pose, tcp_pose);\n  return this->moveToTcpPose(tcp_pose.x, tcp_pose.y, tcp_pose.z, \n                             tcp_pose.alpha, tcp_pose.beta, tcp_pose.gamma, vel, acc);\n}\n\nbool ArmControlBase::moveToTcpPose(const geometry_msgs::msg::PoseStamped& pose_stamped, double vel, double acc)\n{\n  updateTFGoal(pose_stamped);\n\n  TcpPose tcp_pose;\n  toTcpPose(pose_stamped, tcp_pose);\n  return this->moveToTcpPose(tcp_pose.x, tcp_pose.y, tcp_pose.z, \n                             tcp_pose.alpha, tcp_pose.beta, tcp_pose.gamma, vel, acc);\n}\n\nvoid ArmControlBase::toTcpPose(const geometry_msgs::msg::PoseStamped& pose_stamped, TcpPose& tcp_pose)\n{\n  tcp_pose.x = pose_stamped.pose.position.x;\n  tcp_pose.y = pose_stamped.pose.position.y;\n  tcp_pose.z = pose_stamped.pose.position.z;\n\n  tf2::Matrix3x3 r(tf2::Quaternion(pose_stamped.pose.orientation.x, \n                    pose_stamped.pose.orientation.y, \n                    pose_stamped.pose.orientation.z, \n                    pose_stamped.pose.orientation.w));\n  r.getRPY(tcp_pose.alpha, tcp_pose.beta, tcp_pose.gamma);\n}\n\nvoid ArmControlBase::toTcpPose(const Eigen::Isometry3d& pose, TcpPose& tcp_pose)\n{\n  tcp_pose.x = pose.translation().x();\n  tcp_pose.y = pose.translation().y();\n  tcp_pose.z = pose.translation().z();\n\n  Eigen::Vector3d euler_angles = pose.rotation().matrix().eulerAngles(0, 1, 2);\n  tcp_pose.alpha = euler_angles[0];\n  tcp_pose.beta = euler_angles[1];\n  tcp_pose.gamma = euler_angles[2]; \n}\n\nEigen::Vector3d ArmControlBase::getUnitApproachVector(const double& alpha, const double& beta, const double& gamma)\n{\n  tf2::Quaternion q;\n  q.setRPY(alpha, beta, gamma);\n  tf2::Matrix3x3 r(q);\n\n  tf2::Vector3 approach_vector = r * tf2::Vector3(0, 0, 
1);\n  approach_vector = approach_vector.normalize();\n  return Eigen::Vector3d(approach_vector[0], approach_vector[1], approach_vector[2]);\n}\n\nbool ArmControlBase::pick(double x, double y, double z, \n                          double alpha, double beta, double gamma, \n                          double vel, double acc, double vel_scale, double approach)\n{\n  Eigen::Vector3d pre_grasp_origin = Eigen::Vector3d(x, y, z) - getUnitApproachVector(alpha, beta, gamma) * approach;\n\n  Eigen::Isometry3d grasp, orientation, pre_grasp;\n  orientation = Eigen::AngleAxisd(alpha, Eigen::Vector3d::UnitX())\n                * Eigen::AngleAxisd(beta, Eigen::Vector3d::UnitY())\n                * Eigen::AngleAxisd(gamma, Eigen::Vector3d::UnitZ());\n  grasp = Eigen::Translation3d(x, y, z) * orientation;\n  pre_grasp = Eigen::Translation3d(pre_grasp_origin) * orientation;\n\n  if (// Move to pre_grasp\n      moveToTcpPose(pre_grasp, vel, acc) &&\n      // Open gripper\n      open() &&\n      // Move to grasp\n      moveToTcpPose(grasp, vel*vel_scale, acc*vel_scale) &&\n      // Close gripper\n      close() &&\n      // Move to pre_grasp\n      moveToTcpPose(pre_grasp, vel*vel_scale, acc*vel_scale))\n  {\n    std::cout << \"Pick finished.\" << std::endl;\n    return true;\n  }\n  else\n  {\n    std::cerr << \"Pick failed.\" << std::endl;\n    return false;\n  }\n}\n\nbool ArmControlBase::pick(const geometry_msgs::msg::PoseStamped& pose_stamped, \n          double vel, double acc, double vel_scale, double approach)\n{\n  updateTFGoal(pose_stamped);\n\n  TcpPose tcp_pose;\n  toTcpPose(pose_stamped, tcp_pose);\n  return pick(tcp_pose.x, tcp_pose.y, tcp_pose.z, \n              tcp_pose.alpha, tcp_pose.beta, tcp_pose.gamma, vel, acc, vel_scale, approach);\n}\n\nbool ArmControlBase::place(double x, double y, double z, \n                           double alpha, double beta, double gamma,\n                           double vel, double acc, double vel_scale, double retract)\n{\n  Eigen::Vector3d pre_place_origin = Eigen::Vector3d(x, y, z) - getUnitApproachVector(alpha, beta, gamma) * retract;\n\n  Eigen::Isometry3d place, orientation, pre_place;\n  orientation = Eigen::AngleAxisd(alpha, Eigen::Vector3d::UnitX())\n                * Eigen::AngleAxisd(beta, Eigen::Vector3d::UnitY())\n                * Eigen::AngleAxisd(gamma, Eigen::Vector3d::UnitZ()); \n  place = Eigen::Translation3d(x, y, z) * orientation;\n  pre_place = Eigen::Translation3d(pre_place_origin) * orientation;\n\n  \n  if (// Move to pre_place\n      moveToTcpPose(pre_place, vel, acc) &&\n      // Move to place\n      moveToTcpPose(place, vel*vel_scale, acc*vel_scale) &&\n      // Open gripper\n      open() &&\n      // Move to pre_grasp\n      moveToTcpPose(pre_place, vel*vel_scale, acc*vel_scale))\n  {\n    std::cout << \"Place finished.\" << std::endl;\n    return true;\n  }\n  else\n  {\n    std::cerr << \"Place failed.\" << std::endl;\n    return false;\n  }\n}\n\nbool ArmControlBase::place(const geometry_msgs::msg::PoseStamped& pose_stamped, \n          double vel, double acc, double vel_scale, double retract)\n{\n  updateTFGoal(pose_stamped);\n\n  TcpPose tcp_pose;\n  toTcpPose(pose_stamped, tcp_pose);\n  return place(tcp_pose.x, tcp_pose.y, tcp_pose.z, \n               tcp_pose.alpha, tcp_pose.beta, tcp_pose.gamma, vel, acc, vel_scale, retract);\n}\n\nbool ArmControlBase::checkTcpGoalArrived(Eigen::Isometry3d& tcp_goal)\n{\n  bool wait = true;\n  bool arrived = false;\n\n  auto start = std::chrono::high_resolution_clock::now();\n  while(wait)\n  
{\n    std::unique_lock<std::mutex> lock(m_);\n    Eigen::Vector3d t(tcp_pose_.x, tcp_pose_.y, tcp_pose_.z);\n    if (tcp_goal.translation().isApprox(t, 0.01))\n    {\n      wait = false;\n      arrived = true;\n    }\n    else\n    {\n      auto finish = std::chrono::high_resolution_clock::now();\n      std::chrono::duration<double> elapsed = finish - start;\n      if (elapsed.count() > time_out_)\n      {\n        wait = false;\n        arrived = false;\n        std::cerr << \"Motion timeout\" << std::endl;\n        printf(\"Tcp pose: (%f %f %f %f %f %f). \\n\", tcp_pose_.x, tcp_pose_.y, tcp_pose_.z, \n                                        tcp_pose_.alpha, tcp_pose_.beta, tcp_pose_.gamma);\n      }\n    }\n  }\n  std::this_thread::sleep_for(std::chrono::milliseconds(500));\n  return arrived;\n}\n\nbool ArmControlBase::checkJointValueGoalArrived(const std::vector<double>& joint_goal)\n{\n  bool wait = true;\n  bool arrived = false;\n\n  if (joint_goal.size() != joint_values_.size())\n  {\n    std::cerr << \"Number of joints in the goal doesn't match the current joint state.\" << std::endl;\n    wait = false;\n  }\n\n  auto start = std::chrono::high_resolution_clock::now();\n  while(wait)\n  {\n    std::unique_lock<std::mutex> lock(m_);\n    const size_t num_joints = joint_goal.size();\n    Eigen::Map<const Eigen::VectorXd> goal(joint_goal.data(), num_joints);\n    Eigen::Map<const Eigen::VectorXd> current(joint_values_.data(), num_joints);\n    if (current.isApprox(goal, 0.01))\n    {\n      wait = false;\n      arrived = true;\n    }\n    else\n    {\n      auto finish = std::chrono::high_resolution_clock::now();\n      std::chrono::duration<double> elapsed = finish - start;\n      if (elapsed.count() > time_out_)\n      {\n        wait = false;\n        arrived = false;\n        std::cerr << \"Motion timeout\" << std::endl;\n        std::stringstream ss;\n        ss << \"Current joint values: \";\n        for (auto value : joint_values_)\n          ss << value << \" \";\n        std::cerr << ss.str() << std::endl;\n      }\n    }\n  }\n\n  std::this_thread::sleep_for(std::chrono::milliseconds(500));\n  return arrived;\n}"
  },
  {
    "path": "grasp_utils/robot_interface/src/control_ur.cpp",
    "content": "// Copyright (c) 2019 Intel Corporation. All Rights Reserved\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n//     http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n\n/**\n * @file ur_control.cpp \n */\n\n#include <robot_interface/control_ur.hpp>\n\nbool URControl::moveToTcpPose(double x, double y, double z, \n                              double alpha, double beta, double gamma, \n                              double vel, double acc)\n{\n  // Convert euler angles around (x, y, z) to rotation vector\n  tf2::Quaternion q;\n  q.setRPY(alpha, beta, gamma);\n  tf2Scalar angle = q.getAngle();\n  tf2::Vector3 axis = q.getAxis();\n  tf2::Vector3 rotation_vector = axis * angle;\n\n  // Get URScript command\n  std::string command_script = \"movel(p[\" +\n                               std::to_string(x) + \",\" + std::to_string(y) + \",\" + std::to_string(z) + \",\" +\n                               std::to_string(rotation_vector[0]) + \",\" + std::to_string(rotation_vector[1]) + \",\" + std::to_string(rotation_vector[2]) + \"],\" + \n                               std::to_string(vel) + \",\" + std::to_string(acc) + \")\\n\";\n\n  Eigen::Isometry3d pose_goal = Eigen::Translation3d(x, y, z)\n    * Eigen::AngleAxisd(alpha, Eigen::Vector3d::UnitX())\n    * Eigen::AngleAxisd(beta, Eigen::Vector3d::UnitY())\n    * Eigen::AngleAxisd(gamma, Eigen::Vector3d::UnitZ());\n\n  // Send command and check goal arrived           \n  return (urscriptInterface(command_script) && checkTcpGoalArrived(pose_goal));\n}\n\nbool URControl::moveToJointValues(const std::vector<double>& joint_values, double vel, double acc)\n{\n  // Get URScript command\n  std::string command_script = \"movej([\" +\n                               std::to_string(joint_values[0]) + \",\" + std::to_string(joint_values[1]) + \",\" + std::to_string(joint_values[2]) + \",\" +\n                               std::to_string(joint_values[3]) + \",\" + std::to_string(joint_values[4]) + \",\" + std::to_string(joint_values[5]) + \"],\" + \n                               std::to_string(vel) + \",\" + std::to_string(acc) + \")\\n\";\n\n  // Send command and check goal arrived           \n  return urscriptInterface(command_script) && checkJointValueGoalArrived(joint_values);;\n}\n\nbool URControl::open(const double distance)\n{\n  rt_commander_->setToolVoltage(static_cast<uint8_t>(24));\n  if (!gripper_powered_up_)\n  {\n    rt_commander_->setToolVoltage(static_cast<uint8_t>(24));\n    gripper_powered_up_ = true;\n    std::cout << \"Gripper powered up.\" << std::endl;    \n  }\n\n  rt_commander_->setDigitalOut(16, true);\n  rt_commander_->setDigitalOut(17, false);\n  std::this_thread::sleep_for(std::chrono::seconds(1));\n  return true;\n}\n\nbool URControl::close(const double distance)\n{\n  if (!gripper_powered_up_)\n  {\n    rt_commander_->setToolVoltage(static_cast<uint8_t>(24));\n    gripper_powered_up_ = true;\n    std::cout << \"Gripper powered up.\" << std::endl;\n  }\n\n  rt_commander_->setDigitalOut(16, false);\n  rt_commander_->setDigitalOut(17, true);\n  
std::this_thread::sleep_for(std::chrono::seconds(1));\n  return true;\n}\n\nbool URControl::urscriptInterface(const std::string command_script)\n{\n  bool res = rt_commander_->uploadProg(command_script);\n  if (!res)\n  {\n    LOG_ERROR(\"Program upload failed!\");\n  }\n\n  return res;\n}\n\nvoid URControl::parseArgs()\n{\n  // Initialize parameter client\n  auto parameters_client = std::make_shared<rclcpp::SyncParametersClient>(this);\n  while (!parameters_client->wait_for_service(std::chrono::seconds(1))) {\n    if (!rclcpp::ok()) {\n      RCLCPP_ERROR(this->get_logger(), \"Interrupted while waiting for the service. Exiting.\");\n      rclcpp::shutdown();\n    }\n    RCLCPP_INFO(this->get_logger(), \"service not available, waiting again...\");\n  }\n\n  // Get parameters\n  args_.host = parameters_client->get_parameter(\"host\", HOST);\n  args_.joint_names = parameters_client->get_parameter(\"joint_names\", JOINTS);\n  args_.shutdown_on_disconnect = parameters_client->get_parameter(\"shutdown_on_disconnect\", SHUTDOWN_ON_DISCONNECT);\n\n  // Print parameters\n  RCLCPP_INFO(this->get_logger(), args_.host.c_str());\n  std::stringstream ss;\n  for (auto & name : args_.joint_names)\n  {\n    ss << name << \" \";\n  }\n  RCLCPP_INFO(this->get_logger(), ss.str().c_str());\n  RCLCPP_INFO(this->get_logger(), std::to_string(args_.shutdown_on_disconnect).c_str());\n}\n\nbool URControl::startLoop()\n{\n  // Initialize socket communication\n  factory_.reset(new URFactory(args_.host));\n\n  notifier_ = nullptr;\n\n  if (args_.shutdown_on_disconnect)\n  {\n    LOG_INFO(\"Notifier: Pipeline disconnect will shut down the node\");\n    notifier_ = new ShutdownOnPipelineStoppedNotifier();\n  }\n  else\n  {\n    LOG_INFO(\"Notifier: Pipeline disconnect will be ignored.\");\n    notifier_ = new IgnorePipelineStoppedNotifier();\n  }\n\n  // RT packets\n  rt_parser_ = factory_->getRTParser();\n  rt_stream_.reset(new URStream(args_.host, UR_RT_PORT));\n  rt_prod_.reset(new URProducer<RTPacket>(*rt_stream_, *rt_parser_));\n  rt_commander_ = factory_->getCommander(*rt_stream_);\n  rt_vec_.push_back(this);\n  rt_cons_.reset(new MultiConsumer<RTPacket>(rt_vec_));\n  rt_pl_.reset(new Pipeline<RTPacket>(*rt_prod_, *rt_cons_, \"RTPacket\", *notifier_));\n\n  // Message packets\n  state_parser_ = factory_->getStateParser();\n  state_stream_.reset(new URStream(args_.host, UR_SECONDARY_PORT));\n  state_prod_.reset(new URProducer<StatePacket>(*state_stream_, *state_parser_));\n  state_cons_.reset(new MultiConsumer<StatePacket>(state_vec_));\n  state_pl_.reset(new Pipeline<StatePacket>(*state_prod_, *state_cons_, \"StatePacket\", *notifier_));\n\n  LOG_INFO(\"Starting main loop\");\n\n  rt_pl_->run();\n  state_pl_->run();\n\n  return true;\n}\n\nbool URControl::getTcpPose(RTShared& packet)\n{\n  auto tv = packet.tool_vector_actual;\n\n  tcp_pose_.x = tv.position.x;\n  tcp_pose_.y = tv.position.y;\n  tcp_pose_.z = tv.position.z;\n  tcp_pose_.alpha = tv.rotation.x;\n  tcp_pose_.beta = tv.rotation.y;\n  tcp_pose_.gamma = tv.rotation.z;\n\n  return true;\n}\n\nbool URControl::getJointValues(RTShared& packet)\n{\n  joint_values_.assign(packet.q_actual.begin(), packet.q_actual.end());\n\n  return true;\n}\n\nbool URControl::consume(RTState_V1_6__7& state)\n{\n  return publish(state) && getTcpPose(state) && getJointValues(state);\n}\nbool URControl::consume(RTState_V1_8& state)\n{\n  return publish(state) && getTcpPose(state) && getJointValues(state);\n}\nbool URControl::consume(RTState_V3_0__1& state)\n{\n  return publish(state) && getTcpPose(state) && 
getJointValues(state);\n}\nbool URControl::consume(RTState_V3_2__3& state)\n{\n  return publish(state) && getTcpPose(state) && getJointValues(state);\n}\n\nbool URControl::publish(RTShared& packet)\n{\n  return publishJoints(packet, rclcpp::Node::now());\n}\n\nbool URControl::publishJoints(RTShared& packet, rclcpp::Time t)\n{\n  sensor_msgs::msg::JointState joint_msg;\n  joint_msg.header.stamp = t;\n\n  joint_msg.name.assign(joint_names_.begin(), joint_names_.end());\n  joint_msg.position.assign(packet.q_actual.begin(), packet.q_actual.end());\n  joint_msg.velocity.assign(packet.qd_actual.begin(), packet.qd_actual.end());\n  joint_msg.effort.assign(packet.i_actual.begin(), packet.i_actual.end());\n\n  joint_pub_->publish(joint_msg);\n\n  return true;\n}"
  },
  {
    "path": "grasp_utils/robot_interface/test/ur_test_move_command.cpp",
    "content": "// Copyright (c) 2019 Intel Corporation. All Rights Reserved\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n//     http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n\n/**\n * @file ur_test.cpp \n */\n\n#include <robot_interface/control_ur.hpp>\n\nint main(int argc, char * argv[])\n{\n  rclcpp::init(argc, argv);\n\n  std::shared_ptr<ArmControlBase> arm_control(new URControl(\"ur_test\",  rclcpp::NodeOptions()\n                                                                          .allow_undeclared_parameters(true)\n                                                                          .automatically_declare_parameters_from_overrides(true)));\n\n  arm_control->parseArgs();\n  arm_control->startLoop();\n\n  rclcpp::sleep_for(std::chrono::seconds(2));\n\n  while(rclcpp::ok())\n  {\n    geometry_msgs::msg::PoseStamped pose_stamped;\n    pose_stamped.header.frame_id = \"base\";\n    pose_stamped.header.stamp = arm_control->now();\n    pose_stamped.pose.position.x = -0.068673; \n    pose_stamped.pose.position.y = -0.595636; \n    pose_stamped.pose.position.z = 0.201606;\n    pose_stamped.pose.orientation.x = -0.311507;\n    pose_stamped.pose.orientation.y =  0.950216;\n    pose_stamped.pose.orientation.z = -0.004305;\n    pose_stamped.pose.orientation.w =  0.005879;\n\n    arm_control->moveToTcpPose(pose_stamped, 0.3, 0.3);\n\n    pose_stamped.header.frame_id = \"base\";\n    pose_stamped.header.stamp = arm_control->now();\n    pose_stamped.pose.position.x = -0.157402; \n    pose_stamped.pose.position.y = -0.679509; \n    pose_stamped.pose.position.z = 0.094437;\n    pose_stamped.pose.orientation.x = 0.190600;\n    pose_stamped.pose.orientation.y = 0.948295;\n    pose_stamped.pose.orientation.z = 0.239947;\n    pose_stamped.pose.orientation.w = 0.082662;\n\n    arm_control->pick(pose_stamped, 1.05, 1.4, 0.5, 0.1);\n    arm_control->place(pose_stamped, 1.05, 1.4, 0.5, 0.1);\n\n    arm_control->moveToTcpPose(-0.350, -0.296, 0.12, 3.14159, 0, 0, 0.3, 0.3);\n\n    arm_control->moveToJointValues(std::vector<double>{0.87, -1.44, 1.68, -1.81, -1.56, 0}, 1.05, 1.4);\n\n    arm_control->pick(-0.153, -0.433, 0.145, 2.8, -0.144, 0.0245, 1.05, 1.4, 0.5, 0.1);\n    arm_control->place(-0.350, -0.296, 0.145, 3.14159, 0, 0, 1.05, 1.4, 0.5, 0.1);\n  }\n  rclcpp::shutdown();\n  return 0;\n}"
  },
  {
    "path": "grasp_utils/robot_interface/test/ur_test_state_publish.cpp",
    "content": "// Copyright (c) 2019 Intel Corporation. All Rights Reserved\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n//     http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n\n/**\n * @file ur_test.cpp \n */\n\n#include <robot_interface/control_ur.hpp>\n\nint main(int argc, char * argv[])\n{\n  rclcpp::init(argc, argv);\n\n  std::shared_ptr<ArmControlBase> arm_control(new URControl(\"ur_test\",  rclcpp::NodeOptions()\n                                                                          .allow_undeclared_parameters(true)\n                                                                          .automatically_declare_parameters_from_overrides(true)));\n\n  arm_control->parseArgs();\n  arm_control->startLoop();\n\n  rclcpp::spin(arm_control);\n  rclcpp::shutdown();\n  return 0;\n}"
  },
  {
    "path": "moveit_msgs_light/CMakeLists.txt",
    "content": "cmake_minimum_required(VERSION 3.5)\n\nproject(moveit_msgs)\n\nif(NOT CMAKE_CXX_STANDARD)\n  set(CMAKE_CXX_STANDARD 14)\nendif()\n\nif(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES \"Clang\")\n  add_compile_options(-Wall -Wextra -Wpedantic)\nendif()\n\nfind_package(ament_cmake REQUIRED)\nfind_package(rosidl_default_generators REQUIRED)\nfind_package(builtin_interfaces REQUIRED)\nfind_package(std_msgs REQUIRED)\nfind_package(geometry_msgs REQUIRED)\nfind_package(shape_msgs REQUIRED)\nfind_package(trajectory_msgs REQUIRED)\n\nset(msg_files\n  \"msg/CollisionObject.msg\"\n  \"msg/Grasp.msg\"\n  \"msg/GripperTranslation.msg\"\n  \"msg/MoveItErrorCodes.msg\"\n  \"msg/ObjectType.msg\"\n  \"msg/PlaceLocation.msg\"\n)\n\nset(srv_files\n  \"srv/GraspPlanning.srv\"\n)\n\nrosidl_generate_interfaces(${PROJECT_NAME}\n  ${msg_files}\n  ${srv_files}\n  DEPENDENCIES builtin_interfaces std_msgs geometry_msgs shape_msgs trajectory_msgs\n  ADD_LINTER_TESTS\n)\n\n# todo install ros1_bridge mapping rules\n\nament_export_dependencies(rosidl_default_runtime)\n\nament_package()\n"
  },
  {
    "path": "moveit_msgs_light/README.md",
    "content": "This's a temporary solution for ROS2 interface, till MoveIt is ported to ROS2.\n"
  },
  {
    "path": "moveit_msgs_light/msg/CollisionObject.msg",
    "content": "# a header, used for interpreting the poses\nstd_msgs/Header header\n\n# the id of the object (name used in MoveIt)\nstring id\n\n# The object type in a database of known objects\nmoveit_msgs/ObjectType type\n\n# the the collision geometries associated with the object;\n# their poses are with respect to the specified header\n\n# solid geometric primitives\nshape_msgs/SolidPrimitive[] primitives\ngeometry_msgs/Pose[] primitive_poses\n\n# meshes\nshape_msgs/Mesh[] meshes\ngeometry_msgs/Pose[] mesh_poses\n\n# bounding planes (equation is specified, but the plane can be oriented using an additional pose)\nshape_msgs/Plane[] planes\ngeometry_msgs/Pose[] plane_poses\n\n# Adds the object to the planning scene. If the object previously existed, it is replaced.\nbyte ADD=0\n\n# Removes the object from the environment entirely (everything that matches the specified id)\nbyte REMOVE=1\n\n# Append to an object that already exists in the planning scene. If the does not exist, it is added.\nbyte APPEND=2\n\n# If an object already exists in the scene, new poses can be sent (the geometry arrays must be left empty)\n# if solely moving the object is desired\nbyte MOVE=3\n\n# Operation to be performed\nbyte operation\n"
  },
  {
    "path": "moveit_msgs_light/msg/Grasp.msg",
    "content": "# This message contains a description of a grasp that would be used\n# with a particular end-effector to grasp an object, including how to\n# approach it, grip it, etc.  This message does not contain any\n# information about a \"grasp point\" (a position ON the object).\n# Whatever generates this message should have already combined\n# information about grasp points with information about the geometry\n# of the end-effector to compute the grasp_pose in this message.\n\n# A name for this grasp\nstring id\n\n# The internal posture of the hand for the pre-grasp\n# only positions are used\ntrajectory_msgs/JointTrajectory pre_grasp_posture\n\n# The internal posture of the hand for the grasp\n# positions and efforts are used\ntrajectory_msgs/JointTrajectory grasp_posture\n\n# The position of the end-effector for the grasp.  This is the pose of\n# the \"parent_link\" of the end-effector, not actually the pose of any\n# link *in* the end-effector.  Typically this would be the pose of the\n# most distal wrist link before the hand (end-effector) links began.\ngeometry_msgs/PoseStamped grasp_pose\n\n# The estimated probability of success for this grasp, or some other\n# measure of how \"good\" it is.\nfloat64 grasp_quality\n\n# The approach direction to take before picking an object\nGripperTranslation pre_grasp_approach\n\n# The retreat direction to take after a grasp has been completed (object is attached)\nGripperTranslation post_grasp_retreat\n\n# The retreat motion to perform when releasing the object; this information\n# is not necessary for the grasp itself, but when releasing the object,\n# the information will be necessary. The grasp used to perform a pickup\n# is returned as part of the result, so this information is available for \n# later use.\nGripperTranslation post_place_retreat\n\n# the maximum contact force to use while grasping (<=0 to disable)\nfloat32 max_contact_force\n\n# an optional list of obstacles that we have semantic information about\n# and that can be touched/pushed/moved in the course of grasping\nstring[] allowed_touch_objects\n"
  },
  {
    "path": "moveit_msgs_light/msg/GripperTranslation.msg",
    "content": "# defines a translation for the gripper, used in pickup or place tasks\n# for example for lifting an object off a table or approaching the table for placing\n\n# the direction of the translation\ngeometry_msgs/Vector3Stamped direction\n\n# the desired translation distance\nfloat32 desired_distance\n\n# the min distance that must be considered feasible before the\n# grasp is even attempted\nfloat32 min_distance\n"
  },
  {
    "path": "moveit_msgs_light/msg/MoveItErrorCodes.msg",
    "content": "int32 val\n\n# overall behavior\nint32 SUCCESS=1\nint32 FAILURE=99999\n\nint32 PLANNING_FAILED=-1\nint32 INVALID_MOTION_PLAN=-2\nint32 MOTION_PLAN_INVALIDATED_BY_ENVIRONMENT_CHANGE=-3\nint32 CONTROL_FAILED=-4\nint32 UNABLE_TO_AQUIRE_SENSOR_DATA=-5\nint32 TIMED_OUT=-6\nint32 PREEMPTED=-7\n\n# planning & kinematics request errors\nint32 START_STATE_IN_COLLISION=-10\nint32 START_STATE_VIOLATES_PATH_CONSTRAINTS=-11\n\nint32 GOAL_IN_COLLISION=-12\nint32 GOAL_VIOLATES_PATH_CONSTRAINTS=-13\nint32 GOAL_CONSTRAINTS_VIOLATED=-14\n\nint32 INVALID_GROUP_NAME=-15\nint32 INVALID_GOAL_CONSTRAINTS=-16\nint32 INVALID_ROBOT_STATE=-17\nint32 INVALID_LINK_NAME=-18\nint32 INVALID_OBJECT_NAME=-19\n\n# system errors\nint32 FRAME_TRANSFORM_FAILURE=-21\nint32 COLLISION_CHECKING_UNAVAILABLE=-22\nint32 ROBOT_STATE_STALE=-23\nint32 SENSOR_INFO_STALE=-24\n\n# kinematics errors\nint32 NO_IK_SOLUTION=-31\n"
  },
  {
    "path": "moveit_msgs_light/msg/ObjectType.msg",
    "content": "################################################## OBJECT ID #########################################################\n\n# Contains information about the type of a found object. Those two sets of parameters together uniquely define an\n# object\n\n# The key of the found object: the unique identifier in the given db\nstring key\n\n# The db parameters stored as a JSON/compressed YAML string. An object id does not make sense without the corresponding\n# database. E.g., in object_recognition, it can look like: \"{'type':'CouchDB', 'root':'http://localhost'}\"\n# There is no conventional format for those parameters and it's nice to keep that flexibility.\n# The object_recognition_core as a generic DB type that can read those fields\n# Current examples:\n# For CouchDB:\n#   type: 'CouchDB'\n#   root: 'http://localhost:5984'\n#   collection: 'object_recognition'\n# For SQL household database:\n#   type: 'SqlHousehold'\n#   host: 'wgs36'\n#   port: 5432\n#   user: 'willow'\n#   password: 'willow'\n#   name: 'household_objects'\n#   module: 'tabletop'\nstring db\n"
  },
  {
    "path": "moveit_msgs_light/msg/PlaceLocation.msg",
    "content": "# A name for this grasp\nstring id\n\n# The internal posture of the hand for the grasp\n# positions and efforts are used\ntrajectory_msgs/JointTrajectory post_place_posture\n\n# The position of the end-effector for the grasp relative to a reference frame \n# (that is always specified elsewhere, not in this message)\ngeometry_msgs/PoseStamped place_pose\n\n# The approach motion\nGripperTranslation pre_place_approach\n\n# The retreat motion\nGripperTranslation post_place_retreat\n\n# an optional list of obstacles that we have semantic information about\n# and that can be touched/pushed/moved in the course of grasping\nstring[] allowed_touch_objects\n"
  },
  {
    "path": "moveit_msgs_light/package.xml",
    "content": "<?xml version=\"1.0\"?>\n<?xml-model href=\"http://download.ros.org/schema/package_format2.xsd\" schematypens=\"http://www.w3.org/2001/XMLSchema\"?>\n<package format=\"3\">\n  <name>moveit_msgs</name>\n  <version>0.5.0</version>\n  <description>ROS2 messages definitions for MoveIt</description>\n  <maintainer email=\"sharron.liu@intel.com\">Sharron LIU</maintainer>\n  <license>Apache License 2.0</license>\n\n  <buildtool_depend>ament_cmake</buildtool_depend>\n  <buildtool_depend>rosidl_default_generators</buildtool_depend>\n  <build_depend>builtin_interfaces</build_depend>\n  <build_depend>std_msgs</build_depend>\n  <build_depend>geometry_msgs</build_depend>\n  <build_depend>shape_msgs</build_depend>\n  <build_depend>trajectory_msgs</build_depend>\n\n  <exec_depend>rosidl_default_runtime</exec_depend>\n  <exec_depend>builtin_interfaces</exec_depend>\n  <exec_depend>std_msgs</exec_depend>\n  <exec_depend>geometry_msgs</exec_depend>\n  <exec_depend>shape_msgs</exec_depend>\n  <exec_depend>trajectory_msgs</exec_depend>\n\n  <test_depend>ament_lint_common</test_depend>\n\n  <member_of_group>rosidl_interface_packages</member_of_group>\n\n  <export>\n    <build_type>ament_cmake</build_type>\n  </export>\n</package>\n"
  },
  {
    "path": "moveit_msgs_light/srv/GraspPlanning.srv",
    "content": "# Requests that grasp planning be performed for the target object\n# returns a list of candidate grasps to be tested and executed\n\n# the planning group used\nstring group_name\n\n# the object to be grasped\nCollisionObject target\n\n# the names of the relevant support surfaces (e.g. tables) in the collision map\n# can be left empty if no names are available\nstring[] support_surfaces\n\n# an optional list of grasps to be evaluated by the planner\nGrasp[] candidate_grasps\n\n# an optional list of obstacles that we have semantic information about\n# and that can be moved in the course of grasping\nCollisionObject[] movable_obstacles\n\n---\n\n# the list of planned grasps\nGrasp[] grasps\n\n# whether an error occurred\nMoveItErrorCodes error_code\n"
  }
]