Full Code of jesseweisberg/moveo_ros for AI

master b9282bdadbf2 cached
321 files
51.7 MB
509.2k tokens
1350 symbols
1 requests
Download .txt
Showing preview only (2,087K chars total). Download the full file or copy to clipboard to get everything.
Repository: jesseweisberg/moveo_ros
Branch: master
Commit: b9282bdadbf2
Files: 321
Total size: 51.7 MB

Directory structure:
gitextract_koctdad1/

├── LICENSE
├── README.md
├── moveo_moveit/
│   ├── CMakeLists.txt
│   ├── moveo_moveit_arduino/
│   │   ├── MultiStepperTest/
│   │   │   └── MultiStepperTest.ino
│   │   └── moveo_moveit_arduino.ino
│   ├── msg/
│   │   └── ArmJointState.msg
│   ├── package.xml
│   ├── scripts/
│   │   ├── README.md
│   │   └── moveo_objrec_publisher.py
│   └── src/
│       ├── move_group_interface_coor_1.cpp
│       └── moveit_convert.cpp
├── moveo_moveit_config/
│   ├── .setup_assistant
│   ├── CMakeLists.txt
│   ├── config/
│   │   ├── fake_controllers.yaml
│   │   ├── joint_limits.yaml
│   │   ├── kinematics.yaml
│   │   ├── moveo_urdf.srdf
│   │   └── ompl_planning.yaml
│   ├── launch/
│   │   ├── default_warehouse_db.launch
│   │   ├── demo.launch
│   │   ├── fake_moveit_controller_manager.launch.xml
│   │   ├── joystick_control.launch
│   │   ├── move_group.launch
│   │   ├── moveit.rviz
│   │   ├── moveit_rviz.launch
│   │   ├── moveo_urdf_moveit_controller_manager.launch.xml
│   │   ├── moveo_urdf_moveit_sensor_manager.launch.xml
│   │   ├── ompl_planning_pipeline.launch.xml
│   │   ├── planning_context.launch
│   │   ├── planning_pipeline.launch.xml
│   │   ├── run_benchmark_ompl.launch
│   │   ├── sensor_manager.launch.xml
│   │   ├── setup_assistant.launch
│   │   ├── trajectory_execution.launch.xml
│   │   ├── warehouse.launch
│   │   └── warehouse_settings.launch.xml
│   └── package.xml
├── moveo_urdf/
│   ├── CMakeLists.txt
│   ├── config/
│   │   └── joint_names_move_urdf.yaml
│   ├── launch/
│   │   ├── display.launch
│   │   ├── gazebo.launch
│   │   ├── gazebo_old.launch
│   │   └── gazebo_sdf.launch
│   ├── meshes/
│   │   ├── Gripper_Idol_Gear.STL
│   │   ├── Gripper_Idol_Gear_col.STL
│   │   ├── Gripper_Servo_Gear.STL
│   │   ├── Gripper_Servo_Gear_col.STL
│   │   ├── Link_1.STL
│   │   ├── Link_1_col.STL
│   │   ├── Link_2.STL
│   │   ├── Link_2_col.STL
│   │   ├── Link_3.STL
│   │   ├── Link_3_col.STL
│   │   ├── Link_4.STL
│   │   ├── Link_4_col.STL
│   │   ├── Link_5.STL
│   │   ├── Link_5_col.STL
│   │   ├── Pivot_Arm_Gripper_Idol.STL
│   │   ├── Pivot_Arm_Gripper_Idol_col.STL
│   │   ├── Pivot_Arm_Gripper_Servo.STL
│   │   ├── Pivot_Arm_Gripper_Servo_col.STL
│   │   ├── Tip_Gripper_Idol.STL
│   │   ├── Tip_Gripper_Idol_col.STL
│   │   ├── Tip_Gripper_Servo.STL
│   │   ├── Tip_Gripper_Servo_col.STL
│   │   ├── base_link.STL
│   │   └── base_link_col.STL
│   ├── package.xml
│   └── urdf/
│       ├── moveo_urdf.urdf
│       ├── moveo_urdf_new.urdf
│       └── moveo_urdf_og.urdf
└── object_detector_app/
    ├── LICENSE
    ├── README.md
    ├── __init__.py
    ├── environment.yml
    ├── object_detection/
    │   ├── BUILD
    │   ├── CONTRIBUTING.md
    │   ├── README.md
    │   ├── __init__.py
    │   ├── anchor_generators/
    │   │   ├── BUILD
    │   │   ├── __init__.py
    │   │   ├── grid_anchor_generator.py
    │   │   ├── grid_anchor_generator_test.py
    │   │   ├── multiple_grid_anchor_generator.py
    │   │   └── multiple_grid_anchor_generator_test.py
    │   ├── box_coders/
    │   │   ├── BUILD
    │   │   ├── __init__.py
    │   │   ├── faster_rcnn_box_coder.py
    │   │   ├── faster_rcnn_box_coder_test.py
    │   │   ├── keypoint_box_coder.py
    │   │   ├── keypoint_box_coder_test.py
    │   │   ├── mean_stddev_box_coder.py
    │   │   ├── mean_stddev_box_coder_test.py
    │   │   ├── square_box_coder.py
    │   │   └── square_box_coder_test.py
    │   ├── builders/
    │   │   ├── BUILD
    │   │   ├── __init__.py
    │   │   ├── anchor_generator_builder.py
    │   │   ├── anchor_generator_builder_test.py
    │   │   ├── box_coder_builder.py
    │   │   ├── box_coder_builder_test.py
    │   │   ├── box_predictor_builder.py
    │   │   ├── box_predictor_builder_test.py
    │   │   ├── hyperparams_builder.py
    │   │   ├── hyperparams_builder_test.py
    │   │   ├── image_resizer_builder.py
    │   │   ├── image_resizer_builder_test.py
    │   │   ├── input_reader_builder.py
    │   │   ├── input_reader_builder_test.py
    │   │   ├── losses_builder.py
    │   │   ├── losses_builder_test.py
    │   │   ├── matcher_builder.py
    │   │   ├── matcher_builder_test.py
    │   │   ├── model_builder.py
    │   │   ├── model_builder_test.py
    │   │   ├── optimizer_builder.py
    │   │   ├── optimizer_builder_test.py
    │   │   ├── post_processing_builder.py
    │   │   ├── post_processing_builder_test.py
    │   │   ├── preprocessor_builder.py
    │   │   ├── preprocessor_builder_test.py
    │   │   ├── region_similarity_calculator_builder.py
    │   │   └── region_similarity_calculator_builder_test.py
    │   ├── core/
    │   │   ├── BUILD
    │   │   ├── __init__.py
    │   │   ├── anchor_generator.py
    │   │   ├── balanced_positive_negative_sampler.py
    │   │   ├── balanced_positive_negative_sampler_test.py
    │   │   ├── batcher.py
    │   │   ├── batcher_test.py
    │   │   ├── box_coder.py
    │   │   ├── box_coder_test.py
    │   │   ├── box_list.py
    │   │   ├── box_list_ops.py
    │   │   ├── box_list_ops_test.py
    │   │   ├── box_list_test.py
    │   │   ├── box_predictor.py
    │   │   ├── box_predictor_test.py
    │   │   ├── data_decoder.py
    │   │   ├── keypoint_ops.py
    │   │   ├── keypoint_ops_test.py
    │   │   ├── losses.py
    │   │   ├── losses_test.py
    │   │   ├── matcher.py
    │   │   ├── matcher_test.py
    │   │   ├── minibatch_sampler.py
    │   │   ├── minibatch_sampler_test.py
    │   │   ├── model.py
    │   │   ├── post_processing.py
    │   │   ├── post_processing_test.py
    │   │   ├── prefetcher.py
    │   │   ├── prefetcher_test.py
    │   │   ├── preprocessor.py
    │   │   ├── preprocessor_test.py
    │   │   ├── region_similarity_calculator.py
    │   │   ├── region_similarity_calculator_test.py
    │   │   ├── standard_fields.py
    │   │   ├── target_assigner.py
    │   │   └── target_assigner_test.py
    │   ├── create_pascal_tf_record.py
    │   ├── create_pascal_tf_record_test.py
    │   ├── create_pet_tf_record.py
    │   ├── data/
    │   │   ├── mscoco_label_map.pbtxt
    │   │   ├── pascal_label_map.pbtxt
    │   │   └── pet_label_map.pbtxt
    │   ├── data_decoders/
    │   │   ├── BUILD
    │   │   ├── __init__.py
    │   │   ├── tf_example_decoder.py
    │   │   └── tf_example_decoder_test.py
    │   ├── eval.py
    │   ├── eval_util.py
    │   ├── evaluator.py
    │   ├── export_inference_graph.py
    │   ├── exporter.py
    │   ├── exporter_test.py
    │   ├── g3doc/
    │   │   ├── configuring_jobs.md
    │   │   ├── defining_your_own_model.md
    │   │   ├── detection_model_zoo.md
    │   │   ├── exporting_models.md
    │   │   ├── installation.md
    │   │   ├── preparing_inputs.md
    │   │   ├── running_locally.md
    │   │   ├── running_notebook.md
    │   │   ├── running_on_cloud.md
    │   │   └── running_pets.md
    │   ├── matchers/
    │   │   ├── BUILD
    │   │   ├── __init__.py
    │   │   ├── argmax_matcher.py
    │   │   ├── argmax_matcher_test.py
    │   │   ├── bipartite_matcher.py
    │   │   └── bipartite_matcher_test.py
    │   ├── meta_architectures/
    │   │   ├── BUILD
    │   │   ├── __init__.py
    │   │   ├── faster_rcnn_meta_arch.py
    │   │   ├── faster_rcnn_meta_arch_test.py
    │   │   ├── faster_rcnn_meta_arch_test_lib.py
    │   │   ├── rfcn_meta_arch.py
    │   │   ├── rfcn_meta_arch_test.py
    │   │   ├── ssd_meta_arch.py
    │   │   └── ssd_meta_arch_test.py
    │   ├── models/
    │   │   ├── BUILD
    │   │   ├── __init__.py
    │   │   ├── faster_rcnn_inception_resnet_v2_feature_extractor.py
    │   │   ├── faster_rcnn_inception_resnet_v2_feature_extractor_test.py
    │   │   ├── faster_rcnn_resnet_v1_feature_extractor.py
    │   │   ├── faster_rcnn_resnet_v1_feature_extractor_test.py
    │   │   ├── feature_map_generators.py
    │   │   ├── feature_map_generators_test.py
    │   │   ├── ssd_feature_extractor_test.py
    │   │   ├── ssd_inception_v2_feature_extractor.py
    │   │   ├── ssd_inception_v2_feature_extractor_test.py
    │   │   ├── ssd_mobilenet_v1_feature_extractor.py
    │   │   └── ssd_mobilenet_v1_feature_extractor_test.py
    │   ├── object_detection_tutorial.ipynb
    │   ├── protos/
    │   │   ├── BUILD
    │   │   ├── __init__.py
    │   │   ├── anchor_generator.proto
    │   │   ├── anchor_generator_pb2.py
    │   │   ├── argmax_matcher.proto
    │   │   ├── argmax_matcher_pb2.py
    │   │   ├── bipartite_matcher.proto
    │   │   ├── bipartite_matcher_pb2.py
    │   │   ├── box_coder.proto
    │   │   ├── box_coder_pb2.py
    │   │   ├── box_predictor.proto
    │   │   ├── box_predictor_pb2.py
    │   │   ├── eval.proto
    │   │   ├── eval_pb2.py
    │   │   ├── faster_rcnn.proto
    │   │   ├── faster_rcnn_box_coder.proto
    │   │   ├── faster_rcnn_box_coder_pb2.py
    │   │   ├── faster_rcnn_pb2.py
    │   │   ├── grid_anchor_generator.proto
    │   │   ├── grid_anchor_generator_pb2.py
    │   │   ├── hyperparams.proto
    │   │   ├── hyperparams_pb2.py
    │   │   ├── image_resizer.proto
    │   │   ├── image_resizer_pb2.py
    │   │   ├── input_reader.proto
    │   │   ├── input_reader_pb2.py
    │   │   ├── losses.proto
    │   │   ├── losses_pb2.py
    │   │   ├── matcher.proto
    │   │   ├── matcher_pb2.py
    │   │   ├── mean_stddev_box_coder.proto
    │   │   ├── mean_stddev_box_coder_pb2.py
    │   │   ├── model.proto
    │   │   ├── model_pb2.py
    │   │   ├── optimizer.proto
    │   │   ├── optimizer_pb2.py
    │   │   ├── pipeline.proto
    │   │   ├── pipeline_pb2.py
    │   │   ├── post_processing.proto
    │   │   ├── post_processing_pb2.py
    │   │   ├── preprocessor.proto
    │   │   ├── preprocessor_pb2.py
    │   │   ├── region_similarity_calculator.proto
    │   │   ├── region_similarity_calculator_pb2.py
    │   │   ├── square_box_coder.proto
    │   │   ├── square_box_coder_pb2.py
    │   │   ├── ssd.proto
    │   │   ├── ssd_anchor_generator.proto
    │   │   ├── ssd_anchor_generator_pb2.py
    │   │   ├── ssd_pb2.py
    │   │   ├── string_int_label_map.proto
    │   │   ├── string_int_label_map_pb2.py
    │   │   ├── train.proto
    │   │   └── train_pb2.py
    │   ├── samples/
    │   │   ├── cloud/
    │   │   │   └── cloud.yml
    │   │   └── configs/
    │   │       ├── faster_rcnn_inception_resnet_v2_atrous_pets.config
    │   │       ├── faster_rcnn_resnet101_pets.config
    │   │       ├── faster_rcnn_resnet101_voc07.config
    │   │       ├── faster_rcnn_resnet152_pets.config
    │   │       ├── faster_rcnn_resnet50_pets.config
    │   │       ├── rfcn_resnet101_pets.config
    │   │       ├── ssd_inception_v2_pets.config
    │   │       └── ssd_mobilenet_v1_pets.config
    │   ├── ssd_mobilenet_v1_coco_11_06_2017/
    │   │   └── frozen_inference_graph.pb
    │   ├── test_images/
    │   │   └── image_info.txt
    │   ├── train.py
    │   ├── trainer.py
    │   ├── trainer_test.py
    │   └── utils/
    │       ├── BUILD
    │       ├── __init__.py
    │       ├── category_util.py
    │       ├── category_util_test.py
    │       ├── dataset_util.py
    │       ├── dataset_util_test.py
    │       ├── label_map_util.py
    │       ├── label_map_util_test.py
    │       ├── learning_schedules.py
    │       ├── learning_schedules_test.py
    │       ├── metrics.py
    │       ├── metrics_test.py
    │       ├── np_box_list.py
    │       ├── np_box_list_ops.py
    │       ├── np_box_list_ops_test.py
    │       ├── np_box_list_test.py
    │       ├── np_box_ops.py
    │       ├── np_box_ops_test.py
    │       ├── object_detection_evaluation.py
    │       ├── object_detection_evaluation_test.py
    │       ├── ops.py
    │       ├── ops_test.py
    │       ├── per_image_evaluation.py
    │       ├── per_image_evaluation_test.py
    │       ├── shape_utils.py
    │       ├── shape_utils_test.py
    │       ├── static_shape.py
    │       ├── static_shape_test.py
    │       ├── test_utils.py
    │       ├── test_utils_test.py
    │       ├── variables_helper.py
    │       ├── variables_helper_test.py
    │       ├── visualization_utils.py
    │       └── visualization_utils_test.py
    ├── object_detection_app.py
    ├── object_detection_multithreading.py
    └── utils/
        ├── __init__.py
        ├── app_utils.py
        └── test_app_utils.py

================================================
FILE CONTENTS
================================================

================================================
FILE: LICENSE
================================================
MIT License

Copyright (c) 2018 Jesse Weisberg

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


================================================
FILE: README.md
================================================
# moveo_ros
ROS packages that can be used to plan and execute motion trajectories for the BCN3D Moveo robotic arm in simulation and real-life.
### [Video Demo Here!](https://youtu.be/2RcTTqs17O8)

- **_New Feature_: Object-Specific Pick and Place** (With an ordinary webcam, Tensorflow, OpenCV, and ROS, you can 'pick and place' (or sort) objects that are detected in real-time)
	- **[Video Demo](https://youtu.be/kkUbyFa2MWc)**
	- **[How to Use](https://github.com/jesseweisberg/moveo_ros/tree/master/moveo_moveit/scripts)**



## How to Use:

### Getting the BCN3D Simulation Working with Motion Planning
![moveit_screenshot.png](/moveit_screenshot.png)

1. Make sure you have ROS installed correctly with a functioning workspace-- I used ROS Kinetic on Ubuntu 16.04 (if you have a different distro, you may need to change some things).  I currently have 'moveo_ros' in the 'src' folder of my catkin workspace.

2. To plan and execute trajectories for the Moveo in simulation (RVIZ with Moveit plugin), execute the following terminal command:
	```
	roslaunch moveo_moveit_config demo.launch
	```

3. Once the window loads, in the bottom-left corner check "Allow Approximate IK Solutions."  Then click on the "Planning" tab in the MotionPlanning panel of RVIZ.  Select a new goal state by either dragging the interactive marker (light blue ball on the end effector) or under "Select Goal State."  Once goal state is updated, "Plan and Execute" will plan and execute the trajectory from the start state to the updated goal state.


### Moving the real robot, synced with the simulated robot's trajectories.
4. Make sure you download the AccelStepper ([AccelStepper Library Download](http://www.airspayce.com/mikem/arduino/AccelStepper/AccelStepper-1.57.zip)) and ros_lib ([rosserial-arduino tutorial](http://wiki.ros.org/rosserial_arduino/Tutorials/Arduino%20IDE%20Setup)) libraries into your Arduino environment.
	- If ros_lib already exists in your Arduino libraries (<Arduino sketchbook>/libraries), follow the last troubleshooting tip or you'll get an error saying "ArmJointState.h: no such file".  ROS makes you remove ros_lib and regenerate it every time you introduce a new custom message.

5. Change the pin layout between your robot and the RAMPS 1.4 in **'moveo_moveit_arduino.ino'** and upload the file to your Arduino (I'm using MEGA 2560).  Make sure the robot and the simulation are in the same position (to set the simulation upright initially, select "Upright" from "Select Goal States" in RVIZ).

6. In 'moveit_convert.cpp' replace the stepsPerRevolution array with the steps/revolution (or microsteps/revolution) of each of your motors.  (Note: if you don't already know these values, you can experimentally get how many microsteps/revolution your motors have using the MultiStepperTest.ino and recording/eyeballing the results)

7. With the simulation already running, execute each of the following commands in its own, separate terminal: 
	- ``` rosrun rosserial_python serial_node.py /dev/ttyUSB0 ```(establishes rosserial node that communicates with Arduino)
	- ```rosrun moveo_moveit moveit_convert ``` (converts simulation joint_state rotations to steps and publishes on the /joint_steps topic, which the Arduino script subscribes to)
	- ```rostopic pub gripper_angle std_msgs/UInt16 <angle 0-180> ```(publishes gripper_angle)

**Now, whatever trajectories are planned and executed in simulation are echoed on the real robot.**

## About Directories
### moveo_urdf
Contains the URDF (Unified Robot Description File) for the BCN3D Moveo. Necessary for simulation in RVIZ and moveit configuration.

### moveo_moveit_config
Configuration for moveit, a motion planning framework that has a plugin in RVIZ, which is what we are using here.

### moveo_moveit
- _moveit_convert.cpp_: Converts simulation 'joint\_state' rotations (from the 'move\_group/fake\_controller\_joint\_states' topic) to steps and publishes on the /joint\_steps topic.  Joint\_steps is an array of 6 Int16 values (though we only have 5 joints in this case) that represent the accumulated steps executed by each joint since the moveit\_convert node has started running. 

- _move\_group\_interface\_coor\_1.cpp_: Can hardcode a pose/position for the end effector in the script and plan/execute a trajectory there.  Also reads/outputs the current pose/position of the end effector.

## Troubleshooting
- After step 7, there should be 3 new topics created: 
	- **/joint\_steps**: steps necessary to move each motor to desired position
	- **/joint\_steps\_feedback**: same as /joint_steps, except published back by arduino to check that information is being received by Arduino correctly 
	- **/gripper\_angle**: current angle of the gripper

- **To move Moveo from the command line:**
	- ```rostopic pub joint_steps moveo_moveit/ArmJointState <Joint1 Joint2 Joint3 Joint4 Joint5 0>```  
	- Change "Joint1, Joint2, etc." to the number of steps you want each joint to move.

- **Use ```rostopic list``` and search for these topics to check if they are currently running**

- **Use ```rostopic echo /<topic>``` to view the data on \<topic> in your terminal** 

- If you get the following ```"error: moveo_moveit/ArmJointState.h: No such file or directory"```, perform the following steps in terminal:
	```
	cd <Arduino sketchbook>/libraries
	rm -rf ros_lib 
	rosrun rosserial_arduino make_libraries.py .
	```
	- More info on the ROS wiki: 
		- Section 2.2 here: (http://wiki.ros.org/rosserial_arduino/Tutorials/Arduino%20IDE%20Setup)
		- (http://wiki.ros.org/rosserial/Tutorials/Adding%20Other%20Messages)
	
- Here is my current layout and wiring schematic for reference:
![aerial_robot_sketch.png](/aerial_robot_sketch.png)


================================================
FILE: moveo_moveit/CMakeLists.txt
================================================
cmake_minimum_required(VERSION 2.8.3)
project(moveo_moveit)

add_compile_options(-std=c++11)

find_package(Eigen3 REQUIRED)

# Eigen 3.2 (Wily) only provides EIGEN3_INCLUDE_DIR, not EIGEN3_INCLUDE_DIRS
if(NOT EIGEN3_INCLUDE_DIRS)
  set(EIGEN3_INCLUDE_DIRS ${EIGEN3_INCLUDE_DIR})
endif()

# NOTE: message_generation, sensor_msgs and std_msgs must be listed here because
# generate_messages() below declares them as dependencies; without them the
# message-generation step fails at configure time.
find_package(catkin REQUIRED
  COMPONENTS
    roscpp
    rospy
    message_generation
    sensor_msgs
    std_msgs
    moveit_core
    moveit_ros_planning
    moveit_ros_planning_interface
    pluginlib
    geometric_shapes
    moveit_visual_tools
)

find_package(Boost REQUIRED system filesystem date_time thread)


################################################
## Declare ROS messages, services and actions ##
################################################

## Generate messages in the 'msg' folder
add_message_files(
  FILES
  ArmJointState.msg
)


## Generate added messages and services with any dependencies listed here
generate_messages(
  DEPENDENCIES
  sensor_msgs
  std_msgs
)

# message_runtime is required so downstream packages (and rosserial code
# generation) can use the ArmJointState message at run time.
catkin_package(
  CATKIN_DEPENDS
    message_runtime
    moveit_core
    moveit_ros_planning_interface
    interactive_markers
  DEPENDS
    EIGEN3
)

###########
## Build ##
###########

include_directories(SYSTEM ${Boost_INCLUDE_DIR} ${EIGEN3_INCLUDE_DIRS})
include_directories(${catkin_INCLUDE_DIRS})
link_directories(${catkin_LIBRARY_DIRS})

## Declare a C++ executable
add_executable(moveit_convert src/moveit_convert.cpp)
add_executable(move_group_1 src/move_group_interface_coor_1.cpp)

# Ensure generated message headers (ArmJointState.h) exist before compiling.
# ${PROJECT_NAME}_generate_messages_cpp avoids hard-coding the target name.
add_dependencies(moveit_convert ${PROJECT_NAME}_generate_messages_cpp)

## Install the Python helper script so `rosrun moveo_moveit moveo_objrec_publisher.py` works
catkin_install_python(PROGRAMS scripts/moveo_objrec_publisher.py
  DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION})

## Specify libraries to link a library or executable target against
target_link_libraries(moveit_convert
  ${catkin_LIBRARIES}
)
target_link_libraries(move_group_1
  ${catkin_LIBRARIES}
)


================================================
FILE: moveo_moveit/moveo_moveit_arduino/MultiStepperTest/MultiStepperTest.ino
================================================
// MultiStepper.pde
// -*- mode: C++ -*-
// Use MultiStepper class to manage multiple steppers and make them all move to 
// the same position at the same time for linear 2d (or 3d) motion.

#include <AccelStepper.h>
#include <MultiStepper.h>

// Joint 1
#define E1_STEP_PIN 36
#define E1_DIR_PIN 34
#define E1_ENABLE_PIN 30

// Joint 2
#define Z_STEP_PIN         46
#define Z_DIR_PIN          48
#define Z_ENABLE_PIN       62
#define Z_MIN_PIN          18
#define Z_MAX_PIN          19

// Joint 3
#define Y_STEP_PIN         60
#define Y_DIR_PIN          61
#define Y_ENABLE_PIN       56
#define Y_MIN_PIN          14
#define Y_MAX_PIN          15

// Joint 4
#define X_STEP_PIN 54
#define X_DIR_PIN 55
#define X_ENABLE_PIN 38

// Joint 5 
#define E0_STEP_PIN 26
#define E0_DIR_PIN 28
#define E0_ENABLE_PIN 24


// Stepper driver instances, one per arm joint, using AccelStepper driver
// mode 1 (DRIVER: dedicated STEP + DIR pins on the RAMPS 1.4 board).
// Alas its not possible to build an array of these with different pins for each :-(
AccelStepper joint1(1,E1_STEP_PIN, E1_DIR_PIN);
AccelStepper joint2(1,Z_STEP_PIN, Z_DIR_PIN);
AccelStepper joint3(1,Y_STEP_PIN, Y_DIR_PIN);
AccelStepper joint4(1,X_STEP_PIN, X_DIR_PIN);
AccelStepper joint5(1, E0_STEP_PIN, E0_DIR_PIN);

// Coordinator that moves all registered steppers so they arrive together.
// Up to 10 steppers can be handled as a group by MultiStepper
MultiStepper steppers;

// Scratch test value (currently unused below); comment suggests it was for
// experimenting with uint8 -> long conversion.
unsigned int x = 1000;

// One-time hardware init: opens the serial port, caps each joint's speed
// (steps/second, tuned per motor), and registers all steppers with the
// MultiStepper group so they can be commanded together.
void setup() {
  Serial.begin(250000);

  // Configure each stepper (max speed in steps/sec; values tuned per joint)
  joint1.setMaxSpeed(1500);
  joint2.setMaxSpeed(750);
  joint3.setMaxSpeed(2000);
  joint4.setMaxSpeed(500);
  joint5.setMaxSpeed(1000);

  // Then give them to MultiStepper to manage
  steppers.addStepper(joint1);
  steppers.addStepper(joint2);
  steppers.addStepper(joint3);
  steppers.addStepper(joint4);
  steppers.addStepper(joint5);
}

// Test loop used to measure microsteps/revolution experimentally: set a
// positions[] entry to a step count, watch how far the joint turns, and
// record the ratio (see the per-line notes).  All entries are currently 0,
// so as checked in the robot does not move.
void loop() {
  long positions[5]; // Array of desired stepper positions (must be long for MultiStepper)

  // Back of the envelope calculation for microsteps/revolution, where positions[i] is the number of steps (or microsteps).
  positions[0] = 0; //4100 microsteps is 1/8 revolutions ----> 32800 microsteps/rev
  positions[1] = 0; //2000 is 40/360 revolutions ---> 18000 microsteps/rev
  positions[2] = 0; //4000 is 20/360 revolutions ---> 72000 microsteps/rev
  positions[3] = 0; //820 is 1/4 revolution (200steps/revolution * 16microsteps/step (since microstepping) ~= 32800 microsteps/rev)
  positions[4] = 0; //2000 is 50/360 revolution ---> 14400
  
  steppers.moveTo(positions);
  steppers.runSpeedToPosition(); // Blocks until all are in position
  delay(1);
  
  // Move to a different coordinate (second leg of the back-and-forth test)
  positions[0] = 0;
  positions[1] = 0;
  positions[2] = 0;
  positions[3] = 0;
  positions[4] = 0;
  steppers.moveTo(positions);
  steppers.runSpeedToPosition(); // Blocks until all are in position
  delay(1);
}


================================================
FILE: moveo_moveit/moveo_moveit_arduino/moveo_moveit_arduino.ino
================================================
/* Purpose: This sketch uses ROS as well as MultiStepper, AccelStepper, and Servo libraries to control the 
 * BCN3D Moveo robotic arm. In this setup, a Ramps 1.4 shield is used on top of an Arduino Mega 2560.  
 * Subscribing to the following ROS topics: 1) joint_steps, 2) gripper_angle
 *    1) joint_steps is computed from the simulation in PC and sent Arduino via rosserial.  It contains
 *       the steps (relative to the starting position) necessary for each motor to move to reach the goal position.
 *    2) gripper_angle contains the necessary gripper angle to grasp the object when the goal state is reached 
 * 
 * Publishing to the following ROS topics: joint_steps_feedback
 *    1) joint_steps_feedback is a topic used for debugging to make sure the Arduino is receiving the joint_steps data
 *       accurately
 *       
 * Author: Jesse Weisberg
 */
#if (ARDUINO >= 100)
  #include <Arduino.h>
#else
  #include <WProgram.h>
#endif
#include <ros.h>

#include <moveo_moveit/ArmJointState.h>
#include <Servo.h> 
#include <std_msgs/Bool.h>
#include <std_msgs/String.h>
#include <math.h>
#include <std_msgs/Int16.h>
#include <std_msgs/UInt16.h>
#include <AccelStepper.h>
#include <MultiStepper.h>

// Joint 1
#define E1_STEP_PIN        36
#define E1_DIR_PIN         34
#define E1_ENABLE_PIN      30

// Joint 2
#define Z_STEP_PIN         46
#define Z_DIR_PIN          48
#define Z_ENABLE_PIN       62
#define Z_MIN_PIN          18
#define Z_MAX_PIN          19

// Joint 3
#define Y_STEP_PIN         60
#define Y_DIR_PIN          61
#define Y_ENABLE_PIN       56
#define Y_MIN_PIN          14
#define Y_MAX_PIN          15

// Joint 4
#define X_STEP_PIN         54
#define X_DIR_PIN          55
#define X_ENABLE_PIN       38

// Joint 5 
#define E0_STEP_PIN        26
#define E0_DIR_PIN         28
#define E0_ENABLE_PIN      24

// Stepper driver instances, one per arm joint, AccelStepper mode 1
// (DRIVER: dedicated STEP + DIR pins on the RAMPS 1.4 board).
AccelStepper joint1(1,E1_STEP_PIN, E1_DIR_PIN);
AccelStepper joint2(1,Z_STEP_PIN, Z_DIR_PIN);
AccelStepper joint3(1,Y_STEP_PIN, Y_DIR_PIN);
AccelStepper joint4(1,X_STEP_PIN, X_DIR_PIN);
AccelStepper joint5(1, E0_STEP_PIN, E0_DIR_PIN);

Servo gripper;          // hobby servo driving the gripper (attached to pin 11 in setup)
MultiStepper steppers;  // coordinates the five steppers so they finish together

// Latest command received from ROS: joint_step[0..4] are target step counts
// for joints 1-5, joint_step[5] is the gripper angle (0-180, see arm_cb).
int joint_step[6];
int joint_status = 0;   // set to 1 by arm_cb when a new command arrives; cleared in loop()

ros::NodeHandle nh;
std_msgs::Int16 msg;

//instantiate publisher (for debugging purposes)
//ros::Publisher steps("joint_steps_feedback",&msg);

// ROS callback for the "joint_steps" topic: copies the six target values from
// the incoming ArmJointState message into joint_step[] and flags loop() (via
// joint_status) that a new command is pending.
void arm_cb(const moveo_moveit::ArmJointState& arm_steps){
  joint_status = 1;
  joint_step[0] = arm_steps.position1;
  joint_step[1] = arm_steps.position2;
  joint_step[2] = arm_steps.position3;
  joint_step[3] = arm_steps.position4;
  joint_step[4] = arm_steps.position5;
  joint_step[5] = arm_steps.position6; //gripper position <0-180>
}

// ROS callback for the "gripper_angle" topic: immediately drives the gripper
// servo to the commanded angle and toggles the on-board LED as a heartbeat.
void gripper_cb( const std_msgs::UInt16& cmd_msg){
  gripper.write(cmd_msg.data); // Set servo angle, should be from 0-180  
  digitalWrite(13, HIGH-digitalRead(13));  // Toggle led  
}

//instantiate subscribers
ros::Subscriber<moveo_moveit::ArmJointState> arm_sub("joint_steps",arm_cb); //subscribes to joint_steps on arm
ros::Subscriber<std_msgs::UInt16> gripper_sub("gripper_angle", gripper_cb); //subscribes to gripper position
//to publish from terminal: rostopic pub gripper_angle std_msgs/UInt16 <0-180>

// One-time init: configures the LED pin, registers the ROS node and both
// subscribers, caps each stepper's speed, groups the steppers under
// MultiStepper, and attaches the gripper servo.
void setup() {
  //put your setup code here, to run once:
  //Serial.begin(57600);
  pinMode(13,OUTPUT);
  joint_status = 1;  // NOTE(review): forces one pass of the stepper branch in loop()
                     // before any message arrives (joint_step[] is zero-initialized)

  nh.initNode();
  nh.subscribe(arm_sub);
  nh.subscribe(gripper_sub);
  //nh.advertise(steps);

  // Configure each stepper (max speed in steps/sec; values tuned per joint)
  joint1.setMaxSpeed(1500);
  joint2.setMaxSpeed(750);
  joint3.setMaxSpeed(2000);
  joint4.setMaxSpeed(500);
  joint5.setMaxSpeed(1000);

  // Then give them to MultiStepper to manage
  steppers.addStepper(joint1);
  steppers.addStepper(joint2);
  steppers.addStepper(joint3);
  steppers.addStepper(joint4);
  steppers.addStepper(joint5);

  // Configure gripper servo
  gripper.attach(11);
  
  digitalWrite(13, 1); //toggle led
}

// Main loop: when arm_cb has flagged a pending command (joint_status == 1),
// blocks while the steppers run to the commanded positions, then moves the
// gripper.  Joints 2 and 5 are negated because those motors on the real robot
// rotate opposite to the ROS/URDF convention (the original comment sat on the
// un-negated joint 1 line, which was misleading).
void loop() {
  if (joint_status == 1) // If command callback (arm_cb) is being called, execute stepper command
  { 
    long positions[5];  // Array of desired stepper positions must be long
    positions[0] = joint_step[0];
    positions[1] = -joint_step[1]; // negated: real joint 2 rotates opposite to ROS
    positions[2] = joint_step[2]; 
    positions[3] = joint_step[3]; 
    positions[4] = -joint_step[4]; // negated: real joint 5 rotates opposite to ROS

    // Publish back to ros to check if everything's correct
    //msg.data=positions[4];
    //steps.publish(&msg);

    steppers.moveTo(positions);
    nh.spinOnce();
    steppers.runSpeedToPosition(); // Blocks until all are in position
    gripper.write(joint_step[5]);  // move gripper after manipulator reaches goal   
  }
  digitalWrite(13, HIGH-digitalRead(13)); //toggle led
  joint_status = 0;  // clear the flag; a callback during spinOnce() may set it again
  
  nh.spinOnce();
  delay(1);
  
}


================================================
FILE: moveo_moveit/msg/ArmJointState.msg
================================================
# Target positions for the Moveo arm, published on /joint_steps.
# position1-position5: accumulated step counts for joints 1-5
# (consumed by arm_cb in moveo_moveit_arduino.ino).
int16 position1
int16 position2
int16 position3
int16 position4
int16 position5
# position6: gripper servo angle, 0-180 degrees
int16 position6


================================================
FILE: moveo_moveit/package.xml
================================================
<?xml version="1.0"?>
<package>
  <name>moveo_moveit</name>
  <version>0.0.0</version>
  <description>The moveo_moveit package</description>

  <maintainer email="jesse.weisberg@gmail.com">Jesse Weisberg</maintainer>


  <!-- One license tag required, multiple allowed, one license per tag -->
  <!-- Matches the repository-level LICENSE file (MIT, (c) 2018 Jesse Weisberg) -->
  <license>MIT</license>


  <!-- <url type="website">http://wiki.ros.org/carmen_move</url> -->
  <!-- <author email="jane.doe@example.com">Jane Doe</author> -->


  <buildtool_depend>catkin</buildtool_depend>

  <!-- Message generation: CMakeLists.txt calls generate_messages(DEPENDENCIES
       sensor_msgs std_msgs), so those packages must be declared here, and
       message_runtime is needed at run time to use ArmJointState. -->
  <build_depend>message_generation</build_depend>
  <build_depend>sensor_msgs</build_depend>
  <build_depend>std_msgs</build_depend>
  <build_depend>pluginlib</build_depend>
  <build_depend>eigen</build_depend>
  <build_depend>moveit_core</build_depend>
  <build_depend>moveit_ros_planning_interface</build_depend>
  <build_depend>moveit_ros_perception</build_depend>
  <build_depend>interactive_markers</build_depend>
  <build_depend>geometric_shapes</build_depend>
  <build_depend>moveit_visual_tools</build_depend>

  <run_depend>message_runtime</run_depend>
  <run_depend>sensor_msgs</run_depend>
  <run_depend>std_msgs</run_depend>
  <run_depend>pluginlib</run_depend>
  <run_depend>moveit_core</run_depend>
  <run_depend>moveit_fake_controller_manager</run_depend>
  <run_depend>moveit_ros_planning_interface</run_depend>
  <run_depend>moveit_ros_perception</run_depend>
  <run_depend>interactive_markers</run_depend>
  <run_depend>moveit_visual_tools</run_depend>


<!--  <build_depend>roscpp</build_depend>
  <build_depend>rospy</build_depend>
  <build_depend>sensor_msgs</build_depend>
  <build_depend>std_msgs</build_depend>
  <build_depend>moveit_ros_planning_interface</build_depend>
  <build_depend>moveit_core</build_depend>

  <run_depend>message_generation</run_depend>
  <run_depend>message_runtime</run_depend>
  <run_depend>roscpp</run_depend>
  <run_depend>rospy</run_depend>
  <run_depend>sensor_msgs</run_depend>
  <run_depend>std_msgs</run_depend>
  <run_depend>moveit_core</run_depend>
  <run_depend>moveit_ros_planning_interface</run_depend>
  <run_depend>interactive_markers</run_depend>
-->



  <!-- The export tag contains other, unspecified, tags -->
  <export>
    <!-- Other tools can request additional information be placed here -->

  </export>
</package>


================================================
FILE: moveo_moveit/scripts/README.md
================================================
# Object-Specific Pick and Place
This script uses real-time object recognition in a monocular image to perform predefined 'pick and place' movements.  In this example, apples are moved to the left, and bananas to the right (though many object-specific grasps and trajectories can be made).

- [Video Demo](https://youtu.be/kkUbyFa2MWc)

## How to Use
1. Connect your webcam via USB to your laptop, with the other USB port connected to the Arduino
2. Upload Arduino firmware (moveo_moveit/moveo_moveit_arduino/moveo_moveit_arduino.ino)
3. Create a virtualenv that has Python 3, OpenCV 3, and Tensorflow 1.2+
4. Within that virtualenv (in moveo_ros/object_detector_app), in terminal run: ```python object_detection_multithreading.py```
5. In another terminal (no virtualenv), run: ``` roscore ```
5. In another terminal (no virtualenv), run: ``` rosrun rosserial_python serial_node.py /dev/ttyUSB0 ``` (establishes rosserial node that communicates with Arduino)
6. Tweak your predefined object-specific trajectories (for now, this is not so robust, but I'm working on it!)
7. In another terminal (no virtualenv), run: ``` rosrun moveo_moveit moveo_objrec_publisher.py ```
8. Now, when object is placed in the FOV of the camera, a trajectory will be performed based on what object is detected! In this example, I've set up the framework to perform a sequence of trajectories that together form different 'pick and place' trajectories for each object.

## How it Works
### Real-Time Object Recognition
Here, we use Python 3, Tensorflow's Object Detection API, OpenCV, and an ordinary webcam to create this application.  For more information about how this works, see [here](https://github.com/jesseweisberg/moveo_ros/tree/master/object_detector_app).  While real-time object recognition is going on, the label of the recognized object is sent via ZMQ to a node in ROS.  Then, the node in ROS publishes to a rostopic.  The intermediary step of sending via ZMQ is necessary because ROS only supports Python 2, whereas the object recognition app uses Python 3.  Thus, ZMQ really is just used to send the detected object info from a Python 3 (non-ROS friendly) environment to a Python 2 (ROS friendly) environment.

### Using Real-Time Object Recognition to Perform Object-Specific Pick and Place
The node, **moveo_objrec_publisher.py** (in moveo_ros/moveo_moveit/scripts), receives the label of the recognized object from the object_detection_multithreading.py script, and publishes a sequence of trajectories (on the /joint_steps topic) to perform a 'pick and place' motion for that specific object.  The Arduino subscribes to the /joint_steps topic and performs the trajectories.


================================================
FILE: moveo_moveit/scripts/moveo_objrec_publisher.py
================================================
#!/usr/bin/env python
#!/usr/bin/env python3

'''
Subscribes to a zmq socket and publishes that information to a ros topic.  This is one workaround for using
Python 2 and Python 3 in the same ROS application.

In my case, this receives real-time object detection info from a script in Python 3 and publishes to a rostopic.

Author: Jesse Weisberg
'''
import rospy
from std_msgs.msg import String
import sys
import zmq
from msgpack import loads
import time
import pyttsx
from datetime import datetime 
from espeak import espeak
from moveo_moveit.msg import ArmJointState

# Label of the most recently recognized object; None until something is
# detected (set locally inside subscribe_detected_object()).
fixated_object_label = None
# Gripper commands: 'open' plus a per-object closed position
# (presumably motor step counts -- TODO confirm against the Arduino sketch).
gripper = {'open': 0, 'banana': 70, 'apple': 50}
# Home pose: zero on all six joints.
upright = [0, 0, 0, 0, 0, 0]

# Predefined movements for pick and place of an apple and banana.
# Each pose is [joint1, joint2, joint3, joint4, joint5, gripper],
# matching the position1..position6 fields of ArmJointState.
apple_pick = [0, -2243, -23410, 14, -800, gripper['apple']]
apple_move = [0, -1113, -17410, 14, -3300, gripper['apple']]
apple_place = [-4600, -2400, -18410, 91, -800, gripper['open']]

banana_pick = [0, -2243, -24410, 14, -400, gripper['banana']]
banana_move = [0, -1043, -17410, 14, -3300, gripper['banana']]
banana_place = [4600, -2400, -20410, -91, -400, gripper['open']]


# Maps a detected-object label to its ordered pick-and-place waypoint
# sequence: home -> pick -> carry -> place -> home.
object_trajectories = {"apple": [upright, apple_pick, apple_move, apple_place, upright],
                       "banana": [upright, banana_pick, banana_move, banana_place, upright]}


#subscribe to detected object from object_detection_pupil.py (Pupil object detection plugin) via zmq
#subscribe to detected object from object_detection_pupil.py (Pupil object detection plugin) via zmq
def subscribe_detected_object():
    """Block until a detected-object message arrives over ZMQ and return its label.

    Messages are whitespace-separated strings of the form
    'detected_object <label> <confidence>' (3 fields) or
    'detected_object <word1> <word2> <confidence>' (4 fields, two-word label).
    The confidence field is received but currently unused.

    Returns:
        str: the object label parsed from the first well-formed message.
    """
    context = zmq.Context()
    socket = context.socket(zmq.SUB)
    addr = '127.0.0.1'  # remote ip or localhost
    port = "5556"  # same as in the pupil remote gui
    print('retrieving objects...')
    socket.connect("tcp://{}:{}".format(addr, port))

    # Subscribe once, before receiving (the original set this option on every
    # loop iteration).  Python 3 zmq wants a str; Python 2 wants bytes.
    try:
        socket.setsockopt_string(zmq.SUBSCRIBE, 'detected_object')
    except TypeError:
        socket.setsockopt(zmq.SUBSCRIBE, 'detected_object')

    try:
        # Keep reading until a message with a parsable label arrives; the
        # original could hit UnboundLocalError on a malformed message.
        while True:
            fields = socket.recv_string().split()
            if len(fields) == 3:
                return fields[1]
            if len(fields) == 4:
                return fields[1] + ' ' + fields[2]
    finally:
        # This function is called repeatedly by publish_detected_object();
        # close the socket/context so each call doesn't leak a ZMQ context.
        socket.close()
        context.term()

    # Potential improvement idea with emg sensory feedback
    # activate grasp for robotic manipulator: turn on "ready to execute switch"
    # time.sleep(3), during this time wait for emg sensory input
    # set up another rostopic that with emg sensory input,
    # arduino reads that if higher than thresh, execute predetermined motion planning/grasp


# publish detected object to a ros topic
# publish detected object to a ros topic
def publish_detected_object():
    """Main loop: wait for a detected object, then publish its trajectory.

    For every label received from subscribe_detected_object() that has an
    entry in object_trajectories, publish each waypoint on /joint_steps as an
    ArmJointState (the Arduino subscriber executes them), pausing 10 s between
    waypoints so the arm can finish moving.  The label is also spoken aloud
    via espeak.
    """
    pub = rospy.Publisher('joint_steps', ArmJointState, queue_size=4)
    rospy.init_node('pick_and_place_object_detection', anonymous=True)
    rate = rospy.Rate(.1)  # 0.1 Hz (one cycle per 10 s); original comment wrongly said 20hz

    while not rospy.is_shutdown():
        fixated_object_label = subscribe_detected_object()
        rospy.loginfo(fixated_object_label)

        # If the label is a key in object_trajectories, publish each waypoint
        # of that object's pick-and-place sequence.
        if fixated_object_label in object_trajectories:
            for waypoint in object_trajectories[fixated_object_label]:
                goal = ArmJointState()
                (goal.position1, goal.position2, goal.position3,
                 goal.position4, goal.position5, goal.position6) = waypoint
                pub.publish(goal)
                rospy.sleep(10)  # give the arm time to reach this waypoint

        # Announce the label; guard against a falsy label so espeak.synth()
        # never receives None.
        if fixated_object_label:
            espeak.synth(fixated_object_label)
            while espeak.is_playing():
                pass

        #rate.sleep()
    

if __name__ == '__main__':
    try:
        # Runs until ROS shuts down; ROSInterruptException is raised by
        # rospy.sleep()/Rate.sleep() on shutdown and is safe to swallow here.
        publish_detected_object()
    except rospy.ROSInterruptException:
        pass


================================================
FILE: moveo_moveit/src/move_group_interface_coor_1.cpp
================================================
#include <moveit/move_group_interface/move_group_interface.h>
#include <moveit/planning_scene_interface/planning_scene_interface.h>

#include <moveit_msgs/DisplayRobotState.h>
#include <moveit_msgs/DisplayTrajectory.h>

#include <moveit_msgs/AttachedCollisionObject.h>
#include <moveit_msgs/CollisionObject.h>

#include <moveit_visual_tools/moveit_visual_tools.h>

// Demo node: plan and execute a motion of the "arm" planning group to a
// hard-coded end-effector pose, with RViz visualization via MoveItVisualTools.
int main(int argc, char **argv)
{	
  ros::init(argc, argv, "move_group_1");
  ros::NodeHandle node_handle;
  // AsyncSpinner keeps subscriber callbacks serviced (needed for
  // getCurrentState()/getCurrentPose() below) while this thread blocks.
  ros::AsyncSpinner spinner(1);
  spinner.start();

  //----------------------------
  //Setup
  //----------------------------

  static const std::string PLANNING_GROUP = "arm";

  // The :move_group_interface:`MoveGroup` class can be easily
  // setup using just the name of the planning group you would like to control and plan for
  moveit::planning_interface::MoveGroupInterface move_group(PLANNING_GROUP);

  //Using :planning_scene_interface:'PlanningSceneInterface' class to deal directly with the world
  moveit::planning_interface::PlanningSceneInterface planning_scene_interface;

  // Raw pointers are frequently used to refer to the planning group for improved performance.
  const robot_state::JointModelGroup *joint_model_group =
    move_group.getCurrentState()->getJointModelGroup(PLANNING_GROUP);
  
  move_group.setEndEffectorLink("Link_5");
  geometry_msgs::PoseStamped current_pose = move_group.getCurrentPose();
  // Log the current end-effector pose so it can be copied into target_pose1 below.
  // NOTE(review): pose_pub is advertised but nothing is ever published on
  // "robot_pose" in this function -- dead code or unfinished feature; confirm.
  ros::Publisher pose_pub = node_handle.advertise<geometry_msgs::PoseStamped>("robot_pose", 10);
  ROS_INFO_NAMED("moveo", "x position: %f", current_pose.pose.position.x);
  ROS_INFO_NAMED("moveo", "y position: %f", current_pose.pose.position.y);
  ROS_INFO_NAMED("moveo", "z position: %f", current_pose.pose.position.z);
  ROS_INFO_NAMED("moveo", "x orientation: %f", current_pose.pose.orientation.x);
  ROS_INFO_NAMED("moveo", "y orientation: %f", current_pose.pose.orientation.y);
  ROS_INFO_NAMED("moveo", "z orientation: %f", current_pose.pose.orientation.z);
  ROS_INFO_NAMED("moveo", "w orientation: %f", current_pose.pose.orientation.w);
 
  // Visualization
  // ^^^^^^^^^^^^^
  //
  // The package MoveItVisualTools provides many capabilties for visualizing objects, robots,
  // and trajectories in Rviz as well as debugging tools such as step-by-step introspection of a script.
  // Markers are published relative to the "odom" frame (the SRDF virtual-joint parent frame).
  namespace rvt = rviz_visual_tools;
  moveit_visual_tools::MoveItVisualTools visual_tools("odom");
  visual_tools.deleteAllMarkers();

  // Remote control is an introspection tool that allows users to step through a high level script
  // via buttons and keyboard shortcuts in Rviz
  visual_tools.loadRemoteControl();

  // Rviz provides many types of markers, in this demo we will use text, cylinders, and spheres
  Eigen::Affine3d text_pose = Eigen::Affine3d::Identity();
  text_pose.translation().z() = 1.0; // place the label text 1 m above the robot base
  visual_tools.publishText(text_pose, "MoveGroupInterface Moveo Demo", rvt::WHITE, rvt::XLARGE);

  // Batch publishing is used to reduce the number of messages being sent to Rviz for large visualizations
  visual_tools.trigger();


  //-----------------------------
  //Getting Basic Information
  //-----------------------------

  // We can print the name of the reference frame for this robot.
  ROS_INFO_NAMED("moveo", "Reference frame: %s", move_group.getPlanningFrame().c_str());

  // We can also print the name of the end-effector link for this group.
  ROS_INFO_NAMED("moveo", "End effector link: %s", move_group.getEndEffectorLink().c_str());

  //-----------------------------
  //Planning to a Pose Goal
  //-----------------------------

  //Plan a motion for this group to a desired pose for end-effector.
  // Hardcode the desired pose here before running the node (values can be
  // read off the ROS_INFO logs above from a previous run).
  geometry_msgs::Pose target_pose1;
  //default pose
  target_pose1.position.x = 0.120679;
  target_pose1.position.y = 0.072992;
  target_pose1.position.z = 0.569166;
  target_pose1.orientation.x = -0.386473;
  target_pose1.orientation.y =  -0.418023;
  target_pose1.orientation.z = -0.760978;
  target_pose1.orientation.w = 0.311139;

  //upright pose
  // target_pose1.position.x = 0.000130;
  // target_pose1.position.y = -0.240464;
  // target_pose1.position.z = 0.756570;
  // target_pose1.orientation.x = 0.359602;
  // target_pose1.orientation.y =  0.240924;
  // target_pose1.orientation.z = -0.747187;
  // target_pose1.orientation.w = 0.504335;


//upright pose using robot_pose_publisher
// position: 
//   x: 0.450865569212
//   y: -0.0923533864181
//   z: -0.646847372618
// orientation: 
//   x: -0.359579060437
//   y: -0.240936531262
//   z: 0.747165791213
//   w: 0.5043766129


  move_group.setPoseTarget(target_pose1);

  // Now, we call the planner to compute the plan and visualize it.
  // Note that we are just planning, not asking move_group
  // to actually move the robot.
  moveit::planning_interface::MoveGroupInterface::Plan my_plan;

  moveit::planning_interface::MoveItErrorCode success = move_group.plan(my_plan);

  ROS_INFO_NAMED("moveo", "Visualizing plan 1 (pose goal) %s", success ? "" : "FAILED");

  // Visualizing plans
  // ^^^^^^^^^^^^^^^^^
  // We can also visualize the plan as a line with markers in Rviz.
  ROS_INFO_NAMED("moveo", "Visualizing plan 1 as trajectory line");
  visual_tools.publishAxisLabeled(target_pose1, "pose1");
  visual_tools.publishText(text_pose, "Pose Goal", rvt::WHITE, rvt::XLARGE);
  visual_tools.publishTrajectoryLine(my_plan.trajectory_, joint_model_group);
  visual_tools.trigger();
  // Blocks until the user clicks "Next" in the RViz RvizVisualToolsGui,
  // then executes the (re-planned) motion on the robot.
  visual_tools.prompt("Execute trajectory");
  move_group.move();

  ros::shutdown();  
  return 0;
}


================================================
FILE: moveo_moveit/src/moveit_convert.cpp
================================================
#include "ros/ros.h"
#include "sensor_msgs/JointState.h"
#include "moveo_moveit/ArmJointState.h"
#include "math.h"

// Per-message relative step deltas computed in cmd_cb().
moveo_moveit::ArmJointState arm_steps;
// Running sum of step deltas since startup; this is what main() publishes on /joint_steps.
moveo_moveit::ArmJointState total;
int stepsPerRevolution[6] = {32800,18000,72000,3280,14400,0};  // microsteps/revolution (using 16ths) from observation, for each motor
int joint_status = 0;  // set to 1 by cmd_cb() when a new 'total' is ready; cleared by main() after publishing
double cur_angle[6];   // NOTE(review): declared but never written or read in this file
int joint_step[6];     // NOTE(review): declared but never used in this file
double prev_angle[6] = {0,0,0,0,0,0};  // joint angles (rad) from the previous callback; baseline for step deltas
double init_angle[6] = {0,0,0,0,0,0};  // joint angles captured on the very first callback
double total_steps[6] = {0,0,0,0,0,0}; // legacy accumulator; only read in a debug log
int count = 0;  // 0 until the first callback has run, 1 afterwards


//keep a running sum of all the step counts and use that as the final step to send to arduino accelstepper

// int angle_to_steps(double x)
// {
//   float steps;
//   steps=((x / M_PI)*stepsPerRevolution)+0.5; // (radians)*(1 revolution/PI radians)*(200 steps/revolution)
//   return steps;
// }

//command callback (for position) function 
//command callback (for position) function
// Converts the commanded joint angles (radians) from
// /move_group/fake_controller_joint_states into relative motor step counts
// (delta_angle * stepsPerRevolution / 2*PI) and accumulates them into
// 'total', which main() publishes on /joint_steps for the Arduino.
void cmd_cb(const sensor_msgs::JointState& cmd_arm)
{
  // First message: take the commanded pose as the reference so the first
  // delta is zero (the physical arm is assumed to start at this pose).
  if (count==0){
    for (int i = 0; i < 6; i++){
      prev_angle[i] = cmd_arm.position[i];
      init_angle[i] = cmd_arm.position[i];
    }
  }

  ROS_INFO_STREAM("Received /move_group/fake_controller_joint_states");

  // Relative steps for this message: (radians) * (1 rev / 2*PI rad) * (steps/rev).
  // Note stepsPerRevolution[5] is 0, so position6 (gripper) is always 0 here.
  arm_steps.position1 = (int)((cmd_arm.position[0]-prev_angle[0])*stepsPerRevolution[0]/(2*M_PI));
  arm_steps.position2 = (int)((cmd_arm.position[1]-prev_angle[1])*stepsPerRevolution[1]/(2*M_PI));
  arm_steps.position3 = (int)((cmd_arm.position[2]-prev_angle[2])*stepsPerRevolution[2]/(2*M_PI));
  arm_steps.position4 = (int)((cmd_arm.position[3]-prev_angle[3])*stepsPerRevolution[3]/(2*M_PI));
  arm_steps.position5 = (int)((cmd_arm.position[4]-prev_angle[4])*stepsPerRevolution[4]/(2*M_PI));
  arm_steps.position6 = (int)((cmd_arm.position[5]-prev_angle[5])*stepsPerRevolution[5]/(2*M_PI));

  ROS_INFO_NAMED("test", "arm_steps.position5 #2: %d", arm_steps.position5);

  // Remember the current command as the baseline for the next message.
  // (On the first message prev_angle already equals cmd_arm.position, so the
  // original 'if (count!=0)' guard was redundant.)
  for (int i = 0; i < 6; i++){
    prev_angle[i] = cmd_arm.position[i];
  }

  // Running totals since startup -- the absolute step targets the Arduino
  // AccelStepper code drives to.
  total.position1 += arm_steps.position1;
  total.position2 += arm_steps.position2;
  total.position3 += arm_steps.position3;
  total.position4 += arm_steps.position4;
  total.position5 += arm_steps.position5;
  total.position6 += arm_steps.position6;  // BUGFIX: position6 (gripper) was never accumulated

  ROS_INFO_NAMED("test", "total_steps[4]: %f, total: %d", total_steps[4], total.position5);
  ROS_INFO_NAMED("test", "arm_steps.position5 #3: %d", arm_steps.position5);

  ROS_INFO_STREAM("Done conversion to /joint_steps");
  joint_status = 1;  // signal main() that a fresh 'total' is ready to publish
  count=1;
}

int main(int argc, char **argv)
{
  ros::init(argc, argv, "moveo_moveit");
  ros::NodeHandle nh;
  ROS_INFO_STREAM("In main function");
  ros::Subscriber sub = nh.subscribe("/move_group/fake_controller_joint_states",1000,cmd_cb);
  ros::Publisher pub = nh.advertise<moveo_moveit::ArmJointState>("joint_steps",50);
  
  ros::Rate loop_rate(20);

  while (ros::ok())
  {
    if(joint_status==1)
    {
      joint_status = 0;
      //pub.publish(arm_steps);
      pub.publish(total);
      ROS_INFO_STREAM("Published to /joint_steps");
    }
    ros::spinOnce();
    loop_rate.sleep();  
  }

  //ros::spin();
  return 0;
}


================================================
FILE: moveo_moveit_config/.setup_assistant
================================================
moveit_setup_assistant_config:
  URDF:
    package: moveo_urdf
    relative_path: urdf/moveo_urdf.urdf
  SRDF:
    relative_path: config/moveo_urdf.srdf
  CONFIG:
    author_name: Jesse Weisberg
    author_email: jesse.weisberg@gmail.com
    generated_timestamp: 1510359526

================================================
FILE: moveo_moveit_config/CMakeLists.txt
================================================
cmake_minimum_required(VERSION 2.8.3)
project(moveo_moveit_config)

find_package(catkin REQUIRED)

catkin_package()

install(DIRECTORY launch DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
  PATTERN "setup_assistant.launch" EXCLUDE)
install(DIRECTORY config DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION})


================================================
FILE: moveo_moveit_config/config/fake_controllers.yaml
================================================
controller_list:
  - name: fake_arm_controller
    joints:
      - Joint_1
      - Joint_2
      - Joint_3
      - Joint_4
      - Joint_5
  - name: fake_gripper_controller
    joints:
      - Gripper_Idol_Gear_Joint
      - Gripper_Servo_Gear_Joint

================================================
FILE: moveo_moveit_config/config/joint_limits.yaml
================================================
# joint_limits.yaml allows the dynamics properties specified in the URDF to be overwritten or augmented as needed
# Specific joint properties can be changed with the keys [max_position, min_position, max_velocity, max_acceleration]
# Joint limits can be turned off with [has_velocity_limits, has_acceleration_limits]
joint_limits:
  Gripper_Idol_Gear_Joint:
    has_velocity_limits: true
    max_velocity: 1
    has_acceleration_limits: false
    max_acceleration: 0
  Gripper_Servo_Gear_Joint:
    has_velocity_limits: true
    max_velocity: 1
    has_acceleration_limits: false
    max_acceleration: 0
  Joint_1:
    has_velocity_limits: true
    max_velocity: 1
    has_acceleration_limits: false
    max_acceleration: 0
  Joint_2:
    has_velocity_limits: true
    max_velocity: 1
    has_acceleration_limits: false
    max_acceleration: 0
  Joint_3:
    has_velocity_limits: true
    max_velocity: 1
    has_acceleration_limits: false
    max_acceleration: 0
  Joint_4:
    has_velocity_limits: true
    max_velocity: 1
    has_acceleration_limits: false
    max_acceleration: 0
  Joint_5:
    has_velocity_limits: true
    max_velocity: 1
    has_acceleration_limits: false
    max_acceleration: 0
  Pivot_Arm_Gripper_Idol_Joint:
    has_velocity_limits: true
    max_velocity: 1
    has_acceleration_limits: false
    max_acceleration: 0
  Pivot_Arm_Gripper_Servo_Joint:
    has_velocity_limits: true
    max_velocity: 1
    has_acceleration_limits: false
    max_acceleration: 0
  Tip_Gripper_Idol_Joint:
    has_velocity_limits: true
    max_velocity: 1
    has_acceleration_limits: false
    max_acceleration: 0
  Tip_Gripper_Servo_Joint:
    has_velocity_limits: true
    max_velocity: 1
    has_acceleration_limits: false
    max_acceleration: 0

================================================
FILE: moveo_moveit_config/config/kinematics.yaml
================================================
arm:
  kinematics_solver: kdl_kinematics_plugin/KDLKinematicsPlugin
  kinematics_solver_search_resolution: 0.005
  kinematics_solver_timeout: 0.005
  kinematics_solver_attempts: 3

================================================
FILE: moveo_moveit_config/config/moveo_urdf.srdf
================================================
<?xml version="1.0" ?>
<!--This does not replace URDF, and is not an extension of URDF.
    This is a format for representing semantic information about the robot structure.
    A URDF file must exist for this robot as well, where the joints and the links that are referenced are defined
-->
<robot name="moveo_urdf">
    <!--GROUPS: Representation of a set of joints and links. This can be useful for specifying DOF to plan for, defining arms, end effectors, etc-->
    <!--LINKS: When a link is specified, the parent joint of that link (if it exists) is automatically included-->
    <!--JOINTS: When a joint is specified, the child link of that joint (which will always exist) is automatically included-->
    <!--CHAINS: When a chain is specified, all the links along the chain (including endpoints) are included in the group. Additionally, all the joints that are parents to included links are also included. This means that joints along the chain and the parent joint of the base link are included in the group-->
    <!--SUBGROUPS: Groups can also be formed by referencing to already defined group names-->
    <group name="arm">
        <chain base_link="base_link" tip_link="Link_5" />
    </group>
    <group name="gripper">
        <link name="Gripper_Idol_Gear" />
        <link name="Tip_Gripper_Idol" />
        <link name="Gripper_Servo_Gear" />
        <link name="Tip_Gripper_Servo" />
        <link name="Pivot_Arm_Gripper_Idol" />
        <link name="Pivot_Arm_Gripper_Servo" />
    </group>
    <!--GROUP STATES: Purpose: Define a named state for a particular group, in terms of joint values. This is useful to define states like 'folded arms'-->
    <group_state name="Upright" group="arm">
        <joint name="Joint_1" value="0" />
        <joint name="Joint_2" value="0.6386" />
        <joint name="Joint_3" value="0.5523" />
        <joint name="Joint_4" value="-0.4143" />
        <joint name="Joint_5" value="0.9148" />
    </group_state>
    <!--END EFFECTOR: Purpose: Represent information about an end effector.-->
    <end_effector name="gripper_ee" parent_link="Link_5" group="gripper" />
    <!--VIRTUAL JOINT: Purpose: this element defines a virtual joint between a robot link and an external frame of reference (considered fixed with respect to the robot)-->
    <virtual_joint name="base_odom" type="planar" parent_frame="odom" child_link="base_link" />
    <!--DISABLE COLLISIONS: By default it is assumed that any link of the robot could potentially come into collision with any other link in the robot. This tag disables collision checking between a specified pair of links. -->
    <disable_collisions link1="Gripper_Idol_Gear" link2="Gripper_Servo_Gear" reason="Never" />
    <disable_collisions link1="Gripper_Idol_Gear" link2="Link_2" reason="Never" />
    <disable_collisions link1="Gripper_Idol_Gear" link2="Link_3" reason="Never" />
    <disable_collisions link1="Gripper_Idol_Gear" link2="Link_4" reason="Never" />
    <disable_collisions link1="Gripper_Idol_Gear" link2="Link_5" reason="Adjacent" />
    <disable_collisions link1="Gripper_Idol_Gear" link2="Pivot_Arm_Gripper_Servo" reason="Never" />
    <disable_collisions link1="Gripper_Idol_Gear" link2="Tip_Gripper_Idol" reason="Adjacent" />
    <disable_collisions link1="Gripper_Idol_Gear" link2="Tip_Gripper_Servo" reason="Never" />
    <disable_collisions link1="Gripper_Servo_Gear" link2="Link_2" reason="Never" />
    <disable_collisions link1="Gripper_Servo_Gear" link2="Link_3" reason="Never" />
    <disable_collisions link1="Gripper_Servo_Gear" link2="Link_4" reason="Never" />
    <disable_collisions link1="Gripper_Servo_Gear" link2="Link_5" reason="Adjacent" />
    <disable_collisions link1="Gripper_Servo_Gear" link2="Pivot_Arm_Gripper_Idol" reason="Never" />
    <disable_collisions link1="Gripper_Servo_Gear" link2="Tip_Gripper_Idol" reason="Never" />
    <disable_collisions link1="Gripper_Servo_Gear" link2="Tip_Gripper_Servo" reason="Adjacent" />
    <disable_collisions link1="Link_1" link2="Link_2" reason="Adjacent" />
    <disable_collisions link1="Link_1" link2="base_link" reason="Adjacent" />
    <disable_collisions link1="Link_2" link2="Link_3" reason="Adjacent" />
    <disable_collisions link1="Link_2" link2="Link_4" reason="Never" />
    <disable_collisions link1="Link_2" link2="Link_5" reason="Never" />
    <disable_collisions link1="Link_2" link2="Pivot_Arm_Gripper_Idol" reason="Never" />
    <disable_collisions link1="Link_2" link2="Pivot_Arm_Gripper_Servo" reason="Never" />
    <disable_collisions link1="Link_3" link2="Link_4" reason="Adjacent" />
    <disable_collisions link1="Link_3" link2="Link_5" reason="Never" />
    <disable_collisions link1="Link_3" link2="Pivot_Arm_Gripper_Idol" reason="Never" />
    <disable_collisions link1="Link_3" link2="Pivot_Arm_Gripper_Servo" reason="Never" />
    <disable_collisions link1="Link_3" link2="Tip_Gripper_Idol" reason="Never" />
    <disable_collisions link1="Link_3" link2="Tip_Gripper_Servo" reason="Never" />
    <disable_collisions link1="Link_4" link2="Link_5" reason="Adjacent" />
    <disable_collisions link1="Link_4" link2="Pivot_Arm_Gripper_Idol" reason="Never" />
    <disable_collisions link1="Link_4" link2="Pivot_Arm_Gripper_Servo" reason="Never" />
    <disable_collisions link1="Link_4" link2="Tip_Gripper_Idol" reason="Never" />
    <disable_collisions link1="Link_4" link2="Tip_Gripper_Servo" reason="Never" />
    <disable_collisions link1="Link_5" link2="Pivot_Arm_Gripper_Idol" reason="Adjacent" />
    <disable_collisions link1="Link_5" link2="Pivot_Arm_Gripper_Servo" reason="Adjacent" />
    <disable_collisions link1="Link_5" link2="Tip_Gripper_Idol" reason="Never" />
    <disable_collisions link1="Link_5" link2="Tip_Gripper_Servo" reason="Never" />
    <disable_collisions link1="Pivot_Arm_Gripper_Idol" link2="Pivot_Arm_Gripper_Servo" reason="Never" />
    <disable_collisions link1="Pivot_Arm_Gripper_Idol" link2="Tip_Gripper_Idol" reason="Default" />
    <disable_collisions link1="Pivot_Arm_Gripper_Idol" link2="Tip_Gripper_Servo" reason="Never" />
    <disable_collisions link1="Pivot_Arm_Gripper_Servo" link2="Tip_Gripper_Idol" reason="Never" />
    <disable_collisions link1="Pivot_Arm_Gripper_Servo" link2="Tip_Gripper_Servo" reason="Default" />
    <disable_collisions link1="Tip_Gripper_Idol" link2="Tip_Gripper_Servo" reason="Never" />
</robot>


================================================
FILE: moveo_moveit_config/config/ompl_planning.yaml
================================================
planner_configs:
  SBLkConfigDefault:
    type: geometric::SBL
    range: 0.0  # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
  ESTkConfigDefault:
    type: geometric::EST
    range: 0.0  # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0 setup()
    goal_bias: 0.05  # When close to goal select goal, with this probability. default: 0.05
  LBKPIECEkConfigDefault:
    type: geometric::LBKPIECE
    range: 0.0  # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
    border_fraction: 0.9  # Fraction of time focused on border. default: 0.9
    min_valid_path_fraction: 0.5  # Accept partially valid moves above fraction. default: 0.5
  BKPIECEkConfigDefault:
    type: geometric::BKPIECE
    range: 0.0  # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
    border_fraction: 0.9  # Fraction of time focused on border. default: 0.9
    failed_expansion_score_factor: 0.5  # When extending motion fails, scale score by factor. default: 0.5
    min_valid_path_fraction: 0.5  # Accept partially valid moves above fraction. default: 0.5
  KPIECEkConfigDefault:
    type: geometric::KPIECE
    range: 0.0  # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
    goal_bias: 0.05  # When close to goal select goal, with this probability. default: 0.05 
    border_fraction: 0.9  # Fraction of time focused on border. default: 0.9 (0.0,1.]
    failed_expansion_score_factor: 0.5  # When extending motion fails, scale score by factor. default: 0.5
    min_valid_path_fraction: 0.5  # Accept partially valid moves above fraction. default: 0.5
  RRTkConfigDefault:
    type: geometric::RRT
    range: 0.0  # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
    goal_bias: 0.05  # When close to goal select goal, with this probability? default: 0.05
  RRTConnectkConfigDefault:
    type: geometric::RRTConnect
    range: 0.0  # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
  RRTstarkConfigDefault:
    type: geometric::RRTstar
    range: 0.0  # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
    goal_bias: 0.05  # When close to goal select goal, with this probability? default: 0.05
    delay_collision_checking: 1  # Stop collision checking as soon as C-free parent found. default 1
  TRRTkConfigDefault:
    type: geometric::TRRT
    range: 0.0  # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
    goal_bias: 0.05  # When close to goal select goal, with this probability? default: 0.05
    max_states_failed: 10  # when to start increasing temp. default: 10
    temp_change_factor: 2.0  # how much to increase or decrease temp. default: 2.0
    min_temperature: 10e-10  # lower limit of temp change. default: 10e-10
    init_temperature: 10e-6  # initial temperature. default: 10e-6
    frountier_threshold: 0.0  # dist new state to nearest neighbor to disqualify as frontier. default: 0.0 set in setup() 
    frountierNodeRatio: 0.1  # 1/10, or 1 nonfrontier for every 10 frontier. default: 0.1
    k_constant: 0.0  # value used to normalize expression. default: 0.0 set in setup()
  PRMkConfigDefault:
    type: geometric::PRM
    max_nearest_neighbors: 10  # use k nearest neighbors. default: 10
  PRMstarkConfigDefault:
    type: geometric::PRMstar
  FMTkConfigDefault:
    type: geometric::FMT
    num_samples: 1000  # number of states that the planner should sample. default: 1000
    radius_multiplier: 1.1  # multiplier used for the nearest neighbors search radius. default: 1.1
    nearest_k: 1  # use Knearest strategy. default: 1
    cache_cc: 1  # use collision checking cache. default: 1
    heuristics: 0  # activate cost to go heuristics. default: 0
    extended_fmt: 1  # activate the extended FMT*: adding new samples if planner does not finish successfully. default: 1
  BFMTkConfigDefault:
    type: geometric::BFMT
    num_samples: 1000  # number of states that the planner should sample. default: 1000
    radius_multiplier: 1.0  # multiplier used for the nearest neighbors search radius. default: 1.0
    nearest_k: 1  # use the Knearest strategy. default: 1
    balanced: 0  # exploration strategy: balanced true expands one tree every iteration. False will select the tree with lowest maximum cost to go. default: 1
    optimality: 1  # termination strategy: optimality true finishes when the best possible path is found. Otherwise, the algorithm will finish when the first feasible path is found. default: 1
    heuristics: 1  # activates cost to go heuristics. default: 1
    cache_cc: 1  # use the collision checking cache. default: 1
    extended_fmt: 1  # Activates the extended FMT*: adding new samples if planner does not finish successfully. default: 1
  PDSTkConfigDefault:
    type: geometric::PDST
  STRIDEkConfigDefault:
    type: geometric::STRIDE
    range: 0.0  # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
    goal_bias: 0.05  # When close to goal select goal, with this probability. default: 0.05 
    use_projected_distance: 0  # whether nearest neighbors are computed based on distances in a projection of the state rather distances in the state space itself. default: 0
    degree: 16  # desired degree of a node in the Geometric Near-neighbor Access Tree (GNAT). default: 16 
    max_degree: 18  # max degree of a node in the GNAT. default: 12
    min_degree: 12  # min degree of a node in the GNAT. default: 12
    max_pts_per_leaf: 6  # max points per leaf in the GNAT. default: 6
    estimated_dimension: 0.0  # estimated dimension of the free space. default: 0.0
    min_valid_path_fraction: 0.2  # Accept partially valid moves above fraction. default: 0.2
  BiTRRTkConfigDefault:
    type: geometric::BiTRRT
    range: 0.0  # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
    temp_change_factor: 0.1  # how much to increase or decrease temp. default: 0.1
    init_temperature: 100  # initial temperature. default: 100
    frountier_threshold: 0.0  # dist new state to nearest neighbor to disqualify as frontier. default: 0.0 set in setup() 
    frountier_node_ratio: 0.1  # 1/10, or 1 nonfrontier for every 10 frontier. default: 0.1
    cost_threshold: 1e300  # the cost threshold. Any motion cost that is not better will not be expanded. default: inf
  LBTRRTkConfigDefault:
    type: geometric::LBTRRT
    range: 0.0  # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
    goal_bias: 0.05  # When close to goal select goal, with this probability. default: 0.05 
    epsilon: 0.4  # optimality approximation factor. default: 0.4
  BiESTkConfigDefault:
    type: geometric::BiEST
    range: 0.0  # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
  ProjESTkConfigDefault:
    type: geometric::ProjEST
    range: 0.0  # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
    goal_bias: 0.05  # When close to goal select goal, with this probability. default: 0.05 
  LazyPRMkConfigDefault:
    type: geometric::LazyPRM
    range: 0.0  # Max motion added to tree. ==> maxDistance_ default: 0.0, if 0.0, set on setup()
  LazyPRMstarkConfigDefault:
    type: geometric::LazyPRMstar
  SPARSkConfigDefault:
    type: geometric::SPARS
    stretch_factor: 3.0  # roadmap spanner stretch factor. multiplicative upper bound on path quality. It does not make sense to make this parameter more than 3. default: 3.0
    sparse_delta_fraction: 0.25  # delta fraction for connection distance. This value represents the visibility range of sparse samples. default: 0.25
    dense_delta_fraction: 0.001  # delta fraction for interface detection. default: 0.001
    max_failures: 1000  # maximum consecutive failure limit. default: 1000
  SPARStwokConfigDefault:
    type: geometric::SPARStwo
    stretch_factor: 3.0  # roadmap spanner stretch factor. multiplicative upper bound on path quality. It does not make sense to make this parameter more than 3. default: 3.0
    sparse_delta_fraction: 0.25  # delta fraction for connection distance. This value represents the visibility range of sparse samples. default: 0.25
    dense_delta_fraction: 0.001  # delta fraction for interface detection. default: 0.001
    max_failures: 5000  # maximum consecutive failure limit. default: 5000
arm:
  planner_configs:
    - SBLkConfigDefault
    - ESTkConfigDefault
    - LBKPIECEkConfigDefault
    - BKPIECEkConfigDefault
    - KPIECEkConfigDefault
    - RRTkConfigDefault
    - RRTConnectkConfigDefault
    - RRTstarkConfigDefault
    - TRRTkConfigDefault
    - PRMkConfigDefault
    - PRMstarkConfigDefault
    - FMTkConfigDefault
    - BFMTkConfigDefault
    - PDSTkConfigDefault
    - STRIDEkConfigDefault
    - BiTRRTkConfigDefault
    - LBTRRTkConfigDefault
    - BiESTkConfigDefault
    - ProjESTkConfigDefault
    - LazyPRMkConfigDefault
    - LazyPRMstarkConfigDefault
    - SPARSkConfigDefault
    - SPARStwokConfigDefault
  projection_evaluator: joints(Joint_1,Joint_2)
  longest_valid_segment_fraction: 0.005
gripper:
  planner_configs:
    - SBLkConfigDefault
    - ESTkConfigDefault
    - LBKPIECEkConfigDefault
    - BKPIECEkConfigDefault
    - KPIECEkConfigDefault
    - RRTkConfigDefault
    - RRTConnectkConfigDefault
    - RRTstarkConfigDefault
    - TRRTkConfigDefault
    - PRMkConfigDefault
    - PRMstarkConfigDefault
    - FMTkConfigDefault
    - BFMTkConfigDefault
    - PDSTkConfigDefault
    - STRIDEkConfigDefault
    - BiTRRTkConfigDefault
    - LBTRRTkConfigDefault
    - BiESTkConfigDefault
    - ProjESTkConfigDefault
    - LazyPRMkConfigDefault
    - LazyPRMstarkConfigDefault
    - SPARSkConfigDefault
    - SPARStwokConfigDefault
  projection_evaluator: joints(Gripper_Idol_Gear_Joint,Tip_Gripper_Idol_Joint)
  longest_valid_segment_fraction: 0.005

================================================
FILE: moveo_moveit_config/launch/default_warehouse_db.launch
================================================
<launch>

  <arg name="reset" default="false"/>
  <!-- If not specified, we'll use a default database location -->
  <arg name="moveit_warehouse_database_path" default="$(find moveo_moveit_config)/default_warehouse_mongo_db" />

  <!-- Launch the warehouse with the configured database location -->
  <include file="$(find moveo_moveit_config)/launch/warehouse.launch">
    <arg name="moveit_warehouse_database_path" value="$(arg moveit_warehouse_database_path)" />
  </include>

  <!-- If we want to reset the database, run this node -->
  <node if="$(arg reset)" name="$(anon moveit_default_db_reset)" type="moveit_init_demo_warehouse" pkg="moveit_ros_warehouse" respawn="false" output="screen" />

</launch>


================================================
FILE: moveo_moveit_config/launch/demo.launch
================================================
<launch>

  <!-- By default, we do not start a database (it can be large) -->
  <arg name="db" default="false" />
  <!-- Allow user to specify database location -->
  <arg name="db_path" default="$(find moveo_moveit_config)/default_warehouse_mongo_db" />

  <!-- By default, we are not in debug mode -->
  <arg name="debug" default="false" />

  <!--
  By default, hide joint_state_publisher's GUI

  MoveIt!'s "demo" mode replaces the real robot driver with the joint_state_publisher.
  The latter one maintains and publishes the current joint configuration of the simulated robot.
  It also provides a GUI to move the simulated robot around "manually".
  This corresponds to moving around the real robot without the use of MoveIt.
  -->
  <arg name="use_gui" default="false" />

  <!-- Load the URDF, SRDF and other .yaml configuration files on the param server -->
  <include file="$(find moveo_moveit_config)/launch/planning_context.launch">
    <arg name="load_robot_description" value="true"/>
  </include>

  <!-- If needed, broadcast static tf for robot root -->
    <node pkg="tf" type="static_transform_publisher" name="virtual_joint_broadcaster_0" args="0 0 0 0 0 0 odom base_link 100" />


  <!-- We do not have a robot connected, so publish fake joint states -->
  <node name="joint_state_publisher" pkg="joint_state_publisher" type="joint_state_publisher">
    <param name="/use_gui" value="$(arg use_gui)"/>
    <rosparam param="/source_list">[/move_group/fake_controller_joint_states]</rosparam>
  </node>

  <!-- Given the published joint states, publish tf for the robot links -->
  <node name="robot_state_publisher" pkg="robot_state_publisher" type="robot_state_publisher" respawn="true" output="screen" />

  <!-- Run the main MoveIt executable with fake (simulated) trajectory execution, since no real controllers are configured by default -->
  <include file="$(find moveo_moveit_config)/launch/move_group.launch">
    <arg name="allow_trajectory_execution" value="true"/>
    <arg name="fake_execution" value="true"/>
    <arg name="info" value="true"/>
    <arg name="debug" value="$(arg debug)"/>
  </include>

  <!-- Run Rviz and load the default config to see the state of the move_group node -->
  <include file="$(find moveo_moveit_config)/launch/moveit_rviz.launch">
    <arg name="config" value="true"/>
    <arg name="debug" value="$(arg debug)"/>
  </include>

  <!-- If database loading was enabled, start mongodb as well -->
  <include file="$(find moveo_moveit_config)/launch/default_warehouse_db.launch" if="$(arg db)">
    <arg name="moveit_warehouse_database_path" value="$(arg db_path)"/>
  </include>

</launch>


================================================
FILE: moveo_moveit_config/launch/fake_moveit_controller_manager.launch.xml
================================================
<launch>

  <!-- Set the param that trajectory_execution_manager needs to find the controller plugin -->
  <param name="moveit_controller_manager" value="moveit_fake_controller_manager/MoveItFakeControllerManager"/>

  <!-- The rest of the params are specific to this plugin -->
  <rosparam file="$(find moveo_moveit_config)/config/fake_controllers.yaml"/>

</launch>


================================================
FILE: moveo_moveit_config/launch/joystick_control.launch
================================================
<launch>
  <!-- See moveit_ros/visualization/doc/joystick.rst for documentation -->

  <arg name="dev" default="/dev/input/js0" />

  <!-- Launch joy node -->
  <node pkg="joy" type="joy_node" name="joy">
    <param name="dev" value="$(arg dev)" /> <!-- Customize this to match the location your joystick is plugged in on-->
    <param name="deadzone" value="0.2" />
    <param name="autorepeat_rate" value="40" />
    <param name="coalesce_interval" value="0.025" />
  </node>

  <!-- Launch python interface -->
  <node pkg="moveit_ros_visualization" type="moveit_joy.py" output="screen" name="moveit_joy"/>
        
</launch>


================================================
FILE: moveo_moveit_config/launch/move_group.launch
================================================
<launch>

  <include file="$(find moveo_moveit_config)/launch/planning_context.launch" />

  <!-- GDB Debug Option -->
  <arg name="debug" default="false" />
  <arg unless="$(arg debug)" name="launch_prefix" value="" />
  <arg     if="$(arg debug)" name="launch_prefix"
	   value="gdb -x $(find moveo_moveit_config)/launch/gdb_settings.gdb --ex run --args" />

  <!-- Verbose Mode Option -->
  <arg name="info" default="$(arg debug)" />
  <arg unless="$(arg info)" name="command_args" value="" />
  <arg     if="$(arg info)" name="command_args" value="--debug" />

  <!-- move_group settings -->
  <arg name="allow_trajectory_execution" default="true"/>
  <arg name="fake_execution" default="false"/>
  <arg name="max_safe_path_cost" default="1"/>
  <arg name="jiggle_fraction" default="0.05" />
  <arg name="publish_monitored_planning_scene" default="true"/>

  <!-- Planning Functionality -->
  <include ns="move_group" file="$(find moveo_moveit_config)/launch/planning_pipeline.launch.xml">
    <arg name="pipeline" value="ompl" />
  </include>

  <!-- Trajectory Execution Functionality -->
  <include ns="move_group" file="$(find moveo_moveit_config)/launch/trajectory_execution.launch.xml" if="$(arg allow_trajectory_execution)">
    <arg name="moveit_manage_controllers" value="true" />
    <arg name="moveit_controller_manager" value="moveo_urdf" unless="$(arg fake_execution)"/>
    <arg name="moveit_controller_manager" value="fake" if="$(arg fake_execution)"/>
  </include>

  <!-- Sensors Functionality -->
  <include ns="move_group" file="$(find moveo_moveit_config)/launch/sensor_manager.launch.xml" if="$(arg allow_trajectory_execution)">
    <arg name="moveit_sensor_manager" value="moveo_urdf" />
  </include>

  <!-- Start the actual move_group node/action server -->
  <node name="move_group" launch-prefix="$(arg launch_prefix)" pkg="moveit_ros_move_group" type="move_group" respawn="false" output="screen" args="$(arg command_args)">
    <!-- Set the display variable, in case OpenGL code is used internally -->
    <env name="DISPLAY" value="$(optenv DISPLAY :0)" />

    <param name="allow_trajectory_execution" value="$(arg allow_trajectory_execution)"/>
    <param name="max_safe_path_cost" value="$(arg max_safe_path_cost)"/>
    <param name="jiggle_fraction" value="$(arg jiggle_fraction)" />

    <!-- load these non-default MoveGroup capabilities -->
    <!--
    <param name="capabilities" value="
                  a_package/AwesomeMotionPlanningCapability
                  another_package/GraspPlanningPipeline
                  " />
    -->

    <!-- inhibit these default MoveGroup capabilities -->
    <!--
    <param name="disable_capabilities" value="
                  move_group/MoveGroupKinematicsService
                  move_group/ClearOctomapService
                  " />
    -->

    <!-- Publish the planning scene of the physical robot so that rviz plugin can know actual robot -->
    <param name="planning_scene_monitor/publish_planning_scene" value="$(arg publish_monitored_planning_scene)" />
    <param name="planning_scene_monitor/publish_geometry_updates" value="$(arg publish_monitored_planning_scene)" />
    <param name="planning_scene_monitor/publish_state_updates" value="$(arg publish_monitored_planning_scene)" />
    <param name="planning_scene_monitor/publish_transforms_updates" value="$(arg publish_monitored_planning_scene)" />
  </node>

</launch>


================================================
FILE: moveo_moveit_config/launch/moveit.rviz
================================================
Panels:
  - Class: rviz/Displays
    Help Height: 0
    Name: Displays
    Property Tree Widget:
      Expanded:
        - /MotionPlanning1/Scene Robot1
        - /MarkerArray1
        - /Axes1
      Splitter Ratio: 0.742560029
    Tree Height: 294
  - Class: rviz/Help
    Name: Help
  - Class: rviz/Views
    Expanded:
      - /Current View1
    Name: Views
    Splitter Ratio: 0.5
  - Class: rviz_visual_tools/RvizVisualToolsGui
    Name: RvizVisualToolsGui
Visualization Manager:
  Class: ""
  Displays:
    - Alpha: 0.5
      Cell Size: 0.200000003
      Class: rviz/Grid
      Color: 160; 160; 164
      Enabled: true
      Line Style:
        Line Width: 0.0299999993
        Value: Lines
      Name: Grid
      Normal Cell Count: 0
      Offset:
        X: 0
        Y: 0
        Z: 0
      Plane: XY
      Plane Cell Count: 10
      Reference Frame: <Fixed Frame>
      Value: true
    - Class: moveit_rviz_plugin/MotionPlanning
      Enabled: true
      Move Group Namespace: ""
      MoveIt_Goal_Tolerance: 0
      MoveIt_Planning_Attempts: 10
      MoveIt_Planning_Time: 5
      MoveIt_Use_Constraint_Aware_IK: true
      MoveIt_Warehouse_Host: 127.0.0.1
      MoveIt_Warehouse_Port: 33829
      MoveIt_Workspace:
        Center:
          X: 0
          Y: 0
          Z: 0
        Size:
          X: 2
          Y: 2
          Z: 2
      Name: MotionPlanning
      Planned Path:
        Color Enabled: false
        Interrupt Display: false
        Links:
          All Links Enabled: true
          Expand Joint Details: false
          Expand Link Details: false
          Expand Tree: false
          Gripper_Idol_Gear:
            Alpha: 1
            Show Axes: false
            Show Trail: false
            Value: true
          Gripper_Servo_Gear:
            Alpha: 1
            Show Axes: false
            Show Trail: false
            Value: true
          Link Tree Style: Links in Alphabetic Order
          Link_1:
            Alpha: 1
            Show Axes: false
            Show Trail: false
            Value: true
          Link_2:
            Alpha: 1
            Show Axes: false
            Show Trail: false
            Value: true
          Link_3:
            Alpha: 1
            Show Axes: false
            Show Trail: false
            Value: true
          Link_4:
            Alpha: 1
            Show Axes: false
            Show Trail: false
            Value: true
          Link_5:
            Alpha: 1
            Show Axes: false
            Show Trail: false
            Value: true
          Pivot_Arm_Gripper_Idol:
            Alpha: 1
            Show Axes: false
            Show Trail: false
            Value: true
          Pivot_Arm_Gripper_Servo:
            Alpha: 1
            Show Axes: false
            Show Trail: false
            Value: true
          Tip_Gripper_Idol:
            Alpha: 1
            Show Axes: false
            Show Trail: false
            Value: true
          Tip_Gripper_Servo:
            Alpha: 1
            Show Axes: false
            Show Trail: false
            Value: true
          base_link:
            Alpha: 1
            Show Axes: false
            Show Trail: false
            Value: true
          odom:
            Alpha: 1
            Show Axes: false
            Show Trail: false
        Loop Animation: true
        Robot Alpha: 0.5
        Robot Color: 150; 50; 150
        Show Robot Collision: false
        Show Robot Visual: true
        Show Trail: false
        State Display Time: 0.05 s
        Trail Step Size: 1
        Trajectory Topic: move_group/display_planned_path
      Planning Metrics:
        Payload: 1
        Show Joint Torques: false
        Show Manipulability: false
        Show Manipulability Index: false
        Show Weight Limit: false
        TextHeight: 0.0799999982
      Planning Request:
        Colliding Link Color: 255; 0; 0
        Goal State Alpha: 1
        Goal State Color: 250; 128; 0
        Interactive Marker Size: 0.200000003
        Joint Violation Color: 255; 0; 255
        Planning Group: arm
        Query Goal State: true
        Query Start State: false
        Show Workspace: false
        Start State Alpha: 1
        Start State Color: 0; 255; 0
      Planning Scene Topic: move_group/monitored_planning_scene
      Robot Description: robot_description
      Scene Geometry:
        Scene Alpha: 1
        Scene Color: 50; 230; 50
        Scene Display Time: 0.200000003
        Show Scene Geometry: true
        Voxel Coloring: Z-Axis
        Voxel Rendering: Occupied Voxels
      Scene Robot:
        Attached Body Color: 150; 50; 150
        Links:
          All Links Enabled: true
          Expand Joint Details: false
          Expand Link Details: false
          Expand Tree: false
          Gripper_Idol_Gear:
            Alpha: 1
            Show Axes: false
            Show Trail: false
            Value: true
          Gripper_Servo_Gear:
            Alpha: 1
            Show Axes: false
            Show Trail: false
            Value: true
          Link Tree Style: Links in Alphabetic Order
          Link_1:
            Alpha: 1
            Show Axes: false
            Show Trail: false
            Value: true
          Link_2:
            Alpha: 1
            Show Axes: false
            Show Trail: false
            Value: true
          Link_3:
            Alpha: 1
            Show Axes: false
            Show Trail: false
            Value: true
          Link_4:
            Alpha: 1
            Show Axes: false
            Show Trail: false
            Value: true
          Link_5:
            Alpha: 1
            Show Axes: false
            Show Trail: false
            Value: true
          Pivot_Arm_Gripper_Idol:
            Alpha: 1
            Show Axes: false
            Show Trail: false
            Value: true
          Pivot_Arm_Gripper_Servo:
            Alpha: 1
            Show Axes: false
            Show Trail: false
            Value: true
          Tip_Gripper_Idol:
            Alpha: 1
            Show Axes: false
            Show Trail: false
            Value: true
          Tip_Gripper_Servo:
            Alpha: 1
            Show Axes: false
            Show Trail: false
            Value: true
          base_link:
            Alpha: 1
            Show Axes: false
            Show Trail: false
            Value: true
          odom:
            Alpha: 1
            Show Axes: false
            Show Trail: false
        Robot Alpha: 0.5
        Show Robot Collision: false
        Show Robot Visual: true
      Value: true
    - Class: rviz/MarkerArray
      Enabled: true
      Marker Topic: rviz_visual_tools
      Name: MarkerArray
      Namespaces:
        {}
      Queue Size: 100
      Value: true
    - Class: rviz/Axes
      Enabled: true
      Length: 0.100000001
      Name: Axes
      Radius: 0.0199999996
      Reference Frame: <Fixed Frame>
      Value: true
  Enabled: true
  Global Options:
    Background Color: 48; 48; 48
    Fixed Frame: base_link
    Frame Rate: 30
  Name: root
  Tools:
    - Class: rviz/Interact
      Hide Inactive Objects: true
    - Class: rviz/MoveCamera
    - Class: rviz/Select
  Value: true
  Views:
    Current:
      Class: rviz/XYOrbit
      Distance: 1.85478854
      Enable Stereo Rendering:
        Stereo Eye Separation: 0.0599999987
        Stereo Focal Distance: 1
        Swap Stereo Eyes: false
        Value: false
      Focal Point:
        X: 0.138867736
        Y: -0.210714757
        Z: 2.23518001e-07
      Focal Shape Fixed Size: true
      Focal Shape Size: 0.0500000007
      Invert Z Axis: false
      Name: Current View
      Near Clip Distance: 0.00999999978
      Pitch: 0.205203816
      Target Frame: base_link
      Value: XYOrbit (rviz)
      Yaw: 2.23356652
    Saved: ~
Window Geometry:
  Displays:
    collapsed: false
  Height: 1148
  Help:
    collapsed: false
  Hide Left Dock: false
  Hide Right Dock: true
  MotionPlanning:
    collapsed: false
  MotionPlanning - Slider:
    collapsed: false
  QMainWindow State: 000000ff00000000fd0000000100000000000002b800000441fc0200000009fb000000100044006900730070006c006100790073010000002200000165000000db00fffffffb00000024005200760069007a00560069007300750061006c0054006f006f006c0073004700750069010000018b0000005a0000003d00fffffffb0000002e004d006f00740069006f006e0050006c0061006e006e0069006e00670020002d00200053006c0069006400650072000000016f000000590000004400fffffffb0000001c004d006f00740069006f006e0050006c0061006e006e0069006e006701000001e90000027a0000018700fffffffb0000000800480065006c00700000000342000000bb0000007000fffffffb0000000a005600690065007700730000000365000000b0000000ac00fffffffb0000000c00430061006d00650072006100000002ff000001610000000000000000fb0000001e004d006f00740069006f006e00200050006c0061006e006e0069006e00670100000374000001890000000000000000fb000000120020002d00200053006c00690064006500720000000000ffffffff0000000000000000000003450000044100000001000000020000000100000002fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
  RvizVisualToolsGui:
    collapsed: false
  Views:
    collapsed: false
  Width: 1537
  X: 388
  Y: 24


================================================
FILE: moveo_moveit_config/launch/moveit_rviz.launch
================================================
<launch>

  <arg name="debug" default="false" />
  <arg unless="$(arg debug)" name="launch_prefix" value="" />
  <arg     if="$(arg debug)" name="launch_prefix" value="gdb --ex run --args" />

  <arg name="config" default="false" />
  <arg unless="$(arg config)" name="command_args" value="" />
  <arg     if="$(arg config)" name="command_args" value="-d $(find moveo_moveit_config)/launch/moveit.rviz" />
  
  <node name="$(anon rviz)" launch-prefix="$(arg launch_prefix)" pkg="rviz" type="rviz" respawn="false"
	args="$(arg command_args)" output="screen">
    <rosparam command="load" file="$(find moveo_moveit_config)/config/kinematics.yaml"/>
  </node>

</launch>


================================================
FILE: moveo_moveit_config/launch/moveo_urdf_moveit_controller_manager.launch.xml
================================================
<launch>

</launch>


================================================
FILE: moveo_moveit_config/launch/moveo_urdf_moveit_sensor_manager.launch.xml
================================================
<launch>

</launch>


================================================
FILE: moveo_moveit_config/launch/ompl_planning_pipeline.launch.xml
================================================
<launch>

  <!-- OMPL Plugin for MoveIt! -->
  <arg name="planning_plugin" value="ompl_interface/OMPLPlanner" />

  <!-- The request adapters (plugins) used when planning with OMPL. 
       ORDER MATTERS -->
  <arg name="planning_adapters" value="default_planner_request_adapters/AddTimeParameterization
				       default_planner_request_adapters/FixWorkspaceBounds
				       default_planner_request_adapters/FixStartStateBounds
				       default_planner_request_adapters/FixStartStateCollision
				       default_planner_request_adapters/FixStartStatePathConstraints" />

  <arg name="start_state_max_bounds_error" value="0.1" />

  <param name="planning_plugin" value="$(arg planning_plugin)" />
  <param name="request_adapters" value="$(arg planning_adapters)" />
  <param name="start_state_max_bounds_error" value="$(arg start_state_max_bounds_error)" />

  <rosparam command="load" file="$(find moveo_moveit_config)/config/ompl_planning.yaml"/>

</launch>


================================================
FILE: moveo_moveit_config/launch/planning_context.launch
================================================
<launch>
  <!-- By default we do not overwrite the URDF. Change the following to true to change the default behavior -->
  <arg name="load_robot_description" default="false"/>

  <!-- The name of the parameter under which the URDF is loaded -->
  <arg name="robot_description" default="robot_description"/>

  <!-- Load universal robot description format (URDF) -->
  <param if="$(arg load_robot_description)" name="$(arg robot_description)" textfile="$(find moveo_urdf)/urdf/moveo_urdf.urdf"/>

  <!-- The semantic description that corresponds to the URDF -->
  <param name="$(arg robot_description)_semantic" textfile="$(find moveo_moveit_config)/config/moveo_urdf.srdf" />
  
  <!-- Load updated joint limits (override information from URDF) -->
  <group ns="$(arg robot_description)_planning">
    <rosparam command="load" file="$(find moveo_moveit_config)/config/joint_limits.yaml"/>
  </group>

  <!-- Load default settings for kinematics; these settings are overridden by settings in a node's namespace -->
  <group ns="$(arg robot_description)_kinematics">
    <rosparam command="load" file="$(find moveo_moveit_config)/config/kinematics.yaml"/>
  </group>
  
</launch>


================================================
FILE: moveo_moveit_config/launch/planning_pipeline.launch.xml
================================================
<launch>

  <!-- This file makes it easy to include different planning pipelines; 
       It is assumed that all planning pipelines are named XXX_planning_pipeline.launch.xml  -->  

  <arg name="pipeline" default="ompl" />

  <include file="$(find moveo_moveit_config)/launch/$(arg pipeline)_planning_pipeline.launch.xml" />

</launch>


================================================
FILE: moveo_moveit_config/launch/run_benchmark_ompl.launch
================================================
<launch>

  <!-- This argument must specify the list of .cfg files to process for benchmarking -->
  <arg name="cfg" />

  <!-- Load URDF -->
  <include file="$(find moveo_moveit_config)/launch/planning_context.launch">
    <arg name="load_robot_description" value="true"/>
  </include>

  <!-- Start the database -->
  <include file="$(find moveo_moveit_config)/launch/warehouse.launch">
    <arg name="moveit_warehouse_database_path" value="moveit_ompl_benchmark_warehouse"/>
  </include>  

  <!-- Start Benchmark Executable -->
  <node name="$(anon moveit_benchmark)" pkg="moveit_ros_benchmarks" type="moveit_run_benchmark" args="$(arg cfg) --benchmark-planners" respawn="false" output="screen">
    <rosparam command="load" file="$(find moveo_moveit_config)/config/kinematics.yaml"/>
    <rosparam command="load" file="$(find moveo_moveit_config)/config/ompl_planning.yaml"/>
  </node>

</launch>


================================================
FILE: moveo_moveit_config/launch/sensor_manager.launch.xml
================================================
<launch>

  <!-- This file makes it easy to include the settings for sensor managers -->  

  <!-- Params for the octomap monitor -->
  <!--  <param name="octomap_frame" type="string" value="some frame in which the robot moves" /> -->
  <param name="octomap_resolution" type="double" value="0.025" />
  <param name="max_range" type="double" value="5.0" />

  <!-- Load the robot specific sensor manager; this sets the moveit_sensor_manager ROS parameter -->
  <arg name="moveit_sensor_manager" default="moveo_urdf" />
  <include file="$(find moveo_moveit_config)/launch/$(arg moveit_sensor_manager)_moveit_sensor_manager.launch.xml" />
  
</launch>


================================================
FILE: moveo_moveit_config/launch/setup_assistant.launch
================================================
<!-- Re-launch the MoveIt Setup Assistant with this configuration package already loaded -->
<launch>

  <!-- Debug Info -->
  <arg name="debug" default="false" />
  <arg unless="$(arg debug)" name="launch_prefix" value="" />
  <arg     if="$(arg debug)" name="launch_prefix" value="gdb --ex run --args" />

  <!-- Run -->
  <node pkg="moveit_setup_assistant" type="moveit_setup_assistant" name="moveit_setup_assistant" 
	args="--config_pkg=moveo_moveit_config"
	launch-prefix="$(arg launch_prefix)"
        output="screen" />

</launch>


================================================
FILE: moveo_moveit_config/launch/trajectory_execution.launch.xml
================================================
<launch>

  <!-- This file makes it easy to include the settings for trajectory execution  -->  

  <!-- Flag indicating whether MoveIt! is allowed to load/unload  or switch controllers -->
  <arg name="moveit_manage_controllers" default="true"/>
  <param name="moveit_manage_controllers" value="$(arg moveit_manage_controllers)"/>

  <!-- When determining the expected duration of a trajectory, this multiplicative factor is applied to get the allowed duration of execution -->
  <param name="trajectory_execution/allowed_execution_duration_scaling" value="1.2"/> <!-- default 1.2 -->
  <!-- Allow more than the expected execution time before triggering a trajectory cancel (applied after scaling) -->
  <param name="trajectory_execution/allowed_goal_duration_margin" value="0.5"/> <!-- default 0.5 -->
  <!-- Allowed joint-value tolerance for validation that trajectory's first point matches current robot state -->
  <param name="trajectory_execution/allowed_start_tolerance" value="0.01"/> <!-- default 0.01 -->
  
  <!-- Load the robot specific controller manager; this sets the moveit_controller_manager ROS parameter -->
  <arg name="moveit_controller_manager" default="moveo_urdf" />
  <include file="$(find moveo_moveit_config)/launch/$(arg moveit_controller_manager)_moveit_controller_manager.launch.xml" />
  
</launch>


================================================
FILE: moveo_moveit_config/launch/warehouse.launch
================================================
<launch>
  
  <!-- The path to the database must be specified -->
  <arg name="moveit_warehouse_database_path" />

  <!-- Load warehouse parameters -->  
  <include file="$(find moveo_moveit_config)/launch/warehouse_settings.launch.xml" />

  <!-- Run the DB server -->
  <node name="$(anon mongo_wrapper_ros)" cwd="ROS_HOME" type="mongo_wrapper_ros.py" pkg="warehouse_ros_mongo">
    <param name="overwrite" value="false"/>
    <param name="database_path" value="$(arg moveit_warehouse_database_path)" />
  </node>

</launch>


================================================
FILE: moveo_moveit_config/launch/warehouse_settings.launch.xml
================================================
<launch>
  <!-- Set the parameters for the warehouse and run the mongodb server. -->

  <!-- The default DB port for moveit (not default MongoDB port to avoid potential conflicts) -->  
  <arg name="moveit_warehouse_port" default="33829" /> 

  <!-- The default DB host for moveit -->
  <arg name="moveit_warehouse_host" default="localhost" /> 
  
  <!-- Set parameters for the warehouse -->
  <param name="warehouse_port" value="$(arg moveit_warehouse_port)"/>
  <param name="warehouse_host" value="$(arg moveit_warehouse_host)"/>
  <param name="warehouse_exec" value="mongod" />
  <param name="warehouse_plugin" value="warehouse_ros_mongo::MongoDatabaseConnection" />

</launch>


================================================
FILE: moveo_moveit_config/package.xml
================================================
<package>

  <name>moveo_moveit_config</name>
  <version>0.3.0</version>
  <description>
     An automatically generated package with all the configuration and launch files for using the moveo_urdf with the MoveIt! Motion Planning Framework
  </description>
  <author email="jesse.weisberg@gmail.com">Jesse Weisberg</author>
  <maintainer email="jesse.weisberg@gmail.com">Jesse Weisberg</maintainer>

  <license>BSD</license>

  <url type="website">http://moveit.ros.org/</url>
  <url type="bugtracker">https://github.com/ros-planning/moveit/issues</url>
  <url type="repository">https://github.com/ros-planning/moveit</url>

  <buildtool_depend>catkin</buildtool_depend>

  <run_depend>moveit_ros_move_group</run_depend>
  <run_depend>moveit_kinematics</run_depend>
  <run_depend>moveit_planners_ompl</run_depend>
  <run_depend>moveit_ros_visualization</run_depend>
  <run_depend>joint_state_publisher</run_depend>
  <run_depend>robot_state_publisher</run_depend>
  <run_depend>xacro</run_depend>
  <!-- This package is referenced in the warehouse launch files, but does not build out of the box at the moment. Commented the dependency until this works. -->
  <!-- <run_depend>warehouse_ros_mongo</run_depend> -->
  <build_depend>moveo_urdf</build_depend>
  <run_depend>moveo_urdf</run_depend>


</package>


================================================
FILE: moveo_urdf/CMakeLists.txt
================================================
cmake_minimum_required(VERSION 2.8.3)

project(moveo_urdf)

find_package(catkin REQUIRED)

catkin_package()

find_package(roslaunch)

foreach(dir config launch meshes urdf)
	install(DIRECTORY ${dir}/
		DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/${dir})
endforeach(dir)


================================================
FILE: moveo_urdf/config/joint_names_move_urdf.yaml
================================================
controller_joint_names: ['Joint_1', 'Joint_2', 'Joint_3', 'Joint_4', 'Joint_5', 'Gripper_Servo_Gear_Joint', 'Gripper_Idol_Gear_Joint', 'Pivot_Arm_Gripper_Servo_Joint', 'Pivot_Arm_Gripper_Idol_Joint', ]


================================================
FILE: moveo_urdf/launch/display.launch
================================================
<launch>
  <arg
    name="model"
    default="$(find moveo_urdf)/urdf/moveo_urdf.urdf" />
  <arg
    name="gui"
    default="true" />
  <param
    name="robot_description"
    textfile="$(arg model)" />
  <param
    name="use_gui"
    value="$(arg gui)" />
  <node
    name="joint_state_publisher"
    pkg="joint_state_publisher"
    type="joint_state_publisher" />
  <node
    name="robot_state_publisher"
    pkg="robot_state_publisher"
    type="robot_state_publisher" />
  <node
    name="rviz"
    pkg="rviz"
    type="rviz"
    args="-d $(find moveo_urdf)/urdf.rviz" />
</launch>


================================================
FILE: moveo_urdf/launch/gazebo.launch
================================================
<launch>

  <!-- these are the arguments you can pass this launch file, for example paused:=true -->
  <arg name="paused" default="true"/>
  <arg name="use_sim_time" default="true"/>
  <arg name="gui" default="true"/>
  <arg name="headless" default="false"/>
  <arg name="debug" default="false"/>
  <arg name="model" default="$(find moveo_urdf)/urdf/moveo_urdf.urdf"/>

  <!-- We resume the logic in empty_world.launch, changing only the name of the world to be launched -->
  <include file="$(find gazebo_ros)/launch/empty_world.launch">
    <arg name="debug" value="$(arg debug)" />
    <arg name="gui" value="$(arg gui)" />
    <arg name="paused" value="$(arg paused)"/>
    <arg name="use_sim_time" value="$(arg use_sim_time)"/>
    <arg name="headless" value="$(arg headless)"/>
  </include>

  <param name="robot_description" command="$(find xacro)/xacro $(arg model)" />

  <!-- push robot_description to factory and spawn robot in gazebo -->
  <node name="urdf_spawner" pkg="gazebo_ros" type="spawn_model"
        args="-z 1.0 -unpause -urdf -model robot -param robot_description" respawn="false" output="screen" />

  <node pkg="robot_state_publisher" type="robot_state_publisher"  name="robot_state_publisher">
    <param name="publish_frequency" type="double" value="30.0" />
  </node>

</launch>


================================================
FILE: moveo_urdf/launch/gazebo_old.launch
================================================
<launch>
  <include
    file="$(find gazebo_ros)/launch/empty_world.launch" />
  <node
    name="tf_footprint_base"
    pkg="tf"
    type="static_transform_publisher"
    args="0 0 0 0 0 0 base_link base_footprint 40" />
  <node
    name="spawn_model"
    pkg="gazebo_ros"
    type="spawn_model"
    args="-file $(find moveo_urdf)/urdf/moveo_urdf.urdf -urdf -model moveo_urdf"
    output="screen" />
  <node
    name="fake_joint_calibration"
    pkg="rostopic"
    type="rostopic"
    args="pub /calibrated std_msgs/Bool true" />
</launch>


================================================
FILE: moveo_urdf/launch/gazebo_sdf.launch
================================================
<launch>
  <include
    file="$(find gazebo_ros)/launch/empty_world.launch" />
  <node
    name="tf_footprint_base"
    pkg="tf"
    type="static_transform_publisher"
    args="0 0 0 0 0 0 base_link base_footprint 40" />
  <node
    name="spawn_model"
    pkg="gazebo_ros"
    type="spawn_model"
    args="-file $(find model_editor_models)/robot5/model.sdf -sdf -model robot5"
    output="screen" />
  <node
    name="fake_joint_calibration"
    pkg="rostopic"
    type="rostopic"
    args="pub /calibrated std_msgs/Bool true" />
</launch>


================================================
FILE: moveo_urdf/meshes/Link_1.STL
================================================
[File too large to display: 10.1 MB]

================================================
FILE: moveo_urdf/meshes/base_link.STL
================================================
[File too large to display: 11.9 MB]

================================================
FILE: moveo_urdf/package.xml
================================================
<package>
  <name>moveo_urdf</name>
  <version>1.0.0</version>
  <description>
    <p>URDF Description package for moveo_urdf</p>
    <p>This package contains configuration data, 3D models and launch files
for moveo_urdf robot</p>
  </description>
  <author>Jesse Weisberg</author>
  <maintainer email="jesse.weisberg@gmail.com">Jesse Weisberg</maintainer>
  <license>BSD</license>
  <buildtool_depend>catkin</buildtool_depend>
  <build_depend>roslaunch</build_depend>
  <run_depend>robot_state_publisher</run_depend>
  <run_depend>rviz</run_depend>
  <run_depend>joint_state_publisher</run_depend>
  <run_depend>gazebo</run_depend>
  <export>
    <architecture_independent />
  </export>
</package>


================================================
FILE: moveo_urdf/urdf/moveo_urdf.urdf
================================================
<?xml version='1.0'?>
<robot
  name="moveo_urdf">	

  <link
    name="base_link">
    <visual>
      <origin
        xyz="0 0 .13"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf/meshes/base_link.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.75294 0.75294 0.75294 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 .13"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf/meshes/base_link_col.STL" />
      </geometry>
    </collision>
  </link>

  <!-- dummy link -->
  <link
    name="odom">
    <inertial>
      <origin
        xyz="0.034155 -0.20591 -0.049641"
        rpy="0 0 0" />
      <mass
        value="8.0643" />
      <inertia
        ixx="0.11291"
        ixy="0.00066071"
        ixz="-0.00030472"
        iyy="0.089946"
        iyz="0.011461"
        izz="0.19187" />
    </inertial>
   </link>

  <joint
    name="odom_joint"
    type="fixed">
    <origin
      xyz="0 0 0"
      rpy="0 0 0" />
    <parent
      link="base_link" />
    <child
      link="odom" />
  </joint>





  <link
    name="Link_1">
    <inertial>
      <origin
        xyz="4.0186E-05 0.090634 0.00010221"
        rpy="0 0 0" />
      <mass
        value="4.2526" />
      <inertia
        ixx="0.0034071"
        ixy="8.9041E-06"
        ixz="1.179E-07"
        iyy="0.018642"
        iyz="-3.6544E-06"
        izz="0.021987" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf/meshes/Link_1.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.75294 0.75294 0.75294 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf/meshes/Link_1_col.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Joint_1"
    type="revolute">
    <origin
      xyz="0 -0.2425 .13"
      rpy="1.5708 9.5417E-17 2.7766" />
    <parent
      link="base_link" />
    <child
      link="Link_1" />
    <axis
      xyz="0 1 0" />
    <limit
      lower="-1.5707"
      upper="1.5707"
      effort="5"
      velocity="1" />
  </joint>
  <link
    name="Link_2">
    <inertial>
      <origin
        xyz="2.2815E-05 0.0017616 -0.080002"
        rpy="0 0 0" />
      <mass
        value="1.9323" />
      <inertia
        ixx="0.012138"
        ixy="-3.5576E-06"
        ixz="-9.0632E-06"
        iyy="0.011445"
        iyz="0.00014273"
        izz="0.00070271" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf/meshes/Link_2.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.75294 0.75294 0.75294 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf/meshes/Link_2_col.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Joint_2"
    type="revolute">
    <origin
      xyz="0 0.18264 0"
      rpy="-3.1416 0.95134 1.5708" />
    <parent
      link="Link_1" />
    <child
      link="Link_2" />
    <axis
      xyz="0 -1 0" />
    <limit
      lower="-1.5707"
      upper="1.5707"
      effort="5"
      velocity="1" />
  </joint>
  <link
    name="Link_3">
    <inertial>
      <origin
        xyz="0.05095 -0.00039084 -0.00045591"
        rpy="0 0 0" />
      <mass
        value="1.1381" />
      <inertia
        ixx="0.00023572"
        ixy="-3.8425E-06"
        ixz="4.1286E-06"
        iyy="0.00029123"
        iyz="-1.0387E-07"
        izz="0.00052457" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf/meshes/Link_3.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.75294 0.75294 0.75294 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf/meshes/Link_3_col.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Joint_3"
    type="revolute">
    <origin
      xyz="0 0 -0.22112"
      rpy="-3.1416 0.98126 -3.1416" />
    <parent
      link="Link_2" />
    <child
      link="Link_3" />
    <axis
      xyz="0 -1 0" />
    <limit
      lower="-1.5707"
      upper="1.5707"
      effort="5"
      velocity="1" />
  </joint>
  <link
    name="Link_4">
    <inertial>
      <origin
        xyz="-0.0027417 0.0025097 0.012864"
        rpy="0 0 0" />
      <mass
        value="0.62964" />
      <inertia
        ixx="0.0001717"
        ixy="1.3446E-06"
        ixz="8.5164E-06"
        iyy="0.00010506"
        iyz="5.1412E-05"
        izz="7.8944E-05" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf/meshes/Link_4.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.29412 0.29412 0.29412 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf/meshes/Link_4_col.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Joint_4"
    type="revolute">
    <origin
      xyz="0.16988 -0.00099213 0"
      rpy="3.1416 -1.2279 1.5708" />
    <parent
      link="Link_3" />
    <child
      link="Link_4" />
    <axis
      xyz="0.010353 -0.99993 -0.0059382" />
    <limit
      lower="-1.5707"
      upper="1.5707"
      effort="5"
      velocity="1" />
  </joint>
  <link
    name="Link_5">
    <inertial>
      <origin
        xyz="-0.011366 0.00012239 0.0078967"
        rpy="0 0 0" />
      <mass
        value="0.19875" />
      <inertia
        ixx="6.2676E-05"
        ixy="4.2551E-06"
        ixz="4.0215E-05"
        iyy="9.7792E-05"
        iyz="8.5888E-07"
        izz="9.5807E-05" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf/meshes/Link_5.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.79216 0.81961 0.93333 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf/meshes/Link_5_col.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Joint_5"
    type="revolute">
    <origin
      xyz="-0.0021346 0.053041 0.0016936"
      rpy="-1.5639 -0.091135 -0.00062919" />
    <parent
      link="Link_4" />
    <child
      link="Link_5" />
    <axis
      xyz="0 1 0" />
    <limit
      lower="-1.5707"
      upper="1.5707"
      effort="5"
      velocity="1" />
  </joint>
  <link
    name="Gripper_Servo_Gear">
    <inertial>
      <origin
        xyz="-0.0063957 -0.0033021 -0.00082714"
        rpy="0 0 0" />
      <mass
        value="0.0048997" />
      <inertia
        ixx="2.0097E-09"
        ixy="-3.0673E-09"
        ixz="-6.3297E-10"
        iyy="4.9761E-09"
        iyz="-3.9668E-10"
        izz="6.8221E-09" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf/meshes/Gripper_Servo_Gear.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.9098 0.44314 0.031373 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf/meshes/Gripper_Servo_Gear_col.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Gripper_Servo_Gear_Joint"
    type="revolute">
    <origin
      xyz="-0.05013 0.01413 0.041516"
      rpy="0.9321 0.032705 -1.5268" />
    <parent
      link="Link_5" />
    <child
      link="Gripper_Servo_Gear" />
    <axis
      xyz="0 1 0" />
    <limit
      lower="0"
      upper="1.5707"
      effort="5"
      velocity="1" />
  </joint>
  <link
    name="Tip_Gripper_Servo">
    <inertial>
      <origin
        xyz="0.028047 -0.000564 0.0049632"
        rpy="0 0 0" />
      <mass
        value="0.010676" />
      <inertia
        ixx="8.9609E-09"
        ixy="6.3808E-09"
        ixz="-2.6788E-08"
        iyy="1.7457E-07"
        iyz="7.7239E-10"
        izz="1.7361E-07" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf/meshes/Tip_Gripper_Servo.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.6 1 0.27843 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf/meshes/Tip_Gripper_Servo_col.STL" />
      </geometry>
    </collision>
  </link>
  <!-- revolute (not continuous): this joint declares position limits and
       mimics the bounded Gripper_Servo_Gear_Joint; continuous joints ignore
       <limit> lower/upper bounds -->
  <joint
    name="Tip_Gripper_Servo_Joint"
    type="revolute">
    <origin
      xyz="-0.039906 -0.004 -0.0027473"
      rpy="-3.1416 -1.5569 -2.0392E-12" />
    <parent
      link="Gripper_Servo_Gear" />
    <child
      link="Tip_Gripper_Servo" />
    <axis
      xyz="0 1 0" />
    <limit
      lower="0"
      upper="1.3"
      effort="5"
      velocity="1" />
    <mimic joint="Gripper_Servo_Gear_Joint" multiplier="1" offset="0"/> 
  </joint>
  <link
    name="Gripper_Idol_Gear">
    <inertial>
      <origin
        xyz="-0.0061853 0.00086502 -3.5867E-05"
        rpy="0 0 0" />
      <mass
        value="0.0051273" />
      <inertia
        ixx="2.3312E-09"
        ixy="1.1098E-09"
        ixz="-5.1524E-11"
        iyy="8.8855E-09"
        iyz="6.4357E-12"
        izz="1.1216E-08" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf/meshes/Gripper_Idol_Gear.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.058824 0.64706 1 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf/meshes/Gripper_Idol_Gear_col.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Gripper_Idol_Gear_Joint"
    type="revolute">
    <origin
      xyz="-0.052696 -0.01387 0.038065"
      rpy="2.2091 -0.040996 1.626" />
    <parent
      link="Link_5" />
    <child
      link="Gripper_Idol_Gear" />
    <axis
      xyz="0 1 0" />
    <limit
      lower="-1.5707"
      upper="0"
      effort="5"
      velocity="1" />
  </joint>
  <link
    name="Tip_Gripper_Idol">
    <inertial>
      <origin
        xyz="0.0059646 0.00098916 0.027851"
        rpy="0 0 0" />
      <mass
        value="0.010676" />
      <inertia
        ixx="1.6895E-07"
        ixy="-1.6878E-09"
        ixz="-3.8254E-08"
        iyy="1.7457E-07"
        iyz="-6.203E-09"
        izz="1.3622E-08" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf/meshes/Tip_Gripper_Idol.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.79216 0.81961 0.93333 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf/meshes/Tip_Gripper_Idol_col.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Tip_Gripper_Idol_Joint"
    type="revolute">
    <origin
      xyz="-0.039906 -0.000125 -0.0027473"
      rpy="3.1416 0.5236 3.1416" />
    <parent
      link="Gripper_Idol_Gear" />
    <child
      link="Tip_Gripper_Idol" />
    <axis
      xyz="0 1 0" />
    <limit
      lower="-1.5"
      upper="0"
      effort="5"
      velocity="1" />
    <mimic joint="Gripper_Idol_Gear_Joint" multiplier="-1" offset="0"/>
  </joint>
  <link
    name="Pivot_Arm_Gripper_Servo">
    <inertial>
      <origin
        xyz="-0.017611 0.00080352 -1.2115E-11"
        rpy="0 0 0" />
      <mass
        value="0.0018597" />
      <inertia
        ixx="2.3199E-09"
        ixy="3.1597E-09"
        ixz="3.9677E-16"
        iyy="7.8256E-08"
        iyz="-2.0327E-16"
        izz="8.0576E-08" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf/meshes/Pivot_Arm_Gripper_Servo.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.79216 0.81961 0.93333 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf/meshes/Pivot_Arm_Gripper_Servo_col.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Pivot_Arm_Gripper_Servo_Joint"
    type="revolute">
    <origin
      xyz="-0.068745 0.00713 0.05"
      rpy="0.93142 -0.0083007 -1.582" />
    <parent
      link="Link_5" />
    <child
      link="Pivot_Arm_Gripper_Servo" />
    <axis
      xyz="0 1 0" />
    <limit
      lower="-1.5707"
      upper="1.5707"
      effort="5"
      velocity="1" />
    <mimic joint="Gripper_Servo_Gear_Joint" multiplier="1" offset="0"/> 
  </joint>
  <link
    name="Pivot_Arm_Gripper_Idol">
    <inertial>
      <origin
        xyz="0.017611 0.00067852 -1.3978E-11"
        rpy="0 0 0" />
      <mass
        value="0.0018597" />
      <inertia
        ixx="2.3199E-09"
        ixy="-3.1597E-09"
        ixz="-4.5779E-16"
        iyy="7.8256E-08"
        iyz="-2.26E-16"
        izz="8.0576E-08" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf/meshes/Pivot_Arm_Gripper_Idol.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.79216 0.81961 0.93333 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf/meshes/Pivot_Arm_Gripper_Idol_col.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Pivot_Arm_Gripper_Idol_Joint"
    type="revolute">
    <origin
      xyz="-0.06867 -0.00687 0.0501"
      rpy="0.93138 1.7645E-12 -1.5708" />
    <parent
      link="Link_5" />
    <child
      link="Pivot_Arm_Gripper_Idol" />
    <axis
      xyz="0 1 0" />
    <limit
      lower="-1.5707"
      upper="1.5707"
      effort="5"
      velocity="1" />
    <mimic joint="Gripper_Idol_Gear_Joint" multiplier="1" offset="0"/> 
  </joint>
  <!--
  Gazebo mimic-joint plugins (roboticsgroup_gazebo_plugins). Kept disabled,
  but fixed so the block is usable if uncommented: plugin parameters must be
  child elements (not attribute-style text), each plugin needs a unique name,
  and two mimicJoint values were missing their closing quotes.
  <gazebo>
    <plugin name="mimic_pivot_arm_idol" filename="libroboticsgroup_gazebo_mimic_joint_plugin.so">
      <joint>Gripper_Idol_Gear_Joint</joint>
      <mimicJoint>Pivot_Arm_Gripper_Idol_Joint</mimicJoint>
      <multiplier>1</multiplier>
      <offset>0</offset>
    </plugin>
    <plugin name="mimic_pivot_arm_servo" filename="libroboticsgroup_gazebo_mimic_joint_plugin.so">
      <joint>Gripper_Servo_Gear_Joint</joint>
      <mimicJoint>Pivot_Arm_Gripper_Servo_Joint</mimicJoint>
      <multiplier>1</multiplier>
      <offset>0</offset>
    </plugin>
    <plugin name="mimic_tip_gripper_servo" filename="libroboticsgroup_gazebo_mimic_joint_plugin.so">
      <joint>Gripper_Servo_Gear_Joint</joint>
      <mimicJoint>Tip_Gripper_Servo_Joint</mimicJoint>
      <multiplier>1</multiplier>
      <offset>0</offset>
    </plugin>
    <plugin name="mimic_tip_gripper_idol" filename="libroboticsgroup_gazebo_mimic_joint_plugin.so">
      <joint>Gripper_Idol_Gear_Joint</joint>
      <mimicJoint>Tip_Gripper_Idol_Joint</mimicJoint>
      <multiplier>-1</multiplier>
      <offset>0</offset>
    </plugin>
  </gazebo>
  -->
</robot>


  



================================================
FILE: moveo_urdf/urdf/moveo_urdf_new.urdf
================================================
<!-- NOTE(review): this legacy file references meshes via
     package://moveo_urdf_4 and package://moveo_urdf_5, but this repository
     only contains the moveo_urdf package, so these resource paths will not
     resolve. Confirm whether this file should be removed or its mesh paths
     updated to package://moveo_urdf. -->
<robot
  name="moveo_urdf_5">
    <link
    name="base_link">
    <inertial>
      <origin
        xyz="0.034155 -0.20591 -0.049641"
        rpy="0 0 0" />
      <mass
        value="8.0643" />
      <inertia
        ixx="0.11291"
        ixy="0.00066071"
        ixz="-0.00030472"
        iyy="0.089946"
        iyz="0.011461"
        izz="0.19187" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_4/meshes/base_link.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.75294 0.75294 0.75294 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_4/meshes/base_link.STL" />
      </geometry>
    </collision>
  </link>
  <link
    name="Link_1">
    <inertial>
      <origin
        xyz="4.0186E-05 0.090634 0.00010221"
        rpy="0 0 0" />
      <mass
        value="4.2526" />
      <inertia
        ixx="0.0034071"
        ixy="8.9041E-06"
        ixz="1.179E-07"
        iyy="0.018642"
        iyz="-3.6544E-06"
        izz="0.021987" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_4/meshes/Link_1.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.75294 0.75294 0.75294 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_4/meshes/Link_1.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Joint_1"
    type="revolute">
    <origin
      xyz="0 -0.2425 0"
      rpy="1.5708 9.5417E-17 2.7766" />
    <parent
      link="base_link" />
    <child
      link="Link_1" />
    <axis
      xyz="0 1 0" />
    <limit
      lower="-1"
      upper="1"
      effort="5"
      velocity="1" />
  </joint>
  <link
    name="Link_2">
    <inertial>
      <origin
        xyz="2.2815E-05 0.0017616 -0.080002"
        rpy="0 0 0" />
      <mass
        value="1.9323" />
      <inertia
        ixx="0.012138"
        ixy="-3.5576E-06"
        ixz="-9.0632E-06"
        iyy="0.011445"
        iyz="0.00014273"
        izz="0.00070271" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Link_2.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.75294 0.75294 0.75294 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Link_2.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Joint_2"
    type="revolute">
    <origin
      xyz="0 0.18264 0"
      rpy="-3.1416 0.95134 1.5708" />
    <parent
      link="Link_1" />
    <child
      link="Link_2" />
    <axis
      xyz="0 -1 0" />
    <limit
      lower="-1"
      upper="1"
      effort="5"
      velocity="1" />
  </joint>
  <link
    name="Link_3">
    <inertial>
      <origin
        xyz="0.05095 -0.00039084 -0.00045591"
        rpy="0 0 0" />
      <mass
        value="1.1381" />
      <inertia
        ixx="0.00023572"
        ixy="-3.8425E-06"
        ixz="4.1286E-06"
        iyy="0.00029123"
        iyz="-1.0387E-07"
        izz="0.00052457" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Link_3.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.75294 0.75294 0.75294 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Link_3.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Joint_3"
    type="revolute">
    <origin
      xyz="0 0 -0.22112"
      rpy="-3.1416 0.98126 -3.1416" />
    <parent
      link="Link_2" />
    <child
      link="Link_3" />
    <axis
      xyz="0 -1 0" />
    <limit
      lower="-1"
      upper="1"
      effort="5"
      velocity="1" />
  </joint>
  <link
    name="Link_4">
    <inertial>
      <origin
        xyz="-0.0027417 0.0025097 0.012864"
        rpy="0 0 0" />
      <mass
        value="0.62964" />
      <inertia
        ixx="0.0001717"
        ixy="1.3446E-06"
        ixz="8.5164E-06"
        iyy="0.00010506"
        iyz="5.1412E-05"
        izz="7.8944E-05" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Link_4.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.29412 0.29412 0.29412 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Link_4.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Joint_4"
    type="revolute">
    <origin
      xyz="0.16988 -0.00099213 0"
      rpy="3.1416 -1.2279 1.5708" />
    <parent
      link="Link_3" />
    <child
      link="Link_4" />
    <axis
      xyz="0.010353 -0.99993 -0.0059382" />
    <limit
      lower="-1"
      upper="1"
      effort="5"
      velocity="1" />
  </joint>
  <link
    name="Link_5">
    <inertial>
      <origin
        xyz="-0.011366 0.00012239 0.0078967"
        rpy="0 0 0" />
      <mass
        value="0.19875" />
      <inertia
        ixx="6.2676E-05"
        ixy="4.2551E-06"
        ixz="4.0215E-05"
        iyy="9.7792E-05"
        iyz="8.5888E-07"
        izz="9.5807E-05" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Link_5.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.79216 0.81961 0.93333 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Link_5.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Joint_5"
    type="revolute">
    <origin
      xyz="-0.0021346 0.053041 0.0016936"
      rpy="-1.5639 -0.091135 -0.00062919" />
    <parent
      link="Link_4" />
    <child
      link="Link_5" />
    <axis
      xyz="0 1 0" />
    <limit
      lower="-1"
      upper="1"
      effort="5"
      velocity="1" />
  </joint>
    <link
    name="Gripper_Servo_Gear">
    <inertial>
      <origin
        xyz="-0.0063957 -0.0033021 -0.00082714"
        rpy="0 0 0" />
      <mass
        value="0.0048997" />
      <inertia
        ixx="2.0097E-09"
        ixy="-3.0673E-09"
        ixz="-6.3297E-10"
        iyy="4.9761E-09"
        iyz="-3.9668E-10"
        izz="6.8221E-09" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Gripper_Servo_Gear.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.9098 0.44314 0.031373 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Gripper_Servo_Gear.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Gripper_Servo_Gear_Joint"
    type="revolute">
    <origin
      xyz="-0.05013 0.01413 0.041516"
      rpy="0.9321 0.032705 -1.5268" />
    <parent
      link="Link_5" />
    <child
      link="Gripper_Servo_Gear" />
    <axis
      xyz="0 1 0" />
    <limit
      lower="0"
      upper="1.5707"
      effort="5"
      velocity="1" />
  </joint>
  <link
    name="Tip_Gripper_Servo">
    <inertial>
      <origin
        xyz="0.028047 -0.000564 0.0049632"
        rpy="0 0 0" />
      <mass
        value="0.010676" />
      <inertia
        ixx="8.9609E-09"
        ixy="6.3808E-09"
        ixz="-2.6788E-08"
        iyy="1.7457E-07"
        iyz="7.7239E-10"
        izz="1.7361E-07" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Tip_Gripper_Servo.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.6 1 0.27843 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Tip_Gripper_Servo.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Tip_Gripper_Servo_Joint"
    type="revolute">
    <origin
      xyz="-0.039906 -0.004 -0.0027473"
      rpy="-3.1416 -1.5569 -2.0392E-12" />
    <parent
      link="Gripper_Servo_Gear" />
    <child
      link="Tip_Gripper_Servo" />
    <axis
      xyz="0 1 0" />
    <limit
      lower="0"
      upper="1.3"
      effort="5"
      velocity="1" /> 
  </joint>
  <link
    name="Gripper_Idol_Gear">
    <inertial>
      <origin
        xyz="-0.020173 -0.027941 -0.043405"
        rpy="0 0 0" />
      <mass
        value="0.0051273" />
      <inertia
        ixx="2.3661E-09"
        ixy="1.1076E-09"
        ixz="5.5775E-10"
        iyy="8.8855E-09"
        iyz="-6.98E-11"
        izz="1.1181E-08" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_4/meshes/Gripper_Idol_Gear.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.058824 0.64706 1 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_4/meshes/Gripper_Idol_Gear.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Gripper_Idol_Gear_Joint"
    type="revolute">
    <origin
      xyz="0 0 0.088169"
      rpy="2.2206 -1.7526E-12 1.5708" />
    <parent
      link="Link_5" />
    <child
      link="Gripper_Idol_Gear" />
    <axis
      xyz="0 1 0" />
    <limit
      lower="-1"
      upper="1"
      effort="5"
      velocity="1" />
  </joint>
  <link
    name="Tip_Gripper_Idol">
    <inertial>
      <origin
        xyz="0.07316 -0.036799 0.032473"
        rpy="0 0 0" />
      <mass
        value="0.010676" />
      <inertia
        ixx="1.6298E-07"
        ixy="-2.1098E-09"
        ixz="-4.8536E-08"
        iyy="1.7457E-07"
        iyz="-6.0724E-09"
        izz="1.9597E-08" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_4/meshes/Tip_Gripper_Idol.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.79216 0.81961 0.93333 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_4/meshes/Tip_Gripper_Idol.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Tip_Gripper_Idol_Joint"
    type="fixed">
    <origin
      xyz="0 0.0088579 -0.0067312"
      rpy="-3.1416 0.5236 -3.1416" />
    <parent
      link="Gripper_Idol_Gear" />
    <child
      link="Tip_Gripper_Idol" />
    <axis
      xyz="0 0 0" />
  </joint>
  <link
    name="Pivot_Arm_Gripper_Servo">
    <inertial>
      <origin
        xyz="-0.0246088378758516 -0.039491188677419 0.0548184657290344"
        rpy="0 0 0" />
      <mass
        value="0.00185970821819588" />
      <inertia
        ixx="2.3350521316943E-09"
        ixy="3.15938078443685E-09"
        ixz="-1.08844692176622E-09"
        iyy="7.82558758772591E-08"
        iyz="4.39516056818752E-11"
        izz="8.05606441237718E-08" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_4/meshes/Pivot_Arm_Gripper_Servo.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.792156862745098 0.819607843137255 0.933333333333333 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_4/meshes/Pivot_Arm_Gripper_Servo.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Pivot_Arm_Gripper_Servo_Joint"
    type="revolute">
    <origin
      xyz="0 0 0.1026"
      rpy="0.92098 1.754E-12 -1.5708" />
    <parent
      link="Link_5" />
    <child
      link="Pivot_Arm_Gripper_Servo" />
    <axis
      xyz="0 1 0" />
    <limit
      lower="-1"
      upper="1"
      effort="5"
      velocity="1" />
  </joint>
  <link
    name="Pivot_Arm_Gripper_Idol">
    <inertial>
      <origin
        xyz="0.024736 -0.038928 0.055435"
        rpy="0 0 0" />
      <mass
        value="0.0018597" />
      <inertia
        ixx="2.3203E-09"
        ixy="-3.1597E-09"
        ixz="1.7754E-10"
        iyy="7.8256E-08"
        iyz="7.1682E-12"
        izz="8.0575E-08" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_4/meshes/Pivot_Arm_Gripper_Idol.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.79216 0.81961 0.93333 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_4/meshes/Pivot_Arm_Gripper_Idol.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Pivot_Arm_Gripper_Idol_Joint"
    type="revolute">
    <origin
      xyz="0 0 0.10189"
      rpy="0.92098 -0.0013727 -1.5726" />
    <parent
      link="Link_5" />
    <child
      link="Pivot_Arm_Gripper_Idol" />
    <axis
      xyz="0 1 0" />
    <limit
      lower="-1"
      upper="1"
      effort="5"
      velocity="1" />
  </joint>

</robot>


================================================
FILE: moveo_urdf/urdf/moveo_urdf_og.urdf
================================================
<robot
  name="moveo_urdf_5">
    <link
    name="base_link">
    <inertial>
      <origin
        xyz="0.034155 -0.20591 -0.049641"
        rpy="0 0 0" />
      <mass
        value="8.0643" />
      <inertia
        ixx="0.11291"
        ixy="0.00066071"
        ixz="-0.00030472"
        iyy="0.089946"
        iyz="0.011461"
        izz="0.19187" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_4/meshes/base_link.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.75294 0.75294 0.75294 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_4/meshes/base_link.STL" />
      </geometry>
    </collision>
  </link>
  <link
    name="Link_1">
    <inertial>
      <origin
        xyz="4.0186E-05 0.090634 0.00010221"
        rpy="0 0 0" />
      <mass
        value="4.2526" />
      <inertia
        ixx="0.0034071"
        ixy="8.9041E-06"
        ixz="1.179E-07"
        iyy="0.018642"
        iyz="-3.6544E-06"
        izz="0.021987" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_4/meshes/Link_1.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.75294 0.75294 0.75294 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_4/meshes/Link_1.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Joint_1"
    type="revolute">
    <origin
      xyz="0 -0.2425 0"
      rpy="1.5708 9.5417E-17 2.7766" />
    <parent
      link="base_link" />
    <child
      link="Link_1" />
    <axis
      xyz="0 1 0" />
    <limit
      lower="-1"
      upper="1"
      effort="5"
      velocity="1" />
  </joint>
  <link
    name="Link_2">
    <inertial>
      <origin
        xyz="2.2815E-05 0.0017616 -0.080002"
        rpy="0 0 0" />
      <mass
        value="1.9323" />
      <inertia
        ixx="0.012138"
        ixy="-3.5576E-06"
        ixz="-9.0632E-06"
        iyy="0.011445"
        iyz="0.00014273"
        izz="0.00070271" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Link_2.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.75294 0.75294 0.75294 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Link_2.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Joint_2"
    type="revolute">
    <origin
      xyz="0 0.18264 0"
      rpy="-3.1416 0.95134 1.5708" />
    <parent
      link="Link_1" />
    <child
      link="Link_2" />
    <axis
      xyz="0 -1 0" />
    <limit
      lower="-1"
      upper="1"
      effort="5"
      velocity="1" />
  </joint>
  <link
    name="Link_3">
    <inertial>
      <origin
        xyz="0.05095 -0.00039084 -0.00045591"
        rpy="0 0 0" />
      <mass
        value="1.1381" />
      <inertia
        ixx="0.00023572"
        ixy="-3.8425E-06"
        ixz="4.1286E-06"
        iyy="0.00029123"
        iyz="-1.0387E-07"
        izz="0.00052457" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Link_3.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.75294 0.75294 0.75294 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Link_3.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Joint_3"
    type="revolute">
    <origin
      xyz="0 0 -0.22112"
      rpy="-3.1416 0.98126 -3.1416" />
    <parent
      link="Link_2" />
    <child
      link="Link_3" />
    <axis
      xyz="0 -1 0" />
    <limit
      lower="-1"
      upper="1"
      effort="5"
      velocity="1" />
  </joint>
  <link
    name="Link_4">
    <inertial>
      <origin
        xyz="-0.0027417 0.0025097 0.012864"
        rpy="0 0 0" />
      <mass
        value="0.62964" />
      <inertia
        ixx="0.0001717"
        ixy="1.3446E-06"
        ixz="8.5164E-06"
        iyy="0.00010506"
        iyz="5.1412E-05"
        izz="7.8944E-05" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Link_4.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.29412 0.29412 0.29412 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Link_4.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Joint_4"
    type="revolute">
    <origin
      xyz="0.16988 -0.00099213 0"
      rpy="3.1416 -1.2279 1.5708" />
    <parent
      link="Link_3" />
    <child
      link="Link_4" />
    <axis
      xyz="0.010353 -0.99993 -0.0059382" />
    <limit
      lower="-1"
      upper="1"
      effort="5"
      velocity="1" />
  </joint>
  <link
    name="Link_5">
    <inertial>
      <origin
        xyz="-0.011366 0.00012239 0.0078967"
        rpy="0 0 0" />
      <mass
        value="0.19875" />
      <inertia
        ixx="6.2676E-05"
        ixy="4.2551E-06"
        ixz="4.0215E-05"
        iyy="9.7792E-05"
        iyz="8.5888E-07"
        izz="9.5807E-05" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Link_5.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.79216 0.81961 0.93333 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Link_5.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Joint_5"
    type="revolute">
    <origin
      xyz="-0.0021346 0.053041 0.0016936"
      rpy="-1.5639 -0.091135 -0.00062919" />
    <parent
      link="Link_4" />
    <child
      link="Link_5" />
    <axis
      xyz="0 1 0" />
    <limit
      lower="-1"
      upper="1"
      effort="5"
      velocity="1" />
  </joint>
  <link
    name="Gripper_Servo_Gear">
    <inertial>
      <origin
        xyz="-0.0063957 -0.0033021 -0.00082714"
        rpy="0 0 0" />
      <mass
        value="0.0048997" />
      <inertia
        ixx="2.0097E-09"
        ixy="-3.0673E-09"
        ixz="-6.3297E-10"
        iyy="4.9761E-09"
        iyz="-3.9668E-10"
        izz="6.8221E-09" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Gripper_Servo_Gear.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.9098 0.44314 0.031373 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Gripper_Servo_Gear.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Gripper_Servo_Gear_Joint"
    type="revolute">
    <origin
      xyz="-0.05013 0.01413 0.041516"
      rpy="0.9321 0.032705 -1.5268" />
    <parent
      link="Link_5" />
    <child
      link="Gripper_Servo_Gear" />
    <axis
      xyz="0 1 0" />
    <limit
      lower="0"
      upper="1.5707"
      effort="5"
      velocity="1" />
  </joint>
  <link
    name="Tip_Gripper_Servo">
    <inertial>
      <origin
        xyz="0.028047 -0.000564 0.0049632"
        rpy="0 0 0" />
      <mass
        value="0.010676" />
      <inertia
        ixx="8.9609E-09"
        ixy="6.3808E-09"
        ixz="-2.6788E-08"
        iyy="1.7457E-07"
        iyz="7.7239E-10"
        izz="1.7361E-07" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Tip_Gripper_Servo.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.6 1 0.27843 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Tip_Gripper_Servo.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Tip_Gripper_Servo_Joint"
    type="revolute">
    <origin
      xyz="-0.039906 -0.004 -0.0027473"
      rpy="-3.1416 -1.5569 -2.0392E-12" />
    <parent
      link="Gripper_Servo_Gear" />
    <child
      link="Tip_Gripper_Servo" />
    <axis
      xyz="0 1 0" />
    <limit
      lower="0"
      upper="1.3"
      effort="5"
      velocity="1" />
    <mimic joint="Gripper_Servo_Gear_Joint" multiplier="1" offset="0"/> 
  </joint>
  <link
    name="Gripper_Idol_Gear">
    <inertial>
      <origin
        xyz="-0.0061853 0.00086502 -3.5867E-05"
        rpy="0 0 0" />
      <mass
        value="0.0051273" />
      <inertia
        ixx="2.3312E-09"
        ixy="1.1098E-09"
        ixz="-5.1524E-11"
        iyy="8.8855E-09"
        iyz="6.4357E-12"
        izz="1.1216E-08" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Gripper_Idol_Gear.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.058824 0.64706 1 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Gripper_Idol_Gear.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Gripper_Idol_Gear_Joint"
    type="revolute">
    <origin
      xyz="-0.052696 -0.01387 0.038065"
      rpy="2.2091 -0.040996 1.626" />
    <parent
      link="Link_5" />
    <child
      link="Gripper_Idol_Gear" />
    <axis
      xyz="0 1 0" />
    <limit
      lower="-1.5707"
      upper="0"
      effort="5"
      velocity="1" />
  </joint>
  <link
    name="Tip_Gripper_Idol">
    <inertial>
      <origin
        xyz="0.0059646 0.00098916 0.027851"
        rpy="0 0 0" />
      <mass
        value="0.010676" />
      <inertia
        ixx="1.6895E-07"
        ixy="-1.6878E-09"
        ixz="-3.8254E-08"
        iyy="1.7457E-07"
        iyz="-6.203E-09"
        izz="1.3622E-08" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Tip_Gripper_Idol.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.79216 0.81961 0.93333 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Tip_Gripper_Idol.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Tip_Gripper_Idol_Joint"
    type="revolute">
    <origin
      xyz="-0.039906 -0.000125 -0.0027473"
      rpy="3.1416 0.5236 3.1416" />
    <parent
      link="Gripper_Idol_Gear" />
    <child
      link="Tip_Gripper_Idol" />
    <axis
      xyz="0 1 0" />
    <limit
      lower="-1.5"
      upper="0"
      effort="5"
      velocity="1" />
    <mimic joint="Gripper_Idol_Gear_Joint" multiplier="-1" offset="0"/>
  </joint>
  <link
    name="Pivot_Arm_Gripper_Servo">
    <inertial>
      <origin
        xyz="-0.017611 0.00080352 -1.2115E-11"
        rpy="0 0 0" />
      <mass
        value="0.0018597" />
      <inertia
        ixx="2.3199E-09"
        ixy="3.1597E-09"
        ixz="3.9677E-16"
        iyy="7.8256E-08"
        iyz="-2.0327E-16"
        izz="8.0576E-08" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Pivot_Arm_Gripper_Servo.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.79216 0.81961 0.93333 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Pivot_Arm_Gripper_Servo.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Pivot_Arm_Gripper_Servo_Joint"
    type="revolute">
    <origin
      xyz="-0.068745 0.00713 0.05"
      rpy="0.93142 -0.0083007 -1.582" />
    <parent
      link="Link_5" />
    <child
      link="Pivot_Arm_Gripper_Servo" />
    <axis
      xyz="0 1 0" />
    <limit
      lower="-1"
      upper="1"
      effort="5"
      velocity="1" />
    <mimic joint="Gripper_Servo_Gear_Joint" multiplier="1" offset="0"/> 
  </joint>
  <link
    name="Pivot_Arm_Gripper_Idol">
    <inertial>
      <origin
        xyz="0.017611 0.00067852 -1.3978E-11"
        rpy="0 0 0" />
      <mass
        value="0.0018597" />
      <inertia
        ixx="2.3199E-09"
        ixy="-3.1597E-09"
        ixz="-4.5779E-16"
        iyy="7.8256E-08"
        iyz="-2.26E-16"
        izz="8.0576E-08" />
    </inertial>
    <visual>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Pivot_Arm_Gripper_Idol.STL" />
      </geometry>
      <material
        name="">
        <color
          rgba="0.79216 0.81961 0.93333 1" />
      </material>
    </visual>
    <collision>
      <origin
        xyz="0 0 0"
        rpy="0 0 0" />
      <geometry>
        <mesh
          filename="package://moveo_urdf_5/meshes/Pivot_Arm_Gripper_Idol.STL" />
      </geometry>
    </collision>
  </link>
  <joint
    name="Pivot_Arm_Gripper_Idol_Joint"
    type="revolute">
    <origin
      xyz="-0.06867 -0.00687 0.0501"
      rpy="0.93138 1.7645E-12 -1.5708" />
    <parent
      link="Link_5" />
    <child
      link="Pivot_Arm_Gripper_Idol" />
    <axis
      xyz="0 1 0" />
    <limit
      lower="-1"
      upper="1"
      effort="5"
      velocity="1" />
    <mimic joint="Gripper_Idol_Gear_Joint" multiplier="1" offset="0"/> 
  </joint>

  <gazebo>
    <!-- Mimic-joint plugins (roboticsgroup_gazebo_plugins): parameters must be
         child elements, and each plugin name must be unique within the model. -->
    <plugin name="mimic_pivot_arm_gripper_idol" filename="libroboticsgroup_gazebo_mimic_joint_plugin.so">
      <joint>Gripper_Idol_Gear_Joint</joint>
      <mimicJoint>Pivot_Arm_Gripper_Idol_Joint</mimicJoint>
      <multiplier>1</multiplier>
      <offset>0</offset>
    </plugin>
    <plugin name="mimic_pivot_arm_gripper_servo" filename="libroboticsgroup_gazebo_mimic_joint_plugin.so">
      <joint>Gripper_Servo_Gear_Joint</joint>
      <mimicJoint>Pivot_Arm_Gripper_Servo_Joint</mimicJoint>
      <multiplier>1</multiplier>
      <offset>0</offset>
    </plugin>
    <plugin name="mimic_tip_gripper_servo" filename="libroboticsgroup_gazebo_mimic_joint_plugin.so">
      <joint>Gripper_Servo_Gear_Joint</joint>
      <mimicJoint>Tip_Gripper_Servo_Joint</mimicJoint>
      <multiplier>1</multiplier>
      <offset>0</offset>
    </plugin>
    <plugin name="mimic_tip_gripper_idol" filename="libroboticsgroup_gazebo_mimic_joint_plugin.so">
      <joint>Gripper_Idol_Gear_Joint</joint>
      <mimicJoint>Tip_Gripper_Idol_Joint</mimicJoint>
      <multiplier>-1</multiplier>
      <offset>0</offset>
    </plugin>
  </gazebo>
</robot>


  



================================================
FILE: object_detector_app/LICENSE
================================================
MIT License

Copyright (c) 2017 Dat Tran

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

================================================
FILE: object_detector_app/README.md
================================================
# Object-Detector-App

A real-time object recognition application using [Google's TensorFlow Object Detection API](https://github.com/tensorflow/models/tree/master/research/object_detection) and [OpenCV](http://opencv.org/).

## Getting Started
1. `conda env create -f environment.yml`
2. `python object_detection_app.py`
    Optional arguments (default value):
    * Device index of the camera `--source=0`
    * Width of the frames in the video stream `--width=480`
    * Height of the frames in the video stream `--height=360`
    * Number of workers `--num-workers=2`
    * Size of the queue `--queue-size=5`

## Tests
```
pytest -vs utils/
```

## Requirements
- [Anaconda / Python 3.5](https://www.continuum.io/downloads)
- [TensorFlow 1.2](https://www.tensorflow.org/)
- [OpenCV 3.0](http://opencv.org/)

## Notes
- OpenCV 3.1 might crash on OSX after a while, so that's why I had to switch to version 3.0. See open issue and solution [here](https://github.com/opencv/opencv/issues/5874).
- Moving the `.read()` part of the video stream into multiple child processes did not work. However, it was possible to move it to a separate thread.

## Copyright

See [LICENSE](LICENSE) for details.
Copyright (c) 2017 [Dat Tran](http://www.dat-tran.com/).


================================================
FILE: object_detector_app/__init__.py
================================================
from . utils import *
from . object_detection import *
from . object_detection_multithreading import *


================================================
FILE: object_detector_app/environment.yml
================================================
name: object-detection
channels: !!python/tuple
- menpo
- defaults
dependencies:
- cycler=0.10.0=py35_0
- freetype=2.5.5=2
- icu=54.1=0
- jbig=2.1=0
- jlaura::opencv3=3.0.0=py35_0
- jpeg=9b=0
- libpng=1.6.27=0
- libtiff=4.0.6=3
- matplotlib=2.0.2=np113py35_0
- menpo::tbb=4.3_20141023=0
- mkl=2017.0.1=0
- numpy=1.13.0=py35_0
- olefile=0.44=py35_0
- openssl=1.0.2l=0
- pillow=4.1.1=py35_0
- pip=9.0.1=py35_1
- py=1.4.34=py35_0
- pyparsing=2.2.0=py35_0
- pyqt=5.6.0=py35_2
- pytest=3.2.1=py35_0
- python=3.5.3=1
- python-dateutil=2.6.1=py35_0
- pytz=2017.2=py35_0
- qt=5.6.2=2
- readline=6.2=2
- setuptools=27.2.0=py35_0
- sip=4.18=py35_0
- six=1.10.0=py35_0
- sqlite=3.13.0=0
- tk=8.5.18=0
- wheel=0.29.0=py35_0
- xz=5.2.2=1
- zlib=1.2.8=3
- pip:
  - backports.weakref==1.0rc1
  - bleach==1.5.0
  - html5lib==0.9999999
  - markdown==2.2.0
  - protobuf==3.3.0
  - tensorflow==1.2.0
  - werkzeug==0.12.2
prefix: /Users/datitran/anaconda/envs/object-detection



================================================
FILE: object_detector_app/object_detection/BUILD
================================================
# Tensorflow Object Detection API: main runnables.

package(
    default_visibility = ["//visibility:public"],
)

licenses(["notice"])

# Apache 2.0

py_binary(
    name = "train",
    srcs = [
        "train.py",
    ],
    deps = [
        ":trainer",
        "//tensorflow",
        "//tensorflow_models/object_detection/builders:input_reader_builder",
        "//tensorflow_models/object_detection/builders:model_builder",
        "//tensorflow_models/object_detection/protos:input_reader_py_pb2",
        "//tensorflow_models/object_detection/protos:model_py_pb2",
        "//tensorflow_models/object_detection/protos:pipeline_py_pb2",
        "//tensorflow_models/object_detection/protos:train_py_pb2",
    ],
)

py_library(
    name = "trainer",
    srcs = ["trainer.py"],
    deps = [
        "//tensorflow",
        "//tensorflow_models/object_detection/builders:optimizer_builder",
        "//tensorflow_models/object_detection/builders:preprocessor_builder",
        "//tensorflow_models/object_detection/core:batcher",
        "//tensorflow_models/object_detection/core:standard_fields",
        "//tensorflow_models/object_detection/utils:ops",
        "//tensorflow_models/object_detection/utils:variables_helper",
        "//tensorflow_models/slim:model_deploy",
    ],
)

py_test(
    name = "trainer_test",
    srcs = ["trainer_test.py"],
    deps = [
        ":trainer",
        "//tensorflow",
        "//tensorflow_models/object_detection/core:losses",
        "//tensorflow_models/object_detection/core:model",
        "//tensorflow_models/object_detection/core:standard_fields",
        "//tensorflow_models/object_detection/protos:train_py_pb2",
    ],
)

py_library(
    name = "eval_util",
    srcs = [
        "eval_util.py",
    ],
    deps = [
        "//tensorflow",
        "//tensorflow_models/object_detection/utils:label_map_util",
        "//tensorflow_models/object_detection/utils:object_detection_evaluation",
        "//tensorflow_models/object_detection/utils:visualization_utils",
    ],
)

py_library(
    name = "evaluator",
    srcs = ["evaluator.py"],
    deps = [
        "//tensorflow",
        "//tensorflow_models/object_detection:eval_util",
        "//tensorflow_models/object_detection/core:box_list",
        "//tensorflow_models/object_detection/core:box_list_ops",
        "//tensorflow_models/object_detection/core:prefetcher",
        "//tensorflow_models/object_detection/core:standard_fields",
        "//tensorflow_models/object_detection/protos:eval_py_pb2",
    ],
)

py_binary(
    name = "eval",
    srcs = [
        "eval.py",
    ],
    deps = [
        ":evaluator",
        "//tensorflow",
        "//tensorflow_models/object_detection/builders:input_reader_builder",
        "//tensorflow_models/object_detection/builders:model_builder",
        "//tensorflow_models/object_detection/protos:eval_py_pb2",
        "//tensorflow_models/object_detection/protos:input_reader_py_pb2",
        "//tensorflow_models/object_detection/protos:model_py_pb2",
        "//tensorflow_models/object_detection/protos:pipeline_py_pb2",
        "//tensorflow_models/object_detection/utils:label_map_util",
    ],
)

py_library(
    name = "exporter",
    srcs = [
        "exporter.py",
    ],
    deps = [
        "//tensorflow",
        "//tensorflow/python/tools:freeze_graph_lib",
        "//tensorflow_models/object_detection/builders:model_builder",
        "//tensorflow_models/object_detection/core:standard_fields",
        "//tensorflow_models/object_detection/data_decoders:tf_example_decoder",
    ],
)

py_test(
    name = "exporter_test",
    srcs = [
        "exporter_test.py",
    ],
    deps = [
        ":exporter",
        "//tensorflow",
        "//tensorflow_models/object_detection/builders:model_builder",
        "//tensorflow_models/object_detection/core:model",
        "//tensorflow_models/object_detection/protos:pipeline_py_pb2",
    ],
)

py_binary(
    name = "export_inference_graph",
    srcs = [
        "export_inference_graph.py",
    ],
    deps = [
        ":exporter",
        "//tensorflow",
        "//tensorflow_models/object_detection/protos:pipeline_py_pb2",
    ],
)

py_binary(
    name = "create_pascal_tf_record",
    srcs = [
        "create_pascal_tf_record.py",
    ],
    deps = [
        "//third_party/py/PIL:pil",
        "//third_party/py/lxml",
        "//tensorflow",
        "//tensorflow_models/object_detection/utils:dataset_util",
        "//tensorflow_models/object_detection/utils:label_map_util",
    ],
)

py_test(
    name = "create_pascal_tf_record_test",
    srcs = [
        "create_pascal_tf_record_test.py",
    ],
    deps = [
        ":create_pascal_tf_record",
        "//tensorflow",
    ],
)

py_binary(
    name = "create_pet_tf_record",
    srcs = [
        "create_pet_tf_record.py",
    ],
    deps = [
        "//third_party/py/PIL:pil",
        "//third_party/py/lxml",
        "//tensorflow",
        "//tensorflow_models/object_detection/utils:dataset_util",
        "//tensorflow_models/object_detection/utils:label_map_util",
    ],
)


================================================
FILE: object_detector_app/object_detection/CONTRIBUTING.md
================================================
# Contributing to the Tensorflow Object Detection API

Patches to Tensorflow Object Detection API are welcome!

We require contributors to fill out either the individual or corporate
Contributor License Agreement (CLA).

  * If you are an individual writing original source code and you're sure you own the intellectual property, then you'll need to sign an [individual CLA](http://code.google.com/legal/individual-cla-v1.0.html).
  * If you work for a company that wants to allow you to contribute your work, then you'll need to sign a [corporate CLA](http://code.google.com/legal/corporate-cla-v1.0.html).

Please follow the
[Tensorflow contributing guidelines](https://github.com/tensorflow/tensorflow/blob/master/CONTRIBUTING.md)
when submitting pull requests.


================================================
FILE: object_detector_app/object_detection/README.md
================================================
# Tensorflow Object Detection API
Creating accurate machine learning models capable of localizing and identifying
multiple objects in a single image remains a core challenge in computer vision.
The TensorFlow Object Detection API is an open source framework built on top of
TensorFlow that makes it easy to construct, train and deploy object detection
models.  At Google we’ve certainly found this codebase to be useful for our
computer vision needs, and we hope that you will as well.
<p align="center">
  <img src="g3doc/img/kites_detections_output.jpg" width=676 height=450>
</p>
Contributions to the codebase are welcome and we would love to hear back from
you if you find this API useful.  Finally if you use the Tensorflow Object
Detection API for a research publication, please consider citing:

```
"Speed/accuracy trade-offs for modern convolutional object detectors."
Huang J, Rathod V, Sun C, Zhu M, Korattikara A, Fathi A, Fischer I, Wojna Z,
Song Y, Guadarrama S, Murphy K, CVPR 2017
```
\[[link](https://arxiv.org/abs/1611.10012)\]\[[bibtex](
https://scholar.googleusercontent.com/scholar.bib?q=info:l291WsrB-hQJ:scholar.google.com/&output=citation&scisig=AAGBfm0AAAAAWUIIlnPZ_L9jxvPwcC49kDlELtaeIyU-&scisf=4&ct=citation&cd=-1&hl=en&scfhb=1)\]

## Maintainers

* Jonathan Huang, github: [jch1](https://github.com/jch1)
* Vivek Rathod, github: [tombstone](https://github.com/tombstone)
* Derek Chow, github: [derekjchow](https://github.com/derekjchow)
* Chen Sun, github: [jesu9](https://github.com/jesu9)
* Menglong Zhu, github: [dreamdragon](https://github.com/dreamdragon)


## Table of contents

Quick Start:
* <a href='object_detection_tutorial.ipynb'>
      Quick Start: Jupyter notebook for off-the-shelf inference</a><br>
* <a href="g3doc/running_pets.md">Quick Start: Training a pet detector</a><br>

Setup:
* <a href='g3doc/installation.md'>Installation</a><br>
* <a href='g3doc/configuring_jobs.md'>
      Configuring an object detection pipeline</a><br>
* <a href='g3doc/preparing_inputs.md'>Preparing inputs</a><br>

Running:
* <a href='g3doc/running_locally.md'>Running locally</a><br>
* <a href='g3doc/running_on_cloud.md'>Running on the cloud</a><br>

Extras:
* <a href='g3doc/detection_model_zoo.md'>Tensorflow detection model zoo</a><br>
* <a href='g3doc/exporting_models.md'>
      Exporting a trained model for inference</a><br>
* <a href='g3doc/defining_your_own_model.md'>
      Defining your own model architecture</a><br>

## Release information

### June 15, 2017

In addition to our base Tensorflow detection model definitions, this
release includes:

* A selection of trainable detection models, including:
  * Single Shot Multibox Detector (SSD) with MobileNet,
  * SSD with Inception V2,
  * Region-Based Fully Convolutional Networks (R-FCN) with Resnet 101,
  * Faster RCNN with Resnet 101,
  * Faster RCNN with Inception Resnet v2
* Frozen weights (trained on the COCO dataset) for each of the above models to
  be used for out-of-the-box inference purposes.
* A [Jupyter notebook](object_detection_tutorial.ipynb) for performing
  out-of-the-box inference with one of our released models
* Convenient [local training](g3doc/running_locally.md) scripts as well as
  distributed training and evaluation pipelines via
  [Google Cloud](g3doc/running_on_cloud.md).


<b>Thanks to contributors</b>: Jonathan Huang, Vivek Rathod, Derek Chow,
Chen Sun, Menglong Zhu, Matthew Tang, Anoop Korattikara, Alireza Fathi, Ian Fischer, Zbigniew Wojna, Yang Song, Sergio Guadarrama, Jasper Uijlings,
Viacheslav Kovalevskyi, Kevin Murphy


================================================
FILE: object_detector_app/object_detection/__init__.py
================================================
# Package initializer for `object_detection`.  `from . import *` imports the
# names listed in this package's __all__, if one is defined; with no __all__
# visible here it is effectively a no-op -- NOTE(review): presumably intended
# to re-export submodules; confirm against the package's __all__ definition.
from . import *



================================================
FILE: object_detector_app/object_detection/anchor_generators/BUILD
================================================
# Tensorflow Object Detection API: Anchor Generator implementations.

package(
    default_visibility = ["//visibility:public"],
)

licenses(["notice"])

# Apache 2.0 (license comment for the `licenses` declaration above).

# Single-feature-map grid anchor generator as used by Faster R-CNN.
py_library(
    name = "grid_anchor_generator",
    srcs = [
        "grid_anchor_generator.py",
    ],
    deps = [
        "//tensorflow",
        "//tensorflow_models/object_detection/core:anchor_generator",
        "//tensorflow_models/object_detection/core:box_list",
        "//tensorflow_models/object_detection/utils:ops",
    ],
)

# Unit tests for :grid_anchor_generator.
py_test(
    name = "grid_anchor_generator_test",
    srcs = [
        "grid_anchor_generator_test.py",
    ],
    deps = [
        ":grid_anchor_generator",
        "//tensorflow",
    ],
)

# Multi-feature-map anchor generator (SSD-style), built on top of
# :grid_anchor_generator.
py_library(
    name = "multiple_grid_anchor_generator",
    srcs = [
        "multiple_grid_anchor_generator.py",
    ],
    deps = [
        ":grid_anchor_generator",
        "//tensorflow",
        "//tensorflow_models/object_detection/core:anchor_generator",
        "//tensorflow_models/object_detection/core:box_list_ops",
    ],
)

# Unit tests for :multiple_grid_anchor_generator.
py_test(
    name = "multiple_grid_anchor_generator_test",
    srcs = [
        "multiple_grid_anchor_generator_test.py",
    ],
    deps = [
        ":multiple_grid_anchor_generator",
        "//third_party/py/numpy",
    ],
)


================================================
FILE: object_detector_app/object_detection/anchor_generators/__init__.py
================================================


================================================
FILE: object_detector_app/object_detection/anchor_generators/grid_anchor_generator.py
================================================
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

"""Generates grid anchors on the fly as used in Faster RCNN.

Generates grid anchors on the fly as described in:
"Faster R-CNN: Towards Real-Time Object Detection with Region Proposal Networks"
Shaoqing Ren, Kaiming He, Ross Girshick, and Jian Sun.
"""

import tensorflow as tf

from object_detection.core import anchor_generator
from object_detection.core import box_list
from object_detection.utils import ops


class GridAnchorGenerator(anchor_generator.AnchorGenerator):
  """Generates a grid of anchors at given scales and aspect ratios."""

  def __init__(self,
               scales=(0.5, 1.0, 2.0),
               aspect_ratios=(0.5, 1.0, 2.0),
               base_anchor_size=None,
               anchor_stride=None,
               anchor_offset=None):
    """Constructs a GridAnchorGenerator.

    Args:
      scales: a list of (float) scales, default=(0.5, 1.0, 2.0)
      aspect_ratios: a list of (float) aspect ratios, default=(0.5, 1.0, 2.0)
      base_anchor_size: base anchor size as height, width (
                        (length-2 float32 list, default=[256, 256])
      anchor_stride: difference in centers between base anchors for adjacent
                     grid positions (length-2 float32 list, default=[16, 16])
      anchor_offset: center of the anchor with scale and aspect ratio 1 for the
                     upper left element of the grid, this should be zero for
                     feature networks with only VALID padding and even receptive
                     field size, but may need additional calculation if other
                     padding is used (length-2 float32 tensor, default=[0, 0])
    """
    # Handle argument defaults.  The three geometric parameters are converted
    # to float32 constant tensors so they combine with tensor arithmetic in
    # tile_anchors().
    if base_anchor_size is None:
      base_anchor_size = [256, 256]
    base_anchor_size = tf.constant(base_anchor_size, tf.float32)
    if anchor_stride is None:
      anchor_stride = [16, 16]
    anchor_stride = tf.constant(anchor_stride, dtype=tf.float32)
    if anchor_offset is None:
      anchor_offset = [0, 0]
    anchor_offset = tf.constant(anchor_offset, dtype=tf.float32)

    self._scales = scales
    self._aspect_ratios = aspect_ratios
    self._base_anchor_size = base_anchor_size
    self._anchor_stride = anchor_stride
    self._anchor_offset = anchor_offset

  def name_scope(self):
    # Name scope under which anchor-generation ops are grouped.
    return 'GridAnchorGenerator'

  def num_anchors_per_location(self):
    """Returns the number of anchors per spatial location.

    Returns:
      a list of integers, one for each expected feature map to be passed to
      the `generate` function.
    """
    # One anchor per (scale, aspect_ratio) combination; this generator handles
    # exactly one feature map, hence a single-element list.
    return [len(self._scales) * len(self._aspect_ratios)]

  def _generate(self, feature_map_shape_list):
    """Generates a collection of bounding boxes to be used as anchors.

    Args:
      feature_map_shape_list: list of pairs of convnet layer resolutions in the
        format [(height_0, width_0)].  For example, setting
        feature_map_shape_list=[(8, 8)] asks for anchors that correspond
        to an 8x8 layer.  For this anchor generator, only lists of length 1 are
        allowed.

    Returns:
      boxes: a BoxList holding a collection of N anchor boxes
    Raises:
      ValueError: if feature_map_shape_list is not a list of length 1.
      ValueError: if feature_map_shape_list does not consist of pairs of
        integers.
    """
    if not (isinstance(feature_map_shape_list, list)
            and len(feature_map_shape_list) == 1):
      raise ValueError('feature_map_shape_list must be a list of length 1.')
    if not all([isinstance(list_item, tuple) and len(list_item) == 2
                for list_item in feature_map_shape_list]):
      raise ValueError('feature_map_shape_list must be a list of pairs.')
    grid_height, grid_width = feature_map_shape_list[0]
    # Cross product of scales and aspect ratios, flattened so tile_anchors()
    # receives one (scale, aspect_ratio) entry per basis box.
    scales_grid, aspect_ratios_grid = ops.meshgrid(self._scales,
                                                   self._aspect_ratios)
    scales_grid = tf.reshape(scales_grid, [-1])
    aspect_ratios_grid = tf.reshape(aspect_ratios_grid, [-1])
    return tile_anchors(grid_height,
                        grid_width,
                        scales_grid,
                        aspect_ratios_grid,
                        self._base_anchor_size,
                        self._anchor_stride,
                        self._anchor_offset)


def tile_anchors(grid_height,
                 grid_width,
                 scales,
                 aspect_ratios,
                 base_anchor_size,
                 anchor_stride,
                 anchor_offset):
  """Tiles a basis set of anchor boxes over a regular grid in image space.

  The basis set is defined elementwise by `scales` and `aspect_ratios`
  (which must have equal length): basis box i has height
  scales[i] / sqrt(aspect_ratios[i]) * base_anchor_size[0] and width
  scales[i] * sqrt(aspect_ratios[i]) * base_anchor_size[1].  Every basis box
  is then centered at each of the grid_height x grid_width grid points, whose
  coordinates are anchor_offset + index * anchor_stride along each axis.

  Args:
    grid_height: size of the grid in the y direction (int or int scalar
      tensor).
    grid_width: size of the grid in the x direction (int or int scalar
      tensor).
    scales: a 1-d float tensor with the scale of each basis box.
    aspect_ratios: a 1-d float tensor with the aspect ratio of each basis
      box; must have the same length as `scales`.
    base_anchor_size: base anchor size as [height, width] (float tensor of
      shape [2]).
    anchor_stride: [y, x] distance between the centers of adjacent grid
      positions (float tensor of shape [2]).
    anchor_offset: [y, x] center of the anchor with scale and aspect ratio 1
      at the upper-left grid position (float tensor of shape [2]).

  Returns:
    a BoxList holding grid_height * grid_width * len(scales) anchor boxes in
    corner representation.
  """
  # Dimensions of each basis box before tiling.
  sqrt_ratios = tf.sqrt(aspect_ratios)
  box_heights = scales / sqrt_ratios * base_anchor_size[0]
  box_widths = scales * sqrt_ratios * base_anchor_size[1]

  # Center coordinates of every grid point along each axis.
  center_ys = (tf.to_float(tf.range(grid_height)) * anchor_stride[0]
               + anchor_offset[0])
  center_xs = (tf.to_float(tf.range(grid_width)) * anchor_stride[1]
               + anchor_offset[1])
  center_xs, center_ys = ops.meshgrid(center_xs, center_ys)

  # Pair every basis box with every grid center, then flatten to [N, 2]
  # center and size tensors.
  widths_grid, centers_x_grid = ops.meshgrid(box_widths, center_xs)
  heights_grid, centers_y_grid = ops.meshgrid(box_heights, center_ys)
  flat_centers = tf.reshape(
      tf.stack([centers_y_grid, centers_x_grid], axis=3), [-1, 2])
  flat_sizes = tf.reshape(
      tf.stack([heights_grid, widths_grid], axis=3), [-1, 2])
  return box_list.BoxList(
      _center_size_bbox_to_corners_bbox(flat_centers, flat_sizes))


def _center_size_bbox_to_corners_bbox(centers, sizes):
  """Converts boxes from center-size form to corner form.

  Args:
    centers: a tensor with shape [N, 2] holding [center_y, center_x] per box.
    sizes: a tensor with shape [N, 2] holding [height, width] per box.

  Returns:
    corners: a tensor with shape [N, 4] holding [ymin, xmin, ymax, xmax]
      per box.
  """
  half_sizes = .5 * sizes
  min_corners = centers - half_sizes
  max_corners = centers + half_sizes
  return tf.concat([min_corners, max_corners], 1)


================================================
FILE: object_detector_app/object_detection/anchor_generators/grid_anchor_generator_test.py
================================================
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

"""Tests for object_detection.grid_anchor_generator."""

import tensorflow as tf

from object_detection.anchor_generators import grid_anchor_generator


class GridAnchorGeneratorTest(tf.test.TestCase):

  def test_construct_single_anchor(self):
    """Builds a 1x1 anchor grid to test the size of the output boxes."""
    # 3 scales x 3 aspect ratios -> 9 anchors, all centered at the
    # anchor_offset (7, -3) because the grid has a single point.
    scales = [0.5, 1.0, 2.0]
    aspect_ratios = [0.25, 1.0, 4.0]
    anchor_offset = [7, -3]
    # Expected [ymin, xmin, ymax, xmax] corners for each (scale, ratio) pair,
    # with the default base_anchor_size of [256, 256].
    exp_anchor_corners = [[-121, -35, 135, 29], [-249, -67, 263, 61],
                          [-505, -131, 519, 125], [-57, -67, 71, 61],
                          [-121, -131, 135, 125], [-249, -259, 263, 253],
                          [-25, -131, 39, 125], [-57, -259, 71, 253],
                          [-121, -515, 135, 509]]

    anchor_generator = grid_anchor_generator.GridAnchorGenerator(
        scales, aspect_ratios,
        anchor_offset=anchor_offset)
    anchors = anchor_generator.generate(feature_map_shape_list=[(1, 1)])
    anchor_corners = anchors.get()

    with self.test_session():
      anchor_corners_out = anchor_corners.eval()
      self.assertAllClose(anchor_corners_out, exp_anchor_corners)

  def test_construct_anchor_grid(self):
    # 2x2 grid with 3 scales at a single 1:1 aspect ratio -> 12 anchors.
    base_anchor_size = [10, 10]
    anchor_stride = [19, 19]
    anchor_offset = [0, 0]
    scales = [0.5, 1.0, 2.0]
    aspect_ratios = [1.0]

    # Corners grouped per grid point: scale order varies fastest, then x,
    # then y (matching tile_anchors' output layout).
    exp_anchor_corners = [[-2.5, -2.5, 2.5, 2.5], [-5., -5., 5., 5.],
                          [-10., -10., 10., 10.], [-2.5, 16.5, 2.5, 21.5],
                          [-5., 14., 5, 24], [-10., 9., 10, 29],
                          [16.5, -2.5, 21.5, 2.5], [14., -5., 24, 5],
                          [9., -10., 29, 10], [16.5, 16.5, 21.5, 21.5],
                          [14., 14., 24, 24], [9., 9., 29, 29]]

    anchor_generator = grid_anchor_generator.GridAnchorGenerator(
        scales,
        aspect_ratios,
        base_anchor_size=base_anchor_size,
        anchor_stride=anchor_stride,
        anchor_offset=anchor_offset)

    anchors = anchor_generator.generate(feature_map_shape_list=[(2, 2)])
    anchor_corners = anchors.get()

    with self.test_session():
      anchor_corners_out = anchor_corners.eval()
      self.assertAllClose(anchor_corners_out, exp_anchor_corners)


# Run the test suite when this file is executed directly.
if __name__ == '__main__':
  tf.test.main()


================================================
FILE: object_detector_app/object_detection/anchor_generators/multiple_grid_anchor_generator.py
================================================
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

"""Generates grid anchors on the fly corresponding to multiple CNN layers.

Generates grid anchors on the fly corresponding to multiple CNN layers as
described in:
"SSD: Single Shot MultiBox Detector"
Wei Liu, Dragomir Anguelov, Dumitru Erhan, Christian Szegedy, Scott Reed,
Cheng-Yang Fu, Alexander C. Berg
(see Section 2.2: Choosing scales and aspect ratios for default boxes)
"""

import numpy as np

import tensorflow as tf

from object_detection.anchor_generators import grid_anchor_generator
from object_detection.core import anchor_generator
from object_detection.core import box_list_ops


class MultipleGridAnchorGenerator(anchor_generator.AnchorGenerator):
  """Generate a grid of anchors for multiple CNN layers."""

  def __init__(self,
               box_specs_list,
               base_anchor_size=None,
               clip_window=None):
    """Constructs a MultipleGridAnchorGenerator.

    To construct anchors, at multiple grid resolutions, one must provide a
    list of feature_map_shape_list (e.g., [(8, 8), (4, 4)]), and for each grid
    size, a corresponding list of (scale, aspect ratio) box specifications.

    For example:
    box_specs_list = [[(.1, 1.0), (.1, 2.0)],  # for 8x8 grid
                      [(.2, 1.0), (.3, 1.0), (.2, 2.0)]]  # for 4x4 grid

    To support the fully convolutional setting, we pass grid sizes in at
    generation time, while scale and aspect ratios are fixed at construction
    time.

    Args:
      box_specs_list: list of list of (scale, aspect ratio) pairs with the
        outside list having the same number of entries as feature_map_shape_list
        (which is passed in at generation time).
      base_anchor_size: base anchor size as [height, width]
                        (length-2 float tensor, default=[256, 256]).
      clip_window: a tensor of shape [4] specifying a window to which all
        anchors should be clipped. If clip_window is None, then no clipping
        is performed.

    Raises:
      ValueError: if box_specs_list is not a list of list of pairs
      ValueError: if clip_window is not either None or a tensor of shape [4]
    """
    # Validate the outer structure: a list of per-layer lists of box specs.
    if isinstance(box_specs_list, list) and all(
        [isinstance(list_item, list) for list_item in box_specs_list]):
      self._box_specs = box_specs_list
    else:
      raise ValueError('box_specs_list is expected to be a '
                       'list of lists of pairs')
    if base_anchor_size is None:
      base_anchor_size = tf.constant([256, 256], dtype=tf.float32)
    self._base_anchor_size = base_anchor_size
    # clip_window, when given, must be a rank-1 tensor with 4 elements.
    # NOTE(review): its ordering appears to be [ymin, xmin, ymax, xmax], based
    # on the [im_height, im_width, im_height, im_width] scaling in _generate;
    # confirm against box_list_ops.clip_to_window.
    if clip_window is not None and clip_window.get_shape().as_list() != [4]:
      raise ValueError('clip_window must either be None or a shape [4] tensor')
    self._clip_window = clip_window
    # Split each layer's (scale, aspect_ratio) pairs into parallel per-layer
    # tuples so they can be fed to grid_anchor_generator.tile_anchors.
    self._scales = []
    self._aspect_ratios = []
    for box_spec in self._box_specs:
      if not all([isinstance(entry, tuple) and len(entry) == 2
                  for entry in box_spec]):
        raise ValueError('box_specs_list is expected to be a '
                         'list of lists of pairs')
      scales, aspect_ratios = zip(*box_spec)
      self._scales.append(scales)
      self._aspect_ratios.append(aspect_ratios)

  def name_scope(self):
    # Name scope under which anchor-generation ops are grouped.
    return 'MultipleGridAnchorGenerator'

  def num_anchors_per_location(self):
    """Returns the number of anchors per spatial location.

    Returns:
      a list of integers, one for each expected feature map to be passed to
      the Generate function.
    """
    # One anchor per box spec, reported per layer.
    return [len(box_specs) for box_specs in self._box_specs]

  def _generate(self,
                feature_map_shape_list,
                im_height=1,
                im_width=1,
                anchor_strides=None,
                anchor_offsets=None):
    """Generates a collection of bounding boxes to be used as anchors.

    The number of anchors generated for a single grid with shape MxM where we
    place k boxes over each grid center is k*M^2 and thus the total number of
    anchors is the sum over all grids. In our box_specs_list example
    (see the constructor docstring), we would place two boxes over each grid
    point on an 8x8 grid and three boxes over each grid point on a 4x4 grid and
    thus end up with 2*8^2 + 3*4^2 = 176 anchors in total. The layout of the
    output anchors follows the order of how the grid sizes and box_specs are
    specified (with box_spec index varying the fastest, followed by width
    index, then height index, then grid index).

    Args:
      feature_map_shape_list: list of pairs of convnet layer resolutions in the
        format [(height_0, width_0), (height_1, width_1), ...]. For example,
        setting feature_map_shape_list=[(8, 8), (7, 7)] asks for anchors that
        correspond to an 8x8 layer followed by a 7x7 layer.
      im_height: the height of the image to generate the grid for. If both
        im_height and im_width are 1, the generated anchors default to
        normalized coordinates, otherwise absolute coordinates are used for the
        grid.
      im_width: the width of the image to generate the grid for. If both
        im_height and im_width are 1, the generated anchors default to
        normalized coordinates, otherwise absolute coordinates are used for the
        grid.
      anchor_strides: list of pairs of strides (in y and x directions
        respectively). For example, setting
        anchor_strides=[(.25, .25), (.5, .5)] means that we want the anchors
        corresponding to the first layer to be strided by .25 and those in the
        second layer to be strided by .5 in both y and x directions. By
        default, if anchor_strides=None, then they are set to be the reciprocal
        of the corresponding grid sizes. The pairs can also be specified as
        dynamic tf.int or tf.float numbers, e.g. for variable shape input
        images.
      anchor_offsets: list of pairs of offsets (in y and x directions
        respectively). The offset specifies where we want the center of the
        (0, 0)-th anchor to lie for each layer. For example, setting
        anchor_offsets=[(.125, .125), (.25, .25)]) means that we want the
        (0, 0)-th anchor of the first layer to lie at (.125, .125) in image
        space and likewise that we want the (0, 0)-th anchor of the second
        layer to lie at (.25, .25) in image space. By default, if
        anchor_offsets=None, then they are set to be half of the corresponding
        anchor stride. The pairs can also be specified as dynamic tf.int or
        tf.float numbers, e.g. for variable shape input images.

    Returns:
      boxes: a BoxList holding a collection of N anchor boxes
    Raises:
      ValueError: if feature_map_shape_list, box_specs_list do not have the same
        length.
      ValueError: if feature_map_shape_list does not consist of pairs of
        integers
    """
    if not (isinstance(feature_map_shape_list, list)
            and len(feature_map_shape_list) == len(self._box_specs)):
      raise ValueError('feature_map_shape_list must be a list with the same '
                       'length as self._box_specs')
    if not all([isinstance(list_item, tuple) and len(list_item) == 2
                for list_item in feature_map_shape_list]):
      raise ValueError('feature_map_shape_list must be a list of pairs.')
    # Default stride: one grid cell spans im_size / grid_size along each axis
    # (equals the reciprocal of the grid size when im_height = im_width = 1).
    if not anchor_strides:
      anchor_strides = [(tf.to_float(im_height) / tf.to_float(pair[0]),
                         tf.to_float(im_width) / tf.to_float(pair[1]))
                        for pair in feature_map_shape_list]
    # Default offset: center the (0, 0)-th anchor half a stride into the grid.
    if not anchor_offsets:
      anchor_offsets = [(0.5 * stride[0], 0.5 * stride[1])
                        for stride in anchor_strides]
    # Both per-layer parameter lists must parallel self._box_specs exactly.
    for arg, arg_name in zip([anchor_strides, anchor_offsets],
                             ['anchor_strides', 'anchor_offsets']):
      if not (isinstance(arg, list) and len(arg) == len(self._box_specs)):
        raise ValueError('%s must be a list with the same length '
                         'as self._box_specs' % arg_name)
      if not all([isinstance(list_item, tuple) and len(list_item) == 2
                  for list_item in arg]):
        raise ValueError('%s must be a list of pairs.' % arg_name)

    anchor_grid_list = []
    # Scale the base anchor size by the smaller image dimension so boxes keep
    # their aspect ratios in (possibly non-square) image coordinates.
    min_im_shape = tf.to_float(tf.minimum(im_height, im_width))
    base_anchor_size = min_im_shape * self._base_anchor_size
    # Tile one anchor grid per feature map, then concatenate them in layer
    # order.
    for grid_size, scales, aspect_ratios, stride, offset in zip(
        feature_map_shape_list, self._scales, self._aspect_ratios,
        anchor_strides, anchor_offsets):
      anchor_grid_list.append(
          grid_anchor_generator.tile_anchors(
              grid_height=grid_size[0],
              grid_width=grid_size[1],
              scales=scales,
              aspect_ratios=aspect_ratios,
              base_anchor_size=base_anchor_size,
              anchor_stride=stride,
              anchor_offset=offset))
    concatenated_anchors = box_list_ops.concatenate(anchor_grid_list)
    # Prefer the statically-known anchor count; fall back to a dynamic count.
    num_anchors = concatenated_anchors.num_boxes_static()
    if num_anchors is None:
      num_anchors = concatenated_anchors.num_boxes()
    if self._clip_window is not None:
      # Clip anchors to the window scaled into image coordinates.
      clip_window = tf.multiply(
          tf.to_float([im_height, im_width, im_height, im_width]),
          self._clip_window)
      concatenated_anchors = box_list_ops.clip_to_window(
          concatenated_anchors, clip_window, filter_nonoverlapping=False)
      # TODO: make reshape an option for the clip_to_window op
      concatenated_anchors.set(
          tf.reshape(concatenated_anchors.get(), [num_anchors, 4]))

    # Attach a constant per-coordinate stddev of 0.01 as an extra 'stddev'
    # field on the anchors.  NOTE(review): consumers of this field are not
    # visible here; presumably used by downstream box coders/target assigners.
    stddevs_tensor = 0.01 * tf.ones(
        [num_anchors, 4], dtype=tf.float32, name='stddevs')
    concatenated_anchors.add_field('stddev', stddevs_tensor)

    return concatenated_anchors


def create_ssd_anchors(num_layers=6,
                       min_scale=0.2,
                       max_scale=0.95,
                       aspect_ratios=(1.0, 2.0, 3.0, 1.0/2, 1.0/3),
                       base_anchor_size=None,
                       reduce_boxes_in_lowest_layer=True):
  """Builds a MultipleGridAnchorGenerator configured as in the SSD paper.

  Scales are linearly interpolated between min_scale and max_scale across
  `num_layers` feature maps, assumed to be passed in at generation time from
  finest to coarsest resolution.  At every grid point each aspect ratio gets
  one box; a ratio of 1.0 additionally gets a second box whose scale is the
  geometric mean of the current and next layer's scales.  When
  reduce_boxes_in_lowest_layer is True, the first layer instead uses a fixed
  set of three boxes.

  Anchors produced by calling `generate` on the returned generator are in
  normalized coordinates, clipped to the unit square.

  Args:
    num_layers: integer number of grid layers to create anchors for (actual
      grid sizes are passed in at generation time).
    min_scale: anchor scale for the finest-resolution layer (float).
    max_scale: anchor scale for the coarsest-resolution layer (float).
    aspect_ratios: list or tuple of (float) aspect ratios placed at each grid
      point.
    base_anchor_size: base anchor size as [height, width]; defaults to
      [1.0, 1.0].
    reduce_boxes_in_lowest_layer: whether the lowest layer uses the fixed
      3-box configuration instead of the full aspect-ratio set.

  Returns:
    a MultipleGridAnchorGenerator
  """
  if base_anchor_size is None:
    base_anchor_size = [1.0, 1.0]
  base_anchor_size = tf.constant(base_anchor_size, dtype=tf.float32)

  # Per-layer scales plus a trailing 1.0 sentinel, used only to interpolate
  # the extra 1:1 box of the final layer.
  scales = [min_scale + (max_scale - min_scale) * i / (num_layers - 1)
            for i in range(num_layers)]
  scales.append(1.0)

  box_specs_list = []
  for layer, (scale, scale_next) in enumerate(zip(scales[:-1], scales[1:])):
    if layer == 0 and reduce_boxes_in_lowest_layer:
      # Fixed reduced box set for the finest-resolution layer.
      layer_box_specs = [(0.1, 1.0), (scale, 2.0), (scale, 0.5)]
    else:
      layer_box_specs = []
      for aspect_ratio in aspect_ratios:
        layer_box_specs.append((scale, aspect_ratio))
        if aspect_ratio == 1.0:
          # Extra 1:1 box at the geometric mean of adjacent layer scales.
          layer_box_specs.append((np.sqrt(scale*scale_next), 1.0))
    box_specs_list.append(layer_box_specs)
  return MultipleGridAnchorGenerator(box_specs_list, base_anchor_size)


================================================
FILE: object_detector_app/object_detection/anchor_generators/multiple_grid_anchor_generator_test.py
================================================
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

"""Tests for anchor_generators.multiple_grid_anchor_generator_test.py."""

import numpy as np

import tensorflow as tf

from object_detection.anchor_generators import multiple_grid_anchor_generator as ag


class MultipleGridAnchorGeneratorTest(tf.test.TestCase):
  """Unit tests for ag.MultipleGridAnchorGenerator."""

  def test_construct_single_anchor_grid(self):
    """Builds a 1x1 anchor grid to test the size of the output boxes."""
    exp_anchor_corners = [[-121, -35, 135, 29], [-249, -67, 263, 61],
                          [-505, -131, 519, 125], [-57, -67, 71, 61],
                          [-121, -131, 135, 125], [-249, -259, 263, 253],
                          [-25, -131, 39, 125], [-57, -259, 71, 253],
                          [-121, -515, 135, 509]]

    base_anchor_size = tf.constant([256, 256], dtype=tf.float32)
    # One layer, nine (scale, aspect_ratio) pairs.
    box_specs_list = [[(.5, .25), (1.0, .25), (2.0, .25),
                       (.5, 1.0), (1.0, 1.0), (2.0, 1.0),
                       (.5, 4.0), (1.0, 4.0), (2.0, 4.0)]]
    anchor_generator = ag.MultipleGridAnchorGenerator(
        box_specs_list, base_anchor_size)
    anchors = anchor_generator.generate(feature_map_shape_list=[(1, 1)],
                                        anchor_strides=[(16, 16)],
                                        anchor_offsets=[(7, -3)])
    anchor_corners = anchors.get()
    with self.test_session():
      anchor_corners_out = anchor_corners.eval()
      self.assertAllClose(anchor_corners_out, exp_anchor_corners)

  def test_construct_anchor_grid(self):
    """Checks corner coordinates on a 2x2 grid with explicit strides/offsets."""
    base_anchor_size = tf.constant([10, 10], dtype=tf.float32)
    box_specs_list = [[(0.5, 1.0), (1.0, 1.0), (2.0, 1.0)]]

    exp_anchor_corners = [[-2.5, -2.5, 2.5, 2.5], [-5., -5., 5., 5.],
                          [-10., -10., 10., 10.], [-2.5, 16.5, 2.5, 21.5],
                          [-5., 14., 5, 24], [-10., 9., 10, 29],
                          [16.5, -2.5, 21.5, 2.5], [14., -5., 24, 5],
                          [9., -10., 29, 10], [16.5, 16.5, 21.5, 21.5],
                          [14., 14., 24, 24], [9., 9., 29, 29]]

    anchor_generator = ag.MultipleGridAnchorGenerator(
        box_specs_list, base_anchor_size)
    anchors = anchor_generator.generate(feature_map_shape_list=[(2, 2)],
                                        anchor_strides=[(19, 19)],
                                        anchor_offsets=[(0, 0)])
    anchor_corners = anchors.get()

    with self.test_session():
      anchor_corners_out = anchor_corners.eval()
      self.assertAllClose(anchor_corners_out, exp_anchor_corners)

  def test_construct_anchor_grid_non_square(self):
    """Grid dimensions given as scalar tensors; 1x2 non-square feature map."""
    base_anchor_size = tf.constant([1, 1], dtype=tf.float32)
    box_specs_list = [[(1.0, 1.0)]]

    exp_anchor_corners = [[0., -0.25, 1., 0.75], [0., 0.25, 1., 1.25]]

    anchor_generator = ag.MultipleGridAnchorGenerator(box_specs_list,
                                                      base_anchor_size)
    anchors = anchor_generator.generate(feature_map_shape_list=[(tf.constant(
        1, dtype=tf.int32), tf.constant(2, dtype=tf.int32))])
    anchor_corners = anchors.get()

    with self.test_session():
      anchor_corners_out = anchor_corners.eval()
      self.assertAllClose(anchor_corners_out, exp_anchor_corners)

  def test_construct_anchor_grid_unnormalized(self):
    """Passing im_height/im_width yields absolute (pixel) coordinates."""
    base_anchor_size = tf.constant([1, 1], dtype=tf.float32)
    box_specs_list = [[(1.0, 1.0)]]

    exp_anchor_corners = [[0., 0., 320., 320.], [0., 320., 320., 640.]]

    anchor_generator = ag.MultipleGridAnchorGenerator(box_specs_list,
                                                      base_anchor_size)
    anchors = anchor_generator.generate(
        feature_map_shape_list=[(tf.constant(1, dtype=tf.int32), tf.constant(
            2, dtype=tf.int32))],
        im_height=320,
        im_width=640)
    anchor_corners = anchors.get()

    with self.test_session():
      anchor_corners_out = anchor_corners.eval()
      self.assertAllClose(anchor_corners_out, exp_anchor_corners)

  def test_construct_multiple_grids(self):
    """Two feature maps; spot-checks anchors from both the big and small grid."""
    base_anchor_size = tf.constant([1.0, 1.0], dtype=tf.float32)
    box_specs_list = [[(1.0, 1.0), (2.0, 1.0), (1.0, 0.5)],
                      [(1.0, 1.0), (1.0, 0.5)]]

    # height and width of box with .5 aspect ratio
    h = np.sqrt(2)
    w = 1.0/np.sqrt(2)
    exp_small_grid_corners = [[-.25, -.25, .75, .75],
                              [.25-.5*h, .25-.5*w, .25+.5*h, .25+.5*w],
                              [-.25, .25, .75, 1.25],
                              [.25-.5*h, .75-.5*w, .25+.5*h, .75+.5*w],
                              [.25, -.25, 1.25, .75],
                              [.75-.5*h, .25-.5*w, .75+.5*h, .25+.5*w],
                              [.25, .25, 1.25, 1.25],
                              [.75-.5*h, .75-.5*w, .75+.5*h, .75+.5*w]]
    # only test first entry of larger set of anchors
    exp_big_grid_corners = [[.125-.5, .125-.5, .125+.5, .125+.5],
                            [.125-1.0, .125-1.0, .125+1.0, .125+1.0],
                            [.125-.5*h, .125-.5*w, .125+.5*h, .125+.5*w],]

    anchor_generator = ag.MultipleGridAnchorGenerator(
        box_specs_list, base_anchor_size)
    anchors = anchor_generator.generate(feature_map_shape_list=[(4, 4), (2, 2)],
                                        anchor_strides=[(.25, .25), (.5, .5)],
                                        anchor_offsets=[(.125, .125),
                                                        (.25, .25)])
    anchor_corners = anchors.get()

    with self.test_session():
      anchor_corners_out = anchor_corners.eval()
      # 4*4*3 + 2*2*2 = 48 + 8 = 56 anchors total.
      # NOTE: assertEquals is a deprecated alias (removed in Python 3.12);
      # use assertEqual.
      self.assertEqual(anchor_corners_out.shape, (56, 4))
      big_grid_corners = anchor_corners_out[0:3, :]
      small_grid_corners = anchor_corners_out[48:, :]
      self.assertAllClose(small_grid_corners, exp_small_grid_corners)
      self.assertAllClose(big_grid_corners, exp_big_grid_corners)

  def test_construct_multiple_grids_with_clipping(self):
    """Anchors are clipped to the provided [0, 0, 1, 1] window."""
    base_anchor_size = tf.constant([1.0, 1.0], dtype=tf.float32)
    box_specs_list = [[(1.0, 1.0), (2.0, 1.0), (1.0, 0.5)],
                      [(1.0, 1.0), (1.0, 0.5)]]

    # height and width of box with .5 aspect ratio
    h = np.sqrt(2)
    w = 1.0/np.sqrt(2)
    exp_small_grid_corners = [[0, 0, .75, .75],
                              [0, 0, .25+.5*h, .25+.5*w],
                              [0, .25, .75, 1],
                              [0, .75-.5*w, .25+.5*h, 1],
                              [.25, 0, 1, .75],
                              [.75-.5*h, 0, 1, .25+.5*w],
                              [.25, .25, 1, 1],
                              [.75-.5*h, .75-.5*w, 1, 1]]

    clip_window = tf.constant([0, 0, 1, 1], dtype=tf.float32)
    anchor_generator = ag.MultipleGridAnchorGenerator(
        box_specs_list, base_anchor_size, clip_window=clip_window)
    anchors = anchor_generator.generate(feature_map_shape_list=[(4, 4), (2, 2)])
    anchor_corners = anchors.get()

    with self.test_session():
      anchor_corners_out = anchor_corners.eval()
      small_grid_corners = anchor_corners_out[48:, :]
      self.assertAllClose(small_grid_corners, exp_small_grid_corners)

  def test_invalid_box_specs(self):
    """Constructor rejects malformed box_specs_list inputs."""
    # not all box specs are pairs
    box_specs_list = [[(1.0, 1.0), (2.0, 1.0), (1.0, 0.5)],
                      [(1.0, 1.0), (1.0, 0.5, .3)]]
    with self.assertRaises(ValueError):
      ag.MultipleGridAnchorGenerator(box_specs_list)

    # box_specs_list is not a list of lists
    box_specs_list = [(1.0, 1.0), (2.0, 1.0), (1.0, 0.5)]
    with self.assertRaises(ValueError):
      ag.MultipleGridAnchorGenerator(box_specs_list)

  def test_invalid_generate_arguments(self):
    """generate() rejects argument lists of wrong length or wrong arity."""
    base_anchor_size = tf.constant([1.0, 1.0], dtype=tf.float32)
    box_specs_list = [[(1.0, 1.0), (2.0, 1.0), (1.0, 0.5)],
                      [(1.0, 1.0), (1.0, 0.5)]]
    anchor_generator = ag.MultipleGridAnchorGenerator(
        box_specs_list, base_anchor_size)

    # incompatible lengths with box_specs_list
    with self.assertRaises(ValueError):
      anchor_generator.generate(feature_map_shape_list=[(4, 4), (2, 2)],
                                anchor_strides=[(.25, .25)],
                                anchor_offsets=[(.125, .125), (.25, .25)])
    with self.assertRaises(ValueError):
      anchor_generator.generate(feature_map_shape_list=[(4, 4), (2, 2), (1, 1)],
                                anchor_strides=[(.25, .25), (.5, .5)],
                                anchor_offsets=[(.125, .125), (.25, .25)])
    with self.assertRaises(ValueError):
      anchor_generator.generate(feature_map_shape_list=[(4, 4), (2, 2)],
                                anchor_strides=[(.5, .5)],
                                anchor_offsets=[(.25, .25)])

    # not pairs
    with self.assertRaises(ValueError):
      anchor_generator.generate(feature_map_shape_list=[(4, 4, 4), (2, 2)],
                                anchor_strides=[(.25, .25), (.5, .5)],
                                anchor_offsets=[(.125, .125), (.25, .25)])
    with self.assertRaises(ValueError):
      anchor_generator.generate(feature_map_shape_list=[(4, 4), (2, 2)],
                                anchor_strides=[(.25, .25, .1), (.5, .5)],
                                anchor_offsets=[(.125, .125),
                                                (.25, .25)])
    with self.assertRaises(ValueError):
      anchor_generator.generate(feature_map_shape_list=[(4), (2, 2)],
                                anchor_strides=[(.25, .25), (.5, .5)],
                                anchor_offsets=[(.125), (.25)])


class CreateSSDAnchorsTest(tf.test.TestCase):
  """Unit tests for the ag.create_ssd_anchors factory."""

  def test_create_ssd_anchors_returns_correct_shape(self):
    """Total anchor count matches for both lowest-layer reduction settings."""
    feature_map_shape_list = [(38, 38), (19, 19), (10, 10),
                              (5, 5), (3, 3), (1, 1)]
    # (reduce_boxes_in_lowest_layer, expected total anchor count).
    for reduce_lowest, expected_shape in [(True, (7308, 4)),
                                          (False, (11640, 4))]:
      anchor_generator = ag.create_ssd_anchors(
          num_layers=6, min_scale=0.2, max_scale=0.95,
          aspect_ratios=(1.0, 2.0, 3.0, 1.0/2, 1.0/3),
          reduce_boxes_in_lowest_layer=reduce_lowest)

      anchors = anchor_generator.generate(
          feature_map_shape_list=feature_map_shape_list)
      anchor_corners = anchors.get()
      with self.test_session():
        anchor_corners_out = anchor_corners.eval()
        # NOTE: assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual.
        self.assertEqual(anchor_corners_out.shape, expected_shape)


if __name__ == '__main__':
  # Run every test case in this module through the TensorFlow test runner.
  tf.test.main()


================================================
FILE: object_detector_app/object_detection/box_coders/BUILD
================================================
# Tensorflow Object Detection API: Box Coder implementations.

package(
    default_visibility = ["//visibility:public"],
)

licenses(["notice"])

# Apache 2.0

# Library/test pair for the Faster R-CNN box coder
# (center/size offsets relative to anchors).
py_library(
    name = "faster_rcnn_box_coder",
    srcs = [
        "faster_rcnn_box_coder.py",
    ],
    deps = [
        "//tensorflow_models/object_detection/core:box_coder",
        "//tensorflow_models/object_detection/core:box_list",
    ],
)

py_test(
    name = "faster_rcnn_box_coder_test",
    srcs = [
        "faster_rcnn_box_coder_test.py",
    ],
    deps = [
        ":faster_rcnn_box_coder",
        "//tensorflow",
        "//tensorflow_models/object_detection/core:box_list",
    ],
)

# Library/test pair for the keypoint box coder (also depends on
# standard_fields for keypoint field names).
py_library(
    name = "keypoint_box_coder",
    srcs = [
        "keypoint_box_coder.py",
    ],
    deps = [
        "//tensorflow_models/object_detection/core:box_coder",
        "//tensorflow_models/object_detection/core:box_list",
        "//tensorflow_models/object_detection/core:standard_fields",
    ],
)

py_test(
    name = "keypoint_box_coder_test",
    srcs = [
        "keypoint_box_coder_test.py",
    ],
    deps = [
        ":keypoint_box_coder",
        "//tensorflow",
        "//tensorflow_models/object_detection/core:box_list",
        "//tensorflow_models/object_detection/core:standard_fields",
    ],
)

# Library/test pair for the mean/stddev box coder.
py_library(
    name = "mean_stddev_box_coder",
    srcs = [
        "mean_stddev_box_coder.py",
    ],
    deps = [
        "//tensorflow_models/object_detection/core:box_coder",
        "//tensorflow_models/object_detection/core:box_list",
    ],
)

py_test(
    name = "mean_stddev_box_coder_test",
    srcs = [
        "mean_stddev_box_coder_test.py",
    ],
    deps = [
        ":mean_stddev_box_coder",
        "//tensorflow",
        "//tensorflow_models/object_detection/core:box_list",
    ],
)

# Library/test pair for the square box coder.
py_library(
    name = "square_box_coder",
    srcs = [
        "square_box_coder.py",
    ],
    deps = [
        "//tensorflow_models/object_detection/core:box_coder",
        "//tensorflow_models/object_detection/core:box_list",
    ],
)

py_test(
    name = "square_box_coder_test",
    srcs = [
        "square_box_coder_test.py",
    ],
    deps = [
        ":square_box_coder",
        "//tensorflow",
        "//tensorflow_models/object_detection/core:box_list",
    ],
)


================================================
FILE: object_detector_app/object_detection/box_coders/__init__.py
================================================


================================================
FILE: object_detector_app/object_detection/box_coders/faster_rcnn_box_coder.py
================================================
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

"""Faster RCNN box coder.

Faster RCNN box coder follows the coding schema described below:
  ty = (y - ya) / ha
  tx = (x - xa) / wa
  th = log(h / ha)
  tw = log(w / wa)
  where x, y, w, h denote the box's center coordinates, width and height
  respectively. Similarly, xa, ya, wa, ha denote the anchor's center
  coordinates, width and height. tx, ty, tw and th denote the anchor-encoded
  center, width and height respectively.

  See http://arxiv.org/abs/1506.01497 for details.
"""

import tensorflow as tf

from object_detection.core import box_coder
from object_detection.core import box_list

EPSILON = 1e-8


class FasterRcnnBoxCoder(box_coder.BoxCoder):
  """Faster RCNN box coder.

  Encodes boxes as [ty, tx, th, tw] offsets relative to anchors, optionally
  scaled per-coordinate; see http://arxiv.org/abs/1506.01497.
  """

  def __init__(self, scale_factors=None):
    """Constructor for FasterRcnnBoxCoder.

    Args:
      scale_factors: List of 4 positive scalars to scale ty, tx, th and tw.
        If set to None, does not perform scaling. For Faster RCNN,
        the open-source implementation recommends using [10.0, 10.0, 5.0, 5.0].
    """
    if scale_factors:
      # Exactly four positive scalars, one per target coordinate.
      assert len(scale_factors) == 4
      for factor in scale_factors:
        assert factor > 0
    self._scale_factors = scale_factors

  @property
  def code_size(self):
    # Each encoded box is the 4-vector [ty, tx, th, tw].
    return 4

  def _encode(self, boxes, anchors):
    """Encode a box collection with respect to anchor collection.

    Args:
      boxes: BoxList holding N boxes to be encoded.
      anchors: BoxList of anchors.

    Returns:
      a tensor representing N anchor-encoded boxes of the format
      [ty, tx, th, tw].
    """
    # Work in center/size coordinates for both anchors and boxes.
    anc_yc, anc_xc, anc_h, anc_w = anchors.get_center_coordinates_and_sizes()
    box_yc, box_xc, box_h, box_w = boxes.get_center_coordinates_and_sizes()
    # Guard the divisions and logs below against zero-sized boxes.
    anc_h += EPSILON
    anc_w += EPSILON
    box_h += EPSILON
    box_w += EPSILON

    ty = (box_yc - anc_yc) / anc_h
    tx = (box_xc - anc_xc) / anc_w
    th = tf.log(box_h / anc_h)
    tw = tf.log(box_w / anc_w)
    # Scales location targets as used in paper for joint training.
    if self._scale_factors:
      ty, tx, th, tw = [
          target * factor
          for target, factor in zip((ty, tx, th, tw), self._scale_factors)]
    return tf.transpose(tf.stack([ty, tx, th, tw]))

  def _decode(self, rel_codes, anchors):
    """Decode relative codes to boxes.

    Args:
      rel_codes: a tensor representing N anchor-encoded boxes.
      anchors: BoxList of anchors.

    Returns:
      boxes: BoxList holding N bounding boxes.
    """
    anc_yc, anc_xc, anc_h, anc_w = anchors.get_center_coordinates_and_sizes()

    ty, tx, th, tw = tf.unstack(tf.transpose(rel_codes))
    # Undo the per-coordinate scaling applied during encoding.
    if self._scale_factors:
      ty, tx, th, tw = [
          target / factor
          for target, factor in zip((ty, tx, th, tw), self._scale_factors)]
    h = tf.exp(th) * anc_h
    w = tf.exp(tw) * anc_w
    ycenter = ty * anc_h + anc_yc
    xcenter = tx * anc_w + anc_xc
    # Convert back from center/size to corner coordinates.
    half_h = h / 2.
    half_w = w / 2.
    corners = tf.stack([ycenter - half_h, xcenter - half_w,
                        ycenter + half_h, xcenter + half_w])
    return box_list.BoxList(tf.transpose(corners))


================================================
FILE: object_detector_app/object_detection/box_coders/faster_rcnn_box_coder_test.py
================================================
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

"""Tests for object_detection.box_coder.faster_rcnn_box_coder."""

import tensorflow as tf

from object_detection.box_coders import faster_rcnn_box_coder
from object_detection.core import box_list


class FasterRcnnBoxCoderTest(tf.test.TestCase):

  def test_get_corre
Download .txt
gitextract_koctdad1/

├── LICENSE
├── README.md
├── moveo_moveit/
│   ├── CMakeLists.txt
│   ├── moveo_moveit_arduino/
│   │   ├── MultiStepperTest/
│   │   │   └── MultiStepperTest.ino
│   │   └── moveo_moveit_arduino.ino
│   ├── msg/
│   │   └── ArmJointState.msg
│   ├── package.xml
│   ├── scripts/
│   │   ├── README.md
│   │   └── moveo_objrec_publisher.py
│   └── src/
│       ├── move_group_interface_coor_1.cpp
│       └── moveit_convert.cpp
├── moveo_moveit_config/
│   ├── .setup_assistant
│   ├── CMakeLists.txt
│   ├── config/
│   │   ├── fake_controllers.yaml
│   │   ├── joint_limits.yaml
│   │   ├── kinematics.yaml
│   │   ├── moveo_urdf.srdf
│   │   └── ompl_planning.yaml
│   ├── launch/
│   │   ├── default_warehouse_db.launch
│   │   ├── demo.launch
│   │   ├── fake_moveit_controller_manager.launch.xml
│   │   ├── joystick_control.launch
│   │   ├── move_group.launch
│   │   ├── moveit.rviz
│   │   ├── moveit_rviz.launch
│   │   ├── moveo_urdf_moveit_controller_manager.launch.xml
│   │   ├── moveo_urdf_moveit_sensor_manager.launch.xml
│   │   ├── ompl_planning_pipeline.launch.xml
│   │   ├── planning_context.launch
│   │   ├── planning_pipeline.launch.xml
│   │   ├── run_benchmark_ompl.launch
│   │   ├── sensor_manager.launch.xml
│   │   ├── setup_assistant.launch
│   │   ├── trajectory_execution.launch.xml
│   │   ├── warehouse.launch
│   │   └── warehouse_settings.launch.xml
│   └── package.xml
├── moveo_urdf/
│   ├── CMakeLists.txt
│   ├── config/
│   │   └── joint_names_move_urdf.yaml
│   ├── launch/
│   │   ├── display.launch
│   │   ├── gazebo.launch
│   │   ├── gazebo_old.launch
│   │   └── gazebo_sdf.launch
│   ├── meshes/
│   │   ├── Gripper_Idol_Gear.STL
│   │   ├── Gripper_Idol_Gear_col.STL
│   │   ├── Gripper_Servo_Gear.STL
│   │   ├── Gripper_Servo_Gear_col.STL
│   │   ├── Link_1.STL
│   │   ├── Link_1_col.STL
│   │   ├── Link_2.STL
│   │   ├── Link_2_col.STL
│   │   ├── Link_3.STL
│   │   ├── Link_3_col.STL
│   │   ├── Link_4.STL
│   │   ├── Link_4_col.STL
│   │   ├── Link_5.STL
│   │   ├── Link_5_col.STL
│   │   ├── Pivot_Arm_Gripper_Idol.STL
│   │   ├── Pivot_Arm_Gripper_Idol_col.STL
│   │   ├── Pivot_Arm_Gripper_Servo.STL
│   │   ├── Pivot_Arm_Gripper_Servo_col.STL
│   │   ├── Tip_Gripper_Idol.STL
│   │   ├── Tip_Gripper_Idol_col.STL
│   │   ├── Tip_Gripper_Servo.STL
│   │   ├── Tip_Gripper_Servo_col.STL
│   │   ├── base_link.STL
│   │   └── base_link_col.STL
│   ├── package.xml
│   └── urdf/
│       ├── moveo_urdf.urdf
│       ├── moveo_urdf_new.urdf
│       └── moveo_urdf_og.urdf
└── object_detector_app/
    ├── LICENSE
    ├── README.md
    ├── __init__.py
    ├── environment.yml
    ├── object_detection/
    │   ├── BUILD
    │   ├── CONTRIBUTING.md
    │   ├── README.md
    │   ├── __init__.py
    │   ├── anchor_generators/
    │   │   ├── BUILD
    │   │   ├── __init__.py
    │   │   ├── grid_anchor_generator.py
    │   │   ├── grid_anchor_generator_test.py
    │   │   ├── multiple_grid_anchor_generator.py
    │   │   └── multiple_grid_anchor_generator_test.py
    │   ├── box_coders/
    │   │   ├── BUILD
    │   │   ├── __init__.py
    │   │   ├── faster_rcnn_box_coder.py
    │   │   ├── faster_rcnn_box_coder_test.py
    │   │   ├── keypoint_box_coder.py
    │   │   ├── keypoint_box_coder_test.py
    │   │   ├── mean_stddev_box_coder.py
    │   │   ├── mean_stddev_box_coder_test.py
    │   │   ├── square_box_coder.py
    │   │   └── square_box_coder_test.py
    │   ├── builders/
    │   │   ├── BUILD
    │   │   ├── __init__.py
    │   │   ├── anchor_generator_builder.py
    │   │   ├── anchor_generator_builder_test.py
    │   │   ├── box_coder_builder.py
    │   │   ├── box_coder_builder_test.py
    │   │   ├── box_predictor_builder.py
    │   │   ├── box_predictor_builder_test.py
    │   │   ├── hyperparams_builder.py
    │   │   ├── hyperparams_builder_test.py
    │   │   ├── image_resizer_builder.py
    │   │   ├── image_resizer_builder_test.py
    │   │   ├── input_reader_builder.py
    │   │   ├── input_reader_builder_test.py
    │   │   ├── losses_builder.py
    │   │   ├── losses_builder_test.py
    │   │   ├── matcher_builder.py
    │   │   ├── matcher_builder_test.py
    │   │   ├── model_builder.py
    │   │   ├── model_builder_test.py
    │   │   ├── optimizer_builder.py
    │   │   ├── optimizer_builder_test.py
    │   │   ├── post_processing_builder.py
    │   │   ├── post_processing_builder_test.py
    │   │   ├── preprocessor_builder.py
    │   │   ├── preprocessor_builder_test.py
    │   │   ├── region_similarity_calculator_builder.py
    │   │   └── region_similarity_calculator_builder_test.py
    │   ├── core/
    │   │   ├── BUILD
    │   │   ├── __init__.py
    │   │   ├── anchor_generator.py
    │   │   ├── balanced_positive_negative_sampler.py
    │   │   ├── balanced_positive_negative_sampler_test.py
    │   │   ├── batcher.py
    │   │   ├── batcher_test.py
    │   │   ├── box_coder.py
    │   │   ├── box_coder_test.py
    │   │   ├── box_list.py
    │   │   ├── box_list_ops.py
    │   │   ├── box_list_ops_test.py
    │   │   ├── box_list_test.py
    │   │   ├── box_predictor.py
    │   │   ├── box_predictor_test.py
    │   │   ├── data_decoder.py
    │   │   ├── keypoint_ops.py
    │   │   ├── keypoint_ops_test.py
    │   │   ├── losses.py
    │   │   ├── losses_test.py
    │   │   ├── matcher.py
    │   │   ├── matcher_test.py
    │   │   ├── minibatch_sampler.py
    │   │   ├── minibatch_sampler_test.py
    │   │   ├── model.py
    │   │   ├── post_processing.py
    │   │   ├── post_processing_test.py
    │   │   ├── prefetcher.py
    │   │   ├── prefetcher_test.py
    │   │   ├── preprocessor.py
    │   │   ├── preprocessor_test.py
    │   │   ├── region_similarity_calculator.py
    │   │   ├── region_similarity_calculator_test.py
    │   │   ├── standard_fields.py
    │   │   ├── target_assigner.py
    │   │   └── target_assigner_test.py
    │   ├── create_pascal_tf_record.py
    │   ├── create_pascal_tf_record_test.py
    │   ├── create_pet_tf_record.py
    │   ├── data/
    │   │   ├── mscoco_label_map.pbtxt
    │   │   ├── pascal_label_map.pbtxt
    │   │   └── pet_label_map.pbtxt
    │   ├── data_decoders/
    │   │   ├── BUILD
    │   │   ├── __init__.py
    │   │   ├── tf_example_decoder.py
    │   │   └── tf_example_decoder_test.py
    │   ├── eval.py
    │   ├── eval_util.py
    │   ├── evaluator.py
    │   ├── export_inference_graph.py
    │   ├── exporter.py
    │   ├── exporter_test.py
    │   ├── g3doc/
    │   │   ├── configuring_jobs.md
    │   │   ├── defining_your_own_model.md
    │   │   ├── detection_model_zoo.md
    │   │   ├── exporting_models.md
    │   │   ├── installation.md
    │   │   ├── preparing_inputs.md
    │   │   ├── running_locally.md
    │   │   ├── running_notebook.md
    │   │   ├── running_on_cloud.md
    │   │   └── running_pets.md
    │   ├── matchers/
    │   │   ├── BUILD
    │   │   ├── __init__.py
    │   │   ├── argmax_matcher.py
    │   │   ├── argmax_matcher_test.py
    │   │   ├── bipartite_matcher.py
    │   │   └── bipartite_matcher_test.py
    │   ├── meta_architectures/
    │   │   ├── BUILD
    │   │   ├── __init__.py
    │   │   ├── faster_rcnn_meta_arch.py
    │   │   ├── faster_rcnn_meta_arch_test.py
    │   │   ├── faster_rcnn_meta_arch_test_lib.py
    │   │   ├── rfcn_meta_arch.py
    │   │   ├── rfcn_meta_arch_test.py
    │   │   ├── ssd_meta_arch.py
    │   │   └── ssd_meta_arch_test.py
    │   ├── models/
    │   │   ├── BUILD
    │   │   ├── __init__.py
    │   │   ├── faster_rcnn_inception_resnet_v2_feature_extractor.py
    │   │   ├── faster_rcnn_inception_resnet_v2_feature_extractor_test.py
    │   │   ├── faster_rcnn_resnet_v1_feature_extractor.py
    │   │   ├── faster_rcnn_resnet_v1_feature_extractor_test.py
    │   │   ├── feature_map_generators.py
    │   │   ├── feature_map_generators_test.py
    │   │   ├── ssd_feature_extractor_test.py
    │   │   ├── ssd_inception_v2_feature_extractor.py
    │   │   ├── ssd_inception_v2_feature_extractor_test.py
    │   │   ├── ssd_mobilenet_v1_feature_extractor.py
    │   │   └── ssd_mobilenet_v1_feature_extractor_test.py
    │   ├── object_detection_tutorial.ipynb
    │   ├── protos/
    │   │   ├── BUILD
    │   │   ├── __init__.py
    │   │   ├── anchor_generator.proto
    │   │   ├── anchor_generator_pb2.py
    │   │   ├── argmax_matcher.proto
    │   │   ├── argmax_matcher_pb2.py
    │   │   ├── bipartite_matcher.proto
    │   │   ├── bipartite_matcher_pb2.py
    │   │   ├── box_coder.proto
    │   │   ├── box_coder_pb2.py
    │   │   ├── box_predictor.proto
    │   │   ├── box_predictor_pb2.py
    │   │   ├── eval.proto
    │   │   ├── eval_pb2.py
    │   │   ├── faster_rcnn.proto
    │   │   ├── faster_rcnn_box_coder.proto
    │   │   ├── faster_rcnn_box_coder_pb2.py
    │   │   ├── faster_rcnn_pb2.py
    │   │   ├── grid_anchor_generator.proto
    │   │   ├── grid_anchor_generator_pb2.py
    │   │   ├── hyperparams.proto
    │   │   ├── hyperparams_pb2.py
    │   │   ├── image_resizer.proto
    │   │   ├── image_resizer_pb2.py
    │   │   ├── input_reader.proto
    │   │   ├── input_reader_pb2.py
    │   │   ├── losses.proto
    │   │   ├── losses_pb2.py
    │   │   ├── matcher.proto
    │   │   ├── matcher_pb2.py
    │   │   ├── mean_stddev_box_coder.proto
    │   │   ├── mean_stddev_box_coder_pb2.py
    │   │   ├── model.proto
    │   │   ├── model_pb2.py
    │   │   ├── optimizer.proto
    │   │   ├── optimizer_pb2.py
    │   │   ├── pipeline.proto
    │   │   ├── pipeline_pb2.py
    │   │   ├── post_processing.proto
    │   │   ├── post_processing_pb2.py
    │   │   ├── preprocessor.proto
    │   │   ├── preprocessor_pb2.py
    │   │   ├── region_similarity_calculator.proto
    │   │   ├── region_similarity_calculator_pb2.py
    │   │   ├── square_box_coder.proto
    │   │   ├── square_box_coder_pb2.py
    │   │   ├── ssd.proto
    │   │   ├── ssd_anchor_generator.proto
    │   │   ├── ssd_anchor_generator_pb2.py
    │   │   ├── ssd_pb2.py
    │   │   ├── string_int_label_map.proto
    │   │   ├── string_int_label_map_pb2.py
    │   │   ├── train.proto
    │   │   └── train_pb2.py
    │   ├── samples/
    │   │   ├── cloud/
    │   │   │   └── cloud.yml
    │   │   └── configs/
    │   │       ├── faster_rcnn_inception_resnet_v2_atrous_pets.config
    │   │       ├── faster_rcnn_resnet101_pets.config
    │   │       ├── faster_rcnn_resnet101_voc07.config
    │   │       ├── faster_rcnn_resnet152_pets.config
    │   │       ├── faster_rcnn_resnet50_pets.config
    │   │       ├── rfcn_resnet101_pets.config
    │   │       ├── ssd_inception_v2_pets.config
    │   │       └── ssd_mobilenet_v1_pets.config
    │   ├── ssd_mobilenet_v1_coco_11_06_2017/
    │   │   └── frozen_inference_graph.pb
    │   ├── test_images/
    │   │   └── image_info.txt
    │   ├── train.py
    │   ├── trainer.py
    │   ├── trainer_test.py
    │   └── utils/
    │       ├── BUILD
    │       ├── __init__.py
    │       ├── category_util.py
    │       ├── category_util_test.py
    │       ├── dataset_util.py
    │       ├── dataset_util_test.py
    │       ├── label_map_util.py
    │       ├── label_map_util_test.py
    │       ├── learning_schedules.py
    │       ├── learning_schedules_test.py
    │       ├── metrics.py
    │       ├── metrics_test.py
    │       ├── np_box_list.py
    │       ├── np_box_list_ops.py
    │       ├── np_box_list_ops_test.py
    │       ├── np_box_list_test.py
    │       ├── np_box_ops.py
    │       ├── np_box_ops_test.py
    │       ├── object_detection_evaluation.py
    │       ├── object_detection_evaluation_test.py
    │       ├── ops.py
    │       ├── ops_test.py
    │       ├── per_image_evaluation.py
    │       ├── per_image_evaluation_test.py
    │       ├── shape_utils.py
    │       ├── shape_utils_test.py
    │       ├── static_shape.py
    │       ├── static_shape_test.py
    │       ├── test_utils.py
    │       ├── test_utils_test.py
    │       ├── variables_helper.py
    │       ├── variables_helper_test.py
    │       ├── visualization_utils.py
    │       └── visualization_utils_test.py
    ├── object_detection_app.py
    ├── object_detection_multithreading.py
    └── utils/
        ├── __init__.py
        ├── app_utils.py
        └── test_app_utils.py
Download .txt
SYMBOL INDEX (1350 symbols across 147 files)

FILE: moveo_moveit/scripts/moveo_objrec_publisher.py
  function subscribe_detected_object (line 42) | def subscribe_detected_object():
  function publish_detected_object (line 74) | def publish_detected_object():

FILE: moveo_moveit/src/move_group_interface_coor_1.cpp
  function main (line 12) | int main(int argc, char **argv)

FILE: moveo_moveit/src/moveit_convert.cpp
  function cmd_cb (line 28) | void cmd_cb(const sensor_msgs::JointState& cmd_arm)
  function main (line 102) | int main(int argc, char **argv)

FILE: object_detector_app/object_detection/anchor_generators/grid_anchor_generator.py
  class GridAnchorGenerator (line 30) | class GridAnchorGenerator(anchor_generator.AnchorGenerator):
    method __init__ (line 33) | def __init__(self,
    method name_scope (line 71) | def name_scope(self):
    method num_anchors_per_location (line 74) | def num_anchors_per_location(self):
    method _generate (line 83) | def _generate(self, feature_map_shape_list):
  function tile_anchors (line 121) | def tile_anchors(grid_height,
  function _center_size_bbox_to_corners_bbox (line 183) | def _center_size_bbox_to_corners_bbox(centers, sizes):

FILE: object_detector_app/object_detection/anchor_generators/grid_anchor_generator_test.py
  class GridAnchorGeneratorTest (line 23) | class GridAnchorGeneratorTest(tf.test.TestCase):
    method test_construct_single_anchor (line 25) | def test_construct_single_anchor(self):
    method test_construct_anchor_grid (line 46) | def test_construct_anchor_grid(self):

FILE: object_detector_app/object_detection/anchor_generators/multiple_grid_anchor_generator.py
  class MultipleGridAnchorGenerator (line 35) | class MultipleGridAnchorGenerator(anchor_generator.AnchorGenerator):
    method __init__ (line 38) | def __init__(self,
    method name_scope (line 93) | def name_scope(self):
    method num_anchors_per_location (line 96) | def num_anchors_per_location(self):
    method _generate (line 105) | def _generate(self,
  function create_ssd_anchors (line 223) | def create_ssd_anchors(num_layers=6,

FILE: object_detector_app/object_detection/anchor_generators/multiple_grid_anchor_generator_test.py
  class MultipleGridAnchorGeneratorTest (line 25) | class MultipleGridAnchorGeneratorTest(tf.test.TestCase):
    method test_construct_single_anchor_grid (line 27) | def test_construct_single_anchor_grid(self):
    method test_construct_anchor_grid (line 49) | def test_construct_anchor_grid(self):
    method test_construct_anchor_grid_non_square (line 71) | def test_construct_anchor_grid_non_square(self):
    method test_construct_anchor_grid_unnormalized (line 87) | def test_construct_anchor_grid_unnormalized(self):
    method test_construct_multiple_grids (line 106) | def test_construct_multiple_grids(self):
    method test_construct_multiple_grids_with_clipping (line 143) | def test_construct_multiple_grids_with_clipping(self):
    method test_invalid_box_specs (line 171) | def test_invalid_box_specs(self):
    method test_invalid_generate_arguments (line 183) | def test_invalid_generate_arguments(self):
  class CreateSSDAnchorsTest (line 220) | class CreateSSDAnchorsTest(tf.test.TestCase):
    method test_create_ssd_anchors_returns_correct_shape (line 222) | def test_create_ssd_anchors_returns_correct_shape(self):

FILE: object_detector_app/object_detection/box_coders/faster_rcnn_box_coder.py
  class FasterRcnnBoxCoder (line 39) | class FasterRcnnBoxCoder(box_coder.BoxCoder):
    method __init__ (line 42) | def __init__(self, scale_factors=None):
    method code_size (line 57) | def code_size(self):
    method _encode (line 60) | def _encode(self, boxes, anchors):
    method _decode (line 92) | def _decode(self, rel_codes, anchors):

FILE: object_detector_app/object_detection/box_coders/faster_rcnn_box_coder_test.py
  class FasterRcnnBoxCoderTest (line 24) | class FasterRcnnBoxCoderTest(tf.test.TestCase):
    method test_get_correct_relative_codes_after_encoding (line 26) | def test_get_correct_relative_codes_after_encoding(self):
    method test_get_correct_relative_codes_after_encoding_with_scaling (line 39) | def test_get_correct_relative_codes_after_encoding_with_scaling(self):
    method test_get_correct_boxes_after_decoding (line 54) | def test_get_correct_boxes_after_decoding(self):
    method test_get_correct_boxes_after_decoding_with_scaling (line 66) | def test_get_correct_boxes_after_decoding_with_scaling(self):
    method test_very_small_Width_nan_after_encoding (line 80) | def test_very_small_Width_nan_after_encoding(self):

FILE: object_detector_app/object_detection/box_coders/keypoint_box_coder.py
  class KeypointBoxCoder (line 47) | class KeypointBoxCoder(box_coder.BoxCoder):
    method __init__ (line 50) | def __init__(self, num_keypoints, scale_factors=None):
    method code_size (line 74) | def code_size(self):
    method _encode (line 77) | def _encode(self, boxes, anchors):
    method _decode (line 128) | def _decode(self, rel_codes, anchors):

FILE: object_detector_app/object_detection/box_coders/keypoint_box_coder_test.py
  class KeypointBoxCoderTest (line 25) | class KeypointBoxCoderTest(tf.test.TestCase):
    method test_get_correct_relative_codes_after_encoding (line 27) | def test_get_correct_relative_codes_after_encoding(self):
    method test_get_correct_relative_codes_after_encoding_with_scaling (line 50) | def test_get_correct_relative_codes_after_encoding_with_scaling(self):
    method test_get_correct_boxes_after_decoding (line 75) | def test_get_correct_boxes_after_decoding(self):
    method test_get_correct_boxes_after_decoding_with_scaling (line 98) | def test_get_correct_boxes_after_decoding_with_scaling(self):
    method test_very_small_width_nan_after_encoding (line 123) | def test_very_small_width_nan_after_encoding(self):

FILE: object_detector_app/object_detection/box_coders/mean_stddev_box_coder.py
  class MeanStddevBoxCoder (line 25) | class MeanStddevBoxCoder(box_coder.BoxCoder):
    method code_size (line 29) | def code_size(self):
    method _encode (line 32) | def _encode(self, boxes, anchors):
    method _decode (line 52) | def _decode(self, rel_codes, anchors):

FILE: object_detector_app/object_detection/box_coders/mean_stddev_box_coder_test.py
  class MeanStddevBoxCoderTest (line 24) | class MeanStddevBoxCoderTest(tf.test.TestCase):
    method testGetCorrectRelativeCodesAfterEncoding (line 26) | def testGetCorrectRelativeCodesAfterEncoding(self):
    method testGetCorrectBoxesAfterDecoding (line 41) | def testGetCorrectBoxesAfterDecoding(self):

FILE: object_detector_app/object_detection/box_coders/square_box_coder.py
  class SquareBoxCoder (line 43) | class SquareBoxCoder(box_coder.BoxCoder):
    method __init__ (line 46) | def __init__(self, scale_factors=None):
    method code_size (line 68) | def code_size(self):
    method _encode (line 71) | def _encode(self, boxes, anchors):
    method _decode (line 101) | def _decode(self, rel_codes, anchors):

FILE: object_detector_app/object_detection/box_coders/square_box_coder_test.py
  class SquareBoxCoderTest (line 24) | class SquareBoxCoderTest(tf.test.TestCase):
    method test_correct_relative_codes_with_default_scale (line 26) | def test_correct_relative_codes_with_default_scale(self):
    method test_correct_relative_codes_with_non_default_scale (line 41) | def test_correct_relative_codes_with_non_default_scale(self):
    method test_correct_relative_codes_with_small_width (line 55) | def test_correct_relative_codes_with_small_width(self):
    method test_correct_boxes_with_default_scale (line 68) | def test_correct_boxes_with_default_scale(self):
    method test_correct_boxes_with_non_default_scale (line 82) | def test_correct_boxes_with_non_default_scale(self):

FILE: object_detector_app/object_detection/builders/anchor_generator_builder.py
  function build (line 23) | def build(anchor_generator_config):

FILE: object_detector_app/object_detection/builders/anchor_generator_builder_test.py
  class AnchorGeneratorBuilderTest (line 27) | class AnchorGeneratorBuilderTest(tf.test.TestCase):
    method assert_almost_list_equal (line 29) | def assert_almost_list_equal(self, expected_list, actual_list, delta=N...
    method test_build_grid_anchor_generator_with_defaults (line 34) | def test_build_grid_anchor_generator_with_defaults(self):
    method test_build_grid_anchor_generator_with_non_default_parameters (line 56) | def test_build_grid_anchor_generator_with_non_default_parameters(self):
    method test_build_ssd_anchor_generator_with_defaults (line 88) | def test_build_ssd_anchor_generator_with_defaults(self):
    method test_build_ssd_anchor_generator_withoud_reduced_boxes (line 119) | def test_build_ssd_anchor_generator_withoud_reduced_boxes(self):
    method test_build_ssd_anchor_generator_with_non_default_parameters (line 153) | def test_build_ssd_anchor_generator_with_non_default_parameters(self):
    method test_raise_value_error_on_empty_anchor_genertor (line 184) | def test_raise_value_error_on_empty_anchor_genertor(self):

FILE: object_detector_app/object_detection/builders/box_coder_builder.py
  function build (line 23) | def build(box_coder_config):

FILE: object_detector_app/object_detection/builders/box_coder_builder_test.py
  class BoxCoderBuilderTest (line 28) | class BoxCoderBuilderTest(tf.test.TestCase):
    method test_build_faster_rcnn_box_coder_with_defaults (line 30) | def test_build_faster_rcnn_box_coder_with_defaults(self):
    method test_build_faster_rcnn_box_coder_with_non_default_parameters (line 42) | def test_build_faster_rcnn_box_coder_with_non_default_parameters(self):
    method test_build_mean_stddev_box_coder (line 58) | def test_build_mean_stddev_box_coder(self):
    method test_build_square_box_coder_with_defaults (line 70) | def test_build_square_box_coder_with_defaults(self):
    method test_build_square_box_coder_with_non_default_parameters (line 82) | def test_build_square_box_coder_with_non_default_parameters(self):
    method test_raise_error_on_empty_box_coder (line 97) | def test_raise_error_on_empty_box_coder(self):

FILE: object_detector_app/object_detection/builders/box_predictor_builder.py
  function build (line 22) | def build(argscope_fn, box_predictor_config, is_training, num_classes):

FILE: object_detector_app/object_detection/builders/box_predictor_builder_test.py
  class ConvolutionalBoxPredictorBuilderTest (line 27) | class ConvolutionalBoxPredictorBuilderTest(tf.test.TestCase):
    method test_box_predictor_calls_conv_argscope_fn (line 29) | def test_box_predictor_calls_conv_argscope_fn(self):
    method test_construct_non_default_conv_box_predictor (line 74) | def test_construct_non_default_conv_box_predictor(self):
    method test_construct_default_conv_box_predictor (line 120) | def test_construct_default_conv_box_predictor(self):
  class MaskRCNNBoxPredictorBuilderTest (line 151) | class MaskRCNNBoxPredictorBuilderTest(tf.test.TestCase):
    method test_box_predictor_builder_calls_fc_argscope_fn (line 153) | def test_box_predictor_builder_calls_fc_argscope_fn(self):
    method test_non_default_mask_rcnn_box_predictor (line 183) | def test_non_default_mask_rcnn_box_predictor(self):
    method test_build_default_mask_rcnn_box_predictor (line 223) | def test_build_default_mask_rcnn_box_predictor(self):
    method test_build_box_predictor_with_mask_branch (line 240) | def test_build_box_predictor_with_mask_branch(self):
  class RfcnBoxPredictorBuilderTest (line 269) | class RfcnBoxPredictorBuilderTest(tf.test.TestCase):
    method test_box_predictor_calls_fc_argscope_fn (line 271) | def test_box_predictor_calls_fc_argscope_fn(self):
    method test_non_default_rfcn_box_predictor (line 316) | def test_non_default_rfcn_box_predictor(self):
    method test_default_rfcn_box_predictor (line 358) | def test_default_rfcn_box_predictor(self):

FILE: object_detector_app/object_detection/builders/hyperparams_builder.py
  function build (line 24) | def build(hyperparams_config, is_training):
  function _build_activation_fn (line 79) | def _build_activation_fn(activation_fn):
  function _build_regularizer (line 100) | def _build_regularizer(regularizer):
  function _build_initializer (line 120) | def _build_initializer(initializer):
  function _build_batch_norm_params (line 151) | def _build_batch_norm_params(batch_norm, is_training):

FILE: object_detector_app/object_detection/builders/hyperparams_builder_test.py
  class HyperparamsBuilderTest (line 30) | class HyperparamsBuilderTest(tf.test.TestCase):
    method _get_scope_key (line 33) | def _get_scope_key(self, op):
    method test_default_arg_scope_has_conv2d_op (line 36) | def test_default_arg_scope_has_conv2d_op(self):
    method test_default_arg_scope_has_separable_conv2d_op (line 52) | def test_default_arg_scope_has_separable_conv2d_op(self):
    method test_default_arg_scope_has_conv2d_transpose_op (line 68) | def test_default_arg_scope_has_conv2d_transpose_op(self):
    method test_explicit_fc_op_arg_scope_has_fully_connected_op (line 84) | def test_explicit_fc_op_arg_scope_has_fully_connected_op(self):
    method test_separable_conv2d_and_conv2d_and_transpose_have_same_parameters (line 101) | def test_separable_conv2d_and_conv2d_and_transpose_have_same_parameter...
    method test_return_l1_regularized_weights (line 119) | def test_return_l1_regularized_weights(self):
    method test_return_l2_regularizer_weights (line 141) | def test_return_l2_regularizer_weights(self):
    method test_return_non_default_batch_norm_params_with_train_during_train (line 164) | def test_return_non_default_batch_norm_params_with_train_during_train(...
    method test_return_batch_norm_params_with_notrain_during_eval (line 194) | def test_return_batch_norm_params_with_notrain_during_eval(self):
    method test_return_batch_norm_params_with_notrain_when_train_is_false (line 224) | def test_return_batch_norm_params_with_notrain_when_train_is_false(self):
    method test_do_not_use_batch_norm_if_default (line 254) | def test_do_not_use_batch_norm_if_default(self):
    method test_use_none_activation (line 272) | def test_use_none_activation(self):
    method test_use_relu_activation (line 290) | def test_use_relu_activation(self):
    method test_use_relu_6_activation (line 308) | def test_use_relu_6_activation(self):
    method _assert_variance_in_range (line 326) | def _assert_variance_in_range(self, initializer, shape, variance,
    method test_variance_in_range_with_variance_scaling_initializer_fan_in (line 339) | def test_variance_in_range_with_variance_scaling_initializer_fan_in(se...
    method test_variance_in_range_with_variance_scaling_initializer_fan_out (line 361) | def test_variance_in_range_with_variance_scaling_initializer_fan_out(s...
    method test_variance_in_range_with_variance_scaling_initializer_fan_avg (line 383) | def test_variance_in_range_with_variance_scaling_initializer_fan_avg(s...
    method test_variance_in_range_with_variance_scaling_initializer_uniform (line 405) | def test_variance_in_range_with_variance_scaling_initializer_uniform(s...
    method test_variance_in_range_with_truncated_normal_initializer (line 427) | def test_variance_in_range_with_truncated_normal_initializer(self):

FILE: object_detector_app/object_detection/builders/image_resizer_builder.py
  function build (line 23) | def build(image_resizer_config):

FILE: object_detector_app/object_detection/builders/image_resizer_builder_test.py
  class ImageResizerBuilderTest (line 23) | class ImageResizerBuilderTest(tf.test.TestCase):
    method _shape_of_resized_random_image_given_text_proto (line 25) | def _shape_of_resized_random_image_given_text_proto(
    method test_built_keep_aspect_ratio_resizer_returns_expected_shape (line 36) | def test_built_keep_aspect_ratio_resizer_returns_expected_shape(self):
    method test_built_fixed_shape_resizer_returns_expected_shape (line 49) | def test_built_fixed_shape_resizer_returns_expected_shape(self):
    method test_raises_error_on_invalid_input (line 62) | def test_raises_error_on_invalid_input(self):

FILE: object_detector_app/object_detection/builders/input_reader_builder.py
  function build (line 34) | def build(input_reader_config):

FILE: object_detector_app/object_detection/builders/input_reader_builder_test.py
  class InputReaderBuilderTest (line 31) | class InputReaderBuilderTest(tf.test.TestCase):
    method create_tf_record (line 33) | def create_tf_record(self):
    method test_build_tf_record_input_reader (line 61) | def test_build_tf_record_input_reader(self):

FILE: object_detector_app/object_detection/builders/losses_builder.py
  function build (line 22) | def build(loss_config):
  function build_hard_example_miner (line 55) | def build_hard_example_miner(config,
  function _build_localization_loss (line 94) | def _build_localization_loss(loss_config):
  function _build_classification_loss (line 127) | def _build_classification_loss(loss_config):

FILE: object_detector_app/object_detection/builders/losses_builder_test.py
  class LocalizationLossBuilderTest (line 26) | class LocalizationLossBuilderTest(tf.test.TestCase):
    method test_build_weighted_l2_localization_loss (line 28) | def test_build_weighted_l2_localization_loss(self):
    method test_build_weighted_smooth_l1_localization_loss (line 45) | def test_build_weighted_smooth_l1_localization_loss(self):
    method test_build_weighted_iou_localization_loss (line 62) | def test_build_weighted_iou_localization_loss(self):
    method test_anchorwise_output (line 79) | def test_anchorwise_output(self):
    method test_raise_error_on_empty_localization_config (line 102) | def test_raise_error_on_empty_localization_config(self):
  class ClassificationLossBuilderTest (line 115) | class ClassificationLossBuilderTest(tf.test.TestCase):
    method test_build_weighted_sigmoid_classification_loss (line 117) | def test_build_weighted_sigmoid_classification_loss(self):
    method test_build_weighted_softmax_classification_loss (line 134) | def test_build_weighted_softmax_classification_loss(self):
    method test_build_bootstrapped_sigmoid_classification_loss (line 151) | def test_build_bootstrapped_sigmoid_classification_loss(self):
    method test_anchorwise_output (line 169) | def test_anchorwise_output(self):
    method test_raise_error_on_empty_config (line 192) | def test_raise_error_on_empty_config(self):
  class HardExampleMinerBuilderTest (line 205) | class HardExampleMinerBuilderTest(tf.test.TestCase):
    method test_do_not_build_hard_example_miner_by_default (line 207) | def test_do_not_build_hard_example_miner_by_default(self):
    method test_build_hard_example_miner_for_classification_loss (line 223) | def test_build_hard_example_miner_for_classification_loss(self):
    method test_build_hard_example_miner_for_localization_loss (line 243) | def test_build_hard_example_miner_for_localization_loss(self):
    method test_build_hard_example_miner_with_non_default_values (line 263) | def test_build_hard_example_miner_with_non_default_values(self):
  class LossBuilderTest (line 291) | class LossBuilderTest(tf.test.TestCase):
    method test_build_all_loss_parameters (line 293) | def test_build_all_loss_parameters(self):

FILE: object_detector_app/object_detection/builders/matcher_builder.py
  function build (line 23) | def build(matcher_config):

FILE: object_detector_app/object_detection/builders/matcher_builder_test.py
  class MatcherBuilderTest (line 27) | class MatcherBuilderTest(tf.test.TestCase):
    method test_build_arg_max_matcher_with_defaults (line 29) | def test_build_arg_max_matcher_with_defaults(self):
    method test_build_arg_max_matcher_without_thresholds (line 43) | def test_build_arg_max_matcher_without_thresholds(self):
    method test_build_arg_max_matcher_with_non_default_parameters (line 58) | def test_build_arg_max_matcher_with_non_default_parameters(self):
    method test_build_bipartite_matcher (line 76) | def test_build_bipartite_matcher(self):
    method test_raise_error_on_empty_matcher (line 87) | def test_raise_error_on_empty_matcher(self):

FILE: object_detector_app/object_detection/builders/model_builder.py
  function build (line 55) | def build(model_config, is_training):
  function _build_ssd_feature_extractor (line 79) | def _build_ssd_feature_extractor(feature_extractor_config, is_training,
  function _build_ssd_model (line 108) | def _build_ssd_model(ssd_config, is_training):
  function _build_faster_rcnn_feature_extractor (line 164) | def _build_faster_rcnn_feature_extractor(
  function _build_faster_rcnn_model (line 193) | def _build_faster_rcnn_model(frcnn_config, is_training):

FILE: object_detector_app/object_detection/builders/model_builder_test.py
  class ModelBuilderTest (line 41) | class ModelBuilderTest(tf.test.TestCase):
    method create_model (line 43) | def create_model(self, model_config):
    method test_create_ssd_inception_v2_model_from_config (line 55) | def test_create_ssd_inception_v2_model_from_config(self):
    method test_create_ssd_mobilenet_v1_model_from_config (line 126) | def test_create_ssd_mobilenet_v1_model_from_config(self):
    method test_create_faster_rcnn_resnet_v1_models_from_config (line 197) | def test_create_faster_rcnn_resnet_v1_models_from_config(self):
    method test_create_faster_rcnn_inception_resnet_v2_model_from_config (line 264) | def test_create_faster_rcnn_inception_resnet_v2_model_from_config(self):
    method test_create_faster_rcnn_model_from_config_with_example_miner (line 331) | def test_create_faster_rcnn_model_from_config_with_example_miner(self):
    method test_create_rfcn_resnet_v1_model_from_config (line 387) | def test_create_rfcn_resnet_v1_model_from_config(self):

FILE: object_detector_app/object_detection/builders/optimizer_builder.py
  function build (line 24) | def build(optimizer_config, global_summaries):
  function _create_learning_rate (line 69) | def _create_learning_rate(learning_rate_config, global_summaries):

FILE: object_detector_app/object_detection/builders/optimizer_builder_test.py
  class LearningRateBuilderTest (line 26) | class LearningRateBuilderTest(tf.test.TestCase):
    method testBuildConstantLearningRate (line 28) | def testBuildConstantLearningRate(self):
    method testBuildExponentialDecayLearningRate (line 41) | def testBuildExponentialDecayLearningRate(self):
    method testBuildManualStepLearningRate (line 57) | def testBuildManualStepLearningRate(self):
    method testRaiseErrorOnEmptyLearningRate (line 77) | def testRaiseErrorOnEmptyLearningRate(self):
  class OptimizerBuilderTest (line 88) | class OptimizerBuilderTest(tf.test.TestCase):
    method testBuildRMSPropOptimizer (line 90) | def testBuildRMSPropOptimizer(self):
    method testBuildMomentumOptimizer (line 112) | def testBuildMomentumOptimizer(self):
    method testBuildAdamOptimizer (line 130) | def testBuildAdamOptimizer(self):
    method testBuildMovingAverageOptimizer (line 147) | def testBuildMovingAverageOptimizer(self):
    method testBuildMovingAverageOptimizerWithNonDefaultDecay (line 165) | def testBuildMovingAverageOptimizerWithNonDefaultDecay(self):
    method testBuildEmptyOptimizer (line 186) | def testBuildEmptyOptimizer(self):

FILE: object_detector_app/object_detection/builders/post_processing_builder.py
  function build (line 24) | def build(post_processing_config):
  function _build_non_max_suppressor (line 62) | def _build_non_max_suppressor(nms_config):
  function _build_score_converter (line 90) | def _build_score_converter(score_converter_config):

FILE: object_detector_app/object_detection/builders/post_processing_builder_test.py
  class PostProcessingBuilderTest (line 24) | class PostProcessingBuilderTest(tf.test.TestCase):
    method test_build_non_max_suppressor_with_correct_parameters (line 26) | def test_build_non_max_suppressor_with_correct_parameters(self):
    method test_build_identity_score_converter (line 44) | def test_build_identity_score_converter(self):
    method test_build_sigmoid_score_converter (line 53) | def test_build_sigmoid_score_converter(self):
    method test_build_softmax_score_converter (line 62) | def test_build_softmax_score_converter(self):

FILE: object_detector_app/object_detection/builders/preprocessor_builder.py
  function _get_step_config_from_proto (line 24) | def _get_step_config_from_proto(preprocessor_step_config, step_name):
  function _get_dict_from_proto (line 45) | def _get_dict_from_proto(config):
  function build (line 104) | def build(preprocessor_step_config):

FILE: object_detector_app/object_detection/builders/preprocessor_builder_test.py
  class PreprocessorBuilderTest (line 27) | class PreprocessorBuilderTest(tf.test.TestCase):
    method assert_dictionary_close (line 29) | def assert_dictionary_close(self, dict1, dict2):
    method test_build_normalize_image (line 39) | def test_build_normalize_image(self):
    method test_build_random_horizontal_flip (line 59) | def test_build_random_horizontal_flip(self):
    method test_build_random_pixel_value_scale (line 70) | def test_build_random_pixel_value_scale(self):
    method test_build_random_image_scale (line 83) | def test_build_random_image_scale(self):
    method test_build_random_rgb_to_gray (line 97) | def test_build_random_rgb_to_gray(self):
    method test_build_random_adjust_brightness (line 109) | def test_build_random_adjust_brightness(self):
    method test_build_random_adjust_contrast (line 121) | def test_build_random_adjust_contrast(self):
    method test_build_random_adjust_hue (line 134) | def test_build_random_adjust_hue(self):
    method test_build_random_adjust_saturation (line 146) | def test_build_random_adjust_saturation(self):
    method test_build_random_distort_color (line 159) | def test_build_random_distort_color(self):
    method test_build_random_jitter_boxes (line 171) | def test_build_random_jitter_boxes(self):
    method test_build_random_crop_image (line 183) | def test_build_random_crop_image(self):
    method test_build_random_pad_image (line 207) | def test_build_random_pad_image(self):
    method test_build_random_crop_pad_image (line 222) | def test_build_random_crop_pad_image(self):
    method test_build_random_crop_to_aspect_ratio (line 249) | def test_build_random_crop_to_aspect_ratio(self):
    method test_build_random_black_patches (line 263) | def test_build_random_black_patches(self):
    method test_build_random_resize_method (line 279) | def test_build_random_resize_method(self):
    method test_build_scale_boxes_to_pixel_coordinates (line 292) | def test_build_scale_boxes_to_pixel_coordinates(self):
    method test_build_resize_image (line 302) | def test_build_resize_image(self):
    method test_build_subtract_channel_mean (line 318) | def test_build_subtract_channel_mean(self):
    method test_build_ssd_random_crop (line 330) | def test_build_ssd_random_crop(self):
    method test_build_ssd_random_crop_empty_operations (line 363) | def test_build_ssd_random_crop_empty_operations(self):
    method test_build_ssd_random_crop_pad (line 374) | def test_build_ssd_random_crop_pad(self):
    method test_build_ssd_random_crop_fixed_aspect_ratio (line 420) | def test_build_ssd_random_crop_fixed_aspect_ratio(self):

FILE: object_detector_app/object_detection/builders/region_similarity_calculator_builder.py
  function build (line 22) | def build(region_similarity_calculator_config):

FILE: object_detector_app/object_detection/builders/region_similarity_calculator_builder_test.py
  class RegionSimilarityCalculatorBuilderTest (line 26) | class RegionSimilarityCalculatorBuilderTest(tf.test.TestCase):
    method testBuildIoaSimilarityCalculator (line 28) | def testBuildIoaSimilarityCalculator(self):
    method testBuildIouSimilarityCalculator (line 40) | def testBuildIouSimilarityCalculator(self):
    method testBuildNegSqDistSimilarityCalculator (line 52) | def testBuildNegSqDistSimilarityCalculator(self):

FILE: object_detector_app/object_detection/core/anchor_generator.py
  class AnchorGenerator (line 38) | class AnchorGenerator(object):
    method name_scope (line 43) | def name_scope(self):
    method check_num_anchors (line 54) | def check_num_anchors(self):
    method num_anchors_per_location (line 68) | def num_anchors_per_location(self):
    method generate (line 77) | def generate(self, feature_map_shape_list, **params):
    method _generate (line 110) | def _generate(self, feature_map_shape_list, **params):
    method _assert_correct_number_of_anchors (line 124) | def _assert_correct_number_of_anchors(self, anchors, feature_map_shape...

FILE: object_detector_app/object_detection/core/balanced_positive_negative_sampler.py
  class BalancedPositiveNegativeSampler (line 34) | class BalancedPositiveNegativeSampler(minibatch_sampler.MinibatchSampler):
    method __init__ (line 37) | def __init__(self, positive_fraction=0.5):
    method subsample (line 51) | def subsample(self, indicator, batch_size, labels):

FILE: object_detector_app/object_detection/core/balanced_positive_negative_sampler_test.py
  class BalancedPositiveNegativeSamplerTest (line 24) | class BalancedPositiveNegativeSamplerTest(tf.test.TestCase):
    method test_subsample_all_examples (line 26) | def test_subsample_all_examples(self):
    method test_subsample_selection (line 43) | def test_subsample_selection(self):
    method test_raises_error_with_incorrect_label_shape (line 65) | def test_raises_error_with_incorrect_label_shape(self):
    method test_raises_error_with_incorrect_indicator_shape (line 73) | def test_raises_error_with_incorrect_indicator_shape(self):

FILE: object_detector_app/object_detection/core/batcher.py
  class BatchQueue (line 24) | class BatchQueue(object):
    method __init__ (line 66) | def __init__(self, tensor_dict, batch_size, batch_queue_capacity,
    method dequeue (line 100) | def dequeue(self):

FILE: object_detector_app/object_detection/core/batcher_test.py
  class BatcherTest (line 26) | class BatcherTest(tf.test.TestCase):
    method test_batch_and_unpad_2d_tensors_of_different_sizes_in_1st_dimension (line 28) | def test_batch_and_unpad_2d_tensors_of_different_sizes_in_1st_dimensio...
    method test_batch_and_unpad_2d_tensors_of_different_sizes_in_all_dimensions (line 60) | def test_batch_and_unpad_2d_tensors_of_different_sizes_in_all_dimensions(
    method test_batch_and_unpad_2d_tensors_of_same_size_in_all_dimensions (line 93) | def test_batch_and_unpad_2d_tensors_of_same_size_in_all_dimensions(self):
    method test_batcher_when_batch_size_is_one (line 124) | def test_batcher_when_batch_size_is_one(self):

FILE: object_detector_app/object_detection/core/box_coder.py
  class BoxCoder (line 43) | class BoxCoder(object):
    method code_size (line 48) | def code_size(self):
    method encode (line 61) | def encode(self, boxes, anchors):
    method decode (line 74) | def decode(self, rel_codes, anchors):
    method _encode (line 89) | def _encode(self, boxes, anchors):
    method _decode (line 102) | def _decode(self, rel_codes, anchors):
  function batch_decode (line 116) | def batch_decode(encoded_boxes, box_coder, anchors):

FILE: object_detector_app/object_detection/core/box_coder_test.py
  class MockBoxCoder (line 24) | class MockBoxCoder(box_coder.BoxCoder):
    method code_size (line 27) | def code_size(self):
    method _encode (line 30) | def _encode(self, boxes, anchors):
    method _decode (line 33) | def _decode(self, rel_codes, anchors):
  class BoxCoderTest (line 37) | class BoxCoderTest(tf.test.TestCase):
    method test_batch_decode (line 39) | def test_batch_decode(self):

FILE: object_detector_app/object_detection/core/box_list.py
  class BoxList (line 40) | class BoxList(object):
    method __init__ (line 43) | def __init__(self, boxes):
    method num_boxes (line 59) | def num_boxes(self):
    method num_boxes_static (line 67) | def num_boxes_static(self):
    method get_all_fields (line 78) | def get_all_fields(self):
    method get_extra_fields (line 82) | def get_extra_fields(self):
    method add_field (line 86) | def add_field(self, field, field_data):
    method has_field (line 98) | def has_field(self, field):
    method get (line 101) | def get(self):
    method set (line 109) | def set(self, boxes):
    method get_field (line 122) | def get_field(self, field):
    method set_field (line 142) | def set_field(self, field, value):
    method get_center_coordinates_and_sizes (line 158) | def get_center_coordinates_and_sizes(self, scope=None):
    method transpose_coordinates (line 176) | def transpose_coordinates(self, scope=None):
    method as_tensor_dict (line 187) | def as_tensor_dict(self, fields=None):

FILE: object_detector_app/object_detection/core/box_list_ops.py
  class SortOrder (line 32) | class SortOrder(object):
  function area (line 43) | def area(boxlist, scope=None):
  function height_width (line 59) | def height_width(boxlist, scope=None):
  function scale (line 76) | def scale(boxlist, y_scale, x_scale, scope=None):
  function clip_to_window (line 102) | def clip_to_window(boxlist, window, filter_nonoverlapping=True, scope=No...
  function prune_outside_window (line 140) | def prune_outside_window(boxlist, window, scope=None):
  function prune_completely_outside_window (line 172) | def prune_completely_outside_window(boxlist, window, scope=None):
  function intersection (line 203) | def intersection(boxlist1, boxlist2, scope=None):
  function matched_intersection (line 228) | def matched_intersection(boxlist1, boxlist2, scope=None):
  function iou (line 253) | def iou(boxlist1, boxlist2, scope=None):
  function matched_iou (line 275) | def matched_iou(boxlist1, boxlist2, scope=None):
  function ioa (line 296) | def ioa(boxlist1, boxlist2, scope=None):
  function prune_non_overlapping_boxes (line 317) | def prune_non_overlapping_boxes(
  function prune_small_boxes (line 345) | def prune_small_boxes(boxlist, min_side, scope=None):
  function change_coordinate_frame (line 363) | def change_coordinate_frame(boxlist, window, scope=None):
  function sq_dist (line 393) | def sq_dist(boxlist1, boxlist2, scope=None):
  function boolean_mask (line 422) | def boolean_mask(boxlist, indicator, fields=None, scope=None):
  function gather (line 461) | def gather(boxlist, indices, fields=None, scope=None):
  function concatenate (line 500) | def concatenate(boxlists, fields=None, scope=None):
  function sort_by_field (line 554) | def sort_by_field(boxlist, field, order=SortOrder.descend, scope=None):
  function visualize_boxes_in_image (line 597) | def visualize_boxes_in_image(image, boxlist, normalized=False, scope=None):
  function filter_field_value_equals (line 626) | def filter_field_value_equals(boxlist, field, value, scope=None):
  function filter_greater_than (line 652) | def filter_greater_than(boxlist, thresh, scope=None):
  function non_max_suppression (line 690) | def non_max_suppression(boxlist, thresh, max_output_size, scope=None):
  function _copy_extra_fields (line 723) | def _copy_extra_fields(boxlist_to_copy_to, boxlist_to_copy_from):
  function to_normalized_coordinates (line 738) | def to_normalized_coordinates(boxlist, height, width,
  function to_absolute_coordinates (line 775) | def to_absolute_coordinates(boxlist, height, width,
  function refine_boxes_multi_class (line 809) | def refine_boxes_multi_class(pool_boxes,
  function refine_boxes (line 855) | def refine_boxes(pool_boxes,
  function box_voting (line 891) | def box_voting(selected_boxes, pool_boxes, iou_thresh=0.5):
  function pad_or_clip_box_list (line 955) | def pad_or_clip_box_list(boxlist, num_boxes, scope=None):

FILE: object_detector_app/object_detection/core/box_list_ops_test.py
  class BoxListOpsTest (line 25) | class BoxListOpsTest(tf.test.TestCase):
    method test_area (line 28) | def test_area(self):
    method test_height_width (line 37) | def test_height_width(self):
    method test_scale (line 48) | def test_scale(self):
    method test_clip_to_window_filter_boxes_which_fall_outside_the_window (line 64) | def test_clip_to_window_filter_boxes_which_fall_outside_the_window(
    method test_clip_to_window_without_filtering_boxes_which_fall_outside_the_window (line 86) | def test_clip_to_window_without_filtering_boxes_which_fall_outside_the...
    method test_prune_outside_window_filters_boxes_which_fall_outside_the_window (line 108) | def test_prune_outside_window_filters_boxes_which_fall_outside_the_win...
    method test_prune_completely_outside_window (line 131) | def test_prune_completely_outside_window(self):
    method test_intersection (line 156) | def test_intersection(self):
    method test_matched_intersection (line 168) | def test_matched_intersection(self):
    method test_iou (line 179) | def test_iou(self):
    method test_matched_iou (line 191) | def test_matched_iou(self):
    method test_iouworks_on_empty_inputs (line 202) | def test_iouworks_on_empty_inputs(self):
    method test_ioa (line 219) | def test_ioa(self):
    method test_prune_non_overlapping_boxes (line 237) | def test_prune_non_overlapping_boxes(self):
    method test_prune_small_boxes (line 262) | def test_prune_small_boxes(self):
    method test_prune_small_boxes_prunes_boxes_with_negative_side (line 276) | def test_prune_small_boxes_prunes_boxes_with_negative_side(self):
    method test_change_coordinate_frame (line 292) | def test_change_coordinate_frame(self):
    method test_ioaworks_on_empty_inputs (line 305) | def test_ioaworks_on_empty_inputs(self):
    method test_pairwise_distances (line 322) | def test_pairwise_distances(self):
    method test_boolean_mask (line 336) | def test_boolean_mask(self):
    method test_boolean_mask_with_field (line 347) | def test_boolean_mask_with_field(self):
    method test_gather (line 364) | def test_gather(self):
    method test_gather_with_field (line 375) | def test_gather_with_field(self):
    method test_gather_with_invalid_field (line 391) | def test_gather_with_invalid_field(self):
    method test_gather_with_invalid_inputs (line 401) | def test_gather_with_invalid_inputs(self):
    method test_gather_with_dynamic_indexing (line 413) | def test_gather_with_dynamic_indexing(self):
    method test_sort_by_field_ascending_order (line 430) | def test_sort_by_field_ascending_order(self):
    method test_sort_by_field_descending_order (line 455) | def test_sort_by_field_descending_order(self):
    method test_sort_by_field_invalid_inputs (line 477) | def test_sort_by_field_invalid_inputs(self):
    method test_visualize_boxes_in_image (line 497) | def test_visualize_boxes_in_image(self):
    method test_filter_field_value_equals (line 515) | def test_filter_field_value_equals(self):
    method test_filter_greater_than (line 537) | def test_filter_greater_than(self):
    method test_clip_box_list (line 554) | def test_clip_box_list(self):
    method test_pad_box_list (line 575) | def test_pad_box_list(self):
  class ConcatenateTest (line 597) | class ConcatenateTest(tf.test.TestCase):
    method test_invalid_input_box_list_list (line 599) | def test_invalid_input_box_list_list(self):
    method test_concatenate_with_missing_fields (line 609) | def test_concatenate_with_missing_fields(self):
    method test_concatenate_with_incompatible_field_shapes (line 619) | def test_concatenate_with_incompatible_field_shapes(self):
    method test_concatenate_is_correct (line 631) | def test_concatenate_is_correct(self):
  class NonMaxSuppressionTest (line 657) | class NonMaxSuppressionTest(tf.test.TestCase):
    method test_with_invalid_scores_field (line 659) | def test_with_invalid_scores_field(self):
    method test_select_from_three_clusters (line 677) | def test_select_from_three_clusters(self):
    method test_select_at_most_two_boxes_from_three_clusters (line 698) | def test_select_at_most_two_boxes_from_three_clusters(self):
    method test_select_at_most_thirty_boxes_from_three_clusters (line 718) | def test_select_at_most_thirty_boxes_from_three_clusters(self):
    method test_select_single_box (line 739) | def test_select_single_box(self):
    method test_select_from_ten_identical_boxes (line 753) | def test_select_from_ten_identical_boxes(self):
    method test_copy_extra_fields (line 767) | def test_copy_extra_fields(self):
  class CoordinatesConversionTest (line 783) | class CoordinatesConversionTest(tf.test.TestCase):
    method test_to_normalized_coordinates (line 785) | def test_to_normalized_coordinates(self):
    method test_to_normalized_coordinates_already_normalized (line 799) | def test_to_normalized_coordinates_already_normalized(self):
    method test_to_absolute_coordinates (line 811) | def test_to_absolute_coordinates(self):
    method test_to_absolute_coordinates_already_abolute (line 826) | def test_to_absolute_coordinates_already_abolute(self):
    method test_convert_to_normalized_and_back (line 839) | def test_convert_to_normalized_and_back(self):
    method test_convert_to_absolute_and_back (line 858) | def test_convert_to_absolute_and_back(self):
  class BoxRefinementTest (line 877) | class BoxRefinementTest(tf.test.TestCase):
    method test_box_voting (line 879) | def test_box_voting(self):
    method test_box_voting_fails_with_negative_scores (line 899) | def test_box_voting_fails_with_negative_scores(self):
    method test_box_voting_fails_when_unmatched (line 910) | def test_box_voting_fails_when_unmatched(self):
    method test_refine_boxes (line 922) | def test_refine_boxes(self):
    method test_refine_boxes_multi_class (line 941) | def test_refine_boxes_multi_class(self):

FILE: object_detector_app/object_detection/core/box_list_test.py
  class BoxListTest (line 23) | class BoxListTest(tf.test.TestCase):
    method test_num_boxes (line 26) | def test_num_boxes(self):
    method test_get_correct_center_coordinates_and_sizes (line 35) | def test_get_correct_center_coordinates_and_sizes(self):
    method test_create_box_list_with_dynamic_shape (line 44) | def test_create_box_list_with_dynamic_shape(self):
    method test_transpose_coordinates (line 56) | def test_transpose_coordinates(self):
    method test_box_list_invalid_inputs (line 65) | def test_box_list_invalid_inputs(self):
    method test_num_boxes_static (line 77) | def test_num_boxes_static(self):
    method test_num_boxes_static_for_uninferrable_shape (line 83) | def test_num_boxes_static_for_uninferrable_shape(self):
    method test_as_tensor_dict (line 88) | def test_as_tensor_dict(self):
    method test_as_tensor_dict_with_features (line 106) | def test_as_tensor_dict_with_features(self):
    method test_as_tensor_dict_missing_field (line 124) | def test_as_tensor_dict_missing_field(self):

FILE: object_detector_app/object_detection/core/box_predictor.py
  class BoxPredictor (line 41) | class BoxPredictor(object):
    method __init__ (line 44) | def __init__(self, is_training, num_classes):
    method num_classes (line 58) | def num_classes(self):
    method predict (line 61) | def predict(self, image_features, num_predictions_per_location, scope,
    method _predict (line 96) | def _predict(self, image_features, num_predictions_per_location, **par...
  class RfcnBoxPredictor (line 119) | class RfcnBoxPredictor(BoxPredictor):
    method __init__ (line 131) | def __init__(self,
    method num_classes (line 163) | def num_classes(self):
    method _predict (line 166) | def _predict(self, image_features, num_predictions_per_location,
  class MaskRCNNBoxPredictor (line 253) | class MaskRCNNBoxPredictor(BoxPredictor):
    method __init__ (line 271) | def __init__(self,
    method num_classes (line 327) | def num_classes(self):
    method _predict (line 330) | def _predict(self, image_features, num_predictions_per_location):
  class ConvolutionalBoxPredictor (line 414) | class ConvolutionalBoxPredictor(BoxPredictor):
    method __init__ (line 426) | def __init__(self,
    method _predict (line 481) | def _predict(self, image_features, num_predictions_per_location):

FILE: object_detector_app/object_detection/core/box_predictor_test.py
  class MaskRCNNBoxPredictorTest (line 27) | class MaskRCNNBoxPredictorTest(tf.test.TestCase):
    method _build_arg_scope_with_hyperparams (line 29) | def _build_arg_scope_with_hyperparams(self,
    method test_get_boxes_with_five_classes (line 47) | def test_get_boxes_with_five_classes(self):
    method test_value_error_on_predict_instance_masks_with_no_conv_hyperparms (line 72) | def test_value_error_on_predict_instance_masks_with_no_conv_hyperparms...
    method test_get_instance_masks (line 83) | def test_get_instance_masks(self):
    method test_do_not_return_instance_masks_and_keypoints_without_request (line 101) | def test_do_not_return_instance_masks_and_keypoints_without_request(se...
    method test_value_error_on_predict_keypoints (line 117) | def test_value_error_on_predict_keypoints(self):
  class RfcnBoxPredictorTest (line 129) | class RfcnBoxPredictorTest(tf.test.TestCase):
    method _build_arg_scope_with_conv_hyperparams (line 131) | def _build_arg_scope_with_conv_hyperparams(self):
    method test_get_correct_box_encoding_and_class_prediction_shapes (line 146) | def test_get_correct_box_encoding_and_class_prediction_shapes(self):
  class ConvolutionalBoxPredictorTest (line 176) | class ConvolutionalBoxPredictorTest(tf.test.TestCase):
    method _build_arg_scope_with_conv_hyperparams (line 178) | def _build_arg_scope_with_conv_hyperparams(self):
    method test_get_boxes_for_five_aspect_ratios_per_location (line 194) | def test_get_boxes_for_five_aspect_ratios_per_location(self):
    method test_get_boxes_for_one_aspect_ratio_per_location (line 223) | def test_get_boxes_for_one_aspect_ratio_per_location(self):
    method test_get_multi_class_predictions_for_five_aspect_ratios_per_location (line 252) | def test_get_multi_class_predictions_for_five_aspect_ratios_per_location(
    method test_get_boxes_for_five_aspect_ratios_per_location_fully_convolutional (line 286) | def test_get_boxes_for_five_aspect_ratios_per_location_fully_convoluti...

FILE: object_detector_app/object_detection/core/data_decoder.py
  class DataDecoder (line 25) | class DataDecoder(object):
    method Decode (line 31) | def Decode(self, data):

FILE: object_detector_app/object_detection/core/keypoint_ops.py
  function scale (line 26) | def scale(keypoints, y_scale, x_scale, scope=None):
  function clip_to_window (line 45) | def clip_to_window(keypoints, window, scope=None):
  function prune_outside_window (line 68) | def prune_outside_window(keypoints, window, scope=None):
  function change_coordinate_frame (line 99) | def change_coordinate_frame(keypoints, window, scope=None):
  function to_normalized_coordinates (line 128) | def to_normalized_coordinates(keypoints, height, width,
  function to_absolute_coordinates (line 166) | def to_absolute_coordinates(keypoints, height, width,
  function flip_horizontal (line 202) | def flip_horizontal(keypoints, flip_point, flip_permutation, scope=None):

FILE: object_detector_app/object_detection/core/keypoint_ops_test.py
  class KeypointOpsTest (line 23) | class KeypointOpsTest(tf.test.TestCase):
    method test_scale (line 26) | def test_scale(self):
    method test_clip_to_window (line 44) | def test_clip_to_window(self):
    method test_prune_outside_window (line 61) | def test_prune_outside_window(self):
    method test_change_coordinate_frame (line 76) | def test_change_coordinate_frame(self):
    method test_to_normalized_coordinates (line 93) | def test_to_normalized_coordinates(self):
    method test_to_normalized_coordinates_already_normalized (line 109) | def test_to_normalized_coordinates_already_normalized(self):
    method test_to_absolute_coordinates (line 121) | def test_to_absolute_coordinates(self):
    method test_to_absolute_coordinates_already_absolute (line 137) | def test_to_absolute_coordinates_already_absolute(self):
    method test_flip_horizontal (line 149) | def test_flip_horizontal(self):

FILE: object_detector_app/object_detection/core/losses.py
  class Loss (line 40) | class Loss(object):
    method __call__ (line 44) | def __call__(self,
    method _compute_loss (line 74) | def _compute_loss(self, prediction_tensor, target_tensor, **params):
  class WeightedL2LocalizationLoss (line 89) | class WeightedL2LocalizationLoss(Loss):
    method __init__ (line 95) | def __init__(self, anchorwise_output=False):
    method _compute_loss (line 104) | def _compute_loss(self, prediction_tensor, target_tensor, weights):
  class WeightedSmoothL1LocalizationLoss (line 126) | class WeightedSmoothL1LocalizationLoss(Loss):
    method __init__ (line 135) | def __init__(self, anchorwise_output=False):
    method _compute_loss (line 144) | def _compute_loss(self, prediction_tensor, target_tensor, weights):
  class WeightedIOULocalizationLoss (line 168) | class WeightedIOULocalizationLoss(Loss):
    method _compute_loss (line 176) | def _compute_loss(self, prediction_tensor, target_tensor, weights):
  class WeightedSigmoidClassificationLoss (line 196) | class WeightedSigmoidClassificationLoss(Loss):
    method __init__ (line 199) | def __init__(self, anchorwise_output=False):
    method _compute_loss (line 208) | def _compute_loss(self,
  class WeightedSoftmaxClassificationLoss (line 241) | class WeightedSoftmaxClassificationLoss(Loss):
    method __init__ (line 244) | def __init__(self, anchorwise_output=False):
    method _compute_loss (line 253) | def _compute_loss(self, prediction_tensor, target_tensor, weights):
  class BootstrappedSigmoidClassificationLoss (line 275) | class BootstrappedSigmoidClassificationLoss(Loss):
    method __init__ (line 292) | def __init__(self, alpha, bootstrap_type='soft', anchorwise_output=Fal...
    method _compute_loss (line 311) | def _compute_loss(self, prediction_tensor, target_tensor, weights):
  class HardExampleMiner (line 339) | class HardExampleMiner(object):
    method __init__ (line 355) | def __init__(self,
    method __call__ (line 409) | def __call__(self,
    method summarize (line 493) | def summarize(self):
    method _subsample_selection_to_desired_neg_pos_ratio (line 501) | def _subsample_selection_to_desired_neg_pos_ratio(self,

FILE: object_detector_app/object_detection/core/losses_test.py
  class WeightedL2LocalizationLossTest (line 27) | class WeightedL2LocalizationLossTest(tf.test.TestCase):
    method testReturnsCorrectLoss (line 29) | def testReturnsCorrectLoss(self):
    method testReturnsCorrectAnchorwiseLoss (line 46) | def testReturnsCorrectAnchorwiseLoss(self):
    method testReturnsCorrectLossSum (line 61) | def testReturnsCorrectLossSum(self):
    method testReturnsCorrectNanLoss (line 77) | def testReturnsCorrectNanLoss(self):
  class WeightedSmoothL1LocalizationLossTest (line 97) | class WeightedSmoothL1LocalizationLossTest(tf.test.TestCase):
    method testReturnsCorrectLoss (line 99) | def testReturnsCorrectLoss(self):
  class WeightedIOULocalizationLossTest (line 121) | class WeightedIOULocalizationLossTest(tf.test.TestCase):
    method testReturnsCorrectLoss (line 123) | def testReturnsCorrectLoss(self):
  class WeightedSigmoidClassificationLossTest (line 139) | class WeightedSigmoidClassificationLossTest(tf.test.TestCase):
    method testReturnsCorrectLoss (line 141) | def testReturnsCorrectLoss(self):
    method testReturnsCorrectAnchorWiseLoss (line 168) | def testReturnsCorrectAnchorWiseLoss(self):
    method testReturnsCorrectLossWithClassIndices (line 196) | def testReturnsCorrectLossWithClassIndices(self):
  class WeightedSoftmaxClassificationLossTest (line 228) | class WeightedSoftmaxClassificationLossTest(tf.test.TestCase):
    method testReturnsCorrectLoss (line 230) | def testReturnsCorrectLoss(self):
    method testReturnsCorrectAnchorWiseLoss (line 257) | def testReturnsCorrectAnchorWiseLoss(self):
  class BootstrappedSigmoidClassificationLossTest (line 286) | class BootstrappedSigmoidClassificationLossTest(tf.test.TestCase):
    method testReturnsCorrectLossSoftBootstrapping (line 288) | def testReturnsCorrectLossSoftBootstrapping(self):
    method testReturnsCorrectLossHardBootstrapping (line 316) | def testReturnsCorrectLossHardBootstrapping(self):
    method testReturnsCorrectAnchorWiseLoss (line 344) | def testReturnsCorrectAnchorWiseLoss(self):
  class HardExampleMinerTest (line 375) | class HardExampleMinerTest(tf.test.TestCase):
    method testHardMiningWithSingleLossType (line 377) | def testHardMiningWithSingleLossType(self):
    method testHardMiningWithBothLossType (line 405) | def testHardMiningWithBothLossType(self):
    method testHardMiningNMS (line 432) | def testHardMiningNMS(self):
    method testEnforceNegativesPerPositiveRatio (line 459) | def testEnforceNegativesPerPositiveRatio(self):
    method testEnforceNegativesPerPositiveRatioWithMinNegativesPerImage (line 509) | def testEnforceNegativesPerPositiveRatioWithMinNegativesPerImage(self):

FILE: object_detector_app/object_detection/core/matcher.py
  class Match (line 40) | class Match(object):
    method __init__ (line 47) | def __init__(self, match_results):
    method match_results (line 68) | def match_results(self):
    method matched_column_indices (line 76) | def matched_column_indices(self):
    method matched_column_indicator (line 86) | def matched_column_indicator(self):
    method num_matched_columns (line 94) | def num_matched_columns(self):
    method unmatched_column_indices (line 98) | def unmatched_column_indices(self):
    method unmatched_column_indicator (line 108) | def unmatched_column_indicator(self):
    method num_unmatched_columns (line 116) | def num_unmatched_columns(self):
    method ignored_column_indices (line 120) | def ignored_column_indices(self):
    method ignored_column_indicator (line 130) | def ignored_column_indicator(self):
    method num_ignored_columns (line 139) | def num_ignored_columns(self):
    method unmatched_or_ignored_column_indices (line 143) | def unmatched_or_ignored_column_indices(self):
    method matched_row_indices (line 153) | def matched_row_indices(self):
    method _reshape_and_cast (line 168) | def _reshape_and_cast(self, t):
  class Matcher (line 172) | class Matcher(object):
    method match (line 177) | def match(self, similarity_matrix, scope=None, **params):
    method _match (line 197) | def _match(self, similarity_matrix, **params):

FILE: object_detector_app/object_detection/core/matcher_test.py
  class AnchorMatcherTest (line 23) | class AnchorMatcherTest(tf.test.TestCase):
    method test_get_correct_matched_columnIndices (line 25) | def test_get_correct_matched_columnIndices(self):
    method test_get_correct_counts (line 35) | def test_get_correct_counts(self):
    method testGetCorrectUnmatchedColumnIndices (line 55) | def testGetCorrectUnmatchedColumnIndices(self):
    method testGetCorrectMatchedRowIndices (line 65) | def testGetCorrectMatchedRowIndices(self):
    method test_get_correct_ignored_column_indices (line 75) | def test_get_correct_ignored_column_indices(self):
    method test_get_correct_matched_column_indicator (line 85) | def test_get_correct_matched_column_indicator(self):
    method test_get_correct_unmatched_column_indicator (line 95) | def test_get_correct_unmatched_column_indicator(self):
    method test_get_correct_ignored_column_indicator (line 105) | def test_get_correct_ignored_column_indicator(self):
    method test_get_correct_unmatched_ignored_column_indices (line 115) | def test_get_correct_unmatched_ignored_column_indices(self):
    method test_all_columns_accounted_for (line 128) | def test_all_columns_accounted_for(self):

FILE: object_detector_app/object_detection/core/minibatch_sampler.py
  class MinibatchSampler (line 39) | class MinibatchSampler(object):
    method __init__ (line 43) | def __init__(self):
    method subsample (line 48) | def subsample(self, indicator, batch_size, **params):
    method subsample_indicator (line 64) | def subsample_indicator(indicator, num_samples):

FILE: object_detector_app/object_detection/core/minibatch_sampler_test.py
  class MinibatchSamplerTest (line 24) | class MinibatchSamplerTest(tf.test.TestCase):
    method test_subsample_indicator_when_more_true_elements_than_num_samples (line 26) | def test_subsample_indicator_when_more_true_elements_than_num_samples(...
    method test_subsample_when_more_true_elements_than_num_samples_no_shape (line 37) | def test_subsample_when_more_true_elements_than_num_samples_no_shape(s...
    method test_subsample_indicator_when_less_true_elements_than_num_samples (line 50) | def test_subsample_indicator_when_less_true_elements_than_num_samples(...
    method test_subsample_indicator_when_num_samples_is_zero (line 61) | def test_subsample_indicator_when_num_samples_is_zero(self):
    method test_subsample_indicator_when_indicator_all_false (line 72) | def test_subsample_indicator_when_indicator_all_false(self):

FILE: object_detector_app/object_detection/core/model.py
  class DetectionModel (line 53) | class DetectionModel(object):
    method __init__ (line 57) | def __init__(self, num_classes):
    method num_classes (line 69) | def num_classes(self):
    method groundtruth_lists (line 72) | def groundtruth_lists(self, field):
    method preprocess (line 91) | def preprocess(self, inputs):
    method predict (line 130) | def predict(self, preprocessed_inputs):
    method postprocess (line 146) | def postprocess(self, prediction_dict, **params):
    method loss (line 179) | def loss(self, prediction_dict):
    method provide_groundtruth (line 194) | def provide_groundtruth(self,
    method restore_fn (line 231) | def restore_fn(self, checkpoint_path, from_detection_checkpoint=True):

FILE: object_detector_app/object_detection/core/post_processing.py
  function multiclass_non_max_suppression (line 25) | def multiclass_non_max_suppression(boxes,
  function batch_multiclass_non_max_suppression (line 167) | def batch_multiclass_non_max_suppression(boxes,

FILE: object_detector_app/object_detection/core/post_processing_test.py
  class MulticlassNonMaxSuppressionTest (line 23) | class MulticlassNonMaxSuppressionTest(tf.test.TestCase):
    method test_with_invalid_scores_size (line 25) | def test_with_invalid_scores_size(self):
    method test_multiclass_nms_select_with_shared_boxes (line 43) | def test_multiclass_nms_select_with_shared_boxes(self):
    method test_multiclass_nms_select_with_shared_boxes_given_keypoints (line 77) | def test_multiclass_nms_select_with_shared_boxes_given_keypoints(self):
    method test_multiclass_nms_with_shared_boxes_given_keypoint_heatmaps (line 130) | def test_multiclass_nms_with_shared_boxes_given_keypoint_heatmaps(self):
    method test_multiclass_nms_with_additional_fields (line 186) | def test_multiclass_nms_with_additional_fields(self):
    method test_multiclass_nms_select_with_shared_boxes_given_masks (line 248) | def test_multiclass_nms_select_with_shared_boxes_given_masks(self):
    method test_multiclass_nms_select_with_clip_window (line 301) | def test_multiclass_nms_select_with_clip_window(self):
    method test_multiclass_nms_select_with_clip_window_change_coordinate_frame (line 325) | def test_multiclass_nms_select_with_clip_window_change_coordinate_fram...
    method test_multiclass_nms_select_with_per_class_cap (line 349) | def test_multiclass_nms_select_with_per_class_cap(self):
    method test_multiclass_nms_select_with_total_cap (line 382) | def test_multiclass_nms_select_with_total_cap(self):
    method test_multiclass_nms_threshold_then_select_with_shared_boxes (line 416) | def test_multiclass_nms_threshold_then_select_with_shared_boxes(self):
    method test_multiclass_nms_select_with_separate_boxes (line 439) | def test_multiclass_nms_select_with_separate_boxes(self):
    method test_batch_multiclass_nms_with_batch_size_1 (line 474) | def test_batch_multiclass_nms_with_batch_size_1(self):
    method test_batch_multiclass_nms_with_batch_size_2 (line 509) | def test_batch_multiclass_nms_with_batch_size_2(self):
    method test_batch_multiclass_nms_with_masks (line 550) | def test_batch_multiclass_nms_with_masks(self):
    method test_batch_multiclass_nms_with_masks_and_num_valid_boxes (line 610) | def test_batch_multiclass_nms_with_masks_and_num_valid_boxes(self):

FILE: object_detector_app/object_detection/core/prefetcher.py
  function prefetch (line 20) | def prefetch(tensor_dict, capacity):

FILE: object_detector_app/object_detection/core/prefetcher_test.py
  class PrefetcherTest (line 24) | class PrefetcherTest(tf.test.TestCase):
    method test_prefetch_tensors_with_fully_defined_shapes (line 26) | def test_prefetch_tensors_with_fully_defined_shapes(self):
    method test_prefetch_tensors_with_partially_defined_shapes (line 61) | def test_prefetch_tensors_with_partially_defined_shapes(self):

FILE: object_detector_app/object_detection/core/preprocessor.py
  function _apply_with_random_selector (line 57) | def _apply_with_random_selector(x, func, num_cases):
  function _apply_with_random_selector_tuples (line 76) | def _apply_with_random_selector_tuples(x, func, num_cases):
  function _random_integer (line 104) | def _random_integer(minval, maxval, seed):
  function normalize_image (line 119) | def normalize_image(image, original_minval, original_maxval, target_minval,
  function flip_boxes (line 150) | def flip_boxes(boxes):
  function retain_boxes_above_threshold (line 170) | def retain_boxes_above_threshold(
  function _flip_masks (line 224) | def _flip_masks(masks):
  function random_horizontal_flip (line 238) | def random_horizontal_flip(
  function random_pixel_value_scale (line 328) | def random_pixel_value_scale(image, minval=0.9, maxval=1.1, seed=None):
  function random_image_scale (line 359) | def random_image_scale(image,
  function random_rgb_to_gray (line 403) | def random_rgb_to_gray(image, probability=0.1, seed=None):
  function random_adjust_brightness (line 432) | def random_adjust_brightness(image, max_delta=0.2):
  function random_adjust_contrast (line 452) | def random_adjust_contrast(image, min_delta=0.8, max_delta=1.25):
  function random_adjust_hue (line 474) | def random_adjust_hue(image, max_delta=0.02):
  function random_adjust_saturation (line 493) | def random_adjust_saturation(image, min_delta=0.8, max_delta=1.25):
  function random_distort_color (line 515) | def random_distort_color(image, color_ordering=0):
  function random_jitter_boxes (line 551) | def random_jitter_boxes(boxes, ratio=0.05, seed=None):
  function _strict_random_crop_image (line 603) | def _strict_random_crop_image(image,
  function random_crop_image (line 730) | def random_crop_image(image,
  function random_pad_image (line 829) | def random_pad_image(image,
  function random_crop_pad_image (line 932) | def random_crop_pad_image(image,
  function random_crop_to_aspect_ratio (line 1028) | def random_crop_to_aspect_ratio(image,
  function random_black_patches (line 1162) | def random_black_patches(image,
  function image_to_float (line 1226) | def image_to_float(image):
  function random_resize_method (line 1240) | def random_resize_method(image, target_size):
  function resize_to_range (line 1259) | def resize_to_range(image,
  function scale_boxes_to_pixel_coordinates (line 1361) | def scale_boxes_to_pixel_coordinates(image, boxes, keypoints=None):
  function resize_image (line 1393) | def resize_image(image,
  function subtract_channel_mean (line 1429) | def subtract_channel_mean(image, means=None):
  function one_hot_encoding (line 1449) | def one_hot_encoding(labels, num_classes=None):
  function rgb_to_gray (line 1474) | def rgb_to_gray(image):
  function ssd_random_crop (line 1487) | def ssd_random_crop(image,
  function ssd_random_crop_pad (line 1586) | def ssd_random_crop_pad(image,
  function ssd_random_crop_fixed_aspect_ratio (line 1664) | def ssd_random_crop_fixed_aspect_ratio(
  function get_default_func_arg_map (line 1751) | def get_default_func_arg_map(include_instance_masks=False,
  function preprocess (line 1844) | def preprocess(tensor_dict, preprocess_options, func_arg_map=None):

FILE: object_detector_app/object_detection/core/preprocessor_test.py
  class PreprocessorTest (line 27) | class PreprocessorTest(tf.test.TestCase):
    method createColorfulTestImage (line 29) | def createColorfulTestImage(self):
    method createTestImages (line 42) | def createTestImages(self):
    method createTestBoxes (line 58) | def createTestBoxes(self):
    method createTestLabelScores (line 63) | def createTestLabelScores(self):
    method createTestLabelScoresWithMissingScore (line 66) | def createTestLabelScoresWithMissingScore(self):
    method createTestMasks (line 69) | def createTestMasks(self):
    method createTestKeypoints (line 79) | def createTestKeypoints(self):
    method createTestKeypointsInsideCrop (line 86) | def createTestKeypointsInsideCrop(self):
    method createTestKeypointsOutsideCrop (line 93) | def createTestKeypointsOutsideCrop(self):
    method createKeypointFlipPermutation (line 100) | def createKeypointFlipPermutation(self):
    method createTestLabels (line 103) | def createTestLabels(self):
    method createTestBoxesOutOfImage (line 107) | def createTestBoxesOutOfImage(self):
    method expectedImagesAfterNormalization (line 112) | def expectedImagesAfterNormalization(self):
    method expectedMaxImageAfterColorScale (line 128) | def expectedMaxImageAfterColorScale(self):
    method expectedMinImageAfterColorScale (line 144) | def expectedMinImageAfterColorScale(self):
    method expectedImagesAfterMirroring (line 160) | def expectedImagesAfterMirroring(self):
    method expectedBoxesAfterMirroring (line 176) | def expectedBoxesAfterMirroring(self):
    method expectedBoxesAfterXY (line 181) | def expectedBoxesAfterXY(self):
    method expectedMasksAfterMirroring (line 186) | def expectedMasksAfterMirroring(self):
    method expectedLabelScoresAfterThresholding (line 196) | def expectedLabelScoresAfterThresholding(self):
    method expectedBoxesAfterThresholding (line 199) | def expectedBoxesAfterThresholding(self):
    method expectedLabelsAfterThresholding (line 202) | def expectedLabelsAfterThresholding(self):
    method expectedMasksAfterThresholding (line 205) | def expectedMasksAfterThresholding(self):
    method expectedKeypointsAfterThresholding (line 212) | def expectedKeypointsAfterThresholding(self):
    method expectedLabelScoresAfterThresholdingWithMissingScore (line 218) | def expectedLabelScoresAfterThresholdingWithMissingScore(self):
    method expectedBoxesAfterThresholdingWithMissingScore (line 221) | def expectedBoxesAfterThresholdingWithMissingScore(self):
    method expectedLabelsAfterThresholdingWithMissingScore (line 224) | def expectedLabelsAfterThresholdingWithMissingScore(self):
    method testNormalizeImage (line 227) | def testNormalizeImage(self):
    method testRetainBoxesAboveThreshold (line 250) | def testRetainBoxesAboveThreshold(self):
    method testRetainBoxesAboveThresholdWithMasks (line 272) | def testRetainBoxesAboveThresholdWithMasks(self):
    method testRetainBoxesAboveThresholdWithKeypoints (line 286) | def testRetainBoxesAboveThresholdWithKeypoints(self):
    method testRetainBoxesAboveThresholdWithMissingScore (line 302) | def testRetainBoxesAboveThresholdWithMissingScore(self):
    method testRandomFlipBoxes (line 324) | def testRandomFlipBoxes(self):
    method testFlipMasks (line 344) | def testFlipMasks(self):
    method testRandomHorizontalFlip (line 352) | def testRandomHorizontalFlip(self):
    method testRunRandomHorizontalFlipWithMaskAndKeypoints (line 383) | def testRunRandomHorizontalFlipWithMaskAndKeypoints(self):
    method testRandomPixelValueScale (line 414) | def testRandomPixelValueScale(self):
    method testRandomImageScale (line 438) | def testRandomImageScale(self):
    method testRandomRGBtoGray (line 458) | def testRandomRGBtoGray(self):
    method testRandomAdjustBrightness (line 491) | def testRandomAdjustBrightness(self):
    method testRandomAdjustContrast (line 511) | def testRandomAdjustContrast(self):
    method testRandomAdjustHue (line 531) | def testRandomAdjustHue(self):
    method testRandomDistortColor (line 551) | def testRandomDistortColor(self):
    method testRandomJitterBoxes (line 571) | def testRandomJitterBoxes(self):
    method testRandomCropImage (line 586) | def testRandomCropImage(self):
    method testRandomCropImageGrayscale (line 620) | def testRandomCropImageGrayscale(self):
    method testRandomCropImageWithBoxOutOfImage (line 657) | def testRandomCropImageWithBoxOutOfImage(self):
    method testRandomCropImageWithRandomCoefOne (line 690) | def testRandomCropImageWithRandomCoefOne(self):
    method testRandomCropWithMockSampleDistortedBoundingBox (line 736) | def testRandomCropWithMockSampleDistortedBoundingBox(self):
    method testStrictRandomCropImageWithMasks (line 784) | def testStrictRandomCropImageWithMasks(self):
    method testStrictRandomCropImageWithKeypoints (line 813) | def testStrictRandomCropImageWithKeypoints(self):
    method testRunRandomCropImageWithMasks (line 851) | def testRunRandomCropImageWithMasks(self):
    method testRunRandomCropImageWithKeypointsInsideCrop (line 902) | def testRunRandomCropImageWithKeypointsInsideCrop(self):
    method testRunRandomCropImageWithKeypointsOutsideCrop (line 962) | def testRunRandomCropImageWithKeypointsOutsideCrop(self):
    method testRunRetainBoxesAboveThreshold (line 1022) | def testRunRetainBoxesAboveThreshold(self):
    method testRunRetainBoxesAboveThresholdWithMasks (line 1060) | def testRunRetainBoxesAboveThresholdWithMasks(self):
    method testRunRetainBoxesAboveThresholdWithKeypoints (line 1091) | def testRunRetainBoxesAboveThresholdWithKeypoints(self):
    method testRunRandomCropToAspectRatioWithMasks (line 1122) | def testRunRandomCropToAspectRatioWithMasks(self):
    method testRunRandomCropToAspectRatioWithKeypoints (line 1165) | def testRunRandomCropToAspectRatioWithKeypoints(self):
    method testRandomPadImage (line 1212) | def testRandomPadImage(self):
    method testRandomCropPadImageWithRandomCoefOne (line 1256) | def testRandomCropPadImageWithRandomCoefOne(self):
    method testRandomCropToAspectRatio (line 1302) | def testRandomCropToAspectRatio(self):
    method testRandomBlackPatches (line 1344) | def testRandomBlackPatches(self):
    method testRandomResizeMethod (line 1368) | def testRandomResizeMethod(self):
    method testResizeToRange (line 1393) | def testResizeToRange(self):
    method testResizeToRangeWithMasks (line 1410) | def testResizeToRangeWithMasks(self):
    method testResizeToRangeWithNoInstanceMask (line 1437) | def testResizeToRangeWithNoInstanceMask(self):
    method testResizeImageWithMasks (line 1464) | def testResizeImageWithMasks(self):
    method testResizeImageWithNoInstanceMask (line 1491) | def testResizeImageWithNoInstanceMask(self):
    method testResizeToRange4DImageTensor (line 1518) | def testResizeToRange4DImageTensor(self):
    method testResizeToRangeSameMinMax (line 1523) | def testResizeToRangeSameMinMax(self):
    method testScaleBoxesToPixelCoordinates (line 1540) | def testScaleBoxesToPixelCoordinates(self):
    method testScaleBoxesToPixelCoordinatesWithKeypoints (line 1557) | def testScaleBoxesToPixelCoordinatesWithKeypoints(self):
    method testSubtractChannelMean (line 1578) | def testSubtractChannelMean(self):
    method testOneHotEncoding (line 1590) | def testOneHotEncoding(self):
    method testSSDRandomCrop (line 1599) | def testSSDRandomCrop(self):
    method testSSDRandomCropPad (line 1633) | def testSSDRandomCropPad(self):
    method testSSDRandomCropFixedAspectRatio (line 1667) | def testSSDRandomCropFixedAspectRatio(self):
    method testSSDRandomCropFixedAspectRatioWithMasksAndKeypoints (line 1703) | def testSSDRandomCropFixedAspectRatioWithMasksAndKeypoints(self):

FILE: object_detector_app/object_detection/core/region_similarity_calculator.py
  class RegionSimilarityCalculator (line 29) | class RegionSimilarityCalculator(object):
    method compare (line 33) | def compare(self, boxlist1, boxlist2, scope=None):
    method _compare (line 54) | def _compare(self, boxlist1, boxlist2):
  class IouSimilarity (line 58) | class IouSimilarity(RegionSimilarityCalculator):
    method _compare (line 64) | def _compare(self, boxlist1, boxlist2):
  class NegSqDistSimilarity (line 77) | class NegSqDistSimilarity(RegionSimilarityCalculator):
    method _compare (line 84) | def _compare(self, boxlist1, boxlist2):
  class IoaSimilarity (line 97) | class IoaSimilarity(RegionSimilarityCalculator):
    method _compare (line 104) | def _compare(self, boxlist1, boxlist2):

FILE: object_detector_app/object_detection/core/region_similarity_calculator_test.py
  class RegionSimilarityCalculatorTest (line 23) | class RegionSimilarityCalculatorTest(tf.test.TestCase):
    method test_get_correct_pairwise_similarity_based_on_iou (line 25) | def test_get_correct_pairwise_similarity_based_on_iou(self):
    method test_get_correct_pairwise_similarity_based_on_squared_distances (line 38) | def test_get_correct_pairwise_similarity_based_on_squared_distances(se...
    method test_get_correct_pairwise_similarity_based_on_ioa (line 53) | def test_get_correct_pairwise_similarity_based_on_ioa(self):

FILE: object_detector_app/object_detection/core/standard_fields.py
  class InputDataFields (line 26) | class InputDataFields(object):
  class BoxListFields (line 76) | class BoxListFields(object):
  class TfExampleFields (line 99) | class TfExampleFields(object):

FILE: object_detector_app/object_detection/core/target_assigner.py
  class TargetAssigner (line 47) | class TargetAssigner(object):
    method __init__ (line 50) | def __init__(self, similarity_calc, matcher, box_coder,
    method box_coder (line 93) | def box_coder(self):
    method assign (line 96) | def assign(self, anchors, groundtruth_boxes, groundtruth_labels=None,
    method _reset_target_shape (line 168) | def _reset_target_shape(self, target, num_anchors):
    method _create_regression_targets (line 184) | def _create_regression_targets(self, anchors, groundtruth_boxes, match):
    method _default_regression_target (line 214) | def _default_regression_target(self):
    method _create_classification_targets (line 227) | def _create_classification_targets(self, groundtruth_labels, match):
    method _create_regression_weights (line 262) | def _create_regression_weights(self, match):
    method _create_classification_weights (line 280) | def _create_classification_weights(self,
    method get_box_coder (line 310) | def get_box_coder(self):
  function create_target_assigner (line 321) | def create_target_assigner(reference, stage=None,
  function batch_assign_targets (line 385) | def batch_assign_targets(target_assigner,

FILE: object_detector_app/object_detection/core/target_assigner_test.py
  class TargetAssignerTest (line 28) | class TargetAssignerTest(tf.test.TestCase):
    method test_assign_agnostic (line 30) | def test_assign_agnostic(self):
    method test_assign_with_ignored_matches (line 74) | def test_assign_with_ignored_matches(self):
    method test_assign_multiclass (line 124) | def test_assign_multiclass(self):
    method test_assign_multiclass_unequal_class_weights (line 182) | def test_assign_multiclass_unequal_class_weights(self):
    method test_assign_multidimensional_class_targets (line 217) | def test_assign_multidimensional_class_targets(self):
    method test_assign_empty_groundtruth (line 275) | def test_assign_empty_groundtruth(self):
    method test_raises_error_on_invalid_groundtruth_labels (line 330) | def test_raises_error_on_invalid_groundtruth_labels(self):
  class BatchTargetAssignerTest (line 356) | class BatchTargetAssignerTest(tf.test.TestCase):
    method _get_agnostic_target_assigner (line 358) | def _get_agnostic_target_assigner(self):
    method _get_multi_class_target_assigner (line 368) | def _get_multi_class_target_assigner(self, num_classes):
    method _get_multi_dimensional_target_assigner (line 379) | def _get_multi_dimensional_target_assigner(self, target_dimensions):
    method test_batch_assign_targets (line 391) | def test_batch_assign_targets(self):
    method test_batch_assign_multiclass_targets (line 446) | def test_batch_assign_multiclass_targets(self):
    method test_batch_assign_multidimensional_targets (line 515) | def test_batch_assign_multidimensional_targets(self):
    method test_batch_assign_empty_groundtruth (line 595) | def test_batch_assign_empty_groundtruth(self):
  class CreateTargetAssignerTest (line 641) | class CreateTargetAssignerTest(tf.test.TestCase):
    method test_create_target_assigner (line 643) | def test_create_target_assigner(self):

FILE: object_detector_app/object_detection/create_pascal_tf_record.py
  function dict_to_tf_example (line 58) | def dict_to_tf_example(data,
  function main (line 143) | def main(_):

FILE: object_detector_app/object_detection/create_pascal_tf_record_test.py
  class DictToTFExampleTest (line 27) | class DictToTFExampleTest(tf.test.TestCase):
    method _assertProtoEqual (line 29) | def _assertProtoEqual(self, proto_field, expectation):
    method test_dict_to_tf_example (line 39) | def test_dict_to_tf_example(self):

FILE: object_detector_app/object_detection/create_pet_tf_record.py
  function get_class_name_from_filename (line 50) | def get_class_name_from_filename(file_name):
  function dict_to_tf_example (line 64) | def dict_to_tf_example(data,
  function create_tf_record (line 147) | def create_tf_record(output_filename,
  function main (line 182) | def main(_):

FILE: object_detector_app/object_detection/data_decoders/tf_example_decoder.py
  class TfExampleDecoder (line 29) | class TfExampleDecoder(data_decoder.DataDecoder):
    method __init__ (line 32) | def __init__(self):
    method Decode (line 85) | def Decode(self, tf_example_string_tensor):
    method _reshape_instance_masks (line 128) | def _reshape_instance_masks(self, keys_to_tensors):

FILE: object_detector_app/object_detection/data_decoders/tf_example_decoder_test.py
  class TfExampleDecoderTest (line 25) | class TfExampleDecoderTest(tf.test.TestCase):
    method _EncodeImage (line 27) | def _EncodeImage(self, image_tensor, encoding_type='jpeg'):
    method _DecodeImage (line 37) | def _DecodeImage(self, image_encoded, encoding_type='jpeg'):
    method _Int64Feature (line 47) | def _Int64Feature(self, value):
    method _FloatFeature (line 50) | def _FloatFeature(self, value):
    method _BytesFeature (line 53) | def _BytesFeature(self, value):
    method testDecodeJpegImage (line 56) | def testDecodeJpegImage(self):
    method testDecodeImageKeyAndFilename (line 77) | def testDecodeImageKeyAndFilename(self):
    method testDecodePngImage (line 95) | def testDecodePngImage(self):
    method testDecodeBoundingBox (line 116) | def testDecodeBoundingBox(self):
    method testDecodeObjectLabel (line 145) | def testDecodeObjectLabel(self):
    method testDecodeObjectArea (line 168) | def testDecodeObjectArea(self):
    method testDecodeObjectIsCrowd (line 189) | def testDecodeObjectIsCrowd(self):
    method testDecodeObjectDifficult (line 212) | def testDecodeObjectDifficult(self):
    method testDecodeInstanceSegmentation (line 235) | def testDecodeInstanceSegmentation(self):

FILE: object_detector_app/object_detection/eval.py
  function get_configs_from_pipeline_file (line 82) | def get_configs_from_pipeline_file():
  function get_configs_from_multiple_files (line 106) | def get_configs_from_multiple_files():
  function main (line 134) | def main(unused_argv):

FILE: object_detector_app/object_detection/eval_util.py
  function write_metrics (line 33) | def write_metrics(metrics, global_step, summary_dir):
  function evaluate_detection_results_pascal_voc (line 53) | def evaluate_detection_results_pascal_voc(result_lists,
  function visualize_detection_results (line 165) | def visualize_detection_results(result_dict,
  function run_checkpoint_once (line 285) | def run_checkpoint_once(tensor_dict,
  function repeated_checkpoint_run (line 415) | def repeated_checkpoint_run(tensor_dict,

FILE: object_detector_app/object_detection/evaluator.py
  function _extract_prediction_tensors (line 38) | def _extract_prediction_tensors(model,
  function evaluate (line 109) | def evaluate(create_input_dict_fn, create_model_fn, eval_config, categor...

FILE: object_detector_app/object_detection/export_inference_graph.py
  function main (line 77) | def main(_):

FILE: object_detector_app/object_detection/exporter.py
  function freeze_graph_with_def_protos (line 35) | def freeze_graph_with_def_protos(
  function _tf_example_input_placeholder (line 101) | def _tf_example_input_placeholder():
  function _image_tensor_input_placeholder (line 110) | def _image_tensor_input_placeholder():
  function _add_output_tensor_nodes (line 121) | def _add_output_tensor_nodes(postprocessed_tensors):
  function _write_inference_graph (line 151) | def _write_inference_graph(inference_graph_path,
  function _export_inference_graph (line 199) | def _export_inference_graph(input_type,
  function export_inference_graph (line 215) | def export_inference_graph(input_type, pipeline_config, checkpoint_path,

FILE: object_detector_app/object_detection/exporter_test.py
  class FakeModel (line 27) | class FakeModel(model.DetectionModel):
    method preprocess (line 29) | def preprocess(self, inputs):
    method predict (line 35) | def predict(self, preprocessed_inputs):
    method postprocess (line 38) | def postprocess(self, prediction_dict):
    method restore_fn (line 48) | def restore_fn(self, checkpoint_path, from_detection_checkpoint):
    method loss (line 51) | def loss(self, prediction_dict):
  class ExportInferenceGraphTest (line 55) | class ExportInferenceGraphTest(tf.test.TestCase):
    method _save_checkpoint_from_mock_model (line 57) | def _save_checkpoint_from_mock_model(self, checkpoint_path,
    method _load_inference_graph (line 71) | def _load_inference_graph(self, inference_graph_path):
    method _create_tf_example (line 81) | def _create_tf_example(self, image_array):
    method test_export_graph_with_image_tensor_input (line 93) | def test_export_graph_with_image_tensor_input(self):
    method test_export_graph_with_tf_example_input (line 108) | def test_export_graph_with_tf_example_input(self):
    method test_export_frozen_graph (line 122) | def test_export_frozen_graph(self):
    method test_export_frozen_graph_with_moving_averages (line 139) | def test_export_frozen_graph_with_moving_averages(self):
    method test_export_and_run_inference_with_image_tensor (line 156) | def test_export_and_run_inference_with_image_tensor(self):
    method test_export_and_run_inference_with_tf_example (line 189) | def test_export_and_run_inference_with_tf_example(self):

FILE: object_detector_app/object_detection/matchers/argmax_matcher.py
  class ArgMaxMatcher (line 35) | class ArgMaxMatcher(matcher.Matcher):
    method __init__ (line 54) | def __init__(self,
    method _match (line 102) | def _match(self, similarity_matrix):
    method _set_values_using_indicator (line 177) | def _set_values_using_indicator(self, x, indicator, val):

FILE: object_detector_app/object_detection/matchers/argmax_matcher_test.py
  class ArgMaxMatcherTest (line 24) | class ArgMaxMatcherTest(tf.test.TestCase):
    method test_return_correct_matches_with_default_thresholds (line 26) | def test_return_correct_matches_with_default_thresholds(self):
    method test_return_correct_matches_with_empty_rows (line 49) | def test_return_correct_matches_with_empty_rows(self):
    method test_return_correct_matches_with_matched_threshold (line 60) | def test_return_correct_matches_with_matched_threshold(self):
    method test_return_correct_matches_with_matched_and_unmatched_threshold (line 88) | def test_return_correct_matches_with_matched_and_unmatched_threshold(s...
    method test_return_correct_matches_negatives_lower_than_unmatched_false (line 114) | def test_return_correct_matches_negatives_lower_than_unmatched_false(s...
    method test_return_correct_matches_unmatched_row_not_using_force_match (line 141) | def test_return_correct_matches_unmatched_row_not_using_force_match(se...
    method test_return_correct_matches_unmatched_row_while_using_force_match (line 167) | def test_return_correct_matches_unmatched_row_while_using_force_match(...
    method test_valid_arguments_corner_case (line 194) | def test_valid_arguments_corner_case(self):
    method test_invalid_arguments_corner_case_negatives_lower_than_thres_false (line 198) | def test_invalid_arguments_corner_case_negatives_lower_than_thres_fals...
    method test_invalid_arguments_no_matched_threshold (line 204) | def test_invalid_arguments_no_matched_threshold(self):
    method test_invalid_arguments_unmatched_thres_larger_than_matched_thres (line 209) | def test_invalid_arguments_unmatched_thres_larger_than_matched_thres(s...
    method test_set_values_using_indicator (line 214) | def test_set_values_using_indicator(self):

FILE: object_detector_app/object_detection/matchers/bipartite_matcher.py
  class GreedyBipartiteMatcher (line 24) | class GreedyBipartiteMatcher(matcher.Matcher):
    method _match (line 27) | def _match(self, similarity_matrix, num_valid_rows=-1):

FILE: object_detector_app/object_detection/matchers/bipartite_matcher_test.py
  class GreedyBipartiteMatcherTest (line 23) | class GreedyBipartiteMatcherTest(tf.test.TestCase):
    method test_get_expected_matches_when_all_rows_are_valid (line 25) | def test_get_expected_matches_when_all_rows_are_valid(self):
    method test_get_expected_matches_with_valid_rows_set_to_minus_one (line 36) | def test_get_expected_matches_with_valid_rows_set_to_minus_one(self):
    method test_get_no_matches_with_zero_valid_rows (line 47) | def test_get_no_matches_with_zero_valid_rows(self):
    method test_get_expected_matches_with_only_one_valid_row (line 58) | def test_get_expected_matches_with_only_one_valid_row(self):

FILE: object_detector_app/object_detection/meta_architectures/faster_rcnn_meta_arch.py
  class FasterRCNNFeatureExtractor (line 88) | class FasterRCNNFeatureExtractor(object):
    method __init__ (line 91) | def __init__(self,
    method preprocess (line 111) | def preprocess(self, resized_inputs):
    method extract_proposal_features (line 115) | def extract_proposal_features(self, preprocessed_inputs, scope):
    method _extract_proposal_features (line 134) | def _extract_proposal_features(self, preprocessed_inputs, scope):
    method extract_box_classifier_features (line 138) | def extract_box_classifier_features(self, proposal_feature_maps, scope):
    method _extract_box_classifier_features (line 156) | def _extract_box_classifier_features(self, proposal_feature_maps, scope):
    method restore_from_classification_checkpoint_fn (line 160) | def restore_from_classification_checkpoint_fn(
  class FasterRCNNMetaArch (line 194) | class FasterRCNNMetaArch(model.DetectionModel):
    method __init__ (line 197) | def __init__(self,
    method first_stage_feature_extractor_scope (line 394) | def first_stage_feature_extractor_scope(self):
    method second_stage_feature_extractor_scope (line 398) | def second_stage_feature_extractor_scope(self):
    method first_stage_box_predictor_scope (line 402) | def first_stage_box_predictor_scope(self):
    method second_stage_box_predictor_scope (line 406) | def second_stage_box_predictor_scope(self):
    method max_num_proposals (line 410) | def max_num_proposals(self):
    method preprocess (line 425) | def preprocess(self, inputs):
    method predict (line 453) | def predict(self, preprocessed_inputs):
    method _predict_second_stage (line 556) | def _predict_second_stage(self, rpn_box_encodings,
    method _extract_rpn_feature_maps (line 636) | def _extract_rpn_feature_maps(self, preprocessed_inputs):
    method _predict_rpn_proposals (line 676) | def _predict_rpn_proposals(self, rpn_box_predictor_features):
    method _remove_invalid_anchors_and_predictions (line 716) | def _remove_invalid_anchors_and_predictions(
    method _flatten_first_two_dimensions (line 764) | def _flatten_first_two_dimensions(self, inputs):
    method postprocess (line 783) | def postprocess(self, prediction_dict):
    method _postprocess_rpn (line 838) | def _postprocess_rpn(self,
    method _format_groundtruth_data (line 925) | def _format_groundtruth_data(self, image_shape):
    method _sample_box_classifier_minibatch (line 957) | def _sample_box_classifier_minibatch(self,
    method _compute_second_stage_input_feature_maps (line 993) | def _compute_second_stage_input_feature_maps(self, features_to_crop,
    method _postprocess_box_classifier (line 1030) | def _postprocess_box_classifier(self,
    method _batch_decode_refined_boxes (line 1107) | def _batch_decode_refined_boxes(self, refined_box_encodings, proposal_...
    method loss (line 1132) | def loss(self, prediction_dict, scope=None):
    method _loss_rpn (line 1176) | def _loss_rpn(self,
    method _loss_box_classifier (line 1253) | def _loss_box_classifier(self,
    method _padded_batched_proposals_indicator (line 1352) | def _padded_batched_proposals_indicator(self,
    method _unpad_proposals_and_apply_hard_mining (line 1371) | def _unpad_proposals_and_apply_hard_mining(self,
    method restore_fn (line 1416) | def restore_fn(self, checkpoint_path, from_detection_checkpoint=True):

FILE: object_detector_app/object_detection/meta_architectures/faster_rcnn_meta_arch_test.py
  class FasterRCNNMetaArchTest (line 23) | class FasterRCNNMetaArchTest(
    method test_postprocess_second_stage_only_inference_mode_with_masks (line 26) | def test_postprocess_second_stage_only_inference_mode_with_masks(self):

FILE: object_detector_app/object_detection/meta_architectures/faster_rcnn_meta_arch_test_lib.py
  class FakeFasterRCNNFeatureExtractor (line 34) | class FakeFasterRCNNFeatureExtractor(
    method __init__ (line 38) | def __init__(self):
    method preprocess (line 45) | def preprocess(self, resized_inputs):
    method _extract_proposal_features (line 48) | def _extract_proposal_features(self, preprocessed_inputs, scope):
    method _extract_box_classifier_features (line 53) | def _extract_box_classifier_features(self, proposal_feature_maps, scope):
  class FasterRCNNMetaArchTestBase (line 59) | class FasterRCNNMetaArchTestBase(tf.test.TestCase):
    method _build_arg_scope_with_hyperparams (line 62) | def _build_arg_scope_with_hyperparams(self,
    method _get_second_stage_box_predictor_text_proto (line 69) | def _get_second_stage_box_predictor_text_proto(self):
    method _get_second_stage_box_predictor (line 92) | def _get_second_stage_box_predictor(self, num_classes, is_training):
    method _get_model (line 102) | def _get_model(self, box_predictor, **common_kwargs):
    method _build_model (line 110) | def _build_model(self,
    method test_predict_gives_correct_shapes_in_inference_mode_first_stage_only (line 229) | def test_predict_gives_correct_shapes_in_inference_mode_first_stage_only(
    method test_predict_gives_valid_anchors_in_training_mode_first_stage_only (line 285) | def test_predict_gives_valid_anchors_in_training_mode_first_stage_only...
    method test_predict_gives_correct_shapes_in_inference_mode_both_stages (line 338) | def test_predict_gives_correct_shapes_in_inference_mode_both_stages(se...
    method test_predict_gives_correct_shapes_in_train_mode_both_stages (line 373) | def test_predict_gives_correct_shapes_in_train_mode_both_stages(self):
    method test_postprocess_first_stage_only_inference_mode (line 425) | def test_postprocess_first_stage_only_inference_mode(self):
    method test_postprocess_first_stage_only_train_mode (line 478) | def test_postprocess_first_stage_only_train_mode(self):
    method test_postprocess_second_stage_only_inference_mode (line 537) | def test_postprocess_second_stage_only_inference_mode(self):
    method test_loss_first_stage_only_mode (line 574) | def test_loss_first_stage_only_mode(self):
    method test_loss_full (line 624) | def test_loss_full(self):
    method test_loss_full_zero_padded_proposals (line 704) | def test_loss_full_zero_padded_proposals(self):
    method test_loss_full_zero_padded_proposals_nonzero_loss_with_two_images (line 775) | def test_loss_full_zero_padded_proposals_nonzero_loss_with_two_images(...
    method test_loss_with_hard_mining (line 874) | def test_loss_with_hard_mining(self):
    method test_restore_fn_classification (line 960) | def test_restore_fn_classification(self):
    method test_restore_fn_detection (line 994) | def test_restore_fn_detection(self):

FILE: object_detector_app/object_detection/meta_architectures/rfcn_meta_arch.py
  class RFCNMetaArch (line 48) | class RFCNMetaArch(faster_rcnn_meta_arch.FasterRCNNMetaArch):
    method __init__ (line 51) | def __init__(self,
    method _predict_second_stage (line 193) | def _predict_second_stage(self, rpn_box_encodings,

FILE: object_detector_app/object_detection/meta_architectures/rfcn_meta_arch_test.py
  class RFCNMetaArchTest (line 24) | class RFCNMetaArchTest(
    method _get_second_stage_box_predictor_text_proto (line 27) | def _get_second_stage_box_predictor_text_proto(self):
    method _get_model (line 50) | def _get_model(self, box_predictor, **common_kwargs):

FILE: object_detector_app/object_detection/meta_architectures/ssd_meta_arch.py
  class SSDFeatureExtractor (line 37) | class SSDFeatureExtractor(object):
    method __init__ (line 40) | def __init__(self,
    method preprocess (line 51) | def preprocess(self, resized_inputs):
    method extract_features (line 65) | def extract_features(self, preprocessed_inputs):
  class SSDMetaArch (line 82) | class SSDMetaArch(model.DetectionModel):
    method __init__ (line 85) | def __init__(self,
    method anchors (line 183) | def anchors(self):
    method preprocess (line 190) | def preprocess(self, inputs):
    method predict (line 215) | def predict(self, preprocessed_inputs):
    method _add_box_predictions_to_feature_maps (line 254) | def _add_box_predictions_to_feature_maps(self, feature_maps):
    method _get_feature_map_spatial_dims (line 316) | def _get_feature_map_spatial_dims(self, feature_maps):
    method postprocess (line 331) | def postprocess(self, prediction_dict):
    method loss (line 383) | def loss(self, prediction_dict, scope=None):
    method _assign_targets (line 445) | def _assign_targets(self, groundtruth_boxes_list, groundtruth_classes_...
    method _summarize_input (line 484) | def _summarize_input(self, groundtruth_boxes_list, match_list):
    method _apply_hard_mining (line 517) | def _apply_hard_mining(self, location_losses, cls_losses, prediction_d...
    method restore_fn (line 565) | def restore_fn(self, checkpoint_path, from_detection_checkpoint=True):

FILE: object_detector_app/object_detection/meta_architectures/ssd_meta_arch_test.py
  class FakeSSDFeatureExtractor (line 33) | class FakeSSDFeatureExtractor(ssd_meta_arch.SSDFeatureExtractor):
    method __init__ (line 35) | def __init__(self):
    method preprocess (line 39) | def preprocess(self, resized_inputs):
    method extract_features (line 42) | def extract_features(self, preprocessed_inputs):
  class MockAnchorGenerator2x2 (line 49) | class MockAnchorGenerator2x2(anchor_generator.AnchorGenerator):
    method name_scope (line 52) | def name_scope(self):
    method num_anchors_per_location (line 55) | def num_anchors_per_location(self):
    method _generate (line 58) | def _generate(self, feature_map_shape_list):
  class SsdMetaArchTest (line 66) | class SsdMetaArchTest(tf.test.TestCase):
    method setUp (line 68) | def setUp(self):
    method test_predict_results_have_correct_keys_and_shapes (line 119) | def test_predict_results_have_correct_keys_and_shapes(self):
    method test_postprocess_results_are_correct (line 143) | def test_postprocess_results_are_correct(self):
    method test_loss_results_are_correct (line 181) | def test_loss_results_are_correct(self):
    method test_restore_fn_detection (line 210) | def test_restore_fn_detection(self):
    method test_restore_fn_classification (line 223) | def test_restore_fn_classification(self):

FILE: object_detector_app/object_detection/models/faster_rcnn_inception_resnet_v2_feature_extractor.py
  class FasterRCNNInceptionResnetV2FeatureExtractor (line 34) | class FasterRCNNInceptionResnetV2FeatureExtractor(
    method __init__ (line 38) | def __init__(self,
    method preprocess (line 59) | def preprocess(self, resized_inputs):
    method _extract_proposal_features (line 75) | def _extract_proposal_features(self, preprocessed_inputs, scope):
    method _extract_box_classifier_features (line 112) | def _extract_box_classifier_features(self, proposal_feature_maps, scope):
    method restore_from_classification_checkpoint_fn (line 169) | def restore_from_classification_checkpoint_fn(

FILE: object_detector_app/object_detection/models/faster_rcnn_inception_resnet_v2_feature_extractor_test.py
  class FasterRcnnInceptionResnetV2FeatureExtractorTest (line 23) | class FasterRcnnInceptionResnetV2FeatureExtractorTest(tf.test.TestCase):
    method _build_feature_extractor (line 25) | def _build_feature_extractor(self, first_stage_features_stride):
    method test_extract_proposal_features_returns_expected_size (line 32) | def test_extract_proposal_features_returns_expected_size(self):
    method test_extract_proposal_features_stride_eight (line 47) | def test_extract_proposal_features_stride_eight(self):
    method test_extract_proposal_features_half_size_input (line 62) | def test_extract_proposal_features_half_size_input(self):
    method test_extract_proposal_features_dies_on_invalid_stride (line 77) | def test_extract_proposal_features_dies_on_invalid_stride(self):
    method test_extract_proposal_features_dies_with_incorrect_rank_inputs (line 81) | def test_extract_proposal_features_dies_with_incorrect_rank_inputs(self):
    method test_extract_box_classifier_features_returns_expected_size (line 90) | def test_extract_box_classifier_features_returns_expected_size(self):

FILE: object_detector_app/object_detection/models/faster_rcnn_resnet_v1_feature_extractor.py
  class FasterRCNNResnetV1FeatureExtractor (line 36) | class FasterRCNNResnetV1FeatureExtractor(
    method __init__ (line 40) | def __init__(self,
    method preprocess (line 67) | def preprocess(self, resized_inputs):
    method _extract_proposal_features (line 85) | def _extract_proposal_features(self, preprocessed_inputs, scope):
    method _extract_box_classifier_features (line 131) | def _extract_box_classifier_features(self, proposal_feature_maps, scope):
  class FasterRCNNResnet50FeatureExtractor (line 164) | class FasterRCNNResnet50FeatureExtractor(FasterRCNNResnetV1FeatureExtrac...
    method __init__ (line 167) | def __init__(self,
  class FasterRCNNResnet101FeatureExtractor (line 189) | class FasterRCNNResnet101FeatureExtractor(FasterRCNNResnetV1FeatureExtra...
    method __init__ (line 192) | def __init__(self,
  class FasterRCNNResnet152FeatureExtractor (line 214) | class FasterRCNNResnet152FeatureExtractor(FasterRCNNResnetV1FeatureExtra...
    method __init__ (line 217) | def __init__(self,

FILE: object_detector_app/object_detection/models/faster_rcnn_resnet_v1_feature_extractor_test.py
  class FasterRcnnResnetV1FeatureExtractorTest (line 24) | class FasterRcnnResnetV1FeatureExtractorTest(tf.test.TestCase):
    method _build_feature_extractor (line 26) | def _build_feature_extractor(self,
    method test_extract_proposal_features_returns_expected_size (line 43) | def test_extract_proposal_features_returns_expected_size(self):
    method test_extract_proposal_features_stride_eight (line 59) | def test_extract_proposal_features_stride_eight(self):
    method test_extract_proposal_features_half_size_input (line 74) | def test_extract_proposal_features_half_size_input(self):
    method test_extract_proposal_features_dies_on_invalid_stride (line 89) | def test_extract_proposal_features_dies_on_invalid_stride(self):
    method test_extract_proposal_features_dies_on_very_small_images (line 93) | def test_extract_proposal_features_dies_on_very_small_images(self):
    method test_extract_proposal_features_dies_with_incorrect_rank_inputs (line 109) | def test_extract_proposal_features_dies_with_incorrect_rank_inputs(self):
    method test_extract_box_classifier_features_returns_expected_size (line 118) | def test_extract_box_classifier_features_returns_expected_size(self):

FILE: object_detector_app/object_detection/models/feature_map_generators.py
  function get_depth_fn (line 32) | def get_depth_fn(depth_multiplier, min_depth):
  function multi_resolution_feature_maps (line 48) | def multi_resolution_feature_maps(feature_map_layout, depth_multiplier,

FILE: object_detector_app/object_detection/models/feature_map_generators_test.py
  class MultiResolutionFeatureMapGeneratorTest (line 38) | class MultiResolutionFeatureMapGeneratorTest(tf.test.TestCase):
    method test_get_expected_feature_map_shapes_with_inception_v2 (line 40) | def test_get_expected_feature_map_shapes_with_inception_v2(self):
    method test_get_expected_feature_map_shapes_with_inception_v3 (line 69) | def test_get_expected_feature_map_shapes_with_inception_v3(self):
  class GetDepthFunctionTest (line 100) | class GetDepthFunctionTest(tf.test.TestCase):
    method test_return_min_depth_when_multiplier_is_small (line 102) | def test_return_min_depth_when_multiplier_is_small(self):
    method test_return_correct_depth_with_multiplier (line 107) | def test_return_correct_depth_with_multiplier(self):

FILE: object_detector_app/object_detection/models/ssd_feature_extractor_test.py
  class SsdFeatureExtractorTestBase (line 24) | class SsdFeatureExtractorTestBase(object):
    method _validate_features_shape (line 26) | def _validate_features_shape(self,
    method _create_feature_extractor (line 49) | def _create_feature_extractor(self, depth_multiplier):
    method check_extract_features_returns_correct_shape (line 59) | def check_extract_features_returns_correct_shape(
    method check_extract_features_raises_error_with_invalid_image_size (line 71) | def check_extract_features_raises_error_with_invalid_image_size(
    method check_feature_extractor_variables_under_scope (line 86) | def check_feature_extractor_variables_under_scope(self,

FILE: object_detector_app/object_detection/models/ssd_inception_v2_feature_extractor.py
  class SSDInceptionV2FeatureExtractor (line 26) | class SSDInceptionV2FeatureExtractor(ssd_meta_arch.SSDFeatureExtractor):
    method __init__ (line 29) | def __init__(self,
    method preprocess (line 45) | def preprocess(self, resized_inputs):
    method extract_features (line 60) | def extract_features(self, preprocessed_inputs):

FILE: object_detector_app/object_detection/models/ssd_inception_v2_feature_extractor_test.py
  class SsdInceptionV2FeatureExtractorTest (line 24) | class SsdInceptionV2FeatureExtractorTest(
    method _create_feature_extractor (line 28) | def _create_feature_extractor(self, depth_multiplier):
    method test_extract_features_returns_correct_shapes_128 (line 41) | def test_extract_features_returns_correct_shapes_128(self):
    method test_extract_features_returns_correct_shapes_299 (line 51) | def test_extract_features_returns_correct_shapes_299(self):
    method test_extract_features_returns_correct_shapes_enforcing_min_depth (line 61) | def test_extract_features_returns_correct_shapes_enforcing_min_depth(s...
    method test_extract_features_raises_error_with_invalid_image_size (line 71) | def test_extract_features_raises_error_with_invalid_image_size(self):
    method test_preprocess_returns_correct_value_range (line 78) | def test_preprocess_returns_correct_value_range(self):
    method test_variables_only_created_in_scope (line 87) | def test_variables_only_created_in_scope(self):

FILE: object_detector_app/object_detection/models/ssd_mobilenet_v1_feature_extractor.py
  class SSDMobileNetV1FeatureExtractor (line 27) | class SSDMobileNetV1FeatureExtractor(ssd_meta_arch.SSDFeatureExtractor):
    method __init__ (line 30) | def __init__(self,
    method preprocess (line 46) | def preprocess(self, resized_inputs):
    method extract_features (line 61) | def extract_features(self, preprocessed_inputs):

FILE: object_detector_app/object_detection/models/ssd_mobilenet_v1_feature_extractor_test.py
  class SsdMobilenetV1FeatureExtractorTest (line 24) | class SsdMobilenetV1FeatureExtractorTest(
    method _create_feature_extractor (line 27) | def _create_feature_extractor(self, depth_multiplier):
    method test_extract_features_returns_correct_shapes_128 (line 40) | def test_extract_features_returns_correct_shapes_128(self):
    method test_extract_features_returns_correct_shapes_299 (line 50) | def test_extract_features_returns_correct_shapes_299(self):
    method test_extract_features_returns_correct_shapes_enforcing_min_depth (line 60) | def test_extract_features_returns_correct_shapes_enforcing_min_depth(s...
    method test_extract_features_raises_error_with_invalid_image_size (line 70) | def test_extract_features_raises_error_with_invalid_image_size(self):
    method test_preprocess_returns_correct_value_range (line 77) | def test_preprocess_returns_correct_value_range(self):
    method test_variables_only_created_in_scope (line 86) | def test_variables_only_created_in_scope(self):

FILE: object_detector_app/object_detection/train.py
  function get_configs_from_pipeline_file (line 91) | def get_configs_from_pipeline_file():
  function get_configs_from_multiple_files (line 112) | def get_configs_from_multiple_files():
  function main (line 140) | def main(_):

FILE: object_detector_app/object_detection/trainer.py
  function _create_input_queue (line 38) | def _create_input_queue(batch_size_per_clone, create_tensor_dict_fn,
  function _get_inputs (line 81) | def _get_inputs(input_queue, num_classes):
  function _create_losses (line 112) | def _create_losses(input_queue, create_model_fn):
  function train (line 138) | def train(create_tensor_dict_fn, create_model_fn, train_config, master, ...

FILE: object_detector_app/object_detection/trainer_test.py
  function get_input_function (line 32) | def get_input_function():
  class FakeDetectionModel (line 47) | class FakeDetectionModel(model.DetectionModel):
    method __init__ (line 50) | def __init__(self):
    method preprocess (line 57) | def preprocess(self, inputs):
    method predict (line 69) | def predict(self, preprocessed_inputs):
    method postprocess (line 90) | def postprocess(self, prediction_dict, **params):
    method loss (line 108) | def loss(self, prediction_dict):
    method restore_fn (line 142) | def restore_fn(self, checkpoint_path, from_detection_checkpoint=True):
  class TrainerTest (line 159) | class TrainerTest(tf.test.TestCase):
    method test_configure_trainer_and_train_two_steps (line 161) | def test_configure_trainer_and_train_two_steps(self):

FILE: object_detector_app/object_detection/utils/category_util.py
  function load_categories_from_csv_file (line 22) | def load_categories_from_csv_file(csv_path):
  function save_categories_to_csv_file (line 60) | def save_categories_to_csv_file(categories, csv_path):

FILE: object_detector_app/object_detection/utils/category_util_test.py
  class EvalUtilTest (line 24) | class EvalUtilTest(tf.test.TestCase):
    method test_load_categories_from_csv_file (line 26) | def test_load_categories_from_csv_file(self):
    method test_save_categories_to_csv_file (line 41) | def test_save_categories_to_csv_file(self):

FILE: object_detector_app/object_detection/utils/dataset_util.py
  function int64_feature (line 21) | def int64_feature(value):
  function int64_list_feature (line 25) | def int64_list_feature(value):
  function bytes_feature (line 29) | def bytes_feature(value):
  function bytes_list_feature (line 33) | def bytes_list_feature(value):
  function float_list_feature (line 37) | def float_list_feature(value):
  function read_examples_list (line 41) | def read_examples_list(path):
  function recursive_parse_xml_to_dict (line 63) | def recursive_parse_xml_to_dict(xml):

FILE: object_detector_app/object_detection/utils/dataset_util_test.py
  class DatasetUtilTest (line 24) | class DatasetUtilTest(tf.test.TestCase):
    method test_read_examples_list (line 26) | def test_read_examples_list(self):

FILE: object_detector_app/object_detection/utils/label_map_util.py
  function create_category_index (line 25) | def create_category_index(categories):
  function convert_label_map_to_categories (line 44) | def convert_label_map_to_categories(label_map,
  function load_labelmap (line 95) | def load_labelmap(path):
  function get_label_map_dict (line 113) | def get_label_map_dict(label_map_path):

FILE: object_detector_app/object_detection/utils/label_map_util_test.py
  class LabelMapUtilTest (line 26) | class LabelMapUtilTest(tf.test.TestCase):
    method _generate_label_map (line 28) | def _generate_label_map(self, num_classes):
    method test_get_label_map_dict (line 37) | def test_get_label_map_dict(self):
    method test_keep_categories_with_unique_id (line 56) | def test_keep_categories_with_unique_id(self):
    method test_convert_label_map_to_categories_no_label_map (line 87) | def test_convert_label_map_to_categories_no_label_map(self):
    method test_convert_label_map_to_coco_categories (line 102) | def test_convert_label_map_to_coco_categories(self):
    method test_convert_label_map_to_coco_categories_with_few_classes (line 118) | def test_convert_label_map_to_coco_categories_with_few_classes(self):
    method test_create_category_index (line 131) | def test_create_category_index(self):

FILE: object_detector_app/object_detection/utils/learning_schedules.py
  function exponential_decay_with_burnin (line 21) | def exponential_decay_with_burnin(global_step,
  function manual_stepping (line 62) | def manual_stepping(global_step, boundaries, rates):

FILE: object_detector_app/object_detection/utils/learning_schedules_test.py
  class LearningSchedulesTest (line 22) | class LearningSchedulesTest(tf.test.TestCase):
    method testExponentialDecayWithBurnin (line 24) | def testExponentialDecayWithBurnin(self):
    method testManualStepping (line 43) | def testManualStepping(self):

FILE: object_detector_app/object_detection/utils/metrics.py
  function compute_precision_recall (line 22) | def compute_precision_recall(scores, labels, num_gt):
  function compute_average_precision (line 70) | def compute_average_precision(precision, recall):
  function compute_cor_loc (line 122) | def compute_cor_loc(num_gt_imgs_per_class,

FILE: object_detector_app/object_detection/utils/metrics_test.py
  class MetricsTest (line 24) | class MetricsTest(tf.test.TestCase):
    method test_compute_cor_loc (line 26) | def test_compute_cor_loc(self):
    method test_compute_cor_loc_nans (line 35) | def test_compute_cor_loc_nans(self):
    method test_compute_precision_recall (line 44) | def test_compute_precision_recall(self):
    method test_compute_average_precision (line 55) | def test_compute_average_precision(self):
    method test_compute_precision_recall_and_ap_no_groundtruth (line 65) | def test_compute_precision_recall_and_ap_no_groundtruth(self):

FILE: object_detector_app/object_detection/utils/np_box_list.py
  class BoxList (line 21) | class BoxList(object):
    method __init__ (line 33) | def __init__(self, data):
    method num_boxes (line 54) | def num_boxes(self):
    method get_extra_fields (line 58) | def get_extra_fields(self):
    method has_field (line 62) | def has_field(self, field):
    method add_field (line 65) | def add_field(self, field, field_data):
    method get (line 82) | def get(self):
    method get_field (line 90) | def get_field(self, field):
    method get_coordinates (line 106) | def get_coordinates(self):
    method _is_valid_boxes (line 119) | def _is_valid_boxes(self, data):

FILE: object_detector_app/object_detection/utils/np_box_list_ops.py
  class SortOrder (line 29) | class SortOrder(object):
  function area (line 40) | def area(boxlist):
  function intersection (line 53) | def intersection(boxlist1, boxlist2):
  function iou (line 66) | def iou(boxlist1, boxlist2):
  function ioa (line 79) | def ioa(boxlist1, boxlist2):
  function gather (line 96) | def gather(boxlist, indices, fields=None):
  function sort_by_field (line 131) | def sort_by_field(boxlist, field, order=SortOrder.DESCEND):
  function non_max_suppression (line 162) | def non_max_suppression(boxlist,
  function multi_class_non_max_suppression (line 236) | def multi_class_non_max_suppression(boxlist, score_thresh, iou_thresh,
  function scale (line 309) | def scale(boxlist, y_scale, x_scale):
  function clip_to_window (line 335) | def clip_to_window(boxlist, window):
  function prune_non_overlapping_boxes (line 369) | def prune_non_overlapping_boxes(boxlist1, boxlist2, minoverlap=0.0):
  function prune_outside_window (line 392) | def prune_outside_window(boxlist, window):
  function concatenate (line 425) | def concatenate(boxlists, fields=None):
  function filter_scores_greater_than (line 474) | def filter_scores_greater_than(boxlist, thresh):
  function change_coordinate_frame (line 507) | def change_coordinate_frame(boxlist, window):
  function _copy_extra_fields (line 537) | def _copy_extra_fields(boxlist_to_copy_to, boxlist_to_copy_from):
  function _update_valid_indices_by_removing_high_iou_boxes (line 552) | def _update_valid_indices_by_removing_high_iou_boxes(

FILE: object_detector_app/object_detection/utils/np_box_list_ops_test.py
  class AreaRelatedTest (line 25) | class AreaRelatedTest(tf.test.TestCase):
    method setUp (line 27) | def setUp(self):
    method test_area (line 36) | def test_area(self):
    method test_intersection (line 41) | def test_intersection(self):
    method test_iou (line 47) | def test_iou(self):
    method test_ioa (line 54) | def test_ioa(self):
    method test_scale (line 68) | def test_scale(self):
    method test_clip_to_window (line 79) | def test_clip_to_window(self):
    method test_prune_outside_window (line 94) | def test_prune_outside_window(self):
    method test_concatenate (line 108) | def test_concatenate(self):
    method test_change_coordinate_frame (line 126) | def test_change_coordinate_frame(self):
    method test_filter_scores_greater_than (line 137) | def test_filter_scores_greater_than(self):
  class GatherOpsTest (line 151) | class GatherOpsTest(tf.test.TestCase):
    method setUp (line 153) | def setUp(self):
    method test_gather_with_out_of_range_indices (line 164) | def test_gather_with_out_of_range_indices(self):
    method test_gather_with_invalid_multidimensional_indices (line 170) | def test_gather_with_invalid_multidimensional_indices(self):
    method test_gather_without_fields_specified (line 176) | def test_gather_without_fields_specified(self):
    method test_gather_with_invalid_field_specified (line 194) | def test_gather_with_invalid_field_specified(self):
    method test_gather_with_fields_specified (line 204) | def test_gather_with_fields_specified(self):
  class SortByFieldTest (line 222) | class SortByFieldTest(tf.test.TestCase):
    method setUp (line 224) | def setUp(self):
    method test_with_invalid_field (line 235) | def test_with_invalid_field(self):
    method test_with_invalid_sorting_order (line 241) | def test_with_invalid_sorting_order(self):
    method test_with_descending_sorting (line 245) | def test_with_descending_sorting(self):
    method test_with_ascending_sorting (line 256) | def test_with_ascending_sorting(self):
  class NonMaximumSuppressionTest (line 270) | class NonMaximumSuppressionTest(tf.test.TestCase):
    method setUp (line 272) | def setUp(self):
    method test_with_no_scores_field (line 282) | def test_with_no_scores_field(self):
    method test_nms_disabled_max_output_size_equals_three (line 291) | def test_nms_disabled_max_output_size_equals_three(self):
    method test_select_from_three_clusters (line 304) | def test_select_from_three_clusters(self):
    method test_select_at_most_two_from_three_clusters (line 317) | def test_select_at_most_two_from_three_clusters(self):
    method test_select_at_most_thirty_from_three_clusters (line 329) | def test_select_at_most_thirty_from_three_clusters(self):
    method test_select_from_ten_indentical_boxes (line 342) | def test_select_from_ten_indentical_boxes(self):
    method test_different_iou_threshold (line 353) | def test_different_iou_threshold(self):
    method test_multiclass_nms (line 385) | def test_multiclass_nms(self):

FILE: object_detector_app/object_detection/utils/np_box_list_test.py
  class BoxListTest (line 24) | class BoxListTest(tf.test.TestCase):
    method test_invalid_box_data (line 26) | def test_invalid_box_data(self):
    method test_has_field_with_existed_field (line 39) | def test_has_field_with_existed_field(self):
    method test_has_field_with_nonexisted_field (line 46) | def test_has_field_with_nonexisted_field(self):
    method test_get_field_with_existed_field (line 53) | def test_get_field_with_existed_field(self):
    method test_get_field_with_nonexited_field (line 60) | def test_get_field_with_nonexited_field(self):
  class AddExtraFieldTest (line 69) | class AddExtraFieldTest(tf.test.TestCase):
    method setUp (line 71) | def setUp(self):
    method test_add_already_existed_field (line 77) | def test_add_already_existed_field(self):
    method test_add_invalid_field_data (line 81) | def test_add_invalid_field_data(self):
    method test_add_single_dimensional_field_data (line 88) | def test_add_single_dimensional_field_data(self):
    method test_add_multi_dimensional_field_data (line 94) | def test_add_multi_dimensional_field_data(self):
    method test_get_extra_fields (line 101) | def test_get_extra_fields(self):
    method test_get_coordinates (line 114) | def test_get_coordinates(self):
    method test_num_boxes (line 127) | def test_num_boxes(self):

FILE: object_detector_app/object_detection/utils/np_box_ops.py
  function area (line 25) | def area(boxes):
  function intersection (line 37) | def intersection(boxes1, boxes2):
  function iou (line 63) | def iou(boxes1, boxes2):
  function ioa (line 81) | def ioa(boxes1, boxes2):

FILE: object_detector_app/object_detection/utils/np_box_ops_test.py
  class BoxOpsTests (line 24) | class BoxOpsTests(tf.test.TestCase):
    method setUp (line 26) | def setUp(self):
    method testArea (line 35) | def testArea(self):
    method testIntersection (line 40) | def testIntersection(self):
    method testIOU (line 46) | def testIOU(self):
    method testIOA (line 53) | def testIOA(self):

FILE: object_detector_app/object_detection/utils/object_detection_evaluation.py
  class ObjectDetectionEvaluation (line 38) | class ObjectDetectionEvaluation(object):
    method __init__ (line 41) | def __init__(self,
    method clear_detections (line 67) | def clear_detections(self):
    method add_single_ground_truth_image_info (line 77) | def add_single_ground_truth_image_info(self,
    method add_single_detected_image_info (line 110) | def add_single_detected_image_info(self, image_key, detected_boxes,
    method _update_ground_truth_statistics (line 160) | def _update_ground_truth_statistics(self, groundtruth_class_labels,
    method evaluate (line 182) | def evaluate(self):
    method get_eval_result (line 221) | def get_eval_result(self):
  class EvalResult (line 227) | class EvalResult(object):
    method __init__ (line 229) | def __init__(self, average_precisions, precisions, recalls, all_corloc):

FILE: object_detector_app/object_detection/utils/object_detection_evaluation_test.py
  class ObjectDetectionEvaluationTest (line 24) | class ObjectDetectionEvaluationTest(tf.test.TestCase):
    method setUp (line 26) | def setUp(self):
    method test_add_single_ground_truth_image_info (line 60) | def test_add_single_ground_truth_image_info(self):
    method test_add_single_detected_image_info (line 79) | def test_add_single_detected_image_info(self):
    method test_evaluate (line 96) | def test_evaluate(self):

FILE: object_detector_app/object_detection/utils/ops.py
  function expanded_shape (line 27) | def expanded_shape(orig_shape, start_dim, num_dims):
  function normalized_to_image_coordinates (line 49) | def normalized_to_image_coordinates(normalized_boxes, image_shape,
  function meshgrid (line 77) | def meshgrid(x, y):
  function pad_to_multiple (line 116) | def pad_to_multiple(tensor, multiple):
  function padded_one_hot_encoding (line 176) | def padded_one_hot_encoding(indices, depth, left_pad):
  function dense_to_sparse_boxes (line 218) | def dense_to_sparse_boxes(dense_locations, dense_num_boxes, num_classes):
  function indices_to_dense_vector (line 249) | def indices_to_dense_vector(indices,
  function retain_groundtruth (line 281) | def retain_groundtruth(tensor_dict, valid_indices):
  function retain_groundtruth_with_positive_classes (line 334) | def retain_groundtruth_with_positive_classes(tensor_dict):
  function filter_groundtruth_with_nan_box_coordinates (line 360) | def filter_groundtruth_with_nan_box_coordinates(tensor_dict):
  function normalize_to_target (line 384) | def normalize_to_target(inputs,
  function position_sensitive_crop_regions (line 461) | def position_sensitive_crop_regions(image,
  function reframe_box_masks_to_image_masks (line 611) | def reframe_box_masks_to_image_masks(box_masks, boxes, image_height,

FILE: object_detector_app/object_detection/utils/ops_test.py
  class NormalizedToImageCoordinatesTest (line 24) | class NormalizedToImageCoordinatesTest(tf.test.TestCase):
    method test_normalized_to_image_coordinates (line 26) | def test_normalized_to_image_coordinates(self):
  class MeshgridTest (line 45) | class MeshgridTest(tf.test.TestCase):
    method test_meshgrid_numpy_comparison (line 47) | def test_meshgrid_numpy_comparison(self):
    method test_meshgrid_multidimensional (line 58) | def test_meshgrid_multidimensional(self):
  class OpsTestPadToMultiple (line 86) | class OpsTestPadToMultiple(tf.test.TestCase):
    method test_zero_padding (line 88) | def test_zero_padding(self):
    method test_no_padding (line 95) | def test_no_padding(self):
    method test_padding (line 102) | def test_padding(self):
  class OpsTestPaddedOneHotEncoding (line 110) | class OpsTestPaddedOneHotEncoding(tf.test.TestCase):
    method test_correct_one_hot_tensor_with_no_pad (line 112) | def test_correct_one_hot_tensor_with_no_pad(self):
    method test_correct_one_hot_tensor_with_pad_one (line 124) | def test_correct_one_hot_tensor_with_pad_one(self):
    method test_correct_one_hot_tensor_with_pad_three (line 136) | def test_correct_one_hot_tensor_with_pad_three(self):
    method test_correct_padded_one_hot_tensor_with_empty_indices (line 148) | def test_correct_padded_one_hot_tensor_with_empty_indices(self):
    method test_return_none_on_zero_depth (line 160) | def test_return_none_on_zero_depth(self):
    method test_raise_value_error_on_rank_two_input (line 165) | def test_raise_value_error_on_rank_two_input(self):
    method test_raise_value_error_on_negative_pad (line 170) | def test_raise_value_error_on_negative_pad(self):
    method test_raise_value_error_on_float_pad (line 175) | def test_raise_value_error_on_float_pad(self):
    method test_raise_value_error_on_float_depth (line 180) | def test_raise_value_error_on_float_depth(self):
  class OpsDenseToSparseBoxesTest (line 186) | class OpsDenseToSparseBoxesTest(tf.test.TestCase):
    method test_return_all_boxes_when_all_input_boxes_are_valid (line 188) | def test_return_all_boxes_when_all_input_boxes_are_valid(self):
    method test_return_only_valid_boxes_when_input_contains_invalid_boxes (line 213) | def test_return_only_valid_boxes_when_input_contains_invalid_boxes(self):
  class OpsTestIndicesToDenseVector (line 241) | class OpsTestIndicesToDenseVector(tf.test.TestCase):
    method test_indices_to_dense_vector (line 243) | def test_indices_to_dense_vector(self):
    method test_indices_to_dense_vector_size_at_inference (line 259) | def test_indices_to_dense_vector_size_at_inference(self):
    method test_indices_to_dense_vector_int (line 279) | def test_indices_to_dense_vector_int(self):
    method test_indices_to_dense_vector_custom_values (line 296) | def test_indices_to_dense_vector_custom_values(self):
    method test_indices_to_dense_vector_all_indices_as_input (line 318) | def test_indices_to_dense_vector_all_indices_as_input(self):
    method test_indices_to_dense_vector_empty_indices_as_input (line 333) | def test_indices_to_dense_vector_empty_indices_as_input(self):
  class GroundtruthFilterTest (line 348) | class GroundtruthFilterTest(tf.test.TestCase):
    method test_filter_groundtruth (line 350) | def test_filter_groundtruth(self):
    method test_filter_with_missing_fields (line 415) | def test_filter_with_missing_fields(self):
    method test_filter_with_empty_fields (line 447) | def test_filter_with_empty_fields(self):
    method test_filter_with_empty_groundtruth_boxes (line 498) | def test_filter_with_empty_groundtruth_boxes(self):
  class RetainGroundTruthWithPositiveClasses (line 537) | class RetainGroundTruthWithPositiveClasses(tf.test.TestCase):
    method test_filter_groundtruth_with_positive_classes (line 539) | def test_filter_groundtruth_with_positive_classes(self):
  class GroundtruthFilterWithNanBoxTest (line 605) | class GroundtruthFilterWithNanBoxTest(tf.test.TestCase):
    method test_filter_groundtruth_with_nan_box_coordinates (line 607) | def test_filter_groundtruth_with_nan_box_coordinates(self):
  class OpsTestNormalizeToTarget (line 642) | class OpsTestNormalizeToTarget(tf.test.TestCase):
    method test_create_normalize_to_target (line 644) | def test_create_normalize_to_target(self):
    method test_invalid_dim (line 654) | def test_invalid_dim(self):
    method test_invalid_target_norm_values (line 663) | def test_invalid_target_norm_values(self):
    method test_correct_output_shape (line 671) | def test_correct_output_shape(self):
    method test_correct_initial_output_values (line 680) | def test_correct_initial_output_values(self):
    method test_multiple_target_norm_values (line 694) | def test_multiple_target_norm_values(self):
  class OpsTestPositionSensitiveCropRegions (line 709) | class OpsTestPositionSensitiveCropRegions(tf.test.TestCase):
    method test_position_sensitive (line 711) | def test_position_sensitive(self):
    method test_position_sensitive_with_equal_channels (line 734) | def test_position_sensitive_with_equal_channels(self):
    method test_position_sensitive_with_single_bin (line 762) | def test_position_sensitive_with_single_bin(self):
    method test_raise_value_error_on_num_bins_less_than_one (line 783) | def test_raise_value_error_on_num_bins_less_than_one(self):
    method test_raise_value_error_on_non_divisible_crop_size (line 796) | def test_raise_value_error_on_non_divisible_crop_size(self):
    method test_raise_value_error_on_non_divisible_num_channels (line 810) | def test_raise_value_error_on_non_divisible_num_channels(self):
    method test_position_sensitive_with_global_pool_false (line 824) | def test_position_sensitive_with_global_pool_false(self):
    method test_position_sensitive_with_global_pool_false_and_known_boxes (line 863) | def test_position_sensitive_with_global_pool_false_and_known_boxes(self):
    method test_position_sensitive_with_global_pool_false_and_single_bin (line 900) | def test_position_sensitive_with_global_pool_false_and_single_bin(self):
    method test_position_sensitive_with_global_pool_false_and_do_global_pool (line 920) | def test_position_sensitive_with_global_pool_false_and_do_global_pool(...
    method test_raise_value_error_on_non_square_block_size (line 969) | def test_raise_value_error_on_non_square_block_size(self):
  class ReframeBoxMasksToImageMasksTest (line 984) | class ReframeBoxMasksToImageMasksTest(tf.test.TestCase):
    method testZeroImageOnEmptyMask (line 986) | def testZeroImageOnEmptyMask(self):
    method testMaskIsCenteredInImageWhenBoxIsCentered (line 1001) | def testMaskIsCenteredInImageWhenBoxIsCentered(self):
    method testMaskOffCenterRemainsOffCenterInImage (line 1016) | def testMaskOffCenterRemainsOffCenterInImage(self):

FILE: object_detector_app/object_detection/utils/per_image_evaluation.py
  class PerImageEvaluation (line 28) | class PerImageEvaluation(object):
    method __init__ (line 31) | def __init__(self,
    method compute_object_detection_metrics (line 50) | def compute_object_detection_metrics(self, detected_boxes, detected_sc...
    method _compute_cor_loc (line 94) | def _compute_cor_loc(self, detected_boxes, detected_scores,
    method _compute_is_aclass_correctly_detected_in_image (line 131) | def _compute_is_aclass_correctly_detected_in_image(
    method _compute_tp_fp (line 158) | def _compute_tp_fp(self, detected_boxes, detected_scores,
    method _remove_invalid_boxes (line 203) | def _remove_invalid_boxes(self, detected_boxes, detected_scores,
    method _compute_tp_fp_for_single_class (line 210) | def _compute_tp_fp_for_single_class(self, detected_boxes, detected_sco...

FILE: object_detector_app/object_detection/utils/per_image_evaluation_test.py
  class SingleClassTpFpWithDifficultBoxesTest (line 24) | class SingleClassTpFpWithDifficultBoxesTest(tf.test.TestCase):
    method setUp (line 26) | def setUp(self):
    method test_match_to_not_difficult_box (line 41) | def test_match_to_not_difficult_box(self):
    method test_match_to_difficult_box (line 52) | def test_match_to_difficult_box(self):
  class SingleClassTpFpNoDifficultBoxesTest (line 64) | class SingleClassTpFpNoDifficultBoxesTest(tf.test.TestCase):
    method setUp (line 66) | def setUp(self):
    method test_no_true_positives (line 84) | def test_no_true_positives(self):
    method test_one_true_positives_with_large_iou_threshold (line 95) | def test_one_true_positives_with_large_iou_threshold(self):
    method test_one_true_positives_with_very_small_iou_threshold (line 106) | def test_one_true_positives_with_very_small_iou_threshold(self):
    method test_two_true_positives_with_large_iou_threshold (line 117) | def test_two_true_positives_with_large_iou_threshold(self):
  class MultiClassesTpFpTest (line 129) | class MultiClassesTpFpTest(tf.test.TestCase):
    method test_tp_fp (line 131) | def test_tp_fp(self):
  class CorLocTest (line 160) | class CorLocTest(tf.test.TestCase):
    method test_compute_corloc_with_normal_iou_threshold (line 162) | def test_compute_corloc_with_normal_iou_threshold(self):
    method test_compute_corloc_with_very_large_iou_threshold (line 186) | def test_compute_corloc_with_very_large_iou_threshold(self):

FILE: object_detector_app/object_detection/utils/shape_utils.py
  function _is_tensor (line 21) | def _is_tensor(t):
  function _set_dim_0 (line 33) | def _set_dim_0(t, d0):
  function pad_tensor (line 49) | def pad_tensor(t, length):
  function clip_tensor (line 75) | def clip_tensor(t, length):
  function pad_or_clip_tensor (line 94) | def pad_or_clip_tensor(t, length):

FILE: object_detector_app/object_detection/utils/shape_utils_test.py
  class UtilTest (line 23) | class UtilTest(tf.test.TestCase):
    method test_pad_tensor_using_integer_input (line 25) | def test_pad_tensor_using_integer_input(self):
    method test_pad_tensor_using_tensor_input (line 39) | def test_pad_tensor_using_tensor_input(self):
    method test_clip_tensor_using_integer_input (line 50) | def test_clip_tensor_using_integer_input(self):
    method test_clip_tensor_using_tensor_input (line 64) | def test_clip_tensor_using_tensor_input(self):
    method test_pad_or_clip_tensor_using_integer_input (line 75) | def test_pad_or_clip_tensor_using_integer_input(self):
    method test_pad_or_clip_tensor_using_tensor_input (line 99) | def test_pad_or_clip_tensor_using_tensor_input(self):

FILE: object_detector_app/object_detection/utils/static_shape.py
  function get_batch_size (line 22) | def get_batch_size(tensor_shape):
  function get_height (line 35) | def get_height(tensor_shape):
  function get_width (line 48) | def get_width(tensor_shape):
  function get_depth (line 61) | def get_depth(tensor_shape):

FILE: object_detector_app/object_detection/utils/static_shape_test.py
  class StaticShapeTest (line 23) | class StaticShapeTest(tf.test.TestCase):
    method test_return_correct_batchSize (line 25) | def test_return_correct_batchSize(self):
    method test_return_correct_height (line 29) | def test_return_correct_height(self):
    method test_return_correct_width (line 33) | def test_return_correct_width(self):
    method test_return_correct_depth (line 37) | def test_return_correct_depth(self):
    method test_die_on_tensor_shape_with_rank_three (line 41) | def test_die_on_tensor_shape_with_rank_three(self):

FILE: object_detector_app/object_detection/utils/test_utils.py
  class MockBoxCoder (line 27) | class MockBoxCoder(box_coder.BoxCoder):
    method code_size (line 31) | def code_size(self):
    method _encode (line 34) | def _encode(self, boxes, anchors):
    method _decode (line 37) | def _decode(self, rel_codes, anchors):
  class MockBoxPredictor (line 41) | class MockBoxPredictor(box_predictor.BoxPredictor):
    method __init__ (line 44) | def __init__(self, is_training, num_classes):
    method _predict (line 47) | def _predict(self, image_features, num_predictions_per_location):
  class MockAnchorGenerator (line 62) | class MockAnchorGenerator(anchor_generator.AnchorGenerator):
    method name_scope (line 65) | def name_scope(self):
    method num_anchors_per_location (line 68) | def num_anchors_per_location(self):
    method _generate (line 71) | def _generate(self, feature_map_shape_list):
  class MockMatcher (line 76) | class MockMatcher(matcher.Matcher):
    method _match (line 79) | def _match(self, similarity_matrix):
  function create_diagonal_gradient_image (line 83) | def create_diagonal_gradient_image(height, width, depth):
  function create_random_boxes (line 113) | def create_random_boxes(num_boxes, max_height, max_width):

FILE: object_detector_app/object_detection/utils/test_utils_test.py
  class TestUtilsTest (line 24) | class TestUtilsTest(tf.test.TestCase):
    method test_diagonal_gradient_image (line 26) | def test_diagonal_gradient_image(self):
    method test_random_boxes (line 53) | def test_random_boxes(self):

FILE: object_detector_app/object_detection/utils/variables_helper.py
  function filter_variables (line 28) | def filter_variables(variables, filter_regex_list, invert=False):
  function multiply_gradients_matching_regex (line 57) | def multiply_gradients_matching_regex(grads_and_vars, regex_list, multip...
  function freeze_gradients_matching_regex (line 79) | def freeze_gradients_matching_regex(grads_and_vars, regex_list):
  function get_variables_available_in_checkpoint (line 99) | def get_variables_available_in_checkpoint(variables, checkpoint_path):

FILE: object_detector_app/object_detection/utils/variables_helper_test.py
  class FilterVariablesTest (line 24) | class FilterVariablesTest(tf.test.TestCase):
    method _create_variables (line 26) | def _create_variables(self):
    method test_return_all_variables_when_empty_regex (line 32) | def test_return_all_variables_when_empty_regex(self):
    method test_return_variables_which_do_not_match_single_regex (line 37) | def test_return_variables_which_do_not_match_single_regex(self):
    method test_return_variables_which_do_not_match_any_regex_in_list (line 43) | def test_return_variables_which_do_not_match_any_regex_in_list(self):
    method test_return_variables_matching_empty_regex_list (line 50) | def test_return_variables_matching_empty_regex_list(self):
    method test_return_variables_matching_some_regex_in_list (line 56) | def test_return_variables_matching_some_regex_in_list(self):
  class MultiplyGradientsMatchingRegexTest (line 65) | class MultiplyGradientsMatchingRegexTest(tf.test.TestCase):
    method _create_grads_and_vars (line 67) | def _create_grads_and_vars(self):
    method test_multiply_all_feature_extractor_variables (line 77) | def test_multiply_all_feature_extractor_variables(self):
    method test_multiply_all_bias_variables (line 90) | def test_multiply_all_bias_variables(self):
  class FreezeGradientsMatchingRegexTest (line 104) | class FreezeGradientsMatchingRegexTest(tf.test.TestCase):
    method _create_grads_and_vars (line 106) | def _create_grads_and_vars(self):
    method test_freeze_all_feature_extractor_variables (line 116) | def test_freeze_all_feature_extractor_variables(self):
  class GetVariablesAvailableInCheckpointTest (line 129) | class GetVariablesAvailableInCheckpointTest(tf.test.TestCase):
    method test_return_all_variables_from_checkpoint (line 131) | def test_return_all_variables_from_checkpoint(self):
    method test_return_variables_available_in_checkpoint (line 146) | def test_return_variables_available_in_checkpoint(self):
    method test_return_variables_available_an_checkpoint_with_dict_inputs (line 162) | def test_return_variables_available_an_checkpoint_with_dict_inputs(self):

FILE: object_detector_app/object_detection/utils/visualization_utils.py
  function save_image_array_as_png (line 61) | def save_image_array_as_png(image, output_path):
  function encode_image_array_as_png_str (line 73) | def encode_image_array_as_png_str(image):
  function draw_bounding_box_on_image_array (line 90) | def draw_bounding_box_on_image_array(image,
  function draw_bounding_box_on_image (line 122) | def draw_bounding_box_on_image(image,
  function draw_bounding_boxes_on_image_array (line 181) | def draw_bounding_boxes_on_image_array(image,
  function draw_bounding_boxes_on_image (line 209) | def draw_bounding_boxes_on_image(image,
  function draw_keypoints_on_image_array (line 244) | def draw_keypoints_on_image_array(image,
  function draw_keypoints_on_image (line 265) | def draw_keypoints_on_image(image,
  function draw_mask_on_image_array (line 293) | def draw_mask_on_image_array(image, mask, color='red', alpha=0.7):
  function visualize_boxes_and_labels_on_image_array (line 323) | def visualize_boxes_and_labels_on_image_array(image,

FILE: object_detector_app/object_detection/utils/visualization_utils_test.py
  class VisualizationUtilsTest (line 31) | class VisualizationUtilsTest(tf.test.TestCase):
    method create_colorful_test_image (line 33) | def create_colorful_test_image(self):
    method test_draw_bounding_box_on_image (line 53) | def test_draw_bounding_box_on_image(self):
    method test_draw_bounding_box_on_image_array (line 69) | def test_draw_bounding_box_on_image_array(self):
    method test_draw_bounding_boxes_on_image (line 86) | def test_draw_bounding_boxes_on_image(self):
    method test_draw_bounding_boxes_on_image_array (line 99) | def test_draw_bounding_boxes_on_image_array(self):
    method test_draw_keypoints_on_image (line 113) | def test_draw_keypoints_on_image(self):
    method test_draw_keypoints_on_image_array (line 125) | def test_draw_keypoints_on_image_array(self):
    method test_draw_mask_on_image_array (line 138) | def test_draw_mask_on_image_array(self):

FILE: object_detector_app/object_detection_app.py
  function detect_objects (line 33) | def detect_objects(image_np, sess, detection_graph):
  function worker (line 64) | def worker(input_q, output_q):

FILE: object_detector_app/object_detection_multithreading.py
  function detect_objects (line 33) | def detect_objects(image_np, sess, detection_graph):
  function worker (line 63) | def worker(input_q, output_q):
  function publish_detected_object (line 87) | def publish_detected_object():

FILE: object_detector_app/utils/app_utils.py
  class FPS (line 12) | class FPS:
    method __init__ (line 13) | def __init__(self):
    method start (line 20) | def start(self):
    method stop (line 25) | def stop(self):
    method update (line 29) | def update(self):
    method elapsed (line 34) | def elapsed(self):
    method fps (line 39) | def fps(self):
  class WebcamVideoStream (line 44) | class WebcamVideoStream:
    method __init__ (line 45) | def __init__(self, src, width, height):
    method start (line 57) | def start(self):
    method update (line 62) | def update(self):
    method read (line 72) | def read(self):
    method stop (line 76) | def stop(self):
  function standard_colors (line 81) | def standard_colors():
  function color_name_to_rgb (line 110) | def color_name_to_rgb():
  function draw_boxes_and_labels (line 117) | def draw_boxes_and_labels(

FILE: object_detector_app/utils/test_app_utils.py
  class TestUtils (line 5) | class TestUtils(unittest.TestCase):
    method setUp (line 6) | def setUp(self):
    method test_all_colors (line 10) | def test_all_colors(self):
Condensed preview — 321 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (2,117K chars).
[
  {
    "path": "LICENSE",
    "chars": 1071,
    "preview": "MIT License\n\nCopyright (c) 2018 Jesse Weisberg\n\nPermission is hereby granted, free of charge, to any person obtaining a "
  },
  {
    "path": "README.md",
    "chars": 5709,
    "preview": "# moveo_ros\nROS packages that can be used to plan and execute motion trajectories for the BCN3D Moveo robotic arm in sim"
  },
  {
    "path": "moveo_moveit/CMakeLists.txt",
    "chars": 1789,
    "preview": "cmake_minimum_required(VERSION 2.8.3)\nproject(moveo_moveit)\n\nadd_compile_options(-std=c++11)\n\nfind_package(Eigen3 REQUIR"
  },
  {
    "path": "moveo_moveit/moveo_moveit_arduino/MultiStepperTest/MultiStepperTest.ino",
    "chars": 2725,
    "preview": "// MultiStepper.pde\n// -*- mode: C++ -*-\n// Use MultiStepper class to manage multiple steppers and make them all move to"
  },
  {
    "path": "moveo_moveit/moveo_moveit_arduino/moveo_moveit_arduino.ino",
    "chars": 4727,
    "preview": "/* Purpose: This sketch uses ROS as well as MultiStepper, AccelStepper, and Servo libraries to control the \n * BCN3D Mov"
  },
  {
    "path": "moveo_moveit/msg/ArmJointState.msg",
    "chars": 96,
    "preview": "int16 position1\nint16 position2\nint16 position3\nint16 position4\nint16 position5\nint16 position6\n"
  },
  {
    "path": "moveo_moveit/package.xml",
    "chars": 2602,
    "preview": "<?xml version=\"1.0\"?>\n<package>\n  <name>moveo_moveit</name>\n  <version>0.0.0</version>\n  <description>The moveo_moveit p"
  },
  {
    "path": "moveo_moveit/scripts/README.md",
    "chars": 2676,
    "preview": "# Object-Specific Pick and Place\nThis script uses real-time object recognition in a monocular image to perform predefine"
  },
  {
    "path": "moveo_moveit/scripts/moveo_objrec_publisher.py",
    "chars": 4035,
    "preview": "#!/usr/bin/env python\n#!/usr/bin/env python3\n\n'''\nSubscribes to a zmq socket and publishes that information to a ros top"
  },
  {
    "path": "moveo_moveit/src/move_group_interface_coor_1.cpp",
    "chars": 5724,
    "preview": "#include <moveit/move_group_interface/move_group_interface.h>\n#include <moveit/planning_scene_interface/planning_scene_i"
  },
  {
    "path": "moveo_moveit/src/moveit_convert.cpp",
    "chars": 5007,
    "preview": "#include \"ros/ros.h\"\n#include \"sensor_msgs/JointState.h\"\n#include \"moveo_moveit/ArmJointState.h\"\n#include \"math.h\"\n\nmove"
  },
  {
    "path": "moveo_moveit_config/.setup_assistant",
    "chars": 273,
    "preview": "moveit_setup_assistant_config:\n  URDF:\n    package: moveo_urdf\n    relative_path: urdf/moveo_urdf.urdf\n  SRDF:\n    relat"
  },
  {
    "path": "moveo_moveit_config/CMakeLists.txt",
    "chars": 308,
    "preview": "cmake_minimum_required(VERSION 2.8.3)\nproject(moveo_moveit_config)\n\nfind_package(catkin REQUIRED)\n\ncatkin_package()\n\nins"
  },
  {
    "path": "moveo_moveit_config/config/fake_controllers.yaml",
    "chars": 249,
    "preview": "controller_list:\n  - name: fake_arm_controller\n    joints:\n      - Joint_1\n      - Joint_2\n      - Joint_3\n      - Joint"
  },
  {
    "path": "moveo_moveit_config/config/joint_limits.yaml",
    "chars": 1757,
    "preview": "# joint_limits.yaml allows the dynamics properties specified in the URDF to be overwritten or augmented as needed\n# Spec"
  },
  {
    "path": "moveo_moveit_config/config/kinematics.yaml",
    "chars": 179,
    "preview": "arm:\n  kinematics_solver: kdl_kinematics_plugin/KDLKinematicsPlugin\n  kinematics_solver_search_resolution: 0.005\n  kinem"
  },
  {
    "path": "moveo_moveit_config/config/moveo_urdf.srdf",
    "chars": 6380,
    "preview": "<?xml version=\"1.0\" ?>\n<!--This does not replace URDF, and is not an extension of URDF.\n    This is a format for represe"
  },
  {
    "path": "moveo_moveit_config/config/ompl_planning.yaml",
    "chars": 9934,
    "preview": "planner_configs:\n  SBLkConfigDefault:\n    type: geometric::SBL\n    range: 0.0  # Max motion added to tree. ==> maxDistan"
  },
  {
    "path": "moveo_moveit_config/launch/default_warehouse_db.launch",
    "chars": 711,
    "preview": "<launch>\n\n  <arg name=\"reset\" default=\"false\"/>\n  <!-- If not specified, we'll use a default database location -->\n  <ar"
  },
  {
    "path": "moveo_moveit_config/launch/demo.launch",
    "chars": 2638,
    "preview": "<launch>\n\n  <!-- By default, we do not start a database (it can be large) -->\n  <arg name=\"db\" default=\"false\" />\n  <!--"
  },
  {
    "path": "moveo_moveit_config/launch/fake_moveit_controller_manager.launch.xml",
    "chars": 368,
    "preview": "<launch>\n\n  <!-- Set the param that trajectory_execution_manager needs to find the controller plugin -->\n  <param name=\""
  },
  {
    "path": "moveo_moveit_config/launch/joystick_control.launch",
    "chars": 629,
    "preview": "<launch>\n  <!-- See moveit_ros/visualization/doc/joystick.rst for documentation -->\n\n  <arg name=\"dev\" default=\"/dev/inp"
  },
  {
    "path": "moveo_moveit_config/launch/move_group.launch",
    "chars": 3416,
    "preview": "<launch>\n\n  <include file=\"$(find moveo_moveit_config)/launch/planning_context.launch\" />\n\n  <!-- GDB Debug Option -->\n "
  },
  {
    "path": "moveo_moveit_config/launch/moveit.rviz",
    "chars": 9157,
    "preview": "Panels:\n  - Class: rviz/Displays\n    Help Height: 0\n    Name: Displays\n    Property Tree Widget:\n      Expanded:\n       "
  },
  {
    "path": "moveo_moveit_config/launch/moveit_rviz.launch",
    "chars": 668,
    "preview": "<launch>\n\n  <arg name=\"debug\" default=\"false\" />\n  <arg unless=\"$(arg debug)\" name=\"launch_prefix\" value=\"\" />\n  <arg   "
  },
  {
    "path": "moveo_moveit_config/launch/moveo_urdf_moveit_controller_manager.launch.xml",
    "chars": 20,
    "preview": "<launch>\n\n</launch>\n"
  },
  {
    "path": "moveo_moveit_config/launch/moveo_urdf_moveit_sensor_manager.launch.xml",
    "chars": 20,
    "preview": "<launch>\n\n</launch>\n"
  },
  {
    "path": "moveo_moveit_config/launch/ompl_planning_pipeline.launch.xml",
    "chars": 964,
    "preview": "<launch>\n\n  <!-- OMPL Plugin for MoveIt! -->\n  <arg name=\"planning_plugin\" value=\"ompl_interface/OMPLPlanner\" />\n\n  <!--"
  },
  {
    "path": "moveo_moveit_config/launch/planning_context.launch",
    "chars": 1178,
    "preview": "<launch>\n  <!-- By default we do not overwrite the URDF. Change the following to true to change the default behavior -->"
  },
  {
    "path": "moveo_moveit_config/launch/planning_pipeline.launch.xml",
    "chars": 333,
    "preview": "<launch>\n\n  <!-- This file makes it easy to include different planning pipelines; \n       It is assumed that all plannin"
  },
  {
    "path": "moveo_moveit_config/launch/run_benchmark_ompl.launch",
    "chars": 902,
    "preview": "<launch>\n\n  <!-- This argument must specify the list of .cfg files to process for benchmarking -->\n  <arg name=\"cfg\" />\n"
  },
  {
    "path": "moveo_moveit_config/launch/sensor_manager.launch.xml",
    "chars": 649,
    "preview": "<launch>\n\n  <!-- This file makes it easy to include the settings for sensor managers -->  \n\n  <!-- Params for the octoma"
  },
  {
    "path": "moveo_moveit_config/launch/setup_assistant.launch",
    "chars": 538,
    "preview": "<!-- Re-launch the MoveIt Setup Assistant with this configuration package already loaded -->\n<launch>\n\n  <!-- Debug Info"
  },
  {
    "path": "moveo_moveit_config/launch/trajectory_execution.launch.xml",
    "chars": 1331,
    "preview": "<launch>\n\n  <!-- This file makes it easy to include the settings for trajectory execution  -->  \n\n  <!-- Flag indicating"
  },
  {
    "path": "moveo_moveit_config/launch/warehouse.launch",
    "chars": 527,
    "preview": "<launch>\n  \n  <!-- The path to the database must be specified -->\n  <arg name=\"moveit_warehouse_database_path\" />\n\n  <!-"
  },
  {
    "path": "moveo_moveit_config/launch/warehouse_settings.launch.xml",
    "chars": 681,
    "preview": "<launch>\n  <!-- Set the parameters for the warehouse and run the mongodb server. -->\n\n  <!-- The default DB port for mov"
  },
  {
    "path": "moveo_moveit_config/package.xml",
    "chars": 1308,
    "preview": "<package>\n\n  <name>moveo_moveit_config</name>\n  <version>0.3.0</version>\n  <description>\n     An automatically generated"
  },
  {
    "path": "moveo_urdf/CMakeLists.txt",
    "chars": 288,
    "preview": "cmake_minimum_required(VERSION 2.8.3)\r\n\r\nproject(moveo_urdf)\r\n\r\nfind_package(catkin REQUIRED)\r\n\r\ncatkin_package()\r\n\r\nfin"
  },
  {
    "path": "moveo_urdf/config/joint_names_move_urdf.yaml",
    "chars": 203,
    "preview": "controller_joint_names: ['Joint_1', 'Joint_2', 'Joint_3', 'Joint_4', 'Joint_5', 'Gripper_Servo_Gear_Joint', 'Gripper_Ido"
  },
  {
    "path": "moveo_urdf/launch/display.launch",
    "chars": 579,
    "preview": "<launch>\r\n  <arg\r\n    name=\"model\" />\r\n  <arg\r\n    name=\"gui\"\r\n    default=\"true\" />\r\n  <param\r\n    name=\"robot_descript"
  },
  {
    "path": "moveo_urdf/launch/gazebo.launch",
    "chars": 1341,
    "preview": "<launch>\r\n\r\n  <!-- these are the arguments you can pass this launch file, for example paused:=true -->\r\n  <arg name=\"pau"
  },
  {
    "path": "moveo_urdf/launch/gazebo_old.launch",
    "chars": 546,
    "preview": "<launch>\n  <include\n    file=\"$(find gazebo_ros)/launch/empty_world.launch\" />\n  <node\n    name=\"tf_footprint_base\"\n    "
  },
  {
    "path": "moveo_urdf/launch/gazebo_sdf.launch",
    "chars": 540,
    "preview": "<launch>\n  <include\n    file=\"$(find gazebo_ros)/launch/empty_world.launch\" />\n  <node\n    name=\"tf_footprint_base\"\n    "
  },
  {
    "path": "moveo_urdf/package.xml",
    "chars": 696,
    "preview": "<package>\r\n  <name>moveo_urdf</name>\r\n  <version>1.0.0</version>\r\n  <description>\r\n    <p>URDF Description package for m"
  },
  {
    "path": "moveo_urdf/urdf/moveo_urdf.urdf",
    "chars": 16188,
    "preview": "<?xml version='1.0'?>\n<robot\n  name=\"moveo_urdf\">\t\n\n  <link\n    name=\"base_link\">\n    <visual>\n      <origin\n        xyz"
  },
  {
    "path": "moveo_urdf/urdf/moveo_urdf_new.urdf",
    "chars": 15250,
    "preview": "<robot\r\n  name=\"moveo_urdf_5\">\r\n    <link\r\n    name=\"base_link\">\r\n    <inertial>\r\n      <origin\r\n        xyz=\"0.034155 -"
  },
  {
    "path": "moveo_urdf/urdf/moveo_urdf_og.urdf",
    "chars": 15832,
    "preview": "<robot\n  name=\"moveo_urdf_5\">\n    <link\n    name=\"base_link\">\n    <inertial>\n      <origin\n        xyz=\"0.034155 -0.2059"
  },
  {
    "path": "object_detector_app/LICENSE",
    "chars": 1064,
    "preview": "MIT License\n\nCopyright (c) 2017 Dat Tran\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\no"
  },
  {
    "path": "object_detector_app/README.md",
    "chars": 1254,
    "preview": "# Object-Detector-App\n\nA real-time object recognition application using [Google's TensorFlow Object Detection API](https"
  },
  {
    "path": "object_detector_app/__init__.py",
    "chars": 103,
    "preview": "from . utils import *\nfrom . object_detection import *\nfrom . object_detection_multithreading import *\n"
  },
  {
    "path": "object_detector_app/environment.yml",
    "chars": 958,
    "preview": "name: object-detection\nchannels: !!python/tuple\n- menpo\n- defaults\ndependencies:\n- cycler=0.10.0=py35_0\n- freetype=2.5.5"
  },
  {
    "path": "object_detector_app/object_detection/BUILD",
    "chars": 5073,
    "preview": "# Tensorflow Object Detection API: main runnables.\n\npackage(\n    default_visibility = [\"//visibility:public\"],\n)\n\nlicens"
  },
  {
    "path": "object_detector_app/object_detection/CONTRIBUTING.md",
    "chars": 765,
    "preview": "# Contributing to the Tensorflow Object Detection API\n\nPatches to Tensorflow Object Detection API are welcome!\n\nWe requi"
  },
  {
    "path": "object_detector_app/object_detection/README.md",
    "chars": 3565,
    "preview": "# Tensorflow Object Detection API\nCreating accurate machine learning models capable of localizing and identifying\nmultip"
  },
  {
    "path": "object_detector_app/object_detection/__init__.py",
    "chars": 17,
    "preview": "from . import *\n\n"
  },
  {
    "path": "object_detector_app/object_detection/anchor_generators/BUILD",
    "chars": 1267,
    "preview": "# Tensorflow Object Detection API: Anchor Generator implementations.\n\npackage(\n    default_visibility = [\"//visibility:p"
  },
  {
    "path": "object_detector_app/object_detection/anchor_generators/__init__.py",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "object_detector_app/object_detection/anchor_generators/grid_anchor_generator.py",
    "chars": 8499,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/anchor_generators/grid_anchor_generator_test.py",
    "chars": 2950,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/anchor_generators/multiple_grid_anchor_generator.py",
    "chars": 12824,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/anchor_generators/multiple_grid_anchor_generator_test.py",
    "chars": 11513,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/box_coders/BUILD",
    "chars": 2281,
    "preview": "# Tensorflow Object Detection API: Box Coder implementations.\n\npackage(\n    default_visibility = [\"//visibility:public\"]"
  },
  {
    "path": "object_detector_app/object_detection/box_coders/__init__.py",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "object_detector_app/object_detection/box_coders/faster_rcnn_box_coder.py",
    "chars": 3837,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/box_coders/faster_rcnn_box_coder_test.py",
    "chars": 4213,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/box_coders/keypoint_box_coder.py",
    "chars": 6346,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/box_coders/keypoint_box_coder_test.py",
    "chars": 5900,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/box_coders/mean_stddev_box_coder.py",
    "chars": 2364,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/box_coders/mean_stddev_box_coder_test.py",
    "chars": 2396,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/box_coders/square_box_coder.py",
    "chars": 4276,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/box_coders/square_box_coder_test.py",
    "chars": 4305,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/builders/BUILD",
    "chars": 9126,
    "preview": "# Tensorflow Object Detection API: component builders.\n\npackage(\n    default_visibility = [\"//visibility:public\"],\n)\n\nli"
  },
  {
    "path": "object_detector_app/object_detection/builders/__init__.py",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "object_detector_app/object_detection/builders/anchor_generator_builder.py",
    "chars": 3078,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/builders/anchor_generator_builder_test.py",
    "chars": 8189,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/builders/box_coder_builder.py",
    "chars": 2313,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/builders/box_coder_builder_test.py",
    "chars": 4008,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/builders/box_predictor_builder.py",
    "chars": 4989,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/builders/box_predictor_builder_test.py",
    "chars": 15071,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/builders/hyperparams_builder.py",
    "chars": 6054,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/builders/hyperparams_builder_test.py",
    "chars": 15480,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/builders/image_resizer_builder.py",
    "chars": 2714,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/builders/image_resizer_builder_test.py",
    "chars": 2611,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/builders/input_reader_builder.py",
    "chars": 2444,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/builders/input_reader_builder_test.py",
    "chars": 3531,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/builders/losses_builder.py",
    "chars": 5486,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/builders/losses_builder_test.py",
    "chars": 10316,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/builders/matcher_builder.py",
    "chars": 2100,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/builders/matcher_builder_test.py",
    "chars": 3794,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/builders/model_builder.py",
    "chars": 13133,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/builders/model_builder_test.py",
    "chars": 13240,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/builders/optimizer_builder.py",
    "chars": 4035,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/builders/optimizer_builder_test.py",
    "chars": 6543,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/builders/post_processing_builder.py",
    "chars": 4276,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/builders/post_processing_builder_test.py",
    "chars": 3017,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/builders/preprocessor_builder.py",
    "chars": 11970,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/builders/preprocessor_builder_test.py",
    "chars": 16634,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/builders/region_similarity_calculator_builder.py",
    "chars": 2163,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/builders/region_similarity_calculator_builder_test.py",
    "chars": 2657,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/BUILD",
    "chars": 6606,
    "preview": "# Tensorflow Object Detection API: Core.\n\npackage(\n    default_visibility = [\"//visibility:public\"],\n)\n\nlicenses([\"notic"
  },
  {
    "path": "object_detector_app/object_detection/core/__init__.py",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "object_detector_app/object_detection/core/anchor_generator.py",
    "chars": 5407,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/balanced_positive_negative_sampler.py",
    "chars": 3877,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/balanced_positive_negative_sampler_test.py",
    "chars": 3329,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/batcher.py",
    "chars": 5613,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/batcher_test.py",
    "chars": 5833,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/box_coder.py",
    "chars": 4910,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/box_coder_test.py",
    "chars": 2082,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/box_list.py",
    "chars": 6745,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/box_list_ops.py",
    "chars": 37111,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/box_list_ops_test.py",
    "chars": 42277,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/box_list_test.py",
    "chars": 5524,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/box_predictor.py",
    "chars": 24375,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/box_predictor_test.py",
    "chars": 12666,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/data_decoder.py",
    "chars": 1383,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/keypoint_ops.py",
    "chars": 9185,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/keypoint_ops_test.py",
    "chars": 5545,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/losses.py",
    "chars": 23854,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/losses_test.py",
    "chars": 25521,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/matcher.py",
    "chars": 7621,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/matcher_test.py",
    "chars": 7101,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/minibatch_sampler.py",
    "chars": 3097,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/minibatch_sampler_test.py",
    "chars": 3364,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/model.py",
    "chars": 10755,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/post_processing.py",
    "chars": 14457,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/post_processing_test.py",
    "chars": 29506,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/prefetcher.py",
    "chars": 2567,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/prefetcher_test.py",
    "chars": 4311,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/preprocessor.py",
    "chars": 76814,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/preprocessor_test.py",
    "chars": 75940,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/region_similarity_calculator.py",
    "chars": 3687,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/region_similarity_calculator_test.py",
    "chars": 3445,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/standard_fields.py",
    "chars": 6234,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/target_assigner.py",
    "chars": 20587,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/core/target_assigner_test.py",
    "chars": 30064,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/create_pascal_tf_record.py",
    "chars": 6901,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/create_pascal_tf_record_test.py",
    "chars": 3952,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/create_pet_tf_record.py",
    "chars": 7727,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/data/mscoco_label_map.pbtxt",
    "chars": 5056,
    "preview": "item {\n  name: \"/m/01g317\"\n  id: 1\n  display_name: \"person\"\n}\nitem {\n  name: \"/m/0199g\"\n  id: 2\n  display_name: \"bicycle"
  },
  {
    "path": "object_detector_app/object_detection/data/pascal_label_map.pbtxt",
    "chars": 751,
    "preview": "item {\n  id: 0\n  name: 'none_of_the_above'\n}\n\nitem {\n  id: 1\n  name: 'aeroplane'\n}\n\nitem {\n  id: 2\n  name: 'bicycle'\n}\n\n"
  },
  {
    "path": "object_detector_app/object_detection/data/pet_label_map.pbtxt",
    "chars": 1578,
    "preview": "item {\n  id: 0\n  name: 'none_of_the_above'\n}\n\nitem {\n  id: 1\n  name: 'Abyssinian'\n}\n\nitem {\n  id: 2\n  name: 'american_bu"
  },
  {
    "path": "object_detector_app/object_detection/data_decoders/BUILD",
    "chars": 649,
    "preview": "# Tensorflow Object Detection API: data decoders.\n\npackage(\n    default_visibility = [\"//visibility:public\"],\n)\n\nlicense"
  },
  {
    "path": "object_detector_app/object_detection/data_decoders/__init__.py",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "object_detector_app/object_detection/data_decoders/tf_example_decoder.py",
    "chars": 7057,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/data_decoders/tf_example_decoder_test.py",
    "chars": 12324,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/eval.py",
    "chars": 6044,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/eval_util.py",
    "chars": 24890,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/evaluator.py",
    "chars": 9123,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/export_inference_graph.py",
    "chars": 3745,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/exporter.py",
    "chars": 9392,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/exporter_test.py",
    "chars": 10186,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/g3doc/configuring_jobs.md",
    "chars": 5526,
    "preview": "# Configuring the Object Detection Training Pipeline\n\n## Overview\n\nThe Tensorflow Object Detection API uses protobuf fil"
  },
  {
    "path": "object_detector_app/object_detection/g3doc/defining_your_own_model.md",
    "chars": 6835,
    "preview": "# So you want to create a new model!\n\nIn this section, we discuss some of the abstractions that we use\nfor defining dete"
  },
  {
    "path": "object_detector_app/object_detection/g3doc/detection_model_zoo.md",
    "chars": 2208,
    "preview": "# Tensorflow detection model zoo\n\nWe provide a collection of detection models pre-trained on the\n[COCO dataset](http://m"
  },
  {
    "path": "object_detector_app/object_detection/g3doc/exporting_models.md",
    "chars": 792,
    "preview": "# Exporting a trained model for inference\n\nAfter your model has been trained, you should export it to a Tensorflow\ngraph"
  },
  {
    "path": "object_detector_app/object_detection/g3doc/installation.md",
    "chars": 1979,
    "preview": "# Installation\n\n## Dependencies\n\nTensorflow Object Detection API depends on the following libraries:\n\n* Protobuf 2.6\n* P"
  },
  {
    "path": "object_detector_app/object_detection/g3doc/preparing_inputs.md",
    "chars": 1697,
    "preview": "# Preparing Inputs\n\nTensorflow Object Detection API reads data using the TFRecord file format. Two\nsample scripts (`crea"
  },
  {
    "path": "object_detector_app/object_detection/g3doc/running_locally.md",
    "chars": 2796,
    "preview": "# Running Locally\n\nThis page walks through the steps required to train an object detection model\non a local machine. It "
  },
  {
    "path": "object_detector_app/object_detection/g3doc/running_notebook.md",
    "chars": 543,
    "preview": "# Quick Start: Jupyter notebook for off-the-shelf inference\n\nIf you'd like to hit the ground running and run detection o"
  },
  {
    "path": "object_detector_app/object_detection/g3doc/running_on_cloud.md",
    "chars": 4843,
    "preview": "# Running on Google Cloud Platform\n\nThe Tensorflow Object Detection API supports distributed training on Google\nCloud ML"
  },
  {
    "path": "object_detector_app/object_detection/g3doc/running_pets.md",
    "chars": 12558,
    "preview": "# Quick Start: Distributed Training on the Oxford-IIT Pets Dataset on Google Cloud\n\nThis page is a walkthrough for train"
  },
  {
    "path": "object_detector_app/object_detection/matchers/BUILD",
    "chars": 942,
    "preview": "# Tensorflow Object Detection API: Matcher implementations.\n\npackage(\n    default_visibility = [\"//visibility:public\"],\n"
  },
  {
    "path": "object_detector_app/object_detection/matchers/__init__.py",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "object_detector_app/object_detection/matchers/argmax_matcher.py",
    "chars": 8479,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/matchers/argmax_matcher_test.py",
    "chars": 9704,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/matchers/bipartite_matcher.py",
    "chars": 2176,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/matchers/bipartite_matcher_test.py",
    "chars": 2929,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/meta_architectures/BUILD",
    "chars": 3700,
    "preview": "# Tensorflow Object Detection API: Meta-architectures.\n\npackage(\n    default_visibility = [\"//visibility:public\"],\n)\n\nli"
  },
  {
    "path": "object_detector_app/object_detection/meta_architectures/__init__.py",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "object_detector_app/object_detection/meta_architectures/faster_rcnn_meta_arch.py",
    "chars": 69301,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/meta_architectures/faster_rcnn_meta_arch_test.py",
    "chars": 3508,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/meta_architectures/faster_rcnn_meta_arch_test_lib.py",
    "chars": 43127,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/meta_architectures/rfcn_meta_arch.py",
    "chars": 13557,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/meta_architectures/rfcn_meta_arch_test.py",
    "chars": 1768,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/meta_architectures/ssd_meta_arch.py",
    "chars": 26523,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/meta_architectures/ssd_meta_arch_test.py",
    "chars": 10740,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/models/BUILD",
    "chars": 3218,
    "preview": "# Tensorflow Object Detection API: Models.\n\npackage(\n    default_visibility = [\"//visibility:public\"],\n)\n\nlicenses([\"not"
  },
  {
    "path": "object_detector_app/object_detection/models/__init__.py",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "object_detector_app/object_detection/models/faster_rcnn_inception_resnet_v2_feature_extractor.py",
    "chars": 9409,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/models/faster_rcnn_inception_resnet_v2_feature_extractor_test.py",
    "chars": 4535,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/models/faster_rcnn_resnet_v1_feature_extractor.py",
    "chars": 8811,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/models/faster_rcnn_resnet_v1_feature_extractor_test.py",
    "chars": 5761,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/models/feature_map_generators.py",
    "chars": 7593,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/models/feature_map_generators_test.py",
    "chars": 4458,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/models/ssd_feature_extractor_test.py",
    "chars": 3926,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/models/ssd_inception_v2_feature_extractor.py",
    "chars": 3756,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/models/ssd_inception_v2_feature_extractor_test.py",
    "chars": 3936,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/models/ssd_mobilenet_v1_feature_extractor.py",
    "chars": 3813,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/models/ssd_mobilenet_v1_feature_extractor_test.py",
    "chars": 3872,
    "preview": "# Copyright 2017 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"L"
  },
  {
    "path": "object_detector_app/object_detection/object_detection_tutorial.ipynb",
    "chars": 12143,
    "preview": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"metadata\": {},\n   \"source\": [\n    \"# Object Detection Demo\\n\",\n    \"We"
  },
  {
    "path": "object_detector_app/object_detection/protos/BUILD",
    "chars": 6473,
    "preview": "# Tensorflow Object Detection API: Configuration protos.\n\npackage(\n    default_visibility = [\"//visibility:public\"],\n)\n\n"
  },
  {
    "path": "object_detector_app/object_detection/protos/__init__.py",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "object_detector_app/object_detection/protos/anchor_generator.proto",
    "chars": 477,
    "preview": "syntax = \"proto2\";\n\npackage object_detection.protos;\n\nimport \"object_detection/protos/grid_anchor_generator.proto\";\nimpo"
  },
  {
    "path": "object_detector_app/object_detection/protos/anchor_generator_pb2.py",
    "chars": 4085,
    "preview": "# Generated by the protocol buffer compiler.  DO NOT EDIT!\n# source: object_detection/protos/anchor_generator.proto\n\nimp"
  },
  {
    "path": "object_detector_app/object_detection/protos/argmax_matcher.proto",
    "chars": 979,
    "preview": "syntax = \"proto2\";\n\npackage object_detection.protos;\n\n// Configuration proto for ArgMaxMatcher. See\n// matchers/argmax_m"
  },
  {
    "path": "object_detector_app/object_detection/protos/argmax_matcher_pb2.py",
    "chars": 3874,
    "preview": "# Generated by the protocol buffer compiler.  DO NOT EDIT!\n# source: object_detection/protos/argmax_matcher.proto\n\nimpor"
  },
  {
    "path": "object_detector_app/object_detection/protos/bipartite_matcher.proto",
    "chars": 179,
    "preview": "syntax = \"proto2\";\n\npackage object_detection.protos;\n\n// Configuration proto for bipartite matcher. See\n// matchers/bipa"
  },
  {
    "path": "object_detector_app/object_detection/protos/bipartite_matcher_pb2.py",
    "chars": 1703,
    "preview": "# Generated by the protocol buffer compiler.  DO NOT EDIT!\n# source: object_detection/protos/bipartite_matcher.proto\n\nim"
  },
  {
    "path": "object_detector_app/object_detection/protos/box_coder.proto",
    "chars": 552,
    "preview": "syntax = \"proto2\";\n\npackage object_detection.protos;\n\nimport \"object_detection/protos/faster_rcnn_box_coder.proto\";\nimpo"
  },
  {
    "path": "object_detector_app/object_detection/protos/box_coder_pb2.py",
    "chars": 4814,
    "preview": "# Generated by the protocol buffer compiler.  DO NOT EDIT!\n# source: object_detection/protos/box_coder.proto\n\nimport sys"
  }
]

// ... and 121 more files (download for full content)

About this extraction

This page contains the full source code of the jesseweisberg/moveo_ros GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 321 files (51.7 MB), approximately 509.2k tokens, and a symbol index with 1350 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.

Extracted by GitExtract — a free GitHub repo-to-text converter for AI. Built by Nikandr Surkov.

Copied to clipboard!