Repository: alexbaucom17/DominoRobot Branch: master Commit: 4f7273faa068 Files: 159 Total size: 596.1 KB Directory structure: gitextract_eu8r6t07/ ├── .gitignore ├── LICENSE ├── README.md ├── doc/ │ ├── DominoRobotControllerFlowchart.drawio │ ├── DominoRobotSoftwareArchitecture.drawio │ ├── KukaPsuedoCode.txt │ ├── MassAndPowerCalcs.xlsx │ ├── NetworkPrototcols.txt │ ├── SpeedConversions.ods │ ├── SubsystemDiagram.drawio │ ├── SwClassHeirarchy.txt │ └── clearcore_api.txt ├── experimental_testing/ │ ├── ImageParser.py │ ├── RobotArduinoTesting/ │ │ ├── StepperDriverMotion_4Motor_Accel/ │ │ │ └── StepperDriverMotion_4Motor_Accel.ino │ │ ├── TestMotor/ │ │ │ └── TestMotor.ino │ │ ├── WifiServerWithESP/ │ │ │ ├── RobotServer.cpp │ │ │ ├── RobotServer.h │ │ │ └── WifiServerWithESP.ino │ │ ├── WifiTesting/ │ │ │ └── WifiTesting.ino │ │ ├── all_motors/ │ │ │ └── all_motors.ino │ │ ├── all_motors_speed_control/ │ │ │ ├── Motor.cpp │ │ │ ├── Motor.h │ │ │ └── all_motors_speed_control.ino │ │ ├── kalman_test/ │ │ │ ├── KalmanFilter.cpp │ │ │ ├── KalmanFilter.h │ │ │ └── kalman_test.ino │ │ ├── lifter_nano_test/ │ │ │ └── lifter_nano_test.ino │ │ ├── motor_driver_test_script/ │ │ │ └── motor_driver_test_script.ino │ │ ├── new_dc_motor_tuning/ │ │ │ ├── Motor.cpp │ │ │ ├── Motor.h │ │ │ ├── constants.h │ │ │ └── new_dc_motor_tuning.ino │ │ ├── single_motor_speed_control/ │ │ │ ├── Motor.cpp │ │ │ ├── Motor.h │ │ │ └── single_motor_speed_control.ino │ │ ├── stepper_nano_test/ │ │ │ └── stepper_nano_test.ino │ │ └── test_encoders/ │ │ └── test_encoders.ino │ ├── TrajGen.py │ └── TrajGenv2.py └── src/ ├── master/ │ ├── DominoDesign-Questions.psd │ ├── DominoDesign.psd │ ├── FieldPlanner.py │ ├── MarvelMindHandler.py │ ├── MasterMain.py │ ├── README.md │ ├── RobotClient.py │ ├── Runtime.py │ ├── Utils.py │ ├── __init__.py │ ├── camera_utils.py │ ├── config.py │ ├── plans/ │ │ ├── AccuracyTesting_1x3_withdistance.p │ │ ├── AccuracyTesting_1x3_withdistance086.p │ │ ├── 
AccuracyTesting_2x2.p │ │ ├── AccuracyTesting_3x3_withdistance_2axis.p │ │ ├── DockingTest1.p │ │ ├── DockingTest2.p │ │ ├── FullPlan_DominoBros.p │ │ ├── FullTest1.p │ │ ├── HackyTestPlan.p │ │ ├── LargeScale1_5x5.p │ │ ├── LargeScale2_5x5.p │ │ ├── VisionAccuracyTesting_2x2_SmallTestArea.p │ │ ├── autosaved.p │ │ ├── plan.p │ │ ├── plan2.p │ │ ├── vision_offsets small_testing_area.csv │ │ └── vision_offsets_full_plan.csv │ ├── plot_logs2.py │ └── requirements.txt ├── robot/ │ ├── Makefile │ ├── README.md │ ├── src/ │ │ ├── KalmanFilter.cpp │ │ ├── KalmanFilter.h │ │ ├── Localization.cpp │ │ ├── Localization.h │ │ ├── MarvelmindWrapper.cpp │ │ ├── MarvelmindWrapper.h │ │ ├── RobotController.cpp │ │ ├── RobotController.h │ │ ├── RobotServer.cpp │ │ ├── RobotServer.h │ │ ├── SmoothTrajectoryGenerator.cpp │ │ ├── SmoothTrajectoryGenerator.h │ │ ├── StatusUpdater.cpp │ │ ├── StatusUpdater.h │ │ ├── TrayController.cpp │ │ ├── TrayController.h │ │ ├── camera_tracker/ │ │ │ ├── CameraPipeline.cpp │ │ │ ├── CameraPipeline.h │ │ │ ├── CameraTracker.cpp │ │ │ ├── CameraTracker.h │ │ │ ├── CameraTrackerBase.h │ │ │ ├── CameraTrackerFactory.cpp │ │ │ ├── CameraTrackerFactory.h │ │ │ └── CameraTrackerMock.h │ │ ├── constants.cfg │ │ ├── constants.h │ │ ├── main.cpp │ │ ├── robot.cpp │ │ ├── robot.h │ │ ├── robot_controller_modes/ │ │ │ ├── RobotControllerModeBase.cpp │ │ │ ├── RobotControllerModeBase.h │ │ │ ├── RobotControllerModePosition.cpp │ │ │ ├── RobotControllerModePosition.h │ │ │ ├── RobotControllerModeStopFast.cpp │ │ │ ├── RobotControllerModeStopFast.h │ │ │ ├── RobotControllerModeVision.cpp │ │ │ └── RobotControllerModeVision.h │ │ ├── serial/ │ │ │ ├── MockSerialComms.cpp │ │ │ ├── MockSerialComms.h │ │ │ ├── SerialComms.cpp │ │ │ ├── SerialComms.h │ │ │ ├── SerialCommsBase.cpp │ │ │ ├── SerialCommsBase.h │ │ │ ├── SerialCommsFactory.cpp │ │ │ └── SerialCommsFactory.h │ │ ├── sockets/ │ │ │ ├── ClientSocket.cpp │ │ │ ├── ClientSocket.h │ │ │ ├── 
MockSocketMultiThreadWrapper.cpp │ │ │ ├── MockSocketMultiThreadWrapper.h │ │ │ ├── README.md │ │ │ ├── ServerSocket.cpp │ │ │ ├── ServerSocket.h │ │ │ ├── Socket.cpp │ │ │ ├── Socket.h │ │ │ ├── SocketException.h │ │ │ ├── SocketMultiThreadWrapper.cpp │ │ │ ├── SocketMultiThreadWrapper.h │ │ │ ├── SocketMultiThreadWrapperBase.h │ │ │ ├── SocketMultiThreadWrapperFactory.cpp │ │ │ ├── SocketMultiThreadWrapperFactory.h │ │ │ └── SocketTimeoutException.h │ │ ├── utils.cpp │ │ └── utils.h │ └── test/ │ ├── CameraPipeline_Test.cpp │ ├── CameraTracker_Test.cpp │ ├── Localization_Test.cpp │ ├── RobotController_Test.cpp │ ├── RobotServer_Test.cpp │ ├── Robot_Test.cpp │ ├── SanityCheckTest.cpp │ ├── SmoothTrajectoryGenerator_Test.cpp │ ├── SocketWrapper_Test.cpp │ ├── StatusUpdater_Test.cpp │ ├── TrayController_Test.cpp │ ├── test-main.cpp │ ├── test-utils.h │ ├── test_constants.cfg │ └── utils_Test.cpp ├── robot_motor_driver/ │ ├── SerialComms.cpp │ ├── SerialComms.h │ └── robot_motor_driver.ino └── tools/ ├── 80-domino-bot.rules ├── IR_calibration_1.yml ├── IR_calibration_2.yml ├── camera_calibration.py ├── motor_test_script.py └── plot_logs.py ================================================ FILE CONTENTS ================================================ ================================================ FILE: .gitignore ================================================ .VSCodeCounter/ beacon_state.txt build/ lib/ .vscode/ scratch log/ __pycache__ *.so marvelmind_SW* *previous_plan_state.json venv/ ================================================ FILE: LICENSE ================================================ GNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007 Copyright (C) 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The GNU General Public License is a free, copyleft license for software and other kinds of works. 
The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. 
Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. "This License" refers to version 3 of the GNU General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. 
To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. 
A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. 
You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. 
You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. 
You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. 
Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. 
If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. 
When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. 
If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. 
If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. 
For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. 
If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. "Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. 
You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. 
The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. 
SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. 
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see . Also add information on how to contact you by electronic and paper mail. If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: Copyright (C) This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an "about box". You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see . The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read . ================================================ FILE: README.md ================================================ # Domino Robot This is the code for Mark Rober's Domino Robot project. You can find a bunch more info about the project, including details about the software architecture [on my website](https://www.baucomrobotics.com/domino-robot). 
Here is the video that Mark made about the project: [![Mark's video](https://img.youtube.com/vi/8HEfIJlcFbs/0.jpg)](https://www.youtube.com/watch?v=8HEfIJlcFbs) ## Folder structure If you are interested in browsing the files in this repo, here is the general structure to get you oriented. - `doc`: Some high level documentation stuff, much of it is probably out of date - `experimental_testing`: Various experimental files - `src`: The real stuff - `master`: Code for running the master controller and GUI - `robot`: Code that runs on the Raspberry Pi of the robot - `robot_motor_driver`: Code that runs on the ClearCore of the robot - `tools`: Some various utility scripts ## Usage (Not Recommended) This repository exists mostly for those who are interested in browsing the code to see how it works. It is almost certainly a bad idea to try and download this code and use it unless you have identical hardware to the robot in the video and/or really know what you are doing. You should consider this code completely unsupported as I do not plan to make future updates or fix any bugs you may find. If for some reason you really want to try and run this code, there are some (out of date) instructions on how to run [the master code](src/master/README.md) and also some (possibly also out of date) info on which libraries are used for compiling [the robot code](src/robot/README.md). Good luck. 
================================================ FILE: doc/DominoRobotControllerFlowchart.drawio ================================================ 7VxZc6M4EP41rpp5SIob/Bg7x0zNzFZ2kp1j32SQbRJAXiEn9vz6FSDMIYGxjQ05kqoEGiFA/XV/3S3BQB37qxsMFvNvyIHeQJGc1UC9HCiKLJsW/RdJ1onE0OREMMOuwxplgjv3D2RCiUmXrgPDQkOCkEfcRVFooyCANinIAMboudhsirziVRdgBjnBnQ08XvrTdcg8fS5Jyg58gu5szi5t6eyAD9LGTBDOgYOecyL1aqCOMUIk2fJXY+hFg5eOS3LedcXRzY1hGJAmJ9z8a3sPo0/Sw83vv3788C2HwPBMS3p5At6SPTC7WbJORwCjZeDAqBNpoI6e5y6BdwtgR0efqc6pbE58j+7JdNMB4TxuG+1MXc8bIw/huCPVAdCa2lQeEoweYe6IYVtwMqVHZhg4Ln2e3DETAgNGV2Z3CjGBq8ohkDcDSxEJkQ8JXtMm7IRUFQyLisr2nzPNGqn+5jmlqumJgKFptuk6G3C6wcZ8h/FXTjn+OrQcTTT+ljJRDUM8/kPTkUyznfE3zO0KUM1jKSB4Ivc3n9XV31c/F2Pv/hH/M70/UzkFrAaK4ZFomJYTujmLNgkGD6mYXiZ35AP5yGkMOtSFsF2EyRzNUAC8q0w6Kuo0a/MVoQVT3gMkZM38IVgSVNSzBybQu0WhS1wUUJlNNQCpxkaRflzqvr6WGpCo4xHw3Jmw+QU7MEGEID/CDiqgQIp/qJyeiNe/2I3HO7+jnXM93b1c5Q9ertlefMMjYD/O4mdPuw5QADfY4oCkSNFQFp0/wDNIahSqiBGIoQeI+1TsTIQmduotcumtbJAr62YBuRwgQ7TENmRn5b1wqSOK7/qOkges6ShtiKbTEBLOADbPvL9NpMOTM4qnl2AUcOWSX7ntBJmmznYzZEY76zxMBYA2GyG6CxPc05QShNaofScL40zzAJM7yIErfAhDBGC1lzgeESFeOwFrBe6U14a71vDEPGiKU+ZA00ThUAc/LIUmejMHf4ExWOeaLaIGYfPrqJJUMoGkx9M7faM6EooQERlImp0Z/y2jrGUkZ5uZKXGmR/M3F1NTqTC9jTS5SocWyegjpYi8PVbSR0jhTS6iRDOyLQ+EoWun4mvXy6zdSRsxe6ESdvwlGnVTMmlq1EWbPmvJqNUtNl0Ra+1q02qtSW+9K6MnHoCP+u5phAdtgmJN3cAAYkB3ONvcKUMVpJg0w5/E6WcpYZ1aNrSFBYOJpWu6VAdQLk+tTkpL6rD4nFSWBTmpdayUVG8Ufdt+pUd9Q+nosdyX0jB20foVC3PI+Y7oEFLRmI4fRp5HR5mBZoILKEl5PBros4ToL2gDWVqsqkl+h7jgw+1n2oCaDJVI19fR9iKFQ1y9Zbf3sWlA8LKdjlKuhHXudUwOO104kio739+StYaWbPTKklU+Gq8kgbGHqMtsSAYUn6Q45M2dMKaB/B8wibuKlMLiItqvPhrol1vdca0T3509eEzUApsvXLM5I/ZEg/y0jDDxlM5VzTQKdiu3Eqdaoj6PH/DxlZMxQthxA0AgldPoLwinCPtdUcbMQ5MI2BL1EooUo/xk9DCdAj0GZ5keprZD9SWgB8ccTup9VXN60HoXlA5fKT207vbFFj7UhgWFqkdKRbnrKDXJZVt+ROVjh2qy8lGcQr6TVa2Z9ZisZPXckkX9Hp+w+KDoc0CHKozY6osbQJ8+lx0egQucoTBVmE4Vcarg0NxCN9rhArNcL+qcC2S+QtEnMtjMrGymWZpNUW/qv+dpyff3IKsMi8u/+xOP3LTEILdeY6igjhLONPM4MyBDTS9exzoFRfHxS0ZRCxAcHtjmeAyGZBvRZdL44u/8V3YtbRCgLBvFxRZn7T
DgmSzk1RNU6PkC3z18DFybyr4lcCtU+lrmwUn8K+BBPfoV8aAR/7TDg1Z55lTvnAdfa81Mblo0k/tVNUvXFm9JRBL//F41q9Vsm2UzpVUvfAI/y1fGrhF+Bth5S4mGpnXuYF9r1Sn1m9sdrNUvB8svl650sO+OtR7TrUS4hqm2EtKa54ZyqihWs7owYvHqrqZrg3PLgTdFgZpSQmFVGINbfklYcobDrxvLVoV17HiUtv3OXrUCrRTzl2oF29uX34kqL8XS1YPaq3Vrt1qLRviyZ7/m6VgknUzTJZN2b2SeTtf6FjIpL9G5vhLfajUN6noV06VR/4tCjNIFZDpb5XgCTLbO94clfhZHeV+A58d1eqoQcoTiZrfrAdXeEYlm9MctbHkHo2rCb+srfL2lEq1xAbZn9QG+Ii56nZpNkPkHFwk8OCUCh+q7jpPg8UQlApwYZCURbG6Ih0St8bVTJDAlVnPYt0iQFm7TVaknqBHwFdjbbLk4xY8blWDjnQ828CGmt6RI8QTYN4CfoOe7gRPybyG/7Cmwcrqhd74UROWXglQa/LEKgjtYe0Q4LKXdy3HvQOhSidANXlWqQFNH+9CFxtfS3zUVHzV6pql0Xvu9yN6kyF6l752ZUpP1c0lSVEsa6vRvcYmjVqftk89P6vw0TLlut636p2qi6t8lorwZVfGq3xvbVtDrhVuI5CzHUJWWHHrpjS3ZFLiJocBPaC34CeG3ow4HgSwEwdiDAI8RhvurvBgFpa4gFzIxUXNoiEK1YjBX+PRVpVNo/rEqSy4qXPSxKpG+j/atMNGq2zb0/R2EiwnE8bPfum9b6dx7mcdTOt3NPv6XMEP2CUX16n8= ================================================ FILE: doc/DominoRobotSoftwareArchitecture.drawio ================================================ 
7V1bc9o4FP41mek+pCNLvj4mtElv2WabXvdlR2ABao3FCNEk/fUrgwW2JYwBG5s2yUOsY9kYfed+jpwz1Js8XHM8Hd+wkERnEIQPZ+jFGYSW4/nyT0J5TCnAtpaUEadhSlsT7ugvoiam1DkNySw3UTAWCTrNEwcsjslA5GiYc3afnzZkUf5Tp3iUfiJYE+4GOCLatC80FOMl1Xcys18ROhqrT7ZAemaC1eSUMBvjkN1nSOjlGepxxsTyaPLQI1GyempdltddbTi7ejBOYlHlgv7s/T90dh1dzV5fvjl/IaZ/2/A8vctPHM3TL3yDZ4JwSXv2Dk8Fm/6VPr14VEvC2TwOSXJXcIYu78dUkLspHiRn7yUXSNpYTCI5suThT8IFlct5EdFRLGnyjpI6ZLFIsbZcOR5xHFL5PXosYnzxKai/+E3m0ijK0IdO8ivpM8HZD5I54y5+VmcUXHKhL/W1Ul9cPh55yJDStbsmbEIEf5RT0rPnloOW1zzmOfR+zRaeoo0zLOEoIk5ZcbS69xoteZACtgN40NGwIaHk3nTIuBizEYtx9HJNvcyjt57zjiXILDD7ToR4TOHBc8HyiJIHKr5mjr8lt3rupKMXD+mdF4NHNYjl9/2aHWSuSobryxYjdV2RTzbiOGNzPiAla5UulcB8RESZQKRgJQtZyhacRFjQn3k1UTvEjiafV5RIFQbBbYSlxuOHSadB7jyCXQJ0uQsx8YcDo9wNfNIfanKH6pI7kJc7V5c7GBjkLmhK7BSLZEC5/vT6twcCAq9jOEANhw/zWNAJkcQbHEvTfKB4mI3XCWClCQ0EOlouMKDVIFyBBleiw+pFaELDcGHmxozTX9J64BIJk6D1Fw5D0cXwB2RgRK3vO7YDGkMtKIDmGETM5GKsfMz6XQxd131gfemyQvA6ln7iMAHmODLWNbQs6G1FCznHRUt35xVavShZziNBFXgh8DyDOnSIH9omqHzYRwbfvS6onAJUQUXbZXmNQaUbr4+924aAWmlFA1ah46Eg1LEibghCYsIqCFzLd5rCquhmIKRjZRmxai7O8jQ4jhhnrWOrb5kz2+KsdWj1LXuu8TgLumdtBVqLSy84x4+ZCVNGYzHL3Pk2IazZDTkwx27nDijwy/KOa+5ZPdoBDOW3ylA7BO4tMIZTN18cJvuupqc/zRoIu4dD7ABDBDEcDkLJLgY1HHpBHzTm3awCBiUXlt12vAf1AEIXozi8SLK+cjSI8GxGl44h5kInZwApsvla93rQzYjLOXgOwJZUlxzcEi6j0CSNqqllG9lZxWzJ+6EtynkxKt5yf0FUYXN9GjrDESZ/V9F2U+SapoZBwYkrutHLb55elU2EF1W+nb+R5uQtV0a7UV3aHxnYtvPuhGu7+dQtAPY+fFuUtA2yuSBf0Ug9/f7sXjnxG3TK7iCkKbsbJqTuh+AFl5+eHDzrRQTzHuPkBIs0NdgoaZMKJRlTDO6ajBRqLAZXZrIt6Yb7RQuwKN3eSUg3qireyO2UeKsqeFtc4u3FJbafiyoTb0iqzd+LT7xO8QnSC4B3ci1xcmGPTSaz2gORzpcyLFS1ltFYJIL0oFDlWT+TiA2oSL7AKxyH0Z9YoZVmue1YUYlxBqF3dLhsbLkYCMriPwigYkr1uPhga353MfjPfRO+dT7h96N/73u+oe9ICdCzD3g27RO++AK39E/1bB0nB5ljqLYbu41cpynIdEu0qi3J5eQsOliUzGDtIiybBK5MSOvRgHm4fFMcYjJRqI6ihREuFUB2P8eccTDhTlWLSu6gcXGq5qJR7cnovYoU5xbKZ2MtEBSaPbdeAVH5FcWkWGH+wYWQMhwa8m6rN4fs0jqwqf2grGWhDh0TwOdOgQkMKXnLc3Qts2pKrl3L6G5W1hiApCH8KDahe3hpJsE3tIeYwFo11tUOlq+BdctmNHWGjwZVSavHpnaRshaTOqCCMAdVYGjttlyD9a6lk8cIVdv5oeZbDvY33oFuvM0xRmup
nLLHzjb8cPydDARbPMQ1iQnHiwx/IzZvlzaeTa1AZe1DdcihXZBDk32DBjlsLOYxtBi/Y3J16S+8VJv168vT9E3yzqbJ1pkdk8aQUym6090bA7dtjtlfg1qoogr169age4U/bt6TCtJxo5GJpddf3+JokvSrgysaLTOJZ9CN5PJd9uXAHSVHz64j1l/EL9K1qrssu0P80j0n69zL64gABLqOQCYd0ViKxNLDz8909se7wzAoyJvJHTZGLo25wyho1x3ep6hu5dU5Qtv0+f59XqXu71Yl31plvPSxM0J58UCldUi2eKeZhabyQifoI1sAwJadZLvdTHOnY1XLryiE7bWflT73U7S6URIR1BOy7ctiu91nO4Q9B8hU1QRQxwybngB6Mmzl4qQCv/bECT6Ztk1ro4DYbtpqzyIchKlhS3OlyH4VGjYZ2Z+koAbIEMofVU4NG5/vBBbzRLt+moZYNNOasj0PcwK9Kq4hvIemXpXmqshQT7Or3qI7wn8+gbepqGzCzrw5ujns7I2i11Lus4P1kUIG261a2GqsFQAa+srZ4AdJRO5mHgkq/34cc4JDefBFojOtWwhPGtBVoroMUHRUQPVc9sdllaXZ7swq79npWj806p4WbXdP6H7lynQT0REiDUMmu8vtmg7Ml7pcr7z1sjh/a6+mtoP5KM2aUM/MP3VrblIxnTMQ6oFOqYQGnltW7nUJycbUvTYcHqB8KmfwUd3K5zCHQM/g32AZS0UTGnfBqetecKVFxp5pm9RxI2PU7puC9twnHFgoK7XJPuHmXnJi5v6qNQJlelt2GWzlAKoOuqMYdL0i0cMTwnES/HEsw8GnxMuGxItJNaAjhwx6HvuOJk1LQIH45I1tCNc9w25KY4N/c96YYQMswfwJPFO9AWwHz/TeqebAs9s1ynvZZN9dvdFl9e6OI795DMKqRrmZOF5/pZhdiNJQgWU2vAls55dMqr6Q1avTy+N7aPtl83f1BuRw/W9GltPX/60Fvfwf ================================================ FILE: doc/KukaPsuedoCode.txt ================================================ KukaInit(); int tile_count = 0; int num_tiles = 333; int num_rows = 20; int num_cols = 15; while (tile_count < num_tiles) { string line = ReadLineFromFile(tile_count); vector colors = ParseColors(line); int color_count = 0; for (int row = 0; row < num_rows; row++) { for (int col = 0; col < num_cols; col++) { Pose pickup_pose = PickupPoseFromColor(colors[color_count]); Pose dropoff_pose = DropoffPoseFromRowCol(row, col); MoveToPickup(pickup_pose); CloseGripper(); MoveToDropoff(dropoff_pose); OpenGripper(); color_count++; } } tile_count++; } ================================================ FILE: doc/NetworkPrototcols.txt ================================================ Message spec - All messages are sent as json encoded byte strings - All messages will contain a 'type' field that will be a string denoting the command type - All messages will contain a 
'sn' field that will be an int denoting the message serial number - All messages will contain a 'data' field that will house any additional data required by the message type. This field may be empty Master to robot commands: Move: // Move to a waypoint in a straight line float x float y float a Place: // Place dominoes on the floor null Dock: // Dock at charging station Undock: // Undock from charging station Dropoff: // Drop off pallet on conveyor Pickup: // Pick up pallet from conveyor Position: // Fallback for sending position data if usb controller for marvelmind sensor doesn't work float x float y float a float time Robot to master commands: RobotStatus: // Give a status update to server float est_pos_x float est_pos_y float est_pos_a float est_cur_vel_x float est_cur_vel_y float est_cur_vel_a string cur_task int cur_task_msg_sn int bat_percent //Can we even get this? string error_msg bool error_present //Add other fields/sensor statuses as needed Master to base station commands: NextTile: // The next tile to fill int[tile_x*tile_y] dominoes int tile_id Base Station to master commands: BaseStatus: // Status update for server int cur_tile_id float est_time_to_tile_full // Other sensor statuses General commands: StatusRequest: // Request status from robot KeepAlive: // Keep the connection alive Ack: //Acknowledge that command was received int sn Done: //Let the server know the command was done int sn ================================================ FILE: doc/SubsystemDiagram.drawio ================================================ 
7V1dk5o6GP41zvRc2OFbvNx1t+10tmec2jltrzoIUWmReCDu6vn1J0AikGQVFQjSzuyFhBDgeZ+8eb/CDvTJevc+cjarT9ADwUBTvN1Afxhommpr2iD5U7x91jIaGVnDMvI90ilvmPn/AdKokNat74G41BFBGCB/U250YRgCF5XanCiCL+VuCxiU77pxluSOSt4wc50AcN2++h5aZa22Wej9AfjLFb2zqpAza4d2Jg3xyvHgS6FJfxzokwhClP1a7yYgSMCjuGTXvXvl7OHBIhCiKhcMnS92bEPraWY8fbPd7ce75aehno3y7ARb8sLkYdGeIgA8DAg5hBFawSUMneAxb72P4Db0QHIbBR/lfZ4g3OBGFTf+BAjtiXSdLYK4aYXWATkbIydCd4m0cIMbOHHsu7T5nR/QbvwbExBiuI1ccOQ1KXOcaAnQkX7k7ZNXLtyA4PkewDVA0R53iEDgIP+5zBGHUG156JdLA/8gAjlDOConnE9OjECE26YTTk4vKx+B2cZJgXjBs7GMsRNvsvmx8HeJrO4XGNgJDGCUXq57JrA9I0U9gr9A4YytzXXLOob/M4gQ2B1FjJyl84ZoA1Unxy+FuUWaVoVpRdvqh1gK38HOR9+Sy9+OTHL4vXDqYUeGTg/25KDiJMGyifbZ4CY9/E7HSw7y0dMjOvzlk0vjJ5cffLHv3v38gb5+/Kz9+PkhiqbKkKr0+mZXeimGxNkXOmygH6K4MPI0acgpqI3KHNQsRm8y/fXx0f74R/YE4qsNlbl6zFA5g41cxbD5gEklgotBt26G4Jcyt/LE8PhOuJFbYBh+NjwpjFuYA5Z9xhyoi7q6KZe6WsvUDWEIGN4qneWtrkvhrX4eb4dqw8Q9hmHBbvsboBcY/UoE8OIjd4WHxURNTbl6rbjFYqG5rsiK86y5ZdZkxWlqx8w4Tb89VaGWFEWuN9pQFRf5UAqvGsQdzbrtvOu4YfzhRne4YXWLGzdoYvSXG3a3uHGLntMV3KjkOREC5dy4LnRQmRta7bGDqtwQe22qXG4UqaFUpEbZM1Frp0YLzomILkdCTdLitgrnAHyGc5j4JfiWygRipGEQJNa/FeDXuJ8nv5bJrzd3kbf1Q/hX3Y5BO+FdnYnvGrIdA2N8ewu85PBXo9Gucd1qXOz3MzTUWX5lL8QFXfn4ATuSoWvlkV4J3zYd2DAkhDUoFBJzg5fbr54Tr9LHOEr0pnk5pIJiBXeKT9UjW6c4fi4zh2fH3E5c0Aw5VYNbc3m2imhSIsb5vKuiPk+yqrA6moLVkbZdSb7Xgvhna0X7QhLXJmuTk/XXFUgLT9YQwSjmJJ9roURQJ0wpNqRqu0AcUp3bpmEedYDOCKlqZUzNUTXLSW/KcqKRmgLIT/4iqz6IEdhs0l8p3j2A26KRIGlw2zzcTpopUGIQPcM+YDyy5GJMn6dUUBM9g2Dthx5uvweOC8NakfYcYC+ESFuuDeaLepA2xqzy0CQjPeaQnsLYR74AXvyOiF16i1iRtbcILGlyAn8ZJos5Rg5rI/0+Qcx3neCOnFj7npdaoSKhlS3TOqTA2D2mxatwQyAFrSkp6LxNNMOam18eeyMCXR2VVY7Kq/V2RcCbKv2fCJwULF7xNyUFYdjhUIIrJQpEfxdi8s1G8jtRSGKTlPzptI+YS+2Eam2+ALpkErxSpXGV8dVKicaQcfZUU7KBa484pI/AqpwHK2F8LYu4WcJtzK/hh+qXIm6q2RhwvGdA8wYLmLrlOYLWv1tITwzjVDHd4Q4Yk11+kuYansASJCTPxsLPlg2Xnezv6mQYZQkfdmwURDxuc2aMBW7JCbf6zOnRmstnmAy2Bq91RqKV32wKW34PxQyEcc3gtuTlseAKdFO74GocuBO43uAVs15029mZ0jnq8qbJoYy0TsXQijnCoSudu6dXVboXka6bKr+EHhL4VVZhXbgKP0BsakJ8ehpgia1Beu/ZPkZgXVia5+wdf5vleshsVRBUFBgig4zN
SdRX+8MRhyYfQOhCD9Scfqi8tnCQCoCvHEFsMx4udtk5EP8U3x0pvmu5vErMpSvzkwabAmPjQVXzk6bCDDQelQdqeM8dv3D/AwLo+mjfaTV9lQJhY35tBr+FQuADrxP8zqjbwe/rdDiTf2gz+C0UAZ9B/gwSGBMDBzlZDPwGKiIvE4qECsiju66L9oq/8AdJ7t7bYiw15c3jbGprlnVrEB+KTsu+ha5V8y1GjUEu2Xjpf3W4GHZBxqGZPdfXsYM3D2h1uNZTXUgL0JiqVfm6kU+Q90w3WkbXdKMo7sIqy2brNMWK8riTV7uaklYEqpQJYdrGWybGVtXNYutJLZUhTcNu1gHxDiyzt7G1ox7+jisus5bUVZavQOuZau+YYpf7CYbLamzI9qzmrN4aI3Zi0LVbmIv0KQtz8cHHcyiM0xrymbz65qsmIPbz5NXsi4GW+zmDy+bg6PI52Hahmxh0o6U5mF567vYtZquOaZsMyxr4cJfGu1W06NUPl4P8I0g1Tvl20mTdm/GjPzO+5SXX4qf7kWyatDWXD75/SCoNbnr6cVnqFncUiVHmoxnTCMQxRpFFt79pJunV/YLNRn0XAlMUIz3bSjlxa/tcrnMAJG74EkuhQxGx3yjxRL8J0vxHU69jB++GC/LxSY9iDkq58aTTkN2GLD3rpPMJwJ6FJoea2rHopGArLK8qe512yma/rLTTUDNryztxn76pL/GUfpeZ/kOWrHv+b230x/8B ================================================ FILE: doc/SwClassHeirarchy.txt ================================================ - robot - status updater - status - robot server - socket multi thread wrapper - robot controller - serial comms - localization - Kalman filter - controller mode - position mode - trajectory generator - x,y,a controllers - vision mode - trajectory generator - Camera tracker * - Kalman filter - x,y,a controllers - tray controller - serial comms - marvelmind wrapper - camera tracker - side camera - rear camera - motor controller - serial comms - robot velocity handler - lifter action handler - master - gui - runtime - plan - robot interface - robot client - tcp client - field planner ================================================ FILE: doc/clearcore_api.txt ================================================ Tentative thoughts on api with clearcore for motor control over serial comms There's a few options here: - Clearcore does all trajectory generation, kalman filtering, kinematics, and control - Central processor sends it marvelmind position data and forwards command from master - This requires a more extensive api to handle forwarding commands - Clearcore recieves global velocity command from central 
processor, handles frame transformation and kinematics only - Sends back computed global velocity - Central processor has to send global angle along with command - Cnetral processor handles kalman filtering and trajectory generation - Relatively simple api - Clearcore recieves local velocity command from central processors, handles local kinematics only - Sends back computed local velocity - Central processor handles kf, traj gen, frame conversion - Very simple api - seems like the best starting point Starting with option 3 Send from central controller: - Local velocity command [vx, vy, va] Response from clearcore: - Estimated local velocity [vx_est, vy_est, va_est] Clearcore outline: while true: if new command: decode command do IK send new motor commands do FK encode response send response (est vel) else: pass ================================================ FILE: experimental_testing/ImageParser.py ================================================ import matplotlib.image as mpimg import matplotlib.pyplot as plt import skimage.transform as sktf import numpy as np # Configuration image_name = 'MR.jpg' desired_width = 300 desired_height = 300 dominoes = np.array( [('black', (0,0,0)), ('red', (1,0,0)), ('blue', (0,0,1)), ('green', (0,1,0)), ('white', (1,1,1)) ]) # Load original image img = mpimg.imread(image_name) # Scaled image img_scaled = sktf.resize(img, (desired_height, desired_width),anti_aliasing=False) # Parse image into domino IDs by choosing 'closest' color img_parsed_color = np.zeros((desired_width*desired_height, 3)) img_parsed_ids = np.zeros_like(img_parsed_color,dtype=np.int_)[:,0] count = 0 for row in img_scaled: for px in row: best_id = -1 best_score = 9999999 for id,val in enumerate(dominoes): color = val[1] score = np.linalg.norm(px - color) if score < best_score: best_score = score best_id = id img_parsed_ids[count] = best_id img_parsed_color[count] = dominoes[best_id][1] count = count + 1 img_parsed_ids = img_parsed_ids.reshape((desired_height, 
desired_width)) img_parsed_color = img_parsed_color.reshape((desired_height, desired_width, 3)) # Output some metrics print('Domino usage:') print('Total number of dominoes: ' + str(img_parsed_ids.size)) print('Colors:') unique_colors, counts = np.unique(img_parsed_ids, return_counts=True) for i,id in enumerate(unique_colors): print(' ' + dominoes[id][0] + ': ' + str(counts[i])) # Plot images fig, axes = plt.subplots(nrows=1, ncols=3) ax = axes.ravel() ax[0].imshow(img) ax[0].set_title('Original') ax[1].imshow(img_scaled) ax[1].set_title('Scaled') ax[2].imshow(img_parsed_color) ax[2].set_title('Dominoes') plt.tight_layout() plt.show() ================================================ FILE: experimental_testing/RobotArduinoTesting/StepperDriverMotion_4Motor_Accel/StepperDriverMotion_4Motor_Accel.ino ================================================ #include // https://github.com/DIMRobotics/ArduinoStepperDriver /* Axis identifiers inside the library; on them then Can be controlled specific. You can do several axles, The maximum number in the native assembly is 3. 
It is treated by editing headerfile, there is a constant NUM_AXSIS */ axis_t bl, br, fl, fr; float MaxSpeed = 3.14; // rad/s #define PulseRev 3200 const float SECONDS_TO_MICROSECONDS = 1000000; const float STEPPER_CONVERSION = SECONDS_TO_MICROSECONDS * 2 * PI / PulseRev; #define bl_pulse 49 #define bl_dir 48 #define br_pulse 51 #define br_dir 50 #define fl_pulse 47 #define fl_dir 46 #define fr_pulse 45 #define fr_dir 44 #define PIN_ENABLE_ALL 40 void setup() { Serial.begin(115200); StepperDriver.init(); delay(100); // Serial.print("rad conversion: "); // Serial.println(RAD_PER_SEC_TO_STEPS_PER_SEC); // Serial.print("sec conversion: "); // Serial.println(SECONDS_TO_MICROSECONDS); bl = StepperDriver.newAxis(bl_pulse, bl_dir, 255, PulseRev); br = StepperDriver.newAxis(br_pulse, br_dir, 255, PulseRev); fr = StepperDriver.newAxis(fr_pulse, fr_dir, 255, PulseRev); fl = StepperDriver.newAxis(fl_pulse, fl_dir, 255, PulseRev); StepperDriver.enable(bl); StepperDriver.enable(br); StepperDriver.enable(fl); StepperDriver.enable(fr); digitalWrite(PIN_ENABLE_ALL, LOW); Serial.println("TCCR2A: "); Serial.println(TCCR2A, BIN); Serial.println("TCCR2B: "); Serial.println(TCCR2B, BIN); Serial.println("TIMSK2: "); Serial.println(TIMSK2, BIN); } void writeAll(float vel) { Serial.print("Speed: "); Serial.println(vel, 4); Serial.print("Delays: [ "); writeSpeed(fl, vel); writeSpeed(fr, vel); writeSpeed(br, vel); writeSpeed(bl, vel); Serial.println("]"); } void writeSpeed(axis_t motor, float vel) { uint16_t delay_us = 0; // This works for if vel is 0 // Compute motor direction if(vel > 0) { StepperDriver.setDir(motor, FORWARD); } else { vel = -vel; StepperDriver.setDir(motor, BACKWARD); } // Compute delay between steps to achieve desired velocity if(vel != 0) { delay_us = static_cast(STEPPER_CONVERSION / vel); } Serial.print(delay_us); Serial.print(", "); // Update motor StepperDriver.setDelay(motor, delay_us); } void loop() { writeAll(MaxSpeed); delay(2000); writeAll(0); delay(500); 
writeAll(-1*MaxSpeed); delay(2000); writeAll(0); delay(500); } ================================================ FILE: experimental_testing/RobotArduinoTesting/TestMotor/TestMotor.ino ================================================ #define PIN_ENABLE 52 #define PIN_BL_PWM 6 #define PIN_BL_DIR 48 #define FW 1 #define BW 0 #define SPEED_SLOW 50 #define SPEED_FAST 255 void setup() { // put your setup code here, to run once: pinMode(PIN_ENABLE,OUTPUT); pinMode(PIN_BL_PWM,OUTPUT); pinMode(PIN_BL_DIR,OUTPUT); Serial.begin(9600); } void loop() { // put your main code here, to run repeatedly: Serial.print("Forward"); analogWrite(PIN_BL_PWM, SPEED_SLOW); digitalWrite(PIN_BL_DIR, FW); digitalWrite(PIN_ENABLE, HIGH); // Wait for a little bit delay(2000); // Stop digitalWrite(PIN_ENABLE, LOW); delay(500); Serial.print("Reverse"); analogWrite(PIN_BL_PWM, SPEED_FAST); digitalWrite(PIN_BL_DIR, BW); digitalWrite(PIN_ENABLE, HIGH); // Wait for a little bit delay(2000); // Stop digitalWrite(PIN_ENABLE, LOW); delay(500); } ================================================ FILE: experimental_testing/RobotArduinoTesting/WifiServerWithESP/RobotServer.cpp ================================================ #include // This has to be before ArduinoJson.h to fix compiler issues #include "RobotServer.h" #include RobotServer::RobotServer(HardwareSerial& serial, HardwareSerial& debug) : serial_(serial), debug_(debug), clientConnected_(false), wifiConnected_(false) { serial_.begin(115200); } RobotServer::oneLoop() { COMMAND cmd = COMMAND::NONE; String newMsg = getAnyIncomingMessage(); if(newMsg.length() != 0) { debug_.print("[RobotServer] "); debug_.print("RCV: "); debug_.println(newMsg); if(newMsg == "Client connected") { clientConnected_ = true; wifiConnected_ = true; } else if(newMsg == "Client disconnected") { clientConnected_ = false; wifiConnected_ = true; } else if(newMsg == "Connecting..") { wifiConnected_ = false; clientConnected_ = false; } else if(newMsg.startsWith("Connected to 
WiFi.")) { clientConnected_ = false; wifiConnected_ = true; } else if(newMsg == "Waiting for client connection") { clientConnected_ = false; wifiConnected_ = true; } else { cmd = getCommand(cleanString(newMsg)); } } return cmd; } String RobotServer::cleanString(String message) { int idx_start = message.indexOf("{"); int idx_end = message.lastIndexOf("}") + 1; return message.substring(idx_start, idx_end); } String RobotServer::getAnyIncomingMessage() { String msg = ""; if(serial_.available()) { msg = serial_.readString(); } return msg; } RobotServer::COMMAND RobotServer::getCommand(String message) { COMMAND cmd = COMMAND::NONE; StaticJsonDocument<256> doc; DeserializationError err = deserializeJson(doc, message); debug_.print("[RobotServer] GetCommand(): "); debug_.println(message); if(err) { debug_.print("[RobotServer] Error parsing JSON: "); debug_.println(err.c_str()); cmd = COMMAND::ERROR_BAD_JSON; sendErr("bad_json"); } else { String type = doc["type"]; if(type == "move") { debug_.println("[RobotServer] Got MOVE command "); cmd = COMMAND::MOVE; sendAck(type); } else if(type == "place") { debug_.println("[RobotServer] Got PLACE command "); cmd = COMMAND::PLACE; sendAck(type); } else if(type == "dock") { debug_.println("[RobotServer] Got DOCK command "); cmd = COMMAND::DOCK; sendAck(type); } else if(type == "undock") { debug_.println("[RobotServer] Got UNDOCK command "); cmd = COMMAND::UNDOCK; sendAck(type); } else if(type == "dropoff") { debug_.println("[RobotServer] Got DROPOFF command "); cmd = COMMAND::DROPOFF; sendAck(type); } else if(type == "pickup") { debug_.println("[RobotServer] Got PICKUP command "); cmd = COMMAND::PICKUP; sendAck(type); } else if(type == "position") { debug_.println("[RobotServer] Got POSITION command "); cmd = COMMAND::POSITION; sendAck(type); } else if(type == "status") { debug_.println("[RobotServer] Got STATUS command "); cmd = COMMAND::STATUS; sendStatus(); } else if(type == "") { debug_.println("[RobotServer] ERROR: Type field 
empty or not specified "); cmd = COMMAND::ERROR_NO_TYPE; sendErr("no_type"); } else { debug_.println("[RobotServer] ERROR: Unkown type field "); cmd = COMMAND::ERROR_UNKOWN_TYPE; sendErr("unkown_type"); } } return cmd; } void RobotServer::sendMsg(String msg) { serial_.print(msg + '\0'); debug_.print("[RobotServer] Send: "); debug_.println(msg); } void RobotServer::sendAck(String data) { StaticJsonDocument<256> doc; doc["type"] = "ack"; doc["data"] = data; String msg; serializeJson(doc, msg); sendMsg(msg); } void RobotServer::sendErr(String data) { StaticJsonDocument<256> doc; doc["type"] = "ack"; doc["data"] = data; String msg; serializeJson(doc, msg); sendMsg(msg); } void RobotServer::sendStatus() { StaticJsonDocument<256> doc; doc["type"] = "status"; doc["data"] = "not implimented"; String msg; serializeJson(doc, msg); sendMsg(msg); } ================================================ FILE: experimental_testing/RobotArduinoTesting/WifiServerWithESP/RobotServer.h ================================================ /* * Robot server that handles gettings messages from master * and responding correctly */ #ifndef RobotServer_h #define RobotServer_h #include class RobotServer { public: enum COMMAND { NONE, ERROR_NO_TYPE, ERROR_UNKOWN_TYPE, ERROR_BAD_JSON, MOVE, PLACE, DOCK, UNDOCK, DROPOFF, PICKUP, POSITION, STATUS }; // Constructor RobotServer(HardwareSerial& serial, HardwareSerial& debug); int oneLoop(); private: HardwareSerial& serial_; HardwareSerial& debug_; bool clientConnected_; bool wifiConnected_; String getAnyIncomingMessage(); String cleanString(String message); COMMAND getCommand(String message); void sendMsg(String msg); void sendAck(String data); void sendErr(String data); void sendStatus(); }; #endif ================================================ FILE: experimental_testing/RobotArduinoTesting/WifiServerWithESP/WifiServerWithESP.ino ================================================ #include "RobotServer.h" RobotServer server = RobotServer(Serial3, Serial); 
void setup() { // put your setup code here, to run once: // Communication with the host computer Serial.begin(9600); Serial.println("Wifi client starting"); } void loop() { // put your main code here, to run repeatedly: int cmd = server.oneLoop(); delay(50); } ================================================ FILE: experimental_testing/RobotArduinoTesting/WifiTesting/WifiTesting.ino ================================================ // Arduino pin 15 (RX3) to ESP8266 TX // Arduino pin 14 (TX3) to voltage divider then to ESP8266 RX // Connect GND from the Arduiono to GND on the ESP8266 // Pull ESP8266 CH_PD HIGH // When a command is entered in to the serial monitor on the computer // the Arduino will relay it to the ESP8266 void setup() { Serial.begin(9600); // communication with the host computer // Start the serial for communication with the ESP8266 Serial3.begin(115200); Serial.println(""); Serial.println("Remember to to set Both NL & CR in the serial monitor."); Serial.println("Ready"); Serial.println(""); } void loop() { // listen for communication from the ESP8266 and then write it to the serial monitor if ( Serial3.available() ) { Serial.write( Serial3.read() ); } // listen for user input and send it to the ESP8266 if ( Serial.available() ) { Serial3.write( Serial.read() ); } } ================================================ FILE: experimental_testing/RobotArduinoTesting/all_motors/all_motors.ino ================================================ // Pinouts #define PIN_ENABLE 52 #define PIN_FR_PWM 3 #define PIN_FR_DIR 49 #define PIN_FL_PWM 2 #define PIN_FL_DIR 51 #define PIN_BR_PWM 5 #define PIN_BR_DIR 50 #define PIN_BL_PWM 6 #define PIN_BL_DIR 48 // Motor index #define FR 0 #define FL 1 #define BR 2 #define BL 3 // Motor arrays const int PWM_ARRAY[4] = {PIN_FR_PWM, PIN_FL_PWM, PIN_BR_PWM, PIN_BL_PWM}; const int DIR_ARRAY[4] = {PIN_FR_DIR, PIN_FL_DIR, PIN_BR_DIR, PIN_BL_DIR}; const int SPEED_ARRAY[4] = {130, 75, 75, 100}; const int NUM_DIRECTIONS = 4; // 
Direction vectors const int MOTOR_DIRECTIONS[NUM_DIRECTIONS][4] = { {1, 1, 1, 1}, //Forward {0, 0, 0, 0}, //Backward {1, 0, 0, 1}, //Left {0, 1, 1, 0} //Right // {1, 0, 1, 0}, //Forward-Left // {1, 0, 0, 1}, //Forward-Right // {0, 1, 0, 1}, //Backward-Left // {0, 1, 1, 0} //Backward-Right }; void commandMotors(int direction) { for(int i = 0; i < 4; i++) { analogWrite(PWM_ARRAY[i],SPEED_ARRAY[i]); digitalWrite(DIR_ARRAY[i],MOTOR_DIRECTIONS[direction][i]); } digitalWrite(PIN_ENABLE,HIGH); } void setup() { // Setup pin modes pinMode(PIN_ENABLE,OUTPUT); for(int i = 0; i < 4; i++) { pinMode(PWM_ARRAY[i],OUTPUT); pinMode(DIR_ARRAY[i],OUTPUT); } Serial.begin(9600); } int stage = 0; void loop() { // Command motors on commandMotors(stage); Serial.print("On: "); Serial.println(stage); // Wait for a little bit delay(1000); // Turn motors off and let coast digitalWrite(PIN_ENABLE,HIGH); for(int i = 0; i < 4; i++) { analogWrite(PWM_ARRAY[i],0); } Serial.println("Off"); // Wait for a little bit delay(1500); // Increment stage if(stage == NUM_DIRECTIONS-1) { stage = 0; } else { stage++; } } ================================================ FILE: experimental_testing/RobotArduinoTesting/all_motors_speed_control/Motor.cpp ================================================ #include "Motor.h" #include Motor::Motor(int pwmPin, int dirPin, int encPinA, int encPinB, double Kp, double Ki, double Kd, float velFilterFreq) : pwmPin_(pwmPin), dirPin_(dirPin), inputVel_(0.0), currentVelRaw_(0.0), currentVelFiltered_(0.0), outputCmd_(0.0), prevCount_(0), prevMillis_(millis()), enc_(encPinA, encPinB), controller_(¤tVelFiltered_, &outputCmd_, &inputVel_, Kp, Ki, Kd, DIRECT), velFilter_(LOWPASS, velFilterFreq) { pinMode(pwmPin_, OUTPUT); pinMode(dirPin_, OUTPUT); controller_.SetMode(AUTOMATIC); controller_.SetOutputLimits(-255, 255); } void Motor::setCommand(double vel) { inputVel_ = vel; } void Motor::runLoop() { // Read current values long curCount = enc_.read(); unsigned long curMillis = 
millis(); // Compute delta long deltaCount = curCount - prevCount_; double deltaRevs = static_cast(deltaCount) * COUNTS_PER_REV; unsigned long deltaMillis = curMillis - prevMillis_; // Compute current velocity in revs/second currentVelRaw_ = deltaRevs / (static_cast(deltaMillis) * 1000.0); currentVelFiltered_ = velFilter_.input(currentVelRaw_); // Run PID controller controller_.Compute(); // Update output values if(outputCmd_ < 0) { digitalWrite(dirPin_,0); } else { digitalWrite(dirPin_,1); } analogWrite(pwmPin_, abs(outputCmd_)); // Copy current values into previous prevCount_ = curCount; prevMillis_ = curMillis; // // Serial.print(curCount); // Serial.print(" "); // Serial.print(currentVelRaw_); // Serial.print(" "); // Serial.print(currentVelFiltered_); // Serial.print(" "); // Serial.print(inputVel_); // Serial.print(" "); // Serial.print(outputCmd_); // Serial.println(""); } ================================================ FILE: experimental_testing/RobotArduinoTesting/all_motors_speed_control/Motor.h ================================================ /* Requires libraries: * Filters: https://github.com/JonHub/Filters * PID: https://playground.arduino.cc/Code/PIDLibrary/ * Encoder: https://www.pjrc.com/teensy/td_libs_Encoder.html */ #ifndef Motor_h #define Motor_h #include #include #include #define COUNTS_PER_REV 835; class Motor { public: /* * Motor object constructor * pwmPin - which pin the motor PWM is connected to * dirPin - which pin the motor direction is connected to * encPinA - which pin the encoder wire A is connected to. This should be a pin capable of handling interrupts * encPinB - which pin the encoder wire B is connected to. 
This should ideally (but not required) be a pin capable of handling interrupts * Kp - Proportional gain * Ki - Integral gain * Kd - Derrivative gain * velFilterFreq - frequency to use for velocity lowpass filter */ Motor(int pwmPin, int dirPin, int encPinA, int encPinB, double Kp, double Ki, double Kd, float velFilterFreq); /* * Set the desired velocity in revs/second */ void setCommand(double vel); /* * Run the actual controller. Make sure this gets called reasonably quickly (i.e. every 20 ms or so) */ void runLoop(); private: // Pins int pwmPin_; int dirPin_; // Values for computation double inputVel_; // Desired velocity in revs/sec double currentVelRaw_; // Raw current velocity in revs/sec double currentVelFiltered_; double outputCmd_; // Output command in [-255, 255] long prevCount_; // Encoder count from previous loop // Timer unsigned long prevMillis_; // Timer from previous loop // Other objects Encoder enc_; // Encoder object PID controller_; // PID controller FilterOnePole velFilter_; // Velocity lowpass filter }; #endif Motor_h ================================================ FILE: experimental_testing/RobotArduinoTesting/all_motors_speed_control/all_motors_speed_control.ino ================================================ /* Requires libraries: * Filters: https://github.com/JonHub/Filters * PID: https://playground.arduino.cc/Code/PIDLibrary/ * Encoder: https://www.pjrc.com/teensy/td_libs_Encoder.html */ #include "Motor.h" // Pinouts #define PIN_ENABLE 52 #define PIN_FR_PWM 3 #define PIN_FR_DIR 49 #define PIN_FL_PWM 2 #define PIN_FL_DIR 51 #define PIN_BR_PWM 5 #define PIN_BR_DIR 50 #define PIN_BL_PWM 6 #define PIN_BL_DIR 48 #define PIN_FR_ENC_A 20 #define PIN_FR_ENC_B 35 #define PIN_FL_ENC_A 21 #define PIN_FL_ENC_B 41 #define PIN_BR_ENC_A 18 #define PIN_BR_ENC_B 23 #define PIN_BL_ENC_A 19 #define PIN_BL_ENC_B 29 #define VEL_FILTER_FREQ 0.5 // HZ const double Kp = 100; const double Ki = 1; const double Kd = 1; Motor allMotors[4] = { Motor(PIN_FR_PWM, 
PIN_FR_DIR, PIN_FR_ENC_A, PIN_FR_ENC_B, Kp, Ki, Kd, VEL_FILTER_FREQ), Motor(PIN_FL_PWM, PIN_FL_DIR, PIN_FL_ENC_A, PIN_FL_ENC_B, Kp, Ki, Kd, VEL_FILTER_FREQ), Motor(PIN_BR_PWM, PIN_BR_DIR, PIN_BR_ENC_A, PIN_BR_ENC_B, Kp, Ki, Kd, VEL_FILTER_FREQ), Motor(PIN_BL_PWM, PIN_BL_DIR, PIN_BL_ENC_A, PIN_BL_ENC_B, Kp, Ki, Kd, VEL_FILTER_FREQ) }; const int MOTOR_DIRECTIONS[4][4] = { {1, 1, 1, 1}, //Forward {-1, -1, -1, -1}, //Backward {1, -1, -1, 1}, //Left {-1, 1, 1, -1} //Right // {1, 0, 1, 0}, //Forward-Left // {1, 0, 0, 1}, //Forward-Right // {0, 1, 0, 1}, //Backward-Left // {0, 1, 1, 0} //Backward-Right }; const float SPEED[4] = {2, 1, 1, 0.5}; // scale factor for rev/s void setup() { // Setup pin modes pinMode(PIN_ENABLE,OUTPUT); Serial.begin(250000); } int waitForInput() { int motion = 0; bool gotInput = false; while(!gotInput) { if(Serial.available() > 0) { // Serial.println("Serial available"); int incomingByte = Serial.read(); // Serial.print("Read: "); // Serial.println(incomingByte); if(incomingByte == 49) { motion = 1; gotInput = true; } else if(incomingByte == 50) { motion = 2; gotInput = true; } else if(incomingByte == 51) { motion = 3; gotInput = true; } else if(incomingByte == 52) { motion = 4; gotInput = true; } else { // Do nothing } } } return motion; } void setMotorCommands(int motion) { for(int i = 0; i < 4; i++) { int curDirection = MOTOR_DIRECTIONS[motion-1][i]; float curSpeed = curDirection * SPEED[i]; Serial.print("Motor "); Serial.print(i); Serial.print(", Direction "); Serial.print(curDirection); Serial.print(", Speed "); Serial.println(curSpeed); allMotors[i].setCommand(curSpeed); } digitalWrite(PIN_ENABLE, 1); } void runMotors() { unsigned long prevTime = millis(); unsigned long startTime = prevTime; int targetDelta = 20; int targetTotalTime = 2000; while (millis() - startTime < targetTotalTime) { if(millis() - prevTime > targetDelta) { prevTime = millis(); for(int i = 0; i < 4; i++) { allMotors[i].runLoop(); } } } } void loop() { int motion = 
#include "KalmanFilter.h"

// Construct a filter from the supplied system matrices (see KalmanFilter.h
// for their meanings). Dimensions m and n are taken from the row counts of
// C and A respectively.
// NOTE(review): x_hat/x_hat_new are sized (1,n) here but init() below uses
// mat::zeros(n,1) -- the orientations differ; confirm which convention the
// mat library expects.
KalmanFilter::KalmanFilter(
    double dt,
    const mat& A,
    const mat& B,
    const mat& C,
    const mat& Q,
    const mat& R,
    const mat& P)
    : A(A), B(B), C(C), Q(Q), R(R), P0(P),
      m(C.get_rows()), n(A.get_rows()), dt(dt),
      I(mat::identity(n)), x_hat(1,n), x_hat_new(1,n)
{
}

// Initialize with a caller-supplied start time and initial state guess;
// the error covariance is reset to the P passed at construction.
void KalmanFilter::init(double t0, const mat& x0)
{
    x_hat = x0;
    P = P0;
    this->t0 = t0;
    t = t0;
}

// Initialize with a zero initial state and a start time of 0.
void KalmanFilter::init()
{
    x_hat = mat::zeros(n,1);
    P = P0;
    t0 = 0;
    t = t0;
}

// Propagate the state estimate one step. The caller supplies the (possibly
// time-varying) dynamics matrix A and time step dt, which are stored before
// the state and covariance are advanced.
void KalmanFilter::predict(double dt, const mat& A, const mat& u)
{
    this->A = A;
    this->dt = dt;
    x_hat_new = A * x_hat + B * u;      // a-priori state from dynamics + control
    P = A*P*A.t() + Q;                  // a-priori covariance
    t += dt;
    x_hat = x_hat_new;
}

// Correct the state estimate with a measurement y.
// NOTE(review): the += on x_hat_new assumes predict() ran immediately
// before (so x_hat == x_hat_new at entry); calling update() twice in a row
// would double-apply the prediction -- confirm intended call pattern.
void KalmanFilter::update(const mat& y)
{
    K = P*C.t()*(C*P*C.t() + R).inv();  // Kalman gain
    x_hat_new += K * (y - C*x_hat);     // innovation-weighted correction
    P = (I - K*C)*P;                    // a-posteriori covariance
    x_hat = x_hat_new;
}
*/ void init(double t0, const mat& x0); /** * Predict estimated state based on the time step, dynamics matrix, and control input */ void predict(double dt, const mat& A, const mat& u); /** * Update the estimated state based on measured values. */ void update(const mat& y); /** * Return the current state and time. */ mat state() { return x_hat; }; double time() { return t; }; private: // Matrices for computation mat A, B, C, Q, R, P, K, P0; // System dimensions uint8_t m, n; // Initial and current time double t0, t; // Discrete time step double dt; // n-size identity mat I; // Estimated states mat x_hat, x_hat_new; }; #endif ================================================ FILE: experimental_testing/RobotArduinoTesting/kalman_test/kalman_test.ino ================================================ #include "KalmanFilter.h" #include "LinearAlgebra.h" mat A = mat::identity(6); mat B = mat::zeros(6,3); mat C = mat::zeros(3,6); mat Q = mat::identity(6); mat R = mat::zeros(3,3); mat P = mat::identity(6); KalmanFilter kf = KalmanFilter(0.1, A, B, C, Q, R, P); void setup() { A(3,3) = 0; A(4,4) = 0; A(5,5) = 0; B(3,0) = 1; B(4,1) = 1; B(5,2) = 1; C(0,0) = 1; C(1,1) = 1; C(2,2) = 1; kf.init(); } void loop() { double dt = 0.1; A(0,3) = dt; A(1,4) = dt; A(2,6) = dt; mat u = mat::ones(1,3); mat y = mat::ones(1,3); kf.predict(dt, A, u); kf.update(y); mat x = kf.state(); String s; x.print(s); } ================================================ FILE: experimental_testing/RobotArduinoTesting/lifter_nano_test/lifter_nano_test.ino ================================================ #define LIFTER_PIN_0 1 #define LIFTER_PIN_1 2 #define LIFTER_PIN_2 3 #define FEEDBACK_PIN 4 void setup () { Serial.begin(115200); pinMode(LIFTER_PIN_0, OUTPUT); pinMode(LIFTER_PIN_1, OUTPUT); pinMode(LIFTER_PIN_2, OUTPUT); pinMode(FEEDBACK_PIN, INPUT_PULLUP); } int getCommand() { String instr = ""; int outCmd = 0; if(Serial.available()) { instr = Serial.readString(); outCmd = instr.toInt(); } return outCmd; } 
void sendPos(int num) { int p2 = num / 4; int r = num % 4; int p1 = r / 2; r = r % 2; int p0 = r / 1; digitalWrite(LIFTER_PIN_0, p0); digitalWrite(LIFTER_PIN_1, p1); digitalWrite(LIFTER_PIN_2, p2); } bool getFeedback() { return !digitalRead(FEEDBACK_PIN); } void loop() { int cmd = getCommand(); bool in_progress = getFeedback(); if(!in_progress && cmd > 0) { sendPos(cmd); } delay(100); } ================================================ FILE: experimental_testing/RobotArduinoTesting/motor_driver_test_script/motor_driver_test_script.ino ================================================ #include "ClearCore.h" #define LIFTER_MOTOR ConnectorM3 #define INCREMENTAL_UP_PIN DI7 #define INCREMENTAL_DOWN_PIN DI6 #define LATCH_SERVO_PIN IO0 // Only IO0 does pwm #define HOMING_SWITCH_PIN IO4 #define LIFTER_STEPS_PER_REV 800 #define LIFTER_MAX_VEL 7 // revs/sec #define LIFTER_MAX_ACC 10 // rev/sec^2 // Max num steps ~51000 #define SAFETY_MAX_POS 120 // Revs, Sanity check on desired position to make sure it isn't larger than this #define SAFETY_MIN_POS 0 // Revs, Sanity check on desired position to make sure it isn't less than this #define LATCH_ACTIVE_MS 2000 #define LATCH_OPEN_DUTY_CYCLE 50 #define LATCH_CLOSE_DUTY_CYCLE 200 void setup() { pinMode(INCREMENTAL_UP_PIN, INPUT); pinMode(INCREMENTAL_DOWN_PIN, INPUT); pinMode(HOMING_SWITCH_PIN, INPUT); pinMode(LATCH_SERVO_PIN, OUTPUT); // Sets the input clocking rate. This normal rate is ideal for ClearPath // step and direction applications. MotorMgr.MotorInputClocking(MotorManager::CLOCK_RATE_LOW); // Sets all motor connectors into step and direction mode. 
MotorMgr.MotorModeSet(MotorManager::MOTOR_ALL, Connector::CPM_MODE_STEP_AND_DIR); LIFTER_MOTOR.VelMax(LIFTER_MAX_VEL*LIFTER_STEPS_PER_REV); LIFTER_MOTOR.AccelMax(LIFTER_MAX_ACC*LIFTER_STEPS_PER_REV); LIFTER_MOTOR.HlfbMode(MotorDriver::HLFB_MODE_STATIC); LIFTER_MOTOR.PolarityInvertSDEnable(true); LIFTER_MOTOR.EnableRequest(true); Serial.begin(115200); Serial.println("Test script starting"); } void test_motor() { bool vel_up = digitalRead(INCREMENTAL_UP_PIN); bool vel_down = digitalRead(INCREMENTAL_DOWN_PIN); Serial.print("Up: "); Serial.print(vel_up); Serial.print(", down: "); Serial.println(vel_down); if(vel_up) { LIFTER_MOTOR.MoveVelocity(LIFTER_MAX_VEL*LIFTER_STEPS_PER_REV); Serial.println(LIFTER_MOTOR.PositionRefCommanded()); } else if(vel_down) { LIFTER_MOTOR.MoveVelocity(-1*LIFTER_MAX_VEL*LIFTER_STEPS_PER_REV); Serial.println(LIFTER_MOTOR.PositionRefCommanded()); } else { LIFTER_MOTOR.MoveStopAbrupt(); } } void test_homing_switch() { bool switch_active = !digitalRead(HOMING_SWITCH_PIN); if(switch_active) { Serial.print("Homing switch: "); Serial.println(switch_active); LIFTER_MOTOR.PositionRefSet(0); } } void test_servo() { Serial.println("Servo open"); analogWrite(LATCH_SERVO_PIN, LATCH_OPEN_DUTY_CYCLE); delay(1000); Serial.println("Servo closed"); analogWrite(LATCH_SERVO_PIN, LATCH_CLOSE_DUTY_CYCLE); delay(1000); } void loop() { test_motor(); test_homing_switch(); //test_servo(); delay(100); } ================================================ FILE: experimental_testing/RobotArduinoTesting/new_dc_motor_tuning/Motor.cpp ================================================ #include "Motor.h" #include "constants.h" constexpr double COUNTS_TO_RADS = 2.0 * PI / static_cast(COUNTS_PER_OUTPUT_SHAFT_REV); Motor::Motor(int pwmPin, int dirPin, int encPinA, int encPinB, double Kp, double Ki, double Kd) : pwmPin_(pwmPin), dirPin_(dirPin), inputVel_(0.0), currentVelRaw_(0.0), currentVelFiltered_(0.0), pidOut_(0.0), outputCmd_(0.0), prevCount_(0), prevMicros_(micros()), 
enc_(encPinA, encPinB), controller_(¤tVelFiltered_, &pidOut_, &inputVel_, Kp, Ki, Kd, DIRECT), velFilter_(LOWPASS, VEL_FILTER_FREQ) { pinMode(pwmPin_, OUTPUT); pinMode(dirPin_, OUTPUT); controller_.SetMode(AUTOMATIC); controller_.SetOutputLimits(-255, 255); controller_.SetSampleTime(5); } void Motor::setCommand(double vel) { inputVel_ = vel; } float Motor::getCurrentVelocity() { return static_cast(currentVelFiltered_); } long Motor::getCounts() { return enc_.read(); } void Motor::runLoop(bool pub) { // Read current values long curCount = -1*enc_.read(); unsigned long curMicros = micros(); // Compute delta long deltaCount = curCount - prevCount_; double deltaRads = static_cast(deltaCount) * COUNTS_TO_RADS; unsigned long deltaMicros = curMicros - prevMicros_; if(deltaMicros == 0) { // This will break things due to div by 0, so just skip return; } // Copy current values into previous prevCount_ = curCount; prevMicros_ = curMicros; // Do a check for large time deltas. This likely means the controller hasn't run for a while and we should ignore this value if(deltaMicros > 100000) { return; } // Compute current velocity in rads/second currentVelRaw_ = 1000000.0 * deltaRads / static_cast(deltaMicros); currentVelFiltered_ = currentVelRaw_; // velFilter_.input(currentVelRaw_); // Run PID controller controller_.Compute(); /* Use output from PID to update our current command. Since this is a velocity controller, when the error is 0 * and the PID controller drives the output to 0, we actually want to maintain a certian PWM value. Hence, the * PID output is used to modify our control singal and not drive it directly. 
*/ outputCmd_ += int(pidOut_); //outputCmd_ = static_cast(pidOut_ + inputVel_ * 10); // Set a deadband region based on input vel to avoid integral windup if(fabs(inputVel_) < 0.001) { outputCmd_ = 0; } // Make sure we don't exceed max/min power if(outputCmd_ > 255) { outputCmd_ = 255; } else if(outputCmd_ < -255) { outputCmd_ = -255; } // Update output direction if(outputCmd_ < 0) { digitalWrite(dirPin_,1); } else { digitalWrite(dirPin_,0); } // Actually write out the motor power analogWrite(pwmPin_, abs(outputCmd_)); if (pub) { // Debugging prints //Serial.print(deltaMicros); //Serial.print(" "); //Serial.print(curCount); //Serial.print(" "); Serial.print("currentVelRaw_:"); Serial.print(currentVelRaw_, 5); Serial.print(" "); Serial.print("currentVelFiltered_:"); Serial.print(currentVelFiltered_, 5); Serial.print(" "); Serial.print("inputVel_:"); Serial.print(inputVel_, 5); Serial.print(" "); //Serial.print(pidOut_, 5); //Serial.print(" "); Serial.print("outputCmd_:"); Serial.print(outputCmd_/10.0); Serial.println(""); } } ================================================ FILE: experimental_testing/RobotArduinoTesting/new_dc_motor_tuning/Motor.h ================================================ #ifndef Motor_h #define Motor_h #include #include #include class Motor { public: /* * Motor object constructor * pwmPin - which pin the motor PWM is connected to * dirPin - which pin the motor direction is connected to * encPinA - which pin the encoder wire A is connected to. This should be a pin capable of handling interrupts * encPinB - which pin the encoder wire B is connected to. This should ideally (but not required) be a pin capable of handling interrupts * Kp - Proportional gain * Ki - Integral gain * Kd - Derrivative gain */ Motor(int pwmPin, int dirPin, int encPinA, int encPinB, double Kp, double Ki, double Kd); /* * Set the desired velocity in rad/second */ void setCommand(double vel); /* * Run the actual controller. 
// Velocity limits
#define MAX_TRANS_SPEED_FINE 0.08 // m/s
#define MAX_ROT_SPEED_FINE 0.5 // rad/s
#define MAX_TRANS_SPEED_COARSE 0.5 // m/s
#define MAX_ROT_SPEED_COARSE 1.0 // rad/s
// Position accuracy targets
position for close in degrees #define LATCH_OPEN_POS 150 // Latch servo position for open in degrees #define TRAY_SERVO_MIN_PW 1000 // Min pulse witdh in microseconds corresponding to 0 position #define TRAY_SERVO_MAX_PW 2000 // Max pulse witdh in microseconds corresponding to 180 position #define TRAY_PLACEMENT_PAUSE_TIME 3000 // How many ms to wait after opening the latch for placement enum COMMAND { NONE, MOVE, MOVE_REL, MOVE_FINE, PLACE_TRAY, LOAD_TRAY, INITIALIZE_TRAY, POSITION, ESTOP, LOAD_COMPLETE, }; #endif ================================================ FILE: experimental_testing/RobotArduinoTesting/new_dc_motor_tuning/new_dc_motor_tuning.ino ================================================ #include "constants.h" #include "Motor.h" double Kp = 14; double Ki = 0.1; double Kd = 0.08; Motor m = Motor(PIN_PWM_2, PIN_DIR_2, PIN_ENC_A_2, PIN_ENC_B_2, Kp, Ki, Kd); int step = 0; unsigned long prevStepTime = millis(); unsigned long prevLoopTime = millis(); double cur_speed = 0; double max_vel = 2; double max_acc = 5; double max_acc_time = 1000 * max_vel / max_acc; template int sgn(T val) { return (T(0) < val) - (val < T(0)); } void setup() { pinMode(PIN_ENABLE_3, OUTPUT); digitalWrite(PIN_ENABLE_3, HIGH); Serial.begin(115200); //Serial.println("Start motor tuning"); } double computeAcc(double dt, double acc) { cur_speed += dt/1000.0 * acc; if (fabsf(cur_speed) > max_vel) { cur_speed = sgn(cur_speed) * max_vel; } } void runAccStep(int waitTime, double acc) { unsigned long curTime = millis(); unsigned long stepTime = curTime - prevStepTime; if (stepTime > waitTime) { step += 1; prevStepTime = millis(); prevLoopTime = millis(); } else { unsigned long loopTime = curTime - prevLoopTime; computeAcc(loopTime, acc); prevLoopTime = millis(); } } double runCVStep(int waitTime, double vel) { unsigned long curTime = millis(); unsigned long stepTime = curTime - prevStepTime; if (stepTime > waitTime) { step += 1; prevStepTime = millis(); prevLoopTime = millis(); } else { 
cur_speed = vel; } } double runSteps() { if (step == 0) { int stepTime = max_acc_time; double stepAcc = max_acc; runAccStep(stepTime, stepAcc); } else if (step == 1) { int stepTime = 2000; runCVStep(stepTime, max_vel); } else if (step == 2) { int stepTime = max_acc_time; double stepAcc = -1*max_acc; runAccStep(stepTime, stepAcc); } else if (step == 3) { int stepTime = 2000; runCVStep(stepTime, 0); } else if (step == 4) { int stepTime = max_acc_time; double stepAcc = -1 * max_acc; runAccStep(stepTime, stepAcc); } else if (step == 5) { int stepTime = 2000; runCVStep(stepTime, -1*max_vel); } else if (step == 6) { int stepTime = max_acc_time; double stepAcc = max_acc; runAccStep(stepTime, stepAcc); } else if (step == 7) { int stepTime = 2000; runCVStep(stepTime, 0); } else if (step >= 8) { step = 0; } } void loop() { runSteps(); m.setCommand(cur_speed); bool motor_pub = true; if(step == 3 || step == 7) {motor_pub = false;} m.runLoop(motor_pub); // analogWrite(PIN_PWM_2, s); // Serial.println(s); // s += 1; delay(15); } ================================================ FILE: experimental_testing/RobotArduinoTesting/single_motor_speed_control/Motor.cpp ================================================ #include "Motor.h" #include Motor::Motor(int pwmPin, int dirPin, int encPinA, int encPinB, double Kp, double Ki, double Kd, float velFilterFreq) : pwmPin_(pwmPin), dirPin_(dirPin), inputVel_(0.0), currentVelRaw_(0.0), currentVelFiltered_(0.0), outputCmd_(0.0), prevCount_(0), prevMillis_(millis()), enc_(encPinA, encPinB), controller_(¤tVelFiltered_, &outputCmd_, &inputVel_, Kp, Ki, Kd, DIRECT), velFilter_(LOWPASS, velFilterFreq) { pinMode(pwmPin_, OUTPUT); pinMode(dirPin_, OUTPUT); controller_.SetMode(AUTOMATIC); controller_.SetOutputLimits(-255, 255); controller_.SetSampleTime(5); } void Motor::setCommand(double vel) { inputVel_ = vel; } void Motor::runLoop() { // Read current values long curCount = enc_.read(); unsigned long curMillis = millis(); // Compute delta long 
deltaCount = curCount - prevCount_; double deltaRevs = static_cast(deltaCount) / COUNTS_PER_SHAFT_REV; unsigned long deltaMillis = curMillis - prevMillis_; // Copy current values into previous prevCount_ = curCount; prevMillis_ = curMillis; // Do a check for large time deltas. This likely means the controller hasn't run for a while and we should ignore this value if(deltaMillis > 100) { return; } // Compute current velocity in revs/second currentVelRaw_ = 1000.0 * deltaRevs / static_cast(deltaMillis); currentVelFiltered_ = velFilter_.input(currentVelRaw_); // Run PID controller controller_.Compute(); // Update output values if(outputCmd_ < 0) { digitalWrite(dirPin_,0); } else { digitalWrite(dirPin_,1); } analogWrite(pwmPin_, abs(outputCmd_)); // Serial.print(curCount); // Serial.print(" "); // Serial.print(currentVelRaw_); // Serial.print(" "); Serial.print(currentVelFiltered_); Serial.print(" "); Serial.print(inputVel_); Serial.print(" "); Serial.print(outputCmd_/255.0); // Serial.print(" "); // Serial.print(deltaMillis); Serial.println(""); } ================================================ FILE: experimental_testing/RobotArduinoTesting/single_motor_speed_control/Motor.h ================================================ /* Requires libraries: * Filters: https://github.com/JonHub/Filters * PID: https://playground.arduino.cc/Code/PIDLibrary/ * Encoder: https://www.pjrc.com/teensy/td_libs_Encoder.html */ #ifndef Motor_h #define Motor_h #include #include #include const unsigned int COUNTS_PER_MOTOR_REV = 44; const unsigned int MOTOR_GEAR_RATIO = 40; const double FUDGE_FACTOR = 1.5; const double COUNTS_PER_SHAFT_REV = COUNTS_PER_MOTOR_REV * MOTOR_GEAR_RATIO * FUDGE_FACTOR; class Motor { public: /* * Motor object constructor * pwmPin - which pin the motor PWM is connected to * dirPin - which pin the motor direction is connected to * encPinA - which pin the encoder wire A is connected to. 
This should be a pin capable of handling interrupts * encPinB - which pin the encoder wire B is connected to. This should ideally (but not required) be a pin capable of handling interrupts * Kp - Proportional gain * Ki - Integral gain * Kd - Derrivative gain * velFilterFreq - frequency to use for velocity lowpass filter */ Motor(int pwmPin, int dirPin, int encPinA, int encPinB, double Kp, double Ki, double Kd, float velFilterFreq); /* * Set the desired velocity in revs/second */ void setCommand(double vel); /* * Run the actual controller. Make sure this gets called reasonably quickly (i.e. every 20 ms or so) */ void runLoop(); private: // Pins int pwmPin_; int dirPin_; // Values for computation double inputVel_; // Desired velocity in revs/sec double currentVelRaw_; // Raw current velocity in revs/sec double currentVelFiltered_; double outputCmd_; // Output command in [-255, 255] long prevCount_; // Encoder count from previous loop // Timer unsigned long prevMillis_; // Timer from previous loop // Other objects Encoder enc_; // Encoder object PID controller_; // PID controller FilterOnePole velFilter_; // Velocity lowpass filter }; #endif Motor_h ================================================ FILE: experimental_testing/RobotArduinoTesting/single_motor_speed_control/single_motor_speed_control.ino ================================================ /* Requires libraries: * Filters: https://github.com/JonHub/Filters * PID: https://playground.arduino.cc/Code/PIDLibrary/ * Encoder: https://www.pjrc.com/teensy/td_libs_Encoder.html */ #include "Motor.h" // Pinouts #define PIN_ENABLE 52 #define PIN_BL_PWM 6 #define PIN_BL_DIR 48 #define PIN_BL_ENC_A 19 #define PIN_BL_ENC_B 29 #define VEL_FILTER_FREQ 10 // HZ const double Kp = 200; const double Ki = 2000; const double Kd = 0; Motor myMotor = Motor(PIN_BL_PWM, PIN_BL_DIR, PIN_BL_ENC_A, PIN_BL_ENC_B, Kp, Ki, Kd, VEL_FILTER_FREQ); void setup() { // Setup pin modes pinMode(PIN_ENABLE,OUTPUT); Serial.begin(250000); } void 
setMotorCommands(float rps) { int curDirection = 1; float curSpeed = curDirection * rps; // Serial.print("Direction "); // Serial.print(curDirection); // Serial.print(", Speed "); // Serial.println(curSpeed); myMotor.setCommand(curSpeed); digitalWrite(PIN_ENABLE, 1); } void runMotors() { unsigned long prevTime = millis(); unsigned long startTime = prevTime; int targetDelta = 15; int targetTotalTime = 3000; while (millis() - startTime < targetTotalTime) { if(millis() - prevTime > targetDelta) { prevTime = millis(); myMotor.runLoop(); } } } void loop() { // // Get revolutions per second // Serial.println("Please enter target revolutions per second"); // bool gotInput = false; // float rps = 0.0; // while(!gotInput) // { // if(Serial.available() > 0) // { // rps = Serial.parseFloat(); // if(rps == 0) // { // continue; // } // else // { // break; // } // } // delay(10); // } delay(1000); float rps = 0.5; setMotorCommands(rps); runMotors(); // Stop motors digitalWrite(PIN_ENABLE, 0); } ================================================ FILE: experimental_testing/RobotArduinoTesting/stepper_nano_test/stepper_nano_test.ino ================================================ // Uses stepper driver library from here: https://www.airspayce.com/mikem/arduino/AccelStepper/classAccelStepper.html #include #define STEP_PIN 2 #define DIR_PIN 1 // Note that the DIR pin is actually controlled by the mster arduino so this won't be connected to anything #define MILLIS_BETWEEN_PRINTING 100 const int max_vel = 800; //steps/second AccelStepper motor; unsigned long count = 0; unsigned long prevMillisPrint; void setup() { Serial.begin(115200); Serial.println("Starting..."); motor = AccelStepper(AccelStepper::DRIVER, STEP_PIN, DIR_PIN); motor.setMaxSpeed(max_vel); motor.setAcceleration(max_vel); motor.setMinPulseWidth(10); // Min from docs is 2.5 microseconds prevMillisPrint = millis(); motor.move(800); } void loop() { if(motor.isRunning() && millis() - prevMillisPrint > MILLIS_BETWEEN_PRINTING) 
{ Serial.print(motor.speed()); Serial.print(", "); Serial.println(motor.currentPosition()); prevMillisPrint = millis(); count = 0; } motor.run(); count++; } ================================================ FILE: experimental_testing/RobotArduinoTesting/test_encoders/test_encoders.ino ================================================ // Pins #define PIN_ENCA_1 21 #define PIN_ENCA_2 20 #define PIN_ENCA_3 19 #define PIN_ENCA_4 18 #define PIN_ENCB_1 25 #define PIN_ENCB_2 23 #define PIN_ENCB_3 24 #define PIN_ENCB_4 22 #include Encoder e[4] = { Encoder(PIN_ENCA_1, PIN_ENCB_1), Encoder(PIN_ENCA_2, PIN_ENCB_2), Encoder(PIN_ENCA_3, PIN_ENCB_3), Encoder(PIN_ENCA_4, PIN_ENCB_4) } ; void setup() { // put your setup code here, to run once: Serial.begin(115200); Serial.println("Encoder Test:"); } void loop() { Serial.print("Pos: ["); for (int i = 0; i < 4; ++i ) { Serial.print(e[i].read()); Serial.print(", "); } Serial.println("]"); // if a character is sent from the serial monitor, // reset both back to zero. 
if (Serial.available()) { Serial.read(); Serial.println("Reset position to zero"); e[0].write(0); e[1].write(0); e[2].write(0); e[3].write(0); } delay(100); } ================================================ FILE: experimental_testing/TrajGen.py ================================================ # Testing out trajectory generation where I have a gui import math import numpy as np from matplotlib import pyplot as plt # Parameters MAX_VEL = 3.0 # m/s MAX_ACC = 2.0 # m/s^2 # Find min time and position change to reach full speed T_ACC = MAX_VEL / MAX_ACC print("T_ACC: {}".format(T_ACC)) P_ACC = 0.5 * MAX_ACC * T_ACC * T_ACC print("P_ACC: {}".format(P_ACC)) def sign(n): if n > 0: return 1 elif n < 0: return -1 else: return 0 def gen_traj2(p1, p2, dt): position_change = abs(p2 - p1) # Determine if position change is less than required for steady state if position_change < 2*P_ACC: return gen_triangle_traj(p1, p2, dt) else: return gen_trap_traj(p1, p2, dt) def gen_triangle_traj(p1, p2, dt): turn_pt = (p2 - p1)/2 dir = sign(p2 - p1) traj = {'time': [0], 'pos': [p1], 'vel': [0], 'acc': [dir * MAX_ACC]} gen_const_acc_until_pos(traj, dt, dir*MAX_ACC, turn_pt, dir) gen_const_acc_until_pos(traj, dt, -1*dir*MAX_ACC, p2, dir) return traj def gen_trap_traj(p1, p2, dt): dir = sign(p2 - p1) max_vel_pt = dir*P_ACC slow_down_pt = p2 - dir * P_ACC traj = {'time': [0], 'pos': [p1], 'vel': [0], 'acc': [dir * MAX_ACC]} gen_const_acc_until_pos(traj, dt, dir*MAX_ACC, max_vel_pt, dir) gen_const_acc_until_pos(traj, dt, 0, slow_down_pt, dir) gen_const_acc_until_pos(traj, dt, -1*dir*MAX_ACC, p2, dir) return traj def gen_const_acc_until_pos(traj, dt, acc, stop_pos, stop_dir): """ Generate constant acceleration trajectory """ n_iter = 0 while True: new_vel = traj['vel'][-1] + acc * dt new_pos = traj['pos'][-1] + traj['vel'][-1] * dt new_time = traj['time'][-1] + dt if stop_dir > 0 and new_vel < 0: break if stop_dir < 0 and new_vel > 0: break if stop_dir > 0 and new_pos >= stop_pos: break elif 
stop_pos < 0 and new_pos <= stop_pos: break elif n_iter > 100000: raise StopIteration("Too many iterations") traj['time'].append(new_time) traj['pos'].append(new_pos) traj['vel'].append(new_vel) traj['acc'].append(acc) n_iter += 1 return traj def plot_traj(pvt_data): """ Makes a plot of position, velocity, and acceleration versus time """ ax = plt.axes() ax.plot(pvt_data["time"], pvt_data["pos"],'r',label='Position') ax.plot(pvt_data["time"], pvt_data["vel"],'g',label='Velocity') ax.plot(pvt_data["time"], pvt_data["acc"],'b',label='Acceleration') ax.legend() plt.show() if __name__ == '__main__': p1 = 0 p2 = 20 dt = 0.05 pvt = gen_traj2(p1, p2, dt) plot_traj(pvt) ================================================ FILE: experimental_testing/TrajGenv2.py ================================================ import math import numpy as np from matplotlib import pyplot as plt p_target = 20.0 # m v_max = 1.0 #m/s a_max = 2.0 #m/s^2 j_max = 8.0 #m/s^3 alpha = 0.8 # velocity decay beta = 0.8 # acceleation decay loop_limit = 10 # max loops for convergence plot_timestep = 0.01 def generate(target_position): v_lim = v_max a_lim = a_max j_lim = j_max output = {} output['done'] = False for i in range(loop_limit): print("Starting loop {}".format(i)) output = generate_once(target_position, v_lim, a_lim, j_lim) if output['done']: break else: v_lim = output['v_lim'] a_lim = output['a_lim'] j_lim = output['j_lim'] if output['done']: print("Trajectory found") return output else: print("Trajectory not found") return {} def generate_once(p, v_lim, a_lim, j_lim): output = {} output['done'] = False output['v_lim'] = v_lim output['a_lim'] = a_lim output['j_lim'] = j_lim output['t'] = [] # Constant jerk regions dt_j = a_lim / j_lim print("dt_j = {}".format(dt_j)) dv_j = 0.5 * j_lim * dt_j ** 2 # Positive jerk region dp_j1 = 1/6.0 * j_lim * dt_j ** 3 # Positive jerk region dp_j2 = (v_lim - dv_j) * dt_j + 0.5 * a_lim * dt_j ** 2 - 1/6 * j_lim * dt_j ** 3 # Negative jerk region print("dv_j: {}, 
dp_j1: {}, dp_j2: {}".format(dv_j, dp_j1, dp_j2)) # Constant accel region dt_a = (v_lim - 2*dv_j ) / a_lim print("dt_a = {}".format(dt_a)) if dt_a <= 0: output['a_lim'] = a_lim * beta print("New a_lim = {}".format(output['a_lim'] )) return output dp_a = dv_j * dt_a + 0.5 * a_lim * dt_a ** 2 print("dp_a: {}".format(dp_a)) # Constant vel region dt_v = (p - 2 * dp_j1 - 2 * dp_j2 - 2 * dp_a) / v_lim print("dt_v = {}".format(dt_v)) if dt_v <= 0: output['v_lim'] = alpha * v_lim print("New v_lim = {}".format(output['v_lim'] )) return output dp_v = v_lim * dt_v print("dp_v: {}".format(dp_v)) # If we get here, the genertation should be correct, so compute time regions and return output['done'] = True t = [0,0,0,0,0,0,0,0] t[0] = 0 t[1] = t[0] + dt_j t[2] = t[1] + dt_a t[3] = t[2] + dt_j t[4] = t[3] + dt_v t[5] = t[4] + dt_j t[6] = t[5] + dt_a t[7] = t[6] + dt_j output['t'] = t print("Times: {}".format(t)) return output def generate_inverse(p, dt_j, dt_a, dt_v): output = {} output['done'] = True t = [0,0,0,0,0,0,0,0] t[0] = 0 t[1] = t[0] + dt_j t[2] = t[1] + dt_a t[3] = t[2] + dt_j t[4] = t[3] + dt_v t[5] = t[4] + dt_j t[6] = t[5] + dt_a t[7] = t[6] + dt_j output['t'] = t # Solve system of equations to get kinematic limits A = np.array([ [dt_j, -1, 0 ], [dt_j ** 2, dt_a, -1 ], [(dt_j ** 2) * (dt_a - dt_j), dt_j ** 2 + dt_a ** 2, dt_v + 2* dt_j ]]) b = np.array([0, 0, p]) lims = np.linalg.solve(A, b) output['v_lim'] = lims[2] output['a_lim'] = lims[1] output['j_lim'] = lims[0] return output def generate_profile_from_params(output, timestep): j_lim = output['j_lim'] a_lim = output['a_lim'] v_lim = output['v_lim'] T = output['t'] t_vals = np.arange(0, T[7], timestep) p = [0] v = [0] a = [0] j = [j_lim] for t in t_vals[1:]: if t >= T[0] and t < T[1]: p.append(p[-1] + v[-1] * timestep + 0.5 * a[-1] * timestep **2 + 1/6.0 * j[-1] * timestep ** 3) v.append(v[-1] + a[-1] * timestep + 0.5* j[-1] * timestep ** 2) a.append(a[-1] + j[-1] * timestep) j.append(j_lim) elif t >= T[1] and t 
< T[2]: p.append(p[-1] + v[-1] * timestep + 0.5 * a[-1] * timestep **2 + 1/6.0 * j[-1] * timestep ** 3) v.append(v[-1] + a[-1] * timestep + 0.5* j[-1] * timestep ** 2) a.append(a_lim) j.append(0) elif t >= T[2] and t < T[3]: p.append(p[-1] + v[-1] * timestep + 0.5 * a[-1] * timestep **2 + 1/6.0 * j[-1] * timestep ** 3) v.append(v[-1] + a[-1] * timestep + 0.5* j[-1] * timestep ** 2) a.append(a[-1] + j[-1] * timestep) j.append(-j_lim) elif t >= T[3] and t < T[4]: p.append(p[-1] + v[-1] * timestep + 0.5 * a[-1] * timestep **2 + 1/6.0 * j[-1] * timestep ** 3) v.append(v_lim) a.append(0) j.append(0) elif t >= T[4] and t < T[5]: p.append(p[-1] + v[-1] * timestep + 0.5 * a[-1] * timestep **2 + 1/6.0 * j[-1] * timestep ** 3) v.append(v[-1] + a[-1] * timestep + 0.5* j[-1] * timestep ** 2) a.append(a[-1] + j[-1] * timestep) j.append(-j_lim) elif t >= T[5] and t < T[6]: p.append(p[-1] + v[-1] * timestep + 0.5 * a[-1] * timestep **2 + 1/6.0 * j[-1] * timestep ** 3) v.append(v[-1] + a[-1] * timestep + 0.5* j[-1] * timestep ** 2) a.append(-a_lim) j.append(0) elif t >= T[6] and t < T[7]: p.append(p[-1] + v[-1] * timestep + 0.5 * a[-1] * timestep **2 + 1/6.0 * j[-1] * timestep ** 3) v.append(v[-1] + a[-1] * timestep + 0.5* j[-1] * timestep ** 2) a.append(a[-1] + j[-1] * timestep) j.append(j_lim) return (t_vals, p, v, a, j) def plot_data(t,p,v,a,j, output): ax = plt.axes() ax.plot(t, p,'r',label='Position', linewidth=4) ax.plot(t, v,'g',label='Velocity', linewidth=4) ax.plot(t, a,'b',label='Acceleration', linewidth=4) ax.plot(t, j,'k',label='Jerk', linewidth=4) ax.legend() ax.set_xlabel('Time (s)') ax.set_ylabel("Value (m, m/s, m/s^2, m/s^3)") ax.set_title("Trajectory Generation\nDist: {} m, Vel limit: {} m/s, Acc limit: {} m/s^2, Jerk limit: {} m/s^3".format(p_target, v_max, a_max, j_max)) # Plot vertical lines # ax.set_ylabel("Velocity (m/s)") # ax.set_ylim((-0.5, 2)) # t_vert = output['t'] # y_vert = np.linspace(-10.0, 10.0, 100) # for new_t in t_vert: # ax.plot([new_t]*100, 
y_vert, 'k--', linewidth=3) # ax.grid(True) # ax.text(t_vert[0] + (t_vert[1] - t_vert[0])/6.0, -0.2, "Constant Jerk", fontsize='x-large') # ax.text(t_vert[1] + (t_vert[2] - t_vert[1])/6.0, -0.1, "Constant Accel", fontsize='x-large') # ax.text(t_vert[2] + (t_vert[3] - t_vert[2])/6.0, -0.2, "Constant Jerk", fontsize='x-large') # ax.text(t_vert[3] + (t_vert[4] - t_vert[3])/3.0, 0.0, "Constant Velocity", fontsize='x-large') # ax.text(t_vert[4] + (t_vert[5] - t_vert[4])/6.0, -0.2, "Constant Jerk", fontsize='x-large') # ax.text(t_vert[5] + (t_vert[6] - t_vert[5])/6.0, -0.1, "Constant Accel", fontsize='x-large') # ax.text(t_vert[6] + (t_vert[7] - t_vert[6])/6.0, -0.2, "Constant Jerk", fontsize='x-large') plt.show() def plot_trapazoidal_profile(): n_divisions = 91 a_steps = int(n_divisions/3) t = np.linspace(0, 5, n_divisions) a = [1]*a_steps + [0]*a_steps + [-1]*a_steps + [0] v = [0] p = [0] for i in range(1, n_divisions): dt = t[i] - t[i-1] new_v = v[i-1] + a[i-1] * dt new_p = p[i-1] + v[i-1] * dt + 0.5 * a[i-1] * dt * dt v.append(new_v) p.append(new_p) ax = plt.axes() # ax.plot(t, p,'r',label='Position') ax.plot(t, v,'g',label='Velocity',linewidth=5) # ax.plot(t, a,'b',label='Acceleration') ax.set_xlabel('Time (s)') ax.set_ylabel("Velocity (m/s)") ax.legend() # Plot vertical lines t_vert = [t[0], t[a_steps], t[2*a_steps], t[3*a_steps]] y_vert = np.linspace(-10.0, 10.0, 100) for new_t in t_vert: ax.plot([new_t]*100, y_vert, 'k--', linewidth=3) ax.set_ylim((-0.5, 3)) ax.grid(True) ax.text(t_vert[0] + (t_vert[1] - t_vert[0])/3.0, -0.2, "Constant Accel", fontsize='x-large') ax.text(t_vert[1] + (t_vert[2] - t_vert[1])/3.0, -0.1, "Constant Velocity", fontsize='x-large') ax.text(t_vert[2] + (t_vert[3] - t_vert[2])/3.0, -0.2, "Constant Accel", fontsize='x-large') plt.show() if __name__ == '__main__': output = generate(p_target) if output: # Test inverse dt_j = output['t'][1] dt_a = output['t'][2] - output['t'][1] dt_v = output['t'][4] - output['t'][3] output2 = 
def generateVisionOffsetMap(cfg, max_x, max_y):
    """Build a (tile_x, tile_y) -> vision offset map from the offsets CSV.

    Every tile starts at cfg.default_vision_offset. Each data row of
    cfg.vision_offset_file then adds (x_mm, y_mm, angle_deg) to the tiles
    it selects; an 'X' in the x or y column selects every tile along that
    axis. X/Y offsets in the file are integer millimetres (converted to
    metres here); the angle column is degrees.
    """
    offset_map = {}
    for x in range(max_x):
        for y in range(max_y):
            offset_map[(x, y)] = cfg.default_vision_offset

    with open(cfg.vision_offset_file) as csvfile:
        for idx, row in enumerate(csv.reader(csvfile)):
            if idx == 0:
                continue  # skip the header row
            xs = range(max_x) if row[0] == 'X' else [int(row[0])]
            ys = range(max_y) if row[1] == 'X' else [int(row[1])]
            delta = np.array((int(row[2]) / 1000.0,
                              int(row[3]) / 1000.0,
                              float(row[4])))
            for tx in xs:
                for ty in ys:
                    offset_map[(tx, ty)] = offset_map[(tx, ty)] + delta
    return offset_map
* self.n_tiles_y image_array = np.zeros((array_size_x, array_size_y, 3)) # Generate image for tile in self.tiles: tile.draw(image_array) # Modify array to show image correctly image_array = np.transpose(image_array, (1, 0, 2)) image_array = np.flip(image_array, 0) # Actually show image plt.imshow(image_array) figManager = plt.get_current_fig_manager() figManager.window.state('zoomed') plt.show() def show_tile_ordering(self): # Build array of order to show and write order_array = np.zeros((self.n_tiles_x,self.n_tiles_y)) # print(self.n_tiles_x) # print(self.n_tiles_y) for tile in self.tiles: # print(tile.coordinate) order_array[tile.coordinate] = tile.order # Modify array to show image correctly order_array = np.transpose(order_array, (1, 0)) # Show figure with colors fig = plt.figure() ax = fig.add_subplot(111) im = ax.imshow(order_array, origin='lower', cmap='cool', interpolation='None') for x in range(self.n_tiles_x): for y in range(self.n_tiles_y): label = int(order_array[y, x]) ax.text(x, y, label, color='black', ha='center', va='center') fig.colorbar(im) plt.show() def _generateField(self): # Load original image img = mpimg.imread(self.cfg.image_name) # Skip A value for RGBA files if img.shape[2] is 4: img = img[:,:,:3] # Do some modifications to MR logo to get it the right shape if self.cfg.MR_LOGO_PLAN: # Crop vertical vert_size = img.shape[0] crop_factor = 0.4 lb = int(vert_size/2 - crop_factor*vert_size) ub = int(vert_size/2 + crop_factor*vert_size) img = img[lb:ub,:,:] # Padd horizontal hz_size = img.shape[1] pad_amount = int(0.1 * hz_size) img = np.pad(img, pad_width=[(0, 0), (pad_amount, pad_amount),(0, 0)], mode='constant') # Scaled image img_scaled = sktf.resize(img, (self.cfg.desired_height_dominos, self.cfg.desired_width_dominos), anti_aliasing=False) # Parse image into domino IDs by choosing 'closest' color img_parsed_color = np.zeros((self.cfg.desired_width_dominos * self.cfg.desired_height_dominos, 3)) img_parsed_ids = 
np.zeros_like(img_parsed_color, dtype=np.int_)[:, 0] count = 0 for row in img_scaled: for px in row: best_id = -1 best_score = 9999999 for id, val in enumerate(self.cfg.dominos): color = val[1] score = np.linalg.norm(px - color) if score < best_score: best_score = score best_id = id img_parsed_ids[count] = best_id img_parsed_color[count] = self.cfg.dominos[best_id][1] count = count + 1 img_parsed_ids = img_parsed_ids.reshape((self.cfg.desired_height_dominos, self.cfg.desired_width_dominos)) img_parsed_color = img_parsed_color.reshape((self.cfg.desired_height_dominos, self.cfg.desired_width_dominos, 3)) self.img = img self.img_scaled = img_scaled self.img_parsed_ids = img_parsed_ids self.img_parsed_color = img_parsed_color def _addTile(self, tile_coordinate, tile_values, tile_order, vision_offset_map): vision_offset = vision_offset_map[tile_coordinate] # print("Tile: order {}, coord {}, vision offset: {}".format(tile_order, tile_coordinate, vision_offset)) new_tile = Tile(self.cfg, tile_coordinate, tile_values, tile_order, vision_offset) self.tiles.append(new_tile) def _generateTiles(self): # Check sizes and make sure things line up if self.cfg.desired_height_dominos % self.cfg.tile_height != 0: raise ValueError('Field height is not evenly divisible by tile height!') if self.cfg.desired_width_dominos % self.cfg.tile_width != 0: raise ValueError('Field width is not evenly divisible by tile width!') # Determine number of tiles needed in x and y self.n_tiles_x = int(self.cfg.desired_width_dominos / self.cfg.tile_width) self.n_tiles_y = int(self.cfg.desired_height_dominos / self.cfg.tile_height) print("Generating tiles {} x {}".format(self.n_tiles_x, self.n_tiles_y)) order_map = self._generateTileOrdering(self.n_tiles_x, self.n_tiles_y) vision_offset_map = generateVisionOffsetMap(self.cfg, self.n_tiles_x, self.n_tiles_y) # Loop over tiles and assign id and colors to each for i in range(self.n_tiles_x): x_start_idx = i * self.cfg.tile_width x_end_idx = (i + 1) * 
self.cfg.tile_width for j in range(self.n_tiles_y): # Need to account for flipped y axis with array y_start_idx = - j * self.cfg.tile_height - 1 y_end_idx = - (j + 1) * self.cfg.tile_height - 1 tile_values = np.copy(self.img_parsed_ids[y_start_idx:y_end_idx:-1, x_start_idx:x_end_idx]) tile_coord = (i,j) tile_order = order_map[tile_coord] self._addTile(tile_coord, tile_values, tile_order, vision_offset_map) # Sort tile array so they are in order self.tiles = sorted(self.tiles, key = lambda tile: tile.order) @classmethod def _generateTileOrdering(cls, num_x, num_y): """ Generates and ordering that maps x y tile coordinate -> order number """ # return cls._generateTileOrderingDiagonal(num_x, num_y) return cls._generateTileOrderingColumns(num_x, num_y) @classmethod def _generateTileOrderingColumns(cls, num_x, num_y): def coordToOrderLR(coord, num_x, num_y): return coord[0]*num_y + num_y - coord[1] -1 def coordToOrderRL(coord, num_x, num_y): return (num_x - coord[0] - 1)*num_y + num_y - coord[1] -1 all_coords = [(x,y) for x in range(num_x) for y in range(num_y)] order_map = {coord: coordToOrderRL(coord, num_x, num_y) for coord in all_coords} return order_map @classmethod def _generateTileOrderingDiagonal(cls, num_x, num_y): coord = [num_x - 1, num_y - 1] # Starting at top right order = 0 order_map = {tuple(coord): order} done = False next_pass = np.copy(coord) next_pass[1] = coord[1] - 1 # Next pass starts at far right, next row down order = order + 1 while not done: # If you are at the top or left, go to next pass if coord[1] + 1 >= num_y or coord[0] - 1 < 0: coord = np.copy(next_pass) # Compute new value for next pass if next_pass[1] > 0: next_pass[1] = next_pass[1] - 1 # Next pass starts at far right, next row down elif next_pass[1] == 0: next_pass[0] = next_pass[0] - 1 # If we reach the bottom row, shift to the left instead else: raise ValueError("Whoops! 
Shouldn't ever get here!") else: coord[0] = coord[0] - 1 # Move up and left coord[1] = coord[1] + 1 # Save the order in the map for this coordinate order_map[tuple(coord)] = order order = order + 1 # If we got to 0,0 then we are done if coord[0] == 0 and coord[1] == 0: done = True return order_map class Tile: """ Holds info and useful methods related to an individual domino tile """ def __init__(self, cfg, coordinate, values, order, vision_offset): self.coordinate = coordinate self.vision_offset = vision_offset self.values = values self.cfg = cfg self.order = order def getPlacementPositionInMeters(self): # Returns bottom left corner position of the tile relative to the origin of the field tile_start_x = self.coordinate[0] * self.cfg.tile_size_width_meters tile_start_y = self.coordinate[1] * self.cfg.tile_size_height_meters return (tile_start_x, tile_start_y) def draw(self, array): # Note that this function draws pixels assuming the array is indexed as x,y instead of rows and columns # the array is flipped to plot as an image in the parent function # Determine tile location tile_size_x_in_px = (self.cfg.domino_width_px + self.cfg.domino_spacing_width_px) * self.cfg.tile_width tile_size_y_in_px = (self.cfg.domino_height_px + self.cfg.domino_spacing_height_px) * self.cfg.tile_height tile_start_x_px = self.coordinate[0] * tile_size_x_in_px tile_start_y_px = self.coordinate[1] * tile_size_y_in_px tile_end_x_px = tile_start_x_px + tile_size_x_in_px tile_end_y_px = tile_start_y_px + tile_size_y_in_px self.draw_single(array, tile_start_x_px, tile_start_y_px, tile_end_x_px, tile_end_y_px) def draw_single(self, array, tile_start_x_px, tile_start_y_px, tile_end_x_px, tile_end_y_px, draw_edge=True): # Fill in tile with background color array[tile_start_x_px:tile_end_x_px, tile_start_y_px:tile_end_y_px, 0] = self.cfg.tile_background_color[0] array[tile_start_x_px:tile_end_x_px, tile_start_y_px:tile_end_y_px, 1] = self.cfg.tile_background_color[1] 
array[tile_start_x_px:tile_end_x_px, tile_start_y_px:tile_end_y_px, 2] = self.cfg.tile_background_color[2] # Fill in tile edge color (only have to do start locations since next tile over will fill in end locations) if draw_edge: array[tile_start_x_px:tile_end_x_px, tile_start_y_px, 0] = self.cfg.tile_edge_color[0] array[tile_start_x_px:tile_end_x_px, tile_start_y_px, 1] = self.cfg.tile_edge_color[1] array[tile_start_x_px:tile_end_x_px, tile_start_y_px, 2] = self.cfg.tile_edge_color[2] array[tile_start_x_px, tile_start_y_px:tile_end_y_px, 0] = self.cfg.tile_edge_color[0] array[tile_start_x_px, tile_start_y_px:tile_end_y_px, 1] = self.cfg.tile_edge_color[1] array[tile_start_x_px, tile_start_y_px:tile_end_y_px, 2] = self.cfg.tile_edge_color[2] # Draw dominos for i in range(self.cfg.tile_width): domino_start_x = tile_start_x_px + self.cfg.domino_spacing_width_px + (self.cfg.domino_width_px + self.cfg.domino_spacing_width_px) * i domino_end_x = domino_start_x + self.cfg.domino_width_px for j in range(self.cfg.tile_height): domino_start_y = tile_start_y_px + self.cfg.domino_spacing_height_px + (self.cfg.domino_height_px + self.cfg.domino_spacing_height_px) * j domino_end_y = domino_start_y + self.cfg.domino_height_px domino_id = self.values[j, i] domino_color = self.cfg.dominos[domino_id][1] array[domino_start_x:domino_end_x, domino_start_y:domino_end_y, 0] = domino_color[0] array[domino_start_x:domino_end_x, domino_start_y:domino_end_y, 1] = domino_color[1] array[domino_start_x:domino_end_x, domino_start_y:domino_end_y, 2] = domino_color[2] class Action: def __init__(self, action_type, name): self.action_type = action_type self.name = name def draw(self, ax, text=""): pass class SetPoseAction(Action): def __init__(self, action_type, name, x, y, a): # action_type (enum) # string name # X position [m] # Y position [m] # Angle [deg] super().__init__(action_type, name) self.x = float(x) self.y = float(y) self.a = math.radians(float(a)) class WaitAction(Action): def 
__init__(self,action_type, name, time): super().__init__(action_type, name) self.time = time class MoveConstVelAction(Action): def __init__(self, action_type, name, vx, vy, va, t): # action_type (enum) # string name # X velocity [m/s] # Y velocity [m/s] # Angle [rad/s] # time [sec] super().__init__(action_type, name) self.vx = float(vx) self.vy = float(vy) self.va = float(va) self.t = float(t) class MoveAction(Action): def __init__(self, action_type, name, x, y, a): # action_type (enum) # string name # X position [m] # Y position [m] # Angle [deg] super().__init__(action_type, name) self.x = float(x) self.y = float(y) self.a = math.radians(float(a)) def getPos(self): return np.array([self.x, self.y]) def getAngleDegrees(self): return math.degrees(self.a) def getAngleRadians(self): return self.a def draw(self, ax, text="", show_label=True): if self.action_type in [ActionTypes.MOVE_WITH_VISION, ActionTypes.MOVE_REL, ActionTypes.MOVE_REL_SLOW]: return # Base triangle at 0 degrees scale = 0.3 p1 = np.array([scale, 0]) s = math.sin(45*math.pi/180.0) c = math.cos(45 * math.pi / 180.0) p2 = np.array([-scale*s, scale*c]) p3 = np.array([-scale*s, -scale*c]) points = np.vstack((p1, p2 ,p3)) # Rotate for orientation s = math.sin(self.getAngleRadians()) c = math.cos(self.getAngleRadians()) R = np.array([[c, -s],[s, c]]) for i in range(3): # Do local rotation points[i,:] = np.matmul(R, np.reshape(points[i,:],(2,1))).ravel() # Then offset for position points[i, :] = points[i, :] + self.getPos() ax.add_patch(patches.Polygon(points, fill=True, edgecolor='c', facecolor='c')) text_point = points[0] text_to_show = self.name if text is not "": text_to_show = text if show_label: ax.annotate(text_to_show, xy=text_point[:2], xytext=[1, 1], textcoords="offset points", fontsize=8, color="green") def generate_full_action_sequence(cfg, tile): """ Standard sequence: - Move to load - Do load - Move out of load - Move to field entry - Move to coarse drop off - Wait for localization - Move to 
fine drop off - Drop off - Move to coarse drop off - Move to field exit - Move to near load """ # Setup positions in field frame tile_pos_in_field_frame = np.array(tile.getPlacementPositionInMeters()) robot_placement_pos_field_frame = tile_pos_in_field_frame + Utils.TransformPos(cfg.tile_to_robot_offset, [0,0], cfg.field_to_robot_frame_angle) robot_placement_fine_pos_field_frame = robot_placement_pos_field_frame + Utils.TransformPos(cfg.tile_placement_fine_offset, [0,0], cfg.field_to_robot_frame_angle) robot_placement_coarse_pos_field_frame = robot_placement_fine_pos_field_frame + Utils.TransformPos(cfg.tile_placement_coarse_offset, [0,0], cfg.field_to_robot_frame_angle) # Convert positions to global frame robot_placement_coarse_pos_global_frame = Utils.TransformPos(robot_placement_coarse_pos_field_frame, cfg.domino_field_origin, cfg.domino_field_angle) robot_placement_fine_pos_global_frame = Utils.TransformPos(robot_placement_fine_pos_field_frame, cfg.domino_field_origin, cfg.domino_field_angle) robot_field_angle = cfg.domino_field_angle + cfg.field_to_robot_frame_angle # Setup various entry and intermediate positions intermediate_entry_pos_global_frame = np.array([cfg.highway_x, cfg.intermediate_entry_hz_y]) entry_y = robot_placement_coarse_pos_global_frame[1]+cfg.enter_position_distance field_entry_pos_global_frame = np.array([cfg.highway_x+0.75, entry_y]) intermediate_place_pos_global_frame = np.array([robot_placement_coarse_pos_global_frame[0], entry_y]) # Make pose adjustments based on config coord = tile.coordinate x_offset_row = cfg.x_offset_rows[coord[1]] y_offset_row = cfg.y_offset_rows[coord[1]] y_offset_col = cfg.y_offset_cols[coord[0]] extra_y_offset_fine = y_offset_col + y_offset_row robot_placement_fine_pos_global_frame[0] += x_offset_row robot_placement_fine_pos_global_frame[1] += extra_y_offset_fine robot_placement_coarse_pos_global_frame[1] += extra_y_offset_fine intermediate_place_pos_global_frame[1] += extra_y_offset_fine 
field_entry_pos_global_frame[1] += extra_y_offset_fine # Figure out if intermediate steps are needed intermediate_hz = robot_placement_coarse_pos_global_frame[1] < cfg.intermediate_entry_hz_y - 1 intermediate_vt = robot_placement_coarse_pos_global_frame[0] > cfg.intermediate_place_vt_x + 1 # Tiles near the back wall don't have enough space for backwards offset relative_tile_offset = copy.deepcopy(cfg.tile_placement_coarse_offset) if robot_placement_coarse_pos_global_frame[0] < 2: intermediate_place_pos_global_frame[0] = robot_placement_fine_pos_global_frame[0] robot_placement_coarse_pos_global_frame[0] = robot_placement_fine_pos_global_frame[0] relative_tile_offset = np.asarray([0, 1]) actions = [] name = "Move to load waypoint - coarse" actions.append(MoveAction(ActionTypes.MOVE_COARSE, name, cfg.load_waypoint[0], cfg.load_waypoint[1], cfg.highway_angle)) # name = "Wait for localization" # actions.append(Action(ActionTypes.WAIT_FOR_LOCALIZATION, name)) name = "Move to near load area - coarse" actions.append(MoveAction(ActionTypes.MOVE_COARSE, name, cfg.base_station_prep_pos[0], cfg.base_station_prep_pos[1], cfg.base_station_target_angle)) # name = "Pause plan for load" # actions.append(Action(ActionTypes.PAUSE_PLAN, name)) # name = "Start cameras" # actions.append(Action(ActionTypes.START_CAMERAS, name)) # name = "Wait for localization" # actions.append(Action(ActionTypes.WAIT_FOR_LOCALIZATION, name)) # name = "Move to load prep - fine" # actions.append(MoveAction(ActionTypes.MOVE_FINE, name, cfg.base_station_prep_pos[0], cfg.base_station_prep_pos[1], cfg.base_station_target_angle)) # name = "Move to load prep - vision" # actions.append(MoveAction(ActionTypes.MOVE_WITH_VISION, name, cfg.base_station_prep_vision_offset[0], cfg.base_station_prep_vision_offset[1], cfg.base_station_prep_vision_offset[2])) # name = "Move to load - relative slow" # actions.append(MoveAction(ActionTypes.MOVE_REL_SLOW, name, cfg.base_station_relative_offset[0], 
cfg.base_station_relative_offset[1], cfg.base_station_relative_offset[2])) # name = "Align with load" # actions.append(MoveAction(ActionTypes.MOVE_WITH_VISION, name, cfg.base_station_vision_offset[0], cfg.base_station_vision_offset[1], cfg.base_station_vision_offset[2])) # name = "Stop cameras" # actions.append(Action(ActionTypes.STOP_CAMERAS, name)) name = "Load tile" actions.append(Action(ActionTypes.LOAD, name)) # name = "Move away from load - relative slow" # actions.append(MoveAction(ActionTypes.MOVE_REL_SLOW, name, -1.3*cfg.base_station_relative_offset[0], -1.3*cfg.base_station_relative_offset[1], -1.3*cfg.base_station_relative_offset[2])) # name = "Pause plan for QC" # actions.append(Action(ActionTypes.PAUSE_PLAN, name)) name = "Move to load waypoint - coarse" actions.append(MoveAction(ActionTypes.MOVE_COARSE, name, cfg.load_waypoint[0], cfg.load_waypoint[1], cfg.highway_angle)) # name = "Wait for localization" # actions.append(Action(ActionTypes.WAIT_FOR_LOCALIZATION, name)) # if intermediate_hz: # name = "Move to intermediate enter - coarse" # actions.append(MoveAction(ActionTypes.MOVE_COARSE, name, intermediate_entry_pos_global_frame[0], intermediate_entry_pos_global_frame[1], cfg.highway_angle)) # name = "Wait for motor cooldown (long)" # actions.append(WaitAction(ActionTypes.WAIT, name, 20)) name = "Move to enter - coarse" actions.append(MoveAction(ActionTypes.MOVE_COARSE, name, field_entry_pos_global_frame[0], field_entry_pos_global_frame[1], robot_field_angle)) name = "Wait for localization" actions.append(Action(ActionTypes.WAIT_FOR_LOCALIZATION, name)) # name = "Wait for motor cooldown" # actions.append(WaitAction(ActionTypes.WAIT, name, 10)) # if intermediate_vt: # name = "Move to intermediate place - coarse" # actions.append(MoveAction(ActionTypes.MOVE_COARSE, name, intermediate_place_pos_global_frame[0], intermediate_place_pos_global_frame[1], robot_field_angle)) # name = "Wait for localization" # 
actions.append(Action(ActionTypes.WAIT_FOR_LOCALIZATION, name)) # name = "Wait for motor cooldown" # actions.append(WaitAction(ActionTypes.WAIT, name, 10)) name = "Move to near place - coarse" actions.append(MoveAction(ActionTypes.MOVE_COARSE, name, robot_placement_coarse_pos_global_frame[0], robot_placement_coarse_pos_global_frame[1], robot_field_angle)) name = "Start cameras" actions.append(Action(ActionTypes.START_CAMERAS, name)) name = "Wait for localization" actions.append(Action(ActionTypes.WAIT_FOR_LOCALIZATION, name)) name = "Move to place - fine" actions.append(MoveAction(ActionTypes.MOVE_FINE_STOP_VISION, name, robot_placement_fine_pos_global_frame[0], robot_placement_fine_pos_global_frame[1], robot_field_angle + cfg.angle_adjust_fine)) name = "Move to place - vision" actions.append(MoveAction(ActionTypes.MOVE_WITH_VISION, name, tile.vision_offset[0], tile.vision_offset[1], tile.vision_offset[2])) name = "Stop cameras" actions.append(Action(ActionTypes.STOP_CAMERAS, name)) # name = "Pause plan for QC" # actions.append(Action(ActionTypes.PAUSE_PLAN, name)) name = "Place tile" actions.append(Action(ActionTypes.PLACE, name)) name = "Move away from place - relative slow" actions.append(MoveAction(ActionTypes.MOVE_REL_SLOW, name, relative_tile_offset[0], relative_tile_offset[1], 0)) # if intermediate_vt: # name = "Move to intermediate place - coarse" # actions.append(MoveAction(ActionTypes.MOVE_COARSE, name, intermediate_place_pos_global_frame[0], intermediate_place_pos_global_frame[1], robot_field_angle)) # name = "Wait for motor cooldown" # actions.append(WaitAction(ActionTypes.WAIT, name, 10)) name = "Move to intermediate place - coarse" actions.append(MoveAction(ActionTypes.MOVE_COARSE, name, intermediate_place_pos_global_frame[0], intermediate_place_pos_global_frame[1], robot_field_angle)) name = "Move to exit - coarse" actions.append(MoveAction(ActionTypes.MOVE_COARSE, name, field_entry_pos_global_frame[0], field_entry_pos_global_frame[1], 
cfg.highway_angle)) name = "Wait for localization" actions.append(Action(ActionTypes.WAIT_FOR_LOCALIZATION, name)) # name = "Wait for motor cooldown" # actions.append(WaitAction(ActionTypes.WAIT, name, 10)) # if intermediate_hz: # name = "Move to intermediate exit - coarse" # actions.append(MoveAction(ActionTypes.MOVE_COARSE, name, intermediate_entry_pos_global_frame[0], intermediate_entry_pos_global_frame[1], cfg.highway_angle)) # name = "Wait for motor cooldown (long)" # actions.append(WaitAction(ActionTypes.WAIT, name, 20)) return actions def generate_hax_action_sequence(cfg, tile): actions = [] x_pose = 7 name = "Move 1 - coarse 90" actions.append(MoveAction(ActionTypes.MOVE_COARSE, name, x_pose, 5, 90)) name = "Move 1 - coarse 0" actions.append(MoveAction(ActionTypes.MOVE_COARSE, name, x_pose, 5, 0)) name = "Move 1 - coarse -90" actions.append(MoveAction(ActionTypes.MOVE_COARSE, name, x_pose, 5, -90)) name = "Wait 1" actions.append(WaitAction(ActionTypes.WAIT, name, 5)) name = "Move 2 - coarse -90" actions.append(MoveAction(ActionTypes.MOVE_COARSE, name, x_pose, -3.5, -90)) name = "Move 2 - coarse 0 " actions.append(MoveAction(ActionTypes.MOVE_COARSE, name, x_pose, -3.5, 0)) name = "Move 2 - coarse 90" actions.append(MoveAction(ActionTypes.MOVE_COARSE, name, x_pose, -3.5, 90)) name = "Wait 2" actions.append(WaitAction(ActionTypes.WAIT, name, 5)) return actions def generate_small_testing_action_sequence(cfg, tile): """ Short sequence for testing Load Move to near place - coarse Wait for localization Move to place - fine Place Move away from place - fine Move to load - coarse """ # Setup positions in field frame tile_pos_in_field_frame = np.array(tile.getPlacementPositionInMeters()) robot_placement_fine_pos_field_frame = tile_pos_in_field_frame + Utils.TransformPos(cfg.tile_to_robot_offset, [0,0], cfg.field_to_robot_frame_angle) robot_placement_coarse_pos_field_frame = robot_placement_fine_pos_field_frame + Utils.TransformPos(cfg.tile_placement_coarse_offset, 
[0,0], cfg.field_to_robot_frame_angle) enter_field_prep_pos_field_frame = [robot_placement_coarse_pos_field_frame[0], - cfg.prep_position_distance] exit_field_prep_pos_field_frame = [-cfg.exit_position_distance, robot_placement_coarse_pos_field_frame[1]] # Convert positions to global frame tile_pos_in_global_frame = Utils.TransformPos(tile_pos_in_field_frame, cfg.domino_field_origin, cfg.domino_field_angle) robot_placement_coarse_pos_global_frame = Utils.TransformPos(robot_placement_coarse_pos_field_frame, cfg.domino_field_origin, cfg.domino_field_angle) robot_placement_fine_pos_global_frame = Utils.TransformPos(robot_placement_fine_pos_field_frame, cfg.domino_field_origin, cfg.domino_field_angle) robot_field_angle = cfg.domino_field_angle + cfg.field_to_robot_frame_angle enter_field_prep_global_frame = Utils.TransformPos(enter_field_prep_pos_field_frame, cfg.domino_field_origin, cfg.domino_field_angle) exit_field_prep_global_frame = Utils.TransformPos(exit_field_prep_pos_field_frame, cfg.domino_field_origin, cfg.domino_field_angle) # print(tile.order) # print(tile.coordinate) # print(tile_pos_in_field_frame) # print(tile_pos_in_global_frame) # print(robot_placement_coarse_pos_global_frame) # print(robot_placement_fine_pos_global_frame) actions = [] name = "Load tile" actions.append(Action(ActionTypes.LOAD, name)) name = "Move to prep - coarse" actions.append(MoveAction(ActionTypes.MOVE_COARSE, name, enter_field_prep_global_frame[0], enter_field_prep_global_frame[1], robot_field_angle)) name = "Wait for localization" actions.append(Action(ActionTypes.WAIT_FOR_LOCALIZATION, name)) name = "Move to near place - coarse" actions.append(MoveAction(ActionTypes.MOVE_COARSE, name, robot_placement_coarse_pos_global_frame[0], robot_placement_coarse_pos_global_frame[1], robot_field_angle)) name = "Wait for localization" actions.append(Action(ActionTypes.WAIT_FOR_LOCALIZATION, name)) name = "Start cameras" actions.append(Action(ActionTypes.START_CAMERAS, name)) name = "Move to 
def draw_env(cfg):
    """ Draws a figure of the environment """
    fig, ax = plt.subplots(1)

    def add_rect(lower_left, upper_right, **kwargs):
        # patches.Rectangle wants the lower-left corner plus width/height,
        # so convert from the two-corner form the config uses.
        width = upper_right[0] - lower_left[0]
        height = upper_right[1] - lower_left[1]
        ax.add_patch(patches.Rectangle(lower_left, width, height, **kwargs))

    # Overall map boundaries
    add_rect(cfg.robot_boundaries[0], cfg.robot_boundaries[1], fill=False, edgecolor='b')
    # Domino field boundaries
    add_rect(cfg.domino_field_origin, cfg.domino_field_top_right, fill=False, edgecolor='r')
    # Base station footprint (solid black)
    add_rect(cfg.base_station_boundaries[0], cfg.base_station_boundaries[1], fill=True, edgecolor='k', facecolor='k')

    ax.set_xlim(left=-1, right=cfg.robot_boundaries[1][0] + 1)
    ax.set_ylim(bottom=-1, top=cfg.robot_boundaries[1][1] + 1)
    ax.axis('equal')
    return ax
class BasePlan:
    """Minimal plan container: an ordered list of cycles, each exposing an
    action_sequence. Lookups return None instead of raising on bad indices.
    """

    def __init__(self, cycles):
        # cycles: sequence of Cycle-like objects with an .action_sequence attribute
        self.cycles = cycles

    def get_cycle(self, cycle_num):
        """Return the cycle at cycle_num, or None if the index is out of range."""
        try:
            return self.cycles[cycle_num]
        except IndexError:
            return None

    def get_action(self, cycle_id, action_id):
        """Return the action at (cycle_id, action_id), or None if either lookup fails."""
        cycle = self.get_cycle(cycle_id)
        if not cycle:
            return None
        try:
            return cycle.action_sequence[action_id]
        except IndexError:
            return None
class SubsectionPlan(RealPlan):
    """Plan restricted to a rectangular sub-region of an existing full plan.

    Copies only the tiles (and their matching cycles) whose grid coordinates
    fall inside [cfg.start_coords, cfg.end_coords] (inclusive on both ends),
    renumbering them from 0 so the subsection runs as a standalone plan.
    """

    def __init__(self, full_plan):
        # full_plan: a RealPlan (or Plan) providing cfg, field, and cycles.
        self.full_plan = full_plan
        self.start_coords = full_plan.cfg.start_coords
        self.end_coords = full_plan.cfg.end_coords
        self.delta_coords = (self.end_coords[0] - self.start_coords[0],
                             self.end_coords[1] - self.start_coords[1] )
        # Build a new, smaller field sharing the full plan's config.
        # NOTE(review): the selection loop below is inclusive on both ends,
        # which keeps (delta + 1) tiles per axis, while n_tiles_x/y are set
        # to delta. Possible off-by-one -- confirm against how DominoField
        # uses n_tiles_x/n_tiles_y.
        new_field = DominoField(full_plan.cfg)
        new_field.n_tiles_x = self.delta_coords[0]
        new_field.n_tiles_y = self.delta_coords[1]
        counter = 0
        new_field.tiles = []
        new_cycles = []
        for i in range(len(full_plan.field.tiles)):
            tile = full_plan.field.tiles[i]
            cycle = full_plan.cycles[i]
            tile_coords = tile.coordinate
            # Keep only tiles inside the requested coordinate window; tiles
            # and cycles are index-aligned in the full plan, so copy in pairs.
            if tile_coords[0] >= self.start_coords[0] and \
               tile_coords[0] <= self.end_coords[0] and \
               tile_coords[1] >= self.start_coords[1] and \
               tile_coords[1] <= self.end_coords[1]:
                # Make new tile (deep copy so renumbering doesn't touch the full plan)
                new_tile = copy.deepcopy(tile)
                new_tile.order = counter
                new_field.tiles.append(new_tile)
                # Make new cycle, re-pointed at the copied tile
                new_cycle = copy.deepcopy(cycle)
                new_cycle.id = counter
                new_cycle.tile = new_tile
                new_cycles.append(new_cycle)
                counter += 1
        super().__init__(self.full_plan.cfg, new_field, new_cycles)
logging.basicConfig( level=logging.INFO, format="%(asctime)s [%(levelname)s] %(message)s", handlers=[ logging.FileHandler(os.path.join(cfg.log_folder,"planner.log")), logging.StreamHandler() ] ) plan = None if cfg.USE_SMALL_TESTING_CONFIG: plan = Plan(cfg, generate_small_testing_action_sequence) else: plan = Plan(cfg, generate_full_action_sequence) # plan = Plan(cfg, generate_hax_action_sequence) if cfg.USE_SUBSECTION: plan = SubsectionPlan(plan) if autosave: fname = os.path.join(cfg.plans_dir,"autosaved.p") with open(fname, 'wb') as f: pickle.dump(plan, f) logging.info("Saved plan to {}".format(fname)) return plan def GeneratePDF(plan): from reportlab.lib.pagesizes import letter from reportlab.pdfgen import canvas from PIL import Image logging.info("Generating PDF") # Initialize pdf name = "domino_plan.pdf" if plan.cfg.MR_LOGO_PLAN: name = "domnino_plan_logo.pdf" full_path = os.path.join(plan.cfg.plans_dir, name) page_height, page_width = letter # Flipped to get landscape c = canvas.Canvas(full_path, pagesize=(page_width, page_height)) # Pre allocate image array tile_size_x_in_px = (plan.cfg.domino_width_px + plan.cfg.domino_spacing_width_px) * plan.cfg.tile_width tile_size_y_in_px = (plan.cfg.domino_height_px + plan.cfg.domino_spacing_height_px) * plan.cfg.tile_height for i in range(len(plan.field.tiles)): # Get tile to draw on this page tile = plan.field.tiles[i] # Draw title text_width = 0.5 * page_width text_height = 0.9 * page_height text = "Tile {}, Coordinate: ({}, {})".format(i, tile.coordinate[0], tile.coordinate[1]) c.setFont("Helvetica", 20) c.drawCentredString(text_width,text_height,text) # Draw orientation note text_width = 0.5 * page_width text_height = 0.1 * page_height text = "This side towards robot body" c.drawCentredString(text_width,text_height,text) # Draw image image_array = np.zeros((tile_size_x_in_px, tile_size_y_in_px, 3)) tile.draw_single(image_array, 0, 0, tile_size_x_in_px, tile_size_y_in_px, draw_edge=False) image_array = 
np.transpose(image_array, (1, 0, 2)) image_array = np.flip(image_array, 0) im = Image.fromarray(np.uint8(255*image_array), mode='RGB') image_fraction = 0.7 start_width = (1-image_fraction)/2.0 * page_width start_height = (1-image_fraction)/2.0 * page_height image_width = image_fraction * page_width image_height = image_fraction * page_height c.drawInlineImage(im, start_width , start_height, width=image_width, height=image_height) # Complete page c.showPage() c.save() if __name__ == '__main__': import PySimpleGUI as sg plan = RunFieldPlanning(autosave=False) # plan.field.printStats() # plan.field.show_image_parsing() # plan.field.render_domino_image_tiles() # plan.field.show_tile_ordering() plan.draw_cycle(0) plan.draw_cycle(10) plan.draw_cycle(19) plan.draw_all_tile_poses() # GeneratePDF(plan) # sg.change_look_and_feel('Dark Blue 3') # clicked_value = sg.popup_yes_no('Save plan to file?') # if clicked_value == "Yes": # fname = sg.popup_get_file("Location to save", save_as=True) # with open(fname, 'wb') as f: # pickle.dump(plan, f) # logging.info("Saved plan to {}".format(fname)) ================================================ FILE: src/master/MarvelMindHandler.py ================================================ """ Python wrapper for Marvelmind C API """ import ctypes import logging from collections import defaultdict from Utils import NonBlockingTimer class MarvelmindWrapper: def __init__(self, cfg): self.lib = ctypes.windll.LoadLibrary(cfg.mm_api_path) self.devices = defaultdict(dict) self.expected_devices = [] self.wake_timer = None for name,beacons in cfg.device_map.items(): for b in beacons: self.expected_devices.append(b) self._open_serial_port() def __del__(self): self._close_serial_port() def _open_serial_port(self): """ Establish communication with marvelmind router. Required for all other functions. 
""" logging.info("Opening communication with MarvelMind") fn = self.lib.mm_open_port fn.restype = ctypes.c_bool fn.argtypes = [ctypes.POINTER(ctypes.c_void_p)] status = fn(None) def _close_serial_port(self): """ Close communication with marvelmind router. """ fn = self.lib.mm_close_port fn.restype = ctypes.c_bool fn.argtypes = [ctypes.POINTER(ctypes.c_void_p)] status = fn(None) def check_all_devices_status(self): """ Checks to see if all devices are awake and ready """ logging.info("Getting device list") fn = self.lib.mm_get_devices_list fn.restype = ctypes.c_bool fn.argtypes = [ctypes.POINTER(ctypes.c_uint8)] buff_size = 255 dataptr = (ctypes.c_uint8*buff_size)() dataptr = ctypes.cast(dataptr, ctypes.POINTER(ctypes.c_uint8)) status = fn(dataptr) num_devices = dataptr[0] logging.info("Num devices found: " + str(num_devices)) idx = 1 device_offset = 9 addr_offset = 0 sleep_offset = 2 for i in range(num_devices): addr = dataptr[idx + addr_offset] sleep = dataptr[idx + sleep_offset] self.devices[addr] = {'sleep': bool(sleep)} idx += device_offset ready = True if self.wake_timer: if not self.wake_timer.check(): ready = False else: for d in self.expected_devices: if d not in self.devices.keys(): ready = False logging.warning("Could not find expected device {}".format(d)) continue if self.devices[d]['sleep'] is False: ready = False logging.warning("Device {} is not awake".format(d)) continue return ready def wake_all_devices(self): for d in self.expected_devices: self.wake_device(d) def wake_all_devices_only_if_needed(self): ok = self.check_all_devices_status() if not ok: logging.info("Triggering marvelmind device wakeup. 
class MockMarvelmindWrapper:
    """Drop-in stand-in for MarvelmindWrapper when no hardware is attached.

    Every wake/sleep command is a no-op and status queries report that all
    devices are ready, so the rest of the system runs unchanged offline.
    """

    def __init__(self, cfg):
        # cfg is accepted (and ignored) to mirror the real wrapper's signature.
        pass

    def check_all_devices_status(self):
        # Pretend every expected beacon is awake and ready.
        return True

    def wake_all_devices_only_if_needed(self):
        pass

    def wake_all_devices(self):
        pass

    def sleep_all_devices(self):
        pass

    def wake_device(self, address):
        pass

    def sleep_device(self, address):
        pass

    def get_metrics(self):
        # No hardware, so there are no metrics to report.
        return {}
hand column with status panels col1 = [] for name in panel_names: col1 += status_panel(name) # Plan cycle/action modification buttons incremenet_cycle_button = sg.Button('Cycle +', button_color=('white','blue'), size=plan_button_size, pad=plan_button_pad, key='_INC_CYCLE_', disabled=True) decremenet_cycle_button = sg.Button('Cycle -', button_color=('white','blue'), size=plan_button_size, pad=plan_button_pad, key='_DEC_CYCLE_', disabled=True) incremenet_action_button = sg.Button('Action +', button_color=('white','blue'), size=plan_button_size, pad=plan_button_pad, key='_INC_ACTION_', disabled=True) decremenet_action_button = sg.Button('Action -', button_color=('white','blue'), size=plan_button_size, pad=plan_button_pad, key='_DEC_ACTION_', disabled=True) set_cycle_field = sg.Input(key='_CYCLE_FIELD_', size=(5,1), pad=(0,20)) set_action_field = sg.Input(key='_ACTION_FIELD_', size=(5,1), pad=(0,20)) set_cycle_button = sg.Button('Set Cycle', button_color=('white','blue'), size=plan_button_size, pad=plan_button_pad, key='_SET_CYCLE_', disabled=True) set_action_button = sg.Button('Set Action', button_color=('white','blue'), size=plan_button_size, pad=plan_button_pad, key='_SET_ACTION_', disabled=True) cycle_buttons = [[sg.Column([[decremenet_cycle_button], [decremenet_action_button]]), sg.Column([[incremenet_cycle_button], [incremenet_action_button]]), sg.Column([[set_cycle_field], [set_action_field]]), sg.Column([[set_cycle_button], [set_action_button]]) ]] col1 += cycle_buttons # Middle column with plot and buttons target_element = [ [sg.Text("Target: ")], [sg.Combo(target_names, key='_TARGET_', default_value='robot1')] ] actions = [a for a in ActionTypes] action_element = [ [sg.Text("Action: ")], [sg.Combo(actions, key='_ACTION_')] ] data_element = [ [sg.Text('Data:')], [sg.Input(key='_ACTION_DATA_')] ] plan_file_field = sg.Input(key='_PLAN_FILE_', visible=False, enable_events=True) load_plan_button = sg.FileBrowse(button_text='Load Plan', 
button_color=('white','blue'), size=plan_button_size, pad=plan_button_pad, \ key='_LOAD_PLAN_', file_types=(('Robot Plans', ('*.json','*.p')),)) run_plan_button = sg.Button('Run Plan', button_color=('white','blue'), size=plan_button_size, pad=plan_button_pad, key='_RUN_PLAN_', disabled=True) pause_plan_button = sg.Button('Pause Plan', button_color=('white','blue'), size=plan_button_size, pad=plan_button_pad, key='_PAUSE_PLAN_', disabled=True) abort_plan_button = sg.Button('Abort Plan', button_color=('white','blue'), size=plan_button_size, pad=plan_button_pad, key='_ABORT_PLAN_', disabled=True) plan_buttons = [[sg.Column([[plan_file_field, load_plan_button], [run_plan_button]]), sg.Column([[pause_plan_button], [abort_plan_button]])]] button_size = [20,6] button_pad = (2,10) estop_button = [[sg.Button('ESTOP', button_color=('white','red'), size=button_size, pad=button_pad) ]] manual_button = [[sg.Button('Send Command', button_color=('white','green'), size=button_size, pad=button_pad) ]] clear_error_button = [[sg.Button('Clear Error', button_color=('white','orange red'), size=[10,6], pad=button_pad)]] col2 = [[sg.Graph(canvas_size=(700,700), graph_bottom_left=config.graph_bottom_left, graph_top_right=config.graph_top_right, float_values=True, key="_GRAPH_", background_color="light grey") ], [sg.Column(target_element), sg.Column(action_element), sg.Column(data_element)], [sg.Column(plan_buttons), sg.Column(estop_button), sg.Column(manual_button), sg.Column(clear_error_button)] ] # Right hand column with text ouput col3 = [[sg.Output(size=(70, 50), echo_stdout_stderr=True)]] return [[ sg.Column(col1), sg.Column(col2), sg.Column(col3)]] class CmdGui: def __init__(self, config): self.config = config sg.change_look_and_feel('DarkBlack') panel_names = ["{}".format(n) for n in self.config.ip_map] panel_names += ['plan'] target_names = copy.deepcopy(panel_names) target_names.remove('plan') layout = setup_gui_layout(config, panel_names, target_names) self.window = 
sg.Window('Robot Controller', layout, return_keyboard_events=True, use_default_focus=False) self.window.finalize() self.viz_figs = {} self.manual_action_debounce_timer = Utils.NonBlockingTimer(1.0) self.drawn_plan_grid = False self.last_cycle_number_drawn = -1 self._draw_environment() def close(self): self.window.close() def update(self): event, values = self.window.read(timeout=20) # if event != "__TIMEOUT__": # print(event) # print(values) # At exit, check if we should keep marvelmind on if event is None or event == 'Exit': if self.config.OFFLINE_TESTING or self.config.SKIP_MARVELMIND: return 'Exit', None else: clicked_value = sg.popup_yes_no('Do you want to keep the Marvelmind running') if clicked_value == "Yes": return "ExitMM", None else: return 'Exit', None # Sending a manual action (via button or pressing enter) if event in ('Send Command', "Clear Error", '\r', '\n'): manual_action = None if self.manual_action_debounce_timer.check(): if event == "Clear Error": manual_action = (values['_TARGET_'], Action(ActionTypes.CLEAR_ERROR, "ClearError")) else: manual_action = self._parse_manual_action(values) if manual_action is not None: self.window['_ACTION_DATA_'].update("") self.manual_action_debounce_timer = Utils.NonBlockingTimer(1.0) return 'Action', manual_action # Pressing the run plan button if event == "_RUN_PLAN_": clicked_value = sg.popup_yes_no('Ready to start plan?') if clicked_value == "Yes": return "Run", None if event == "_PLAN_FILE_": return "Load", values["_PLAN_FILE_"] if event == "_PAUSE_PLAN_": return "Pause", None if event == "_ABORT_PLAN_": clicked_value = sg.popup_yes_no('Abort plan?') if clicked_value == "Yes": return "Abort", None if event in ["_INC_CYCLE_","_DEC_CYCLE_","_INC_ACTION_","_DEC_ACTION_"]: return event, None if event in ["_SET_CYCLE_", "_SET_ACTION_"]: return event, (values['_CYCLE_FIELD_'], values["_ACTION_FIELD_"]) if event == "ESTOP" or event == " ": return "ESTOP", None return None, None def update_status_panels(self, 
    def _parse_manual_action(self, values):
        """Build a (target, Action) tuple from the manual-command GUI fields.

        values: the PySimpleGUI values dict; reads the target combo box,
        the action-type combo box, and the free-form data field (a
        comma-separated list whose expected length depends on the action).

        Returns (target, action), or None when the data field cannot be
        parsed for the selected action type.
        """
        target = values['_TARGET_']
        action_type = ActionTypes(values['_ACTION_'])
        data_str = values['_ACTION_DATA_']
        name = 'ManualAction'
        action = None
        if action_type in [ActionTypes.MOVE_COARSE, ActionTypes.MOVE_REL, ActionTypes.MOVE_REL_SLOW, ActionTypes.MOVE_FINE, ActionTypes.MOVE_FINE_STOP_VISION]:
            # Position moves need exactly 3 comma-separated values (x, y, angle).
            data = data_str.split(',')
            data = [x.strip() for x in data]
            if len(data) != 3:
                logging.warning("Invalid data: {}".format(data))
                return None
            action = MoveAction(action_type, name, data[0], data[1], data[2])
        elif action_type in [ActionTypes.MOVE_CONST_VEL]:
            # Constant-velocity moves need exactly 4 comma-separated values.
            data = data_str.split(',')
            data = [x.strip() for x in data]
            if len(data) != 4:
                logging.warning("Invalid data: {}".format(data))
                return None
            action = MoveConstVelAction(action_type, name, data[0], data[1], data[2], data[3])
        elif action_type in [ActionTypes.SET_POSE]:
            # Pose override needs exactly 3 comma-separated values (x, y, angle).
            data = data_str.split(',')
            data = [x.strip() for x in data]
            if len(data) != 3:
                logging.warning("Invalid data: {}".format(data))
                return None
            action = SetPoseAction(action_type, name, data[0], data[1], data[2])
        elif action_type == ActionTypes.MOVE_WITH_VISION:
            # Vision moves tolerate a missing/short data field by assuming (0,0,0).
            data = data_str.split(',')
            data = [x.strip() for x in data]
            if len(data) != 3:
                data = (0,0,0)
                logging.warning("Assuming position of (0,0,0) for vision move")
            action = MoveAction(action_type, name, data[0], data[1], data[2])
        elif action_type == ActionTypes.WAIT:
            # Wait uses only the first value, as a duration in seconds.
            data = data_str.split(',')
            data = [x.strip() for x in data]
            action = WaitAction(action_type, name, float(data[0]))
        else:
            # All remaining action types take no data.
            action = Action(action_type, name)
        return (target, action)
self.window['_DEC_ACTION_'].update(disabled=disabled) self.window['_CYCLE_FIELD_'].update(disabled=disabled) self.window['_ACTION_FIELD_'].update(disabled=disabled) self.window['_SET_CYCLE_'].update(disabled=disabled) self.window['_SET_ACTION_'].update(disabled=disabled) def _update_plan_button_status(self, plan_status): if plan_status == PlanStatus.NONE: self.window['_RUN_PLAN_'].update(text='Run', disabled=True) self.window['_LOAD_PLAN_'].update(disabled=False) self.window['_PAUSE_PLAN_'].update(disabled=True) self.window['_ABORT_PLAN_'].update(disabled=True) self._udpate_cycle_button_status(disabled=True) elif plan_status == PlanStatus.LOADED or plan_status == PlanStatus.DONE: self.window['_RUN_PLAN_'].update(text='Run', disabled=False) self.window['_LOAD_PLAN_'].update(disabled=False) self.window['_PAUSE_PLAN_'].update(disabled=True) self.window['_ABORT_PLAN_'].update(disabled=True) self._udpate_cycle_button_status(disabled=False) elif plan_status == PlanStatus.RUNNING: self.window['_RUN_PLAN_'].update(text='Run', disabled=True) self.window['_LOAD_PLAN_'].update(disabled=True) self.window['_PAUSE_PLAN_'].update(disabled=False) self.window['_ABORT_PLAN_'].update(disabled=False) self._udpate_cycle_button_status(disabled=True) elif plan_status == PlanStatus.PAUSED: self.window['_RUN_PLAN_'].update(text='Resume', disabled=False) self.window['_LOAD_PLAN_'].update(disabled=True) self.window['_PAUSE_PLAN_'].update(disabled=True) self.window['_ABORT_PLAN_'].update(disabled=False) self._udpate_cycle_button_status(disabled=False) elif plan_status == PlanStatus.ABORTED: self.window['_RUN_PLAN_'].update(text='Restart', disabled=False) self.window['_LOAD_PLAN_'].update(disabled=False) self.window['_PAUSE_PLAN_'].update(disabled=True) self.window['_ABORT_PLAN_'].update(disabled=True) self._udpate_cycle_button_status(disabled=False) else: logging.warning("Unhandled plan status for button state: {}".format(plan_status)) def _update_plan_panel(self, status_dict): status_str = 
"Plan is not running"
    color_str = STATUS_PANEL_BAD_COLOR
    if status_dict:
        try:
            self._update_plan_button_status(status_dict['status'])
            status_str = ""
            # PlanStatus renders as e.g. "PlanStatus.RUNNING"; keep only the name part.
            plan_state_str = "{}".format(status_dict['status']).split('.')[1]
            status_str += "Plan status: {}\n".format(plan_state_str)
            status_str += "Plan filename: {}\n".format(status_dict['filename'])
            status_str += "Next cycle num: {}\n".format(status_dict['next_cycle_number'])
            status_str += "Idle bots: {}\n".format(status_dict['idle_bots'])
            # One sub-block per robot participating in the plan.
            for id, data in status_dict['robots'].items():
                needs_restart_str = ''
                if data['needs_restart']:
                    needs_restart_str = "(Needs Restart)"
                status_str += "{}{}:\n".format(id, needs_restart_str)
                status_str += " Cycle id: {}\n".format(data["cycle_id"])
                status_str += " Action id: {}\n".format(data["action_id"])
                status_str += " Action name: {}\n".format(data["action_name"])
                status_str += " Tile Coordinate: {}\n".format(data["tile_coordinate"])
                status_str += " Vision offset: {}\n".format(data["vision_offset"])
            # Set panel coloring based on state
            if plan_state_str == "PAUSED" or plan_state_str == "ABORTED":
                color_str = STATUS_PANEL_BAD_COLOR
            elif plan_state_str != "NONE":
                color_str = STATUS_PANEL_OK_COLOR
        except Exception as e:
            # Malformed/partial status dict: show it raw rather than crash the GUI.
            status_str = "Bad dict: " + str(status_dict)
    self.window['_PLAN_STATUS_'].update(status_str, background_color=color_str)

def _update_robot_panel(self, robot_id, status_dict):
    """Refresh one robot's status panel (text + ok/bad color) and its
    graph visualization (robot outline, last marvelmind pose, move target).

    status_dict is the robot's latest telemetry dict; a falsy value leaves
    the panel red with a 'Cannot get ... status' message.
    """
    status_str = "Cannot get {} status".format(robot_id)
    color_str = STATUS_PANEL_BAD_COLOR
    if status_dict:
        try:
            # Angles arrive in radians and are shown/drawn in degrees.
            robot_pose = [status_dict['pos_x'], status_dict['pos_y'], math.degrees(status_dict['pos_a'])]
            last_mm_pose = [status_dict['last_mm_x'],status_dict['last_mm_y'], math.degrees(status_dict['last_mm_a']), status_dict['last_mm_used']]
            status_str = ""
            status_str += "Position: [{0:.3f} m, {1:.3f} m, {2:.2f} deg]\n".format(robot_pose[0], robot_pose[1], robot_pose[2])
            status_str += "Last MM Pose: [{:.3f} m, {:.3f} m, {:.2f} deg]\n".format( last_mm_pose[0], last_mm_pose[1], last_mm_pose[2])
            status_str += "Velocity: [{0:.3f} m/s, {1:.3f} m/s, {2:.2f} deg/s]\n".format(status_dict['vel_x'],status_dict['vel_y'], math.degrees(status_dict['vel_a']))
            status_str += "Vision Pose : [{0:.3f} m, {1:.3f} m, {2:.2f} deg]\n".format(status_dict['vision_x'],status_dict['vision_y'], math.degrees(status_dict['vision_a']))
            status_str += "Camera Raw Pose : [{0:.3f} m, {1:.3f} m, {2:.2f} deg]\n".format(status_dict['cam_pose_x'],status_dict['cam_pose_y'], math.degrees(status_dict['cam_pose_a']))
            status_str += "Cam Status: Both-{} | Side-{} | Rear-{}\n".format(status_dict["cam_both_ok"],status_dict["cam_side_ok"], status_dict["cam_rear_ok"])
            status_str += "Raw Camera px: Side [{:d}, {:d}] Rear: [{:d}, {:d}]\n".format(
                int(status_dict['cam_side_u']),int(status_dict['cam_side_v']), int(status_dict['cam_rear_u']), int(status_dict['cam_rear_v']))
            status_str += "Localization total confidence: {:.1f}%\n".format(status_dict['localization_total_confidence']*100)
            status_str += " Axes confidence: [{:.1f}%, {:.1f}%, {:.1f}%]\n".format( status_dict['localization_confidence_x']*100,status_dict['localization_confidence_y']*100,status_dict['localization_confidence_a']*100)
            status_str += "Localization position uncertainty: {:.2f}\n".format(status_dict['last_position_uncertainty'])
            status_str += "Controller timing: {} ms\n".format(status_dict['controller_loop_ms'])
            status_str += "Position timing: {} ms\n".format(status_dict['position_loop_ms'])
            status_str += "Camera timing: {} ms\n".format(status_dict['cam_loop_ms'])
            status_str += "Current action: {}\n".format(status_dict['current_action'].split('.')[-1])
            status_str += "Motion in progress: {}\n".format(status_dict["in_progress"])
            status_str += "Has error: {}\n".format(status_dict["error_status"])
            status_str += "Counter: {}\n".format(status_dict['counter'])
            # Also update the visualization position
            self._update_robot_viz_position(robot_id, robot_pose)
            self._update_last_mm_pose(robot_id, last_mm_pose)
            # If there is target position data populated, draw the target too
            if 'current_move_data' in status_dict.keys():
                self._update_target_viz_position(robot_id, robot_pose, status_dict['current_move_data'])
            color_str = STATUS_PANEL_OK_COLOR
            if status_dict["error_status"]:
                color_str = STATUS_PANEL_BAD_COLOR
        except Exception as e:
            # "offline" placeholder dicts are shown verbatim; anything else is
            # reported as a bad dict so the GUI keeps running.
            if "offline" in str(status_dict):
                status_str = str(status_dict)
            else:
                status_str = "Bad dict: " + str(status_dict)
    self.window['_{}_STATUS_'.format(robot_id.upper())].update(status_str, background_color=color_str)

def _update_robot_viz_position(self, robot_id, robot_pose):
    """Erase this robot's previous outline from the graph and redraw it at
    robot_pose ([x m, y m, angle deg])."""
    if robot_id in self.viz_figs.keys():
        for f in self.viz_figs[robot_id]:
            self.window['_GRAPH_'].DeleteFigure(f)
    self.viz_figs[robot_id] = self._draw_robot(robot_pose, use_target_color=False)

def _update_target_viz_position(self, robot_id, robot_pose, target_pose):
    """Redraw the robot's move target: a target-colored outline at
    target_pose plus a yellow line from the robot to the target."""
    viz_key = "{}_target".format(robot_id)
    if viz_key in self.viz_figs.keys():
        for f in self.viz_figs[viz_key]:
            self.window['_GRAPH_'].DeleteFigure(f)
    if robot_pose and target_pose:
        target_line = self.window['_GRAPH_'].draw_line(point_from=robot_pose[:2], point_to=target_pose[:2], color='yellow', width=2)
        self.viz_figs[viz_key] = self._draw_robot(target_pose, use_target_color=True)
        self.viz_figs[viz_key].append(target_line)

def _update_last_mm_pose(self, robot_id, last_mm_pose):
    """Draw an arrow at the last marvelmind pose [x, y, angle deg, used].
    Green when the 'used' flag is set, red otherwise.
    (Body continues on the next chunk line.)"""
    viz_key = "{}_last_mm".format(robot_id)
    if viz_key in self.viz_figs.keys():
        for f in self.viz_figs[viz_key]:
            self.window['_GRAPH_'].DeleteFigure(f)
    used = last_mm_pose[3]
    if used:
        color = 'green'
        thickness = 2
    else:
        color = 'red'
        thickness = 2
    global_pose_2d = np.array(last_mm_pose[:2])
    angle = last_mm_pose[2]
    viz_handles = []
    # Arrow geometry in the pose's local frame.
    direction_pt = np.array([self.config.robot_direction_indicator_length, 0])
    direction_arrow_x = self.config.robot_direction_indicator_arrow_length*math.cos(math.radians(self.config.robot_direction_indicator_arrow_angle))
    direction_arrow_y = self.config.robot_direction_indicator_arrow_length*math.sin(math.radians(self.config.robot_direction_indicator_arrow_angle))
direction_arrow_left = direction_pt + np.array([-direction_arrow_x, direction_arrow_y])
    direction_arrow_right = direction_pt + np.array([-direction_arrow_x, -direction_arrow_y])
    # Arrow shaft from the pose to the indicator tip, then the two barbs.
    global_direction_pt = Utils.TransformPos(direction_pt, global_pose_2d, angle)
    viz_handles.append(self.window['_GRAPH_'].draw_line(point_from=list(global_pose_2d), point_to=list(global_direction_pt), color=color, width=thickness))
    global_direction_arrow_left = Utils.TransformPos(direction_arrow_left, global_pose_2d, angle)
    viz_handles.append(self.window['_GRAPH_'].draw_line(point_from=list(global_direction_pt), point_to=list(global_direction_arrow_left), color=color, width=thickness))
    global_direction_arrow_right = Utils.TransformPos(direction_arrow_right, global_pose_2d, angle)
    viz_handles.append(self.window['_GRAPH_'].draw_line(point_from=list(global_direction_pt), point_to=list(global_direction_arrow_right), color=color, width=thickness))
    self.viz_figs[viz_key] = viz_handles

def _draw_robot(self, robot_pose, use_target_color):
    """Draw the robot (chassis square, tile outline, heading arrow) at
    robot_pose and return the list of graph figure handles.

    use_target_color switches to the green/tomato palette used for move
    targets instead of the black/red palette used for the live robot.
    """
    robot_line_color = 'black'
    robot_line_thickness = 2
    direction_line_color = 'red'
    direction_line_thickness = 1
    if use_target_color:
        robot_line_color = 'green'
        robot_line_thickness = 2
        direction_line_color = 'tomato'
        direction_line_thickness = 1
    robot_global_pos_2d = np.array([robot_pose[0],robot_pose[1]])
    robot_angle = robot_pose[2]
    robot_fig_handles = []
    # Robot chassis points in robot frame
    chassis_fl = np.array([self.config.robot_rotation_center_offset, self.config.robot_chassis_size/2.0])
    chassis_fr = np.array([self.config.robot_rotation_center_offset, -self.config.robot_chassis_size/2.0])
    chassis_bl = np.array([self.config.robot_rotation_center_offset - self.config.robot_chassis_size, self.config.robot_chassis_size/2.0])
    chassis_br = np.array([self.config.robot_rotation_center_offset - self.config.robot_chassis_size, -self.config.robot_chassis_size/2.0])
    # Tile points in robot frame
    tile_bl = -self.config.tile_to_robot_offset
    tile_br = tile_bl + np.array([0, -self.config.tile_size_width_meters])
    tile_fl = tile_bl + np.array([self.config.tile_size_height_meters, 0])
    tile_fr = tile_br + np.array([self.config.tile_size_height_meters, 0])
    tile_front_center = (tile_fl + tile_fr) / 2.0
    # Robot direction indicator (scaled down so it nests inside the outline)
    scale_factor = 0.7
    direction_pt = np.array([scale_factor*self.config.robot_direction_indicator_length, 0])
    direction_arrow_x = scale_factor*self.config.robot_direction_indicator_arrow_length*math.cos(math.radians(self.config.robot_direction_indicator_arrow_angle))
    direction_arrow_y = scale_factor*self.config.robot_direction_indicator_arrow_length*math.sin(math.radians(self.config.robot_direction_indicator_arrow_angle))
    direction_arrow_left = direction_pt + np.array([-direction_arrow_x, direction_arrow_y])
    direction_arrow_right = direction_pt + np.array([-direction_arrow_x, -direction_arrow_y])
    # Collect, transform, and draw chassis points
    chassis_points_robot_frame = [chassis_fl, chassis_fr, chassis_br, chassis_bl] # order matters for drawing
    chassis_points_global_frame = []
    for pt in chassis_points_robot_frame:
        global_pt = Utils.TransformPos(pt, robot_global_pos_2d, robot_angle)
        chassis_points_global_frame.append(global_pt)
    for i in range(4):
        # Close the polygon by wrapping the last segment back to point 0.
        start_pt = list(chassis_points_global_frame[i])
        end_pt = list(chassis_points_global_frame[(i+1)%4])
        robot_fig_handles.append(self.window['_GRAPH_'].draw_line(point_from=start_pt, point_to=end_pt, color=robot_line_color, width = robot_line_thickness))
    # Collect, transform, and draw tile points
    tile_points_robot_frame = [tile_bl, tile_br, tile_fr, tile_fl] # order matters for drawing
    tile_points_global_frame = []
    for pt in tile_points_robot_frame:
        global_pt = Utils.TransformPos(pt, robot_global_pos_2d, robot_angle)
        tile_points_global_frame.append(global_pt)
    for i in range(4):
        start_pt = list(tile_points_global_frame[i])
        end_pt = list(tile_points_global_frame[(i+1)%4])
        robot_fig_handles.append(self.window['_GRAPH_'].draw_line(point_from=start_pt, point_to=end_pt, color=robot_line_color, width = robot_line_thickness))
    # Draw angle indicator line
    angle_root_pt = Utils.TransformPos(tile_front_center, robot_global_pos_2d, robot_angle)
    global_direction_pt = Utils.TransformPos(direction_pt, angle_root_pt, robot_angle)
    robot_fig_handles.append(self.window['_GRAPH_'].draw_line(point_from=list(angle_root_pt), point_to=list(global_direction_pt), color=direction_line_color, width=direction_line_thickness))
    global_direction_arrow_left = Utils.TransformPos(direction_arrow_left, angle_root_pt, robot_angle)
    robot_fig_handles.append(self.window['_GRAPH_'].draw_line(point_from=list(global_direction_pt), point_to=list(global_direction_arrow_left), color=direction_line_color, width=direction_line_thickness))
    global_direction_arrow_right = Utils.TransformPos(direction_arrow_right, angle_root_pt, robot_angle)
    robot_fig_handles.append(self.window['_GRAPH_'].draw_line(point_from=list(global_direction_pt), point_to=list(global_direction_arrow_right), color=direction_line_color, width=direction_line_thickness))
    return robot_fig_handles

def _draw_environment(self):
    """Draw the static scene: robot boundary, domino field, base station,
    and labeled X/Y axes with tick marks."""
    # Robot boundaries
    self.viz_figs["boundaires"] = self.window['_GRAPH_'].draw_rectangle(self.config.robot_boundaries[0], self.config.robot_boundaries[1], line_color='red')
    # Domino field boundaries
    bottom_left = self.config.domino_field_origin
    rect_width_height = Utils.TransformPos(np.array([self.config.field_width, self.config.field_height]), [0,0], self.config.domino_field_angle)
    top_left = (bottom_left[0], bottom_left[1]+rect_width_height[1])
    bottom_right = (bottom_left[0] + rect_width_height[0], bottom_left[1])
    self.viz_figs["field"] = self.window['_GRAPH_'].draw_rectangle(top_left, bottom_right, line_color='green', line_width=2)
    # Base station
    base_station_top_left = (self.config.base_station_boundaries[0][0], self.config.base_station_boundaries[1][1])
    base_station_bottom_right = (self.config.base_station_boundaries[1][0], self.config.base_station_boundaries[0][1])
    self.viz_figs["base"] = self.window['_GRAPH_'].draw_rectangle(base_station_top_left, base_station_bottom_right, line_color='blue')
    # X axis
    left_side = (self.config.graph_bottom_left[0], 0)
    right_side = (self.config.graph_top_right[0], 0)
    self.viz_figs["xaxis"] = self.window['_GRAPH_'].draw_line(left_side, right_side, color="black", width=2)
    x_text_location = (right_side[0] - self.config.axes_label_offset, right_side[1] + self.config.axes_label_offset)
    self.viz_figs["xaxis_label"] = self.window['_GRAPH_'].draw_text("X", x_text_location)
    # Ticks in both directions from 0; unique() removes the duplicated 0 tick.
    x_ticks = [i for i in range(0, int(right_side[0]), self.config.axes_tick_spacing)] + [i for i in range(0, int(left_side[0]), -self.config.axes_tick_spacing)]
    x_ticks = np.sort(np.unique(np.array(x_ticks)))
    for val in x_ticks:
        bottom = (val, -self.config.axes_tick_size/2)
        top = (val, self.config.axes_tick_size/2)
        self.viz_figs["xtick_{}".format(val)] = self.window['_GRAPH_'].draw_line(bottom, top, color="black")
        label_location = (bottom[0] + self.config.axes_label_offset, bottom[1])
        self.viz_figs["xtick_label_{}".format(val)] = self.window['_GRAPH_'].draw_text("{}".format(val), label_location)
    # Y axis
    bottom_side = (0, self.config.graph_bottom_left[1])
    top_side = (0, self.config.graph_top_right[1])
    self.viz_figs["yaxis"] = self.window['_GRAPH_'].draw_line(bottom_side, top_side, color="black", width=2)
    y_text_location = (top_side[0] + self.config.axes_label_offset, top_side[1] - self.config.axes_label_offset)
    self.viz_figs["yaxis_label"] = self.window['_GRAPH_'].draw_text("Y", y_text_location)
    y_ticks = [i for i in range(0, int(top_side[1]), self.config.axes_tick_spacing)] + [i for i in range(0, int(bottom_side[1]), -self.config.axes_tick_spacing)]
    y_ticks = np.sort(np.unique(np.array(y_ticks)))
    for val in y_ticks:
        left = (-self.config.axes_tick_size/2, val)
        right = (self.config.axes_tick_size/2, val)
        self.viz_figs["ytick_{}".format(val)] = self.window['_GRAPH_'].draw_line(left, right, color="black")
        label_location = (right[0], right[1]+self.config.axes_label_offset)
        self.viz_figs["ytick_label_{}".format(val)] = self.window['_GRAPH_'].draw_text("{}".format(val), label_location)

def update_plan_display(self, plan_info):
    """Draw the plan overlay: the field tile grid (drawn once), the active
    subsection outline, and a marker on the current cycle's tile.

    plan_info is (plan, plan_status, cycle_number), or None when no plan is
    loaded. (Body continues on the next chunk line.)
    """
    if plan_info is None:
        return
    plan = plan_info[0]
    plan_status = plan_info[1]
    cycle_number = plan_info[2]
    if not self.drawn_plan_grid:
        rect_width_height = Utils.TransformPos(np.array([self.config.field_width, self.config.field_height]), [0,0], self.config.domino_field_angle)
        bottom_left = self.config.domino_field_origin
        bottom_right = (bottom_left[0], bottom_left[1]+rect_width_height[1])
        top_left = (bottom_left[0] + rect_width_height[0], bottom_left[1])
        # Grid lines between tiles, rotated into the field frame.
        for i in range(1, self.config.num_tiles_width):
            dx = Utils.TransformPos(np.array([self.config.tile_size_width_meters, 0]), [0,0], self.config.domino_field_angle)
            p1 = list(bottom_left + dx * i)
            p2 = list(top_left + dx * i)
            self.viz_figs["plan_vert_{}".format(i)] = self.window['_GRAPH_'].draw_line(p1, p2, color = "grey17")
        for i in range(1, self.config.num_tiles_height):
            dx = Utils.TransformPos(np.array([0, self.config.tile_size_height_meters]), [0,0], self.config.domino_field_angle)
            p1 = list(bottom_left + dx * i)
            p2 = list(bottom_right + dx * i)
            self.viz_figs["plan_hz_{}".format(i)] = self.window['_GRAPH_'].draw_line(p1, p2, color = "grey17")
        if type(plan) is SubsectionPlan:
            # Outline just the subsection of the field covered by this plan.
            origin = self.config.domino_field_origin
            bottom_left = Utils.TransformPos(np.array(plan.field.tiles[-1].getPlacementPositionInMeters()), origin, self.config.domino_field_angle)
            top_right = Utils.TransformPos(np.array(plan.field.tiles[0].getPlacementPositionInMeters())+ np.array((self.config.tile_size_width_meters,self.config.tile_size_height_meters)), origin, self.config.domino_field_angle)
            self.viz_figs["subfield"] = self.window['_GRAPH_'].draw_rectangle(bottom_left, top_right, line_color='green', line_width=3)
        self.drawn_plan_grid = True
    if cycle_number != self.last_cycle_number_drawn:
        tile_color = "red"
        if plan_status in [PlanStatus.RUNNING,
PlanStatus.PAUSED]: tile_color = "blue" if cycle_number <= len(plan.field.tiles): if cycle_number < 0: cycle_number = 0 if "plan_marker" in self.viz_figs: self.window['_GRAPH_'].DeleteFigure(self.viz_figs["plan_marker"]) tile = plan.field.tiles[cycle_number] location_field_frame = np.array(tile.getPlacementPositionInMeters()) + np.array([self.config.tile_size_width_meters, self.config.tile_size_height_meters])/2.0 location_global_frame = list(Utils.TransformPos(location_field_frame, self.config.domino_field_origin, self.config.domino_field_angle)) self.viz_figs["plan_marker"] = self.window['_GRAPH_'].draw_circle(location_global_frame, 0.25, fill_color=tile_color) self.last_cycle_number_drawn = cycle_number class Master: def __init__(self, cfg, gui_handle): self.cfg = cfg self.cmd_gui = gui_handle self.keep_mm_running = False logging.info("Initializing Master") self.runtime_manager = RuntimeManager(self.cfg) self.runtime_manager.initialize() def loop(self): ready_to_exit = False while not ready_to_exit: try: self.runtime_manager.update() ready_to_exit = self.update_gui_and_handle_input() except Exception as e: logging.warning("Unhandled exception {}".format(str(e))) if self.runtime_manager.get_plan_status() is PlanStatus.RUNNING: self.runtime_manager.set_plan_status(PlanStatus.PAUSED) # Clean up whenever loop exits self.runtime_manager.shutdown(self.keep_mm_running) self.cmd_gui.close() def update_gui_and_handle_input(self): # Handle any input from gui event_type, event_data = self.cmd_gui.update() if event_type == "Exit": return True if event_type == "ExitMM": self.keep_mm_running = True return True if event_type == "Run": if self.runtime_manager.get_plan_status() == PlanStatus.PAUSED: logging.info("PLAN RESUMED") self.runtime_manager.set_plan_status(PlanStatus.RUNNING) if event_type == "Load": if event_data is not '': self.runtime_manager.load_plan(event_data) if event_type == "Pause": logging.info("PLAN PAUSED") self.runtime_manager.estop() 
self.runtime_manager.set_plan_status(PlanStatus.PAUSED) if event_type == "Abort": logging.warning("PLAN ABORTED") self.runtime_manager.estop() self.runtime_manager.set_plan_status(PlanStatus.ABORTED) if event_type == "Action": self.runtime_manager.run_manual_action(event_data) if event_type == "_INC_CYCLE_": self.runtime_manager.increment_robot_cycle("robot1") if event_type == "_DEC_CYCLE_": self.runtime_manager.decrement_robot_cycle("robot1") if event_type == "_INC_ACTION_": self.runtime_manager.increment_robot_action("robot1") if event_type == "_DEC_ACTION_": self.runtime_manager.decrement_robot_action("robot1") if event_type == "_SET_CYCLE_": self.runtime_manager.set_cycle("robot1", int(event_data[0])) if event_type == "_SET_ACTION_": self.runtime_manager.set_action("robot1", int(event_data[1])) if event_type == "ESTOP": self.runtime_manager.estop() if self.runtime_manager.get_plan_status() == PlanStatus.RUNNING: self.runtime_manager.set_plan_status(PlanStatus.PAUSED) logging.warning("Pausing plan due to ESTOP event") # Get metrics and update the displayed info metrics = self.runtime_manager.get_all_metrics() self.cmd_gui.update_status_panels(metrics) self.cmd_gui.update_plan_display(self.runtime_manager.get_plan_info()) return False def configure_logging(path): rootLogger = logging.getLogger() rootLogger.setLevel(logging.INFO) if not os.path.isdir(path): os.mkdir(path) fileHandler = logging.FileHandler(os.path.join(path,"master.log"), 'w+') fileFormatter = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s") fileHandler.setFormatter(fileFormatter) rootLogger.addHandler(fileHandler) consoleHandler = logging.StreamHandler() consoleFormatter = logging.Formatter("%(message)s") consoleHandler.setFormatter(consoleFormatter) rootLogger.addHandler(consoleHandler) if __name__ == '__main__': # Setup config and gui cfg = config.Config() gui = CmdGui(cfg) # Need to setup gui before logging to ensure that output pane captures logs correctly 
configure_logging(cfg.log_folder) # Startup master and loop forever m = Master(cfg, gui) m.loop() ================================================ FILE: src/master/README.md ================================================ # **Warning! This is probably out of date and might be inaccurate. Use at your own risk.** # Master This is the top level 'Master' software for controlling the Domino Robot System. It is the central hub for communication and control of all other robots, provides a GUI for visualization and debugging, and handles executing the top level plan to set up all the dominos. There are two programs that can be run within the master software: - `MasterMain.py` is the main program that runs the GUI and controls everything and is the one the rest of this README will primarily focus on. - `FieldPlanner.py` will generate a plan using an input image, available domino colors, and many other configuration parameters (all defined in `config.py`). This plan can then be saved to a file (in the `src/master/plans/` folder) and loaded at a later point for execution. You probably shouldn't ever need to run this. # Table of Contents - [**Warning! This is probably out of date and might be inaccurate. Use at your own risk.**](#warning-this-is-probably-out-of-date-and-might-be-inaccurate-use-at-your-own-risk) - [Master](#master) - [Table of Contents](#table-of-contents) - [Installation](#installation) - [Usage](#usage) - [Robot Setup](#robot-setup) - [Running MasterMain](#running-mastermain) - [Using the MasterMain GUI](#using-the-mastermain-gui) - [Window Overview](#window-overview) - [Running Commands](#running-commands) - [Summary of available commands](#summary-of-available-commands) # Installation 1. Ensure you have a version of Python 3 installed by installing from https://www.python.org/. This project was developed using Python 3.7, but any recent Python version should be fine. I've only used it on Windows 10 so the instructions here are for Windows. 
It may or may not work on other operating systems. 2. Open a new terminal window and navigate to the folder where you downloaded the DominoRobot repository ``` cd path\to\DominoRobot ``` 3. Create a python virtual environment in the DominoRobot folder ``` python -m venv venv ``` 4. Activate the virtual environment (your terminal should show `(venv)` at the front of the line to indicate it is active): ``` .\venv\Scripts\activate ``` 5. Install required python packages in the virtual environment: ``` pip install -r src\master\requirements.txt ``` # Usage ## Robot Setup Any robot you wish to connect to and control must be setup and running before the master program can do anything. 1. Ensure you are connect to the DominoNet wifi. Talk to Alex if you need help getting setup on the network. 2. Power on the robot by plugging in both sets of batteries and turning on all power switches. You can leave the hardware ESTOP button pressed for now while finishing the setup, but just make sure you release it before trying to send movement commands or it won't go anywhere! 3. Connect to the robot over SSH. - If this is your first time using SSH, you may need to install an SSH program like [PuTTY](https://www.putty.org/) or setup [Windows SSH](https://www.pugetsystems.com/labs/hpc/How-To-Use-SSH-Client-and-Server-on-Windows-10-1470/) - Figure out the IP address of the robot you are interested in connecting to (If this list becomes out of date, the up to date info will be in the `ip_map` variable in `config.py`) | Target | IP | |---------------|--------------| | robot1 | 192.168.1.5 | - SSH to `pi@` and enter the password to connect 4. Once you are connected to the robot move to the DominoRobot directory: ``` cd DominoRobot ``` 5. Ensure that the robot has the latest version of the correct software ``` git checkout master && git pull ``` 6. Recompile the robot software (this could take a minute or two if it has to compile a lot of stuff) ``` make ``` 7. 
Start the robot software ``` run_robot ``` 8. This should start printing out a bunch of info on the screen and the robot should be ready to connect to the master (See the next section on [Running MasterMain](#running-mastermain) ). Note that you must keep this terminal window running as closing it will stop the robot software. If at any point you want to stop the software, just use `Ctrl+C` to stop it. You can then restart it with `run_robot` again. 9. **IMPORTANT**: When you are done and want to shut everything down, make sure you shut down the raspberry pi by running the shutdown command BEFORE turning off the power. If you just turn off the power without shutting down, there is a possibility that the SD card can get corrupted. ``` sudo shutdown -h now ``` ## Running MasterMain 1. Open a new terminal window and navigate to the folder where you downloaded the DominoRobot repository (if you aren't there already from the installation step) ``` cd path\to\DominoRobot ``` 2. Ensure that your virtual environment is active (your terminal should show `(venv)` at the front of the line to indicate it is active): ``` .\venv\Scripts\activate ``` 3. Run the program: ``` python src\master\MasterMain.py ``` 4. It may take a second for the window to show up, but as long as a window pops up and the commandline doesn't spit out a bunch of errors, you should be good to go. You can stop the program at any time with the X in the top right of the window, or using `Ctrl+C` on the commandline. 5. If you need to stop and re-run the program, you only need to re-run step 3 above as long as you don't close the terminal window (In most terminals you can just push the up arrow to get back the previous command you ran). ## Using the MasterMain GUI ### Window Overview The window will look something like this when you launch it: ![GUI window](readme_images/gui_start.png) On the left are the status panels for each component connected to the system. 
They are all red right now because nothing is connected and will turn green and display information when that component is active, like this: ![Active robot panel](readme_images/robot_status_green.png) In the middle is a display that shows where the robot and other objects are in the scene. As components are connected and move around, this will update to show their location. On the bottom of the middle section are the control buttons which is your main way of interacting with the system: ![Control buttons](readme_images/control_panel.png) On the right side of the window, the program prints out information about what it is doing. This information is also mirrored to the console and logged into `DominRobot\log\master.log` (which is overwritten on each new run) ### Running Commands In order to send a command to a robot a few prerequisites must be met. 1. The robot has to be connected to the master (i.e. the box should be green and the counter value should be changing). If you followed the [setup](##robot-setup) steps above, this probably should be the case as soon as you run `MasterMain` on your computer. If not, you'll have to debug why (check that your SSH connection is okay and that the robot program hasn't crashed) 2. The robot is programmed to only run a single command at a time, so it will ignore any commands that are sent while another one is active, so make sure a previous command isn't still running. If there is no motion from the robot/lifter and the "Motion in progress" value is False, it should let you run a command. To actually run the command, you need 3 pieces of information. 1. The `Target` field is which robot/device to send the command to. Make sure you have the right target selected from the drop down menu. ![Target menu](readme_images/control_target_select.png) 2. The `Action` field is which action you would like to run on the target. See below for a summary of what the various actions do. ![Action menu](readme_images/control_action_type.png) 3. 
Some actions require some additional data to be specified. If additional data is needed for an action, you must specify it in the `Data` field by typing it in. See the table below for which actions require additional data. If an action does not require additional data, any information (blank or otherwise) in the data field is ignored.

Once all the prerequisites are met and the required fields are filled in, you may use the `Enter` key or the `Send Command` button to run the command on the target. If everything is successful, you will see values change in the status panel of the target and text printed out in the pane on the right.

If something goes wrong, you can use the big red ESTOP button on the command window to send an emergency stop signal to all robots. Note that this is a software ESTOP and requires the master to be functional enough to send the ESTOP signal and the target functional enough to receive and act on the ESTOP signal. There are certain failure modes where this will not be the case and this button will not function (i.e. the master software crashes and you can't push the button). In that case, use the physical ESTOP buttons on the robots to cut power.

### Summary of available commands

| Name | Valid targets | Summary | Additional data|
|-------|---------------|---------|----------------|
|MOVE_COARSE | robot | Move the robot coarsely to an absolute position. | Comma separated x,y,a pose (Example: `1.0,2,0` would tell the robot to move to x=1, y=2, a=0). Units are meters, meters, radians. |
|MOVE_FINE | robot | Move the robot very precisely to an absolute position.| Comma separated x,y,a pose (Example: `0.1,-0.5,3.14` would tell the robot to move to x=0.1, y=-0.5, a=3.14). Units are meters, meters, radians.|
|MOVE_REL | robot |Move the robot coarsely to a position relative to its current position.|Comma separated x,y,a pose (Example: `1.0,2,0` would tell the robot to move to x=x_current+1, y=y_current+2, a=a_current+0).
Units are meters, meters, radians. |
|MOVE_CONST_VEL |robot | Move the robot coarsely at a constant velocity for a specified time.|Comma separated x,y,a,t velocity and time (Example: `0.1,0.2,0,4` would tell the robot to move at a velocity of x_vel=0.1, y_vel=0.2, a_vel=0 for t=4 seconds). Units are meters/sec, meters/sec, radians/sec, seconds. |
|NET | all |Check if the robot is connected to the network. | None |
|LOAD| robot | Run the tray loading sequence on the robot. *Note that the tray will pause in the "ready to load" position until a LOAD_COMPLETE action is received*.| None |
|PLACE| robot | Run the tray placement sequence on the robot.| None|
|TRAY_INIT | robot | Run the tray initialization sequence on the robot. | None|
|LOAD_COMPLETE|robot| Indicate to the robot that the base station has completed the loading process. | None |
|ESTOP|all| Tell the target to immediately stop any current action/motion. | None |
|WAIT_FOR_LOCALIZATION| robot | Tell the target to wait for the localization confidence to reach a good level.
# ================================================
# FILE: src/master/RobotClient.py
# ================================================
# This is a basic TCP client for a robot that wraps up sending
# and recieving various commands

import socket
import select
import json
import time
import logging
import copy

PORT = 8123
NET_TIMEOUT = 0.1  # seconds

# Framing characters that delimit one message on the wire.
START_CHAR = "<"
END_CHAR = ">"


class TcpClient:
    """Minimal non-blocking TCP client that frames each message with
    START_CHAR/END_CHAR."""

    def __init__(self, ip, port, timeout):
        # create_connection raises OSError on failure, so no post-hoc check is
        # needed. (BUGFIX: the original checked `if not socket:`, which tested
        # the socket *module* — always truthy — and returned None from
        # __init__, which does nothing.)
        self.socket = socket.create_connection((ip, port), timeout)
        self.socket.setblocking(False)

    def send(self, msg, print_debug=True):
        """Frame and transmit msg, retrying partial sends.

        Raises RuntimeError if the connection appears broken.
        """
        if print_debug:
            logging.info("TX: " + msg)
        msg = START_CHAR + msg + END_CHAR
        msg_bytes = msg.encode()
        totalsent = 0
        # BUGFIX: compare against the encoded *byte* length. The original used
        # len(msg) (character count), which under-counts for non-ASCII text.
        while totalsent < len(msg_bytes):
            try:
                sent = self.socket.send(msg_bytes[totalsent:])
            except OSError:
                # Treat a socket-level failure like a zero-byte send; the
                # check below converts it into a RuntimeError, matching the
                # original behavior but without a bare except.
                sent = 0
            if sent == 0:
                raise RuntimeError("socket connection broken")
            totalsent = totalsent + sent

    def recieve(self, timeout=0.1, print_debug=True):
        """Poll the socket for up to `timeout` seconds and return one framed
        message (framing characters stripped), or "" if no complete message
        arrived in time.

        Raises RuntimeError if the peer closed the connection.
        """
        new_msg = ""
        new_msg_ready = False
        start_time = time.time()
        while not new_msg_ready and time.time() - start_time < timeout:
            # The socket is non-blocking, so socket.error here usually just
            # means "nothing available yet" — keep polling until timeout.
            try:
                data = self.socket.recv(2048)
            except socket.error:
                continue
            if data == b'':
                raise RuntimeError("socket connection broken")
            # Decode data and assemble the framed message across chunks.
            new_str = data.decode(encoding='UTF-8', errors='strict')
            start_idx = new_str.find(START_CHAR)
            end_idx = new_str.find(END_CHAR)
            if start_idx != -1 and end_idx != -1:
                # Whole message contained in this chunk.
                new_msg = new_str[start_idx + 1:end_idx]
                new_msg_ready = True
            elif start_idx != -1:
                # Message started; accumulate until END_CHAR shows up.
                # (Removed the original's write to msg_rcv_in_progress — the
                # variable was never read.)
                new_msg += new_str[start_idx + 1:]
            elif end_idx != -1:
                new_msg += new_str[:end_idx]
                new_msg_ready = True
            else:
                new_msg += new_str
        if print_debug and new_msg:
            logging.info("RX: " + new_msg)
        if not new_msg_ready:
            # logging.info("Socket timeout")
            new_msg = ""
        return new_msg


class ClientBase:
    """Shared request/response plumbing for robot and base-station clients."""

    def __init__(self, ip):
        self.client = TcpClient(ip, PORT, NET_TIMEOUT)
        # Queue is used to help handle out of order messages recieved
        self.incoming_queue = []

    def wait_for_server_response(self, expected_msg_type, timeout=0.1, print_debug=True):
        """
        Waits for specified time to get a reply from the server
        Must set expected_msg_type to a string and this function will return
        the first message recieved matching that type. Other messages are added
        to the queue and checked the next time this function is called.
        If no matching message is recieved before timeout, this function returns None
        """
        # First, check the existing queue to see if the message we are looking
        # for is there. Since the type field existence is checked before
        # insertion into the queue, it is safe to index it directly.
        for ix in range(len(self.incoming_queue)):
            if self.incoming_queue[ix]['type'] == expected_msg_type:
                msg_copy = copy.copy(self.incoming_queue[ix])
                del self.incoming_queue[ix]
                return msg_copy

        # Nothing queued matched; try to receive fresh messages until timeout.
        start_time = time.time()
        while time.time() - start_time < timeout:
            try:
                incoming_msg = self.client.recieve(timeout=0.1, print_debug=print_debug)
            except socket.timeout:
                break
            if incoming_msg:
                try:
                    new_msg = json.loads(incoming_msg)
                except ValueError:
                    # json.JSONDecodeError is a ValueError subclass; the
                    # original used a bare except here.
                    logging.warning("Error decoding json: {}".format(incoming_msg))
                    break
                if 'type' not in new_msg:
                    logging.warning("Discarded msg for missing type field: {}".format(new_msg))
                    continue
                elif new_msg['type'] == expected_msg_type:
                    return new_msg
                else:
                    self.incoming_queue.append(new_msg)
        # Will get here if timeout is reached or decode error happens
        return None

    def send_msg_and_wait_for_ack(self, msg, print_debug=False, timeout=0.1):
        """
        Sends msg and ensures that the correct ack is returned
        Logs a warning if ack is not recieved or incorrect ack is recieved

        Raises ValueError if no valid ack arrives after all retries.
        """
        # Make sure json dump is compact for transmission
        self.client.send(json.dumps(msg, separators=(',', ':')), print_debug=print_debug)
        num_wait_checks = 3
        counter = 0
        resp_ok = False
        while counter < num_wait_checks and not resp_ok:
            counter += 1
            resp = self.wait_for_server_response(expected_msg_type='ack', print_debug=print_debug)
            if not resp:
                logging.warning("Did not recieve ack. Try {} of {}".format(counter, num_wait_checks))
                continue
            else:
                # Defensive: wait_for_server_response already filters by type,
                # so this branch should not trigger in practice.
                if resp['type'] != 'ack':
                    logging.warning("Expecting return type ack. Try {} of {}".format(counter, num_wait_checks))
                    continue
                elif resp['data'] != msg['type']:
                    logging.warning("Incorrect ack type. Expecting: {}, Got: {}. Try {} of {}".format(
                        msg['type'], resp['data'], counter, num_wait_checks))
                    continue
                resp_ok = True
        if resp_ok:
            return resp
        else:
            raise ValueError("Bad/no response")

    def net_status(self):
        """ Check if the network connection is ok"""
        msg = {'type': 'check'}
        # BUGFIX: the original returned from a finally block after a bare
        # except, which silently swallowed *every* exception including
        # KeyboardInterrupt. Catch Exception explicitly instead.
        try:
            self.send_msg_and_wait_for_ack(msg)
            return True
        except Exception:
            return False

    def request_status(self):
        """ Request status from server """
        msg = {'type': 'status'}
        self.client.send(json.dumps(msg), print_debug=False)
        resp = self.wait_for_server_response(expected_msg_type='status', print_debug=False)
        if not resp:
            logging.warning("Did not recieve status response")
            return None
        return resp['data']

    def estop(self):
        """ Tell client to estop """
        msg = {'type': 'estop'}
        self.send_msg_and_wait_for_ack(msg)


class RobotClient(ClientBase):
    """Client for one robot; each method sends a single typed command."""

    def __init__(self, cfg, robot_id):
        super().__init__(cfg.ip_map[robot_id])
        self.robot_id = robot_id
        self.cfg = cfg

    def move(self, x, y, a):
        """ Tell robot to move to specific location """
        msg = {'type': 'move', 'data': {'x': x, 'y': y, 'a': a}}
        self.send_msg_and_wait_for_ack(msg)

    def move_rel(self, x, y, a):
        """ Tell robot to move to a relative location """
        msg = {'type': 'move_rel', 'data': {'x': x, 'y': y, 'a': a}}
        self.send_msg_and_wait_for_ack(msg)

    def move_rel_slow(self, x, y, a):
        """ Tell robot to move to a relative location but slowly """
        msg = {'type': 'move_rel_slow', 'data': {'x': x, 'y': y, 'a': a}}
        self.send_msg_and_wait_for_ack(msg)

    def move_fine(self, x, y, a):
        """ Tell robot to move to a specific location with fine precision """
        msg = {'type': 'move_fine', 'data': {'x': x, 'y': y, 'a': a}}
        self.send_msg_and_wait_for_ack(msg)

    def move_fine_stop_vision(self, x, y, a):
        """ Tell robot to move to a specific location with fine precision but
        stop if vision markers are detected """
        msg = {'type': 'move_fine_stop_vision', 'data': {'x': x, 'y': y, 'a': a}}
        self.send_msg_and_wait_for_ack(msg)

    def move_with_vision(self, x, y, a):
        """ Tell robot to move to a location relative to the vision
        measurements from cameras"""
        msg = {'type': 'move_vision', 'data': {'x': x, 'y': y, 'a': a}}
        self.send_msg_and_wait_for_ack(msg)

    def move_const_vel(self, vx, vy, va, t):
        """ Tell robot to move at constant velocity for a specific amount of time"""
        msg = {'type': 'move_const_vel', 'data': {'vx': vx, 'vy': vy, 'va': va, 't': t}}
        self.send_msg_and_wait_for_ack(msg)

    def place(self):
        """ Tell robot to place pallet """
        msg = {'type': 'place'}
        self.send_msg_and_wait_for_ack(msg)

    def load(self):
        """ Tell robot to load pallet """
        msg = {'type': 'load'}
        self.send_msg_and_wait_for_ack(msg)

    def tray_init(self):
        """ Tell robot to initialize tray """
        msg = {'type': 'init'}
        self.send_msg_and_wait_for_ack(msg)

    def load_complete(self):
        """ Tell robot that base station load is complete """
        msg = {'type': 'lc'}
        self.send_msg_and_wait_for_ack(msg)

    def clear_error(self):
        """ Tell robot to clear an existing error """
        msg = {'type': 'clear_error'}
        self.send_msg_and_wait_for_ack(msg)

    def wait_for_localization(self):
        """ Tell robot to wait for localization"""
        msg = {'type': 'wait_for_loc'}
        self.send_msg_and_wait_for_ack(msg)

    def set_pose(self, x, y, a):
        """ Sets the pose of the robot explicity bypassing any localization code """
        msg = {'type': 'set_pose', 'data': {'x': x, 'y': y, 'a': a}}
        self.send_msg_and_wait_for_ack(msg)

    def toggle_vision_debug(self):
        """ Tell robot to toggle vision debug output"""
        msg = {'type': 'toggle_vision_debug'}
        self.send_msg_and_wait_for_ack(msg)

    def start_cameras(self):
        """ Tell robot to start the cameras"""
        msg = {'type': 'start_cameras'}
        self.send_msg_and_wait_for_ack(msg)

    def stop_cameras(self):
        """ Tell robot to stop the cameras"""
        msg = {'type': 'stop_cameras'}
        self.send_msg_and_wait_for_ack(msg)


class BaseStationClient(ClientBase):
    """Client for the domino base station."""

    def __init__(self, cfg):
        super().__init__(cfg.base_station_ip)
        self.cfg = cfg

    def load(self):
        """ Tells the station to load the next set of dominos into the robot"""
        msg = {'type': 'load'}
        self.send_msg_and_wait_for_ack(msg)


# Hacky Mocks to use for testing
class MockRobotClient:
    def __init__(self, cfg, robot_id):
        super().__init__()
        self.robot_id = robot_id
        self.cfg = cfg

    def move(self, x, y, a):
        pass

    def move_rel(self, x, y, a):
        pass

    def move_rel_slow(self, x, y, a):
        pass

    def move_fine(self, x, y, a):
        pass

    def place(self):
        pass

    def net_status(self):
        return True

    def estop(self):
        pass

    def load(self):
        pass

    def request_status(self):
        return {"in_progress": False, "pos_x": 1, "pos_y": 2, "pos_a": 0}

    def clear_error(self):
        pass

    def wait_for_localization(self):
        pass

    def toggle_distance(self):
        pass


class MockBaseStationClient:
    def __init__(self, cfg):
        super().__init__()
        self.cfg = cfg

    def load(self):
        pass

    def net_status(self):
        return True

    def estop(self):
        pass

    def request_status(self):
        return {}


if __name__ == '__main__':
    import config
    from MasterMain import configure_logging
    cfg = config.Config()
    configure_logging(cfg.log_folder)
    r = RobotClient(cfg, 'robot1')
    time.sleep(1)
    r.request_status()
    time.sleep(1)
# ================================================
# FILE: src/master/Runtime.py
# ================================================
import time
import logging
import enum
import pprint
import pickle
import os
import json
import copy
import math

# NOTE(review): removed the accidental, unused
# `from numpy.core.defchararray import add` auto-import (numpy.core is a
# private namespace and `add` is never referenced in this module).
from MarvelMindHandler import MarvelmindWrapper, MockMarvelmindWrapper
from RobotClient import RobotClient, BaseStationClient, MockRobotClient, MockBaseStationClient
from FieldPlanner import ActionTypes, Action, TestPlan, RunFieldPlanning
from Utils import write_file, NonBlockingTimer, ActionValidator


class RobotInterface:
    """Tracks network comms, status, and the currently running action for one robot."""

    def __init__(self, config, robot_id):
        self.comms_online = False
        self.robot_client = None
        self.config = config
        self.robot_id = robot_id
        self.last_status_time = 0
        self.last_status = None
        # Maps "simple" (argument-free) ActionTypes to client calls; populated
        # once comms come online in _setup_action_map().
        self.simple_action_map = {}
        self.current_action = ActionTypes.NONE
        self.current_move_data = []
        self.current_action_timer = None
        self.wait_timer = None
        self.waiting_active = False

    def bring_online(self, use_mock=False):
        self._bring_comms_online(use_mock)

    def check_online(self):
        return self.comms_online

    def get_robot_metrics(self):
        """Return the last status dict augmented with action bookkeeping, or
        the raw last_status if it isn't a dict (e.g. an 'offline' string)."""
        metrics = copy.copy(self.last_status)
        if metrics and type(metrics) is dict:
            metrics['current_action'] = str(self.current_action)
            metrics['current_move_data'] = self.current_move_data
        if metrics and self.waiting_active:
            # A WAIT action is handled master-side, so force in_progress on.
            metrics['in_progress'] = True
        return metrics

    def _setup_action_map(self):
        # Actions that map 1:1 onto an argument-free client call.
        self.simple_action_map = {
            ActionTypes.LOAD: self.robot_client.load,
            ActionTypes.PLACE: self.robot_client.place,
            ActionTypes.TRAY_INIT: self.robot_client.tray_init,
            ActionTypes.LOAD_COMPLETE: self.robot_client.load_complete,
            ActionTypes.ESTOP: self.robot_client.estop,
            ActionTypes.CLEAR_ERROR: self.robot_client.clear_error,
            ActionTypes.WAIT_FOR_LOCALIZATION: self.robot_client.wait_for_localization,
            ActionTypes.TOGGLE_VISION_DEBUG: self.robot_client.toggle_vision_debug,
            ActionTypes.START_CAMERAS: self.robot_client.start_cameras,
            ActionTypes.STOP_CAMERAS: self.robot_client.stop_cameras,
        }

    def _bring_comms_online(self, use_mock=False):
        try:
            logging.info("Attempting to connect to {} over wifi".format(self.robot_id))
            if use_mock:
                self.robot_client = MockRobotClient(self.config, self.robot_id)
            else:
                self.robot_client = RobotClient(self.config, self.robot_id)
            if self.robot_client.net_status():
                self.comms_online = True
                self._setup_action_map()
                logging.info("Connected to {} over wifi".format(self.robot_id))
        except Exception:
            # (dropped the unused `as e` binding)
            logging.info("Couldn't connect to {} over wifi".format(self.robot_id))
            return

    def _get_status_from_robot(self):
        self.last_status = self.robot_client.request_status()
        self.last_status_time = time.time()
        # Reset current action and move data if the robot finished an action.
        # The timer gives the robot time to actually start before we trust
        # in_progress == False. Guard against a never-started timer.
        if self.current_action != ActionTypes.NONE and self.last_status is not None and \
           'in_progress' in self.last_status.keys() and not self.last_status['in_progress'] and \
           self.current_action_timer is not None and self.current_action_timer.check():
            self.current_action = ActionTypes.NONE
            self.current_move_data = []

    def update(self):
        if self.comms_online:
            # Request a status update if needed
            if time.time() - self.last_status_time > self.config.robot_status_wait_time:
                try:
                    self._get_status_from_robot()
                except RuntimeError:
                    logging.info("Network connection with {} lost".format(self.robot_id))
                    self.comms_online = False
                    self.last_status = "{} offline".format(self.robot_id)
        if self.waiting_active:
            if self.wait_timer.check():
                self.waiting_active = False
                self.wait_timer = None

    def run_action(self, action):
        """Dispatch an Action to the robot. Drops comms offline on RuntimeError."""
        if not self.comms_online:
            return
        logging.info("Running {} on {}".format(action.action_type, self.robot_id))
        try:
            if action.action_type is not None:
                self.current_action = action.action_type
            if action.action_type == ActionTypes.MOVE_COARSE:
                self.robot_client.move(action.x, action.y, action.a)
                self.current_move_data = [action.x, action.y, action.getAngleDegrees()]
            elif action.action_type == ActionTypes.MOVE_REL:
                self.robot_client.move_rel(action.x, action.y, action.a)
            elif action.action_type == ActionTypes.MOVE_REL_SLOW:
                self.robot_client.move_rel_slow(action.x, action.y, action.a)
            elif action.action_type == ActionTypes.MOVE_FINE:
                self.robot_client.move_fine(action.x, action.y, action.a)
                self.current_move_data = [action.x, action.y, action.getAngleDegrees()]
            elif action.action_type == ActionTypes.MOVE_FINE_STOP_VISION:
                self.robot_client.move_fine_stop_vision(action.x, action.y, action.a)
                self.current_move_data = [action.x, action.y, action.getAngleDegrees()]
            elif action.action_type == ActionTypes.MOVE_WITH_VISION:
                self.robot_client.move_with_vision(action.x, action.y, action.a)
            elif action.action_type == ActionTypes.MOVE_CONST_VEL:
                self.robot_client.move_const_vel(action.vx, action.vy, action.va, action.t)
            elif action.action_type == ActionTypes.SET_POSE:
                self.robot_client.set_pose(action.x, action.y, action.a)
            elif action.action_type == ActionTypes.NET:
                status = self.robot_client.net_status()
                logging.info("Robot {} network status is: {}".format(self.robot_id, status))
            elif action.action_type == ActionTypes.WAIT:
                # WAIT is handled master-side with a timer, not sent to the robot.
                self.wait_timer = NonBlockingTimer(action.time)
                self.waiting_active = True
            elif action.action_type in self.simple_action_map.keys():
                self.simple_action_map[action.action_type]()
            else:
                logging.info("Unknown action: {}".format(action.action_type))
            self.current_action_timer = NonBlockingTimer(self.config.robot_next_action_wait_time)
        except RuntimeError:
            logging.info("Network connection with {} lost".format(self.robot_id))
            self.comms_online = False
            self.last_status = "{} offline".format(self.robot_id)


class BaseStationInterface:
    """Same role as RobotInterface, but for the domino base station."""

    def __init__(self, config):
        self.config = config
        self.client = None
        self.comms_online = False
        self.last_status = None
        self.last_status_time = 0

    def bring_online(self, use_mock=False):
        logging.info("Bringing BaseStation comms online")
        if use_mock:
            self.client = MockBaseStationClient(self.config)
        else:
            self.client = BaseStationClient(self.config)
        if self.client.net_status():
            self.comms_online = True

    def get_last_status(self):
        return self.last_status

    def check_online(self):
        return self.comms_online

    def update(self):
        # Request a status update if needed
        if time.time() - self.last_status_time > self.config.base_station_status_wait_time:
            try:
                self._get_status_from_base_station()
            except RuntimeError:
                logging.info("Network connection with base station lost")
                self.comms_online = False
                self.last_status = "Base station offline"

    def _get_status_from_base_station(self):
        self.last_status = self.client.request_status()
        if self.last_status == None or self.last_status == "":
            self.last_status = "Base station status not available!"
        self.last_status_time = time.time()

    def run_action(self, action):
        if not self.comms_online:
            return
        try:
            if action.action_type == ActionTypes.ESTOP:
                self.client.estop()
            elif action.action_type == ActionTypes.NET:
                status = self.client.net_status()
                logging.info("Base station network status is: {}".format(status))
            elif action.action_type == ActionTypes.LOAD:
                self.client.load()
            else:
                logging.info("Unknown action: {}".format(action.action_type))
        except RuntimeError:
            logging.info("Network connection with base station lost")
            self.comms_online = False
            self.last_status = "Base station offline"


class PlanStatus(enum.Enum):
    """Lifecycle of a loaded domino plan.

    BUGFIX: the original wrote `NONE = 0,` etc. — the trailing commas made
    every member's value a 1-tuple like (0,). Members are only ever compared
    by identity here, so plain ints are a safe correction.
    """
    NONE = 0
    LOADED = 1
    RUNNING = 2
    PAUSED = 3
    ABORTED = 4
    DONE = 5
class RuntimeManager:
    """Top-level coordinator: brings components online, runs the plan state
    machine, assigns cycles/actions to robots, and persists cycle state."""

    STATUS_NOT_INITIALIZED = 0
    STATUS_PARTIALLY_INITIALIZED = 1
    STATUS_FULLY_INITIALIZED = 2

    def __init__(self, config):
        self.config = config
        self.robots = {id: RobotInterface(config, id) for id in self.config.ip_map.keys()}
        self.base_station = BaseStationInterface(config)
        self.initialization_status = RuntimeManager.STATUS_NOT_INITIALIZED
        self.component_initialization_status = {id: False for id in self.config.ip_map.keys()}
        self.component_initialization_status['mm'] = False
        self.component_initialization_status['base_station'] = False
        if self.config.OFFLINE_TESTING or self.config.SKIP_MARVELMIND:
            self.mm_wrapper = MockMarvelmindWrapper(config)
        else:
            self.mm_wrapper = MarvelmindWrapper(config)
        self.last_metrics = {}
        self.idle_bots = set([n for n in self.robots.keys()])
        self.initialization_timer = None
        self.cycle_tracker = {n: RuntimeManager._fresh_tracker_entry() for n in self.robots.keys()}
        self.next_cycle_number = 0
        self.plan = None
        self.plan_path = ""
        self.plan_status = PlanStatus.NONE
        if self.config.USE_TEST_PLAN:
            self._load_plan_from_file('')
        elif self.config.REGEN_PLAN:
            logging.info("Regenerating and loading plan")
            plan = RunFieldPlanning(autosave=True)
            self._load_plan_from_object(plan, os.path.join(self.config.plans_dir, "autosaved.p"))
        elif self.config.AUTO_LOAD_PLAN and self.config.AUTO_LOAD_PLAN_NAME:
            plan_path = os.path.join(self.config.plans_dir, self.config.AUTO_LOAD_PLAN_NAME)
            self._load_plan_from_file(plan_path)

    @staticmethod
    def _fresh_tracker_entry():
        # One canonical "robot is tracking nothing" entry (this literal was
        # duplicated in three places in the original).
        return {'action_id': None, 'cycle_id': None, 'timer': None,
                'needs_restart': False, 'action_validator': ActionValidator()}

    def initialize(self):
        """Try to bring every not-yet-online component up; reschedule retry in 3s if incomplete."""
        if self.initialization_timer and not self.initialization_timer.check():
            return
        # Handle testing/mock case
        use_base_station_mock = False
        use_robot_mock = False
        if self.config.OFFLINE_TESTING:
            use_base_station_mock = True
            use_robot_mock = True
        if self.config.SKIP_BASE_STATION:
            use_base_station_mock = True
        # Bring everything online if needed
        if not self.component_initialization_status['base_station']:
            self.base_station.bring_online(use_base_station_mock)
        if not self.component_initialization_status['mm']:
            self.mm_wrapper.wake_all_devices_only_if_needed()
        for id, robot in self.robots.items():
            if not self.component_initialization_status[id]:
                robot.bring_online(use_robot_mock)
        # Check the status of initialization
        self._check_initialization_status()
        if self.initialization_status != RuntimeManager.STATUS_FULLY_INITIALIZED:
            self.initialization_timer = NonBlockingTimer(3)
            logging.info("Unable to fully initialize RuntimeManager, will try again in 3 seconds")
            logging.info("Current componenent initialization status:")
            logging.info(pprint.pformat(self.component_initialization_status, indent=2, width=10))

    def shutdown(self, keep_mm_awake):
        logging.info("Shutting down")
        if self.initialization_status != RuntimeManager.STATUS_NOT_INITIALIZED:
            if not keep_mm_awake:
                self.mm_wrapper.sleep_all_devices()

    def get_initialization_status(self):
        self._check_initialization_status()
        return self.initialization_status

    def get_all_metrics(self):
        return self.last_metrics

    def update(self):
        """Main periodic tick: refresh comms, metrics, and plan progress."""
        self._check_initialization_status()
        # BUGFIX: the original compared the *bound method*
        # `self.get_initialization_status` (not its return value) against the
        # status constant — always unequal — so initialize() ran on every
        # single update even when fully online. Status was refreshed by the
        # call just above, so compare the attribute directly.
        if self.initialization_status != RuntimeManager.STATUS_FULLY_INITIALIZED:
            self.initialize()
        self.base_station.update()
        for robot in self.robots.values():
            robot.update()
        self._update_all_metrics()
        if self.plan_status == PlanStatus.RUNNING:
            self._update_plan()
            self._update_cycle_actions()
            self._cycle_state_to_file()

    def run_manual_action(self, manual_action):
        target = manual_action[0]
        action = manual_action[1]
        self._run_action(target, action)

    def estop(self):
        logging.warning("ESTOP")
        self._run_action('base', Action(ActionTypes.ESTOP, 'ESTOP'))
        for robot in self.robots.values():
            robot.run_action(Action(ActionTypes.ESTOP, 'ESTOP'))

    def load_plan(self, plan_file):
        """Load either a saved cycle-state json or a pickled plan file."""
        # BUGFIX: the original used os.path.basename(...).split('.')[1],
        # which raised IndexError for extensionless names and misclassified
        # multi-dot names like "a.b.json".
        if plan_file and plan_file.endswith('.json'):
            self._load_cycle_state_from_file(plan_file)
        else:
            self._load_plan_from_file(plan_file)

    def set_plan_status(self, status):
        # Clean up if we don't want to save the plan state
        if status == PlanStatus.ABORTED:
            self.next_cycle_number = 0
            self.cycle_tracker = {n: RuntimeManager._fresh_tracker_entry() for n in self.robots.keys()}
            self.idle_bots = set([n for n in self.robots.keys()])
        elif status == PlanStatus.PAUSED:
            for data in self.cycle_tracker.values():
                data['needs_restart'] = True
        self.plan_status = status

    def increment_robot_cycle(self, target):
        self._modify_cycle_state(target, add_cycle_id=1)

    def decrement_robot_cycle(self, target):
        self._modify_cycle_state(target, add_cycle_id=-1)

    def increment_robot_action(self, target):
        self._modify_cycle_state(target, add_action_id=1)

    def decrement_robot_action(self, target):
        self._modify_cycle_state(target, add_action_id=-1)

    def set_cycle(self, target, cycle_num):
        self._modify_cycle_state(target, add_cycle_id=cycle_num, absolute=True)

    def set_action(self, target, action_num):
        self._modify_cycle_state(target, add_action_id=action_num, absolute=True)

    def get_plan_status(self):
        return self.plan_status

    def get_plan_info(self):
        if not self.plan:
            return None
        else:
            return (self.plan, self.plan_status, self.next_cycle_number - 1)

    def _load_plan_from_object(self, plan, plan_path=""):
        self.plan = plan
        self.plan_status = PlanStatus.LOADED
        self.plan_path = plan_path
        for key in self.robots.keys():
            self.set_cycle(key, 0)
            self.set_action(key, 0)

    def _load_plan_from_file(self, plan_file):
        if self.config.USE_TEST_PLAN or plan_file == "testplan":
            self._load_plan_from_object(TestPlan(), "testplan")
        else:
            with open(plan_file, 'rb') as f:
                plan = pickle.load(f)
            self._load_plan_from_object(plan, plan_file)
            logging.info("Loaded plan from {}".format(plan_file))
        for key in self.robots.keys():
            self.set_cycle(key, 0)
            self.set_action(key, 0)

    def _update_plan(self):
        # If we have an idle robot, send it the next cycle to execute
        if self.plan_status == PlanStatus.RUNNING and self._any_idle_bots():
            next_cycle = self.plan.get_cycle(self.next_cycle_number)
            # If we get none, that means we are done with the plan
            if next_cycle is None:
                self.next_cycle_number = 0
                logging.info("Completed plan!")
                self.plan_status = PlanStatus.DONE
                self._erase_cycle_state_file()
            else:
                logging.info("Sending cycle {} for execution".format(self.next_cycle_number))
                self.next_cycle_number += 1
                self._assign_new_cycle(next_cycle)

    def _any_idle_bots(self):
        return len(self.idle_bots) > 0

    def _assign_new_cycle(self, cycle):
        expected_robot = cycle.robot_id
        cycle_id = cycle.id
        if expected_robot not in self.idle_bots:
            raise ValueError("Expected cycle {} to be run with {} but only available robots are {}".format(
                cycle_id, expected_robot, self.idle_bots))
        else:
            logging.info("Assigning cycle {} to {}".format(cycle_id, expected_robot))
            self.cycle_tracker[expected_robot]['cycle_id'] = cycle_id
            self.cycle_tracker[expected_robot]['action_id'] = None

    def _run_action(self, target, action):
        if action.action_type == ActionTypes.PAUSE_PLAN:
            self.set_plan_status(PlanStatus.PAUSED)
            self.increment_robot_action(target)
        elif target == 'base':
            self.base_station.run_action(action)
        elif 'robot' in target:
            self.robots[target].run_action(action)
            if target in self.idle_bots:
                self.idle_bots.remove(target)
        else:
            logging.info("Unknown target: {}".format(target))

    def _check_initialization_status(self):
        self.component_initialization_status['base_station'] = self.base_station.check_online()
        self.component_initialization_status['mm'] = self.mm_wrapper.check_all_devices_status()
        for id, robot in self.robots.items():
            self.component_initialization_status[id] = robot.check_online()
        ready = [i for i in self.component_initialization_status.values()]
        if all(ready):
            self.initialization_status = RuntimeManager.STATUS_FULLY_INITIALIZED
        elif any(ready):
            self.initialization_status = RuntimeManager.STATUS_PARTIALLY_INITIALIZED

    def _update_all_metrics(self):
        self.last_metrics = {}
        self.last_metrics['mm'] = self.mm_wrapper.get_metrics()
        self.last_metrics['base'] = self.base_station.get_last_status()
        self.last_metrics['plan'] = self._get_plan_metrics()
        for robot in self.robots.values():
            robot_metrics = robot.get_robot_metrics()
            self.last_metrics[str(robot.robot_id)] = robot_metrics
            # Check if the robot has an error that would require pausing the plan
            robot_has_error = robot_metrics and "error_status" in robot_metrics and robot_metrics["error_status"]
            plan_running = self.plan_status == PlanStatus.RUNNING
            if plan_running and robot_has_error:
                logging.warning("Pausing plan due to error on {}. Please address before proceeding.".format(robot.robot_id))
                self.set_plan_status(PlanStatus.PAUSED)

    def _get_plan_metrics(self):
        plan_metrics = {}
        plan_metrics['status'] = self.plan_status
        plan_metrics['filename'] = os.path.basename(self.plan_path)
        plan_metrics['next_cycle_number'] = self.next_cycle_number
        plan_metrics['idle_bots'] = list(self.idle_bots)
        robot_metrics = {}
        for id, data in self.cycle_tracker.items():
            robot_metrics[id] = {'cycle_id': 'None', 'action_name': 'None', 'action_id': 'None',
                                 'vision_offset': 'None', 'tile_coordinate': 'None'}
            if data['cycle_id'] is not None:
                robot_metrics[id]['cycle_id'] = data['cycle_id']
                robot_metrics[id]['tile_coordinate'] = self.plan.field.tiles[data['cycle_id']].coordinate
            if data['action_id'] is not None:
                robot_metrics[id]['action_id'] = data['action_id']
                action = self.plan.get_action(data['cycle_id'], data['action_id'])
                if action:
                    robot_metrics[id]['action_name'] = action.name
                if action and action.action_type == ActionTypes.MOVE_WITH_VISION:
                    robot_metrics[id]['vision_offset'] = (action.x, action.y, math.degrees(action.a))
                else:
                    robot_metrics[id]['vision_offset'] = 'None'
            robot_metrics[id]['needs_restart'] = data['needs_restart']
        plan_metrics['robots'] = robot_metrics
        return plan_metrics

    def _update_cycle_actions(self):
        # Use metrics to update in progress actions
        for robot_id, metric in self.last_metrics.items():
            # No updates needed for pos tracker, base station, or plan
            if robot_id in ['mm', 'plan', 'base']:
                continue
            if metric is None or 'in_progress' not in metric.keys():
                continue
            # Figure out if we need to do any updates on the running actions
            tracker_data = self.cycle_tracker[robot_id]
            if tracker_data['cycle_id'] is not None:
                cycle = self.plan.get_cycle(tracker_data['cycle_id'])
                # Check if this action needs to be restarted due to being paused or interrupted
                if tracker_data['needs_restart']:
                    tracker_data['timer'] = NonBlockingTimer(self.config.robot_next_action_wait_time)
                    action = cycle.action_sequence[tracker_data['action_id']]
                    logging.info("Re-starting action {} ({}) on {}".format(tracker_data['action_id'], action.name, robot_id))
                    self._run_action(robot_id, action)
                    tracker_data['action_validator'].update_expected_action(action.action_type)
                    tracker_data['needs_restart'] = False
                # The timer check here is to give a delay for the robot to actually start the action
                # before master checks if it is finished
                action_timer_ready = tracker_data['timer'] is None or tracker_data['timer'].check()
                # If we got a new cycle but haven't started an action yet, start the first action
                start_next_action = False
                if tracker_data['action_id'] is None:
                    start_next_action = True
                # If the robot was doing an action and is now finished, start the next one
                action_assigned = tracker_data['action_id'] is not None
                action_finished = not metric['in_progress']
                previous_action_validated = tracker_data['action_validator'].update_action_validation(metric)
                if action_assigned and action_finished and action_timer_ready:
                    start_next_action = True
                    if not previous_action_validated:
                        logging.warning("Everything else finished but previous action not validated")
                if start_next_action:
                    # Check if there is a new action to run for this cycle, if not, end the cycle
                    if tracker_data['action_id'] is not None and (tracker_data['action_id'] + 1) >= len(cycle.action_sequence):
                        logging.info("{} finished cycle {}".format(robot_id, cycle.id))
                        tracker_data['cycle_id'] = None
                        tracker_data['action_id'] = None
                    else:
                        # If there is a new action to run, start it
                        if tracker_data['action_id'] is None:
                            tracker_data['action_id'] = 0
                        else:
                            tracker_data['action_id'] += 1
                        tracker_data['timer'] = NonBlockingTimer(self.config.robot_next_action_wait_time)
                        next_action = cycle.action_sequence[tracker_data['action_id']]
                        logging.info("Starting action {} ({}) on {}".format(tracker_data['action_id'], next_action.name, robot_id))
                        self._run_action(robot_id, next_action)
                        tracker_data['action_validator'].update_expected_action(next_action.action_type)
        # Update if any robots are idle
        for robot, data in self.cycle_tracker.items():
            if data['action_id'] is None and data['cycle_id'] is None:
                self.idle_bots.add(robot)

    def _modify_cycle_state(self, target, add_cycle_id=None, add_action_id=None, absolute=False):
        """Manually adjust a robot's cycle/action pointers (only while not running)."""
        if self.plan_status == PlanStatus.RUNNING:
            logging.warning("Cannot update cycle state while plan is running.")
            return
        if target not in self.cycle_tracker.keys():
            logging.warning("Invalid target: {}".format(target))
            return
        cur_cycle_id = self.cycle_tracker[target]["cycle_id"]
        if cur_cycle_id is None:
            cur_cycle_id = 0
        new_cycle_id = cur_cycle_id
        if add_cycle_id is not None:
            if absolute:
                new_cycle_id = add_cycle_id
            else:
                new_cycle_id = cur_cycle_id + add_cycle_id
            # Reordered from the original `a and b or c` so the and/or
            # precedence is explicit (same truth table).
            if new_cycle_id < 0 or (self.plan and new_cycle_id >= len(self.plan.cycles)):
                logging.warning("Cycle id {} out of bounds".format(new_cycle_id))
                return
        cur_action_id = self.cycle_tracker[target]["action_id"]
        if cur_action_id is None:
            cur_action_id = 0
        new_action_id = cur_action_id
        if add_action_id is not None:
            if absolute:
                new_action_id = add_action_id
            else:
                new_action_id = cur_action_id + add_action_id
            if new_action_id < 0 or (self.plan and new_action_id >= len(self.plan.get_cycle(new_cycle_id).action_sequence)):
                logging.warning("Action id {} out of bounds".format(new_action_id))
                return
        data = self.cycle_tracker[target]
        data['action_id'] = new_action_id
        data['cycle_id'] = new_cycle_id
        data['timer'] = None
        data['needs_restart'] = True
        data['action_validator'] = ActionValidator()
        self.next_cycle_number = new_cycle_id + 1
        # discard() replaces the original try/remove/bare-except.
        self.idle_bots.discard(target)

    def _cycle_state_to_file(self):
        if self.plan_status == PlanStatus.RUNNING:
            # Copy data and strip non-serializable fields before writing
            robot_cycle_states = copy.deepcopy(self.cycle_tracker)
            for tracker_data in robot_cycle_states.values():
                del tracker_data['timer']
                del tracker_data['action_validator']
            data_to_dump = {}
            data_to_dump['plan_path'] = self.plan_path
            data_to_dump['next_cycle_number'] = self.next_cycle_number
            data_to_dump['robots'] = robot_cycle_states
            with open(self.config.cycle_state_file, 'w') as f:
                json.dump(data_to_dump, f)

    def _load_cycle_state_from_file(self, filepath):
        logging.info("Loading cycle state information from {}".format(filepath))
        with open(filepath, 'r') as f:
            loaded_data = json.load(f)
        # Set the various bits of data from the file
        self.plan_path = loaded_data['plan_path']
        self.next_cycle_number = loaded_data['next_cycle_number']
        self.cycle_tracker = loaded_data['robots']
        # Need to handle robot states carefully
        for robot, data in self.cycle_tracker.items():
            # Re-add the non-serialized fields and mark the action for restart
            data['timer'] = None
            data['needs_restart'] = True
            data['action_validator'] = ActionValidator()
            # Remove the robot from the idle list if it was actually doing
            # something at the time it stopped (discard avoids KeyError).
            if data['action_id'] is not None or data['cycle_id'] is not None:
                self.idle_bots.discard(robot)
        # Reload the plan data and start the status as PAUSED so that it can be resumed
        self._load_plan_from_file(self.plan_path)
        self.plan_status = PlanStatus.PAUSED

    def _erase_cycle_state_file(self):
        fname = self.config.cycle_state_file
        if os.path.exists(fname):
            os.remove(fname)
# ================================================
# FILE: src/master/Utils.py
# ================================================
import time
import numpy as np
import math
import enum


class ActionTypes(enum.Enum):
    """Every command master can issue to a robot or the base station.

    BUGFIX: the original declared members with trailing commas
    (`MOVE_COARSE = 0,`), silently making every value a 1-tuple like (0,).
    Members are compared by identity / str() in this codebase and enums
    pickle by *name*, so plain int values are a safe correction.
    """
    MOVE_COARSE = 0
    MOVE_FINE = 1
    MOVE_REL = 2
    NET = 3
    LOAD = 4
    PLACE = 5
    TRAY_INIT = 6
    LOAD_COMPLETE = 7
    ESTOP = 8
    WAIT_FOR_LOCALIZATION = 9
    MOVE_CONST_VEL = 10
    CLEAR_ERROR = 11
    NONE = 12
    SET_POSE = 13
    MOVE_WITH_VISION = 14
    TOGGLE_VISION_DEBUG = 15
    START_CAMERAS = 16
    STOP_CAMERAS = 17
    MOVE_REL_SLOW = 18
    WAIT = 19
    MOVE_FINE_STOP_VISION = 20
    PAUSE_PLAN = 21


class NonBlockingTimer:
    """Timer that is polled via check() instead of blocking the caller."""

    def __init__(self, trigger_time):
        self.start_time = time.time()
        self.trigger_time = trigger_time

    def check(self):
        """Return True once trigger_time seconds have elapsed since construction."""
        if time.time() - self.start_time > self.trigger_time:
            return True
        else:
            return False


def write_file(filename, text):
    """Write text to filename, creating the file if needed."""
    with open(filename, 'w+') as f:
        f.write(text)


# Pos and offset expected as 2 long numpy vector, Angle is expected in degrees
# Returns 2 length numpy vector
def TransformPos(pos_2d, frame_offset_2d, frame_angle):
    frame_angle = math.radians(frame_angle)
    # Make a 2D homogeneous transform (rotation + translation)
    tf = np.array([[np.cos(frame_angle), -np.sin(frame_angle), frame_offset_2d[0]],
                   [np.sin(frame_angle),  np.cos(frame_angle), frame_offset_2d[1]],
                   [0, 0, 1]])
    pos_2d = np.concatenate((pos_2d, [1])).reshape((3,))
    new_pos = np.matmul(tf, pos_2d)
    return new_pos[:2].reshape((2,))


class ActionValidator:
    """Cross-checks a robot's reported status against the action that master
    believes it should be running."""

    # Camera start/stop finish too quickly for in_progress to be observed, so
    # they validate as soon as the reported action type matches.
    # BUGFIX: expected_action is stored as a *string* (see
    # update_expected_action), but the original tested membership against raw
    # ActionTypes members — that comparison could never be true, so camera
    # actions were never validated via this path. Compare string forms.
    _INSTANT_ACTIONS = (str(ActionTypes.START_CAMERAS), str(ActionTypes.STOP_CAMERAS))

    def __init__(self):
        self.expected_action = str(ActionTypes.NONE)
        self.action_validated = False

    def update_expected_action(self, expected_action):
        """Record the action master just started; resets the validated flag."""
        self.expected_action = str(expected_action)
        self.action_validated = False

    def update_action_validation(self, metric):
        """Latch action_validated True once the robot reports it is running
        (or, for camera actions, merely acknowledges) the expected action."""
        if metric is not None and \
           'in_progress' in metric and \
           'current_action' in metric and \
           metric['current_action'] == self.expected_action:
            if self.expected_action in ActionValidator._INSTANT_ACTIONS:
                self.action_validated = True
            elif metric['in_progress'] == True:
                self.action_validated = True
        return self.action_validated


if __name__ == '__main__':
    test_pos = np.array([1, 1])
    test_offset = np.array([2, 2])
    test_angle = 90
    expected_pos = np.array([1, 3])
    new_pos = TransformPos(test_pos, test_offset, test_angle)
    print("Actual: {}, expected: {}".format(new_pos, expected_pos))
# ================================================
# FILE: src/master/camera_utils.py
# ================================================
# Helper script to get camera images and debug info from raspi
import paramiko
from scp import SCPClient
import os
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
import config
from datetime import datetime


def scp_image(ip, remote_path, local_path):
    """Copy one file off the robot's Pi via SCP and verify it landed locally."""
    ssh = paramiko.SSHClient()
    ssh.load_system_host_keys()
    ssh.connect(ip, username="pi", password='dominobot')
    with SCPClient(ssh.get_transport()) as scp:
        scp.get(remote_path, local_path)
    if not os.path.exists(local_path):
        raise ValueError("SCP image did not complete successfully")


def display_debug_image(local_path):
    """Show a single downloaded debug image, maximized."""
    plt.imshow(mpimg.imread(local_path))
    plt.get_current_fig_manager().window.state('zoomed')
    plt.show()


def get_and_display_multiple_images(cam_name, remote_ip, remote_path, local_path, img_data):
    """Fetch each image described in img_data from the robot and show them in
    a 2x3 grid titled with the camera name."""
    fig, axes = plt.subplots(nrows=2, ncols=3)
    fig.suptitle("{} camera".format(cam_name), fontsize=16)
    panels = axes.ravel()
    for idx, entry in enumerate(img_data):
        # Timestamp-prefix the local copy so repeated grabs don't collide.
        stamp = datetime.now().strftime("%Y%m%d%H%M%S")
        src = remote_path + entry['file']
        dst = os.path.join(local_path, stamp + "_" + cam_name + "_" + entry['file'])
        scp_image(remote_ip, src, dst)
        img = mpimg.imread(dst)
        if entry["color"]:
            panels[idx].imshow(img)
        else:
            panels[idx].imshow(img, cmap='gray', vmin=0, vmax=255)
        panels[idx].set_title(entry['title'])
    plt.tight_layout()
    plt.get_current_fig_manager().window.state('zoomed')
    plt.show()


if __name__ == '__main__':
    cfg = config.Config()
    robot_ip = cfg.ip_map['robot1']
    remote_path = "/home/pi/images/debug/"
    local_path = os.path.join(cfg.log_folder, "images")
    # Each entry: remote filename, panel title, whether to render in color.
    img_data = [
        {"file": "img_raw.jpg", "title": "raw", "color": False},
        {"file": "img_undistorted.jpg", "title": "undistorted", "color": False},
        {"file": "img_thresh.jpg", "title": "threshold", "color": False},
        {"file": "img_keypoints.jpg", "title": "detections", "color": False},
        {"file": "img_best_keypoint.jpg", "title": "best", "color": False},
    ]
    DISPLAY_SIDE = True
DISPLAY_REAR = True

    if DISPLAY_SIDE:
        side_remote_path = remote_path + 'side/'
        get_and_display_multiple_images("side", robot_ip, side_remote_path, local_path, img_data)

    if DISPLAY_REAR:
        rear_remote_path = remote_path + 'rear/'
        get_and_display_multiple_images("rear", robot_ip, rear_remote_path, local_path, img_data)


================================================
FILE: src/master/config.py
================================================
import numpy as np
import os
import Utils

class Config:
    """Central configuration for the master: debug flags, paths, network map,
    plan-generation geometry, environment layout, GUI and runtime tuning.
    Values are class attributes; several later sections derive from earlier ones.
    """

    # Various debug/test flags
    # Set to override config values for home network
    USING_HOME_NETWORK = False
    # Set for laptop vs desktop
    USING_DESKTOP = False
    # Set to override config values for small scale testing
    USE_SMALL_TESTING_CONFIG = False
    # Set to skip connecting to robot
    OFFLINE_TESTING = False
    # Set to skip connecting to base station
    SKIP_BASE_STATION = True
    # Set to skip connecting to Marvelmind
    SKIP_MARVELMIND = True
    # Set to use fake plan instead of loading a generated one
    USE_TEST_PLAN = False
    # MR LOGO plan
    MR_LOGO_PLAN = False
    # Set to auto-load this plan on master startup
    AUTO_LOAD_PLAN = False
    AUTO_LOAD_PLAN_NAME = "FullPlan_DominoBros.p"
    # Set to regenerate and auto load plan on master startup
    REGEN_PLAN = True
    # Set to true to use just a subsection of the overal plan
    USE_SUBSECTION = True

    # ====== PATHS ========
    root_path = "C:\\Users\\alexb\\Documents\\Github\\DominoRobot\\" # Laptop
    if USING_DESKTOP:
        root_path = "C:\\Users\\alexb\\Data\\Github\\DominoRobot\\" # Desktop
    mm_api_relative_path = "marvelmind_SW_20202_04_19\\API\\api_windows_64bit\\dashapi.dll"
    config_dir_path = os.path.dirname(os.path.realpath(__file__))
    mm_api_path = os.path.join(root_path, mm_api_relative_path)
    log_folder = os.path.join(root_path, 'log')
    plans_dir = os.path.join(config_dir_path, 'plans')
    cycle_state_file = os.path.join(plans_dir, 'previous_plan_state.json')

    # ====== ROBOT CONFIG ========
    # Maps robot (or static) to sets of marvel mind beacons
    device_map = {
        "static": (11, 12),
        "robot1": (1, 2)
    }
    # Specifies which IP address each robot has
    ip_map = {'robot1': '10.0.0.3'} # Workshop
    if USING_HOME_NETWORK:
        ip_map = {'robot1': '192.168.1.5'} # Home
    base_station_ip = '10.0.0.100'

    # ====== PLAN GENERATION ========
    # Image configuration
    image_name = os.path.join(config_dir_path, 'DominoDesign-Questions.psd')
    num_tiles_width = 18
    num_tiles_height = 19
    # Available domino colors as (name, rgb) pairs, rgb in 0-1 floats.
    dominos = np.array(
        [('black', (0,0,0)),
         ('red', (1,0,0)),
         ('blue', (0.188,0.5,0.886)),
         ('green', (0,1,0)),
         ('white', (1,1,1)),
         ('brown', (1,0.51,0)),
         ('yellow', (1,0.867,0)),
         ], dtype=object)
    if USE_SMALL_TESTING_CONFIG:
        num_tiles_width = 2
        num_tiles_height = 4
    if MR_LOGO_PLAN:
        image_name = os.path.join(config_dir_path, 'logo.jpg')
        num_tiles_width = 5
        num_tiles_height = 5
        dominos = np.array(
            [('black', (0,0,0)),
             ('white', (1,1,1)),
             ], dtype=object)

    # Physical dimensions of dominos
    domino_width = 0.025 # meters
    domino_height = 0.010 # meters
    domino_spacing_width = 0.037 # meters
    domino_spacing_height = 0.024 # meters
    # Spacing for drawing dominos as pixels instead of rectangles
    meters_per_pixel = 0.008
    domino_width_px = round(domino_width / meters_per_pixel)
    domino_height_px = round(domino_height / meters_per_pixel)
    domino_spacing_width_px = round(domino_spacing_width / meters_per_pixel)
    domino_spacing_height_px = round(domino_spacing_height / meters_per_pixel)

    # Tile configuration
    tile_width = 15
    tile_height = 20
    tile_background_color = (0.9,0.9,0.9)
    tile_edge_color = (0,0,1)
    tile_size_width_meters = 0.930 #tile_width * (domino_spacing_width + domino_width)
    tile_size_height_meters = 0.665 #tile_height * (domino_spacing_height + domino_height)
    desired_width_dominos = tile_width * num_tiles_width
    desired_height_dominos = tile_height * num_tiles_height

    # Map configuration
    grid_top_left = np.array([13.2, 6.9])
    grid_top_right = np.array([13.2, -10.07])
    grid_num_cols = 19
    grid_bottom_left = np.array([1.18,6.9])
    grid_bottom_right = np.array([1.18,-10.07])
    grid_num_rows = 19
    # The top edge varies only in the y component (index 1) and the left edge
    # only in x (index 0), so each expression picks the nonzero span component.
    grid_tile_width = ((grid_top_left - grid_top_right)/grid_num_cols)[1]
    grid_tile_height = ((grid_top_left - grid_bottom_left)/grid_num_rows)[0]
    tile_size_width_meters = grid_tile_width
    # tile_size_height_meters = grid_tile_height

    # Vision offset configuration
    default_vision_offset = np.array((0,0,-0.5))
    vision_offset_file = os.path.join(plans_dir, 'vision_offsets_full_plan.csv')

    # ====== ENVIRONMENT CONFIGURATION ========
    # Map configuration (distances in meters, angles in degrees)
    robot_boundaries = np.array([[1,-11],[15,11]]) # Bottom left, top right, global frame
    highway_x = 2.5 # "Highway" coordinate
    load_waypoint = np.array([highway_x, -8.5]) # xya (global frame) for waypoint to go to first before load prep
    highway_angle = -90
    base_station_boundaries = np.array([[2.5,10],[3.5,11]]) # Bottom left, top right, global frame
    base_station_target_angle = -90 # Target angle (deg) for base station in global frame
    base_station_relative_offset = np.array([0.9, 0, 0]) # Relative position of base station from prep pos - robot frame (x,y,a)
    base_station_vision_offset = np.array([0.01,0.010,-0.5]) # Vision offset for base station alignment
    base_station_prep_pos = np.array([highway_x, -10.5]) # Pose outside of base station to align with before going in to dock
    base_station_prep_vision_offset = np.array([0,0.01,-1]) # Vision offset to use for base station prep pose
    robot_pose_top_left = grid_top_left # Robot pose in global frame for top left of tile position of domino field
    domino_field_angle = -90 # Domino field angle (deg), global frame
    tile_placement_fine_offset = np.array([0, 0]) # Offset position for fine tile placement [x,y], in robot coordinate frame (to avoid hitting next column)
    tile_placement_coarse_offset = np.array([-0.5,0.5]) # Offset position for tile placement [x,y], in robot coordinate frame
    tile_to_robot_offset = np.array([-0.3, -tile_size_width_meters/2.0]) # Offset from bottom left of tile to robot center [x,y], in robot coordinate frame
    enter_position_distance = 0.25 # How far out of field boundaries to do robot prep move
    intermediate_entry_hz_y = 0 # Y coordinate for horizontal intermediate position
    intermediate_place_vt_x = 8 # X coordinate for vertical intermediate position
    field_to_robot_frame_angle = 90 # In case robot frame and field frame ever need to be rotated relative to each other

    # Used for testing sub-sections of the larger pattern
    if USE_SUBSECTION:
        start_coords = (6, 5)
        end_coords = (18, 6)
        # start_coords = (8, 4)
        # end_coords = (18, 4)

    # Left side
    # if USE_SMALL_TESTING_CONFIG:
    #     load_pose = np.array([7.5,7.5,0])
    #     robot_pose_top_left = np.array([12.20,9.472])
    #     domino_field_angle = -90
    #     tile_placement_coarse_offset = np.array([-0.5,-0.5])
    #     tile_to_robot_offset = np.array([-0.3, -tile_size_width_meters/2.0 ])

    # Right side
    if USE_SMALL_TESTING_CONFIG:
        load_pose = np.array([8,-6.5,0])
        robot_pose_top_left = np.array([12.74,-6.94])
        domino_field_angle = -90
        tile_placement_coarse_offset = np.array([-0.5,-0.5])
        tile_to_robot_offset = np.array([-0.3, -tile_size_width_meters/2.0 ])

    # Fine motion y offset adjustments
    # y_offset_cols = np.linspace(0.05, 0, num_tiles_height)
    y_offset_cols = np.linspace(0.51, 0, num_tiles_width)
    y_offset_rows = np.linspace(0, 0.0, num_tiles_height)
    x_offset_rows = np.linspace(0, 0.3, num_tiles_height)
    x_offset_rows[0:2] = 0

    # Angle adjustment for fine motion to try and prevent wheel from hitting
    angle_adjust_fine = 4 # degrees

    # Computed - don't change
    field_width = tile_size_width_meters * desired_width_dominos/tile_width
    field_height = tile_size_height_meters * desired_height_dominos/tile_height
    # Fix me
    domino_field_top_left = robot_pose_top_left + np.array([tile_size_height_meters-tile_to_robot_offset[0], -tile_to_robot_offset[1]]) \
        - np.array([0, 2*tile_size_width_meters]) #Utils.TransformPos(np.array([tile_size_height_meters-tile_to_robot_offset[0], -tile_to_robot_offset[1]]), [0,0], domino_field_angle)
    domino_field_origin = domino_field_top_left + Utils.TransformPos(np.array([0,-field_height]), [0,0], domino_field_angle)
    domino_field_top_right = domino_field_origin + Utils.TransformPos(np.array([field_width,field_height]), [0,0], domino_field_angle)
    domino_field_boundaries = np.array([domino_field_origin, domino_field_top_right])

    # ====== GUI CONFIGURATION ========
    graph_padding = 0.5 # How many meters of padding to have around edge of graph
    axes_label_offset = 0.25 # How many meters to offset labels on axes
    axes_tick_spacing = 5 # How many meters of space between tick marks
    axes_tick_size = 0.5 # How many meters long the tick marks should be
    robot_chassis_size = 0.6 # How many meters wide and long the robot chassis is
    robot_rotation_center_offset = 0.245 # How many meters from front of robot to center of rotation
    robot_direction_indicator_length = 0.8 # How many meters long to make the direction line for the robot render
    robot_direction_indicator_arrow_length = 0.4 # How many meters long to make direction arrow lines
    robot_direction_indicator_arrow_angle = 25 # How many degrees to make arrow angle lines
    # Computed - don't change
    graph_bottom_left = np.array((
        min(robot_boundaries[0][0], 0) - graph_padding,
        robot_boundaries[0][1] - graph_padding ))
    graph_size = np.max(robot_boundaries[1] - robot_boundaries[0]) + 2*graph_padding
    graph_top_right = graph_bottom_left + np.array([graph_size,graph_size])

    # ====== RUNTIME CONFIGURATION ========
    robot_status_wait_time = 0.2 # How many seconds to wait between status requests for each robot
    base_station_status_wait_time = 1 # How many seconds to wait between status requests for the base station
    robot_next_action_wait_time = 2.0 # How many seconds to wait before checking if robot is finished with current plan action


================================================
FILE: src/master/plans/vision_offsets small_testing_area.csv
================================================
#tile_x, tile_y, offset_x (millimeters), offset_y(millimeters), offset_a(degrees), add to default 
(True) or exact (False) 0,0,5,5,0,True 1,1,0,15,0,True 1,0,10,25,0,True ================================================ FILE: src/master/plans/vision_offsets_full_plan.csv ================================================ #tile_x, tile_y, offset_x (millimeters), offset_y(millimeters), offset_a(degrees) X,17,0,-4,0 ================================================ FILE: src/master/plot_logs2.py ================================================ import paramiko from scp import SCPClient import os import matplotlib.pyplot as plt import config import csv import numpy as np import datetime possible_rows = [ 'time', 'pos_x', 'pos_y', 'pos_a', 'vel_x', 'vel_y', 'vel_a', 'target_pos_x', 'target_pos_y', 'target_pos_a', 'target_vel_x', 'target_vel_y', 'target_vel_a', 'control_vel_x', 'control_vel_y', 'control_vel_a' ] def scp_last_motion_log(ip, remote_path, local_path): ssh_client = paramiko.SSHClient() ssh_client.load_system_host_keys() ssh_client.connect(ip, username="pi", password='dominobot') with SCPClient(ssh_client.get_transport()) as scp: scp.get(remote_path, local_path) if not os.path.exists(local_path): raise ValueError("SCP image did not complete successfully") def init_data(): data = {name:[] for name in possible_rows} data['title'] = "" data['start_time'] = None return data def parse_log_file(path): data = init_data() with open(local_file) as csvfile: reader = csv.reader(csvfile, delimiter=";") for id,row in enumerate(reader): if id == 0: continue elif id == 1: data['title'] = row[-1].replace('"','') else: parse_row(row, data) return data def parse_row(row, data): entry_data = row[-1].replace('"','').split(",") entry_name = entry_data[0] entry_values = entry_data[1:] entry_time = datetime.datetime.strptime(row[1], '%H:%M:%S.%f') if entry_name == "time": if not data["start_time"]: data["start_time"] = entry_time data["time"].append(0) else: dt = (entry_time - data['start_time'])/ datetime.timedelta(seconds=1) data["time"].append(dt) else: for i,axis in 
enumerate(['x','y','a']): axis_name = entry_name+'_'+axis axis_value = float(entry_values[i]) data[axis_name].append(axis_value) def plot_data(data, rows_to_plot=None): if not rows_to_plot: rows_to_plot = possible_rows[1:] plt.figure() ax = plt.gca() for row in rows_to_plot: ax.plot('time', row, data=data, label=row) ax.legend() ax.set_title(data['title']) plt.xlabel('Time (s)') plt.show() def plot_rows_axes(data, rows, axes): rows_to_plot = [row+'_'+axis for axis in axes for row in rows] plot_data(data, rows_to_plot) if __name__ == '__main__': cfg = config.Config() GET_FILE = True EXISTING_LOCAL_FILENAME = "WiggleVision.csv" if GET_FILE: robot_ip = cfg.ip_map['robot1'] remote_file = "/home/pi/DominoRobot/src/robot/log/last_motion_log.csv" local_file = os.path.join(cfg.log_folder, "last_motion_log.csv") scp_last_motion_log(robot_ip, remote_file, local_file) else: local_file = os.path.join(cfg.log_folder, EXISTING_LOCAL_FILENAME) parsed_data = parse_log_file(local_file) # plot_data(parsed_data, rows_to_plot=['control_vel_x','control_vel_y','control_vel_a']) plot_rows_axes(parsed_data, ['pos','vel','target_pos','target_vel','control_vel'], ['a']) ================================================ FILE: src/master/requirements.txt ================================================ cycler==0.10.0 decorator==4.4.2 imageio==2.9.0 kiwisolver==1.3.1 #Was originally 1.2.0, but wasn't building with Python 3.9.6 matplotlib==3.3.0 networkx==2.5 numpy==1.21.0 #Changed from 1.19.1 opencv-python==4.5.1.48 paramiko==2.7.2 Pillow==8.2.0 pyparsing==2.4.7 pyserial==3.4 PySimpleGUI==4.32.1 python-dateutil==2.8.1 PyWavelets==1.1.1 rope==0.18.0 scikit-image==0.18.0 #0.17.2 scipy==1.7.0 #1.5.2 scp==0.13.3 six==1.15.0 tifffile==2020.9.3 virtualenv==16.7.8 ================================================ FILE: src/robot/Makefile ================================================ # Started with https://spin.atomicobject.com/2016/08/26/makefile-c-projects/ # Additional inspiration from 
https://codereview.stackexchange.com/questions/157780/generic-makefile-handling-unit-testing-and-folder-structure TARGET_EXEC ?= robot-main TEST_EXEC ?= test-main BUILD_DIR ?= build SRC_DIRS ?= src TEST_DIRS ?= test SRCS := $(shell find $(SRC_DIRS) -name *.cpp -or -name *.c -or -name *.s) OBJS := $(SRCS:%=$(BUILD_DIR)/%.o) DEPS := $(OBJS:.o=.d) INC_DIRS := ./lib ./src /usr/local/include INC_FLAGS := $(addprefix -I,$(INC_DIRS)) CPPFLAGS ?= $(INC_FLAGS) -MMD -MP # -g for debug symbols, -O2 for run speed CXXFLAGS ?= -std=c++17 -Wall -Wextra -Wwrite-strings -Wno-parentheses -pthread -O3 `pkg-config --cflags opencv4` LDFLAGS += -ldl -lconfig++ -lstdc++fs `pkg-config --libs opencv4` LIBS += /usr/local/lib/libserial.a ./lib/marvelmind/marvelmind.a TEST_SRCS := $(shell find $(TEST_DIRS) -name *.cpp -or -name *.c -or -name *.s) TEST_OBJS := $(TEST_SRCS:%=$(BUILD_DIR)/%.o) $(filter-out build/src/main.cpp.o, $(OBJS)) vpath %.cpp $(SRC_DIRS) .PHONY: all all: $(BUILD_DIR)/$(TARGET_EXEC) .PHONY: test test: $(BUILD_DIR)/$(TEST_EXEC) # Target file $(BUILD_DIR)/$(TARGET_EXEC): $(OBJS) $(CXX) $(CXXFLAGS) $(OBJS) -o $@ $(LIBS) $(LDFLAGS) # Test file $(BUILD_DIR)/$(TEST_EXEC): $(TEST_OBJS) $(CXX) $(CXXFLAGS) $(TEST_OBJS) -o $@ $(LIBS) $(LDFLAGS) # Source files $(BUILD_DIR)/%.cpp.o: %.cpp $(MKDIR_P) $(dir $@) $(CXX) $(CPPFLAGS) $(CXXFLAGS) -c $< -o $@ .PHONY: clean clean: $(RM) -r $(BUILD_DIR) -include $(DEPS) MKDIR_P ?= mkdir -p .DELETE_ON_ERROR: ================================================ FILE: src/robot/README.md ================================================ # **Warning! This is probably out of date and might be inaccurate. Use at your own risk.** ## Libraries needed for installation on Raspberry Pi Required header libraries at src/robot/lib - ArduinoJson - Catch (for tests) - Eigen - plog - marvelmind library, compiled as .a - https://marvelmind.com/download/ I used 2020_04_10_C_example. 
This required the addition of `#include ` in `marvelmind.h` to compile correctly Required system libraries at /usr/local/include - libSerial: https://github.com/crayzeewulf/libserial (have to build from source due to outdated apt package) Required libraries at /usr/local/lib - libserial.a - opencv: (following instructions here: https://docs.opencv.org/master/d7/d9f/tutorial_linux_install.html and fix from here: https://github.com/cggos/DIPDemoQt/issues/1) Required libraries installed with apt-get - libconfig++-dev ## Build and run Go to `DominoRobot/src/robot` folder and run `make` to build. You can build tests with `make test` and remove all outputs with `make clean`. Run main or test using `build/robot-main` or `build/robot-test` respectively. The Raspberry Pi on the robot has these aliased to `run_robot` and `run_test` for ease of use. ================================================ FILE: src/robot/src/KalmanFilter.cpp ================================================ #include "KalmanFilter.h" KalmanFilter::KalmanFilter( Eigen::MatrixXf A, Eigen::MatrixXf B, Eigen::MatrixXf C, Eigen::MatrixXf Q, Eigen::MatrixXf R ) : KalmanFilter(A.rows(), C.rows()) { A_ = A; B_ = B; C_ = C; Q_ = Q; R_ = R; } KalmanFilter::KalmanFilter(int n, int m) : n_(n), m_(m), A_(Eigen::MatrixXf::Identity(n,n)), B_(Eigen::MatrixXf::Zero(n,n)), C_(Eigen::MatrixXf::Zero(m,n)), I_(Eigen::MatrixXf::Identity(n,n)), K_(Eigen::MatrixXf::Zero(n,m)), P_(Eigen::MatrixXf::Identity(n,n)), Q_(Eigen::MatrixXf::Identity(n,n)), R_(Eigen::MatrixXf::Identity(m,m)), S_(Eigen::MatrixXf::Identity(m,m)), x_hat_(Eigen::VectorXf::Zero(n)) {} void KalmanFilter::predict(const Eigen::VectorXf& u) { x_hat_ = A_ * x_hat_ + B_ * u; P_ = A_ * P_ * A_.transpose() + Q_; } void KalmanFilter::update(const Eigen::VectorXf& y, Eigen::MatrixXf R) { R_ = R; update(y); } void KalmanFilter::update(const Eigen::VectorXf& y) { S_ = C_ * P_ * C_.transpose() + R_; K_ = P_ * C_.transpose() * S_.inverse(); x_hat_ = x_hat_ + K_ * (y - C_ 
* x_hat_); P_ = (I_ - K_ * C_) * P_; } ================================================ FILE: src/robot/src/KalmanFilter.h ================================================ /** * Kalman filter loosely based on * https://github.com/hmartiro/kalman-cpp * and * https://github.com/tysik/kalman_filters */ #ifndef KalmanFilter_h #define KalmanFilter_h #include class KalmanFilter { public: KalmanFilter( Eigen::MatrixXf A, Eigen::MatrixXf B, Eigen::MatrixXf C, Eigen::MatrixXf Q, Eigen::MatrixXf R ); // Size only simplify initializing in a class KalmanFilter(int n, int m); // Prediction step with input u void predict(const Eigen::VectorXf& u); // Update the estimated state based on measured values. void update(const Eigen::VectorXf& y); void update(const Eigen::VectorXf& y, Eigen::MatrixXf R); // Get current state estimate Eigen::VectorXf state() { return x_hat_; }; Eigen::MatrixXf covariance() { return P_; }; void update_covariance(Eigen::MatrixXf P) {P_ = P;} private: // System dimensions int n_; int m_; // Matrices for computation Eigen::MatrixXf A_; Eigen::MatrixXf B_; Eigen::MatrixXf C_; Eigen::MatrixXf I_; Eigen::MatrixXf K_; Eigen::MatrixXf P_; Eigen::MatrixXf Q_; Eigen::MatrixXf R_; Eigen::MatrixXf S_; // Estimated state Eigen::VectorXf x_hat_; }; #endif //KalmanFilter_h ================================================ FILE: src/robot/src/Localization.cpp ================================================ #include "Localization.h" #include #include #include #include "constants.h" Localization::Localization() : pos_(0,0,0), vel_(0,0,0), mm_x_offset_(cfg.lookup("localization.mm_x_offset")), mm_y_offset_(cfg.lookup("localization.mm_y_offset")), variance_ref_trans_(cfg.lookup("localization.variance_ref_trans")), variance_ref_angle_(cfg.lookup("localization.variance_ref_angle")), meas_trans_cov_(cfg.lookup("localization.kf_meas_trans_cov")), meas_angle_cov_(cfg.lookup("localization.kf_meas_angle_cov")), 
localization_uncertainty_scale_(cfg.lookup("localization.kf_uncertainty_scale")), min_vel_uncertainty_(cfg.lookup("localization.min_vel_uncertainty")), vel_uncertainty_slope_(cfg.lookup("localization.vel_uncertainty_slope")), max_vel_uncetainty_(cfg.lookup("localization.max_vel_uncetainty")), vel_uncertainty_decay_time_(cfg.lookup("localization.vel_uncertainty_decay_time")), metrics_(), time_since_last_motion_(), kf_(3,3) { Eigen::MatrixXf A = Eigen::MatrixXf::Identity(3,3); Eigen::MatrixXf B = Eigen::MatrixXf::Identity(3,3); Eigen::MatrixXf C = Eigen::MatrixXf::Identity(3,3); Eigen::MatrixXf Q(3,3); float predict_trans_cov = cfg.lookup("localization.kf_predict_trans_cov"); float predict_angle_cov = cfg.lookup("localization.kf_predict_angle_cov"); Q << predict_trans_cov,0,0, 0,predict_trans_cov,0, 0,0,predict_angle_cov; Eigen::MatrixXf R(3,3); R << meas_trans_cov_,0,0, 0,meas_trans_cov_,0, 0,0,meas_angle_cov_; kf_ = KalmanFilter(A,B,C,Q,R); } void Localization::updatePositionReading(Point global_position) { if (fabs(global_position.a) > 3.2) { PLOGE << "INVALID ANGLE - should be in radians between +/- pi"; return; } // The x,y,a here is from the center of the marvlemind pair, so we need to transform it to the actual center of the // robot (i.e. 
center of rotation) Eigen::Vector3f raw_measured_position = {global_position.x, global_position.y, global_position.a}; Eigen::Vector3f adjusted_measured_position = marvelmindToRobotCenter(raw_measured_position); // Generate an uncertainty based on the current velocity and time since motion since we know the beacons are less accurate when moving const float position_uncertainty = computePositionUncertainty(); metrics_.last_position_uncertainty = position_uncertainty; Eigen::MatrixXf R(3,3); R << meas_trans_cov_ + position_uncertainty*localization_uncertainty_scale_,0,0, 0,meas_trans_cov_+ position_uncertainty*localization_uncertainty_scale_,0, 0,0,meas_angle_cov_+ position_uncertainty*localization_uncertainty_scale_; // PLOGI << "position_uncertainty: " << position_uncertainty; // PLOGI << "R: " << R; // PLOGI << "cov1: " << kf_.covariance(); kf_.update(adjusted_measured_position, R); Eigen::VectorXf est = kf_.state(); pos_.x = est[0]; pos_.y = est[1]; pos_.a = wrap_angle(est[2]); // PLOGI << "cov2: " << kf_.covariance(); PLOGI_(LOCALIZATION_LOG_ID).printf("\nPosition update:"); PLOGI_(LOCALIZATION_LOG_ID).printf(" Raw input: [%4.3f, %4.3f, %4.3f]", raw_measured_position(0), raw_measured_position(1), raw_measured_position(2)); PLOGI_(LOCALIZATION_LOG_ID).printf(" Adjusted input: [%4.3f, %4.3f, %4.3f]", adjusted_measured_position(0), adjusted_measured_position(1), adjusted_measured_position(2)); PLOGI_(LOCALIZATION_LOG_ID).printf(" position_uncertainty: %4.3f", position_uncertainty); PLOGI_(LOCALIZATION_LOG_ID).printf(" Current position: %s\n", pos_.toString().c_str()); } void Localization::updateVelocityReading(Velocity local_cart_vel, float dt) { // Convert local cartesian velocity to global cartesian velocity using the last estimated angle float cA = cos(pos_.a); float sA = sin(pos_.a); vel_.vx = cA * local_cart_vel.vx - sA * local_cart_vel.vy; vel_.vy = sA * local_cart_vel.vx + cA * local_cart_vel.vy; vel_.va = local_cart_vel.va; // Apply prediction step with 
velocity to get estimated position Eigen::Vector3f u = {vel_.vx, vel_.vy, vel_.va}; Eigen::Vector3f udt = dt*u; kf_.predict(udt); // PLOGI << "cov3: " << kf_.covariance(); time_since_last_motion_.reset(); Eigen::VectorXf est = kf_.state(); pos_.x = est[0]; pos_.y = est[1]; pos_.a = wrap_angle(est[2]); // Using the covariance matrix from kf, using those values to estimate a fractional 'confidence' // in our positioning relative to some reference amout. Eigen::MatrixXf cov = kf_.covariance(); // Compute inverse confidence. As long as the variance isn't larger than the reference values // these will be between 0-1 with 0 being more confident in the positioning metrics_.confidence_x = std::max(1-cov(0,0)/variance_ref_trans_, 0.0f); metrics_.confidence_y = std::max(1-cov(1,1)/variance_ref_trans_, 0.0f); metrics_.confidence_a = std::max(1-cov(2,2)/variance_ref_angle_, 0.0f); metrics_.total_confidence = (metrics_.confidence_x + metrics_.confidence_y + metrics_.confidence_a)/3; } Eigen::Vector3f Localization::marvelmindToRobotCenter(Eigen::Vector3f mm_global_position) { Eigen::Vector3f local_offset = {mm_x_offset_/1000.0f, mm_y_offset_/1000.0f, 0.0f}; float cA = cos(mm_global_position(2)); float sA = sin(mm_global_position(2)); Eigen::Matrix3f rotation; rotation << cA, -sA, 0.0f, sA, cA, 0.0f, 0.0f, 0.0f, 1.0f; Eigen::Vector3f adjusted_position = mm_global_position - rotation * local_offset; // PLOGD_(LOCALIZATION_LOG_ID).printf("\nMM to robot frame:"); // PLOGD_(LOCALIZATION_LOG_ID).printf(" mm position: [%4.3f, %4.3f, %4.3f]", mm_global_position(0), mm_global_position(1), mm_global_position(2)); // PLOGD_(LOCALIZATION_LOG_ID).printf(" R: \n[%4.3f, %4.3f, %4.3f]\n[%4.3f, %4.3f, %4.3f]\n[%4.3f, %4.3f, %4.3f]", // rotation(0,0), rotation(0,1), rotation(0,2), rotation(1,0), rotation(1,1), rotation(1,2), rotation(2,0), rotation(2,1), rotation(2,2)); // PLOGD_(LOCALIZATION_LOG_ID).printf(" new position: [%4.3f, %4.3f, %4.3f]", adjusted_position(0), adjusted_position(1), 
adjusted_position(2)); return adjusted_position; } float Localization::computePositionUncertainty() { Eigen::Vector3f v = {vel_.vx, vel_.vy, vel_.va}; float total_v = v.norm(); float position_uncertainty = 0.0f; if(total_v > 0.01) { position_uncertainty = std::max(min_vel_uncertainty_ + vel_uncertainty_slope_*total_v, max_vel_uncetainty_); } else { position_uncertainty = std::max(min_vel_uncertainty_ * (vel_uncertainty_decay_time_ - time_since_last_motion_.dt_s()) / vel_uncertainty_decay_time_, 0.0f); } PLOGD_(LOCALIZATION_LOG_ID).printf("\nPosition Uncertainty:"); PLOGD_(LOCALIZATION_LOG_ID).printf(" Total v: %4.3f, time since motion: %4.3f, position_uncertainty: %4.3f", total_v, time_since_last_motion_.dt_s(), position_uncertainty); return position_uncertainty; } void Localization::resetAngleCovariance() { Eigen::MatrixXf covariance = kf_.covariance(); covariance(2,2) = variance_ref_angle_; kf_.update_covariance(covariance); } ================================================ FILE: src/robot/src/Localization.h ================================================ #ifndef Localization_h #define Localization_h #include "utils.h" #include #include "KalmanFilter.h" class Localization { public: Localization(); void updatePositionReading(Point global_position); void updateVelocityReading(Velocity local_cart_vel, float dt); Point getPosition() {return pos_; }; Velocity getVelocity() {return vel_; }; void forceZeroVelocity() {vel_ = {0,0,0}; }; // Force the position to a specific value, bypassing localization algorithms (used for testing/debugging) void forceSetPosition(Point global_position) {pos_ = global_position;}; LocalizationMetrics getLocalizationMetrics() { return metrics_; }; void resetAngleCovariance(); private: // Convert position reading from marvelmind frame to robot frame Eigen::Vector3f marvelmindToRobotCenter(Eigen::Vector3f mm_global_position); // Compute a multiplier based on the current velocity that informs how trustworthy the current reading might be float 
computePositionUncertainty(); // Current position and velocity Point pos_; Velocity vel_; // Parameters for localization algorithms float mm_x_offset_; float mm_y_offset_; float variance_ref_trans_; float variance_ref_angle_; float meas_trans_cov_; float meas_angle_cov_; float localization_uncertainty_scale_; float min_vel_uncertainty_; float vel_uncertainty_slope_; float max_vel_uncetainty_; float vel_uncertainty_decay_time_; LocalizationMetrics metrics_; Timer time_since_last_motion_; KalmanFilter kf_; }; #endif //Localization_h ================================================ FILE: src/robot/src/MarvelmindWrapper.cpp ================================================ #include "MarvelmindWrapper.h" #include "constants.h" #include "plog/Log.h" #include MarvelmindWrapper::MarvelmindWrapper() : hedge_(createMarvelmindHedge()), ready_(false) { // I expect to 2 devices connected, but because of the fun of linux and udev rules, I don't know // which of the three possible names the devices could have. I could later change this to first // do an `ls /dev/ | grep marvelmind` and find the right ports, but this should work fine too. 
if(std::filesystem::exists(MARVELMIND_USB_0)) { hedge_->ttyFileName = MARVELMIND_USB_0; PLOGI << "Found device " << MARVELMIND_USB_0; } else if(std::filesystem::exists(MARVELMIND_USB_1)) { hedge_->ttyFileName = MARVELMIND_USB_1; PLOGI << "Found device " << MARVELMIND_USB_1; } else if(std::filesystem::exists(MARVELMIND_USB_2)) { hedge_->ttyFileName = MARVELMIND_USB_2; PLOGI << "Found device " << MARVELMIND_USB_2; } else { PLOGW << "No marvelmind devices connected"; return; } // Now actually try to connect hedge_->verbose = true; startMarvelmindHedge(hedge_); ready_ = true; } std::vector MarvelmindWrapper::getPositions() { PositionValue position_data; std::vector output; if (!ready_) { return output; } bool ok = getPositionFromMarvelmindHedge(hedge_, &position_data); if (ok) { //PLOGI.printf("Position data - ready: %i, addr: %i, ts: %i, x: %i, y: %i, a:%f", position_data.ready, position_data.address, position_data.timestamp, position_data.x, position_data.y, position_data.angle); if (position_data.ready) { output.push_back(position_data.x/1000.0); output.push_back(position_data.y/1000.0); output.push_back(position_data.angle); } } return output; } MarvelmindWrapper::~MarvelmindWrapper() { if(ready_){ stopMarvelmindHedge (hedge_); } destroyMarvelmindHedge (hedge_); } // MarvelmindWrapper::MarvelmindWrapper() // { // marvelmindAPILoad(); // PLOGI << "Marvelmind API loaded"; // // I expect to connect to 2 devices, but because of the fun of linux and udev rules, I don't know // // which of the three possible names the devices could have. I could later change this to first // // do an `ls /dev/ | grep marvelmind` and find the right ports, but this should work fine too. 
// // char portName0[64] = MARVELMIND_USB_0; // // char portName1[64] = MARVELMIND_USB_1; // // char portName2[64] = MARVELMIND_USB_2; // // PLOGI << portName0; // // bool success0 = mmOpenPortByName(portName0); // // bool success1 = mmOpenPortByName(portName1); // // bool success2 = mmOpenPortByName(portName2); // // PLOGI << "S: " << success0 << success1 << success2; // // int success = static_cast(success0) + static_cast(success1) + static_cast(success2); // // if(success == 2) // // { // // PLOGI << "Successfully connected to 2 Marvelmind devices"; // // } // // else if (success == 1) // // { // // PLOGW << "Only able to connect to 1 Marvelmind device"; // // } // // else if (success == 0) // // { // // PLOGW << "Unable to connect to any Marvelmind devices"; // // } // // else // // { // // PLOGE << "Managed to connect to " << success << "Marvelmind devices. That shouldn't happen!"; // // } // // Open loop // bool open = false; // int count = 0; // while(!open && count < 10000) // { // open = mmOpenPort(); // count++; // usleep(1000); // } // PLOGI << "Open status: " << open; // // Get usb loop // bool usb_ready = false; // count = 0; // MarvelmindDeviceVersion usbDevVersion; // while(!usb_ready && count < 10000) // { // usb_ready = mmGetVersionAndId(MM_USB_DEVICE_ADDRESS, &usbDevVersion); // count++; // usleep(1000); // } // PLOGI <<"USB ready: " << usb_ready; // if(usb_ready) // { // PLOGI << "Device type: " << (usbDevVersion.fwVerDeviceType); // PLOGI << "Device is hedge: " << mmDeviceIsHedgehog(usbDevVersion.fwVerDeviceType); // } // // bool device_list_ok = false; // // count = 0; // // MarvelmindDevicesList device_list; // // while(!device_list_ok && count < 10) // // { // // device_list_ok = mmGetDevicesList(&device_list); // // count++; // // PLOGI << count; // // usleep(1000); // // } // // PLOGI << "Device list status: " << device_list_ok; // // if(!device_list_ok) // // { // // PLOGW << "Could not retrieve list of Marvelmind devices"; // // } // // 
else
// // {
// // for(int i = 0; i < device_list.numDevices; i++)
// // {
// // MarvelmindDeviceInfo data = device_list.devices[i];
// // PLOGI << "Found device " << data.address;
// // // if(data.address == MARVELMIND_DEVICE_ID0 || data.address == MARVELMIND_DEVICE_ID1)
// // // {
// // // PLOGI << "Found device " << data.address;
// // // if (data.isSleeping)
// // // {
// // // PLOGI << "Device " << data.address << " is sleeping, triggering wake up";
// // // mmWakeDevice(data.address);
// // // }
// // // else
// // // {
// // // PLOGI << "Device " << data.address << " is already awake";
// // // }
// // // }
// // }
// // }
// }


================================================
FILE: src/robot/src/MarvelmindWrapper.h
================================================
#ifndef MarvelmindWrapper_h
#define MarvelmindWrapper_h

#include

extern "C" {
#include
}

// Thin RAII wrapper around the Marvelmind hedge C API (see MarvelmindWrapper.cpp).
class MarvelmindWrapper
{
  public:
    MarvelmindWrapper();
    ~MarvelmindWrapper();

    // Returns {x, y, angle} when a fresh reading is available, else empty.
    std::vector getPositions();

  private:
    MarvelmindHedge* hedge_;  // handle owned by this wrapper
    bool ready_;              // true once a device was found and the hedge started
};

#endif


================================================
FILE: src/robot/src/RobotController.cpp
================================================
#include "RobotController.h"

#include
#include
#include

#include "constants.h"
#include "serial/SerialCommsFactory.h"
#include "robot_controller_modes/RobotControllerModePosition.h"
#include "robot_controller_modes/RobotControllerModeVision.h"
#include "robot_controller_modes/RobotControllerModeStopFast.h"

// Constructor: wires up serial comms to the motor driver, loads loop/log rates
// and velocity limits from config, and defaults limits mode to FINE.
RobotController::RobotController(StatusUpdater& statusUpdater)
: statusUpdater_(statusUpdater),
  serial_to_motor_driver_(SerialCommsFactory::getFactoryInstance()->get_serial_comms(CLEARCORE_USB)),
  localization_(),
  prevOdomLoopTimer_(),
  cartPos_(),
  cartVel_(),
  trajRunning_(false),
  limits_mode_(LIMITS_MODE::FINE),
  controller_rate_(cfg.lookup("motion.controller_frequency")),
  logging_rate_(cfg.lookup("motion.log_frequency")),
  log_this_cycle_(false),
  fake_perfect_motion_(cfg.lookup("motion.fake_perfect_motion")),
  fake_local_cart_vel_(0,0,0),
  max_cart_vel_limit_({cfg.lookup("motion.translation.max_vel.coarse"),
                       cfg.lookup("motion.translation.max_vel.coarse"),
                       cfg.lookup("motion.rotation.max_vel.coarse")}),
  loop_time_averager_(20)
{
    if(fake_perfect_motion_) PLOGW << "Fake robot motion enabled";
}

// Move to absolute position (x, y, a) using COARSE velocity limits.
// NOTE: the template argument of std::make_unique below was lost in extraction
// (presumably RobotControllerModePosition, per the includes above).
void RobotController::moveToPosition(float x, float y, float a)
{
    reset_last_motion_logger();
    limits_mode_ = LIMITS_MODE::COARSE;
    setCartVelLimits(limits_mode_);
    Point goal_pos = Point(x,y,a);
    PLOGI_(MOTION_CSV_LOG_ID).printf("MoveToPosition: %s",goal_pos.toString().c_str());
    auto position_mode = std::make_unique(fake_perfect_motion_);
    bool ok = position_mode->startMove(cartPos_, goal_pos, limits_mode_);
    if (ok)
    {
        startTraj();
        controller_mode_ = std::move(position_mode);
    }
    else
    {
        statusUpdater_.setErrorStatus();
    }
}

// Move relative to the current pose (deltas given in the robot's local frame),
// using COARSE limits. The local deltas are rotated into the global frame first.
void RobotController::moveToPositionRelative(float dx_local, float dy_local, float da_local)
{
    reset_last_motion_logger();
    limits_mode_ = LIMITS_MODE::COARSE;
    setCartVelLimits(limits_mode_);
    // Rotate local-frame deltas into the global frame using the current heading
    float dx_global = cos(cartPos_.a) * dx_local - sin(cartPos_.a) * dy_local;
    float dy_global = sin(cartPos_.a) * dx_local + cos(cartPos_.a) * dy_local;
    float da_global = da_local;
    Point goal_pos = Point(cartPos_.x + dx_global, cartPos_.y + dy_global, wrap_angle(cartPos_.a + da_global));
    PLOGI_(MOTION_CSV_LOG_ID).printf("MoveToPositionRelative: %s",goal_pos.toString().c_str());
    auto position_mode = std::make_unique(fake_perfect_motion_);
    bool ok = position_mode->startMove(cartPos_, goal_pos, limits_mode_);
    if (ok)
    {
        startTraj();
        controller_mode_ = std::move(position_mode);
    }
    else
    {
        statusUpdater_.setErrorStatus();
    }
}

// Same as moveToPositionRelative, but with SLOW velocity limits.
void RobotController::moveToPositionRelativeSlow(float dx_local, float dy_local, float da_local)
{
    reset_last_motion_logger();
    limits_mode_ = LIMITS_MODE::SLOW;
    setCartVelLimits(limits_mode_);
    float dx_global = cos(cartPos_.a) * dx_local - sin(cartPos_.a) * dy_local;
    float dy_global = sin(cartPos_.a) * dx_local + cos(cartPos_.a) * dy_local;
    float da_global = da_local;
    Point goal_pos = Point(cartPos_.x + dx_global, cartPos_.y + dy_global, wrap_angle(cartPos_.a + da_global));
    PLOGI_(MOTION_CSV_LOG_ID).printf("MoveToPositionRelativeSlow: %s",goal_pos.toString().c_str());
    auto position_mode = std::make_unique(fake_perfect_motion_);
    bool ok = position_mode->startMove(cartPos_, goal_pos, limits_mode_);
    if (ok)
    {
        startTraj();
        controller_mode_ = std::move(position_mode);
    }
    else
    {
        statusUpdater_.setErrorStatus();
    }
}

// Move to absolute position with FINE (high accuracy, low speed) limits.
void RobotController::moveToPositionFine(float x, float y, float a)
{
    reset_last_motion_logger();
    limits_mode_ = LIMITS_MODE::FINE;
    setCartVelLimits(limits_mode_);
    Point goal_pos = Point(x,y,a);
    PLOGI_(MOTION_CSV_LOG_ID).printf("MoveToPositionFine: %s",goal_pos.toString().c_str());
    auto position_mode = std::make_unique(fake_perfect_motion_);
    bool ok = position_mode->startMove(cartPos_, goal_pos, limits_mode_);
    if (ok)
    {
        startTraj();
        controller_mode_ = std::move(position_mode);
    }
    else
    {
        statusUpdater_.setErrorStatus();
    }
}

// Constant-velocity motion is intentionally unimplemented; arguments are
// consumed to silence unused-parameter warnings.
void RobotController::moveConstVel(float vx , float vy, float va, float t)
{
    (void) vx;
    (void) vy;
    (void) va;
    (void) t;
    PLOGE << "Not implimented";
}

// Move to (x, y, a) under camera/vision guidance, using VISION limits.
void RobotController::moveWithVision(float x, float y, float a)
{
    reset_last_motion_logger();
    limits_mode_ = LIMITS_MODE::VISION;
    setCartVelLimits(limits_mode_);
    Point goal = Point(x,y,a);
    PLOGI_(MOTION_CSV_LOG_ID).printf("MoveWithVision: %s",goal.toString().c_str());
    auto vision_mode = std::make_unique(fake_perfect_motion_, statusUpdater_);
    bool ok = vision_mode->startMove(goal);
    if (ok)
    {
        startTraj();
        controller_mode_ = std::move(vision_mode);
    }
    else
    {
        statusUpdater_.setErrorStatus();
    }
}

// Bring the robot to a controlled stop from its current state as fast as the
// FINE limits allow. Unlike the move commands, this has no failure branch.
void RobotController::stopFast()
{
    reset_last_motion_logger();
    limits_mode_ = LIMITS_MODE::FINE;
    setCartVelLimits(limits_mode_);
    PLOGI_(MOTION_CSV_LOG_ID).printf("Stop Fast");
    auto stop_fast_mode = std::make_unique(fake_perfect_motion_);
    stop_fast_mode->startMove(cartPos_, cartVel_);
    startTraj();
    controller_mode_ = std::move(stop_fast_mode);
}

// Marks a trajectory as active and powers the drive motors.
void RobotController::startTraj()
{
    trajRunning_ = true;
    enableAllMotors();
    PLOGI.printf("Starting move");
}

// Emergency stop: abandon the active trajectory, restore FINE limits, and
// cut motor power (motors coast to a stop — see disableAllMotors).
void RobotController::estop()
{
    PLOGW.printf("Estopping robot control");
    PLOGD_(MOTION_LOG_ID) << "\n====ESTOP====\n";
    trajRunning_ = false;
    limits_mode_ = LIMITS_MODE::FINE;
    disableAllMotors();
}

// Main control loop body. Rate-limited internally; call as fast as possible.
// Computes a target velocity from the active controller mode (or zero when
// idle), sends it to the motors, updates odometry, and publishes status.
void RobotController::update()
{
    // Ensures controller runs at approximately constant rate
    if (!controller_rate_.ready()) { return; }

    // Ensures logging doesn't get out of hand
    log_this_cycle_ = logging_rate_.ready();

    // Create a command based on the trajectory or not moving
    Velocity target_vel;
    if(trajRunning_)
    {
        target_vel = controller_mode_->computeTargetVelocity(cartPos_, cartVel_, log_this_cycle_);

        // Check if we are finished with the trajectory
        if (controller_mode_->checkForMoveComplete(cartPos_, cartVel_))
        {
            PLOGI.printf("Reached goal");
            PLOGD_(MOTION_LOG_ID) << "Trajectory complete";
            target_vel = {0,0,0};
            disableAllMotors();
            trajRunning_ = false;

            // Bit of a hack for reseting angle covariance after vision motion
            // This avoids weird rotations from error accumlated in the kalman filter
            if (limits_mode_ == LIMITS_MODE::VISION)
            {
                localization_.resetAngleCovariance();
            }

            // Re-enable fine mode at the end of a trajectory
            limits_mode_ = LIMITS_MODE::FINE;
        }
    }
    else
    {
        // Force zero velocity
        target_vel = {0,0,0};
        // Force current velocity to 0
        localization_.forceZeroVelocity();
        // Force flags to default values
        limits_mode_ = LIMITS_MODE::FINE;
    }

    // Run controller and odometry update
    setCartVelCommand(target_vel);
    computeOdometry();

    // Update status
    statusUpdater_.updatePosition(cartPos_.x, cartPos_.y, cartPos_.a);
    statusUpdater_.updateVelocity(cartVel_.vx, cartVel_.vy, cartVel_.va);
    loop_time_averager_.mark_point();
    statusUpdater_.updateControlLoopTime(loop_time_averager_.get_ms());
    statusUpdater_.updateLocalizationMetrics(localization_.getLocalizationMetrics());
}

// Powers on the drive base via the motor driver serial link; no-op (with a
// warning) when the driver is not connected.
void RobotController::enableAllMotors()
{
    if (serial_to_motor_driver_->isConnected())
    {
        serial_to_motor_driver_->send("base:Power:ON");
        PLOGI << "Motors enabled";
        PLOGD_(MOTION_LOG_ID) << "Motors enabled";
    }
    else
    {
        PLOGW << "No connection: Skipping motor enable";
    }
}

// Powers off the drive base (motors coast); no-op with a warning when the
// driver is not connected.
void RobotController::disableAllMotors()
{
    if (serial_to_motor_driver_->isConnected())
    {
        serial_to_motor_driver_->send("base:Power:OFF");
        PLOGI << "Motors disabled";
        PLOGD_(MOTION_LOG_ID) << "Motors disabled";
    }
    else
    {
        PLOGW << "No connection: Skipping motor disable";
    }
}

// Feeds a MarvelMind position reading into localization. The reading is only
// used in FINE/COARSE modes (vision modes trust the cameras instead); either
// way, the raw pose and whether it was used are reported to the status updater.
void RobotController::inputPosition(float x, float y, float a)
{
    bool last_mm_used = false;
    if(limits_mode_ == LIMITS_MODE::FINE || limits_mode_ == LIMITS_MODE::COARSE)
    {
        localization_.updatePositionReading({x,y,a});
        cartPos_ = localization_.getPosition();
        last_mm_used = true;
    }
    statusUpdater_.updateLastMarvelmindPose({x,y,a}, last_mm_used);
}

// Debug/test hook: overwrite the localization estimate with an exact pose.
void RobotController::forceSetPosition(float x, float y, float a)
{
    localization_.forceSetPosition({x,y,a});
    cartPos_ = localization_.getPosition();
}

// Loads the velocity cap for the given limits mode from config. Note that
// SLOW shares the FINE caps here.
void RobotController::setCartVelLimits(LIMITS_MODE limits_mode)
{
    if(limits_mode == LIMITS_MODE::VISION)
    {
        max_cart_vel_limit_ = {
            cfg.lookup("motion.translation.max_vel.vision"),
            cfg.lookup("motion.translation.max_vel.vision"),
            cfg.lookup("motion.rotation.max_vel.vision")};
    }
    else if(limits_mode == LIMITS_MODE::FINE || limits_mode == LIMITS_MODE::SLOW)
    {
        max_cart_vel_limit_ = {
            cfg.lookup("motion.translation.max_vel.fine"),
            cfg.lookup("motion.translation.max_vel.fine"),
            cfg.lookup("motion.rotation.max_vel.fine")};
    }
    else
    {
        max_cart_vel_limit_ = {
            cfg.lookup("motion.translation.max_vel.coarse"),
            cfg.lookup("motion.translation.max_vel.coarse"),
            cfg.lookup("motion.rotation.max_vel.coarse")};
    }
}

// Polls the motor driver (up to 10 attempts) for a comma-delimited velocity
// message and decodes it into *decodedVelocity. Returns true only when a
// well-formed 3-element message was received.
bool RobotController::readMsgFromMotorDriver(Velocity* decodedVelocity)
{
    std::string msg = "";
    std::vector tmpVelocity = {0,0,0};
    if (serial_to_motor_driver_->isConnected())
    {
        int count = 0;
        while(msg.empty() && count++ < 10)
        {
            msg = serial_to_motor_driver_->rcv_base();
        }
    }

    if (msg.empty())
    {
        return false;
    }
    else
    {
        tmpVelocity = parseCommaDelimitedStringToFloat(msg);
        if(tmpVelocity.size() != 3)
        {
            PLOGW.printf("Decode failed");
            return false;
        }
    }
    decodedVelocity->vx = tmpVelocity[0];
    decodedVelocity->vy = tmpVelocity[1];
    decodedVelocity->va = tmpVelocity[2];
    return true;
}

// Updates the localization estimate from measured (or faked) local-frame
// velocity. Returns early without an update if no velocity reading arrived.
void RobotController::computeOdometry()
{
    Velocity local_cart_vel = {0,0,0};
    if(fake_perfect_motion_)
    {
        local_cart_vel = fake_local_cart_vel_;
    }
    else if(!readMsgFromMotorDriver(&local_cart_vel))
    {
        return;
    }

    Velocity zero = {0,0,0};
    if(trajRunning_ || !(local_cart_vel == zero))
    {
        PLOGD_IF_(MOTION_LOG_ID, log_this_cycle_).printf("Decoded velocity: %.3f, %.3f, %.3f\n", local_cart_vel.vx, local_cart_vel.vy, local_cart_vel.va);
    }
    // Bypassing motor feedback and just using fake_local_cart_vel_ here may give better performance, but its hard to tell.

    // Compute time since last odom update
    float dt = prevOdomLoopTimer_.dt_s();
    prevOdomLoopTimer_.reset();

    localization_.updateVelocityReading(local_cart_vel, dt);
    cartPos_ = localization_.getPosition();
    cartVel_ = localization_.getVelocity();
}

// Converts a global-frame velocity command to the robot's local frame, clamps
// each axis to max_cart_vel_limit_ for safety, and sends it to the motor
// driver (or stores it as the faked velocity in fake_perfect_motion_ mode).
void RobotController::setCartVelCommand(Velocity target_vel)
{
    if (trajRunning_)
    {
        PLOGD_IF_(MOTION_LOG_ID, log_this_cycle_).printf("CartVelCmd: [vx: %.4f, vy: %.4f, va: %.4f]", target_vel.vx, target_vel.vy, target_vel.va);
    }

    // Convert input global velocities to local velocities
    Velocity local_cart_vel;
    float cA = cos(cartPos_.a);
    float sA = sin(cartPos_.a);
    local_cart_vel.vx =  cA * target_vel.vx + sA * target_vel.vy;
    local_cart_vel.vy = -sA * target_vel.vx + cA * target_vel.vy;
    local_cart_vel.va = target_vel.va;

    // Cap velocity for safety
    if(fabs(local_cart_vel.vx) > max_cart_vel_limit_.vx)
    {
        float clamped_vel = sgn(local_cart_vel.vx) * max_cart_vel_limit_.vx;
        PLOGW.printf("Attempted to command X velocity of %.3f m/s, clamping to %.3f m/s", local_cart_vel.vx, clamped_vel);
        PLOGW_(MOTION_LOG_ID).printf("Attempted to command X velocity of %.3f m/s, clamping to %.3f m/s", local_cart_vel.vx, clamped_vel);
        local_cart_vel.vx = clamped_vel;
    }
    if(fabs(local_cart_vel.vy) > max_cart_vel_limit_.vy)
    {
        float clamped_vel = sgn(local_cart_vel.vy) * max_cart_vel_limit_.vy;
        PLOGW.printf("Attempted to command Y velocity of %.3f m/s, clamping to %.3f m/s", local_cart_vel.vy, clamped_vel);
        PLOGW_(MOTION_LOG_ID).printf("Attempted to command Y velocity of %.3f m/s, clamping to %.3f m/s", local_cart_vel.vy, clamped_vel);
        local_cart_vel.vy = clamped_vel;
    }
    if(fabs(local_cart_vel.va) > max_cart_vel_limit_.va)
    {
        float clamped_vel = sgn(local_cart_vel.va) * max_cart_vel_limit_.va;
        PLOGW.printf("Attempted to command A velocity of %.3f rad/s, clamping to %.3f rad/s", local_cart_vel.va, clamped_vel);
        PLOGW_(MOTION_LOG_ID).printf("Attempted to command A velocity of %.3f rad/s, clamping to %.3f rad/s", local_cart_vel.va, clamped_vel);
        local_cart_vel.va = clamped_vel;
    }

    // Prep velocity data to send to motor driver
    char buff[100];
    sprintf(buff, "base:%.4f,%.4f,%.4f",local_cart_vel.vx, local_cart_vel.vy, local_cart_vel.va);
    std::string s = buff;

    if (local_cart_vel.vx != 0 || local_cart_vel.vy != 0 || local_cart_vel.va != 0 )
    {
        PLOGD_IF_(MOTION_LOG_ID, log_this_cycle_).printf("Sending to motors: [%s]", s.c_str());
    }

    if(fake_perfect_motion_)
    {
        fake_local_cart_vel_ = local_cart_vel;
    }
    else if (serial_to_motor_driver_->isConnected())
    {
        serial_to_motor_driver_->send(s);
    }
}


================================================
FILE: src/robot/src/RobotController.h
================================================
#ifndef RobotController_h
#define RobotController_h

#include "SmoothTrajectoryGenerator.h"
#include "StatusUpdater.h"
#include "serial/SerialComms.h"
#include "utils.h"
#include "Localization.h"
#include "robot_controller_modes/RobotControllerModeBase.h"

// High-level motion controller: accepts move commands, delegates velocity
// generation to a RobotControllerMode, and closes the loop with localization.
class RobotController
{
  public:
    // Constructor
    RobotController(StatusUpdater& statusUpdater);

    // Command robot to move a specific position with low accuracy
    void moveToPosition(float x, float y, float a);

    // Command robot to move a specific position relative to current position with low accuracy
    void moveToPositionRelative(float dx_local,
float dy_local, float da_local);

    // Command robot to move a specific position relative to current position with low speed
    void moveToPositionRelativeSlow(float dx_local, float dy_local, float da_local);

    // Command robot to move to a specific position with high accuracy
    void moveToPositionFine(float x, float y, float a);

    // Command robot to move with a constant velocity for some amount of time
    void moveConstVel(float vx , float vy, float va, float t);

    void moveWithVision(float x, float y, float a);

    void stopFast();

    // Main update loop. Should be called as fast as possible
    void update();

    // Enable all motors at once
    void enableAllMotors();

    // Disable all motors at once. This will cause motors to coast to a stop
    void disableAllMotors();

    // Provide a position reading from the MarvelMind sensors
    void inputPosition(float x, float y, float a);

    // Force the position to a specific value, bypassing localization algorithms (used for testing/debugging)
    void forceSetPosition(float x, float y, float a);

    // Indicates if a trajectory is currently active
    bool isTrajectoryRunning() { return trajRunning_; };

    // Stops the currently running motion
    void estop();

    Point getCurrentPosition() { return cartPos_; };

  private:

    //Internal methods
    // Set the global cartesian velocity command
    void setCartVelCommand(Velocity target_vel);

    // Update loop for motor objects
    void updateMotors();

    // Calculate wheel odometry
    void computeOdometry();

    // Sets up everything to start the trajectory running
    void startTraj();

    // Reads an incoming message from the motor driver and fills the decoded
    // velocity in the pointer, if available. Returns true if velocity is filled, false otherwise
    bool readMsgFromMotorDriver(Velocity* decodedVelocity);

    void setCartVelLimits(LIMITS_MODE limits_mode);

    // Member variables
    StatusUpdater& statusUpdater_;            // Reference to status updater object to input status info about the controller
    SerialCommsBase* serial_to_motor_driver_; // Serial connection to motor driver
    Localization localization_;               // Object that handles localization
    Timer prevOdomLoopTimer_;                 // Timer for odom loop
    Point cartPos_;                           // Current cartesian position
    Velocity cartVel_;                        // Current cartesian velocity
    bool trajRunning_;                        // If a trajectory is currently active
    LIMITS_MODE limits_mode_;                 // Which limits mode is being used.
    RateController controller_rate_;          // Rate limit controller loops
    RateController logging_rate_ ;            // Rate limit logging to file
    bool log_this_cycle_;                     // Trigger for logging this cycle
    bool fake_perfect_motion_;                // Flag used for testing to enable perfect motion without clearcore
    Velocity fake_local_cart_vel_;            // Commanded local cartesian velocity used to fake perfect motion
    Velocity max_cart_vel_limit_;             // Maximum velocity allowed, used to limit commanded velocity
    TimeRunningAverage loop_time_averager_;   // Handles keeping average of the loop timing

    // NOTE: template argument lost in extraction (presumably RobotControllerModeBase)
    std::unique_ptr controller_mode_;
};

#endif


================================================
FILE: src/robot/src/RobotServer.cpp
================================================
#include "RobotServer.h"

#include
#include

#include "sockets/SocketMultiThreadWrapperFactory.h"

#include

// Constructor: zero-initializes the cached command payloads and grabs the
// shared socket wrapper from its factory.
RobotServer::RobotServer(StatusUpdater& statusUpdater)
: moveData_(),
  positionData_(),
  velocityData_(),
  statusUpdater_(statusUpdater),
  recvInProgress_(false),
  recvIdx_(0),
  buffer_(""),
  socket_(SocketMultiThreadWrapperFactory::getFactoryInstance()->get_socket())
{
}

// Parses one JSON message from the master, caches any payload (move/position/
// velocity data) into the corresponding member, sends an ack/err reply, and
// returns the decoded COMMAND (NONE on parse failure or for pure queries).
COMMAND RobotServer::getCommand(std::string message)
{
    COMMAND cmd = COMMAND::NONE;
    StaticJsonDocument<256> doc;
    DeserializationError err = deserializeJson(doc, message);
    if(err)
    {
        printIncomingCommand(message);
        PLOGI.printf("Error parsing JSON: ");
        PLOGI.printf(err.c_str());
        sendErr("bad_json");
    }
    else
    {
        // Dispatch on the "type" field. Most branches ack immediately; the
        // high-rate "p"/"set_pose" messages skip printIncomingCommand to keep
        // the log quiet.
        std::string type = doc["type"];
        if(type == "move")
        {
            cmd = COMMAND::MOVE;
            moveData_.x = doc["data"]["x"];
            moveData_.y = doc["data"]["y"];
            moveData_.a = doc["data"]["a"];
            printIncomingCommand(message);
            sendAck(type);
        }
        else if(type == "move_rel")
        {
            cmd = COMMAND::MOVE_REL;
            moveData_.x = doc["data"]["x"];
            moveData_.y = doc["data"]["y"];
            moveData_.a = doc["data"]["a"];
            printIncomingCommand(message);
            sendAck(type);
        }
        else if(type == "move_rel_slow")
        {
            cmd = COMMAND::MOVE_REL_SLOW;
            moveData_.x = doc["data"]["x"];
            moveData_.y = doc["data"]["y"];
            moveData_.a = doc["data"]["a"];
            printIncomingCommand(message);
            sendAck(type);
        }
        else if(type == "move_fine")
        {
            cmd = COMMAND::MOVE_FINE;
            moveData_.x = doc["data"]["x"];
            moveData_.y = doc["data"]["y"];
            moveData_.a = doc["data"]["a"];
            printIncomingCommand(message);
            sendAck(type);
        }
        else if(type == "move_fine_stop_vision")
        {
            cmd = COMMAND::MOVE_FINE_STOP_VISION;
            moveData_.x = doc["data"]["x"];
            moveData_.y = doc["data"]["y"];
            moveData_.a = doc["data"]["a"];
            printIncomingCommand(message);
            sendAck(type);
        }
        else if(type == "move_vision")
        {
            cmd = COMMAND::MOVE_WITH_VISION;
            moveData_.x = doc["data"]["x"];
            moveData_.y = doc["data"]["y"];
            moveData_.a = doc["data"]["a"];
            printIncomingCommand(message);
            sendAck(type);
        }
        else if(type == "move_const_vel")
        {
            cmd = COMMAND::MOVE_CONST_VEL;
            velocityData_.vx = doc["data"]["vx"];
            velocityData_.vy = doc["data"]["vy"];
            velocityData_.va = doc["data"]["va"];
            velocityData_.t = doc["data"]["t"];
            printIncomingCommand(message);
            sendAck(type);
        }
        else if(type == "place")
        {
            cmd = COMMAND::PLACE_TRAY;
            printIncomingCommand(message);
            sendAck(type);
        }
        else if(type == "load")
        {
            cmd = COMMAND::LOAD_TRAY;
            printIncomingCommand(message);
            sendAck(type);
        }
        else if(type == "init")
        {
            cmd = COMMAND::INITIALIZE_TRAY;
            printIncomingCommand(message);
            sendAck(type);
        }
        else if(type == "p")
        {
            cmd = COMMAND::POSITION;
            positionData_.x = doc["data"]["x"];
            positionData_.y = doc["data"]["y"];
            positionData_.a = doc["data"]["a"];
            sendAck(type);
        }
        else if(type == "set_pose")
        {
            cmd = COMMAND::SET_POSE;
            positionData_.x = doc["data"]["x"];
            positionData_.y = doc["data"]["y"];
            positionData_.a = doc["data"]["a"];
            sendAck(type);
        }
        else if(type == "estop")
        {
            cmd = COMMAND::ESTOP;
            printIncomingCommand(message);
            sendAck(type);
        }
        else if(type == "lc")
        {
            cmd = COMMAND::LOAD_COMPLETE;
            printIncomingCommand(message);
            sendAck(type);
        }
        else if(type == "status")
        {
            sendStatus();
        }
        else if (type == "check")
        {
            sendAck(type);
        }
        else if (type == "clear_error")
        {
            statusUpdater_.clearErrorStatus();
            sendAck(type);
        }
        else if (type == "wait_for_loc")
        {
            sendAck(type);
            cmd = COMMAND::WAIT_FOR_LOCALIZATION;
        }
        else if (type == "toggle_vision_debug")
        {
            sendAck(type);
            cmd = COMMAND::TOGGLE_VISION_DEBUG;
        }
        else if (type == "start_cameras")
        {
            sendAck(type);
            cmd = COMMAND::START_CAMERAS;
        }
        else if (type == "stop_cameras")
        {
            sendAck(type);
            cmd = COMMAND::STOP_CAMERAS;
        }
        else if(type == "")
        {
            printIncomingCommand(message);
            PLOGI.printf("ERROR: Type field empty or not specified ");
            sendErr("no_type");
        }
        else
        {
            printIncomingCommand(message);
            PLOGI.printf("ERROR: Unkown type field ");
            sendErr("unkown_type");
        }
    }

    return cmd;
}

// Accessor for the payload cached by the most recent move-type command.
RobotServer::PositionData RobotServer::getMoveData()
{
    return moveData_;
}

// Accessor for the payload cached by the most recent "p"/"set_pose" command.
RobotServer::PositionData RobotServer::getPositionData()
{
    return positionData_;
}

// Accessor for the payload cached by the most recent "move_const_vel" command.
RobotServer::VelocityData RobotServer::getVelocityData()
{
    return velocityData_;
}

// Replies to a "status" request with the full status JSON (not debug-logged).
void RobotServer::sendStatus()
{
    std::string msg = statusUpdater_.getStatusJsonString();
    sendMsg(msg, false);
}

// One server iteration: drain any framed incoming message from the socket
// and, if one arrived, clean and decode it into a COMMAND.
COMMAND RobotServer::oneLoop()
{
    COMMAND cmd = COMMAND::NONE;
    std::string newMsg = getAnyIncomingMessage();
    if(newMsg.length() != 0)
    {
        PLOGD.printf("RX: %s", newMsg.c_str());
        cmd = getCommand(cleanString(newMsg));
    }
    return cmd;
}

// Trims a raw message down to the JSON object between the first '{' and the
// first '}' (body continues on the following lines).
std::string RobotServer::cleanString(std::string message)
{
    int idx_start = message.find("{");
    int idx_end =
message.find("}") + 1; int len = idx_end - idx_start + 1; if(idx_start == -1 || idx_end == 0) { PLOGW.printf("Could not find brackets in message"); return message; } return message.substr(idx_start, len); } std::string RobotServer::getAnyIncomingMessage() { bool newData = false; std::string new_msg = ""; while (socket_->dataAvailableToRead() && newData == false) { std::string data = socket_->getData(); for (auto c : data) { if (recvInProgress_ == true) { if (c == START_CHAR) { buffer_ = ""; } else if (c != END_CHAR) { buffer_ += c; } else { recvInProgress_ = false; newData = true; new_msg = buffer_; buffer_ = ""; } } else if (c == START_CHAR) { recvInProgress_ = true; } } } return new_msg; } void RobotServer::sendMsg(std::string msg, bool print_debug) { if (msg.length() == 0 && print_debug) { PLOGI.printf("Nothing to send!!!\n"); } else { if(print_debug) { PLOGD.printf("TX: %s", msg.c_str()); } std::string send_msg = START_CHAR + msg + END_CHAR; socket_->sendData(send_msg); } } void RobotServer::printIncomingCommand(std::string message) { PLOGI.printf(message.c_str()); } void RobotServer::sendAck(std::string data) { StaticJsonDocument<64> doc; doc["type"] = "ack"; doc["data"] = data; std::string msg; serializeJson(doc, msg); sendMsg(msg); } void RobotServer::sendErr(std::string data) { StaticJsonDocument<64> doc; doc["type"] = "ack"; doc["data"] = data; std::string msg; serializeJson(doc, msg); sendMsg(msg); } ================================================ FILE: src/robot/src/RobotServer.h ================================================ /* * Robot server that handles gettings messages from master * and responding correctly */ #ifndef RobotServer_h #define RobotServer_h #include #include #include "constants.h" #include "sockets/SocketMultiThreadWrapperBase.h" #include "StatusUpdater.h" #define START_CHAR '<' #define END_CHAR '>' class RobotServer { public: struct PositionData { float x; float y; float a; }; struct VelocityData { float vx; float vy; float va; 
float t;
    };

    RobotServer(StatusUpdater& statusUpdater);

    // Runs one receive/dispatch iteration; returns the decoded command (or NONE)
    COMMAND oneLoop();

    RobotServer::PositionData getMoveData();
    RobotServer::PositionData getPositionData();
    RobotServer::VelocityData getVelocityData();

  private:
    PositionData moveData_;                  // payload cached from the last move command
    PositionData positionData_;              // payload cached from the last position command
    VelocityData velocityData_;              // payload cached from the last const-vel command
    StatusUpdater& statusUpdater_;           // shared status object used for status replies
    bool recvInProgress_;                    // true while inside a START_CHAR/END_CHAR frame
    int recvIdx_;
    std::string buffer_;                     // accumulates the current frame's payload
    SocketMultiThreadWrapperBase* socket_;   // socket connection to the master

    COMMAND getCommand(std::string message);
    void sendMsg(std::string msg, bool print_debug=true);
    void sendAck(std::string data);
    void sendErr(std::string data);
    std::string getAnyIncomingMessage();
    std::string cleanString(std::string message);
    void printIncomingCommand(std::string message);
    void sendStatus();
};

#endif


================================================
FILE: src/robot/src/SmoothTrajectoryGenerator.cpp
================================================
#include "SmoothTrajectoryGenerator.h"

#include

#include "constants.h"
#include "utils.h"

constexpr float d6 = 1/6.0;

// Constructor: marks the current trajectory invalid and loads the S-curve
// solver tuning parameters from config.
SmoothTrajectoryGenerator::SmoothTrajectoryGenerator()
: currentTrajectory_()
{
    currentTrajectory_.complete = false;
    solver_params_.num_loops = cfg.lookup("trajectory_generation.solver_max_loops");
    solver_params_.beta_decay = cfg.lookup("trajectory_generation.solver_beta_decay");
    solver_params_.alpha_decay = cfg.lookup("trajectory_generation.solver_alpha_decay");
    solver_params_.exponent_decay = cfg.lookup("trajectory_generation.solver_exponent_decay");
}

// Samples the current trajectory at the given time, mapping the 1-D
// translational and rotational profiles back into the global frame.
PVTPoint SmoothTrajectoryGenerator::lookup(float time)
{
    std::vector trans_values = lookup_1D(time, currentTrajectory_.trans_params);
    std::vector rot_values = lookup_1D(time, currentTrajectory_.rot_params);

    // Map translational trajectory into XY space with direction vector
    Eigen::Vector2f trans_pos_delta = trans_values[0] * currentTrajectory_.trans_direction;
    Eigen::Vector2f trans_vel = trans_values[1] * currentTrajectory_.trans_direction;

    // Map rotational trajectory into angular space with direction
    float rot_pos_delta = rot_values[0] * currentTrajectory_.rot_direction;
    float rot_vel = rot_values[1] * currentTrajectory_.rot_direction;

    // Build and return pvtpoint
    PVTPoint pvt;
    pvt.position = {currentTrajectory_.initialPoint.x + trans_pos_delta(0),
                    currentTrajectory_.initialPoint.y + trans_pos_delta(1),
                    wrap_angle(currentTrajectory_.initialPoint.a + rot_pos_delta) };
    pvt.velocity = {trans_vel(0), trans_vel(1), rot_vel};
    pvt.time = time;
    return pvt;
}

// Samples a 1-D S-curve profile at the given time, returning {position,
// velocity}. Times outside the profile clamp to the first/last switch point.
std::vector lookup_1D(float time, const SCurveParameters& params)
{
    // Handle time before start of trajectory
    if(time <= params.switch_points[0].t)
    {
        return {params.switch_points[0].p, params.switch_points[0].v};
    }
    // Handle time after the end of the trajectory
    else if (time > params.switch_points[7].t)
    {
        return {params.switch_points[7].p, params.switch_points[7].v};
    }
    // Handle times within the trajectory
    else
    {
        // Look for correct region
        for (int i = 1; i <= 7; i++)
        {
            // Once region is found, compute position and velocity from previous switch point
            if(params.switch_points[i-1].t < time && time <= params.switch_points[i].t)
            {
                float dt = time - params.switch_points[i-1].t;
                std::vector values = computeKinematicsBasedOnRegion(params, i, dt);
                return {values[2], values[1]};
            }
        }
    }
    PLOGE << "Should not get to this point in lookup";
    return {0,0};
}

// Builds and solves a point-to-point S-curve trajectory between the two poses
// under the given limits mode. Returns true if the solve succeeded.
bool SmoothTrajectoryGenerator::generatePointToPointTrajectory(Point initialPoint, Point targetPoint, LIMITS_MODE limits_mode)
{
    // Print to logs
    PLOGI.printf("Generating trajectory");
    PLOGI.printf("Starting point: %s", initialPoint.toString().c_str());
    PLOGI.printf("Target point: %s", targetPoint.toString().c_str());
    PLOGD_(MOTION_LOG_ID).printf("\nGenerating trajectory");
    PLOGD_(MOTION_LOG_ID).printf("Starting point: %s", initialPoint.toString().c_str());
    PLOGD_(MOTION_LOG_ID).printf("Target point: %s", targetPoint.toString().c_str());

    MotionPlanningProblem mpp = buildMotionPlanningProblem(initialPoint, targetPoint, limits_mode, solver_params_);
    currentTrajectory_ = generateTrajectory(mpp);
    PLOGI
<< currentTrajectory_.toString(); PLOGD_(MOTION_LOG_ID) << currentTrajectory_.toString(); return currentTrajectory_.complete; } // TODO Implement a more accurate version of this if needed // Note that this implimentation is a hack and isn't guaranteed to give an accurate constant velocity - so use with caution. bool SmoothTrajectoryGenerator::generateConstVelTrajectory(Point initialPoint, Velocity velocity, float moveTime, LIMITS_MODE limits_mode) { // Print to logs PLOGI.printf("Generating constVel (sort of) trajectory"); PLOGI.printf("Starting point: %s", initialPoint.toString().c_str()); PLOGI.printf("Target velocity: %s", velocity.toString().c_str()); PLOGI.printf("Move time: %d", moveTime); PLOGD_(MOTION_LOG_ID).printf("Generating constVel (sort of) trajectory"); PLOGD_(MOTION_LOG_ID).printf("Starting point: %s", initialPoint.toString().c_str()); PLOGD_(MOTION_LOG_ID).printf("Target velocity: %s", velocity.toString().c_str()); PLOGD_(MOTION_LOG_ID).printf("Move time: %d", moveTime); // This will undershoot the target velocity because we aren't consider accel/jerk here so the // solver will not quite reach this velocity - especially if the move time specified is small. 
Point targetPoint; targetPoint.x = initialPoint.x + velocity.vx * moveTime; targetPoint.y = initialPoint.y + velocity.vy * moveTime; targetPoint.a = initialPoint.a + velocity.va * moveTime; MotionPlanningProblem mpp = buildMotionPlanningProblem(initialPoint, targetPoint, limits_mode, solver_params_); currentTrajectory_ = generateTrajectory(mpp); PLOGI << currentTrajectory_.toString(); PLOGD_(MOTION_LOG_ID) << currentTrajectory_.toString(); return currentTrajectory_.complete; } MotionPlanningProblem buildMotionPlanningProblem(Point initialPoint, Point targetPoint, LIMITS_MODE limits_mode, const SolverParameters& solver) { MotionPlanningProblem mpp; mpp.initialPoint = {initialPoint.x, initialPoint.y, initialPoint.a}; mpp.targetPoint = {targetPoint.x, targetPoint.y, targetPoint.a}; DynamicLimits translationalLimits; DynamicLimits rotationalLimits; if(limits_mode == LIMITS_MODE::VISION) { PLOGI << "Setting trajectory limits mode to LIMITS_MODE::VISION"; translationalLimits = { cfg.lookup("motion.translation.max_vel.vision"), cfg.lookup("motion.translation.max_acc.vision"), cfg.lookup("motion.translation.max_jerk.vision")}; rotationalLimits = { cfg.lookup("motion.rotation.max_vel.vision"), cfg.lookup("motion.rotation.max_acc.vision"), cfg.lookup("motion.rotation.max_jerk.vision")}; } else if(limits_mode == LIMITS_MODE::FINE || limits_mode == LIMITS_MODE::SLOW) { PLOGI << "Setting trajectory limits mode to LIMITS_MODE::FINE/SLOW"; translationalLimits = { cfg.lookup("motion.translation.max_vel.fine"), cfg.lookup("motion.translation.max_acc.fine"), cfg.lookup("motion.translation.max_jerk.fine")}; rotationalLimits = { cfg.lookup("motion.rotation.max_vel.fine"), cfg.lookup("motion.rotation.max_acc.fine"), cfg.lookup("motion.rotation.max_jerk.fine")}; } else { PLOGI << "Setting trajectory limits mode to LIMITS_MODE::COARSE"; translationalLimits = { cfg.lookup("motion.translation.max_vel.coarse"), cfg.lookup("motion.translation.max_acc.coarse"), 
cfg.lookup("motion.translation.max_jerk.coarse")}; rotationalLimits = { cfg.lookup("motion.rotation.max_vel.coarse"), cfg.lookup("motion.rotation.max_acc.coarse"), cfg.lookup("motion.rotation.max_jerk.coarse")}; } // This scaling makes sure to give some headroom for the controller to go a bit faster than the planned limits // without actually violating any hard constraints mpp.translationalLimits = translationalLimits * cfg.lookup("motion.limit_max_fraction"); mpp.rotationalLimits = rotationalLimits * cfg.lookup("motion.limit_max_fraction"); mpp.solver_params = solver; return std::move(mpp); } bool sCurveWithinLimits(const SCurveParameters& params, const DynamicLimits& limits) { return params.v_lim <= limits.max_vel && params.a_lim <= limits.max_acc && params.j_lim <= limits.max_jerk; } Trajectory generateTrajectory(MotionPlanningProblem problem) { // Figure out delta that the trajectory needs to cover Eigen::Vector3f deltaPosition = problem.targetPoint - problem.initialPoint; deltaPosition(2) = wrap_angle(deltaPosition(2)); // Begin building trajectory object Trajectory traj; traj.complete = false; traj.initialPoint = {problem.initialPoint(0), problem.initialPoint(1), problem.initialPoint(2)}; traj.trans_direction = deltaPosition.head(2).normalized(); traj.rot_direction = sgn(deltaPosition(2)); // Solve translational component float dist = deltaPosition.head(2).norm(); SCurveParameters trans_params; bool ok = generateSCurve(dist, problem.translationalLimits, problem.solver_params, &trans_params); if(!ok) { PLOGW << "Failed to generate translational trajectory"; return traj; } // Solve rotational component SCurveParameters rot_params; dist = fabs(deltaPosition(2)); ok = generateSCurve(dist, problem.rotationalLimits, problem.solver_params, &rot_params); if(!ok) { PLOGW << "Failed to generate rotational trajectory"; return traj; } // Syncronize trajectories so they both start and end at the same time ok = synchronizeParameters(&trans_params, &rot_params); if (!ok) { 
PLOGW << "Failed to synchronize trajectory parameters";
        return traj;
    }
    // Re-validate after synchronization since slowing one axis re-solves its limits
    if(!sCurveWithinLimits(trans_params, problem.translationalLimits))
    {
        PLOGW << "Generated translational trajectory violates limits";
        return traj;
    }
    if(!sCurveWithinLimits(rot_params, problem.rotationalLimits))
    {
        PLOGW << "Generated rotational trajectory violates limits";
        return traj;
    }

    traj.trans_params = trans_params;
    traj.rot_params = rot_params;
    traj.complete = true;
    return traj;
}

// Solves a 1-D s-curve (7-segment jerk-limited profile) covering `dist`, iteratively
// relaxing the accel/velocity limits until a feasible set of segment durations is found.
// Returns false if no solution is found within solver.num_loops iterations.
bool generateSCurve(float dist, DynamicLimits limits, const SolverParameters& solver, SCurveParameters* params)
{
    // Handle case where distance is very close to 0
    float min_dist = cfg.lookup("trajectory_generation.min_dist_limit");
    if (fabs(dist) < min_dist)
    {
        // Degenerate move: all-zero parameters describe "stay in place"
        params->v_lim = 0;
        params->a_lim = 0;
        params->j_lim = 0;
        for (int i = 0; i < 8; i++)
        {
            params->switch_points[i].t = 0;
            params->switch_points[i].p = 0;
            params->switch_points[i].v = 0;
            params->switch_points[i].a = 0;
        }
        return true;
    }

    // Initialize parameters
    float v_lim = limits.max_vel;
    float a_lim = limits.max_acc;
    float j_lim = limits.max_jerk;
    bool solution_found = false;
    int loop_counter = 0;
    while(!solution_found && loop_counter < solver.num_loops)
    {
        loop_counter++;
        PLOGI << "Trajectory generation loop " << loop_counter;

        // Constant jerk region
        // NOTE: d6 is presumably the 1/6 coefficient for the cubic jerk term (defined in constants) - confirm
        float dt_j = a_lim / j_lim;
        float dv_j = 0.5 * j_lim * std::pow(dt_j, 2);
        float dp_j1 = d6 * j_lim * std::pow(dt_j, 3);
        float dp_j2 = (v_lim - dv_j) * dt_j + 0.5 * a_lim * std::pow(dt_j, 2) - d6 * j_lim * std::pow(dt_j, 3);

        // Constant accel region
        float dt_a = (v_lim - 2 * dv_j) / a_lim;
        if (dt_a <= 0)
        {
            // If dt_a is negative, it means we couldn't find a solution
            // so adjust accel parameter and try loop again
            a_lim *= std::pow(solver.beta_decay, 1 + solver.exponent_decay * loop_counter);
            PLOGI << "dt_a: " << dt_a << ", trying new accel limit: " << a_lim;
            continue;
        }
        float dp_a = dv_j * dt_a + 0.5 * a_lim * std::pow(dt_a, 2);

        // Constant velocity region
        float dt_v = (dist - 2 * dp_j1 - 2 * dp_j2 - 2 * dp_a) /
v_lim; if (dt_v <= 0) { // If dt_a is negative, it means we couldn't find a solution // so adjust velocity parameter and try loop again v_lim *= std::pow(solver.alpha_decay, 1 + solver.exponent_decay * loop_counter); PLOGI << "dt_v: " << dt_v << ", trying new velocity limit: " << v_lim; continue; } // If we get here, it means we found a valid solution and can populate the rest of the // switch time parameters solution_found = true; params->v_lim = v_lim; params->a_lim = a_lim; params->j_lim = j_lim; PLOGI << "Trajectory solution found"; populateSwitchTimeParameters(params, dt_j, dt_a, dt_v); } return solution_found; } void populateSwitchTimeParameters(SCurveParameters* params, float dt_j, float dt_a, float dt_v) { // Fill first point with all zeros params->switch_points[0].t = 0; params->switch_points[0].p = 0; params->switch_points[0].v = 0; params->switch_points[0].a = 0; for (int i = 1; i < 8; i++) { float dt; // Constant jerk regions if (i == 1 || i == 3 || i == 5 || i == 7) { dt = dt_j; } // Constant acceleration regions else if (i == 2 || i == 6) { dt = dt_a; } // Constant velocity region else { dt = dt_v; } // Populate values std::vector values = computeKinematicsBasedOnRegion(*params, i, dt); params->switch_points[i].a = values[0]; params->switch_points[i].v = values[1]; params->switch_points[i].p = values[2]; params->switch_points[i].t = params->switch_points[i-1].t + dt; } } bool synchronizeParameters(SCurveParameters* params1, SCurveParameters* params2) { // Figure out which parameter set is the faster one float end_time_1 = params1->switch_points[7].t; float end_time_2 = params2->switch_points[7].t; // Slow down which ever one is faster to syncronize them bool ok = false; if(end_time_1 > end_time_2) { ok = slowDownParamsToMatchTime(params2, end_time_1); } else { ok = slowDownParamsToMatchTime(params1, end_time_2); } return ok; } bool slowDownParamsToMatchTime(SCurveParameters* params, float time_to_match) { // Compute time delta needed to adjust by 
float end_time = params->switch_points[7].t; float dt = time_to_match - end_time; // Do a simple adjustment by adding an equal amount of time to each region float dt_per_region = dt / 7.0; for (int i = 1; i < 8; i++) { params->switch_points[i].t += + i*dt_per_region; } // Then recompute limits based on updated times return solveInverse(params); } bool solveInverse(SCurveParameters* params) { // Gather parameters needed const float dt_j = params->switch_points[1].t - params->switch_points[0].t; const float dt_a = params->switch_points[2].t - params->switch_points[1].t; const float dt_v = params->switch_points[4].t - params->switch_points[3].t; const float deltaPosition = params->switch_points[7].p; // Build linear system Eigen::Matrix3f A; Eigen::Vector3f b; A << dt_j, -1, 0 , std::pow(dt_j, 2), dt_a , -1 , std::pow(dt_j, 2) * (dt_a - dt_j), std::pow(dt_j, 2) + std::pow(dt_a, 2), dt_v + 2* dt_j; b << 0, 0, deltaPosition; // Solve system and check results // Eigen::Vector3f lims = A.fullPivHouseholderQr().solve(b); Eigen::Vector3f lims = A.bdcSvd(Eigen::ComputeFullU | Eigen::ComputeFullV).solve(b); float relative_error = (A*lims - b).norm() / b.norm(); if (relative_error > 1e-5) { PLOGW << "Could not find feasible inverse parameter mapping"; return false; } params->j_lim = lims(0); params->a_lim = lims(1); params->v_lim = lims(2); populateSwitchTimeParameters(params, dt_j, dt_a, dt_v); return true; } std::vector computeKinematicsBasedOnRegion(const SCurveParameters& params, int region, float dt) { float j, a, v, p; bool need_a = true; bool need_v = true; // Positive jerk if (region == 1 || region == 7) { j = params.j_lim; } // Negative jerk else if (region == 3 || region == 5) { j = -1 * params.j_lim; } // Constant positive acceleration else if (region == 2) { j = 0; a = params.a_lim; need_a = false; } // Constant negative acceleration else if ( region == 6) { j = 0; a = -1*params.a_lim; need_a = false; } // Constant velocity region else if (region == 4) { j = 0; a = 
0; v = params.v_lim; need_a = false; need_v = false; } else { // Error PLOGE << "Invalid region value: " << region; return {0,0,0,0}; } // Compute remaining values if (need_a) { a = params.switch_points[region-1].a + j * dt; } if (need_v) { v = params.switch_points[region-1].v + params.switch_points[region-1].a * dt + 0.5 * j * std::pow(dt, 2); } p = params.switch_points[region-1].p + params.switch_points[region-1].v * dt + 0.5 * params.switch_points[region-1].a * std::pow(dt, 2) + d6 * j * std::pow(dt, 3); return {a,v,p}; } ================================================ FILE: src/robot/src/SmoothTrajectoryGenerator.h ================================================ #ifndef SmoothTrajectoryGenerator_h #define SmoothTrajectoryGenerator_h #include #include "utils.h" // Return structure for a trajectory point lookup that contains all the info about a point in time the controller // needs to drive the robot struct PVTPoint { Point position; Velocity velocity; float time; std::string toString() const { char s[200]; sprintf(s, "[Position: %s, Velocity: %s, T: %.3f]", position.toString().c_str(), velocity.toString().c_str(), time); return static_cast(s); } }; // Contains info about the maximum dynamic limits of a trajectory struct DynamicLimits { float max_vel; float max_acc; float max_jerk; DynamicLimits operator* (float c) { DynamicLimits rtn; rtn.max_vel = c * max_vel; rtn.max_acc = c * max_acc; rtn.max_jerk = c * max_jerk; return rtn; } }; // A fully defined point for switching from one region of the trajectory // to another - needed for efficient lookup without building a huge table struct SwitchPoint { float t; float p; float v; float a; }; // Parameters defining a 1-D S-curve trajectory struct SCurveParameters { float v_lim; float a_lim; float j_lim; SwitchPoint switch_points[8]; std::string toString() const { char s[256]; sprintf(s, " Limits: [v: %.3f, a: %.3f, j: %.3f]\n Switch times: [%.3f, %.3f, %.3f, %.3f, %.3f, %.3f, %.3f, %.3f]", v_lim, a_lim, j_lim, 
switch_points[0].t,switch_points[1].t,switch_points[2].t,switch_points[3].t,switch_points[4].t,switch_points[5].t,switch_points[6].t,switch_points[7].t); return static_cast(s); } }; // Everything needed to define a point to point s-curve trajectory in X, Y, and angle struct Trajectory { Eigen::Vector2f trans_direction; int rot_direction; Point initialPoint; SCurveParameters trans_params; SCurveParameters rot_params; bool complete; std::string toString() const { char s[1000]; sprintf(s, "Trajectory Parameters:\nTranslation:\n Direction: [%.2f, %.2f]\n S-Curve:\n%s\nRotation:\n Direction: %i\n S-Curve:\n%s\n", trans_direction[0], trans_direction[1], trans_params.toString().c_str(), rot_direction, rot_params.toString().c_str()); return static_cast(s); } }; struct SolverParameters { int num_loops; float alpha_decay; float beta_decay; float exponent_decay; }; enum class LIMITS_MODE { COARSE, FINE, VISION, SLOW, }; // All the pieces needed to define the motion planning problem struct MotionPlanningProblem { Eigen::Vector3f initialPoint; Eigen::Vector3f targetPoint; DynamicLimits translationalLimits; DynamicLimits rotationalLimits; SolverParameters solver_params; }; // Helper methods - making public for easier testing MotionPlanningProblem buildMotionPlanningProblem(Point initialPoint, Point targetPoint, LIMITS_MODE limits_mode, const SolverParameters& solver); Trajectory generateTrajectory(MotionPlanningProblem problem); bool generateSCurve(float dist, DynamicLimits limits, const SolverParameters& solver, SCurveParameters* params); void populateSwitchTimeParameters(SCurveParameters* params, float dt_j, float dt_a, float dt_v); bool synchronizeParameters(SCurveParameters* params1, SCurveParameters* params2); bool slowDownParamsToMatchTime(SCurveParameters* params, float time_to_match); bool solveInverse(SCurveParameters* params); std::vector lookup_1D(float time, const SCurveParameters& params); std::vector computeKinematicsBasedOnRegion(const SCurveParameters& params, 
int region, float dt); class SmoothTrajectoryGenerator { public: SmoothTrajectoryGenerator(); // Generates a trajectory that starts at the initial point and ends at the target point. Setting fineMode to true makes the // adjusts the dynamic limits for a more accurate motion. Returns a bool indicating if trajectory generation was // successful bool generatePointToPointTrajectory(Point initialPoint, Point targetPoint, LIMITS_MODE limits_mode); // Generates a trajectory that attempts to maintain the target velocity for a specified time. Note that the current implimentation // of this does not give a guarantee on the accuracy of the velocity if the specified velocity and move time would violate the dynamic // limits of the fine or coarse movement mode. Returns a bool indicating if trajectory generation was successful bool generateConstVelTrajectory(Point initialPoint, Velocity velocity, float moveTime, LIMITS_MODE limits_mode); // Looks up a point in the current trajectory based on the time, in seconds, from the start of the trajectory PVTPoint lookup(float time); private: // The current trajectory - this lets the generation class hold onto this and just provide a lookup method // since I don't have a need to pass the trajectory around anywhere Trajectory currentTrajectory_; // These need to be part of the class because they need to be loaded at construction time, not // program initialization time (i.e. as globals). This is because the config file is not // yet loaded at program start up time. 
SolverParameters solver_params_;
};

#endif


================================================
FILE: src/robot/src/StatusUpdater.cpp
================================================
#include "StatusUpdater.h"
#include "constants.h"

// Thin setter layer over the Status struct; each update* method copies the supplied
// values into currentStatus_, which is serialized on demand by getStatusJsonString().
StatusUpdater::StatusUpdater() :
    currentStatus_()
{
}

std::string StatusUpdater::getStatusJsonString()
{
    // Return the status string
    return currentStatus_.toJsonString();
}

void StatusUpdater::updatePosition(float x, float y, float a)
{
    currentStatus_.pos_x = x;
    currentStatus_.pos_y = y;
    currentStatus_.pos_a = a;
}

void StatusUpdater::updateVelocity(float vx, float vy, float va)
{
    currentStatus_.vel_x = vx;
    currentStatus_.vel_y = vy;
    currentStatus_.vel_a = va;
}

void StatusUpdater::updateInProgress(bool in_progress)
{
    currentStatus_.in_progress = in_progress;
}

void StatusUpdater::updateControlLoopTime(int controller_loop_ms)
{
    currentStatus_.controller_loop_ms = controller_loop_ms;
}

void StatusUpdater::updatePositionLoopTime(int position_loop_ms)
{
    currentStatus_.position_loop_ms = position_loop_ms;
}

void StatusUpdater::updateLocalizationMetrics(LocalizationMetrics localization_metrics)
{
    currentStatus_.localization_metrics = localization_metrics;
}

void StatusUpdater::update_motor_driver_connected(bool connected)
{
    currentStatus_.motor_driver_connected = connected;
}

void StatusUpdater::update_lifter_driver_connected(bool connected)
{
    currentStatus_.lifter_driver_connected = connected;
}

void StatusUpdater::updateVisionControllerPose(Point pose)
{
    currentStatus_.vision_x = pose.x;
    currentStatus_.vision_y = pose.y;
    currentStatus_.vision_a = pose.a;
}

void StatusUpdater::updateLastMarvelmindPose(Point pose, bool pose_used)
{
    currentStatus_.last_mm_x = pose.x;
    currentStatus_.last_mm_y = pose.y;
    currentStatus_.last_mm_a = pose.a;
    currentStatus_.last_mm_used = pose_used;
}


================================================
FILE: src/robot/src/StatusUpdater.h
================================================
#ifndef StatusUpdater_h
#define StatusUpdater_h

// NOTE(review): extraction stripped a bracketed include target on the next line
// (presumably <ArduinoJson.h>, since Status::toJsonString uses DynamicJsonDocument) -
// restore before building.
#include
#include "utils.h"

class StatusUpdater
{
  public:
    StatusUpdater();

    std::string getStatusJsonString();

    void updatePosition(float x, float y, float a);

    void updateVelocity(float vx, float vy, float va);

    void updateControlLoopTime(int controller_loop_ms);

    void updatePositionLoopTime(int position_loop_ms);

    void updateInProgress(bool in_progress);

    bool getInProgress() const { return currentStatus_.in_progress; };

    void setErrorStatus() { currentStatus_.error_status = true;}

    void clearErrorStatus() {currentStatus_.error_status = false;};

    bool getErrorStatus() const {return currentStatus_.error_status;}

    void updateLocalizationMetrics(LocalizationMetrics localization_metrics);

    float getLocalizationConfidence() const {return currentStatus_.localization_metrics.total_confidence;};

    void update_motor_driver_connected(bool connected);

    void update_lifter_driver_connected(bool connected);

    void updateCameraDebug(CameraDebug camera_debug) {currentStatus_.camera_debug = camera_debug;};

    void updateVisionControllerPose(Point pose);

    void updateLastMarvelmindPose(Point pose, bool pose_used);

    struct Status
    {
        // Current position and velocity
        float pos_x;
        float pos_y;
        float pos_a;
        float vel_x;
        float vel_y;
        float vel_a;

        // Vision tracker pose
        float vision_x;
        float vision_y;
        float vision_a;

        // Last pose from mm
        float last_mm_x;
        float last_mm_y;
        float last_mm_a;
        bool last_mm_used;

        // Loop times
        int controller_loop_ms;
        int position_loop_ms;

        bool in_progress;
        bool error_status;

        uint8_t counter; // Just to show that the status is updating.
// Okay to roll over

        bool motor_driver_connected;
        bool lifter_driver_connected;

        LocalizationMetrics localization_metrics;
        CameraDebug camera_debug;

        //When adding extra fields, update toJsonString method to serialize and add additional capacity
        Status():
          pos_x(0.0),
          pos_y(0.0),
          pos_a(0.0),
          vel_x(0.0),
          vel_y(0.0),
          vel_a(0.0),
          vision_x(0.0),
          vision_y(0.0),
          vision_a(0.0),
          last_mm_x(0.0),
          last_mm_y(0.0),
          last_mm_a(0.0),
          last_mm_used(false),
          controller_loop_ms(999),
          position_loop_ms(999),
          in_progress(false),
          error_status(false),
          counter(0),
          motor_driver_connected(false),
          lifter_driver_connected(false),
          localization_metrics(),
          camera_debug()
        {
        }

        // Serializes the full status into a {"type":"status","data":{...}} JSON string.
        // Note: increments `counter` as a side effect so consumers can see freshness.
        std::string toJsonString()
        {
            // Size the object correctly
            const size_t capacity = JSON_OBJECT_SIZE(50); // Update when adding new fields
            DynamicJsonDocument root(capacity);

            // Format to match messages sent by server
            root["type"] = "status";
            JsonObject doc = root.createNestedObject("data");

            // Fill in data
            doc["pos_x"] = pos_x;
            doc["pos_y"] = pos_y;
            doc["pos_a"] = pos_a;
            doc["vel_x"] = vel_x;
            doc["vel_y"] = vel_y;
            doc["vel_a"] = vel_a;
            doc["controller_loop_ms"] = controller_loop_ms;
            doc["position_loop_ms"] = position_loop_ms;
            doc["in_progress"] = in_progress;
            doc["error_status"] = error_status;
            doc["counter"] = counter++;
            doc["motor_driver_connected"] = motor_driver_connected;
            doc["lifter_driver_connected"] = lifter_driver_connected;
            doc["localization_confidence_x"] = localization_metrics.confidence_x;
            doc["localization_confidence_y"] = localization_metrics.confidence_y;
            doc["localization_confidence_a"] = localization_metrics.confidence_a;
            doc["localization_total_confidence"] = localization_metrics.total_confidence;
            doc["last_position_uncertainty"] = localization_metrics.last_position_uncertainty;
            doc["cam_side_ok"] = camera_debug.side_ok;
            doc["cam_rear_ok"] = camera_debug.rear_ok;
            doc["cam_both_ok"] = camera_debug.both_ok;
            doc["cam_side_u"] = camera_debug.side_u;
            doc["cam_side_v"] = camera_debug.side_v;
            doc["cam_rear_u"] = camera_debug.rear_u;
            doc["cam_rear_v"] = camera_debug.rear_v;
            doc["cam_side_x"] = camera_debug.side_x;
            doc["cam_side_y"] = camera_debug.side_y;
            doc["cam_rear_x"] = camera_debug.rear_x;
            doc["cam_rear_y"] = camera_debug.rear_y;
            doc["cam_pose_x"] = camera_debug.pose_x;
            doc["cam_pose_y"] = camera_debug.pose_y;
            doc["cam_pose_a"] = camera_debug.pose_a;
            doc["cam_loop_ms"] = camera_debug.loop_ms;
            doc["vision_x"] = vision_x;
            doc["vision_y"] = vision_y;
            doc["vision_a"] = vision_a;
            doc["last_mm_x"] = last_mm_x;
            doc["last_mm_y"] = last_mm_y;
            doc["last_mm_a"] = last_mm_a;
            doc["last_mm_used"] = last_mm_used;

            // Serialize and return string
            std::string msg;
            serializeJson(root, msg);
            return msg;
        }
    };

    Status getStatus() { return currentStatus_;};

  private:
    Status currentStatus_;
};

#endif


================================================
FILE: src/robot/src/TrayController.cpp
================================================
#include "TrayController.h"
#include "constants.h"
// NOTE(review): extraction stripped a bracketed include target on the next line - restore before building
#include
#include "serial/SerialCommsFactory.h"

// Named tray positions; each maps to a step count derived from config revolutions.
enum class LifterPosType
{
    DEFAULT,
    PLACE,
    LOAD,
};

// Converts a named lifter position into an absolute step count using the configured
// revolutions for that position and the steps-per-revolution of the stepper.
int getPos(LifterPosType pos_type)
{
    float revs = 0.0;
    switch(pos_type)
    {
        case LifterPosType::DEFAULT:
            revs = cfg.lookup("tray.default_pos_revs");
            break;
        case LifterPosType::PLACE:
            revs = cfg.lookup("tray.place_pos_revs");
            break;
        case LifterPosType::LOAD:
            revs = cfg.lookup("tray.load_pos_revs");
            break;
    }
    int steps_per_rev = cfg.lookup("tray.steps_per_rev");
    float steps = steps_per_rev * revs;
    // NOTE(review): extraction stripped the cast's template argument (presumably static_cast<int>)
    return static_cast(steps);
}

TrayController::TrayController()
: serial_to_lifter_driver_(SerialCommsFactory::getFactoryInstance()->get_serial_comms(CLEARCORE_USB)),
  action_step_running_(false),
  load_complete_(false),
  action_step_(0),
  fake_tray_motion_(cfg.lookup("tray.fake_tray_motions")),
  cur_action_(ACTION::NONE),
  controller_rate_(cfg.lookup("tray.controller_frequency")),
  is_initialized_(false)
{
    if(fake_tray_motion_) PLOGW << "Fake tray motion enabled";
}

void TrayController::initialize()
{
    cur_action_ = ACTION::INITIALIZE;
action_step_ = 0;
    PLOGI << "Starting tray action INITIALIZE";
}

bool TrayController::place()
{
    // Refuse to run real motion before homing (fake mode bypasses the check)
    if(!is_initialized_ && !fake_tray_motion_)
    {
        PLOGE << "Tray is not initialized, aborting action";
        return false;
    }
    cur_action_ = ACTION::PLACE;
    action_step_ = 0;
    PLOGI << "Starting tray action PLACE";
    return true;
}

bool TrayController::load()
{
    if(!is_initialized_ && !fake_tray_motion_)
    {
        PLOGE << "Tray is not initialized, aborting action";
        return false;
    }
    cur_action_ = ACTION::LOAD;
    action_step_ = 0;
    PLOGI << "Starting tray action LOAD";
    return true;
}

void TrayController::estop()
{
    // Abort any in-flight action and command the lifter to stop immediately
    cur_action_ = ACTION::NONE;
    action_step_ = 0;
    action_step_running_ = false;
    PLOGW << "Estopping tray control";
    if (serial_to_lifter_driver_->isConnected())
    {
        serial_to_lifter_driver_->send("lift:stop");
    }
}

void TrayController::setLoadComplete()
{
    // Only meaningful while LOAD is waiting on the operator (step 1)
    if(cur_action_ == ACTION::LOAD && action_step_ == 1)
    {
        load_complete_ = true;
    }
    else
    {
        PLOGW << "Recieved LOAD_COMPLETE signal at incorrect time. Ignoring.";
    }
}

void TrayController::update()
{
    // Rate-limit the state machine
    if (!controller_rate_.ready()) { return; }

    switch (cur_action_)
    {
        case ACTION::INITIALIZE:
            updateInitialize();
            break;
        case ACTION::LOAD:
            updateLoad();
            break;
        case ACTION::PLACE:
            updatePlace();
            break;
        default:
            // Just to make sure a serial buffer doesn't fill up somewhere
            std::string msg = "";
            msg = serial_to_lifter_driver_->rcv_lift();
            (void) msg;
            break;
    }
}

// Sends `data` once, then on subsequent calls polls the lifter status until the
// command reports done (or immediately after the initial delay in fake mode).
void TrayController::runStepAndWaitForCompletion(std::string data, std::string debug_print)
{
    if(!action_step_running_)
    {
        if (serial_to_lifter_driver_->isConnected())
        {
            serial_to_lifter_driver_->send(data);
        }
        action_step_running_ = true;
        PLOGI << debug_print;
        action_timer_.reset();
    }
    else
    {
        // Request status and wait for command to complete
        std::string msg = "";
        if (serial_to_lifter_driver_->isConnected())
        {
            serial_to_lifter_driver_->send("lift:status_req");
            msg = serial_to_lifter_driver_->rcv_lift();
        }

        // Initial delay for action
        if(action_timer_.dt_ms() > 1000 && (msg == "none" || fake_tray_motion_))
        {
action_step_running_ = false;
            // Advance the owning state machine to its next step
            action_step_++;
        }
    }
}

void TrayController::updateInitialize()
{
    /* Sequence:
    0 - Close latch
    1 - Do tray init
    2 - Move to default location
    */

    // 0 - Close latch
    if(action_step_ == 0)
    {
        std::string data = "lift:close";
        std::string debug = "Closing latch";
        runStepAndWaitForCompletion(data, debug);
    }
    // 1 - Do tray init
    if(action_step_ == 1)
    {
        std::string data = "lift:home";
        std::string debug = "Homing tray";
        runStepAndWaitForCompletion(data, debug);
    }
    // 2 - Move to default location
    if(action_step_ == 2)
    {
        int pos = getPos(LifterPosType::DEFAULT);
        std::string data = "lift:pos:" + std::to_string(pos);
        std::string debug = "Moving tray to default position";
        runStepAndWaitForCompletion(data, debug);
    }
    // 3 - Done with action
    if(action_step_ == 3)
    {
        cur_action_ = ACTION::NONE;
        PLOGI << "Done with initialization";
        is_initialized_ = true;
    }
}

void TrayController::updatePlace()
{
    /* Sequence:
    0 - Move tray to place
    1 - Open latch
    2 - Move tray to default
    3 - Close latch
    */

    // 0 - Move tray to place
    if(action_step_ == 0)
    {
        int pos = getPos(LifterPosType::PLACE);
        std::string data = "lift:pos:" + std::to_string(pos);
        std::string debug = "Moving tray to placement position";
        runStepAndWaitForCompletion(data, debug);
    }
    // 1 - Open latch
    if(action_step_ == 1)
    {
        std::string data = "lift:open";
        std::string debug = "Opening latch";
        runStepAndWaitForCompletion(data, debug);
    }
    // 2 - Move tray to default
    if(action_step_ == 2)
    {
        int pos = getPos(LifterPosType::DEFAULT);
        std::string data = "lift:pos:" + std::to_string(pos);
        std::string debug = "Moving tray to default position";
        runStepAndWaitForCompletion(data, debug);
    }
    // 3 - Close latch
    if(action_step_ == 3)
    {
        std::string data = "lift:close";
        std::string debug = "Closing latch";
        runStepAndWaitForCompletion(data, debug);
    }
    // 4 - Done with action
    if(action_step_ == 4)
    {
        cur_action_ = ACTION::NONE;
        PLOGI << "Done with place";
    }
}

void TrayController::updateLoad()
{
    /* Sequence:
    0 - Move tray to load
    1 -
Wait for load complete signal
    2 - Move to default
    */

    // 0 - Move tray to load
    if(action_step_ == 0)
    {
        int pos = getPos(LifterPosType::LOAD);
        std::string data = "lift:pos:" + std::to_string(pos);
        std::string debug = "Moving tray to load position";
        runStepAndWaitForCompletion(data, debug);
    }
    // 1 - Wait for load complete signal
    if(action_step_ == 1)
    {
        if(!action_step_running_)
        {
            action_step_running_ = true;
            PLOGI << "Waiting for load complete";
        }
        else
        {
            // load_complete_ is set externally via setLoadComplete()
            if(load_complete_)
            {
                action_step_++;
                PLOGI << "Got load complete signal";
                load_complete_ = false;
                action_step_running_ = false;
            }
        }
    }
    // 2 - Move to default
    if(action_step_ == 2)
    {
        int pos = getPos(LifterPosType::DEFAULT);
        std::string data = "lift:pos:" + std::to_string(pos);
        std::string debug = "Moving tray to default position";
        runStepAndWaitForCompletion(data, debug);
    }
    // 3 - Done with action
    if(action_step_ == 3)
    {
        cur_action_ = ACTION::NONE;
        PLOGI << "Done with load";
    }
}


================================================
FILE: src/robot/src/TrayController.h
================================================
#ifndef TrayController_h
#define TrayController_h

#include "serial/SerialComms.h"
#include "utils.h"

// Drives the domino tray lifter/latch through multi-step INITIALIZE/PLACE/LOAD
// sequences over a serial link; update() must be called periodically.
class TrayController
{
  public:
    TrayController();

    void initialize();

    // Returns bool indicating if action started successfully
    bool place();

    // Returns bool indicating if action started successfully
    bool load();

    bool isActionRunning() {return cur_action_ != ACTION::NONE;}

    void update();

    void estop();

    void setLoadComplete();

    // Used for testing
    void setTrayInitialized(bool value) {is_initialized_ = value;};

  private:

    enum ACTION
    {
        NONE,
        INITIALIZE,
        PLACE,
        LOAD,
    };

    SerialCommsBase* serial_to_lifter_driver_;  // Serial connection to lifter driver
    bool action_step_running_;
    bool load_complete_;
    int action_step_;
    bool fake_tray_motion_;
    ACTION cur_action_;
    RateController controller_rate_;  // Rate limit controller loop
    Timer action_timer_;
    bool is_initialized_;

    void runStepAndWaitForCompletion(std::string data, std::string debug_print);

    void updateInitialize();

    void updateLoad();

    void updatePlace();
};

#endif


================================================
FILE: src/robot/src/camera_tracker/CameraPipeline.cpp
================================================
#include "CameraPipeline.h"
// NOTE(review): extraction stripped two bracketed include targets below
// (this file uses std::mutex and OpenCV, so likely <mutex> and an <opencv2/...> header) -
// restore before building.
#include
#include "constants.h"
#include

std::mutex data_mutex;

// Returns the pixel location of the largest-area keypoint, or (0,0) if none.
// NOTE(review): extraction stripped the vector's element type (presumably cv::KeyPoint).
cv::Point2f getBestKeypoint(std::vector keypoints)
{
    if(keypoints.empty())
    {
        return {0,0};
    }

    // Get keypoint with largest area
    cv::KeyPoint best_keypoint = keypoints.front();
    for (const auto& k : keypoints)
    {
        // PLOGI << "Keypoint: " << k.class_id;
        // PLOGI << "Point: " << k.pt;
        // PLOGI << "Angle: " << k.angle;
        // PLOGI << "Size: " << k.size;
        if(k.size > best_keypoint.size)
        {
            best_keypoint = k;
        }
    }
    return best_keypoint.pt;
}

CameraPipeline::CameraPipeline(CAMERA_ID id, bool start_thread)
: use_debug_image_(cfg.lookup("vision_tracker.debug.use_debug_image")),
  output_debug_images_(cfg.lookup("vision_tracker.debug.save_camera_debug")),
  thread_running_(false),
  current_output_()
{
    initCamera(id);

    // Configure detection parameters
    threshold_ = cfg.lookup("vision_tracker.detection.threshold");
    blob_params_.minThreshold = 10;
    blob_params_.maxThreshold = 200;
    blob_params_.filterByArea = cfg.lookup("vision_tracker.detection.blob.use_area");
    blob_params_.minArea = cfg.lookup("vision_tracker.detection.blob.min_area");
    blob_params_.maxArea = cfg.lookup("vision_tracker.detection.blob.max_area");
    blob_params_.filterByColor = false;
    blob_params_.filterByCircularity = cfg.lookup("vision_tracker.detection.blob.use_circularity");
    blob_params_.minCircularity = cfg.lookup("vision_tracker.detection.blob.min_circularity");
    blob_params_.maxCircularity = cfg.lookup("vision_tracker.detection.blob.max_circularity");
    blob_params_.filterByConvexity = false;
    blob_params_.filterByInertia = false;
    blob_detector_ = cv::SimpleBlobDetector::create(blob_params_);

    // Start thread
    if (start_thread) start();
}

CameraPipeline::~CameraPipeline()
{
    stop();
}

void
CameraPipeline::initCamera(CAMERA_ID id)
{
    // Loads intrinsics (K, D) from the calibration file, applies resolution scaling,
    // builds extrinsics from configured physical offsets, and opens either the live
    // camera or a debug image depending on config.

    // Get config strings
    std::string name = cameraIdToString(id);
    std::string calibration_path_config_name = "vision_tracker." + name + ".calibration_file";
    std::string camera_path_config_name = "vision_tracker." + name + ".camera_path";
    std::string debug_output_path_config_name = "vision_tracker." + name + ".debug_output_path";
    std::string debug_image_config_name = "vision_tracker." + name + ".debug_image";
    std::string x_offset_config_name = "vision_tracker.physical." + name + ".x_offset";
    std::string y_offset_config_name = "vision_tracker.physical." + name + ".y_offset";
    std::string z_offset_config_name = "vision_tracker.physical." + name + ".z_offset";
    std::string x_res_scale_config_name = "vision_tracker." + name + ".resolution_scale_x";
    std::string y_res_scale_config_name = "vision_tracker." + name + ".resolution_scale_y";

    // Initialize intrinsic calibration data
    camera_data_.id = id;
    std::string calibration_path = cfg.lookup(calibration_path_config_name);
    PLOGI.printf("Loading %s camera calibration from %s", name.c_str(), calibration_path.c_str());
    cv::FileStorage fs(calibration_path, cv::FileStorage::READ);
    if(fs["K"].isNone() || fs["D"].isNone())
    {
        PLOGE.printf("Missing %s calibration data", name.c_str());
        throw;
    }
    fs["K"] >> camera_data_.K;
    fs["D"] >> camera_data_.D;

    // Handle scale factors
    // NOTE(review): extraction stripped the template argument on every Mat::at call in
    // this function (presumably at<double> for a FileStorage-loaded matrix - confirm).
    float x_res_scale = cfg.lookup(x_res_scale_config_name);
    camera_data_.K.at(0,0) = camera_data_.K.at(0,0) * x_res_scale;
    camera_data_.K.at(0,1) = camera_data_.K.at(0,1) * x_res_scale;
    camera_data_.K.at(0,2) = camera_data_.K.at(0,2) * x_res_scale;
    float y_res_scale = cfg.lookup(y_res_scale_config_name);
    camera_data_.K.at(1,0) = camera_data_.K.at(1,0) * y_res_scale;
    camera_data_.K.at(1,1) = camera_data_.K.at(1,1) * y_res_scale;
    camera_data_.K.at(1,2) = camera_data_.K.at(1,2) * y_res_scale;

    // Initialize extrinsic data
    float x_offset = cfg.lookup(x_offset_config_name);
    float y_offset = cfg.lookup(y_offset_config_name);
    float z_offset = cfg.lookup(z_offset_config_name);
    Eigen::Vector3f camera_pose = {x_offset, y_offset, z_offset};
    Eigen::Matrix3f camera_rotation;
    // Hardcoding rotation matrices here for simplicity
    if(id == CAMERA_ID::SIDE)
    {
        camera_rotation << 1,0,0,
                           0,-1,0,
                           0,0,-1;
    }
    else
    {
        camera_rotation << 0,1,0,
                           1,0,0,
                           0,0,-1;
    }
    // From https://ksimek.github.io/2012/08/22/extrinsic/
    camera_data_.t = -1 * camera_rotation * camera_pose;
    camera_data_.R = camera_rotation.transpose();
    // PLOGI << name << " R: " << camera_data_.R;
    // PLOGI << name << " t: " << camera_data_.t.transpose();

    // Precompute some inverse values for later re-use
    camera_data_.R_inv = camera_data_.R.inverse();
    Eigen::Matrix3f tmp;
    // Hack to get around problems with enabling eigen support in opencv
    tmp << camera_data_.K.at(0,0), camera_data_.K.at(0,1), camera_data_.K.at(0,2),
           camera_data_.K.at(1,0), camera_data_.K.at(1,1), camera_data_.K.at(1,2),
           camera_data_.K.at(2,0), camera_data_.K.at(2,1), camera_data_.K.at(2,2);
    camera_data_.K_inv = tmp.inverse();
    // PLOGI << name << " K: " << camera_data_.K;
    // PLOGI << name << " Ktmp: " << tmp;
    // PLOGI << name << " Kinv: " << camera_data_.K_inv;

    // Initialize camera calibration capture or debug data
    if(!use_debug_image_)
    {
        std::string camera_path = cfg.lookup(camera_path_config_name);
        camera_data_.capture = cv::VideoCapture(camera_path);
        if (!camera_data_.capture.isOpened())
        {
            PLOGE.printf("Could not open %s camera at %s", name.c_str(), camera_path.c_str());
            throw;
        }
        PLOGI.printf("Opened %s camera at %s", name.c_str(), camera_path.c_str());
        camera_data_.capture.set(cv::CAP_PROP_FRAME_WIDTH, 320);
        camera_data_.capture.set(cv::CAP_PROP_FRAME_HEIGHT, 240);
        camera_data_.capture.set(cv::CAP_PROP_FPS, 30);
        int width = camera_data_.capture.get(cv::CAP_PROP_FRAME_WIDTH);
        int height = camera_data_.capture.get(cv::CAP_PROP_FRAME_HEIGHT);
        int fps = camera_data_.capture.get(cv::CAP_PROP_FPS);
        PLOGI.printf("Properties %s: resolution: %ix%i, fps: %i", name.c_str(), width, height, fps);
    }
    else
    {
        std::string image_path = cfg.lookup(debug_image_config_name);
        camera_data_.debug_frame = cv::imread(image_path, cv::IMREAD_GRAYSCALE);
        if(camera_data_.debug_frame.empty())
        {
            PLOGE.printf("Could not read %s debug image from %s", name.c_str(), image_path.c_str());
            throw;
        }
        PLOGW.printf("Loading %s debug image from %s", name.c_str(), image_path.c_str());
    }

    // Initialze debug output path
    if(output_debug_images_)
    {
        camera_data_.debug_output_path = std::string(cfg.lookup(debug_output_path_config_name));
    }
}

std::string CameraPipeline::cameraIdToString(CAMERA_ID id)
{
    if(id == CAMERA_ID::SIDE) return "side";
    if(id == CAMERA_ID::REAR) return "rear";
    PLOGE << "Unknown camera id";
    return "unk";
}

// Undistorts the raw frame, thresholds it, and runs blob detection; optionally writes
// a set of annotated debug images.
// NOTE(review): extraction stripped the vector element types here (presumably cv::KeyPoint).
std::vector CameraPipeline::allKeypointsInImage(cv::Mat img_raw, bool output_debug)
{
    // Timer t;
    // Undistort and crop
    cv::Mat img_undistorted;
    cv::Rect validPixROI;
    // PLOGI << "init memory time: " << t.dt_ms();
    // t.reset();
    // NOTE(review): newcameramtx/validPixROI are computed but not passed to undistort below - verify intent
    cv::Mat newcameramtx = cv::getOptimalNewCameraMatrix(camera_data_.K, camera_data_.D, img_raw.size(), /*alpha=*/1, img_raw.size(), &validPixROI);
    // PLOGI << "new camera time: " << t.dt_ms();
    // t.reset();
    cv::undistort(img_raw, img_undistorted, camera_data_.K, camera_data_.D);
    // PLOGI << "undistort time: " << t.dt_ms();
    // t.reset();

    // Threshold
    cv::Mat img_thresh;
    cv::threshold(img_undistorted, img_thresh, threshold_, 255, cv::THRESH_BINARY_INV);
    // PLOGI <<" Threshold";
    // PLOGI << "threshold time: " << t.dt_ms();
    // t.reset();

    // Blob detection
    std::vector keypoints;
    keypoints.reserve(10);
    // PLOGI << "blob memory time: " << t.dt_ms();
    // t.reset();
    blob_detector_->detect(img_thresh, keypoints);
    // PLOGI <<"Num blobs " << keypoints.size();
    // PLOGI << "blob detect time: " << t.dt_ms();
    // t.reset();

    if(output_debug)
    {
        std::string debug_path = camera_data_.id == CAMERA_ID::SIDE ?
            cfg.lookup("vision_tracker.side.debug_output_path") :
            cfg.lookup("vision_tracker.rear.debug_output_path");
        cv::imwrite(debug_path + "img_raw.jpg", img_raw);
        cv::imwrite(debug_path + "img_undistorted.jpg", img_undistorted);
        cv::imwrite(debug_path + "img_thresh.jpg", img_thresh);

        cv::Mat img_with_keypoints;
        cv::drawKeypoints(img_thresh, keypoints, img_with_keypoints, cv::Scalar(0,0,255), cv::DrawMatchesFlags::DRAW_RICH_KEYPOINTS);
        cv::imwrite(debug_path + "img_keypoints.jpg", img_with_keypoints);

        cv::Mat img_with_best_keypoint = img_undistorted;
        cv::Point2f best_keypoint = getBestKeypoint(keypoints);
        cv::circle(img_with_best_keypoint, best_keypoint, 1, cv::Scalar(0,0,255), -1);
        cv::circle(img_with_best_keypoint, best_keypoint, 10, cv::Scalar(0,0,255), 5);

        Eigen::Vector2f best_point_m = cameraToRobot(best_keypoint);
        std::string label_text = "Best:" + std::to_string(best_point_m[0]) +"m,"+ std::to_string(best_point_m[1]) + "m";
        cv::putText(img_with_best_keypoint, //target image
            label_text, //text
            cv::Point(10, 20), //top-left position
            cv::FONT_HERSHEY_DUPLEX,
            0.5,
            CV_RGB(255,0,0), //font color
            1);

        Eigen::Vector2f target_point_world;
        if(camera_data_.id == CAMERA_ID::SIDE)
        {
            target_point_world << float(cfg.lookup("vision_tracker.physical.side.target_x")),
                                  float(cfg.lookup("vision_tracker.physical.side.target_y"));
        }
        else
        {
            target_point_world << float(cfg.lookup("vision_tracker.physical.rear.target_x")),
                                  float(cfg.lookup("vision_tracker.physical.rear.target_y"));
        }
        Eigen::Vector2f target_point_camera = robotToCamera(target_point_world);
        cv::Point2f pt{target_point_camera[0], target_point_camera[1]};
        cv::circle(img_with_best_keypoint, pt, 5, cv::Scalar(255,0,0), -1);
        std::string label_text2 = "Target:" + std::to_string(target_point_camera[0]) +"px, "+ std::to_string(target_point_camera[1]) + "px";
        cv::putText(img_with_best_keypoint, //target image
            label_text2, //text
            cv::Point(10, 40), //top-left position
            cv::FONT_HERSHEY_DUPLEX,
            0.5,
            CV_RGB(0,0,255), //font color
            1);
cv::imwrite(debug_path + "img_best_keypoint.jpg", img_with_best_keypoint); PLOGI.printf("Writing debug images %s",cameraIdToString(camera_data_.id).c_str()); } // PLOGI << "debug time: " << t.dt_ms(); // t.reset(); return keypoints; } void CameraPipeline::start() { if(!thread_running_) { PLOGI.printf("Starting %s camera thread",cameraIdToString(camera_data_.id).c_str()); thread_running_ = true; thread_ = std::thread(&CameraPipeline::threadLoop, this); // thread_.detach(); } } void CameraPipeline::stop() { if(thread_running_) { PLOGI.printf("Stopping %s camera thread",cameraIdToString(camera_data_.id).c_str()); thread_running_ = false; thread_.join(); } } void CameraPipeline::threadLoop() { while(thread_running_) { oneLoop(); } } void CameraPipeline::oneLoop() { Eigen::Vector2f best_point_m = {0,0}; cv::Point2f best_point_px = {0,0}; bool detected = false; try { // Get latest frame cv::Mat frame; if (use_debug_image_) frame = camera_data_.debug_frame; else camera_data_.capture >> frame; // Perform keypoint detection std::vector keypoints = allKeypointsInImage(frame, output_debug_images_); // Do post-processing if the detection was successful detected = !keypoints.empty(); if(detected) { best_point_px = getBestKeypoint(keypoints); best_point_m = cameraToRobot(best_point_px); } } catch (cv::Exception& e) { PLOGW << "Caught cv::Exception: " << e.what(); detected = false; best_point_m = {0,0}; best_point_px = {0,0}; } // Update output values in thread-safe manner { std::lock_guard read_lock(data_mutex); current_output_.ok = detected; current_output_.timestamp = ClockFactory::getFactoryInstance()->get_clock()->now(); current_output_.point = best_point_m; current_output_.uv = {best_point_px.x, best_point_px.y}; } } CameraPipelineOutput CameraPipeline::getData() { std::lock_guard read_lock(data_mutex); CameraPipelineOutput output = current_output_; current_output_.ok = false; return output; } Eigen::Vector2f CameraPipeline::cameraToRobot(cv::Point2f cameraPt) { 
Eigen::Vector3f uv_point = {cameraPt.x, cameraPt.y, 1}; // Compute scaling factor for Z=0 Eigen::Vector3f tmp1 = camera_data_.R_inv * camera_data_.K_inv * uv_point; Eigen::Vector3f tmp2 = camera_data_.R_inv * camera_data_.t; float s = tmp2(2,0) / tmp1(2,0); // Debug Eigen::Vector3f scaled_cam_frame = s * camera_data_.K_inv * uv_point; Eigen::Vector3f cam_frame_offset = scaled_cam_frame - camera_data_.t; // Convert to world coordinate Eigen::Vector3f world_point = camera_data_.R_inv * cam_frame_offset; // PLOGI << cameraIdToString(id); // PLOGI << "uv_point: " << uv_point.transpose(); // PLOGI << "scaled_cam_frame: " << scaled_cam_frame.transpose(); // PLOGI << "cam_frame_offset: " << cam_frame_offset.transpose(); // PLOGI << "world_point: " << world_point.transpose(); if(fabs(world_point(2)) > 1e-3) PLOGE.printf("Z value %f is not near zero", world_point(2)); return {world_point(0),world_point(1)}; } Eigen::Vector2f CameraPipeline::robotToCamera(Eigen::Vector2f robotPt) { Eigen::Vector4f robot_point_3d = {robotPt[0], robotPt[1], 0, 1}; Eigen::MatrixXf r_t(3,4); r_t << camera_data_.R, camera_data_.t; Eigen::Matrix3f K_tmp; // Hack to get around problems with enabling eigen support in opencv K_tmp << camera_data_.K.at(0,0), camera_data_.K.at(0,1), camera_data_.K.at(0,2), camera_data_.K.at(1,0), camera_data_.K.at(1,1), camera_data_.K.at(1,2), camera_data_.K.at(2,0), camera_data_.K.at(2,1), camera_data_.K.at(2,2); Eigen::Vector3f camera_pt_scaled = K_tmp * r_t * robot_point_3d; return {camera_pt_scaled[0]/camera_pt_scaled[2], camera_pt_scaled[1]/camera_pt_scaled[2]}; } ================================================ FILE: src/robot/src/camera_tracker/CameraPipeline.h ================================================ #ifndef CameraPipeline_h #define CameraPipeline_h #include "utils.h" #include #include #include #include enum class CAMERA_ID { REAR, SIDE }; struct CameraPipelineOutput { bool ok = false; ClockTimePoint timestamp = 
ClockFactory::getFactoryInstance()->get_clock()->now(); Eigen::Vector2f point = {0,0}; Eigen::Vector2f uv = {0,0}; }; class CameraPipeline { public: CameraPipeline(CAMERA_ID id, bool start_thread); ~CameraPipeline(); CameraPipelineOutput getData(); void start(); void stop(); void oneLoop(); void toggleDebugImageOutput() {output_debug_images_ = !output_debug_images_;}; Eigen::Vector2f cameraToRobot(cv::Point2f cameraPt); Eigen::Vector2f robotToCamera(Eigen::Vector2f robotPt); private: struct CameraData { CAMERA_ID id; cv::VideoCapture capture; cv::Mat K; Eigen::Matrix3f K_inv; cv::Mat D; Eigen::Matrix3f R; Eigen::Matrix3f R_inv; Eigen::Vector3f t; cv::Mat debug_frame; std::string debug_output_path; }; void threadLoop(); std::string cameraIdToString(CAMERA_ID id); void initCamera(CAMERA_ID id); std::vector allKeypointsInImage(cv::Mat img_raw, bool output_debug); CameraData camera_data_; bool use_debug_image_; std::atomic output_debug_images_; std::atomic thread_running_; std::thread thread_; CameraPipelineOutput current_output_; // Params read from config cv::SimpleBlobDetector::Params blob_params_; cv::Ptr blob_detector_; int threshold_; float pixels_per_meter_u_; float pixels_per_meter_v_; }; #endif //CameraPipeline_h ================================================ FILE: src/robot/src/camera_tracker/CameraTracker.cpp ================================================ #include "CameraTracker.h" #include #include "constants.h" constexpr int num_samples_to_average = 10; CameraTracker::CameraTracker(bool start_thread) : camera_loop_time_averager_(num_samples_to_average), rear_cam_(CAMERA_ID::REAR, start_thread), side_cam_(CAMERA_ID::SIDE, start_thread), output_({{0,0,0}, false, ClockFactory::getFactoryInstance()->get_clock()->now(), false}), last_rear_cam_output_(), last_side_cam_output_(), side_cam_ok_filter_(0.5), rear_cam_ok_filter_(0.5), both_cams_ok_filter_(0.5), running_(false) { // Target points robot_P_side_target_ = 
{cfg.lookup("vision_tracker.physical.side.target_x"), cfg.lookup("vision_tracker.physical.side.target_y")}; robot_P_rear_target_ = {cfg.lookup("vision_tracker.physical.rear.target_x"), cfg.lookup("vision_tracker.physical.rear.target_y")}; } CameraTracker::~CameraTracker() {} void CameraTracker::update() { CameraPipelineOutput side_output = side_cam_.getData(); CameraPipelineOutput rear_output = rear_cam_.getData(); output_.raw_detection = side_output.ok && rear_output.ok; debug_.side_ok = side_cam_ok_filter_.update(side_output.ok); debug_.rear_ok = rear_cam_ok_filter_.update(rear_output.ok); bool new_output_pose_ready = false; if(rear_output.ok) { last_rear_cam_output_ = rear_output; } if(side_output.ok) { last_side_cam_output_ = side_output; } if(last_side_cam_output_.ok && last_rear_cam_output_.ok) new_output_pose_ready = true; // int time_delta_ms = std::chrono::duration_cast // (last_side_cam_output_.timestamp - last_rear_cam_output_.timestamp).count(); // if(abs(time_delta_ms) > 400) // { // PLOGW << "Camera timestamp delta too large: " << time_delta_ms; // new_output_pose_ready = false; // } output_.ok = both_cams_ok_filter_.update(new_output_pose_ready); debug_.both_ok = output_.ok; if(!new_output_pose_ready) return; // Populate output output_.pose = computeRobotPoseFromImagePoints(last_side_cam_output_.point, last_rear_cam_output_.point); output_.timestamp = ClockFactory::getFactoryInstance()->get_clock()->now(); camera_loop_time_averager_.mark_point(); // Populate debug debug_.side_u = last_side_cam_output_.uv[0]; debug_.side_v = last_side_cam_output_.uv[1]; debug_.rear_u = last_rear_cam_output_.uv[0]; debug_.rear_v = last_rear_cam_output_.uv[1]; debug_.side_x = last_side_cam_output_.point[0]; debug_.side_y = last_side_cam_output_.point[1]; debug_.rear_x = last_rear_cam_output_.point[0]; debug_.rear_y = last_rear_cam_output_.point[1]; debug_.pose_x = output_.pose.x; debug_.pose_y = output_.pose.y; debug_.pose_a = output_.pose.a; debug_.loop_ms = 
camera_loop_time_averager_.get_ms();
    // Mark camera output values as not okay to make sure they are only used once
    last_rear_cam_output_.ok = false;
    last_side_cam_output_.ok = false;
}

// Return the most recently fused pose estimate (struct copy; freshness is
// indicated by output_.ok / output_.timestamp set in update()).
CameraTrackerOutput CameraTracker::getPoseFromCamera() { return output_; }

// Return the per-camera debug snapshot populated by update().
CameraDebug CameraTracker::getCameraDebug() { return debug_; }

// Start both camera pipelines and reset loop timing so stale averages from a
// previous run are discarded.
void CameraTracker::start()
{
    rear_cam_.start();
    side_cam_.start();
    camera_loop_time_averager_ = TimeRunningAverage(num_samples_to_average);
    running_ = true;
}

// Stop both camera pipeline threads.
void CameraTracker::stop()
{
    rear_cam_.stop();
    side_cam_.stop();
    running_ = false;
}

// Toggle debug-image writing on both pipelines at once.
void CameraTracker::toggleDebugImageOutput()
{
    rear_cam_.toggleDebugImageOutput();
    side_cam_.toggleDebugImageOutput();
}

// Compute the robot pose from the two camera-detected marker points.
// Solves a 4x4 linear system for x = [pose_x, pose_y, s, c] where the rows
// constrain each detected point (p_rear, p_side) to map onto its known
// robot-frame target under the rigid transform; the angle is then recovered
// as a = atan2(s, c). NOTE(review): s/c correspond to sin/cos of the pose
// angle — verify against the derivation before reusing elsewhere.
Point CameraTracker::computeRobotPoseFromImagePoints(Eigen::Vector2f p_side, Eigen::Vector2f p_rear)
{
    // Solve linear equations to get pose values
    Eigen::Matrix4f A;
    Eigen::Vector4f b;
    A << 1, 0, -p_rear[1], p_rear[0],
         0, 1,  p_rear[0], p_rear[1],
         1, 0, -p_side[1], p_side[0],
         0, 1,  p_side[0], p_side[1];
    b << robot_P_rear_target_[0], robot_P_rear_target_[1],
         robot_P_side_target_[0], robot_P_side_target_[1];
    Eigen::Vector4f x = A.colPivHouseholderQr().solve(b);
    // Populate pose; atan2 on the (s, c) pair normalizes the angle
    float pose_x = x[0];
    float pose_y = x[1];
    float pose_a = atan2(x[2], x[3]);
    return {pose_x, pose_y, pose_a};
}

================================================
FILE: src/robot/src/camera_tracker/CameraTracker.h
================================================
#ifndef CameraTracker_h
#define CameraTracker_h

#include "CameraTrackerBase.h"
#include "utils.h"
// NOTE(review): the next include's target was stripped by extraction —
// restore from upstream (likely an Eigen header).
#include
#include "CameraPipeline.h"

// Fuses detections from the rear and side camera pipelines into a single
// robot pose estimate (see computeRobotPoseFromImagePoints).
class CameraTracker : public CameraTrackerBase
{
  public:
    CameraTracker(bool start_thread = false);
    virtual ~CameraTracker();
    virtual void start() override;
    virtual void stop() override;
    virtual void update() override;
    virtual bool running() override { return running_; };
    virtual void toggleDebugImageOutput() override;
    virtual CameraTrackerOutput getPoseFromCamera() override;
    virtual CameraDebug
getCameraDebug() override; // Below here exposed for testing only (yes, bad practice, but useful for now) Point computeRobotPoseFromImagePoints(Eigen::Vector2f p_side, Eigen::Vector2f p_rear); void oneLoop(); private: TimeRunningAverage camera_loop_time_averager_; CameraDebug debug_; CameraPipeline rear_cam_; CameraPipeline side_cam_; CameraTrackerOutput output_; Eigen::Vector2f robot_P_side_target_; Eigen::Vector2f robot_P_rear_target_; CameraPipelineOutput last_rear_cam_output_; CameraPipelineOutput last_side_cam_output_; LatchedBool side_cam_ok_filter_; LatchedBool rear_cam_ok_filter_; LatchedBool both_cams_ok_filter_; bool running_; }; #endif //CameraTracker_h ================================================ FILE: src/robot/src/camera_tracker/CameraTrackerBase.h ================================================ #ifndef CameraTrackerBase_h #define CameraTrackerBase_h #include "utils.h" struct CameraTrackerOutput { Point pose; bool ok; ClockTimePoint timestamp; bool raw_detection; }; class CameraTrackerBase { public: virtual void start() = 0; virtual void stop() = 0; virtual void update() = 0; virtual bool running() = 0; virtual void toggleDebugImageOutput() = 0; virtual CameraTrackerOutput getPoseFromCamera() = 0; virtual CameraDebug getCameraDebug() = 0; }; #endif //CameraTrackerBase_h ================================================ FILE: src/robot/src/camera_tracker/CameraTrackerFactory.cpp ================================================ #include "CameraTrackerFactory.h" #include "CameraTracker.h" #include "CameraTrackerMock.h" #include CameraTrackerFactory* CameraTrackerFactory::instance = NULL; CameraTrackerFactory* CameraTrackerFactory::getFactoryInstance() { if(!instance) { instance = new CameraTrackerFactory; } return instance; } void CameraTrackerFactory::set_mode(CAMERA_TRACKER_FACTORY_MODE mode) { mode_ = mode; } CameraTrackerBase* CameraTrackerFactory::get_camera_tracker() { if (!camera_tracker_) { build_camera_tracker(); } return 
camera_tracker_.get(); }

// Instantiate the concrete tracker according to the configured mode.
// NOTE(review): std::make_unique's template arguments were stripped by
// extraction — restore <CameraTracker> / <CameraTrackerMock> from upstream.
void CameraTrackerFactory::build_camera_tracker()
{
    if(mode_ == CAMERA_TRACKER_FACTORY_MODE::STANDARD)
    {
        camera_tracker_ = std::make_unique();
        PLOGI << "Built CameraTracker";
    }
    else if (mode_ == CAMERA_TRACKER_FACTORY_MODE::MOCK)
    {
        camera_tracker_ = std::make_unique();
        PLOGI << "Built CameraTrackerMock";
    }
}

// Private constructor — defaults to the real (STANDARD) tracker; the mock is
// selected via set_mode() before the first get_camera_tracker() call.
CameraTrackerFactory::CameraTrackerFactory() :
    mode_(CAMERA_TRACKER_FACTORY_MODE::STANDARD),
    camera_tracker_()
{}

================================================
FILE: src/robot/src/camera_tracker/CameraTrackerFactory.h
================================================
#ifndef CameraTrackerFactory_h
#define CameraTrackerFactory_h

// NOTE(review): the next two includes' targets were stripped by extraction
// (likely <memory> and a plog/logging header) — restore from upstream.
#include
#include
#include "CameraTrackerBase.h"

// Which tracker implementation the factory should build.
enum class CAMERA_TRACKER_FACTORY_MODE
{
    STANDARD,
    MOCK,
};

// Lazily-constructed singleton factory that owns the process-wide camera
// tracker instance (real or mock, chosen by set_mode before first use).
class CameraTrackerFactory
{
  public:
    static CameraTrackerFactory* getFactoryInstance();
    // Must be called before get_camera_tracker() to take effect; the built
    // tracker is cached and not rebuilt on later mode changes.
    void set_mode(CAMERA_TRACKER_FACTORY_MODE mode);
    // Builds on first call, then returns the cached instance (non-owning ptr).
    CameraTrackerBase* get_camera_tracker();

    // Delete copy and assignment constructors
    CameraTrackerFactory(CameraTrackerFactory const&) = delete;
    CameraTrackerFactory& operator= (CameraTrackerFactory const&) = delete;

  private:
    // Make standard constructor private so it can't be created
    CameraTrackerFactory();
    void build_camera_tracker();

    static CameraTrackerFactory* instance;   // lazily allocated, never freed
    CAMERA_TRACKER_FACTORY_MODE mode_;
    std::unique_ptr camera_tracker_;
};

#endif //CameraTrackerFactory_h

================================================
FILE: src/robot/src/camera_tracker/CameraTrackerMock.h
================================================
#ifndef CameraTrackerMock_h
#define CameraTrackerMock_h

#include "utils.h"

// No-op tracker used when running without cameras: all lifecycle methods do
// nothing, running() always reports true, and getPoseFromCamera() returns a
// zero pose flagged not-ok so callers never act on it.
class CameraTrackerMock : public CameraTrackerBase
{
  public:
    virtual void start() override {};
    virtual void stop() override {};
    virtual void update() override {};
    virtual bool running() override {return true;};
    virtual void toggleDebugImageOutput() override {};
    virtual CameraTrackerOutput getPoseFromCamera() override { return {{0,0,0},false,
ClockFactory::getFactoryInstance()->get_clock()->now(),false};}; virtual CameraDebug getCameraDebug() override {return CameraDebug(); }; }; #endif //CameraTrackerMock_h ================================================ FILE: src/robot/src/constants.cfg ================================================ name = "Runtime constants"; log_level = "info"; // verbose, debug, info, warning, error, fatal, none motion = { limit_max_fraction = 0.8; // Only generate a trajectory to this fraction of max speed to give motors headroom to compensate controller_frequency = 40 ; // Hz for RobotController log_frequency = 20 ; // HZ for logging to motion log fake_perfect_motion = false; // Enable or disable bypassing clearcore to fake perfect motion for testing rate_always_ready = false; // Bypasses rate limiter if set to true translation = { max_vel = { vision = 0.08; // m/s fine = 0.1; // m/s coarse = 0.7; // m/s }; max_acc = { vision = 0.1; // m/s^2 fine = 0.15; // m/s^2 coarse = 0.2; // m/s^2 }; max_jerk = { vision = 0.1; // m/s^3 fine = 0.2; // m/s^3 coarse = 0.5; // m/s^3 }; position_threshold = { vision = 0.005; // m fine = 0.04; // m coarse = 0.10; // m }; velocity_threshold = { vision = 0.01; // m/s fine = 0.03; // m/s coarse = 0.05; // m/s }; gains = { kp = 2.0; ki = 0.1; kd = 0.0; }; gains_vision = { kp = 1.0; ki = 0.0; kd = 0.9; }; }; rotation = { max_vel = { vision = 0.1; // rad/s fine = 0.15; // rad/s coarse = 0.4; // rad/s }; max_acc = { vision = 0.2; // rad/s^2 fine = 0.3; // rad/s^2 coarse = 0.5; // rad/s^2 }; max_jerk = { vision = 0.5; // rad/s^3 fine = 0.7; // rad/s^3 coarse = 1.0; // rad/s^3 }; position_threshold = { vision = 0.002; // rad fine = 0.02; // rad coarse = 0.08; // rad }; velocity_threshold = { vision = 0.02; // rad/s fine = 0.03; // rad/s coarse = 0.05; // rad/s }; gains = { kp = 3.0; ki = 0.1; kd = 0.0; }; gains_vision = { kp = 1.0; ki = 0.0; kd = 0.9; }; }; }; physical = { wheel_diameter = 0.152; // m wheel_dist_from_center = 0.4794; // m }; 
trajectory_generation = {
    solver_max_loops = 30; // Only let the solver loop this many times before giving up
    solver_alpha_decay = 0.8; // Decay for velocity limit
    solver_beta_decay = 0.8; // Decay for acceleration limit
    solver_exponent_decay = 0.1; // Decay exponent to apply each loop
    min_dist_limit = 0.0001; // Smallest value solver will attempt to solve for
};
tray = {
    default_pos_revs = 33.0; // Default position for driving in revs from home
    load_pos_revs = 33.0; // Loading position in revs from home
    place_pos_revs = 67.0; // Placing position in revs from home
    steps_per_rev = 800; // Number of steps per motor rev
    controller_frequency = 20 ; // Hz for controller rate
    fake_tray_motions = false; // Flag to fake tray motions for testing
};
localization = {
    mm_x_offset = -60.0; // X offset of mm pair center from center of rotation (in millimeters, + is in front, - is behind)
    mm_y_offset = 0.0; // Y offset of mm pair center from center of rotation (in millimeters, + is to left, - is to right)
    max_wait_time = 10.0; // Max time to wait (s) when waiting for good localization
    confidence_for_wait = 0.97; // Confidence level required for waiting for localization
    kf_predict_trans_cov = 0.00005; // How much noise is expected in the prediction step (each timestep) for position, lower is less noise
    kf_predict_angle_cov = 0.0001; // How much noise is expected in the prediction step for angle, lower is less noise
    kf_meas_trans_cov = 0.01; // How much noise is expected in the marvelmind measurements of position, lower is less noise
    kf_meas_angle_cov = 0.01; // How much noise is expected in the marvelmind measurements of angle, lower is less noise
    kf_uncertainty_scale = 1.0; // Scale factor for how much extra noise to add based on position confidence
    variance_ref_trans = 1.0; // Max position variance for 0 confidence
    variance_ref_angle = 1.57; // Max angle variance for 0 confidence
    min_vel_uncertainty = 0.5; // Minimum uncertainty for any nonzero velocity
    vel_uncertainty_slope =
0.5; // Uncertianty/velocity scale max_vel_uncetainty = 1.0; // Uncertainty cap vel_uncertainty_decay_time = 4.0; // Number of seconds after completeing motion until uncertainty goes to 0 }; // USB ports on pi mapping to v4l path num // USB2 - 3 | USB3 - 1 | Eth // USB2 - 4 | USB3 - 2 | vision_tracker = { side = { camera_path = "/dev/v4l/by-path/platform-fd500000.pcie-pci-0000:01:00.0-usb-0:1.1.1:1.0-video-index0" calibration_file = "/home/pi/IR_calibration_2.yml"; // Calibration data debug_output_path = "/home/pi/images/debug/side/"; // Path to output debug images debug_image = "/home/pi/images/dev/side_raw_2.jpg"; // Debug image path resolution_scale_x = 0.25; // Scale factor from calibration resolution to onboard resolution resolution_scale_y = 0.333333; // Scale factor from calibration resolution to onboard resolution } rear = { camera_path = "/dev/v4l/by-path/platform-fd500000.pcie-pci-0000:01:00.0-usb-0:1.1.2:1.0-video-index0" calibration_file = "/home/pi/IR_calibration_1.yml"; // Calibration data debug_output_path = "/home/pi/images/debug/rear/"; // Path to output debug images debug_image = "/home/pi/DominoRobot/src/robot/test/testdata/images/20210701210351_rear_img_raw.jpg"; // Debug image path resolution_scale_x = 0.25; // Scale factor from calibration resolution to onboard resolution resolution_scale_y = 0.333333; // Scale factor from calibration resolution to onboard resolution } debug = { use_debug_image = false; // Use debug image instead of loading camera save_camera_debug = false; } detection = { threshold = 235; // Threshold value for image processing blob = { use_area = true; // Use area for blob detection min_area = 80.0; // Min area for blob detection max_area = 500.0; // Max area for blob detection use_circularity = true; // Use circularity for blob detection min_circularity = 0.6; // Min circularity for blob detection max_circularity = 1.0; // Max circularity for blob detection } } physical = { pixels_per_meter_u = 1362.0; // Hack to try and 
get something working pixels_per_meter_v = 1411.0; side = { x_offset = 0.0; // x offset (meters) from fake robot frame origin y_offset = 0.93; // y offset (meters) from fake robot frame origin z_offset = 0.485; // z offset (meters) from fake robot frame origin target_x = 0.0; // target x coord (meters) in fake robot frame target_y = 0.93; // target y coord (meters) in fake robot frame } rear = { x_offset = -0.68; // x offset (meters) fromfake robot frame origin y_offset = 0.0; // y offset (meters) from fake robot frame origin z_offset = 0.49; // z offset (meters) from fake robot frame origin target_x = -0.68; // target x coord (meters) in fake robot frame target_y = 0.0; // target y coord (meters) in fake robot frame } } kf = { predict_trans_cov = 0.0001; // How much noise is expected in the prediciton step (each timestep) for position, lower is less noise predict_angle_cov = 0.001; // How much noise is expected in the prediciton step (each timestep) for angle, lower is less noise meas_trans_cov = 0.001; // How much noise is expected in the update step for position, lower is less noise meas_angle_cov = 0.001; // How much noise is expected in the update step for angle, lower is less noise } }; mock_socket = { enabled = false; // Commands must have <> symbols and a number is to pause for that many ms data = ["2000","<{'type':'move_vision','data':{'x':0,'y':0,'a':0}}>"]; }; ================================================ FILE: src/robot/src/constants.h ================================================ #ifndef Constants_h #define Constants_h #include extern libconfig::Config cfg; #define CONSTANTS_FILE "/home/pi/DominoRobot/src/robot/src/constants.cfg" #define TEST_CONSTANTS_FILE "/home/pi/DominoRobot/src/robot/test/test_constants.cfg" // USB devices #define CLEARCORE_USB "/dev/clearcore" #define MARVELMIND_USB_0 "/dev/marvelmind0" //Marvelminds could show up at any of these three links #define MARVELMIND_USB_1 "/dev/marvelmind1" #define MARVELMIND_USB_2 
"/dev/marvelmind2" // Expected marvelmind device connections #define MARVELMIND_DEVICE_ID0 5 #define MARVELMIND_DEVICE_ID1 6 // Log file ID for motion specific stuff #define MOTION_LOG_ID 2 #define LOCALIZATION_LOG_ID 3 #define MOTION_CSV_LOG_ID 4 // Commands use to communicate about behavior specified from master enum class COMMAND { NONE, MOVE, MOVE_REL, MOVE_FINE, MOVE_CONST_VEL, MOVE_WITH_VISION, PLACE_TRAY, LOAD_TRAY, INITIALIZE_TRAY, POSITION, ESTOP, LOAD_COMPLETE, CLEAR_ERROR, WAIT_FOR_LOCALIZATION, SET_POSE, TOGGLE_VISION_DEBUG, START_CAMERAS, STOP_CAMERAS, MOVE_REL_SLOW, MOVE_FINE_STOP_VISION, }; #endif ================================================ FILE: src/robot/src/main.cpp ================================================ #include #include #include #include #include #include #include #include #include "robot.h" #include "constants.h" #include "sockets/SocketMultiThreadWrapperFactory.h" #include "camera_tracker/CameraTrackerFactory.h" libconfig::Config cfg = libconfig::Config(); void configure_logger() { // Get current date/time const std::time_t datetime = std::chrono::system_clock::to_time_t(std::chrono::system_clock::now()); char datetime_str[50]; std::strftime(datetime_str, sizeof(datetime_str), "%Y%m%d_%H%M%S", std::localtime(&datetime)); // Make file names std::string robot_log_file_name = std::string("log/robot_log_") + std::string(datetime_str) + std::string(".txt"); std::string motion_log_file_name = std::string("log/motion_log_") + std::string(datetime_str) + std::string(".txt"); std::string localization_log_file_name = std::string("log/localization_log_") + std::string(datetime_str) + std::string(".txt"); // Initialize robot logs to to go file and console std::string log_level = cfg.lookup("log_level"); plog::Severity severity = plog::severityFromString(log_level.c_str()); static plog::RollingFileAppender fileAppender(robot_log_file_name.c_str(), 1000000, 5); static plog::ColorConsoleAppender consoleAppender; plog::init(severity, 
&fileAppender).addAppender(&consoleAppender);

    // Initialize motion logs to go to file (rolling: 1 MB per file, 5 files)
    static plog::RollingFileAppender motionFileAppender(motion_log_file_name.c_str(), 1000000, 5);
    plog::init(plog::debug, &motionFileAppender);

    // Initialize localization logs to go to file
    static plog::RollingFileAppender localizationFileAppender(localization_log_file_name.c_str(), 1000000, 5);
    plog::init(plog::debug, &localizationFileAppender);

    PLOGI << "Logger ready";
}

// If mock_socket.enabled is set in the config, switch the socket factory to
// MOCK mode and pre-load it with the canned command strings from the config.
void setup_mock_socket()
{
    bool enabled = cfg.lookup("mock_socket.enabled");
    if(enabled)
    {
        auto factory = SocketMultiThreadWrapperFactory::getFactoryInstance();
        factory->set_mode(SOCKET_FACTORY_MODE::MOCK);
        factory->build_socket();
        for (const auto& s : cfg.lookup("mock_socket.data"))
        {
            factory->add_mock_data(s);
        }
    }
}

// One-shot camera image capture mode (triggered by the -c flag in main):
// forces debug-image saving on, runs the cameras for ~2 seconds, then stops.
void capture_images()
{
    // Modify config values to ensure they are set to log debug images
    libconfig::Setting& save_debug_config = cfg.lookup("vision_tracker.debug.save_camera_debug");
    save_debug_config = true;

    // Start camera
    CameraTrackerBase* camera_tracker_ = CameraTrackerFactory::getFactoryInstance()->get_camera_tracker();
    camera_tracker_->start();

    // Wait for a few seconds for images to save
    // NOTE(review): this is a busy-wait that spins a core for 2 s; a sleep
    // would be gentler — acceptable here since this mode exits immediately.
    Timer t;
    while(t.dt_s() < 2.0) {}
    camera_tracker_->stop();
    PLOGI << "Camera image capture complete...Exiting";
}

// Entry point: load the constants file, configure logging, then either run
// the one-shot camera capture ("-c") or start the main robot loop (blocks
// forever). Returns EXIT_FAILURE if a required config setting is missing.
int main(int argc, char** argv)
{
    try
    {
        cfg.readFile(CONSTANTS_FILE);
        std::string name = cfg.lookup("name");
        configure_logger();
        PLOGI << "Loaded constants file: " << name;

        if(argc > 1 && std::string(argv[1]) == "-c")
        {
            capture_images();
        }
        else
        {
            setup_mock_socket();
            Robot r;
            r.run(); // Should loop forever until stopped
        }
    }
    catch (const libconfig::SettingNotFoundException &e)
    {
        std::cerr << "Configuration error with " << e.getPath() << std::endl;
        return(EXIT_FAILURE);
    }
    return(EXIT_SUCCESS);
}

================================================
FILE: src/robot/src/robot.cpp
================================================
#include "robot.h"
// NOTE(review): the next include's target was stripped by extraction —
// restore from upstream.
#include
#include "utils.h"
#include
"camera_tracker/CameraTrackerFactory.h" WaitForLocalizeHelper::WaitForLocalizeHelper(const StatusUpdater& statusUpdater, float max_timeout, float confidence_threshold) : statusUpdater_(statusUpdater), timer_(), max_timeout_(max_timeout), confidence_threshold_(confidence_threshold) {} bool WaitForLocalizeHelper::isDone() { if(timer_.dt_s() > max_timeout_) { PLOGW.printf("Exiting wait for localize due to time"); return true; } float confidence = statusUpdater_.getLocalizationConfidence(); if(confidence > confidence_threshold_) { PLOGI.printf("Exiting wait for localize with confidence: %4.2f", confidence); return true; } return false; } void WaitForLocalizeHelper::start() { timer_.reset(); } Robot::Robot() : statusUpdater_(), server_(statusUpdater_), controller_(statusUpdater_), tray_controller_(), mm_wrapper_(), position_time_averager_(10), robot_loop_time_averager_(20), wait_for_localize_helper_(statusUpdater_, cfg.lookup("localization.max_wait_time"), cfg.lookup("localization.confidence_for_wait")), vision_print_rate_(10), camera_tracker_(CameraTrackerFactory::getFactoryInstance()->get_camera_tracker()), camera_motion_start_time_(ClockTimePoint::min()), camera_trigger_time_1_(ClockTimePoint::min()), camera_trigger_time_2_(ClockTimePoint::min()), camera_stop_triggered_(false), curCmd_(COMMAND::NONE) { PLOGI.printf("Robot starting"); } void Robot::run() { while(true) { runOnce(); } } void Robot::runOnce() { // Check for new command and try to start it COMMAND newCmd = server_.oneLoop(); bool status = tryStartNewCmd(newCmd); // Update our current command if we successfully started a new command if(status) { curCmd_ = newCmd; statusUpdater_.updateInProgress(true); } // Service marvelmind std::vector positions = mm_wrapper_.getPositions(); if (positions.size() == 3) { position_time_averager_.mark_point(); float angle_rad = wrap_angle(positions[2] * M_PI / 180.0); controller_.inputPosition(positions[0], positions[1], angle_rad); } // Service various modules 
controller_.update(); tray_controller_.update(); camera_tracker_->update(); // Verify if MOVE_FINE_STOP_VISION needs to trigger stop if(checkForCameraStopTrigger()) { controller_.stopFast(); camera_stop_triggered_ = true; } // Check if the current command has finished bool done = checkForCmdComplete(curCmd_); if(done) { curCmd_ = COMMAND::NONE; statusUpdater_.updateInProgress(false); } // Update loop time and status updater statusUpdater_.updatePositionLoopTime(position_time_averager_.get_ms()); CameraDebug camera_debug = camera_tracker_->getCameraDebug(); statusUpdater_.updateCameraDebug(camera_debug); robot_loop_time_averager_.mark_point(); } bool Robot::tryStartNewCmd(COMMAND cmd) { // Position info doesn't count as a real 'command' since it doesn't interrupt anything // Always service it, but don't consider it starting a new command if (cmd == COMMAND::POSITION) { RobotServer::PositionData data = server_.getPositionData(); controller_.inputPosition(data.x, data.y, data.a); // Update the position rate position_time_averager_.mark_point(); return false; } if (cmd == COMMAND::SET_POSE) { RobotServer::PositionData data = server_.getPositionData(); controller_.forceSetPosition(data.x, data.y, data.a); return false; } if (cmd == COMMAND::TOGGLE_VISION_DEBUG) { camera_tracker_->toggleDebugImageOutput(); return false; } if (cmd == COMMAND::START_CAMERAS) { camera_tracker_->start(); return false; } if (cmd == COMMAND::STOP_CAMERAS) { camera_tracker_->stop(); return false; } // Same with ESTOP if (cmd == COMMAND::ESTOP) { controller_.estop(); tray_controller_.estop(); return false; } // Same with LOAD_COMPLETE if (cmd == COMMAND::LOAD_COMPLETE) { tray_controller_.setLoadComplete(); return false; } // Just do nothing for NONE if (cmd == COMMAND::NONE) { return false;} // For all other commands, we need to make sure we aren't doing anything else at the moment if(statusUpdater_.getInProgress()) { PLOGW << "Command " << static_cast(curCmd_) << " already running, rejecting 
new command: " << static_cast(cmd); return false; } else if (statusUpdater_.getErrorStatus()) { return false; } // Start new command if(cmd == COMMAND::MOVE) { RobotServer::PositionData data = server_.getMoveData(); controller_.moveToPosition(data.x, data.y, data.a); } else if(cmd == COMMAND::MOVE_REL) { RobotServer::PositionData data = server_.getMoveData(); controller_.moveToPositionRelative(data.x, data.y, data.a); } else if(cmd == COMMAND::MOVE_REL_SLOW) { RobotServer::PositionData data = server_.getMoveData(); controller_.moveToPositionRelativeSlow(data.x, data.y, data.a); } else if(cmd == COMMAND::MOVE_FINE) { RobotServer::PositionData data = server_.getMoveData(); controller_.moveToPositionFine(data.x, data.y, data.a); } else if(cmd == COMMAND::MOVE_FINE_STOP_VISION) { if(!camera_tracker_->running()) { PLOGW << "Cannot start MOVE_FINE_STOP_VISION if camera tracker isn't running"; return false; } RobotServer::PositionData data = server_.getMoveData(); controller_.moveToPositionFine(data.x, data.y, data.a); resetCameraStopTriggers(); camera_motion_start_time_ = ClockFactory::getFactoryInstance()->get_clock()->now(); fine_move_target_ = {data.x, data.y, data.a}; } else if(cmd == COMMAND::MOVE_CONST_VEL) { RobotServer::VelocityData data = server_.getVelocityData(); controller_.moveConstVel(data.vx, data.vy, data.va, data.t); } else if (cmd == COMMAND::MOVE_WITH_VISION) { RobotServer::PositionData data = server_.getMoveData(); controller_.moveWithVision(data.x, data.y, data.a); } else if(cmd == COMMAND::PLACE_TRAY) { bool ok = tray_controller_.place(); if(!ok) statusUpdater_.setErrorStatus(); return ok; } else if(cmd == COMMAND::LOAD_TRAY) { bool ok = tray_controller_.load(); if(!ok) statusUpdater_.setErrorStatus(); return ok; } else if(cmd == COMMAND::INITIALIZE_TRAY) { tray_controller_.initialize(); } else if (cmd == COMMAND::WAIT_FOR_LOCALIZATION) { wait_for_localize_helper_.start(); } else { PLOGW.printf("Unknown command!"); return false; } return true; } 
// Returns true when the given command's underlying action has finished.
// NONE is trivially complete; unknown commands log an error and report
// complete so the state machine cannot get stuck.
bool Robot::checkForCmdComplete(COMMAND cmd)
{
    if (cmd == COMMAND::NONE)
    {
        return true;
    }
    else if(cmd == COMMAND::MOVE ||
            cmd == COMMAND::MOVE_REL ||
            cmd == COMMAND::MOVE_FINE ||
            cmd == COMMAND::MOVE_CONST_VEL ||
            cmd == COMMAND::MOVE_WITH_VISION ||
            cmd == COMMAND::MOVE_REL_SLOW ||
            cmd == COMMAND::MOVE_FINE_STOP_VISION)
    {
        // All motion commands are complete once the trajectory stops running
        return !controller_.isTrajectoryRunning();
    }
    else if(cmd == COMMAND::PLACE_TRAY ||
            cmd == COMMAND::LOAD_TRAY ||
            cmd == COMMAND::INITIALIZE_TRAY)
    {
        return !tray_controller_.isActionRunning();
    }
    else if (cmd == COMMAND::WAIT_FOR_LOCALIZATION)
    {
        return wait_for_localize_helper_.isDone();
    }
    else
    {
        PLOGE.printf("Completion check not implimented for command: %i",cmd);
        return true;
    }
}

// Decides whether a MOVE_FINE_STOP_VISION move should be stopped early based
// on camera observations. Requires multiple camera frames with timestamps
// after the motion started before triggering; a failed camera read resets
// the trigger state.
bool Robot::checkForCameraStopTrigger()
{
    // Only relevant while a MOVE_FINE_STOP_VISION command is active and the
    // stop hasn't already fired.
    if(curCmd_ != COMMAND::MOVE_FINE_STOP_VISION) return false;
    if(camera_stop_triggered_) return false;
    CameraTrackerOutput camera_output = camera_tracker_->getPoseFromCamera();
    if(camera_output.ok)
    {
        // Trigger N is "armed" once its recorded timestamp postdates the move start
        bool camera_trigger_1_ = camera_trigger_time_1_ > camera_motion_start_time_;
        bool camera_trigger_2 = camera_trigger_time_2_ > camera_motion_start_time_;
        bool timestamp_after_1 = camera_output.timestamp > camera_trigger_time_1_;
        bool timestamp_after_2 = camera_output.timestamp > camera_trigger_time_2_;
        // NOTE(review): the duration_cast template argument appears stripped by
        // extraction (e.g. std::chrono::duration_cast<std::chrono::milliseconds>)
        // — restore before compiling.
        bool timestamp_difference = std::chrono::duration_cast(camera_output.timestamp - camera_trigger_time_1_).count() > 0;
        Point current_pos = controller_.getCurrentPosition();
        Eigen::Vector2f dp = {fine_move_target_.x - current_pos.x, fine_move_target_.y - current_pos.y};
        // Hard-coded 0.5 translational tolerance (units per Point, presumably
        // meters — TODO confirm against controller conventions)
        bool pos_in_tolerance = dp.norm() < 0.5;
        if(camera_trigger_1_ && camera_trigger_2 && timestamp_after_1 && timestamp_after_2 && pos_in_tolerance)
        {
            // Both triggers armed with fresh data and we are close to target: stop
            return true;
        }
        else if(camera_trigger_1_ && timestamp_after_1 && timestamp_difference)
        {
            // First trigger armed and a strictly later frame arrived: arm the
            // second trigger and stop
            camera_trigger_time_2_ = camera_output.timestamp;
            return true;
        }
        else
        {
            // Arm/refresh the first trigger with the latest frame
            camera_trigger_time_1_ = camera_output.timestamp;
            return false;
        }
    }
    else
    {
        // Camera data not OK — start the arming sequence over
        resetCameraStopTriggers();
    }
    return false;
}

// Clears the camera-stop state so a new MOVE_FINE_STOP_VISION move starts
// with unarmed triggers.
void Robot::resetCameraStopTriggers()
{
    camera_stop_triggered_ = false;
    camera_trigger_time_1_ = ClockTimePoint::min();
    camera_trigger_time_2_ = ClockTimePoint::min();
}

================================================ FILE: src/robot/src/robot.h ================================================
#ifndef Robot_h
#define Robot_h

#include "MarvelmindWrapper.h"
#include "RobotController.h"
#include "RobotServer.h"
#include "StatusUpdater.h"
#include "TrayController.h"
#include "utils.h"
#include "camera_tracker/CameraTracker.h"

// Helper that reports when localization confidence has been reached (or a
// timeout has expired) after start() is called.
class WaitForLocalizeHelper
{
  public:
    WaitForLocalizeHelper(const StatusUpdater& statusUpdater, float max_timeout, float confidence_threshold);

    bool isDone();

    void start();

  private:
    const StatusUpdater& statusUpdater_;
    Timer timer_;
    float max_timeout_;
    float confidence_threshold_;
};

// Top-level robot object: owns the server, motion controller, tray controller
// and camera tracker, and runs the command-dispatch loop.
class Robot
{
  public:
    Robot();

    void run();

    void runOnce();

    // Used for tests only
    COMMAND getCurrentCommand() { return curCmd_; };
    StatusUpdater::Status getStatus() { return statusUpdater_.getStatus(); };

  private:
    bool checkForCmdComplete(COMMAND cmd);
    bool tryStartNewCmd(COMMAND cmd);
    bool checkForCameraStopTrigger();
    void resetCameraStopTriggers();

    StatusUpdater statusUpdater_;
    RobotServer server_;
    RobotController controller_;
    TrayController tray_controller_;
    MarvelmindWrapper mm_wrapper_;
    TimeRunningAverage position_time_averager_; // Handles keeping average of the position update timing
    TimeRunningAverage robot_loop_time_averager_;
    WaitForLocalizeHelper wait_for_localize_helper_;
    RateController vision_print_rate_;
    CameraTrackerBase* camera_tracker_;
    // State for the MOVE_FINE_STOP_VISION camera-stop logic
    ClockTimePoint camera_motion_start_time_;
    ClockTimePoint camera_trigger_time_1_;
    ClockTimePoint camera_trigger_time_2_;
    bool camera_stop_triggered_;
    Point fine_move_target_;
    COMMAND curCmd_;
};

#endif

================================================ FILE: src/robot/src/robot_controller_modes/RobotControllerModeBase.cpp ================================================
#include "RobotControllerModeBase.h"

RobotControllerModeBase::RobotControllerModeBase(bool
fake_perfect_motion) :
    move_start_timer_(),
    move_running_(false),
    fake_perfect_motion_(fake_perfect_motion)
{
}

// Marks a move as running and resets both the move-start and per-loop timers.
void RobotControllerModeBase::startMove()
{
    move_running_ = true;
    move_start_timer_.reset();
    loop_timer_.reset();
}

================================================ FILE: src/robot/src/robot_controller_modes/RobotControllerModeBase.h ================================================
#ifndef RobotControllerModeBase_h
#define RobotControllerModeBase_h

#include "SmoothTrajectoryGenerator.h"
#include "utils.h"

// Abstract base for robot controller modes: each mode produces a target
// velocity per cycle and reports when its move is complete.
class RobotControllerModeBase
{
  public:
    RobotControllerModeBase(bool fake_perfect_motion);

    virtual Velocity computeTargetVelocity(Point current_position, Velocity current_velocity, bool log_this_cycle) = 0;

    virtual bool checkForMoveComplete(Point current_position, Velocity current_velocity) = 0;

  protected:

    // Position/velocity error bounds used to decide move completion
    struct TrajectoryTolerances
    {
        float trans_pos_err;
        float ang_pos_err;
        float trans_vel_err;
        float ang_vel_err;
    };

    void startMove();

    Timer move_start_timer_;
    Timer loop_timer_;
    bool move_running_;
    bool fake_perfect_motion_; // When true, command the trajectory velocity directly (no feedback)
};

#endif //RobotControllerModeBase_h

================================================ FILE: src/robot/src/robot_controller_modes/RobotControllerModePosition.cpp ================================================
#include "RobotControllerModePosition.h"
#include "constants.h"
// NOTE(review): header name appears stripped by extraction (likely a plog
// header) — restore before compiling.
#include

// Loads coarse/fine completion tolerances and PID gains from the config file
// and builds the x/y/angle position controllers.
RobotControllerModePosition::RobotControllerModePosition(bool fake_perfect_motion) :
    RobotControllerModeBase(fake_perfect_motion),
    traj_gen_(),
    limits_mode_(LIMITS_MODE::FINE),
    goal_pos_(0,0,0),
    current_target_()
{
    coarse_tolerances_.trans_pos_err = cfg.lookup("motion.translation.position_threshold.coarse");
    coarse_tolerances_.ang_pos_err = cfg.lookup("motion.rotation.position_threshold.coarse");
    coarse_tolerances_.trans_vel_err = cfg.lookup("motion.translation.velocity_threshold.coarse");
    coarse_tolerances_.ang_vel_err = cfg.lookup("motion.rotation.velocity_threshold.coarse");
    fine_tolerances_.trans_pos_err = cfg.lookup("motion.translation.position_threshold.fine");
    fine_tolerances_.ang_pos_err = cfg.lookup("motion.rotation.position_threshold.fine");
    fine_tolerances_.trans_vel_err = cfg.lookup("motion.translation.velocity_threshold.fine");
    fine_tolerances_.ang_vel_err = cfg.lookup("motion.rotation.velocity_threshold.fine");

    // x and y share the same translational gains
    PositionController::Gains position_gains;
    position_gains.kp = cfg.lookup("motion.translation.gains.kp");
    position_gains.ki = cfg.lookup("motion.translation.gains.ki");
    position_gains.kd = cfg.lookup("motion.translation.gains.kd");
    x_controller_ = PositionController(position_gains);
    y_controller_ = PositionController(position_gains);

    PositionController::Gains angle_gains;
    angle_gains.kp = cfg.lookup("motion.rotation.gains.kp");
    angle_gains.ki = cfg.lookup("motion.rotation.gains.ki");
    angle_gains.kd = cfg.lookup("motion.rotation.gains.kd");
    a_controller_ = PositionController(angle_gains);
}

// Generates a point-to-point trajectory to the target; only marks the move
// started when trajectory generation succeeds.
bool RobotControllerModePosition::startMove(Point current_position, Point target_position, LIMITS_MODE limits_mode)
{
    limits_mode_ = limits_mode;
    goal_pos_ = target_position;
    bool ok = traj_gen_.generatePointToPointTrajectory(current_position, target_position, limits_mode);
    if(ok) RobotControllerModeBase::startMove();
    return ok;
}

// Looks up the trajectory target for the current time and runs the per-axis
// position controllers (or passes the trajectory velocity straight through in
// fake-perfect-motion mode).
Velocity RobotControllerModePosition::computeTargetVelocity(Point current_position, Velocity current_velocity, bool log_this_cycle)
{
    float dt_from_traj_start = move_start_timer_.dt_s();
    current_target_ = traj_gen_.lookup(dt_from_traj_start);

    // Print motion estimates to log
    PLOGD_IF_(MOTION_LOG_ID, log_this_cycle) << "Target: " << current_target_.toString();
    PLOGD_IF_(MOTION_LOG_ID, log_this_cycle) << "Est Vel: " << current_velocity.toString();
    PLOGD_IF_(MOTION_LOG_ID, log_this_cycle) << "Est Pos: " << current_position.toString();

    Velocity output;
    if(fake_perfect_motion_)
    {
        output = current_target_.velocity;
    }
    else
    {
        float dt_since_last_loop = loop_timer_.dt_s();
        loop_timer_.reset();
        output.vx =
x_controller_.compute(current_target_.position.x, current_position.x, current_target_.velocity.vx, current_velocity.vx, dt_since_last_loop);
        output.vy = y_controller_.compute(current_target_.position.y, current_position.y, current_target_.velocity.vy, current_velocity.vy, dt_since_last_loop);
        output.va = a_controller_.compute(current_target_.position.a, current_position.a, current_target_.velocity.va, current_velocity.va, dt_since_last_loop);
    }

    // CSV-style motion logging for offline plotting
    PLOGI_(MOTION_CSV_LOG_ID).printf("time,%.4f",dt_from_traj_start);
    PLOGI_(MOTION_CSV_LOG_ID).printf("pos,%.4f,%.4f,%.4f",current_position.x,current_position.y,current_position.a);
    PLOGI_(MOTION_CSV_LOG_ID).printf("target_pos,%.4f,%.4f,%.4f",current_target_.position.x,current_target_.position.y,current_target_.position.a);
    PLOGI_(MOTION_CSV_LOG_ID).printf("vel,%.4f,%.4f,%.4f",current_velocity.vx,current_velocity.vy,current_velocity.va);
    PLOGI_(MOTION_CSV_LOG_ID).printf("target_vel,%.4f,%.4f,%.4f",current_target_.velocity.vx,current_target_.velocity.vy,current_target_.velocity.va);
    PLOGI_(MOTION_CSV_LOG_ID).printf("control_vel,%.4f,%.4f,%.4f",output.vx,output.vy,output.va);

    return output;
}

// A position move is complete when the commanded trajectory velocity is zero
// AND both the actual position and velocity are inside the tolerances for the
// active limits mode (fine vs coarse).
bool RobotControllerModePosition::checkForMoveComplete(Point current_position, Velocity current_velocity)
{
    // Get the right threshold values
    TrajectoryTolerances tol = limits_mode_ == LIMITS_MODE::FINE ? fine_tolerances_ : coarse_tolerances_;

    // Verify our target velocity is zero (i.e. we reached the end of the trajectory)
    bool zero_cmd_vel = current_target_.velocity.nearZero();

    // Verify actual translational and rotational positions are within tolerance
    Eigen::Vector2f dp = {goal_pos_.x - current_position.x, goal_pos_.y - current_position.y};
    bool pos_in_tolerance = dp.norm() < tol.trans_pos_err &&
                            fabs(angle_diff(goal_pos_.a, current_position.a)) < tol.ang_pos_err;

    // Verify actual translational and rotational velocities are within tolerance
    Eigen::Vector2f dv = {current_velocity.vx, current_velocity.vy};
    bool vel_in_tolerance = dv.norm() < tol.trans_vel_err && fabs(current_velocity.va) < tol.ang_vel_err;

    // Trajectory is done when we aren't commanding any velocity, and both our
    // actual velocity and position are within the correct tolerance.
    bool traj_complete = zero_cmd_vel && vel_in_tolerance && pos_in_tolerance;
    if(traj_complete) { move_running_ = false; }
    return traj_complete;
}

================================================ FILE: src/robot/src/robot_controller_modes/RobotControllerModePosition.h ================================================
#ifndef RobotControllerModePosition_h
#define RobotControllerModePosition_h

#include "RobotControllerModeBase.h"
#include "SmoothTrajectoryGenerator.h"
#include "utils.h"

// Controller mode that follows a smooth point-to-point trajectory with
// per-axis PID position feedback.
class RobotControllerModePosition : public RobotControllerModeBase
{
  public:
    RobotControllerModePosition(bool fake_perfect_motion);

    bool startMove(Point current_position, Point target_position, LIMITS_MODE limits_mode);

    virtual Velocity computeTargetVelocity(Point current_position, Velocity current_velocity, bool log_this_cycle) override;

    virtual bool checkForMoveComplete(Point current_position, Velocity current_velocity) override;

  protected:
    SmoothTrajectoryGenerator traj_gen_;
    LIMITS_MODE limits_mode_;
    Point goal_pos_;
    PVTPoint current_target_;
    TrajectoryTolerances coarse_tolerances_;
    TrajectoryTolerances fine_tolerances_;
    PositionController x_controller_;
    PositionController y_controller_;
    PositionController a_controller_;
};
#endif //RobotControllerModePosition_h

================================================ FILE: src/robot/src/robot_controller_modes/RobotControllerModeStopFast.cpp ================================================
#include "RobotControllerModeStopFast.h"
#include "constants.h"
// NOTE(review): header name appears stripped by extraction (likely a plog
// header) — restore before compiling.
#include

// Loads fine completion tolerances from config, sets the max deceleration from
// the coarse acceleration limits, and builds the x/y/angle controllers.
RobotControllerModeStopFast::RobotControllerModeStopFast(bool fake_perfect_motion) :
    RobotControllerModeBase(fake_perfect_motion),
    current_target_()
{
    fine_tolerances_.trans_pos_err = cfg.lookup("motion.translation.position_threshold.fine");
    fine_tolerances_.ang_pos_err = cfg.lookup("motion.rotation.position_threshold.fine");
    fine_tolerances_.trans_vel_err = cfg.lookup("motion.translation.velocity_threshold.fine");
    fine_tolerances_.ang_vel_err = cfg.lookup("motion.rotation.velocity_threshold.fine");

    // Decelerate at the coarse max acceleration on each axis {x, y, angle}
    max_decel_ = {cfg.lookup("motion.translation.max_acc.coarse"),
                  cfg.lookup("motion.translation.max_acc.coarse"),
                  cfg.lookup("motion.rotation.max_acc.coarse")};

    PositionController::Gains position_gains;
    position_gains.kp = cfg.lookup("motion.translation.gains.kp");
    position_gains.ki = cfg.lookup("motion.translation.gains.ki");
    position_gains.kd = cfg.lookup("motion.translation.gains.kd");
    x_controller_ = PositionController(position_gains);
    y_controller_ = PositionController(position_gains);

    PositionController::Gains angle_gains;
    angle_gains.kp = cfg.lookup("motion.rotation.gains.kp");
    angle_gains.ki = cfg.lookup("motion.rotation.gains.ki");
    angle_gains.kd = cfg.lookup("motion.rotation.gains.kd");
    a_controller_ = PositionController(angle_gains);
}

// Begins an immediate controlled stop: records the sign of the current
// velocity per axis and sets the deceleration opposing it.
void RobotControllerModeStopFast::startMove(Point current_position, Velocity current_velocity)
{
    (void) current_position;
    initial_vel_sign_ = {
        sgn(current_velocity.vx),
        sgn(current_velocity.vy),
        sgn(current_velocity.va),
    };
    current_decel_ = {
        -initial_vel_sign_[0] * max_decel_[0],
        -initial_vel_sign_[1] * max_decel_[1],
        -initial_vel_sign_[2] * max_decel_[2],
    };
    PLOGW.printf("Starting STOP_FAST from velocity %s at decel: %f,%f,%f",current_velocity.toString().c_str(), current_decel_[0],current_decel_[1], current_decel_[2]);
    RobotControllerModeBase::startMove();
}

// Integrates the constant deceleration forward one loop step to get the
// target position/velocity, clamping an axis to zero once its velocity sign
// flips (so we never decelerate past a standstill).
Velocity RobotControllerModeStopFast::computeTargetVelocity(Point current_position, Velocity current_velocity, bool log_this_cycle)
{
    float dt_from_traj_start = move_start_timer_.dt_s();
    float dt = loop_timer_.dt_s();

    Point target_pos = {
        current_position.x + current_velocity.vx * dt + 0.5f * current_decel_[0] * dt * dt,
        current_position.y + current_velocity.vy * dt + 0.5f * current_decel_[1] * dt * dt,
        current_position.a + current_velocity.va * dt + 0.5f * current_decel_[2] * dt * dt,
    };
    Velocity target_vel = {
        current_velocity.vx + current_decel_[0]*dt,
        current_velocity.vy + current_decel_[1]*dt,
        current_velocity.va + current_decel_[2]*dt,
    };

    // If an axis has crossed through zero relative to its starting sign, stop commanding it
    if(current_velocity.vx != 0 && sgn(current_velocity.vx) != initial_vel_sign_[0]) target_vel.vx = 0;
    if(current_velocity.vy != 0 && sgn(current_velocity.vy) != initial_vel_sign_[1]) target_vel.vy = 0;
    if(current_velocity.va != 0 && sgn(current_velocity.va) != initial_vel_sign_[2]) target_vel.va = 0;

    current_target_ = {target_pos, target_vel, dt_from_traj_start};

    // Print motion estimates to log
    PLOGD_IF_(MOTION_LOG_ID, log_this_cycle) << "Target: " << current_target_.toString();
    PLOGD_IF_(MOTION_LOG_ID, log_this_cycle) << "Est Vel: " << current_velocity.toString();
    PLOGD_IF_(MOTION_LOG_ID, log_this_cycle) << "Est Pos: " << current_position.toString();

    Velocity output;
    if(fake_perfect_motion_)
    {
        output = current_target_.velocity;
    }
    else
    {
        float dt_since_last_loop = loop_timer_.dt_s();
        loop_timer_.reset();
        // Position targets are zeroed here: only velocity tracking is used for the stop
        output.vx = x_controller_.compute(0, 0, current_target_.velocity.vx, current_velocity.vx, dt_since_last_loop);
        output.vy = y_controller_.compute(0, 0, current_target_.velocity.vy, current_velocity.vy, dt_since_last_loop);
        output.va = a_controller_.compute(0, 0, current_target_.velocity.va, current_velocity.va, dt_since_last_loop);
    }
    // CSV-style motion logging for offline plotting
    PLOGI_(MOTION_CSV_LOG_ID).printf("time,%.4f",dt_from_traj_start);
    PLOGI_(MOTION_CSV_LOG_ID).printf("pos,%.4f,%.4f,%.4f",current_position.x,current_position.y,current_position.a);
    PLOGI_(MOTION_CSV_LOG_ID).printf("target_pos,%.4f,%.4f,%.4f",current_target_.position.x,current_target_.position.y,current_target_.position.a);
    PLOGI_(MOTION_CSV_LOG_ID).printf("vel,%.4f,%.4f,%.4f",current_velocity.vx,current_velocity.vy,current_velocity.va);
    PLOGI_(MOTION_CSV_LOG_ID).printf("target_vel,%.4f,%.4f,%.4f",current_target_.velocity.vx,current_target_.velocity.vy,current_target_.velocity.va);
    PLOGI_(MOTION_CSV_LOG_ID).printf("control_vel,%.4f,%.4f,%.4f",output.vx,output.vy,output.va);

    return output;
}

// A fast stop is complete once the commanded velocity is zero and the actual
// velocity is within the fine tolerances (position is irrelevant for a stop).
bool RobotControllerModeStopFast::checkForMoveComplete(Point current_position, Velocity current_velocity)
{
    (void) current_position;

    // Verify our target velocity is zero (i.e. we reached the end of the trajectory)
    bool zero_cmd_vel = current_target_.velocity.nearZero();

    // Verify actual translational and rotational velocities are within tolerance
    Eigen::Vector2f dv = {current_velocity.vx, current_velocity.vy};
    bool vel_in_tolerance = dv.norm() < fine_tolerances_.trans_vel_err &&
                            fabs(current_velocity.va) < fine_tolerances_.ang_vel_err;

    // Trajectory is done when we aren't commanding any velocity and our actual
    // velocity is within the correct tolerance.
    bool traj_complete = zero_cmd_vel && vel_in_tolerance;
    if(traj_complete) { move_running_ = false; }
    return traj_complete;
}

================================================ FILE: src/robot/src/robot_controller_modes/RobotControllerModeStopFast.h ================================================
#ifndef RobotControllerModeStopFast_h
#define RobotControllerModeStopFast_h

#include "RobotControllerModeBase.h"
#include "SmoothTrajectoryGenerator.h"
#include "utils.h"

// Controller mode that brings the robot to a controlled stop at the
// configured maximum deceleration.
class RobotControllerModeStopFast : public RobotControllerModeBase
{
  public:
    RobotControllerModeStopFast(bool fake_perfect_motion);

    void startMove(Point current_position, Velocity current_velocity);

    virtual Velocity computeTargetVelocity(Point current_position, Velocity current_velocity, bool log_this_cycle) override;

    virtual bool checkForMoveComplete(Point current_position, Velocity current_velocity) override;

  protected:
    PVTPoint current_target_;
    TrajectoryTolerances fine_tolerances_;
    // NOTE(review): the std::vector element types appear stripped by
    // extraction (likely std::vector<float>) — restore before compiling.
    std::vector max_decel_;
    std::vector current_decel_;
    std::vector initial_vel_sign_;
    PositionController x_controller_;
    PositionController y_controller_;
    PositionController a_controller_;
};

#endif //RobotControllerModeStopFast_h

================================================ FILE: src/robot/src/robot_controller_modes/RobotControllerModeVision.cpp ================================================
#include "RobotControllerModeVision.h"
#include "constants.h"
// NOTE(review): header name appears stripped by extraction (likely a plog
// header) — restore before compiling.
#include
#include "camera_tracker/CameraTrackerFactory.h"

// Loads vision tolerances and vision-specific PID gains from config, and
// builds a 3-state (x, y, angle) Kalman filter fusing odometry prediction
// with camera pose measurements.
RobotControllerModeVision::RobotControllerModeVision(bool fake_perfect_motion, StatusUpdater& status_updater) :
    RobotControllerModeBase(fake_perfect_motion),
    status_updater_(status_updater),
    traj_gen_(),
    goal_point_(0,0,0),
    current_point_(0,0,0),
    current_target_(),
    camera_tracker_(CameraTrackerFactory::getFactoryInstance()->get_camera_tracker()),
    traj_done_timer_(),
    kf_(3,3)
{
    tolerances_.trans_pos_err = cfg.lookup("motion.translation.position_threshold.vision");
    tolerances_.ang_pos_err =
cfg.lookup("motion.rotation.position_threshold.vision");
    tolerances_.trans_vel_err = cfg.lookup("motion.translation.velocity_threshold.vision");
    tolerances_.ang_vel_err = cfg.lookup("motion.rotation.velocity_threshold.vision");

    PositionController::Gains position_gains;
    position_gains.kp = cfg.lookup("motion.translation.gains_vision.kp");
    position_gains.ki = cfg.lookup("motion.translation.gains_vision.ki");
    position_gains.kd = cfg.lookup("motion.translation.gains_vision.kd");
    x_controller_ = PositionController(position_gains);
    y_controller_ = PositionController(position_gains);

    PositionController::Gains angle_gains;
    angle_gains.kp = cfg.lookup("motion.rotation.gains_vision.kp");
    angle_gains.ki = cfg.lookup("motion.rotation.gains_vision.ki");
    angle_gains.kd = cfg.lookup("motion.rotation.gains_vision.kd");
    a_controller_ = PositionController(angle_gains);

    // Identity state/input/measurement models with diagonal process (Q) and
    // measurement (R) covariances from config
    Eigen::MatrixXf A = Eigen::MatrixXf::Identity(3,3);
    Eigen::MatrixXf B = Eigen::MatrixXf::Identity(3,3);
    Eigen::MatrixXf C = Eigen::MatrixXf::Identity(3,3);
    Eigen::MatrixXf Q(3,3);
    Q << cfg.lookup("vision_tracker.kf.predict_trans_cov"),0,0,
         0,cfg.lookup("vision_tracker.kf.predict_trans_cov"),0,
         0,0,cfg.lookup("vision_tracker.kf.predict_angle_cov");
    Eigen::MatrixXf R(3,3);
    R << cfg.lookup("vision_tracker.kf.meas_trans_cov"),0,0,
         0,cfg.lookup("vision_tracker.kf.meas_trans_cov"),0,
         0,0,cfg.lookup("vision_tracker.kf.meas_angle_cov");
    kf_ = KalmanFilter(A,B,C,Q,R);
}

// Starts a vision-relative move: seeds the current point from the camera pose
// and generates a trajectory to the target. Fails if no valid camera pose is
// available.
bool RobotControllerModeVision::startMove(Point target_point)
{
    CameraTrackerOutput tracker_output = camera_tracker_->getPoseFromCamera();
    if(!tracker_output.ok)
    {
        PLOGE << "Cannot start vision move, camera pose not ok";
        return false;
    }
    current_point_ = tracker_output.pose;
    goal_point_ = target_point;
    bool ok = traj_gen_.generatePointToPointTrajectory(current_point_, target_point, LIMITS_MODE::VISION);
    if(ok) RobotControllerModeBase::startMove();
    return ok;
}

// Fuses odometry (prediction) and camera pose (update) in the Kalman filter,
// runs the vision PID controllers in the local robot frame, and returns the
// commanded velocity rotated back into the global frame.
Velocity RobotControllerModeVision::computeTargetVelocity(Point current_position, Velocity current_velocity, bool log_this_cycle)
{
    // Get current target global position and global velocity according to the trajectory
    float dt_from_traj_start = move_start_timer_.dt_s();
    current_target_ = traj_gen_.lookup(dt_from_traj_start);

    // Get previous loop time
    float dt_since_last_loop = loop_timer_.dt_s();
    loop_timer_.reset();

    // Convert global velocity into local robot frame and use it to predict the filter
    Eigen::Vector3f local_vel;
    local_vel[0] = cos(current_position.a) * current_velocity.vx + sin(current_position.a) * current_velocity.vy;
    local_vel[1] = - sin(current_position.a) * current_velocity.vx + cos(current_position.a) * current_velocity.vy;
    local_vel[2] = current_velocity.va;
    Eigen::Vector3f udt = local_vel * dt_since_last_loop;
    if(udt.norm() > 0)
    {
        kf_.predict(udt);
    }

    // Get latest pose from cameras and do update step if data is available
    CameraTrackerOutput tracker_output = camera_tracker_->getPoseFromCamera();
    if(tracker_output.ok && tracker_output.timestamp > last_vision_update_time_)
    {
        // Get the current distance measurements from the sensors and update the filter
        Eigen::Vector3f point_vec = {tracker_output.pose.x,tracker_output.pose.y,tracker_output.pose.a};
        kf_.update(point_vec);
        last_vision_update_time_ = tracker_output.timestamp;
    }

    // Update current point from state
    Eigen::VectorXf state = kf_.state();
    current_point_ = {state[0], state[1], state[2]};
    status_updater_.updateVisionControllerPose(current_point_);

    // Print motion estimates to log
    PLOGD_IF_(MOTION_LOG_ID, log_this_cycle) << "\nTarget: " << current_target_.toString();
    PLOGD_IF_(MOTION_LOG_ID, log_this_cycle) << "Est Vel: " << current_velocity.toString();
    PLOGD_IF_(MOTION_LOG_ID, log_this_cycle) << "Est camera pose: " << current_point_.toString();

    // Compute control
    Velocity output_local;
    if(fake_perfect_motion_)
    {
        output_local = current_target_.velocity;
    }
    else
    {
        output_local.vx = x_controller_.compute(current_target_.position.x, current_point_.x, current_target_.velocity.vx,
local_vel[0], dt_since_last_loop);
        output_local.vy = y_controller_.compute(current_target_.position.y, current_point_.y, current_target_.velocity.vy, local_vel[1], dt_since_last_loop);
        output_local.va = a_controller_.compute(current_target_.position.a, current_point_.a, current_target_.velocity.va, local_vel[2], dt_since_last_loop);
    }

    // Rotate this commanded velocity back into global frame because currently it is in the local robot frame
    Velocity output_global;
    output_global.vx = cos(current_position.a) * output_local.vx - sin(current_position.a) * output_local.vy;
    output_global.vy = sin(current_position.a) * output_local.vx + cos(current_position.a) * output_local.vy;
    output_global.va = output_local.va;

    // CSV-style motion logging for offline plotting (pose here is the filtered camera pose)
    PLOGI_(MOTION_CSV_LOG_ID).printf("time,%.4f",dt_from_traj_start);
    PLOGI_(MOTION_CSV_LOG_ID).printf("pos,%.4f,%.4f,%.4f",current_point_.x,current_point_.y,current_point_.a);
    PLOGI_(MOTION_CSV_LOG_ID).printf("target_pos,%.4f,%.4f,%.4f",current_target_.position.x,current_target_.position.y,current_target_.position.a);
    PLOGI_(MOTION_CSV_LOG_ID).printf("vel,%.4f,%.4f,%.4f",local_vel[0],local_vel[1],local_vel[2]);
    PLOGI_(MOTION_CSV_LOG_ID).printf("target_vel,%.4f,%.4f,%.4f",current_target_.velocity.vx,current_target_.velocity.vy,current_target_.velocity.va);
    PLOGI_(MOTION_CSV_LOG_ID).printf("control_vel,%.4f,%.4f,%.4f",output_local.vx,output_local.vy,output_local.va);

    return output_global;
}

// A vision move is complete only after completion conditions (zero commanded
// velocity, filtered pose and velocity within tolerance) have held
// continuously for 0.8 seconds — this debounces noisy camera estimates.
bool RobotControllerModeVision::checkForMoveComplete(Point current_position, Velocity current_velocity)
{
    (void) current_position;

    // Verify our target velocity is zero (i.e. we reached the end of the trajectory)
    bool zero_cmd_vel = current_target_.velocity.nearZero();

    // Verify actual translational and rotational positions are within tolerance
    Eigen::Vector2f dp = {goal_point_.x - current_point_.x, goal_point_.y - current_point_.y};
    bool pos_in_tolerance = dp.norm() < tolerances_.trans_pos_err &&
                            fabs(angle_diff(goal_point_.a, current_point_.a)) < tolerances_.ang_pos_err;

    // Verify actual translational and rotational velocities are within tolerance
    Eigen::Vector2f dv = {current_velocity.vx, current_velocity.vy};
    bool vel_in_tolerance = dv.norm() < tolerances_.trans_vel_err && fabs(current_velocity.va) < tolerances_.ang_vel_err;

    // Trajectory is done when we aren't commanding any velocity, and both our
    // actual velocity and position are within the correct tolerance.
    bool maybe_traj_complete = zero_cmd_vel && vel_in_tolerance && pos_in_tolerance;
    if (!maybe_traj_complete) traj_done_timer_.reset();
    else PLOGI_(MOTION_LOG_ID) << "Camera move maybe done";
    if(maybe_traj_complete && traj_done_timer_.dt_s() > 0.8)
    {
        move_running_ = false;
        PLOGI_(MOTION_LOG_ID) << "Camera move done";
        return true;
    }
    return false;
}

================================================ FILE: src/robot/src/robot_controller_modes/RobotControllerModeVision.h ================================================
#ifndef RobotControllerModeVision_h
#define RobotControllerModeVision_h

#include "RobotControllerModeBase.h"
#include "SmoothTrajectoryGenerator.h"
#include "utils.h"
#include "camera_tracker/CameraTracker.h"
#include "KalmanFilter.h"
#include "StatusUpdater.h"

// Controller mode that moves to a target in the camera (vision) frame, using
// a Kalman filter to fuse odometry with camera pose measurements.
class RobotControllerModeVision : public RobotControllerModeBase
{
  public:
    RobotControllerModeVision(bool fake_perfect_motion, StatusUpdater& status_updater);

    bool startMove(Point target_point);

    virtual Velocity computeTargetVelocity(Point current_position, Velocity current_velocity, bool log_this_cycle) override;

    virtual bool checkForMoveComplete(Point current_position, Velocity current_velocity) override;
  protected:
    StatusUpdater& status_updater_;
    SmoothTrajectoryGenerator traj_gen_;
    Point goal_point_;      // Target pose in the vision frame
    Point current_point_;   // Filtered pose estimate from the Kalman filter
    PVTPoint current_target_;
    CameraTrackerBase* camera_tracker_;
    Timer traj_done_timer_; // Debounce timer for move completion
    ClockTimePoint last_vision_update_time_;
    TrajectoryTolerances tolerances_;
    PositionController x_controller_;
    PositionController y_controller_;
    PositionController a_controller_;
    KalmanFilter kf_;
};

#endif //RobotControllerModeVision_h

================================================ FILE: src/robot/src/serial/MockSerialComms.cpp ================================================
#include "MockSerialComms.h"
// NOTE(review): header names appear stripped by extraction — restore before
// compiling.
#include
#include

// In-memory serial stand-in for tests: "sent" messages are queued so a test
// can read them back via mock_rcv_*, and mock_send injects messages that the
// code under test will see via rcv_*.
MockSerialComms::MockSerialComms(std::string portName) :
    SerialCommsBase(),
    send_base_data_(),
    send_lift_data_(),
    send_distance_data_(),
    rcv_base_data_(),
    rcv_lift_data_(),
    rcv_distance_data_(),
    port_(portName)
{
    // A mock port is always "connected"
    connected_ = true;
}

MockSerialComms::~MockSerialComms()
{
}

// Routes an outgoing message to the queue matching its "base:"/"lift:" prefix
// (prefix is stripped); anything else is treated as distance data.
void MockSerialComms::send(std::string msg)
{
    if (msg.rfind("base:", 0) == 0)
    {
        send_base_data_.push(msg.substr(5, std::string::npos));
    }
    else if (msg.rfind("lift:", 0) == 0)
    {
        send_lift_data_.push(msg.substr(5, std::string::npos));
    }
    else
    {
        // Distance send from pi -> arduino doesn't have dist prefix
        send_distance_data_.push(msg);
    }
}

// Pops the next injected base message, or "" if none queued.
std::string MockSerialComms::rcv_base()
{
    if(rcv_base_data_.empty()) { return ""; }
    std::string outdata = rcv_base_data_.front();
    rcv_base_data_.pop();
    return outdata;
}

// Pops the next injected lift message, or "" if none queued.
std::string MockSerialComms::rcv_lift()
{
    if(rcv_lift_data_.empty()) { return ""; }
    std::string outdata = rcv_lift_data_.front();
    rcv_lift_data_.pop();
    return outdata;
}

// Pops the next injected distance message, or "" if none queued.
std::string MockSerialComms::rcv_distance()
{
    if(rcv_distance_data_.empty()) { return ""; }
    std::string outdata = rcv_distance_data_.front();
    rcv_distance_data_.pop();
    return outdata;
}

// Test hook: injects a message as if it arrived over the wire. Requires an
// explicit "base:", "lift:" or "dist:" prefix (stripped before queuing).
void MockSerialComms::mock_send(std::string msg)
{
    if (msg.rfind("base:", 0) == 0)
    {
        rcv_base_data_.push(msg.substr(5, std::string::npos));
    }
    else if (msg.rfind("lift:", 0) == 0)
    {
        rcv_lift_data_.push(msg.substr(5, std::string::npos));
    }
    else if (msg.rfind("dist:", 0) == 0)
    {
        rcv_distance_data_.push(msg.substr(5, std::string::npos));
    }
    else
    {
        PLOGE << "Unknown message type, skipping: " << msg;
    }
}

// Test hook: pops the next message the code under test sent to the base.
std::string MockSerialComms::mock_rcv_base()
{
    if(send_base_data_.empty()) { return ""; }
    std::string outdata = send_base_data_.front();
    send_base_data_.pop();
    return outdata;
}

// Test hook: pops the next message the code under test sent to the lift.
std::string MockSerialComms::mock_rcv_lift()
{
    if(send_lift_data_.empty()) { return ""; }
    std::string outdata = send_lift_data_.front();
    send_lift_data_.pop();
    return outdata;
}

// Test hook: pops the next distance message the code under test sent.
std::string MockSerialComms::mock_rcv_distance()
{
    if(send_distance_data_.empty()) { return ""; }
    std::string outdata = send_distance_data_.front();
    send_distance_data_.pop();
    return outdata;
}

// Empties every queue so a test starts from a clean slate.
void MockSerialComms::purge_data()
{
    while(!send_lift_data_.empty()) { send_lift_data_.pop(); }
    while(!send_base_data_.empty()) { send_base_data_.pop(); }
    while(!send_distance_data_.empty()) { send_distance_data_.pop(); }
    while(!rcv_base_data_.empty()) { rcv_base_data_.pop(); }
    while(!rcv_lift_data_.empty()) { rcv_lift_data_.pop(); }
    while(!rcv_distance_data_.empty()) { rcv_distance_data_.pop(); }
}

================================================ FILE: src/robot/src/serial/MockSerialComms.h ================================================
#ifndef MockSerialComms_h
#define MockSerialComms_h

// NOTE(review): header names appear stripped by extraction (likely <queue>
// and <string>) — restore before compiling.
#include
#include
#include "SerialCommsBase.h"

// Queue-backed mock of the serial link, used for tests.
class MockSerialComms : public SerialCommsBase
{
  public:
    // Constructor
    MockSerialComms(std::string portName);

    virtual ~MockSerialComms();

    void send(std::string msg) override;
    std::string rcv_base() override;
    std::string rcv_lift() override;
    std::string rcv_distance() override;

    // Test-side accessors for injecting and inspecting messages
    void mock_send(std::string msg);
    std::string mock_rcv_lift();
    std::string mock_rcv_base();
    std::string mock_rcv_distance();
    void purge_data();

  protected:
    // NOTE(review): the std::queue element types appear stripped by extraction
    // (likely std::queue<std::string>) — restore before compiling.
    std::queue send_base_data_;
    std::queue send_lift_data_;
    std::queue send_distance_data_;
    std::queue rcv_base_data_;
    std::queue rcv_lift_data_;
    std::queue rcv_distance_data_;
    std::string
port_;
};

#endif

================================================ FILE: src/robot/src/serial/SerialComms.cpp ================================================
#include "SerialComms.h"
// NOTE(review): header name appears stripped by extraction (likely a plog
// header) — restore before compiling.
#include

// Opens the serial port (LibSerial throws from the member initializer if the
// port cannot be opened, so reaching the body implies success).
SerialComms::SerialComms(std::string portName) :
    SerialCommsBase(),
    serial_(portName),
    recvInProgress_(false),
    recvIdx_(0),
    buffer_(""),
    base_data_(),
    lift_data_(),
    distance_data_()
{
    // If we get here, that means serial_ was constructed correctly which means
    // we have a valid connection
    connected_ = true;
    serial_.SetBaudRate(LibSerial::BaudRate::BAUD_115200);
}

SerialComms::~SerialComms()
{
}

// Drains the port, then pops the next base message ("" if none).
std::string SerialComms::rcv_base()
{
    rcv();
    if(base_data_.empty()) { return ""; }
    std::string outdata = base_data_.front();
    base_data_.pop();
    return outdata;
}

// Drains the port, then pops the next lift message ("" if none).
std::string SerialComms::rcv_lift()
{
    rcv();
    if(lift_data_.empty()) { return ""; }
    std::string outdata = lift_data_.front();
    lift_data_.pop();
    return outdata;
}

// Drains the port, then pops the next distance message ("" if none).
std::string SerialComms::rcv_distance()
{
    rcv();
    if(distance_data_.empty()) { return ""; }
    std::string outdata = distance_data_.front();
    distance_data_.pop();
    return outdata;
}

// Reads bytes from the port, framing messages between START_CHAR and END_CHAR,
// and routes at most one completed message per call into the queue matching
// its "DEBUG"/"base:"/"lift:"/"dist:" prefix.
void SerialComms::rcv()
{
    if(!connected_)
    {
        PLOGE.printf("Cannot receive if port isn't connected");
        return;
    }

    bool newData = false;
    const int timeout_ms = 5;
    std::string new_msg;
    // Stop after one complete message so queues are serviced fairly
    while (serial_.IsDataAvailable() && newData == false)
    {
        char rc = ' ';
        try
        {
            serial_.ReadByte(rc, timeout_ms);
        }
        catch (LibSerial::ReadTimeout&)
        {
            PLOGI.printf("Serial timeout");
            break;
        }

        if (recvInProgress_ == true)
        {
            if (rc == START_CHAR)
            {
                // A new start marker mid-message discards the partial message
                buffer_ = "";
            }
            else if (rc != END_CHAR)
            {
                buffer_ += rc;
            }
            else
            {
                recvInProgress_ = false;
                newData = true;
                new_msg = buffer_;
                buffer_ = "";
            }
        }
        else if (rc == START_CHAR)
        {
            recvInProgress_ = true;
        }
    }

    // Figure out what to do with the message based on the identifier
    if (new_msg.rfind("DEBUG", 0) == 0)
    {
        PLOGI << new_msg;
    }
    else if (new_msg.rfind("base:", 0) == 0)
    {
        base_data_.push(new_msg.substr(5, std::string::npos));
    }
    else if (new_msg.rfind("lift:", 0) == 0)
    {
        lift_data_.push(new_msg.substr(5, std::string::npos));
    }
    else if (new_msg.rfind("dist:", 0) == 0)
    {
        distance_data_.push(new_msg.substr(5, std::string::npos));
    }
    else if (new_msg.empty())
    {
        // Do nothing
    }
    else
    {
        PLOGE << "Unknown message type, skipping: " << new_msg;
    }
}

// Wraps a non-empty message in START_CHAR/END_CHAR framing and writes it out.
void SerialComms::send(std::string msg)
{
    if(!connected_)
    {
        PLOGE.printf("Cannot send if port isn't connected");
        return;
    }
    if (msg.length() > 0)
    {
        std::string toSend = START_CHAR + msg + END_CHAR;
        PLOGD.printf("Serial send: %s",toSend.c_str());
        serial_.Write(toSend);
    }
}

================================================ FILE: src/robot/src/serial/SerialComms.h ================================================
#ifndef SerialComms_h
#define SerialComms_h

// NOTE(review): header names appear stripped by extraction (likely
// <libserial/SerialPort.h> and <queue>) — restore before compiling.
#include
#include
#include "SerialCommsBase.h"

// Factory method
// NOTE(review): the unique_ptr template argument appears stripped by
// extraction (likely std::unique_ptr<SerialCommsBase>) — restore before
// compiling.
std::unique_ptr buildSerialComms(std::string portName);

// Real serial link over a LibSerial port, with START_CHAR/END_CHAR framing.
class SerialComms : public SerialCommsBase
{
  public:
    // Constructor
    SerialComms(std::string portName);

    virtual ~SerialComms();

    void send(std::string msg) override;
    std::string rcv_base() override;
    std::string rcv_lift() override;
    std::string rcv_distance() override;

  protected:
    void rcv();

    LibSerial::SerialPort serial_;; // NOTE(review): stray extra semicolon in original
    bool recvInProgress_;
    int recvIdx_;
    std::string buffer_;
    // NOTE(review): the std::queue element types appear stripped by extraction
    // (likely std::queue<std::string>) — restore before compiling.
    std::queue base_data_;
    std::queue lift_data_;
    std::queue distance_data_;
};

#endif

================================================ FILE: src/robot/src/serial/SerialCommsBase.cpp ================================================
#include "SerialCommsBase.h"
// NOTE(review): header name appears stripped by extraction — restore before
// compiling.
#include

// No-op base implementation: reports disconnected and returns empty strings.
SerialCommsBase::SerialCommsBase() : connected_(false) {}

SerialCommsBase::~SerialCommsBase() {}

std::string SerialCommsBase::rcv_base()
{
    return "";
}

std::string SerialCommsBase::rcv_lift()
{
    return "";
}

std::string SerialCommsBase::rcv_distance()
{
    return "";
}

void SerialCommsBase::send(std::string msg)
{
    (void) msg; // Silence warnings
    return;
}

================================================ FILE: src/robot/src/serial/SerialCommsBase.h ================================================
#ifndef
SerialCommsBase_h #define SerialCommsBase_h #include #define START_CHAR '<' #define END_CHAR '>' class SerialCommsBase { public: SerialCommsBase(); virtual ~SerialCommsBase(); virtual void send(std::string msg); virtual std::string rcv_base(); virtual std::string rcv_lift(); virtual std::string rcv_distance(); bool isConnected() {return connected_;}; protected: bool connected_; }; #endif ================================================ FILE: src/robot/src/serial/SerialCommsFactory.cpp ================================================ #include "SerialCommsFactory.h" #include "SerialComms.h" #include "MockSerialComms.h" #include SerialCommsFactory* SerialCommsFactory::instance = NULL; SerialCommsFactory* SerialCommsFactory::getFactoryInstance() { if(!instance) { instance = new SerialCommsFactory; } return instance; } void SerialCommsFactory::set_mode(SERIAL_FACTORY_MODE mode) { mode_ = mode; } SerialCommsBase* SerialCommsFactory::get_serial_comms(std::string portName) { if (comms_objects_.count(portName) == 0) { build_serial_comms(portName); } return comms_objects_[portName].get(); } void SerialCommsFactory::build_serial_comms(std::string portName) { std::unique_ptr serial_comms; if(mode_ == SERIAL_FACTORY_MODE::STANDARD) { try { serial_comms = std::make_unique(portName); PLOGI << "Built SerialComms on port: "<< portName; } catch (const LibSerial::OpenFailed&) { serial_comms = std::make_unique(); PLOGW << "Could not open serial port: " << portName; } } else if (mode_ == SERIAL_FACTORY_MODE::MOCK) { serial_comms = std::make_unique(portName); PLOGI << "Built MockSerialComms for " << portName; } comms_objects_[portName] = std::move(serial_comms); } // Private constructor SerialCommsFactory::SerialCommsFactory() : mode_(SERIAL_FACTORY_MODE::STANDARD), comms_objects_() {} ================================================ FILE: src/robot/src/serial/SerialCommsFactory.h ================================================ #ifndef SerialCommsFactory_h #define SerialCommsFactory_h 
#include #include #include "SerialCommsBase.h" enum class SERIAL_FACTORY_MODE { STANDARD, MOCK, }; class SerialCommsFactory { public: static SerialCommsFactory* getFactoryInstance(); void set_mode(SERIAL_FACTORY_MODE mode); SerialCommsBase* get_serial_comms(std::string portName); // Delete copy and assignment constructors SerialCommsFactory(SerialCommsFactory const&) = delete; SerialCommsFactory& operator= (SerialCommsFactory const&) = delete; private: // Make standard constructor private so it can't be created SerialCommsFactory(); void build_serial_comms(std::string portName); static SerialCommsFactory* instance; SERIAL_FACTORY_MODE mode_; std::map> comms_objects_; }; #endif ================================================ FILE: src/robot/src/sockets/ClientSocket.cpp ================================================ // Implementation of the ClientSocket class #include "ClientSocket.h" #include "SocketException.h" ClientSocket::ClientSocket ( std::string host, int port ) { if ( ! Socket::create() ) { throw SocketException ( "Could not create client socket." ); } if ( ! Socket::connect ( host, port ) ) { throw SocketException ( "Could not bind to port." ); } } const ClientSocket& ClientSocket::operator << ( const std::string& s ) const { if ( ! Socket::send ( s ) ) { throw SocketException ( "Could not write to socket." ); } return *this; } const ClientSocket& ClientSocket::operator >> ( std::string& s ) const { if ( ! Socket::recv ( s ) ) { throw SocketException ( "Could not read from socket." 
); } return *this; } ================================================ FILE: src/robot/src/sockets/ClientSocket.h ================================================ // Definition of the ClientSocket class #ifndef ClientSocket_class #define ClientSocket_class #include "Socket.h" class ClientSocket : private Socket { public: ClientSocket ( std::string host, int port ); virtual ~ClientSocket(){}; const ClientSocket& operator << ( const std::string& ) const; const ClientSocket& operator >> ( std::string& ) const; }; #endif ================================================ FILE: src/robot/src/sockets/MockSocketMultiThreadWrapper.cpp ================================================ #include "MockSocketMultiThreadWrapper.h" #include MockSocketMultiThreadWrapper::MockSocketMultiThreadWrapper() : SocketMultiThreadWrapperBase(), send_data_(), rcv_data_(), ms_until_next_command_(-1), send_immediate_(true), timer_() { } void MockSocketMultiThreadWrapper::add_mock_data(std::string data) { rcv_data_.push(data); } std::string MockSocketMultiThreadWrapper::getData() { if (rcv_data_.empty()) { return ""; } std::string outdata = rcv_data_.front(); rcv_data_.pop(); PLOGI << "Popped: " << outdata << " data left: " << rcv_data_.size(); if(outdata[1] != '{') { ms_until_next_command_ = stoi(outdata); timer_.reset(); PLOGI << "Waiting " << ms_until_next_command_ << " ms to send next command"; send_immediate_ = false; outdata = ""; } return outdata; } void MockSocketMultiThreadWrapper::sendData(std::string data) { send_data_.push(data); } bool MockSocketMultiThreadWrapper::dataAvailableToRead() { if(send_immediate_ || timer_.dt_ms() > ms_until_next_command_) { return !rcv_data_.empty(); } else { return false; } } std::string MockSocketMultiThreadWrapper::getMockData() { if (send_data_.empty()) { return ""; } std::string outdata = send_data_.front(); send_data_.pop(); return outdata; } void MockSocketMultiThreadWrapper::sendMockData(std::string data) { rcv_data_.push(data); } void 
MockSocketMultiThreadWrapper::purge_data() { while(!send_data_.empty()) { send_data_.pop(); } while(!rcv_data_.empty()) { rcv_data_.pop(); } } ================================================ FILE: src/robot/src/sockets/MockSocketMultiThreadWrapper.h ================================================ #ifndef MockSocketMultiThreadWrapper_h #define MockSocketMultiThreadWrapper_h #include #include #include "SocketMultiThreadWrapperBase.h" #include "utils.h" class MockSocketMultiThreadWrapper : public SocketMultiThreadWrapperBase { public: MockSocketMultiThreadWrapper(); std::string getData(); void sendData(std::string data); bool dataAvailableToRead(); void add_mock_data(std::string data); std::string getMockData(); void sendMockData(std::string data); void purge_data(); void set_send_immediate(bool send_immediate) {send_immediate_ = send_immediate;}; private: std::queue send_data_; std::queue rcv_data_; int ms_until_next_command_; bool send_immediate_; Timer timer_; }; #endif ================================================ FILE: src/robot/src/sockets/README.md ================================================ The low level socket implimentation in this folder comes from Rob Tougher and is published here at https://tldp.org/LDP/LG/issue74/tougher.html under the Open Publication License (http://www.opencontent.org/openpub/) wich allows copying and modification as long as a disclosure is present. The following files are copied from the site specified and provided here with minor modifications: -`Socket.h/.cpp` -`ServerSocket.h/.cpp` -`ClientSocket.h/.cpp` -`SocketException.h` All other files in this directory are written by Alex Baucom. ================================================ FILE: src/robot/src/sockets/ServerSocket.cpp ================================================ // Implementation of the ServerSocket class #include "ServerSocket.h" #include "SocketException.h" #include "SocketTimeoutException.h" #include ServerSocket::ServerSocket ( int port ) { if ( ! 
Socket::create() ) { throw SocketException ( "Could not create server socket." ); } if ( ! Socket::bind ( port ) ) { throw SocketException ( "Could not bind to port." ); } if ( ! Socket::listen() ) { throw SocketException ( "Could not listen to socket." ); } } ServerSocket::~ServerSocket() { } const ServerSocket& ServerSocket::operator << ( const std::string& s ) const { if ( ! Socket::send ( s ) ) { throw SocketException ( "Could not write to socket." ); } return *this; } const ServerSocket& ServerSocket::operator >> ( std::string& s ) const { int status = Socket::recv ( s ); if ( status == -1 ) { throw SocketTimeoutException ( "No data available in blocking time" ); } else if (status == 0) { throw SocketException ( "Could not read from socket." ); } return *this; } void ServerSocket::accept ( ServerSocket& sock ) { if ( ! Socket::accept ( sock ) ) { throw SocketException ( "Could not accept socket." ); } } void ServerSocket::set_non_blocking() { Socket::set_non_blocking(true); } ================================================ FILE: src/robot/src/sockets/ServerSocket.h ================================================ // From https://tldp.org/LDP/LG/issue74/tougher.html // Definition of the ServerSocket class #ifndef ServerSocket_class #define ServerSocket_class #include "Socket.h" class ServerSocket : private Socket { public: ServerSocket ( int port ); ServerSocket (){}; virtual ~ServerSocket(); const ServerSocket& operator << ( const std::string& ) const; const ServerSocket& operator >> ( std::string& ) const; void accept ( ServerSocket& ); void set_non_blocking(); }; #endif ================================================ FILE: src/robot/src/sockets/Socket.cpp ================================================ // Implementation of the Socket class. 
#include "Socket.h"
#include "string.h"

// NOTE(review): include targets lost in extraction; restored from usage
// below (errno, fcntl, std::cout, ::close) and from Rob Tougher's published
// original of this file - confirm against upstream.
#include <errno.h>
#include <fcntl.h>
#include <iostream>
#include <unistd.h>

Socket::Socket() : m_sock ( -1 )
{
    memset ( &m_addr, 0, sizeof ( m_addr ) );
}

// Closes the descriptor on destruction so sockets are never leaked.
Socket::~Socket()
{
    if ( is_valid() )
        ::close ( m_sock );
}

// Creates the TCP socket and enables SO_REUSEADDR so a restarted server can
// rebind immediately instead of waiting out TIME_WAIT.
bool Socket::create()
{
    m_sock = socket ( AF_INET, SOCK_STREAM, 0 );
    if ( ! is_valid() )
        return false;

    // TIME_WAIT - argh
    int on = 1;
    if ( setsockopt ( m_sock, SOL_SOCKET, SO_REUSEADDR,
                      ( const char* ) &on, sizeof ( on ) ) == -1 )
        return false;

    return true;
}

// Binds to the given port on all local interfaces.
bool Socket::bind ( const int port )
{
    if ( ! is_valid() )
    {
        return false;
    }

    m_addr.sin_family = AF_INET;
    m_addr.sin_addr.s_addr = INADDR_ANY;
    m_addr.sin_port = htons ( port );

    int bind_return = ::bind ( m_sock, ( struct sockaddr * ) &m_addr,
                               sizeof ( m_addr ) );
    return bind_return != -1;
}

// Starts listening with a backlog of MAXCONNECTIONS.
bool Socket::listen() const
{
    if ( ! is_valid() )
    {
        return false;
    }

    int listen_return = ::listen ( m_sock, MAXCONNECTIONS );
    return listen_return != -1;
}

// Accepts one connection, placing the new descriptor into new_socket.
bool Socket::accept ( Socket& new_socket ) const
{
    int addr_length = sizeof ( m_addr );
    new_socket.m_sock = ::accept ( m_sock, ( sockaddr * ) &m_addr,
                                   ( socklen_t * ) &addr_length );
    return new_socket.m_sock > 0;
}

// Sends the whole string; MSG_NOSIGNAL suppresses SIGPIPE if the peer is gone.
bool Socket::send ( const std::string s ) const
{
    int status = ::send ( m_sock, s.c_str(), s.size(), MSG_NOSIGNAL );
    return status != -1;
}

// Reads up to MAXRECV bytes into s. Returns the byte count on success,
// -1 when a non-blocking socket has no data (EAGAIN), or 0 on error or a
// closed connection.
// NOTE(review): `s = buf` stops at the first NUL byte, so binary payloads
// would be truncated; presumably only text is sent over this link - confirm.
int Socket::recv ( std::string& s ) const
{
    char buf [ MAXRECV + 1 ];
    s = "";
    memset ( buf, 0, MAXRECV + 1 );

    int status = ::recv ( m_sock, buf, MAXRECV, 0 );
    if ( status == -1 )
    {
        if ( errno == EAGAIN )
        {
            return -1;
        }
        std::cout << "status == -1 errno == " << errno << " in Socket::recv\n";
        return 0;
    }
    else if ( status == 0 )
    {
        return 0;
    }
    s = buf;
    return status;
}

// Connects to host:port (dotted-quad host). Returns true on success.
bool Socket::connect ( const std::string host, const int port )
{
    if ( ! is_valid() )
        return false;

    m_addr.sin_family = AF_INET;
    m_addr.sin_port = htons ( port );

    // BUGFIX: the original tested `errno == EAFNOSUPPORT`, which can trip on
    // a stale errno left by an earlier call and misses the malformed-address
    // case entirely. inet_pton returns 1 on success, 0 for an unparseable
    // address string and -1 for an unsupported family, so test its return
    // value directly.
    int status = inet_pton ( AF_INET, host.c_str(), &m_addr.sin_addr );
    if ( status <= 0 )
        return false;

    status = ::connect ( m_sock, ( sockaddr * ) &m_addr, sizeof ( m_addr ) );
    return status == 0;
}

// Toggles O_NONBLOCK on the descriptor; silently gives up if F_GETFL fails.
void Socket::set_non_blocking ( const bool b )
{
    int opts = fcntl ( m_sock, F_GETFL );
    if ( opts < 0 )
    {
        return;
    }

    if ( b )
        opts = ( opts | O_NONBLOCK );
    else
        opts = ( opts & ~O_NONBLOCK );

    fcntl ( m_sock, F_SETFL, opts );
}

================================================ FILE: src/robot/src/sockets/Socket.h ================================================

// Definition of the Socket class

#ifndef Socket_class
#define Socket_class

// NOTE(review): the seven include targets were lost in extraction; restored
// per Rob Tougher's published version of this file - confirm against upstream.
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <netdb.h>
#include <unistd.h>
#include <arpa/inet.h>
#include <string>

const int MAXHOSTNAME = 200;
const int MAXCONNECTIONS = 5;
const int MAXRECV = 500;

// Thin wrapper around a BSD TCP socket; the descriptor is closed on
// destruction.
class Socket
{
  public:
    Socket();
    virtual ~Socket();

    // Server initialization
    bool create();
    bool bind ( const int port );
    bool listen() const;
    bool accept ( Socket& ) const;

    // Client initialization
    bool connect ( const std::string host, const int port );

    // Data Transmission
    bool send ( const std::string ) const;
    // Returns >0 bytes read, -1 when no data is available (EAGAIN), or 0 on
    // error / closed connection.
    int recv ( std::string& ) const;

    void set_non_blocking ( const bool );

    bool is_valid() const { return m_sock != -1; }

  private:
    int m_sock;
    sockaddr_in m_addr;
};

#endif

================================================ FILE: src/robot/src/sockets/SocketException.h ================================================

// SocketException class

#ifndef SocketException_class
#define SocketException_class

// NOTE(review): include target lost in extraction; <string> restored from
// usage - confirm against upstream.
#include <string>

// Simple exception type carrying a human-readable description of the
// socket failure.
class SocketException
{
  public:
    SocketException ( std::string s ) : m_s ( s ) {};
    ~SocketException (){};

    std::string description() { return m_s; }

  private:
    std::string m_s;
};

#endif

================================================ FILE: src/robot/src/sockets/SocketMultiThreadWrapper.cpp ================================================

#include
"SocketMultiThreadWrapper.h" #include #include "sockets/ServerSocket.h" #include "sockets/SocketException.h" #include "sockets/SocketTimeoutException.h" #include #define PORT 8123 std::mutex read_mutex; std::mutex send_mutex; SocketMultiThreadWrapper::SocketMultiThreadWrapper() : SocketMultiThreadWrapperBase(), data_buffer(), send_buffer() { run_thread = std::thread(&SocketMultiThreadWrapper::socket_loop, this); run_thread.detach(); } bool SocketMultiThreadWrapper::dataAvailableToRead() { std::lock_guard read_lock(read_mutex); return data_buffer.size() > 0; } std::string SocketMultiThreadWrapper::getData() { std::string outstring; const std::lock_guard lock(read_mutex); while(!data_buffer.empty()) { outstring.push_back(data_buffer.front()); data_buffer.pop(); } return outstring; } void SocketMultiThreadWrapper::sendData(std::string data) { const std::lock_guard lock(send_mutex); if(send_buffer.size() + data.size() >= BUFFER_SIZE) { PLOGE.printf("Send buffer overflow, dropping message: %s", data.c_str()); return; } for(const char c : data) { send_buffer.push(c); } } void SocketMultiThreadWrapper::socket_loop() { ServerSocket server(PORT); while(true) { // Wait for a client to connect PLOGI.printf("Ready for client connection"); ServerSocket socket; server.accept(socket); socket.set_non_blocking(); bool keep_connection = true; PLOGI.printf("Client connected"); // Stay connected to the client while the connection is valid while(keep_connection) { // Try to read data from the client std::string read_data; try { socket >> read_data; } catch (SocketTimeoutException &e) {} catch (SocketException& e) { //PLOGI.printf("Caught exception while reading: %s", e.description().c_str()); // If read fails for something other than timeout, disconnect keep_connection = false; continue; } { // Copy data into buffer for output if we got info from the client std::lock_guard read_lock(read_mutex); if(data_buffer.size() + read_data.size() >= BUFFER_SIZE) { PLOGE.printf("Data buffer 
overflow, dropping message"); } else { for (uint i = 0; i < read_data.size(); i++) { //PLOGD << std::hex << std::to_integer(read_buff[i]) << std::dec << ' '; data_buffer.push(read_data[i]); } } } { // If we have data ready to be sent, send it std::lock_guard send_lock(send_mutex); if(send_buffer.size() > 0) { PLOGD.printf("Length to send: %i", send_buffer.size()); std::string send_data; while(!send_buffer.empty()) { send_data.push_back(send_buffer.front()); send_buffer.pop(); } socket << send_data; } } } PLOGI.printf("Closing socket connection"); } } ================================================ FILE: src/robot/src/sockets/SocketMultiThreadWrapper.h ================================================ #ifndef SocketMultiThreadWrapper_h #define SocketMultiThreadWrapper_h #include #include #include "SocketMultiThreadWrapperBase.h" #define BUFFER_SIZE 2048 class SocketMultiThreadWrapper : public SocketMultiThreadWrapperBase { public: SocketMultiThreadWrapper(); std::string getData(); void sendData(std::string data); bool dataAvailableToRead(); private: void socket_loop(); std::queue data_buffer; std::queue send_buffer; std::thread run_thread; }; #endif ================================================ FILE: src/robot/src/sockets/SocketMultiThreadWrapperBase.h ================================================ #ifndef SocketMultiThreadWrapperBase_h #define SocketMultiThreadWrapperBase_h #include class SocketMultiThreadWrapperBase { public: virtual ~SocketMultiThreadWrapperBase() {}; virtual std::string getData() = 0; virtual void sendData(std::string data) = 0; virtual bool dataAvailableToRead() = 0; }; #endif ================================================ FILE: src/robot/src/sockets/SocketMultiThreadWrapperFactory.cpp ================================================ #include "SocketMultiThreadWrapperFactory.h" #include "SocketMultiThreadWrapper.h" #include "MockSocketMultiThreadWrapper.h" #include SocketMultiThreadWrapperFactory* 
SocketMultiThreadWrapperFactory::instance = NULL; SocketMultiThreadWrapperFactory* SocketMultiThreadWrapperFactory::getFactoryInstance() { if(!instance) { instance = new SocketMultiThreadWrapperFactory; } return instance; } void SocketMultiThreadWrapperFactory::set_mode(SOCKET_FACTORY_MODE mode) { mode_ = mode; } void SocketMultiThreadWrapperFactory::add_mock_data(std::string data) { if(mode_ == SOCKET_FACTORY_MODE::MOCK && socket_) { // This is slightly dangerous, but should be safe as long as I don't do something silly like // change the mode of the factory after building the socket SocketMultiThreadWrapperBase* raw_ptr = socket_.get(); dynamic_cast(raw_ptr)->add_mock_data(data); PLOGI << "Added mock data: " << data; } } void SocketMultiThreadWrapperFactory::build_socket() { if(socket_) { return; } if(mode_ == SOCKET_FACTORY_MODE::STANDARD) { socket_ = std::make_unique (); PLOGI << "Built SocketMultiThreadWrapper"; } else if (mode_ == SOCKET_FACTORY_MODE::MOCK) { socket_ = std::make_unique (); PLOGI << "Built MockSocketMultiThreadWrapper"; } } SocketMultiThreadWrapperBase* SocketMultiThreadWrapperFactory::get_socket() { if(!socket_) { build_socket(); } return socket_.get(); } // Private constructor SocketMultiThreadWrapperFactory::SocketMultiThreadWrapperFactory() : mode_(SOCKET_FACTORY_MODE::STANDARD) {} ================================================ FILE: src/robot/src/sockets/SocketMultiThreadWrapperFactory.h ================================================ #ifndef SocketMultiThreadWrapperFactory_h #define SocketMultiThreadWrapperFactory_h #include #include "SocketMultiThreadWrapperBase.h" enum class SOCKET_FACTORY_MODE { STANDARD, MOCK, }; class SocketMultiThreadWrapperFactory { public: static SocketMultiThreadWrapperFactory* getFactoryInstance(); void set_mode(SOCKET_FACTORY_MODE mode); void add_mock_data(std::string data); void build_socket(); SocketMultiThreadWrapperBase* get_socket(); // Delete copy and assignment constructors 
SocketMultiThreadWrapperFactory(SocketMultiThreadWrapperFactory const&) = delete; SocketMultiThreadWrapperFactory& operator= (SocketMultiThreadWrapperFactory const&) = delete; private: // Make standard constructor private so it can't be created SocketMultiThreadWrapperFactory(); static SocketMultiThreadWrapperFactory* instance; SOCKET_FACTORY_MODE mode_; std::unique_ptr socket_; }; #endif ================================================ FILE: src/robot/src/sockets/SocketTimeoutException.h ================================================ // SocketTimeoutException class #ifndef SocketTimeoutException_class #define SocketTimeoutException_class #include class SocketTimeoutException { public: SocketTimeoutException ( std::string s ) : m_s ( s ) {}; ~SocketTimeoutException (){}; std::string description() { return m_s; } private: std::string m_s; }; #endif ================================================ FILE: src/robot/src/utils.cpp ================================================ #define _USE_MATH_DEFINES #include "utils.h" #include #include #include #include #include #include #include #include #include float wrap_angle(float a) { while(std::abs(a) > M_PI) { if(a > M_PI) { a -= 2*M_PI; } else if (a < -1*M_PI) { a += 2*M_PI; } } return a; } float angle_diff(float a1, float a2) { float outA = a1 - a2; // Handle angle wrapping and compute the correct error amount outA = wrap_angle(outA); return outA; } RateController::RateController(int hz) : timer_(), dt_us_(1000000 / hz), always_ready_(cfg.lookup("motion.rate_always_ready")) { } bool RateController::ready() { if(always_ready_ || timer_.dt_us() > dt_us_) { timer_.reset(); return true; } return false; } TimeRunningAverage::TimeRunningAverage(int window_size) : buf_(), buf_idx_(0), window_size_(window_size), filled_(false), started_(false), timer_() { buf_.reserve(window_size_); } int TimeRunningAverage::get_ms() { int sum_end_idx = buf_idx_; if(filled_) { sum_end_idx = window_size_; } if (sum_end_idx == 0) { return 0; } 
int sum = 0; for(int i = 0; i < sum_end_idx; i++) { sum += buf_[i]; } return sum / sum_end_idx; } float TimeRunningAverage::get_sec() { float ms = static_cast(get_ms()); return ms / 1000.0; } void TimeRunningAverage::mark_point() { if(!started_) { timer_.reset(); started_ = true; return; } buf_[buf_idx_] = timer_.dt_ms(); timer_.reset(); buf_idx_++; if (buf_idx_ >= window_size_) { filled_ = true; buf_idx_ = 0; } } Timer::Timer() : prev_time_(ClockFactory::getFactoryInstance()->get_clock()->now()) { } void Timer::reset() { ClockWrapperBase* clock = ClockFactory::getFactoryInstance()->get_clock(); prev_time_ = clock->now(); } int Timer::dt_us() { ClockWrapperBase* clock = ClockFactory::getFactoryInstance()->get_clock(); return std::chrono::duration_cast(clock->now() - prev_time_).count(); } int Timer::dt_ms() { ClockWrapperBase* clock = ClockFactory::getFactoryInstance()->get_clock(); return std::chrono::duration_cast(clock->now() - prev_time_).count(); } float Timer::dt_s() { ClockWrapperBase* clock = ClockFactory::getFactoryInstance()->get_clock(); return std::chrono::duration_cast(clock->now() - prev_time_).count(); } ClockTimePoint ClockWrapper::now() { return std::chrono::steady_clock::now(); } MockClockWrapper::MockClockWrapper() : ClockWrapperBase(), internal_time_(std::chrono::steady_clock::now()) {} ClockTimePoint MockClockWrapper::now() { return internal_time_; } void MockClockWrapper::set_now() { set(std::chrono::steady_clock::now()); } void MockClockWrapper::set(ClockTimePoint time_point) { internal_time_ = time_point; } void MockClockWrapper::advance_us(int us) { internal_time_ += std::chrono::microseconds(us); } void MockClockWrapper::advance_ms(int ms) { internal_time_ += std::chrono::milliseconds(ms); } void MockClockWrapper::advance_sec(float sec) { internal_time_ += std::chrono::duration_cast(FpSeconds(sec)); } ClockFactory* ClockFactory::factory_instance_ = NULL; ClockFactory* ClockFactory::getFactoryInstance() { if(!factory_instance_) { 
factory_instance_ = new ClockFactory; } return factory_instance_; } void ClockFactory::set_mode(CLOCK_FACTORY_MODE mode) { mode_ = mode; } ClockWrapperBase* ClockFactory::get_clock() { if (!clock_instance_) { build_clock_instance(); } return clock_instance_.get(); } void ClockFactory::build_clock_instance() { if(mode_ == CLOCK_FACTORY_MODE::STANDARD) { //PLOGI << "Building STANDARD clock wrapper"; clock_instance_ = std::make_unique(); } else if (mode_ == CLOCK_FACTORY_MODE::MOCK) { //PLOGI << "Building MOCK clock wrapper"; clock_instance_ = std::make_unique(); } } // Private constructor ClockFactory::ClockFactory() : mode_(CLOCK_FACTORY_MODE::STANDARD), clock_instance_() {} PositionController::PositionController(Gains gains) : gains_(gains), error_sum_(0.0) { } void PositionController::reset() { error_sum_ = 0.0; } float PositionController::compute(float target_position, float actual_position, float target_velocity, float actual_velocity, float dt) { float pos_err = target_position - actual_position; float vel_err = target_velocity - actual_velocity; error_sum_ += pos_err * dt; float output_velocity = target_velocity + gains_.kp * pos_err + gains_.kd * vel_err + gains_.ki * error_sum_; return output_velocity; } float vectorMean(const std::vector& data) { if(data.empty()) return 0; float sum = 0.0; for (const auto& val : data) { sum += val; } return sum/data.size(); } float vectorStddev(const std::vector& data, float mean) { if(data.empty()) return 0; float variance = 0; for (const auto& val : data) { variance += (val - mean) * (val - mean); } variance /= data.size(); return sqrt(variance); } float zScore(float mean, float stddev, float reading) { if (stddev < 0.0001) return 0; return fabs((reading - mean)/stddev); } // From: https://www.tutorialspoint.com/parsing-a-comma-delimited-std-string-in-cplusplus std::vector parseCommaDelimitedString(const std::string& str_in) { std::vector result; std::stringstream s_stream(str_in); //create string stream from the string 
while(s_stream.good()) { std::string substr; getline(s_stream, substr, ','); //get first string delimited by comma result.push_back(substr); } return result; } std::vector parseCommaDelimitedStringToFloat(const std::string& str_in) { std::vector str_parsed = parseCommaDelimitedString(str_in); std::vector result; result.reserve(str_parsed.size()); for(const auto& val : str_parsed) { if(val.empty()) continue; result.push_back(std::stof(val)); } return result; } void reset_last_motion_logger() { // Delete the loggers g_logger.reset(); g_appender.reset(); // Delete the file remove("log/last_motion_log.csv"); // Re-initialize the logger g_appender.reset(new plog::RollingFileAppender("log/last_motion_log.csv")); g_logger.reset(new plog::Logger(plog::debug)); g_logger->addAppender(g_appender.get()); } ================================================ FILE: src/robot/src/utils.h ================================================ #ifndef utils_h #define utils_h #include #include #include #include #include #include #include "constants.h" using ClockTimePoint = std::chrono::time_point; using FpSeconds = std::chrono::duration; // Utility for getting sign of values // From: https://stackoverflow.com/questions/1903954/is-there-a-standard-sign-function-signum-sgn-in-c-c template int sgn(T val) { return (T(0) < val) - (val < T(0)); } //******************************************* // Angle stuff //******************************************* // Wrap an angle between +/- 2 pi float wrap_angle(float a); // Take diff of two angles, ensures the result is wrapped betwee +/- 2 pi float angle_diff(float a1, float a2); //******************************************* // Clock/time stuff //******************************************* // The ClockWrapperX classes provide wrappers around the system clock to simplified mocking the clock class ClockWrapperBase { public: virtual ClockTimePoint now() = 0; }; class ClockWrapper : public ClockWrapperBase { public: ClockTimePoint now() override; }; class 
MockClockWrapper : public ClockWrapperBase { public: MockClockWrapper(); ClockTimePoint now() override; void advance_sec(float sec); void advance_ms(int ms); void advance_us(int us); void set(ClockTimePoint time_point); void set_now(); private: ClockTimePoint internal_time_; }; enum class CLOCK_FACTORY_MODE { STANDARD, MOCK }; // Provides a way to get the current clock class ClockFactory { public: static ClockFactory* getFactoryInstance(); void set_mode(CLOCK_FACTORY_MODE mode); ClockWrapperBase* get_clock(); // Delete copy and assignment constructors ClockFactory(ClockFactory const&) = delete; ClockFactory& operator= (ClockFactory const&) = delete; private: // Make standard constructor private so it can't be created ClockFactory(); void build_clock_instance(); static ClockFactory* factory_instance_; CLOCK_FACTORY_MODE mode_; std::unique_ptr clock_instance_; }; // Simple timer class that can return the time since reset in various formats class Timer { public: Timer(); void reset(); int dt_us(); int dt_ms(); float dt_s(); private: ClockTimePoint prev_time_; }; // Keeps an average of how long the time delta is between events class TimeRunningAverage { public: TimeRunningAverage(int window_size); int get_ms(); float get_sec(); void mark_point(); private: std::vector buf_; int buf_idx_; int window_size_; bool filled_; bool started_; Timer timer_; }; // Simple class to run a loop at a certian rate class RateController { public: RateController(int hz); bool ready(); private: Timer timer_; int dt_us_; bool always_ready_; }; //******************************************* // Useful data structures //******************************************* struct Point { float x; float y; float a; Point(float x=0, float y=0, float a=0) : x(x), y(y), a(a) {} std::string toString() const { char s[100]; sprintf(s, "[x: %.3f, y: %.3f, a: %.3f]", x, y, a); return static_cast(s); } bool operator== (const Point& other) const { return x == other.x && y == other.y && a == other.a; } }; struct 
Velocity { float vx; float vy; float va; Velocity(float vx=0, float vy=0, float va=0) : vx(vx), vy(vy), va(va) {} std::string toString() const { char s[100]; sprintf(s, "[vx: %.3f, vy: %.3f, va: %.3f]", vx, vy, va); return static_cast(s); } bool operator== (const Velocity& other) const { return vx == other.vx && vy == other.vy && va == other.va; } bool nearZero(float eps=1e-6) const { if (fabs(vx) < eps && fabs(vy) < eps && fabs(va) < eps) { return true; } else { return false; } } }; class LatchedBool { public: LatchedBool(float seconds_to_latch) : latch_time_(seconds_to_latch), time_since_true_(), value_(false) {} void add(bool value_in) { if(value_in) { value_ = true; time_since_true_.reset(); } else { if(time_since_true_.dt_s() > latch_time_) { value_ = false; } } } bool get() {return value_;} bool update(bool value_in) { add(value_in); return get(); } private: float latch_time_; Timer time_since_true_; bool value_; }; // Very simple circular buffer class template class CircularBuffer { public: CircularBuffer(int size) : data_(std::make_unique>()), size_(size), idx_(0), full_(false) { data_->reserve(size_); } void insert(T val) { if(!full_) { data_->push_back(val); idx_++; if (idx_ % size_ == 0) { full_ = true; idx_ = 0; } } else { data_->at(idx_) = val; idx_ = (idx_ + 1) % size_; } } void clear() { data_->clear(); idx_ = 0; full_ = false; } std::vector get_contents() const { if(!full_ || idx_ == 0) { return *data_; } else { std::vector front(data_->begin() + idx_, data_->end()); std::vector back(data_->begin(), data_->begin() + idx_); front.insert(front.end(), back.begin(), back.end()); return front; } } bool isFull() const { return full_;} private: std::unique_ptr> data_; const int size_; int idx_; bool full_; }; class PositionController { public: struct Gains { float kp; float ki; float kd; }; PositionController(Gains gains = {0,0,0}); // Resets error sum to avoid integral windup void reset(); // Compute control velocity for target position with feedforward 
velocity float compute(float target_position, float actual_position, float target_velocity, float actual_velocity, float dt); private: Gains gains_; float error_sum_; }; struct LocalizationMetrics { float last_position_uncertainty; float confidence_x; float confidence_y; float confidence_a; float total_confidence; }; struct CameraDebug { bool side_ok = false; bool rear_ok = false; bool both_ok = false; float side_u; float side_v; float rear_u; float rear_v; float side_x; float side_y; float rear_x; float rear_y; float pose_x; float pose_y; float pose_a; int loop_ms = 0; }; //******************************************* // Misc math //******************************************* float vectorMean(const std::vector& data); float vectorStddev(const std::vector& data, float mean); float zScore(float mean, float stddev, float reading); //******************************************* // Strings //******************************************* std::vector parseCommaDelimitedString(const std::string& str_in); std::vector parseCommaDelimitedStringToFloat(const std::string& str_in); //******************************************* // Logging //******************************************* static std::unique_ptr g_appender; static std::unique_ptr> g_logger; void reset_last_motion_logger(); #endif ================================================ FILE: src/robot/test/CameraPipeline_Test.cpp ================================================ #include #include "camera_tracker/CameraPipeline.h" #include "test-utils.h" TEST_CASE("Robot coords from image coords", "[Camera]") { SafeConfigModifier config_modifier_1("vision_tracker.debug.use_debug_image", true); SafeConfigModifier config_modifier_2("vision_tracker.side.debug_image", "/home/pi/images/test/TestImage2.jpg"); SafeConfigModifier config_modifier_3("vision_tracker.rear.debug_image", "/home/pi/images/test/TestImage2.jpg"); float side_offset_x = 0.0; float side_offset_y = -1.5; SafeConfigModifier 
config_modifier_4("vision_tracker.physical.side.x_offset", side_offset_x); SafeConfigModifier config_modifier_5("vision_tracker.physical.side.y_offset", side_offset_y); SafeConfigModifier config_modifier_6("vision_tracker.physical.side.z_offset", 0.55); CameraPipeline c(CAMERA_ID::SIDE, /*start_thread=*/ false); SECTION("Side cam, center point") { float u = 320/2; float v = 240/2; Eigen::Vector2f xy_world = c.cameraToRobot({u,v}); CHECK(xy_world[0] == Approx(side_offset_x).margin(0.2)); CHECK(xy_world[1] == Approx(side_offset_y).margin(0.2)); } SECTION("Side cam, top corner") { float u = 0; float v = 0; Eigen::Vector2f xy_world = c.cameraToRobot({u,v}); CHECK(xy_world[0] < side_offset_x); CHECK(xy_world[1] > side_offset_y); } } // Verify that camera params between actual and test config are synced if this test is giving trouble TEST_CASE("Marker Detection", "[Camera]") { struct TestImageMetadata { std::string name; bool expected_detection; CAMERA_ID id; Eigen::Vector2f expected_point_px = {-1, -1}; }; std::vector test_images = { {.name = "20210712175430_side_img_raw.jpg", .expected_detection = true, .id = CAMERA_ID::SIDE}, {.name = "20210712175436_rear_img_raw.jpg", .expected_detection = true, .id = CAMERA_ID::REAR}, {.name = "20210712175519_side_img_raw.jpg", .expected_detection = true, .id = CAMERA_ID::SIDE}, {.name = "20210712175525_rear_img_raw.jpg", .expected_detection = true, .id = CAMERA_ID::REAR}, {.name = "20210712175606_side_img_raw.jpg", .expected_detection = false, .id = CAMERA_ID::SIDE}, {.name = "20210712175612_rear_img_raw.jpg", .expected_detection = false, .id = CAMERA_ID::REAR}, {.name = "20210712175719_side_img_raw.jpg", .expected_detection = false, .id = CAMERA_ID::SIDE}, {.name = "20210712175728_rear_img_raw.jpg", .expected_detection = false, .id = CAMERA_ID::REAR}, {.name = "20210712175942_rear_img_raw.jpg", .expected_detection = true, .id = CAMERA_ID::REAR}, {.name = "20210712180047_rear_img_raw.jpg", .expected_detection = true, .id = 
CAMERA_ID::REAR}, {.name = "20210712180208_side_img_raw.jpg", .expected_detection = true, .id = CAMERA_ID::SIDE}, {.name = "20210712180322_side_img_raw.jpg", .expected_detection = true, .id = CAMERA_ID::SIDE}, {.name = "20210712180446_side_img_raw.jpg", .expected_detection = true, .id = CAMERA_ID::SIDE}, {.name = "20210712180528_side_img_raw.jpg", .expected_detection = true, .id = CAMERA_ID::SIDE}, {.name = "20210712182429_side_img_raw.jpg", .expected_detection = true, .id = CAMERA_ID::SIDE}, }; for (const auto& item : test_images) { std::string image_dir = "/home/pi/DominoRobot/src/robot/test/testdata/new_images/"; std::string image_path = image_dir + item.name; SafeConfigModifier config_modifier_1("vision_tracker.debug.use_debug_image", true); SafeConfigModifier config_modifier_2("vision_tracker.side.debug_image", image_path); SafeConfigModifier config_modifier_3("vision_tracker.rear.debug_image", image_path); CameraPipeline c(item.id, /*start_thread=*/ false); c.oneLoop(); CameraPipelineOutput output = c.getData(); PLOGI << "Checking detection in image " << item.name; CHECK(output.ok == item.expected_detection); if(item.expected_point_px != Eigen::Vector2f({-1,-1})) { CHECK(output.uv == item.expected_point_px); } } } ================================================ FILE: src/robot/test/CameraTracker_Test.cpp ================================================ #include #include "camera_tracker/CameraTracker.h" #include "test-utils.h" #include TEST_CASE("Nominal case", "[Camera]") { SafeConfigModifier config_modifier_1("vision_tracker.debug.use_debug_image", true); SafeConfigModifier config_modifier_2("vision_tracker.side.debug_image", "/home/pi/images/test/TestImage2.jpg"); SafeConfigModifier config_modifier_3("vision_tracker.rear.debug_image", "/home/pi/images/test/TestImage2.jpg"); CameraTracker c(/*start_thread=*/ true); int counter = 0; CameraTrackerOutput output; output.ok = false; while(!output.ok && counter < 50){ c.update(); output = c.getPoseFromCamera(); 
counter++; usleep(100000); } REQUIRE(output.ok == true); Point p = output.pose; bool any_nonzero = p.x != 0 || p.y != 0 || p.a != 0; CHECK(any_nonzero == true); } TEST_CASE("Robot pose from coords", "[Camera]") { SafeConfigModifier config_modifier_1("vision_tracker.debug.use_debug_image", true); SafeConfigModifier config_modifier_2("vision_tracker.side.debug_image", "/home/pi/images/test/TestImage2.jpg"); SafeConfigModifier config_modifier_3("vision_tracker.rear.debug_image", "/home/pi/images/test/TestImage2.jpg"); float side_target_x = 0.0; float side_target_y = -1.4; SafeConfigModifier config_modifier_4("vision_tracker.physical.side.x_offset", side_target_x); SafeConfigModifier config_modifier_5("vision_tracker.physical.side.y_offset", side_target_y); SafeConfigModifier config_modifier_6("vision_tracker.physical.side.z_offset", 0.55); SafeConfigModifier config_modifier_7("vision_tracker.physical.side.target_x", side_target_x); SafeConfigModifier config_modifier_8("vision_tracker.physical.side.target_y", side_target_y); float rear_target_x = -1.5; float rear_target_y = 0.0; SafeConfigModifier config_modifier_9("vision_tracker.physical.rear.x_offset", rear_target_x); SafeConfigModifier config_modifier_10("vision_tracker.physical.rear.y_offset", rear_target_y); SafeConfigModifier config_modifier_11("vision_tracker.physical.rear.z_offset", 0.55); SafeConfigModifier config_modifier_12("vision_tracker.physical.rear.target_x", rear_target_x); SafeConfigModifier config_modifier_13("vision_tracker.physical.rear.target_y", rear_target_y); SECTION("Exact match, target at center images") { CameraTracker c(/*start_thread=*/ false); Point p = c.computeRobotPoseFromImagePoints({side_target_x, side_target_y},{rear_target_x,rear_target_y}); CHECK(p.x == Approx(0).margin(0.0005)); CHECK(p.y == Approx(0).margin(0.0005)); CHECK(p.a == Approx(0).margin(0.0005)); } SECTION("Offset translation, target at center images") { CameraTracker c(/*start_thread=*/ false); float x_offset = 0.1; 
float y_offset = -0.2; Point p = c.computeRobotPoseFromImagePoints({side_target_x + x_offset, side_target_y + y_offset}, {rear_target_x + x_offset, rear_target_y + y_offset}); CHECK(p.x == Approx(-x_offset).margin(0.0005)); CHECK(p.y == Approx(-y_offset).margin(0.0005)); CHECK(p.a == Approx(0).margin(0.0005)); } SECTION("Offset angle, target at center images") { CameraTracker c(/*start_thread=*/ false); float a_offset = 5 * M_PI / 180.0; Point p = c.computeRobotPoseFromImagePoints({side_target_y * -sin(a_offset), side_target_y * cos(a_offset) }, {rear_target_x * cos(a_offset), rear_target_x * sin(a_offset)}); CHECK(p.x == Approx(0).margin(0.0005)); CHECK(p.y == Approx(0).margin(0.0005)); CHECK(p.a == Approx(-a_offset).margin(0.0005)); } } ================================================ FILE: src/robot/test/Localization_Test.cpp ================================================ // #include // #include // #include "Localization.h" // #include "StatusUpdater.h" // #include "test-utils.h" // TEST_CASE("Simple velocity only", "[Localization]") // { // // Setup x,y offset for mm to be 0 for this simple case // SafeConfigModifier mm_x_config_modifier("localization.mm_x_offset", 0.0); // SafeConfigModifier mm_y_config_modifier("localization.mm_y_offset", 0.0); // Localization L; // L.updateVelocityReading({1,0,0}, 1); // REQUIRE(L.getVelocity() == Velocity{1,0,0}); // REQUIRE(L.getPosition() == Point{1,0,0}); // } // TEST_CASE("Simple position adjustment", "[Localization]") // { // // Use fake perfect motion for simplicity // SafeConfigModifier config_modifier("motion.fake_perfect_motion", true); // SECTION("Zero Vel, Same Position, No offset") // { // // Setup x,y offset for mm to be 0 for this simple case // SafeConfigModifier mm_x_config_modifier("localization.mm_x_offset", 0.0); // SafeConfigModifier mm_y_config_modifier("localization.mm_y_offset", 0.0); // Localization L; // // "Move" robot to target position // float move_x = 1.0; // float move_y = 0.0; // float 
move_a = 0.0; // Point move_target = {move_x, move_y, move_a}; // L.forceSetPosition(move_target); // // Verify robot is in expected position // Point position = L.getPosition(); // REQUIRE(position.x == Approx(move_x).margin(0.0005)); // REQUIRE(position.y == Approx(move_y).margin(0.0005)); // REQUIRE(position.a == Approx(move_a).margin(0.0005)); // // Send position update that should match current position // L.updatePositionReading(move_target); // // Verify position has not changed // position = L.getPosition(); // REQUIRE(position.x == Approx(move_x).margin(0.0005)); // REQUIRE(position.y == Approx(move_y).margin(0.0005)); // REQUIRE(position.a == Approx(move_a).margin(0.0005)); // } // SECTION("Zero Vel, Position adjustment, No offset") // { // // Setup x,y offset for mm to be 0 for this simple case // SafeConfigModifier mm_x_config_modifier("localization.mm_x_offset", 0.0); // SafeConfigModifier mm_y_config_modifier("localization.mm_y_offset", 0.0); // Localization L; // // "Move" robot to target position // float move_x = 1.0; // float move_y = 0.0; // float move_a = 0.0; // Point move_target = {move_x, move_y, move_a}; // L.forceSetPosition(move_target); // // Verify robot is in expected position // Point position = L.getPosition(); // REQUIRE(position.x == Approx(move_x).margin(0.0005)); // REQUIRE(position.y == Approx(move_y).margin(0.0005)); // REQUIRE(position.a == Approx(move_a).margin(0.0005)); // // Send position update that should trigger adjustment // float update_x = 2.0; // float update_y = 0.0; // float update_a = 0.0; // Point update_target = {update_x, update_y, update_a}; // L.updatePositionReading(update_target); // // Verify position changed as expected // float expected_x = 0.0; // position = L.getPosition(); // REQUIRE(position.x == Approx(expected_x).margin(0.0005)); // REQUIRE(position.y == Approx(move_y).margin(0.0005)); // REQUIRE(position.a == Approx(move_a).margin(0.0005)); // } // } // TEST_CASE("Position update with MM offset", 
"[Localization]") // { // // Setup x,y offset for mm // SafeConfigModifier mm_x_config_modifier("localization.mm_x_offset", -100.0); // SafeConfigModifier mm_y_config_modifier("localization.mm_y_offset", 10.0); // Localization L; // // "Move" robot to target position // float move_x = 1.0; // float move_y = 0.0; // float move_a = 3.14; // Point move_target = {move_x, move_y, move_a}; // L.forceSetPosition(move_target); // // Verify robot is in expected position // Point position = L.getPosition(); // REQUIRE(position.x == Approx(move_x).margin(0.0005)); // REQUIRE(position.y == Approx(move_y).margin(0.0005)); // REQUIRE(position.a == Approx(move_a).margin(0.0005)); // // Send position update that should not trigger adjustment // // Since there is an offset, this position update needs to account for that // float update_x = 1.1; // float update_y = -0.01; // float update_a = 3.14; // Point update_target = {update_x, update_y, update_a}; // L.updatePositionReading(update_target); // // Verify position did not change // position = L.getPosition(); // REQUIRE(position.x == Approx(move_x).margin(0.0005)); // REQUIRE(position.y == Approx(move_y).margin(0.0005)); // REQUIRE(position.a == Approx(move_a).margin(0.0005)); // } // // TODO: Re-enable once velocity adjustments are turned back on // // TEST_CASE("Position update with velocity adjustment", "[Localization]") // // { // // // Setup x,y offset for mm to be 0 // // SafeConfigModifier mm_x_config_modifier("localization.mm_x_offset", 0.0); // // SafeConfigModifier mm_y_config_modifier("localization.mm_y_offset", 0.0); // // // Fix these values for this test so that I know exactly what they are. 
This test shouldn't break // // // because I modified those values in the config // // SafeConfigModifier frac_config_modifier("localization.update_fraction_at_zero_vel", 1.0); // // SafeConfigModifier val_config_modifier("localization.val_for_zero_update", 0.1); // // Localization L; // // // Start 'robot' moving at specific velocity so we can inject the test conditions // // float target_vel_x = 0.05; // // float target_vel_y = 0.0; // // float target_vel_a = 0.0; // // L.updateVelocityReading({target_vel_x,target_vel_y,target_vel_a}, 1); // // // Send position update that should trigger adjustment, but will be adjusted due to velocity // // Point position = L.getPosition(); // // float cur_x = position.x; // // float cur_y = position.y; // // float update_x = cur_x; // This tests that an exact match of the position doesn't cause a change // // float update_y = 1.0; // This tests that a difference causes an adjustment modified by velocity // // float update_a = 0.0; // // L.updatePositionReading({update_x, update_y, update_a}); // // // Verify position changed as expected // // position = L.getPosition(); // // const float val_for_zero_update = cfg.lookup("localization.val_for_zero_update"); // // float expected_y = (target_vel_x / val_for_zero_update) * (update_y - cur_y) + cur_y; // // REQUIRE(position.x == Approx(cur_x).margin(0.0005)); // // REQUIRE(position.y == Approx(expected_y).margin(0.002)); // Some extra margin here because the robot is still moving // // REQUIRE(position.a == Approx(update_a).margin(0.0005)); // // } // TEST_CASE("Test reading reliability filter", "[Localization]") // { // // Setup x,y offset for mm to be 0 // SafeConfigModifier mm_x_config_modifier("localization.mm_x_offset", 0.0); // SafeConfigModifier mm_y_config_modifier("localization.mm_y_offset", 0.0); // // Fix these values for this test so that I know exactly what they are. 
This test shouldn't break // // because I modified those values in the config // std::default_random_engine generator; // SECTION("Filter spurious value") // { // Localization L; // float update_x = 0.0; // float update_y = 0.0; // float update_a = 0.0; // std::normal_distribution distribution(0.0,0.0001); // // Load in a number of reliable position updates // const int n_loops = 10; // for (int i = 0; i < n_loops ; i++) // { // float noise = distribution(generator); // L.updatePositionReading({update_x+noise, update_y+noise, update_a+noise}); // } // // Give a very wrong update value // L.updatePositionReading({1000.0, -123.0, 0.0}); // // Check that position didn't change // Point position = L.getPosition(); // REQUIRE(position.x == Approx(update_x).margin(0.0005)); // REQUIRE(position.y == Approx(update_y).margin(0.0005)); // REQUIRE(position.a == Approx(update_a).margin(0.0005)); // } // SECTION("Position updates after shift") // { // Localization L; // float update_x = 1.0; // float update_y = 0.0; // float update_a = 0.0; // std::normal_distribution distribution(0.0,0.0001); // // Load in a number of reliable position updates // const int n_loops = 10; // for (int i = 0; i < n_loops ; i++) // { // float noise = distribution(generator); // L.updatePositionReading({update_x+noise, update_y+noise, update_a+noise}); // } // // Shift the position to a new reliable position // float new_update_x = 50.0; // float new_update_y = 10.0; // float new_update_a = -3.0; // L.updatePositionReading({new_update_x, new_update_y, new_update_a}); // // After first update, position shouldn't change // Point position = L.getPosition(); // REQUIRE(position.x == Approx(update_x).margin(0.0005)); // REQUIRE(position.y == Approx(update_y).margin(0.0005)); // REQUIRE(position.a == Approx(update_a).margin(0.0005)); // // After many updates, the position should change // for (int i = 0; i < 2*n_loops ; i++) // { // float noise = distribution(generator); // 
L.updatePositionReading({new_update_x+noise, new_update_y+noise, new_update_a+noise}); // } // position = L.getPosition(); // REQUIRE(position.x == Approx(new_update_x).margin(0.0005)); // REQUIRE(position.y == Approx(new_update_y).margin(0.0005)); // REQUIRE(position.a == Approx(new_update_a).margin(0.0005)); // } // SECTION("High variance sampling doesn't affect position") // { // Localization L; // float update_x = 1.0; // float update_y = 0.0; // float update_a = 0.0; // std::normal_distribution distribution(0.0,0.0001); // // Load in a number of reliable position updates // const int n_loops = 10; // for (int i = 0; i < n_loops ; i++) // { // float noise = distribution(generator); // L.updatePositionReading({update_x+noise, update_y+noise, update_a+noise}); // } // // Verify position is as expected // Point position = L.getPosition(); // REQUIRE(position.x == Approx(update_x).margin(0.0005)); // REQUIRE(position.y == Approx(update_y).margin(0.0005)); // REQUIRE(position.a == Approx(update_a).margin(0.0005)); // // Now generate very noisy data around new position // float new_update_x = 50.0; // float new_update_y = 10.0; // float new_update_a = -3.0; // std::normal_distribution noisy_distribution(0.0,1); // // After many noisy updates, the position should not change // for (int i = 0; i < n_loops ; i++) // { // float noise = noisy_distribution(generator); // L.updatePositionReading({new_update_x+noise, new_update_y+noise, new_update_a+noise}); // } // position = L.getPosition(); // REQUIRE(position.x == Approx(update_x).margin(0.0005)); // REQUIRE(position.y == Approx(update_y).margin(0.0005)); // REQUIRE(position.a == Approx(update_a).margin(0.0005)); // } // } ================================================ FILE: src/robot/test/RobotController_Test.cpp ================================================ #include #include "RobotController.h" #include "StatusUpdater.h" #include "test-utils.h" // Helper that commands the robot to move to the given position // 
Returns the number of loops run before the robot either stopped or max_loops was hit int moveToPositionHelper(RobotController& r, float x, float y, float a, int max_loops) { MockSerialComms* mock_serial = build_and_get_mock_serial(CLEARCORE_USB);; MockClockWrapper* mock_clock = get_mock_clock(); mock_serial->purge_data(); r.moveToPosition(x,y,a); REQUIRE(mock_serial->mock_rcv_base() == "Power:ON"); int count = 0; while(r.isTrajectoryRunning() && count < max_loops) { count++; r.update(); // Report back the exact velocity commanded std::string cmd_vel = mock_serial->mock_rcv_base(); mock_serial->mock_send("base:" + cmd_vel); mock_clock->advance_ms(1); } return count; } // A simple position test that checks if the target position is reached within // a set amount of time void coarsePositionTest(float x, float y, float a) { // Force all control parameters to 0 for perfect velocity tracking (due to ffd) SafeConfigModifier pos_kp_config_modifier("motion.translation.gains.kp", 0.0); SafeConfigModifier pos_ki_config_modifier("motion.translation.gains.ki", 0.0); SafeConfigModifier pos_kd_config_modifier("motion.translation.gains.kd", 0.0); SafeConfigModifier ang_kp_config_modifier("motion.rotation.gains.kp", 0.0); SafeConfigModifier ang_ki_config_modifier("motion.rotation.gains.ki", 0.0); SafeConfigModifier ang_kd_config_modifier("motion.rotation.gains.kd", 0.0); SafeConfigModifier limit_config_modifier("motion.limit_max_fraction", 0.8); int max_loop_counts = 100000; reset_mock_clock(); // Need to reset before creating RobotController which instantiates timers StatusUpdater s; RobotController r = RobotController(s); int num_loops = moveToPositionHelper(r, x, y, a, max_loop_counts); CHECK(num_loops != max_loop_counts); CHECK(r.isTrajectoryRunning() == false); StatusUpdater::Status status = s.getStatus(); REQUIRE(status.pos_x == Approx(x).margin(0.0005)); REQUIRE(status.pos_y == Approx(y).margin(0.0005)); REQUIRE(status.pos_a == Approx(a).margin(0.0005)); } 
TEST_CASE("RobotController class", "[RobotController]") { StatusUpdater s; RobotController r = RobotController(s); r.moveToPosition(1,1,1); r.update(); REQUIRE(r.isTrajectoryRunning() == true); } TEST_CASE("Motor enable and disable", "[RobotController]") { MockSerialComms* mock_serial = build_and_get_mock_serial(CLEARCORE_USB);; StatusUpdater s; RobotController r = RobotController(s); mock_serial->purge_data(); r.enableAllMotors(); REQUIRE(mock_serial->mock_rcv_base() == "Power:ON"); mock_serial->purge_data(); r.disableAllMotors(); REQUIRE(mock_serial->mock_rcv_base() == "Power:OFF"); } TEST_CASE("Simple coarse motion", "[RobotController]") { SECTION("Straight forward") { coarsePositionTest(0.5,0,0); } SECTION("Diagonal") { coarsePositionTest(0.5,0.5,0); } SECTION("Positive rotation") { coarsePositionTest(0,0,0.5); } SECTION("Negative rotation") { coarsePositionTest(0,0,-0.5); } SECTION("All directions") { coarsePositionTest(0.1,0.1,-0.5); } } void fakeMotionHelper(float x, float y, float a, int max_loops, StatusUpdater& s) { MockClockWrapper* mock_clock = get_mock_clock_and_reset(); MockSerialComms* mock_serial = build_and_get_mock_serial(CLEARCORE_USB);; mock_serial->purge_data(); RobotController r = RobotController(s); r.moveToPosition(x,y,a); int count = 0; while(r.isTrajectoryRunning() && count < max_loops) { count++; r.update(); mock_clock->advance_us(1000); } } TEST_CASE("Validate fake_perfect_motion option", "[RobotController]") { StatusUpdater s; float x = 0.5; float y = 0.5; float a = 0.5; int test_counts = 10000; // Prior to fake motion enabled, the robot should not move fakeMotionHelper(x,y,a,test_counts,s); StatusUpdater::Status status = s.getStatus(); REQUIRE(status.pos_x != Approx(x).margin(0.0005)); REQUIRE(status.pos_y != Approx(y).margin(0.0005)); REQUIRE(status.pos_a != Approx(a).margin(0.0005)); { // Enable fake_perfect_motion with config modifier, which should reset when going out of scope SafeConfigModifier 
config_modifier("motion.fake_perfect_motion", true); // Now this should work fakeMotionHelper(x,y,a,test_counts,s); status = s.getStatus(); REQUIRE(status.pos_x == Approx(x).margin(0.0005)); REQUIRE(status.pos_y == Approx(y).margin(0.0005)); REQUIRE(status.pos_a == Approx(a).margin(0.0005)); } // Fake perfect motion should be reset, now it shouldn't move again fakeMotionHelper(x,y,a,test_counts,s); status = s.getStatus(); REQUIRE(status.pos_x != Approx(x).margin(0.0005)); REQUIRE(status.pos_y != Approx(y).margin(0.0005)); REQUIRE(status.pos_a != Approx(a).margin(0.0005)); } // Trajectory generation can no longer trigger an error. Re-enable this test once something can trigger errors again // TEST_CASE("Block on error", "[RobotController]") // { // MockSerialComms* mock_serial = build_and_get_mock_serial(CLEARCORE_USB);; // StatusUpdater s; // RobotController r = RobotController(s); // mock_serial->purge_data(); // r.moveToPosition(0.00001,10000,0.0000001); // REQUIRE(s.getErrorStatus() == true); // REQUIRE(s.getInProgress() == false); // REQUIRE(r.isTrajectoryRunning() == false); // r.update(); // REQUIRE(mock_serial->mock_rcv_base() != "Power:ON"); // REQUIRE(s.getErrorStatus() == true); // REQUIRE(s.getInProgress() == false); // REQUIRE(r.isTrajectoryRunning() == false); // } // TEST_CASE("Benchmarking motion", "[RobotController]") // { // MockSerialComms* mock_serial = build_and_get_mock_serial(CLEARCORE_USB);; // StatusUpdater s; // RobotController r = RobotController(s); // mock_serial->purge_data(); // r.moveToPosition(10,0,0); // REQUIRE(mock_serial->mock_rcv_base() == "Power:ON"); // BENCHMARK("RobotController benchmark") // { // r.update(); // // Report back the exact velocity commanded // std::string cmd_vel = mock_serial->mock_rcv_base(); // mock_serial->mock_send(cmd_vel); // }; // } ================================================ FILE: src/robot/test/RobotServer_Test.cpp ================================================ #include #include 
"RobotServer.h" #include "StatusUpdater.h" #include "test-utils.h" #include "sockets/MockSocketMultiThreadWrapper.h" void testSimpleCommand(RobotServer& r, std::string msg, std::string expected_resp, COMMAND expected_command) { MockSocketMultiThreadWrapper* mock_socket = build_and_get_mock_socket(); mock_socket->sendMockData(msg); COMMAND c = r.oneLoop(); std::string resp = mock_socket->getMockData(); REQUIRE(resp == expected_resp); REQUIRE(c == expected_command); } TEST_CASE("Move", "[RobotServer]") { std::string msg = "<{'type':'move','data':{'x':1,'y':2,'a':3}}>"; std::string expected_response = "<{\"type\":\"ack\",\"data\":\"move\"}>"; COMMAND expected_command = COMMAND::MOVE; StatusUpdater s; RobotServer r = RobotServer(s); testSimpleCommand(r, msg, expected_response, expected_command); RobotServer::PositionData data = r.getMoveData(); REQUIRE(data.x == 1); REQUIRE(data.y == 2); REQUIRE(data.a == 3); } TEST_CASE("Move rel", "[RobotServer]") { std::string msg = "<{'type':'move_rel','data':{'x':1,'y':2,'a':3}}>"; std::string expected_response = "<{\"type\":\"ack\",\"data\":\"move_rel\"}>"; COMMAND expected_command = COMMAND::MOVE_REL; StatusUpdater s; RobotServer r = RobotServer(s); testSimpleCommand(r, msg, expected_response, expected_command); RobotServer::PositionData data = r.getMoveData(); REQUIRE(data.x == 1); REQUIRE(data.y == 2); REQUIRE(data.a == 3); } TEST_CASE("Move fine", "[RobotServer]") { std::string msg = "<{'type':'move_fine','data':{'x':1,'y':2,'a':3}}>"; std::string expected_response = "<{\"type\":\"ack\",\"data\":\"move_fine\"}>"; COMMAND expected_command = COMMAND::MOVE_FINE; StatusUpdater s; RobotServer r = RobotServer(s); testSimpleCommand(r, msg, expected_response, expected_command); RobotServer::PositionData data = r.getMoveData(); REQUIRE(data.x == 1); REQUIRE(data.y == 2); REQUIRE(data.a == 3); } TEST_CASE("Move const vel", "[RobotServer]") { std::string msg = "<{'type':'move_const_vel','data':{'vx':1,'vy':2,'va':3,'t':4}}>"; 
std::string expected_response = "<{\"type\":\"ack\",\"data\":\"move_const_vel\"}>"; COMMAND expected_command = COMMAND::MOVE_CONST_VEL; StatusUpdater s; RobotServer r = RobotServer(s); testSimpleCommand(r, msg, expected_response, expected_command); RobotServer::VelocityData data = r.getVelocityData(); REQUIRE(data.vx == 1); REQUIRE(data.vy == 2); REQUIRE(data.va == 3); REQUIRE(data.t == 4); } TEST_CASE("Place", "[RobotServer]") { std::string msg = "<{'type':'place'}>"; std::string expected_response = "<{\"type\":\"ack\",\"data\":\"place\"}>"; COMMAND expected_command = COMMAND::PLACE_TRAY; StatusUpdater s; RobotServer r = RobotServer(s); testSimpleCommand(r, msg, expected_response, expected_command); } TEST_CASE("Load", "[RobotServer]") { std::string msg = "<{'type':'load'}>"; std::string expected_response = "<{\"type\":\"ack\",\"data\":\"load\"}>"; COMMAND expected_command = COMMAND::LOAD_TRAY; StatusUpdater s; RobotServer r = RobotServer(s); testSimpleCommand(r, msg, expected_response, expected_command); } TEST_CASE("Init tray", "[RobotServer]") { std::string msg = "<{'type':'init'}>"; std::string expected_response = "<{\"type\":\"ack\",\"data\":\"init\"}>"; COMMAND expected_command = COMMAND::INITIALIZE_TRAY; StatusUpdater s; RobotServer r = RobotServer(s); testSimpleCommand(r, msg, expected_response, expected_command); } TEST_CASE("Send position", "[RobotServer]") { std::string msg = "<{'type':'p','data':{'x':1,'y':2,'a':3}}>"; std::string expected_response = "<{\"type\":\"ack\",\"data\":\"p\"}>"; COMMAND expected_command = COMMAND::POSITION; StatusUpdater s; RobotServer r = RobotServer(s); testSimpleCommand(r, msg, expected_response, expected_command); RobotServer::PositionData data = r.getPositionData(); REQUIRE(data.x == 1); REQUIRE(data.y == 2); REQUIRE(data.a == 3); } TEST_CASE("Estop", "[RobotServer]") { std::string msg = "<{'type':'estop'}>"; std::string expected_response = "<{\"type\":\"ack\",\"data\":\"estop\"}>"; COMMAND expected_command = 
COMMAND::ESTOP; StatusUpdater s; RobotServer r = RobotServer(s); testSimpleCommand(r, msg, expected_response, expected_command); } TEST_CASE("Load complete", "[RobotServer]") { std::string msg = "<{'type':'lc'}>"; std::string expected_response = "<{\"type\":\"ack\",\"data\":\"lc\"}>"; COMMAND expected_command = COMMAND::LOAD_COMPLETE; StatusUpdater s; RobotServer r = RobotServer(s); testSimpleCommand(r, msg, expected_response, expected_command); } ================================================ FILE: src/robot/test/Robot_Test.cpp ================================================ #include #include "robot.h" #include "test-utils.h" TEST_CASE("Robot move", "[Robot]") { std::string msg = "<{'type':'move','data':{'x':0.5,'y':0.4,'a':0.3}}>"; SafeConfigModifier config_modifier("motion.fake_perfect_motion", true); MockClockWrapper* mock_clock = get_mock_clock_and_reset(); MockSocketMultiThreadWrapper* mock_socket = build_and_get_mock_socket(); Robot r = Robot(); mock_socket->sendMockData(msg); mock_clock->advance_ms(1); r.runOnce(); REQUIRE(r.getCurrentCommand() == COMMAND::MOVE); REQUIRE(r.getStatus().in_progress == true); for (int i = 0; i < 10000; i++) { r.runOnce(); mock_clock->advance_ms(1); if(r.getStatus().in_progress == false) {break;} } StatusUpdater::Status status = r.getStatus(); REQUIRE(status.pos_x == Approx(0.5).margin(0.0005)); REQUIRE(status.pos_y == Approx(0.4).margin(0.0005)); REQUIRE(status.pos_a == Approx(0.3).margin(0.0005)); } ================================================ FILE: src/robot/test/SanityCheckTest.cpp ================================================ #include #include "constants.h" // TEST_CASE("Mock socket config disabled", "[SanityCheck]") // { // bool mock_socket_enabled = cfg.lookup("mock_socket.enabled"); // REQUIRE(mock_socket_enabled == false); // } TEST_CASE("Fake perfect motion disabled", "[SanityCheck]") { bool fake_perfect_motion_enabled = cfg.lookup("motion.fake_perfect_motion"); REQUIRE(fake_perfect_motion_enabled == false); 
} ================================================ FILE: src/robot/test/SmoothTrajectoryGenerator_Test.cpp ================================================ #include #include "SmoothTrajectoryGenerator.h" #include "constants.h" // This stuff is needed to correctly print a custom type in Catch for debug namespace Catch { template<> struct StringMaker { static std::string convert(Point const& p) { return p.toString(); } }; template<> struct StringMaker { static std::string convert(Velocity const& p) { return p.toString(); } }; } TEST_CASE("SmoothTrajectoryGenerator class", "[trajectory]") { SECTION("Smoke test - don't crash") { SmoothTrajectoryGenerator stg; Point p1 = {3,4,5}; Point p2 = {1,2,3}; LIMITS_MODE limits_mode = LIMITS_MODE::COARSE; bool ok = stg.generatePointToPointTrajectory(p1, p2, limits_mode); REQUIRE(ok == true); PVTPoint output = stg.lookup(1.0); REQUIRE(output.time == 1.0); limits_mode = LIMITS_MODE::FINE; ok = stg.generatePointToPointTrajectory(p1, p2, limits_mode); REQUIRE(ok == true); output = stg.lookup(1.0); REQUIRE(output.time == 1.0); } SECTION("Final position") { SmoothTrajectoryGenerator stg; Point p1 = {0,0,0}; Point p2 = {1,2,3}; LIMITS_MODE limits_mode = LIMITS_MODE::COARSE; bool ok = stg.generatePointToPointTrajectory(p1, p2, limits_mode); REQUIRE(ok == true); //Way in the future should return the final point PVTPoint output = stg.lookup(60); REQUIRE(output.time == 60); CHECK(output.position.x == Approx(p2.x)); CHECK(output.position.y == Approx(p2.y)); CHECK(output.position.a == Approx(p2.a)); CHECK(output.velocity.vx == Approx(0.0).margin(0.0001)); CHECK(output.velocity.vy == Approx(0.0).margin(0.0001)); CHECK(output.velocity.va == Approx(0.0f).margin(0.0001)); limits_mode = LIMITS_MODE::FINE; ok = stg.generatePointToPointTrajectory(p1, p2, limits_mode); REQUIRE(ok == true); //Way in the future should return the final point output = stg.lookup(60); REQUIRE(output.time == 60); CHECK(output.position.x == Approx(p2.x)); 
CHECK(output.position.y == Approx(p2.y)); CHECK(output.position.a == Approx(p2.a)); CHECK(output.velocity.vx == Approx(0.0).margin(0.0001)); CHECK(output.velocity.vy == Approx(0.0).margin(0.0001)); CHECK(output.velocity.va == Approx(0.0).margin(0.0001)); } SECTION("Zeros") { SmoothTrajectoryGenerator stg; Point p1 = {0,0,0}; Point p2 = {10,0,0}; LIMITS_MODE limits_mode = LIMITS_MODE::COARSE; bool ok = stg.generatePointToPointTrajectory(p1, p2, limits_mode); REQUIRE(ok == true); //Way in the future should return the final point PVTPoint output = stg.lookup(60); REQUIRE(output.time == 60); CHECK(output.position.x == Approx(p2.x)); CHECK(output.position.y == Approx(p2.y)); CHECK(output.position.a == Approx(p2.a)); CHECK(output.velocity.vx == Approx(0)); CHECK(output.velocity.vy == Approx(0)); CHECK(output.velocity.va == Approx(0).margin(0.0001)); } SECTION("Lookup too early") { SmoothTrajectoryGenerator stg; Point p1 = {5,4,3}; Point p2 = {10,0,0}; LIMITS_MODE limits_mode = LIMITS_MODE::COARSE; bool ok = stg.generatePointToPointTrajectory(p1, p2, limits_mode); REQUIRE(ok == true); PVTPoint output = stg.lookup(0); REQUIRE(output.time == 0); CHECK(output.position.x == Approx(p1.x)); CHECK(output.position.y == Approx(p1.y)); CHECK(output.position.a == Approx(p1.a)); CHECK(output.velocity.vx == Approx(0)); CHECK(output.velocity.vy == Approx(0)); CHECK(output.velocity.va == Approx(0).margin(0.0001)); output = stg.lookup(-1); REQUIRE(output.time == -1); CHECK(output.position.x == Approx(p1.x)); CHECK(output.position.y == Approx(p1.y)); CHECK(output.position.a == Approx(p1.a)); CHECK(output.velocity.vx == Approx(0)); CHECK(output.velocity.vy == Approx(0)); CHECK(output.velocity.va == Approx(0).margin(0.0001)); } SECTION("Angle wrap") { SmoothTrajectoryGenerator stg; Point p1 = {0,0,3}; Point p2 = {0,0,-3}; LIMITS_MODE limits_mode = LIMITS_MODE::COARSE; bool ok = stg.generatePointToPointTrajectory(p1, p2, limits_mode); REQUIRE(ok == true); //Way in the future should return 
the final point PVTPoint output = stg.lookup(60); REQUIRE(output.time == 60); CHECK(output.position.x == Approx(p2.x)); CHECK(output.position.y == Approx(p2.y)); CHECK(output.position.a == Approx(p2.a)); CHECK(output.velocity.vx == Approx(0)); CHECK(output.velocity.vy == Approx(0)); CHECK(output.velocity.va == Approx(0).margin(0.0001)); for (float t = 0; t < 10; t+=0.1) { PVTPoint output = stg.lookup(t); REQUIRE(fabs(output.position.a) < 3.14); REQUIRE(fabs(output.position.a) > 2.99); } } SECTION("Angle non-wrap") { SmoothTrajectoryGenerator stg; Point p1 = {0,0,-1}; Point p2 = {0,0,1}; LIMITS_MODE limits_mode = LIMITS_MODE::COARSE; bool ok = stg.generatePointToPointTrajectory(p1, p2, limits_mode); REQUIRE(ok == true); //Way in the future should return the final point PVTPoint output = stg.lookup(60); REQUIRE(output.time == 60); CHECK(output.position.x == Approx(p2.x)); CHECK(output.position.y == Approx(p2.y)); CHECK(output.position.a == Approx(p2.a)); CHECK(output.velocity.vx == Approx(0)); CHECK(output.velocity.vy == Approx(0)); CHECK(output.velocity.va == Approx(0).margin(0.0001)); for (float t = 0; t < 10; t+=0.1) { PVTPoint output = stg.lookup(t); REQUIRE(fabs(output.position.a) < 1.01); } } } TEST_CASE("BuildMotionPlanningProblem", "[trajectory]") { Point p1 = {0,0,0}; Point p2 = {10,0,-3}; LIMITS_MODE limits_mode; SolverParameters solver; solver.num_loops = 10; solver.alpha_decay = 0.8; solver.beta_decay = 0.8; solver.exponent_decay = 0.1; Eigen::Vector3f expected_p1 = {0,0,0}; Eigen::Vector3f expected_p2 = {10,0,-3}; REQUIRE(static_cast(cfg.lookup("name")) == "Test constants"); REQUIRE(static_cast(cfg.lookup("motion.limit_max_fraction")) == 1.0); SECTION ("Coarse mode") { limits_mode = LIMITS_MODE::COARSE; MotionPlanningProblem mpp = buildMotionPlanningProblem(p1, p2, limits_mode, solver); REQUIRE(mpp.initialPoint == expected_p1); REQUIRE(mpp.targetPoint == expected_p2); REQUIRE(mpp.translationalLimits.max_vel == 
static_cast(cfg.lookup("motion.translation.max_vel.coarse"))); REQUIRE(mpp.translationalLimits.max_acc == static_cast(cfg.lookup("motion.translation.max_acc.coarse"))); REQUIRE(mpp.translationalLimits.max_jerk == static_cast(cfg.lookup("motion.translation.max_jerk.coarse"))); REQUIRE(mpp.rotationalLimits.max_vel ==static_cast( cfg.lookup("motion.rotation.max_vel.coarse"))); REQUIRE(mpp.rotationalLimits.max_acc == static_cast(cfg.lookup("motion.rotation.max_acc.coarse"))); REQUIRE(mpp.rotationalLimits.max_jerk == static_cast(cfg.lookup("motion.rotation.max_jerk.coarse"))); REQUIRE(mpp.solver_params.num_loops == solver.num_loops); REQUIRE(mpp.solver_params.alpha_decay == solver.alpha_decay); REQUIRE(mpp.solver_params.beta_decay == solver.beta_decay); REQUIRE(mpp.solver_params.exponent_decay == solver.exponent_decay); } SECTION ("Fine mode") { limits_mode = LIMITS_MODE::FINE; MotionPlanningProblem mpp = buildMotionPlanningProblem(p1, p2, limits_mode, solver); REQUIRE(mpp.initialPoint == expected_p1); REQUIRE(mpp.targetPoint == expected_p2); REQUIRE(mpp.translationalLimits.max_vel == static_cast(cfg.lookup("motion.translation.max_vel.fine"))); REQUIRE(mpp.translationalLimits.max_acc == static_cast(cfg.lookup("motion.translation.max_acc.fine"))); REQUIRE(mpp.translationalLimits.max_jerk == static_cast(cfg.lookup("motion.translation.max_jerk.fine"))); REQUIRE(mpp.rotationalLimits.max_vel ==static_cast( cfg.lookup("motion.rotation.max_vel.fine"))); REQUIRE(mpp.rotationalLimits.max_acc == static_cast(cfg.lookup("motion.rotation.max_acc.fine"))); REQUIRE(mpp.rotationalLimits.max_jerk == static_cast(cfg.lookup("motion.rotation.max_jerk.fine"))); REQUIRE(mpp.solver_params.num_loops == solver.num_loops); REQUIRE(mpp.solver_params.alpha_decay == solver.alpha_decay); REQUIRE(mpp.solver_params.beta_decay == solver.beta_decay); REQUIRE(mpp.solver_params.exponent_decay == solver.exponent_decay); } } TEST_CASE("generateTrajectory", "[trajectory]") { Point p1 = {0,0,0}; Point p2 = 
{10,0,-3};
    LIMITS_MODE limits_mode = LIMITS_MODE::COARSE;
    SolverParameters solver = {25, 0.8, 0.8, 0.1};
    MotionPlanningProblem mpp = buildMotionPlanningProblem(p1, p2, limits_mode, solver);
    Trajectory traj = generateTrajectory(mpp);
    REQUIRE(traj.complete == true);
    CHECK(traj.initialPoint == p1);
    CHECK(traj.trans_direction(0) == 1);
    CHECK(traj.trans_direction(1) == 0);
    CHECK(traj.rot_direction == -1);
}

TEST_CASE("generateSCurve", "[trajectory]")
{
    float dist = 10;
    DynamicLimits limits = {1, 2, 8};
    SolverParameters solver = {25, 0.8, 0.8, 0.1};
    SCurveParameters params;
    bool ok = generateSCurve(dist, limits, solver, &params);
    REQUIRE(ok == true);
    REQUIRE(params.v_lim == 1.0);
    REQUIRE(params.a_lim == 2.0);
    REQUIRE(params.j_lim == 8.0);
    float dt_v = 9.25; // Expected values
    float dt_a = 0.25;
    float dt_j = 0.25;
    CHECK(params.switch_points[0].t == 0);
    CHECK(params.switch_points[1].t == dt_j);
    CHECK(params.switch_points[2].t == dt_j + dt_a);
    CHECK(params.switch_points[3].t == 2*dt_j + dt_a);
    CHECK(params.switch_points[4].t == 2*dt_j + dt_a + dt_v);
    CHECK(params.switch_points[5].t == 3*dt_j + dt_a + dt_v);
    CHECK(params.switch_points[6].t == 3*dt_j + 2*dt_a + dt_v);
    CHECK(params.switch_points[7].t == 4*dt_j + 2*dt_a + dt_v);

    SECTION("Zero")
    {
        float dist = 0;
        DynamicLimits limits = {1, 2, 8};
        SolverParameters solver = {25, 0.8, 0.8, 0.1};
        SCurveParameters params;
        bool ok = generateSCurve(dist, limits, solver, &params);
        REQUIRE(ok == true);
        REQUIRE(params.v_lim == 0);
        REQUIRE(params.a_lim == 0);
        REQUIRE(params.j_lim == 0);
        for (int i = 0; i < 8; i++)
        {
            CHECK(params.switch_points[i].t == 0);
            CHECK(params.switch_points[i].p == 0);
            CHECK(params.switch_points[i].v == 0);
            CHECK(params.switch_points[i].a == 0);
        }
    }
    SECTION("Very close to zero")
    {
        float dist = 0.000001;
        DynamicLimits limits = {1, 2, 8};
        SolverParameters solver = {25, 0.8, 0.8, 0.1};
        SCurveParameters params;
        bool ok = generateSCurve(dist, limits, solver, &params);
        REQUIRE(ok == true);
        REQUIRE(params.v_lim == 0);
REQUIRE(params.a_lim == 0);
        REQUIRE(params.j_lim == 0);
        for (int i = 0; i < 8; i++)
        {
            CHECK(params.switch_points[i].t == 0);
            CHECK(params.switch_points[i].p == 0);
            CHECK(params.switch_points[i].v == 0);
            CHECK(params.switch_points[i].a == 0);
        }
    }
    SECTION("Small dist that is above min_dist_limit")
    {
        float dist = 0.0001;
        DynamicLimits limits = {4, 2, 1};
        SolverParameters solver = {30, 0.8, 0.8, 0.1};
        SCurveParameters params;
        bool ok = generateSCurve(dist, limits, solver, &params);
        REQUIRE(ok == true);
        REQUIRE(params.v_lim < 4);
        REQUIRE(params.a_lim < 2);
        REQUIRE(params.j_lim == 1);
    }
    SECTION("Modified v limit")
    {
        float dist = 10;
        DynamicLimits limits = {3, 1, 1};
        SolverParameters solver = {25, 0.8, 0.8, 0.1};
        SCurveParameters params;
        bool ok = generateSCurve(dist, limits, solver, &params);
        REQUIRE(ok == true);
        REQUIRE(params.v_lim < 3);
        REQUIRE(params.a_lim == 1);
        REQUIRE(params.j_lim == 1);
    }
    SECTION("Modified a limit")
    {
        float dist = 10;
        DynamicLimits limits = {1, 2, 1};
        SolverParameters solver = {25, 0.8, 0.8, 0.1};
        SCurveParameters params;
        bool ok = generateSCurve(dist, limits, solver, &params);
        REQUIRE(ok == true);
        REQUIRE(params.v_lim == 1);
        REQUIRE(params.a_lim < 2);
        REQUIRE(params.j_lim == 1);
    }
    SECTION("Modified both limits")
    {
        float dist = 10;
        DynamicLimits limits = {4, 2, 1};
        SolverParameters solver = {25, 0.8, 0.8, 0.1};
        SCurveParameters params;
        bool ok = generateSCurve(dist, limits, solver, &params);
        REQUIRE(ok == true);
        REQUIRE(params.v_lim < 4);
        REQUIRE(params.a_lim < 2);
        REQUIRE(params.j_lim == 1);
    }
}

TEST_CASE("populateSwitchTimeParameters", "[trajectory]")
{
    SCurveParameters params;
    params.v_lim = 1.0;
    params.a_lim = 2.0;
    params.j_lim = 8.0;
    float dt_v = 9.25;
    float dt_a = 0.25;
    float dt_j = 0.25;
    populateSwitchTimeParameters(&params, dt_j, dt_a, dt_v);
    REQUIRE(params.switch_points[0].t == Approx(0));
    REQUIRE(params.switch_points[0].p == Approx(0));
    REQUIRE(params.switch_points[0].v == Approx(0));
    REQUIRE(params.switch_points[0].a == Approx(0));
CHECK(params.switch_points[1].t == Approx(0.25).margin(0.001)); CHECK(params.switch_points[1].p == Approx(0.02083).margin(0.001)); CHECK(params.switch_points[1].v == Approx(0.25).margin(0.001)); CHECK(params.switch_points[1].a == Approx(2.0).margin(0.001)); CHECK(params.switch_points[2].t == Approx(0.5).margin(0.001)); CHECK(params.switch_points[2].p == Approx(0.1458).margin(0.001)); CHECK(params.switch_points[2].v == Approx(0.75).margin(0.001)); CHECK(params.switch_points[2].a == Approx(2.0).margin(0.001)); CHECK(params.switch_points[3].t == Approx(0.75).margin(0.001)); CHECK(params.switch_points[3].p == Approx(0.375).margin(0.001)); CHECK(params.switch_points[3].v == Approx(1.0).margin(0.001)); CHECK(params.switch_points[3].a == Approx(0).margin(0.001)); CHECK(params.switch_points[4].t == Approx(10.0).margin(0.001)); CHECK(params.switch_points[4].p == Approx(9.625).margin(0.001)); CHECK(params.switch_points[4].v == Approx(1.0).margin(0.001)); CHECK(params.switch_points[4].a == Approx(0).margin(0.001)); CHECK(params.switch_points[5].t == Approx(10.25).margin(0.001)); CHECK(params.switch_points[5].p == Approx(9.854).margin(0.001)); CHECK(params.switch_points[5].v == Approx(0.75).margin(0.001)); CHECK(params.switch_points[5].a == Approx(-2.0).margin(0.001)); CHECK(params.switch_points[6].t == Approx(10.5).margin(0.001)); CHECK(params.switch_points[6].p == Approx(9.98).margin(0.001)); CHECK(params.switch_points[6].v == Approx(0.25).margin(0.001)); CHECK(params.switch_points[6].a == Approx(-2.0).margin(0.001)); CHECK(params.switch_points[7].t == Approx(10.75).margin(0.001)); CHECK(params.switch_points[7].p == Approx(10).margin(0.001)); CHECK(params.switch_points[7].v == Approx(0).margin(0.001)); CHECK(params.switch_points[7].a == Approx(0).margin(0.001)); } TEST_CASE("lookup_1D", "[trajectory]") { SCurveParameters params; params.v_lim = 1.0; params.a_lim = 1.0; params.j_lim = 1.0; for (int i = 0; i < 8; i ++) { params.switch_points[i].a = 1.0; 
params.switch_points[i].v = 1.0; params.switch_points[i].p = 1.0; params.switch_points[i].t = i; } // Need to manually set a for const vel region params.switch_points[3].a = 0; SECTION("Region 1 - Const Positive Jerk") { int region = 1; float time = static_cast(region-1) + 0.5; std::vector test_vec = lookup_1D(time, params); float expected_v = 1.625; float expected_p = 1.645; REQUIRE(test_vec[1] == Approx(expected_v).margin(0.001)); REQUIRE(test_vec[0] == Approx(expected_p).margin(0.001)); } SECTION("Region 2 - Const Positive Acc") { int region = 2; float time = static_cast(region-1) + 0.5; std::vector test_vec = lookup_1D(time, params); float expected_v = 1.5; float expected_p = 1.625; REQUIRE(test_vec[1] == Approx(expected_v).margin(0.001)); REQUIRE(test_vec[0] == Approx(expected_p).margin(0.001)); } SECTION("Region 3 - Const Negative Jerk") { int region = 3; float time = static_cast(region-1) + 0.5; std::vector test_vec = lookup_1D(time, params); float expected_v = 1.375; float expected_p = 1.604; REQUIRE(test_vec[1] == Approx(expected_v).margin(0.001)); REQUIRE(test_vec[0] == Approx(expected_p).margin(0.001)); } SECTION("Region 4 - Const Vel") { int region = 4; float time = static_cast(region-1) + 0.5; std::vector test_vec = lookup_1D(time, params); float expected_v = 1.0; float expected_p = 1.5; REQUIRE(test_vec[1] == Approx(expected_v).margin(0.001)); REQUIRE(test_vec[0] == Approx(expected_p).margin(0.001)); } SECTION("Region 5 - Const Negative Jerk") { int region = 5; float time = static_cast(region-1) + 0.5; std::vector test_vec = lookup_1D(time, params); float expected_v = 1.375; float expected_p = 1.604; REQUIRE(test_vec[1] == Approx(expected_v).margin(0.001)); REQUIRE(test_vec[0] == Approx(expected_p).margin(0.001)); } SECTION("Region 6 - Const Negative Acc") { int region = 6; float time = static_cast(region-1) + 0.5; std::vector test_vec = lookup_1D(time, params); float expected_v = 1.5; float expected_p = 1.625; REQUIRE(test_vec[1] == 
Approx(expected_v).margin(0.001)); REQUIRE(test_vec[0] == Approx(expected_p).margin(0.001)); } SECTION("Region 7 - Const Positive Jerk") { int region = 7; float time = static_cast(region-1) + 0.5; std::vector test_vec = lookup_1D(time, params); float expected_v = 1.625; float expected_p = 1.645; REQUIRE(test_vec[1] == Approx(expected_v).margin(0.001)); REQUIRE(test_vec[0] == Approx(expected_p).margin(0.001)); } } TEST_CASE("computeKinematicsBasedOnRegion", "[trajectory]") { SCurveParameters params; params.v_lim = 1.0; params.a_lim = 1.0; params.j_lim = 1.0; for (int i = 0; i < 8; i ++) { params.switch_points[i].a = 1.0; params.switch_points[i].v = 1.0; params.switch_points[i].p = 1.0; params.switch_points[i].t = i; } // Need to manually set a for const vel region params.switch_points[3].a = 0; SECTION("Region 1 - Const Positive Jerk") { int region = 1; float dt = 0.5; std::vector test_vec = computeKinematicsBasedOnRegion(params, region, dt); float expected_a = 1.5; float expected_v = 1.625; float expected_p = 1.645; REQUIRE(test_vec[0] == Approx(expected_a).margin(0.001)); REQUIRE(test_vec[1] == Approx(expected_v).margin(0.001)); REQUIRE(test_vec[2] == Approx(expected_p).margin(0.001)); } SECTION("Region 2 - Const Positive Acc") { int region = 2; float dt = 0.5; std::vector test_vec = computeKinematicsBasedOnRegion(params, region, dt); float expected_a = 1.0; float expected_v = 1.5; float expected_p = 1.625; REQUIRE(test_vec[0] == Approx(expected_a).margin(0.001)); REQUIRE(test_vec[1] == Approx(expected_v).margin(0.001)); REQUIRE(test_vec[2] == Approx(expected_p).margin(0.001)); } SECTION("Region 3 - Const Negative Jerk") { int region = 3; float dt = 0.5; std::vector test_vec = computeKinematicsBasedOnRegion(params, region, dt); float expected_a = 0.5; float expected_v = 1.375; float expected_p = 1.604; REQUIRE(test_vec[0] == Approx(expected_a).margin(0.001)); REQUIRE(test_vec[1] == Approx(expected_v).margin(0.001)); REQUIRE(test_vec[2] == 
Approx(expected_p).margin(0.001)); } SECTION("Region 4 - Const Vel") { int region = 4; float dt = 0.5; std::vector test_vec = computeKinematicsBasedOnRegion(params, region, dt); float expected_a = 0; float expected_v = 1.0; float expected_p = 1.5; REQUIRE(test_vec[0] == Approx(expected_a).margin(0.001)); REQUIRE(test_vec[1] == Approx(expected_v).margin(0.001)); REQUIRE(test_vec[2] == Approx(expected_p).margin(0.001)); } SECTION("Region 5 - Const Negative Jerk") { int region = 5; float dt = 0.5; std::vector test_vec = computeKinematicsBasedOnRegion(params, region, dt); float expected_a = 0.5; float expected_v = 1.375; float expected_p = 1.604; REQUIRE(test_vec[0] == Approx(expected_a).margin(0.001)); REQUIRE(test_vec[1] == Approx(expected_v).margin(0.001)); REQUIRE(test_vec[2] == Approx(expected_p).margin(0.001)); } SECTION("Region 6 - Const Negative Acc") { int region = 6; float dt = 0.5; std::vector test_vec = computeKinematicsBasedOnRegion(params, region, dt); float expected_a = -1.0; float expected_v = 1.5; float expected_p = 1.625; REQUIRE(test_vec[0] == Approx(expected_a).margin(0.001)); REQUIRE(test_vec[1] == Approx(expected_v).margin(0.001)); REQUIRE(test_vec[2] == Approx(expected_p).margin(0.001)); } SECTION("Region 7 - Const Positive Jerk") { int region = 7; float dt = 0.5; std::vector test_vec = computeKinematicsBasedOnRegion(params, region, dt); float expected_a = 1.5; float expected_v = 1.625; float expected_p = 1.645; REQUIRE(test_vec[0] == Approx(expected_a).margin(0.001)); REQUIRE(test_vec[1] == Approx(expected_v).margin(0.001)); REQUIRE(test_vec[2] == Approx(expected_p).margin(0.001)); } } TEST_CASE("synchronizeParameters", "[trajectory]") { // These values should all be valid together std::vector switch_times {0.000, 0.200, 0.500, 0.700, 6.000, 6.200, 6.500, 6.700}; float dt_v = 5.3; float dt_a = 0.3; float dt_j = 0.2; float dp = 3.0; SCurveParameters valid_params; valid_params.v_lim = 0.5; valid_params.a_lim = 1.0; valid_params.j_lim = 5.0; 
populateSwitchTimeParameters(&valid_params, dt_j, dt_a, dt_v);
    for (int i = 0; i < 8; i++)
    {
        REQUIRE(valid_params.switch_points[i].t == switch_times[i]);
    }
    REQUIRE(valid_params.switch_points[7].p == dp);

    SCurveParameters params_to_test = valid_params;
    params_to_test.v_lim = 0.0;
    params_to_test.a_lim = 0.0;
    params_to_test.j_lim = 0.0;

    SECTION("solveInverse")
    {
        bool ok = solveInverse(&params_to_test);
        CHECK(ok == true);
        CHECK(params_to_test.v_lim == Approx(valid_params.v_lim));
        CHECK(params_to_test.a_lim == Approx(valid_params.a_lim));
        CHECK(params_to_test.j_lim == Approx(valid_params.j_lim));
    }
    SECTION("slowDownParamsToMatchTime")
    {
        for (int i = 0; i < 8; i++)
        {
            params_to_test.switch_points[i].t /= 2.0;
        }
        bool ok = slowDownParamsToMatchTime(&params_to_test, switch_times[7]);
        CHECK(ok == true);
        CHECK(params_to_test.switch_points[7].t == switch_times[7]);
        CHECK(params_to_test.switch_points[7].p == Approx(dp));
    }
    SECTION("synchronize")
    {
        for (int i = 0; i < 8; i++)
        {
            params_to_test.switch_points[i].t /= 2.0;
        }
        SCurveParameters params_to_test2 = valid_params;
        bool ok = synchronizeParameters(&params_to_test, &params_to_test2);
        CHECK(ok == true);
        CHECK(params_to_test.switch_points[7].t == switch_times[7]);
        CHECK(params_to_test.switch_points[7].p == Approx(dp));
        CHECK(params_to_test2.switch_points[7].t == switch_times[7]);
        CHECK(params_to_test2.switch_points[7].p == Approx(dp));
        for (int i = 0; i < 8; i++)
        {
            CHECK(params_to_test2.switch_points[i].t == switch_times[i]);
        }
    }
}

================================================
FILE: src/robot/test/SocketWrapper_Test.cpp
================================================
#include
#include
#include "sockets/ClientSocket.h"
#include "sockets/SocketException.h"
#include "sockets/SocketMultiThreadWrapper.h"

void SimpleSocketSend(std::string data_to_send)
{
    usleep(1000);
    ClientSocket test_socket ( "localhost", 8123 );

    //Send message
    test_socket << data_to_send;

    // Brief wait to ensure thread processed the sent message
    usleep(1000);
}
TEST_CASE("Socket send test", "[socket]") { SocketMultiThreadWrapper s; std::string test_msg = "This is a test"; for(int i = 0; i < 10; i++) { test_msg += std::to_string(i); SimpleSocketSend(test_msg); REQUIRE(s.dataAvailableToRead()); REQUIRE(s.getData() == test_msg); } } // TEST_CASE("Socket recv test", "[socket]") // { // SocketMultiThreadWrapper s; // std::string test_msg = "This is a test"; // for(int i = 0; i < 10; i++) // { // test_msg += std::to_string(i); // usleep(1000); // ClientSocket test_socket ( "localhost", 8123 ); // s.sendData(test_msg); // std::string data; // test_socket >> data; // REQUIRE(data == test_msg); // } // } ================================================ FILE: src/robot/test/StatusUpdater_Test.cpp ================================================ #include #include "StatusUpdater.h" using Catch::Matchers::StartsWith; using Catch::Matchers::EndsWith; using Catch::Matchers::Contains; TEST_CASE("Position", "[StatusUpdater]") { StatusUpdater s; s.updatePosition(1,2,3); StatusUpdater::Status status = s.getStatus(); REQUIRE(status.pos_x == 1); REQUIRE(status.pos_y == 2); REQUIRE(status.pos_a == 3); } TEST_CASE("Velocity", "[StatusUpdater]") { StatusUpdater s; s.updateVelocity(1,2,3); StatusUpdater::Status status = s.getStatus(); REQUIRE(status.vel_x == 1); REQUIRE(status.vel_y == 2); REQUIRE(status.vel_a == 3); } TEST_CASE("LoopTimes", "[StatusUpdater]") { StatusUpdater s; s.updateControlLoopTime(1); s.updatePositionLoopTime(2); StatusUpdater::Status status = s.getStatus(); REQUIRE(status.controller_loop_ms == 1); REQUIRE(status.position_loop_ms == 2); } TEST_CASE("Progress", "[StatusUpdater]") { StatusUpdater s; StatusUpdater::Status status = s.getStatus(); REQUIRE(status.in_progress == false); REQUIRE(s.getInProgress() == false); s.updateInProgress(true); status = s.getStatus(); REQUIRE(status.in_progress == true); REQUIRE(s.getInProgress() == true); } TEST_CASE("JSON", "[StatusUpdater]") { StatusUpdater s; s.updatePosition(1,2,3); 
s.updateVelocity(4,5,6); s.updateControlLoopTime(7); s.updatePositionLoopTime(8); s.updateInProgress(true); std::string json_string = s.getStatusJsonString(); REQUIRE_THAT(json_string, StartsWith("{")); REQUIRE_THAT(json_string, Contains("\"pos_x\":1")); REQUIRE_THAT(json_string, Contains("\"pos_y\":2")); REQUIRE_THAT(json_string, Contains("\"pos_a\":3")); REQUIRE_THAT(json_string, Contains("\"vel_x\":4")); REQUIRE_THAT(json_string, Contains("\"vel_y\":5")); REQUIRE_THAT(json_string, Contains("\"vel_a\":6")); REQUIRE_THAT(json_string, Contains("\"controller_loop_ms\":7")); REQUIRE_THAT(json_string, Contains("\"position_loop_ms\":8")); REQUIRE_THAT(json_string, EndsWith("}")); } ================================================ FILE: src/robot/test/TrayController_Test.cpp ================================================ #include #include "TrayController.h" #include "test-utils.h" TEST_CASE("Send and waiting", "[TrayController]") { MockSerialComms* mock_serial = build_and_get_mock_serial(CLEARCORE_USB);; MockClockWrapper* mock_clock = get_mock_clock_and_reset(); TrayController t; t.initialize(); REQUIRE(mock_serial->mock_rcv_lift() == ""); REQUIRE(t.isActionRunning() == true); // Expect to receive close command once and then start status requests mock_clock->advance_ms(1); t.update(); REQUIRE(mock_serial->mock_rcv_lift() == "close"); // Expect to receive status request command while lifter reports closing for (int i = 0; i < 5; i ++) { mock_clock->advance_ms(1); mock_serial->mock_send("lift:close"); t.update(); REQUIRE(mock_serial->mock_rcv_lift() == "status_req"); } // Expect to receive none response once lifter reports no action mock_clock->advance_ms(1000); mock_serial->mock_send("lift:none"); t.update(); REQUIRE(mock_serial->mock_rcv_lift() == "status_req"); REQUIRE(mock_serial->mock_rcv_lift() == "home"); } TEST_CASE("Initialize tray", "[TrayController]") { MockSerialComms* mock_serial = build_and_get_mock_serial(CLEARCORE_USB);; MockClockWrapper* mock_clock = 
get_mock_clock_and_reset(); TrayController t; t.initialize(); REQUIRE(mock_serial->mock_rcv_lift() == ""); REQUIRE(t.isActionRunning() == true); mock_clock->advance_ms(1500); t.update(); REQUIRE(mock_serial->mock_rcv_lift() == "close"); mock_serial->mock_send("lift:none"); mock_clock->advance_ms(1500); t.update(); REQUIRE(mock_serial->mock_rcv_lift() == "status_req"); REQUIRE(mock_serial->mock_rcv_lift() == "home"); mock_serial->mock_send("lift:none"); mock_clock->advance_ms(1500); t.update(); float revs = cfg.lookup("tray.default_pos_revs"); int steps_per_rev = cfg.lookup("tray.steps_per_rev"); int steps = steps_per_rev * revs; REQUIRE(mock_serial->mock_rcv_lift() == "status_req"); REQUIRE(mock_serial->mock_rcv_lift() == "pos:"+std::to_string(steps)); mock_serial->mock_send("lift:none"); mock_clock->advance_ms(1500); t.update(); REQUIRE(t.isActionRunning() == false); } TEST_CASE("Place tray", "[TrayController]") { MockSerialComms* mock_serial = build_and_get_mock_serial(CLEARCORE_USB);; MockClockWrapper* mock_clock = get_mock_clock_and_reset(); TrayController t; t.setTrayInitialized(true); bool status = t.place(); REQUIRE(status == true); REQUIRE(mock_serial->mock_rcv_lift() == ""); REQUIRE(t.isActionRunning() == true); mock_clock->advance_ms(1500); t.update(); float revs = cfg.lookup("tray.place_pos_revs"); int steps_per_rev = cfg.lookup("tray.steps_per_rev"); int steps = steps_per_rev * revs; REQUIRE(mock_serial->mock_rcv_lift() == "pos:"+std::to_string(steps)); mock_serial->mock_send("lift:none"); mock_clock->advance_ms(1500); t.update(); REQUIRE(mock_serial->mock_rcv_lift() == "status_req"); REQUIRE(mock_serial->mock_rcv_lift() == "open"); mock_serial->mock_send("lift:none"); mock_clock->advance_ms(1500); t.update(); revs = cfg.lookup("tray.default_pos_revs"); steps = steps_per_rev * revs; REQUIRE(mock_serial->mock_rcv_lift() == "status_req"); REQUIRE(mock_serial->mock_rcv_lift() == "pos:"+std::to_string(steps)); mock_serial->mock_send("lift:none"); 
mock_clock->advance_ms(1500); t.update(); REQUIRE(mock_serial->mock_rcv_lift() == "status_req"); REQUIRE(mock_serial->mock_rcv_lift() == "close"); mock_serial->mock_send("lift:none"); mock_clock->advance_ms(1500); t.update(); REQUIRE(t.isActionRunning() == false); } TEST_CASE("Load tray", "[TrayController]") { MockSerialComms* mock_serial = build_and_get_mock_serial(CLEARCORE_USB);; MockClockWrapper* mock_clock = get_mock_clock_and_reset(); TrayController t; t.setTrayInitialized(true); bool status = t.load(); REQUIRE(status == true); REQUIRE(mock_serial->mock_rcv_lift() == ""); REQUIRE(t.isActionRunning() == true); mock_clock->advance_ms(1500); t.update(); float revs = cfg.lookup("tray.load_pos_revs"); int steps_per_rev = cfg.lookup("tray.steps_per_rev"); int steps = steps_per_rev * revs; REQUIRE(mock_serial->mock_rcv_lift() == "pos:"+std::to_string(steps)); mock_serial->mock_send("lift:none"); mock_clock->advance_ms(1500); t.update(); REQUIRE(mock_serial->mock_rcv_lift() == "status_req"); REQUIRE(mock_serial->mock_rcv_lift() == ""); t.setLoadComplete(); mock_clock->advance_ms(1500); t.update(); revs = cfg.lookup("tray.default_pos_revs"); steps = steps_per_rev * revs; REQUIRE(mock_serial->mock_rcv_lift() == "pos:"+std::to_string(steps)); mock_serial->mock_send("lift:none"); mock_clock->advance_ms(1500); t.update(); REQUIRE(t.isActionRunning() == false); } TEST_CASE("Errors when tray not initialized", "[TrayController]") { TrayController t; t.setTrayInitialized(false); REQUIRE(t.place() == false); REQUIRE(t.load() == false); } ================================================ FILE: src/robot/test/test-main.cpp ================================================ #define CATCH_CONFIG_RUNNER #include #include "constants.h" #include "serial/SerialCommsFactory.h" #include "sockets/SocketMultiThreadWrapperFactory.h" #include "camera_tracker/CameraTrackerFactory.h" #include "utils.h" #include #include #include #include #include #include libconfig::Config cfg = 
libconfig::Config(); void configure_logger() { // Make file names std::string test_log_file_name = std::string("log/test_log.txt"); // Initialize test logs to to go file and console std::string log_level = cfg.lookup("log_level"); plog::Severity severity = plog::severityFromString(log_level.c_str()); static plog::RollingFileAppender fileAppender(test_log_file_name.c_str(), 1000000, 5); static plog::ColorConsoleAppender consoleAppender; plog::init(severity, &fileAppender).addAppender(&consoleAppender); PLOGI << "Logger ready"; } int main( int argc, char* argv[] ) { cfg.readFile(TEST_CONSTANTS_FILE); configure_logger(); SerialCommsFactory::getFactoryInstance()->set_mode(SERIAL_FACTORY_MODE::MOCK); SocketMultiThreadWrapperFactory::getFactoryInstance()->set_mode(SOCKET_FACTORY_MODE::MOCK); ClockFactory::getFactoryInstance()->set_mode(CLOCK_FACTORY_MODE::MOCK); CameraTrackerFactory::getFactoryInstance()->set_mode(CAMERA_TRACKER_FACTORY_MODE::MOCK); int result = Catch::Session().run(argc, argv); return result; } ================================================ FILE: src/robot/test/test-utils.h ================================================ #ifndef testutils_h #define testutils_h #include "serial/MockSerialComms.h" #include "serial/SerialCommsFactory.h" #include "constants.h" #include "sockets/SocketMultiThreadWrapperFactory.h" #include "sockets/MockSocketMultiThreadWrapper.h" #include "utils.h" #include inline MockSocketMultiThreadWrapper* build_and_get_mock_socket() { SocketMultiThreadWrapperBase* base_socket = SocketMultiThreadWrapperFactory::getFactoryInstance()->get_socket(); // Slightly dangerous.... MockSocketMultiThreadWrapper* mock_socket = dynamic_cast(base_socket); mock_socket->purge_data(); return mock_socket; } inline MockSerialComms* build_and_get_mock_serial(const std::string& portName) { SerialCommsBase* base_serial = SerialCommsFactory::getFactoryInstance()->get_serial_comms(portName); // Slightly dangerous.... 
MockSerialComms* mock_serial = dynamic_cast(base_serial); mock_serial->purge_data(); return mock_serial; } inline MockClockWrapper* get_mock_clock() { ClockWrapperBase* base_clock = ClockFactory::getFactoryInstance()->get_clock(); MockClockWrapper* mock_clock = dynamic_cast(base_clock); return mock_clock; } inline MockClockWrapper* get_mock_clock_and_reset() { MockClockWrapper* mock_clock = get_mock_clock(); mock_clock->set_now(); return mock_clock; } inline void reset_mock_clock() { MockClockWrapper* mock_clock = get_mock_clock(); mock_clock->set_now(); } // Helper class to modify a global setting which will revert the setting change when going out of scope template class SafeConfigModifier { public: SafeConfigModifier(std::string config_path, T value) : cur_val_(cfg.lookup(config_path)), old_val_(static_cast(cur_val_)) { cur_val_ = value; } ~SafeConfigModifier() { cur_val_ = old_val_; } private: libconfig::Setting& cur_val_; T old_val_; }; #endif ================================================ FILE: src/robot/test/test_constants.cfg ================================================ name = "Test constants"; log_level = "info"; // verbose, debug, info, warning, error, fatal, none motion = { limit_max_fraction = 1.0; // Only generate a trajectory to this fraction of max speed to give motors headroom to compensate controller_frequency = 40; // Hz for RobotController log_frequency = 20 ; // HZ for logging to motion log fake_perfect_motion = false; // Enable or disable bypassing clearcore to fake perfect motion for testing rate_always_ready = true; // Bypasses rate limiter if set to true translation = { max_vel = { vision = 1.0; // m/s fine = 1.0; // m/s coarse = 1.0; // m/s }; max_acc = { vision = 2.0; // m/s^2 fine = 2.0; // m/s^2 coarse = 2.0; // m/s^2 }; max_jerk = { vision = 8.0; // m/s^3 fine = 8.0; // m/s^3 coarse = 8.0; // m/s^3 }; position_threshold = { vision = 0.01; // m fine = 0.01; // m coarse = 0.10; // m }; velocity_threshold = { vision = 0.01; // m/s 
fine = 0.01; // m/s coarse = 0.05; // m/s }; gains = { kp = 1.0; ki = 0.0; kd = 0.0; }; gains_vision = { kp = 1.0; ki = 0.0; kd = 0.0; }; }; rotation = { max_vel = { vision = 0.5; // rad/s fine = 0.5; // rad/s coarse = 0.5; // rad/s }; max_acc = { vision = 1.0; // rad/s^2 fine = 1.0; // rad/s^2 coarse = 1.0; // rad/s^2 }; max_jerk = { vision = 5.0; // rad/s^3 fine = 5.0; // rad/s^3 coarse = 5.0; // rad/s^3 }; position_threshold = { vision = 0.02; // rad fine = 0.02; // rad coarse = 0.08; // rad }; velocity_threshold = { vision = 0.01; // rad/s fine = 0.01; // rad/s coarse = 0.05; //rad/s }; gains = { kp = 1.0; ki = 0.0; kd = 0.0; }; gains_vision = { kp = 1.0; ki = 0.0; kd = 0.0; }; }; }; physical = { wheel_diameter = 1.0; // m wheel_dist_from_center = 0.5; // m }; trajectory_generation = { solver_max_loops = 30; // Only let the solver loop this many times before giving up solver_alpha_decay = 0.8; // Decay for velocity limit solver_beta_decay = 0.8; // Decay for acceleration limit solver_exponent_decay = 0.1; // Decay expoenent to apply each loop min_dist_limit = 0.0001; // Smallest value solver will attempt to solve for }; tray = { default_pos_revs = 1.2; // Default position for driving in revs from home load_pos_revs = 5.0; // Loading position in revs from home place_pos_revs = 67.0; // Placing position in revs from home steps_per_rev = 800; // Number of steps per motor rev controller_frequency = 20000 ; // Hz for controller rate - large so that tests don't skip updates fake_tray_motions = false; // Flag to fake tray motions for testing }; localization = { mm_x_offset = -55.0; // X offset of mm pair center from center of rotation (in millimeters, + is in front, - is behind) mm_y_offset = 0.0; // Y offset of mm pair center from center of rotation (in millimeters, + is to left, - is to right) max_wait_time = 10.0; // Max time to wait (s) when waiting for good localization confidence_for_wait = 0.95; // Confidence level required for waiting for localization 
kf_predict_trans_cov = 0.1; // How much noise is expected in the prediction step for position, lower is less noise
kf_predict_angle_cov = 0.1; // How much noise is expected in the prediction step for angle, lower is less noise
kf_meas_trans_cov = 0.1; // How much noise is expected in the marvelmind measurements of position, lower is less noise
kf_meas_angle_cov = 0.1; // How much noise is expected in the marvelmind measurements of angle, lower is less noise
kf_uncertainty_scale = 1.0; // Scale factor for how much extra noise to add based on position confidence
variance_ref_trans = 1.0; // Max position variance for 0 confidence
variance_ref_angle = 1.57; // Max angle variance for 0 confidence
min_vel_uncertainty = 0.5; // Minimum uncertainty for any nonzero velocity
vel_uncertainty_slope = 0.5; // Uncertainty/velocity scale
max_vel_uncetainty = 1.0; // Uncertainty cap (NOTE: key spelling kept as-is -- consumer code looks this setting up by this exact name)
vel_uncertainty_decay_time = 2.0; // Number of seconds after completing motion until uncertainty goes to 0
};
// USB ports on pi mapping to v4l path num
// USB2 - 3 | USB3 - 1 | Eth
// USB2 - 4 | USB3 - 2 |
vision_tracker = {
    side = {
        camera_path = "/dev/v4l/by-path/platform-fd500000.pcie-pci-0000:01:00.0-usb-0:1.1:1.0-video-index0";
        calibration_file = "/home/pi/IR_calibration_2.yml"; // Calibration data
        debug_output_path = "/home/pi/images/debug/side/"; // Path to output debug images
        debug_image = "/home/pi/images/dev/side_raw_1.jpg"; // Debug image path
        resolution_scale_x = 0.25; // Scale factor from calibration resolution to onboard resolution
        resolution_scale_y = 0.333333; // Scale factor from calibration resolution to onboard resolution
    }
    rear = {
        camera_path = "/dev/v4l/by-path/platform-fd500000.pcie-pci-0000:01:00.0-usb-0:1.2:1.0-video-index0";
        calibration_file = "/home/pi/IR_calibration_1.yml"; // Calibration data
        debug_output_path = "/home/pi/images/debug/rear/"; // Path to output debug images
        debug_image = "/home/pi/images/dev/rear_raw_1.jpg"; // Debug image path
        resolution_scale_x = 0.25; //
Scale factor from calibration resolution to onboard resolution
        resolution_scale_y = 0.333333; // Scale factor from calibration resolution to onboard resolution
    }
    debug = {
        use_debug_image = true; // Use debug image instead of loading camera
        save_camera_debug = false;
    }
    detection = {
        threshold = 235; // Threshold value for image processing
        blob = {
            use_area = true; // Use area for blob detection
            min_area = 80.0; // Min area for blob detection
            max_area = 500.0; // Max area for blob detection
            use_circularity = true; // Use circularity for blob detection
            min_circularity = 0.6; // Min circularity for blob detection
            max_circularity = 1.0; // Max circularity for blob detection
        }
    }
    physical = {
        pixels_per_meter_u = 1362.0; // Hack to try and get something working
        pixels_per_meter_v = 1411.0;
        side = {
            x_offset = 0.0; // x offset (meters) from robot frame
            y_offset = -1.5; // y offset (meters) from robot frame
            z_offset = 0.55; // z offset (meters) from robot frame
            target_x = 0.0; // target x coord (meters) in robot frame
            target_y = -1.4; // target y coord (meters) in robot frame
        }
        rear = {
            x_offset = -1.5; // x offset (meters) from robot frame
            y_offset = 0.0; // y offset (meters) from robot frame
            z_offset = 0.55; // z offset (meters) from robot frame
            target_x = -1.4; // target x coord (meters) in robot frame
            target_y = 0.0; // target y coord (meters) in robot frame
        }
    }
    kf = {
        predict_trans_cov = 0.0001; // How much noise is expected in the prediction step (each timestep) for position, lower is less noise
        predict_angle_cov = 0.001; // How much noise is expected in the prediction step (each timestep) for angle, lower is less noise
        meas_trans_cov = 0.01; // How much noise is expected in the update step for position, lower is less noise
        meas_angle_cov = 1.0; // How much noise is expected in the update step for angle, lower is less noise
    }
};


================================================
FILE: src/robot/test/utils_Test.cpp
================================================
#include #include #include "utils.h" #include "test-utils.h" TEST_CASE("Sign util test", "[utils]") { REQUIRE(sgn(1) == 1); REQUIRE(sgn(-1) == -1); REQUIRE(sgn(0) == 0); REQUIRE(sgn(1.234) == 1); REQUIRE(sgn(-2.854) == -1); REQUIRE(sgn(-0.0000001) == -1); } TEST_CASE("Angle wrap test", "[utils]") { REQUIRE(Approx(wrap_angle(M_PI-0.1)) == M_PI-0.1); REQUIRE(Approx(wrap_angle(-M_PI+0.1)) == -M_PI+0.1); REQUIRE(Approx(wrap_angle(0)) == 0); REQUIRE(Approx(wrap_angle(2*M_PI)).margin(0.0001) == 0); REQUIRE(Approx(wrap_angle(1.5*M_PI)) == -0.5*M_PI); REQUIRE(Approx(wrap_angle(-1.5*M_PI)) == 0.5*M_PI); REQUIRE(Approx(wrap_angle(4.5*M_PI)) == 0.5*M_PI); } TEST_CASE("Angle diff test", "[utils]") { REQUIRE(angle_diff(0,0) == 0); REQUIRE(angle_diff(M_PI,M_PI) == 0); REQUIRE(Approx(angle_diff(0,2*M_PI)).margin(0.0001) == 0); REQUIRE(Approx(angle_diff(1.5*M_PI,0.5*M_PI)) == M_PI); REQUIRE(Approx(angle_diff(1.5*M_PI,3*M_PI)) == 0.5*M_PI); REQUIRE(Approx(angle_diff(-1.5*M_PI,3*M_PI)) == -0.5*M_PI); } TEST_CASE("TimeRunningAverage - Test constant time pauses", "[utils]") { int window = 10; int sleep_time_us = 2000; TimeRunningAverage T = TimeRunningAverage(window); MockClockWrapper* mock_clock = get_mock_clock_and_reset(); for(int i = 0; i < window+1; i++) { T.mark_point(); mock_clock->advance_us(sleep_time_us); } REQUIRE(T.get_ms() == sleep_time_us/1000); REQUIRE(T.get_sec() == Approx(sleep_time_us/1000000.0)); SECTION("Wrap buffer") { for(int i = 0; i < window+1; i++) { T.mark_point(); mock_clock->advance_us(sleep_time_us); } REQUIRE(T.get_ms() == sleep_time_us/1000); REQUIRE(T.get_sec() == Approx(sleep_time_us/1000000.0)); } } TEST_CASE("TimeRunningAverage - Test larger buffer", "[utils]") { int window = 100; int sleep_time_us = 1000; TimeRunningAverage T = TimeRunningAverage(window); MockClockWrapper* mock_clock = get_mock_clock_and_reset(); for(int i = 0; i < window+1; i++) { T.mark_point(); mock_clock->advance_us(sleep_time_us); } REQUIRE(T.get_ms() == sleep_time_us/1000); 
REQUIRE(T.get_sec() == Approx(sleep_time_us/1000000.0)); SECTION("Wrap buffer") { for(int i = 0; i < window+1; i++) { T.mark_point(); mock_clock->advance_us(sleep_time_us); } REQUIRE(T.get_ms() == sleep_time_us/1000); REQUIRE(T.get_sec() == Approx(sleep_time_us/1000000.0)); } } TEST_CASE("TimeRunningAverage - Test no data", "[utils]") { TimeRunningAverage T = TimeRunningAverage(100); REQUIRE(T.get_ms() == 0); REQUIRE(T.get_sec() == 0); } TEST_CASE("TimeRunningAverage - Variable timing", "[utils]") { int window = 10; int sleep_time_us = 2000; TimeRunningAverage T = TimeRunningAverage(window); MockClockWrapper* mock_clock = get_mock_clock_and_reset(); SECTION("Partial buffer") { for(int i = 0; i < 6; i++) { T.mark_point(); mock_clock->advance_us(sleep_time_us); sleep_time_us += 2000; } REQUIRE(T.get_ms() == 6); SECTION("Full buffer") { for(int i = 0; i < 5; i++) { T.mark_point(); mock_clock->advance_us(sleep_time_us); sleep_time_us += 2000; } REQUIRE(T.get_ms() == 11); } } } TEST_CASE("Rate controller - slow", "[utils]") { SafeConfigModifier config_modifier("motion.rate_always_ready", false); SECTION("Slow") { MockClockWrapper* mock_clock = get_mock_clock_and_reset(); RateController r(1); REQUIRE(r.ready() == false); mock_clock->advance_sec(0.5); REQUIRE(r.ready() == false); mock_clock->advance_sec(0.55); REQUIRE(r.ready() == true); REQUIRE(r.ready() == false); } SECTION("Fast") { MockClockWrapper* mock_clock = get_mock_clock_and_reset(); RateController r(1000); REQUIRE(r.ready() == false); mock_clock->advance_us(500); REQUIRE(r.ready() == false); mock_clock->advance_us(600); REQUIRE(r.ready() == true); REQUIRE(r.ready() == false); mock_clock->advance_us(1100); REQUIRE(r.ready() == true); } } TEST_CASE("MockClockWrapper", "[utils]") { MockClockWrapper* mock_clock = get_mock_clock_and_reset(); SECTION("Microseconds") { int count = 100; ClockTimePoint point = mock_clock->now(); mock_clock->advance_us(count); ClockTimePoint new_point = mock_clock->now(); 
std::chrono::microseconds test_duration = std::chrono::duration_cast(new_point - point); REQUIRE(test_duration.count() == count); } SECTION("Milliseconds") { int count = 100; ClockTimePoint point = mock_clock->now(); mock_clock->advance_ms(count); ClockTimePoint new_point = mock_clock->now(); std::chrono::milliseconds test_duration = std::chrono::duration_cast(new_point - point); REQUIRE(test_duration.count() == count); } SECTION("Seconds") { float count = 5.2; ClockTimePoint point = mock_clock->now(); mock_clock->advance_sec(count); ClockTimePoint new_point = mock_clock->now(); FpSeconds test_duration = std::chrono::duration_cast(new_point - point); REQUIRE(test_duration.count() == count); } SECTION("Set") { ClockTimePoint point = std::chrono::steady_clock::now(); mock_clock->set(point); REQUIRE(mock_clock->now() == point); } } TEST_CASE("ClockWrapper", "[utils]") { ClockWrapper clock; ClockTimePoint p1 = std::chrono::steady_clock::now(); usleep(1); ClockTimePoint p2 = clock.now(); std::chrono::microseconds test_duration = std::chrono::duration_cast(p2 - p1); REQUIRE(test_duration.count() > 0); REQUIRE(test_duration.count() < 1000); } TEST_CASE("Timer", "[utils]") { MockClockWrapper* mock_clock = get_mock_clock_and_reset(); Timer t; REQUIRE(t.dt_s() == 0); REQUIRE(t.dt_ms() == 0); REQUIRE(t.dt_us() == 0); mock_clock->advance_sec(0.5); REQUIRE(t.dt_s() == 0.5f); REQUIRE(t.dt_ms() == 500); REQUIRE(t.dt_us() == 500000); t.reset(); REQUIRE(t.dt_s() == 0); REQUIRE(t.dt_ms() == 0); REQUIRE(t.dt_us() == 0); mock_clock->advance_ms(100); REQUIRE(t.dt_s() == 0.1f); REQUIRE(t.dt_ms() == 100); REQUIRE(t.dt_us() == 100000); t.reset(); REQUIRE(t.dt_s() == 0); REQUIRE(t.dt_ms() == 0); REQUIRE(t.dt_us() == 0); mock_clock->advance_us(100); REQUIRE(t.dt_s() == 0.0001f); REQUIRE(t.dt_ms() == 0); REQUIRE(t.dt_us() == 100); t.reset(); REQUIRE(t.dt_s() == 0); REQUIRE(t.dt_ms() == 0); REQUIRE(t.dt_us() == 0); } TEST_CASE("CircularBuffer", "[utils]") { SECTION("CheckFull") { 
CircularBuffer buf(5); buf.insert(1); buf.insert(2); buf.insert(3); buf.insert(4); REQUIRE(buf.isFull() == false); buf.insert(1); REQUIRE(buf.isFull() == true); buf.insert(2); REQUIRE(buf.isFull() == true); buf.clear(); REQUIRE(buf.isFull() == false); } SECTION("CheckContents") { CircularBuffer buf(5); buf.insert(1); buf.insert(2); buf.insert(3); buf.insert(4); REQUIRE(buf.get_contents() == std::vector{1,2,3,4}); buf.insert(5); REQUIRE(buf.get_contents() == std::vector{1,2,3,4,5}); buf.insert(6); REQUIRE(buf.get_contents() == std::vector{2,3,4,5,6}); buf.clear(); REQUIRE(buf.get_contents() == std::vector{}); } } TEST_CASE("PositionController", "[utils]") { SECTION("P only") { PositionController::Gains gains {1,0,0}; PositionController controller(gains); float target_pos = 1.0; float actual_pos = 0.0; float target_vel = 0.0; float actual_vel = 0.0; float dt = 0.1; float cmd_vel = controller.compute(target_pos, actual_pos, target_vel, actual_vel, dt); REQUIRE(cmd_vel == 1.0); } SECTION("PD") { PositionController::Gains gains {1,0,0.5}; PositionController controller(gains); float target_pos = 1.0; float actual_pos = 0.0; float target_vel = 1.0; float actual_vel = 0.0; float dt = 0.1; float cmd_vel = controller.compute(target_pos, actual_pos, target_vel, actual_vel, dt); REQUIRE(cmd_vel == 2.5); } SECTION("PID") { PositionController::Gains gains {1,1,0.5}; PositionController controller(gains); float target_pos = 1.0; float actual_pos = 0.0; float target_vel = 1.0; float actual_vel = 0.0; float dt = 0.1; float cmd_vel = controller.compute(target_pos, actual_pos, target_vel, actual_vel, dt); // Compute again to get additive I gain cmd_vel = controller.compute(target_pos, actual_pos, target_vel, actual_vel, dt); REQUIRE(cmd_vel == 2.7f); // Reset gain controller.reset(); cmd_vel = controller.compute(target_pos, actual_pos, target_vel, actual_vel, dt); REQUIRE(cmd_vel == 2.6f); } SECTION("PID step response no noise") { PositionController::Gains gains {2,1,0.01}; 
PositionController controller(gains); float target_pos = 1.0; float actual_pos = 0.0; float target_vel = 0.0; float actual_vel = 0.0; float dt = 0.1; for (int i = 0; i < 100; i++) { actual_vel = controller.compute(target_pos, actual_pos, target_vel, actual_vel, dt); actual_pos += actual_vel*dt; } REQUIRE(actual_vel == Approx(target_vel).margin(0.001)); REQUIRE(actual_pos == Approx(target_pos).margin(0.001)); } } TEST_CASE("VectorMath", "[utils]") { SECTION("Simple data") { std::vector data = {1,2,3,4,5,6,7,8,9,10}; float mean = vectorMean(data); float stddev = vectorStddev(data, mean); float zscore = zScore(mean, stddev, 11); REQUIRE(mean == 5.5); REQUIRE(stddev == Approx(2.8723).margin(0.001)); REQUIRE(zscore == Approx(1.9148).margin(0.001)); } SECTION("No data") { std::vector data = {}; float mean = vectorMean(data); float stddev = vectorStddev(data, mean); float zscore = zScore(mean, stddev, 11); REQUIRE(mean == 0); REQUIRE(stddev == 0); REQUIRE(zscore == 0); } } TEST_CASE("StringParse", "[utils]") { SECTION("Empty string") { std::string test_string = ""; std::vector result = parseCommaDelimitedString(test_string); REQUIRE(result.size() == 1); REQUIRE(result[0].empty()); } SECTION("Normal string, no trailing comma") { std::string test_string = "asdf,1234,hello"; std::vector result = parseCommaDelimitedString(test_string); REQUIRE(result.size() == 3); REQUIRE_THAT(result, Catch::Matchers::Equals(std::vector{"asdf", "1234", "hello"})); } SECTION("Normal string, with trailing comma") { std::string test_string = "asdf,1234,hello,"; std::vector result = parseCommaDelimitedString(test_string); REQUIRE(result.size() == 4); REQUIRE_THAT(result, Catch::Matchers::Equals(std::vector{"asdf", "1234", "hello", ""})); } SECTION("String with whitespace") { std::string test_string = "as df,1234 , hello"; std::vector result = parseCommaDelimitedString(test_string); REQUIRE(result.size() == 3); REQUIRE_THAT(result, Catch::Matchers::Equals(std::vector{"as df", "1234 ", " hello"})); 
} } TEST_CASE("StringToFloatParse", "[utils]") { SECTION("Empty string") { std::string test_string = ""; std::vector result = parseCommaDelimitedStringToFloat(test_string); REQUIRE(result.empty()); } SECTION("Normal string, no trailing comma") { std::string test_string = "1.0,2.345,-1.2363"; std::vector result = parseCommaDelimitedStringToFloat(test_string); REQUIRE(result.size() == 3); REQUIRE_THAT(result, Catch::Matchers::Equals(std::vector{1.0, 2.345, -1.2363})); } SECTION("Normal string, with trailing comma") { std::string test_string = "1.0,2.345,-1.2363,"; std::vector result = parseCommaDelimitedStringToFloat(test_string); REQUIRE(result.size() == 3); REQUIRE_THAT(result, Catch::Matchers::Equals(std::vector{1.0, 2.345, -1.2363})); } SECTION("String with whitespace") { std::string test_string = "1.0 ,2.345 , -1.2363"; std::vector result = parseCommaDelimitedStringToFloat(test_string); REQUIRE(result.size() == 3); REQUIRE_THAT(result, Catch::Matchers::Equals(std::vector{1.0, 2.345, -1.2363})); } } TEST_CASE("LatchedBool", "[utils]") { SECTION("All true") { MockClockWrapper* mock_clock = get_mock_clock_and_reset(); LatchedBool lb(1.0); CHECK(lb.update(true) == true); mock_clock->advance_sec(0.5); CHECK(lb.update(true) == true); mock_clock->advance_sec(0.6); CHECK(lb.update(true) == true); } SECTION("All false") { MockClockWrapper* mock_clock = get_mock_clock_and_reset(); LatchedBool lb(1.0); CHECK(lb.update(false) == false); mock_clock->advance_sec(0.5); CHECK(lb.update(false) == false); mock_clock->advance_sec(0.6); CHECK(lb.update(false) == false); } SECTION("True then false then true") { MockClockWrapper* mock_clock = get_mock_clock_and_reset(); LatchedBool lb(1.0); CHECK(lb.update(true) == true); mock_clock->advance_sec(0.5); CHECK(lb.update(false) == true); CHECK(lb.update(false) == true); CHECK(lb.update(false) == true); mock_clock->advance_sec(0.6); CHECK(lb.update(false) == false); CHECK(lb.update(true) == true); mock_clock->advance_sec(0.5); 
CHECK(lb.update(false) == true); mock_clock->advance_sec(0.6); CHECK(lb.update(false) == false); } SECTION("False then true then false") { MockClockWrapper* mock_clock = get_mock_clock_and_reset(); LatchedBool lb(1.0); CHECK(lb.update(false) == false); mock_clock->advance_sec(0.5); CHECK(lb.update(false) == false); mock_clock->advance_sec(0.6); CHECK(lb.update(true) == true); mock_clock->advance_sec(0.6); CHECK(lb.update(false) == true); } } ================================================ FILE: src/robot_motor_driver/SerialComms.cpp ================================================ #include "SerialComms.h" SerialComms::SerialComms(HardwareSerial& serial) : serial_(serial), recvInProgress_(false), recvIdx_(0), buffer_("") { } SerialComms::~SerialComms() { } String SerialComms::rcv() { bool newData = false; String new_msg; while (serial_.available() > 0 && newData == false) { char rc = serial_.read(); if (recvInProgress_ == true) { if (rc == START_CHAR) { buffer_ = ""; } else if (rc != END_CHAR) { buffer_ += rc; } else { recvInProgress_ = false; newData = true; new_msg = buffer_; buffer_ = ""; } } else if (rc == START_CHAR) { recvInProgress_ = true; } } return new_msg; } void SerialComms::send(String msg) { if (msg.length() > 0) { serial_.print(START_CHAR); serial_.print(msg); serial_.print(END_CHAR); } } ================================================ FILE: src/robot_motor_driver/SerialComms.h ================================================ #ifndef SerialComms_h #define SerialComms_h #include #define START_CHAR '<' #define END_CHAR '>' class SerialComms { public: // Constructor SerialComms(HardwareSerial& serial); virtual ~SerialComms(); void send(String msg); String rcv(); protected: HardwareSerial& serial_; bool recvInProgress_; int recvIdx_; String buffer_; }; #endif ================================================ FILE: src/robot_motor_driver/robot_motor_driver.ino ================================================ #include "ClearCore.h" #include "SerialComms.h" 
#include // From https://teknic-inc.github.io/ClearCore-library/ArduinoRef.html // Serial = USB // Serial0 + Serial1 = COM ports #define PRINT_DEBUG false #define USE_FAKE_MOTOR false // Globals SerialComms comm(Serial); // -------------------------------------------------- // Base parameters // -------------------------------------------------- // Constants #define WHEEL_RADIUS 0.075 #define WHEEL_DIST_FROM_CENTER 0.405 #define BELT_RATIO 4 #define STEPS_PER_REV 800 #define MOTOR_MAX_VEL_STEPS_PER_SECOND 10000 #define MOTOR_MAX_ACC_STEPS_PER_SECOND_SQUARED 2600 #define MAX_BASE_MSG_TIMEOUT 200 const float sq3 = sqrt(3.0); const float rOver3 = WHEEL_RADIUS / 3.0; const float radsPerSecondToStepsPerSecond = STEPS_PER_REV / (2 * PI); #if USE_FAKE_MOTOR float MOTOR_FRONT_LEFT_FAKE = 0; float MOTOR_FRONT_RIGHT_FAKE = 0; float MOTOR_REAR_CENTER_FAKE = 0; #else // Motor mapping #define MOTOR_FRONT_LEFT ConnectorM1 #define MOTOR_FRONT_RIGHT ConnectorM2 #define MOTOR_REAR_CENTER ConnectorM0 #endif // -------------------------------------------------- // Lifter parameters // -------------------------------------------------- #define LIFTER_MOTOR ConnectorM3 #define INCREMENTAL_UP_PIN DI7 #define INCREMENTAL_DOWN_PIN DI6 #define SERVO_TOGGLE_PIN DI8 #define LATCH_SERVO_PIN IO0 // Only IO0 does pwm #define HOMING_SWITCH_PIN IO4 #define LIFTER_STEPS_PER_REV 800 #define LIFTER_MAX_VEL 7 // revs/sec #define LIFTER_MAX_ACC 10 // rev/sec^2 #define LIFTER_HOMING_VEL 3 // revs/sec #define SAFETY_MAX_POS 70*LIFTER_STEPS_PER_REV // Revs, Sanity check on desired position to make sure it isn't larger than this #define SAFETY_MIN_POS 0 // Revs, Sanity check on desired position to make sure it isn't less than this #define LATCH_ACTIVE_MS 1000 #define LATCH_OPEN_DUTY_CYCLE 45 #define LATCH_CLOSE_DUTY_CYCLE 120 #define MANUAL_LATCH_BUTTON_DELAY 200 // Poor man's debounce // -------------------------------------------------- // Helper structs // 
-------------------------------------------------- enum MODE { NONE, MANUAL_VEL, AUTO_POS, HOMING, LATCH_OPEN, LATCH_CLOSE, }; struct Command { int abs_pos; bool home; bool valid; bool stop; bool latch_open; bool latch_close; bool status_req; }; struct CartVelocity { float vx; float vy; float va; String toString() const { char s[100]; sprintf(s, "[vx: %.4f, vy: %.4f, va: %.4f]", vx, vy, va); return static_cast(s); } }; struct MotorVelocity { float v0; float v1; float v2; String toString() const { char s[100]; sprintf(s, "[v0: %.4f, v1: %.4f, v2: %.4f]", v0, v1, v2); return static_cast(s); } bool nonzero() const { return v0 != 0 && v1 != 0 && v2 != 0; } }; // -------------------------------------------------- // Lifter control functions // -------------------------------------------------- MODE activeMode = MODE::NONE; unsigned long prevLatchMillis = millis(); unsigned long prevLatchManualTriggerMillis = millis(); bool prev_latch_state_open = false; bool prev_button_state = false; void lifter_setup() { pinMode(INCREMENTAL_UP_PIN, INPUT); pinMode(INCREMENTAL_DOWN_PIN, INPUT); pinMode(SERVO_TOGGLE_PIN, INPUT); pinMode(HOMING_SWITCH_PIN, INPUT); pinMode(LATCH_SERVO_PIN, OUTPUT); LIFTER_MOTOR.HlfbMode(MotorDriver::HLFB_MODE_STATIC); LIFTER_MOTOR.PolarityInvertSDEnable(true); LIFTER_MOTOR.PolarityInvertSDDirection(true); LIFTER_MOTOR.VelMax(LIFTER_MAX_VEL*LIFTER_STEPS_PER_REV); LIFTER_MOTOR.AccelMax(LIFTER_MAX_ACC*LIFTER_STEPS_PER_REV); LIFTER_MOTOR.EnableRequest(false); } bool checkForLifterButtonFallingEdge(bool button_state) { if (button_state != prev_button_state && millis() - prevLatchManualTriggerMillis > MANUAL_LATCH_BUTTON_DELAY) { prev_button_state = button_state; prevLatchManualTriggerMillis = millis(); if (!button_state) { return true; } } return false; } // Possible inputs are // "home", "stop", or an integer representing position to move to // "open" or "close" controls the latch servo // "status_req" will request a status of the mode // Note that this 
assumes the message has already been validated Command decodeLifterMsg(String msg_in) { // Strip identifier String msg = msg_in.substring(5); Command c = {0, false, false, false, false, false, false}; #if PRINT_DEBUG comm.send("DEBUG Incoming lifter message: " + msg); #endif if(msg == "home") { c.home = true; c.valid = true; } else if(msg == "stop") { c.stop = true; c.valid = true; } else if (msg == "open") { c.latch_open = true; c.valid = true; } else if (msg == "close") { c.latch_close = true; c.valid = true; } else if (msg == "status_req") { c.valid = true; c.status_req = true; } else if (msg.startsWith("pos:")) { String pos_msg = msg.substring(4); c.abs_pos = pos_msg.toInt(); c.valid = true; } return c; } void lifter_update(String msg) { // Verify if incoming message is for lifter Command inputCommand = {0, false, false, false, false, false}; bool valid_msg = false; if(msg.length() > 0 && msg.startsWith("lift:")) { valid_msg = true; } // Parse command if it was valid if(valid_msg) { inputCommand = decodeLifterMsg(msg); } // Read inputs for manual mode bool vel_up = digitalRead(INCREMENTAL_UP_PIN); bool vel_down = digitalRead(INCREMENTAL_DOWN_PIN); bool servo_button_state = digitalRead(SERVO_TOGGLE_PIN); bool manaul_servo_toggle = false; if (checkForLifterButtonFallingEdge(servo_button_state)) { manaul_servo_toggle = true; } // For debugging only // Serial.print("Vel up: "); // Serial.print(vel_up); // Serial.print(" Vel dn: "); // Serial.println(vel_down); // If we got a stop command, make sure to handle it immediately if (inputCommand.valid && inputCommand.stop) { activeMode = MODE::NONE; // LIFTER_MOTOR.EnableRequest(false); } // For any other command, we will only handle it when there are no other active commands else if (activeMode == MODE::NONE) { // Figure out what mode we are in if(vel_up || vel_down) { activeMode = MODE::MANUAL_VEL; LIFTER_MOTOR.EnableRequest(true); if(vel_up) { LIFTER_MOTOR.MoveVelocity(-1*LIFTER_MAX_VEL*LIFTER_STEPS_PER_REV); } else 
if(vel_down) { LIFTER_MOTOR.MoveVelocity(LIFTER_MAX_VEL*LIFTER_STEPS_PER_REV); } } else if(inputCommand.valid && inputCommand.home) { activeMode = MODE::HOMING; LIFTER_MOTOR.EnableRequest(true); LIFTER_MOTOR.MoveVelocity(-1*LIFTER_HOMING_VEL*LIFTER_STEPS_PER_REV); } else if((inputCommand.valid && inputCommand.latch_open) || (manaul_servo_toggle && !prev_latch_state_open)) { activeMode = MODE::LATCH_OPEN; analogWrite(LATCH_SERVO_PIN, LATCH_OPEN_DUTY_CYCLE); prev_latch_state_open = true; prevLatchMillis = millis(); } else if(inputCommand.valid && inputCommand.latch_close || (manaul_servo_toggle && prev_latch_state_open)) { activeMode = MODE::LATCH_CLOSE; analogWrite(LATCH_SERVO_PIN, LATCH_CLOSE_DUTY_CYCLE); prev_latch_state_open = false; prevLatchMillis = millis(); } else if (inputCommand.valid && inputCommand.status_req) { #if PRINT_DEBUG comm.send("DEBUG Lifter position: " + String(LIFTER_MOTOR.PositionRefCommanded())); #endif } else if(inputCommand.valid) { if(inputCommand.abs_pos <= SAFETY_MAX_POS && inputCommand.abs_pos >= SAFETY_MIN_POS) { activeMode = MODE::AUTO_POS; long target = inputCommand.abs_pos; LIFTER_MOTOR.EnableRequest(true); LIFTER_MOTOR.Move(target, StepGenerator::MOVE_TARGET_ABSOLUTE); } } } // Handle continous updates for each mode String status_str = "lift:none"; if(activeMode == MODE::MANUAL_VEL) { status_str = "lift:manual"; if(!(vel_up || vel_down)) { activeMode = MODE::NONE; } } else if(activeMode == MODE::AUTO_POS) { status_str = "lift:pos"; if(LIFTER_MOTOR.StepsComplete()) { activeMode = MODE::NONE; } } else if(activeMode == MODE::HOMING) { if(!digitalRead(HOMING_SWITCH_PIN)) { LIFTER_MOTOR.MoveStopAbrupt(); LIFTER_MOTOR.PositionRefSet(0); activeMode = MODE::NONE; } status_str = "lift:homing"; } else if (activeMode == MODE::LATCH_CLOSE) { status_str = "lift:close"; if(millis() - prevLatchMillis > LATCH_ACTIVE_MS) { activeMode = MODE::NONE; } } else if (activeMode == MODE::LATCH_OPEN) { status_str = "lift:open"; if(millis() - 
prevLatchMillis > LATCH_ACTIVE_MS) { activeMode = MODE::NONE; } } else if (activeMode == MODE::NONE) { LIFTER_MOTOR.MoveStopAbrupt(); // LIFTER_MOTOR.EnableRequest(false); } // Will send back one of [none, manual, pos, homing, open, close] if (valid_msg) { comm.send(status_str); } // For debugging only // Serial.println(""); } // -------------------------------------------------- // Base control functions // -------------------------------------------------- unsigned long last_base_msg_millis = millis(); CartVelocity decodeBaseMsg(String msg) { #if PRINT_DEBUG comm.send("DEBUG Incoming base message: " + msg); #endif float vals[3]; int prev_idx = 0; int j = 0; for(int i = 0; i < msg.length(); i++) { if(msg[i] == ',') { vals[j] = msg.substring(prev_idx, i).toFloat(); j++; prev_idx = i+1; } if (i == msg.length()-1) { vals[j] = msg.substring(prev_idx).toFloat(); } } CartVelocity cv; cv.vx = vals[0]; cv.vy = vals[1]; cv.va = vals[2]; #if PRINT_DEBUG comm.send("DEBUG Decoded message: " + cv.toString()); #endif return cv; } // https://www.wolframalpha.com/input/?i=%5B%5B-cosd%2830%29%2C+sind%2830%29%2C+d%5D%2C%5Bcosd%2830%29%2C+sind%2830%29%2C+d%5D%2C%5B0%2C-1%2Cd%5D%5D MotorVelocity doIK(CartVelocity cmd) { MotorVelocity motors; motors.v0 = -1/WHEEL_RADIUS * (-1 * sq3 / 2 * cmd.vx + 0.5 * cmd.vy + WHEEL_DIST_FROM_CENTER * cmd.va) * BELT_RATIO; motors.v1 = -1/WHEEL_RADIUS * ( sq3 / 2 * cmd.vx + 0.5 * cmd.vy + WHEEL_DIST_FROM_CENTER * cmd.va) * BELT_RATIO; motors.v2 = -1/WHEEL_RADIUS * ( - 1 * cmd.vy + WHEEL_DIST_FROM_CENTER * cmd.va) * BELT_RATIO; #if PRINT_DEBUG comm.send("DEBUG After IK: " + motors.toString()); #endif return motors; } void SendCommandsToMotors(MotorVelocity motors) { #if PRINT_DEBUG comm.send("DEBUG Send to motor: " + String(motors.v0 * radsPerSecondToStepsPerSecond)); #endif #if USE_FAKE_MOTOR MOTOR_FRONT_LEFT_FAKE = motors.v0 * radsPerSecondToStepsPerSecond; MOTOR_FRONT_RIGHT_FAKE = motors.v1 * radsPerSecondToStepsPerSecond; MOTOR_REAR_CENTER_FAKE = 
motors.v2 * radsPerSecondToStepsPerSecond; #else MOTOR_FRONT_LEFT.MoveVelocity(motors.v0 * radsPerSecondToStepsPerSecond); MOTOR_FRONT_RIGHT.MoveVelocity(motors.v1 * radsPerSecondToStepsPerSecond); MOTOR_REAR_CENTER.MoveVelocity(motors.v2 * radsPerSecondToStepsPerSecond); #endif } MotorVelocity ReadMotorSpeeds() { MotorVelocity measured; #if USE_FAKE_MOTOR measured.v0 = MOTOR_FRONT_LEFT_FAKE / radsPerSecondToStepsPerSecond; measured.v1 = MOTOR_FRONT_RIGHT_FAKE / radsPerSecondToStepsPerSecond; measured.v2 = MOTOR_REAR_CENTER_FAKE / radsPerSecondToStepsPerSecond; #else measured.v0 = MOTOR_FRONT_LEFT.VelocityRefCommanded() / radsPerSecondToStepsPerSecond; measured.v1 = MOTOR_FRONT_RIGHT.VelocityRefCommanded() / radsPerSecondToStepsPerSecond; measured.v2 = MOTOR_REAR_CENTER.VelocityRefCommanded() / radsPerSecondToStepsPerSecond; #endif #if PRINT_DEBUG comm.send("DEBUG Motor measured: " + measured.toString()); #endif return measured; } // https://www.wolframalpha.com/input/?i=inv%28%5B%5B-cosd%2830%29%2C+sind%2830%29%2C+d%5D%2C%5Bcosd%2830%29%2C+sind%2830%29%2C+d%5D%2C%5B0%2C-1%2Cd%5D%5D%29 CartVelocity doFK(MotorVelocity motor_measured) { MotorVelocity wheel_speed; wheel_speed.v0 = motor_measured.v0 / float(BELT_RATIO); wheel_speed.v1 = motor_measured.v1 / float(BELT_RATIO); wheel_speed.v2 = motor_measured.v2 / float(BELT_RATIO); CartVelocity robot_measured; robot_measured.vx = -rOver3 * (-1 * sq3 * wheel_speed.v0 + sq3 * wheel_speed.v1); robot_measured.vy = -rOver3 * ( wheel_speed.v0 + wheel_speed.v1 - 2.0 * wheel_speed.v2); robot_measured.va = -rOver3 / WHEEL_DIST_FROM_CENTER * (wheel_speed.v0 + wheel_speed.v1 + wheel_speed.v2); #if PRINT_DEBUG comm.send("DEBUG After FK: " + robot_measured.toString()); #endif return robot_measured; } void ReportRobotVelocity(CartVelocity robot_v_measured) { char msg[32]; sprintf(msg, "base:%.3f,%.3f,%.3f",robot_v_measured.vx, robot_v_measured.vy, robot_v_measured.va); comm.send(msg); } // Checks for any motor on/off requests before 
trying to parse for commanded speeds bool handlePowerRequests(String msg) { if(msg == "Power:ON") { MOTOR_FRONT_RIGHT.EnableRequest(true); MOTOR_REAR_CENTER.EnableRequest(true); MOTOR_FRONT_LEFT.EnableRequest(true); #if PRINT_DEBUG comm.send("DEBUG Motors enabled"); #endif return true; } else if (msg == "Power:OFF") { MOTOR_FRONT_RIGHT.EnableRequest(false); MOTOR_REAR_CENTER.EnableRequest(false); MOTOR_FRONT_LEFT.EnableRequest(false); #if PRINT_DEBUG comm.send("DEBUG Motors disabled"); #endif return true; } return false; } void base_setup() { MOTOR_FRONT_RIGHT.EnableRequest(false); MOTOR_FRONT_RIGHT.VelMax(MOTOR_MAX_VEL_STEPS_PER_SECOND); MOTOR_FRONT_RIGHT.AccelMax(MOTOR_MAX_ACC_STEPS_PER_SECOND_SQUARED); MOTOR_REAR_CENTER.EnableRequest(false); MOTOR_REAR_CENTER.VelMax(MOTOR_MAX_VEL_STEPS_PER_SECOND); MOTOR_REAR_CENTER.AccelMax(MOTOR_MAX_ACC_STEPS_PER_SECOND_SQUARED); MOTOR_FRONT_LEFT.EnableRequest(false); MOTOR_FRONT_LEFT.VelMax(MOTOR_MAX_VEL_STEPS_PER_SECOND); MOTOR_FRONT_LEFT.AccelMax(MOTOR_MAX_ACC_STEPS_PER_SECOND_SQUARED); } bool isValidBaseMessage(String msg) { bool result = false; if(msg.length() > 0 && msg.startsWith("base:")) { result = true; } return result; } void base_update(String msg) { MotorVelocity cur_motor_v = ReadMotorSpeeds(); if(millis() - last_base_msg_millis > MAX_BASE_MSG_TIMEOUT && cur_motor_v.nonzero()) { SendCommandsToMotors({0,0,0}); handlePowerRequests("Power:OFF"); } if(!isValidBaseMessage(msg)) { return; } last_base_msg_millis = millis(); // Strip base identifier String new_msg = msg.substring(5); if(!handlePowerRequests(new_msg)) { CartVelocity cmd_v = decodeBaseMsg(new_msg); MotorVelocity motor_v = doIK(cmd_v); SendCommandsToMotors(motor_v); MotorVelocity motor_v_measured = ReadMotorSpeeds(); CartVelocity robot_v_measured = doFK(motor_v_measured); ReportRobotVelocity(robot_v_measured); } } // -------------------------------------------------- // Top level functions // -------------------------------------------------- void setup() { 
Serial.begin(115200); // Sets the input clocking rate. MotorMgr.MotorInputClocking(MotorManager::CLOCK_RATE_LOW); // Sets all motor connectors into step and direction mode. MotorMgr.MotorModeSet(MotorManager::MOTOR_ALL, Connector::CPM_MODE_STEP_AND_DIR); base_setup(); lifter_setup(); } void loop() { String msg = comm.rcv(); base_update(msg); lifter_update(msg); } ================================================ FILE: src/tools/80-domino-bot.rules ================================================ SUBSYSTEM=="tty", ATTRS{idVendor}=="2890", ATTRS{idProduct}=="8022", SYMLINK+="clearcore" SUBSYSTEM=="tty", ATTRS{idVendor}=="0483", ATTRS{idProduct}=="5740", SYMLINK+="marvelmind%n" SUBSYSTEM=="tty", ATTRS{idVendor}=="2341", ATTRS{idProduct}=="0043", SYMLINK+="arduino" SUBSYSTEM=="tty", ATTRS{idVendor}=="2341", ATTRS{idProduct}=="0042", SYMLINK+="arduino" ================================================ FILE: src/tools/IR_calibration_1.yml ================================================ %YAML:1.0 --- K: !!opencv-matrix rows: 3 cols: 3 dt: d data: [ 1.2117719731192783e+03, 0., 6.4569867933163880e+02, 0., 1.2132218864815018e+03, 3.0315790346461841e+02, 0., 0., 1. ] D: !!opencv-matrix rows: 1 cols: 5 dt: d data: [ -4.6874878599433956e-01, 7.8637235803345207e-01, -5.0536502302069442e-04, 5.8119887567900750e-04, -1.6010067485682662e+00 ] ================================================ FILE: src/tools/IR_calibration_2.yml ================================================ %YAML:1.0 --- K: !!opencv-matrix rows: 3 cols: 3 dt: d data: [ 1.2054254220822804e+03, 0., 6.7330983848794142e+02, 0., 1.2054587537926946e+03, 3.4257042557877725e+02, 0., 0., 1. 
] D: !!opencv-matrix rows: 1 cols: 5 dt: d data: [ -4.0579303514821868e-01, 2.1496514338793393e-01, 5.1830677921908667e-04, 1.2914137465804134e-04, -1.6468164001861275e-02 ] ================================================ FILE: src/tools/camera_calibration.py ================================================ import numpy as np import cv2 import glob import argparse import os # termination criteria criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001) def calibrate(dirpath, prefix, image_format, square_size, width=9, height=6): """ Apply camera calibration operation for images in the given directory path. """ # prepare object points, like (0,0,0), (1,0,0), (2,0,0) ....,(8,6,0) objp = np.zeros((height*width, 3), np.float32) objp[:, :2] = np.mgrid[0:width, 0:height].T.reshape(-1, 2) objp = objp * square_size # Arrays to store object points and image points from all the images. objpoints = [] # 3d point in real world space imgpoints = [] # 2d points in image plane. glob_str = os.path.join(dirpath, prefix + '*.' 
+ image_format) print("Looking for images that match: {}".format(glob_str)) images = glob.glob(glob_str) for fname in images: img = cv2.imread(fname) print("Loading image from {}".format(fname)) gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) # Find the chess board corners ret, corners = cv2.findChessboardCorners(gray, (width, height), None) # If found, add object points, image points (after refining them) if ret: objpoints.append(objp) corners2 = cv2.cornerSubPix(gray, corners, (11, 11), (-1, -1), criteria) imgpoints.append(corners2) # Draw and display the corners img = cv2.drawChessboardCorners(img, (width, height), corners2, ret) cv2.imshow('img', img) cv2.waitKey(500) cv2.destroyAllWindows() ret, mtx, dist, rvecs, tvecs = cv2.calibrateCamera(objpoints, imgpoints, gray.shape[::-1], None, None) # Show calibrated images using last image h, w = img.shape[:2] newcameramtx, roi = cv2.getOptimalNewCameraMatrix(mtx, dist, (w,h), 1, (w,h)) # undistort dst = cv2.undistort(img, mtx, dist, None, newcameramtx) # crop the image x, y, w, h = roi dst = dst[y:y+h, x:x+w] cv2.imshow('raw', img) cv2.waitKey(1000) cv2.imshow('calibrated', dst) cv2.waitKey(1000) cv2.destroyAllWindows() # reprojection error mean_error = 0 for i in range(len(objpoints)): imgpoints2, _ = cv2.projectPoints(objpoints[i], rvecs[i], tvecs[i], mtx, dist) error = cv2.norm(imgpoints[i], imgpoints2, cv2.NORM_L2)/len(imgpoints2) mean_error += error print( "total reprotjection error: {}".format(mean_error/len(objpoints)) ) return [ret, mtx, dist, rvecs, tvecs] def save_coefficients(mtx, dist, path): """ Save the camera matrix and the distortion coefficients to given path/file. """ cv_file = cv2.FileStorage(path, cv2.FILE_STORAGE_WRITE) cv_file.write("K", mtx) cv_file.write("D", dist) # note you *release* you don't close() a FileStorage object cv_file.release() print("Wrote calibration file to {}".format(path)) def load_coefficients(path): """ Loads camera matrix and distortion coefficients. 
""" # FILE_STORAGE_READ cv_file = cv2.FileStorage(path, cv2.FILE_STORAGE_READ) # note we also have to specify the type to retrieve other wise we only get a # FileNode object back instead of a matrix camera_matrix = cv_file.getNode("K").mat() dist_matrix = cv_file.getNode("D").mat() cv_file.release() return [camera_matrix, dist_matrix] if __name__ == '__main__': parser = argparse.ArgumentParser(description='Camera calibration') parser.add_argument('--image_dir', type=str, default='C:\\Users\\alexb\\Pictures\\DominoRobotIRCalibration\\Camera2', help='image directory path') parser.add_argument('--image_format', type=str, default='jpg', help='image format, png/jpg') parser.add_argument('--prefix', type=str, default='', help='image prefix') parser.add_argument('--square_size', type=float, default=0.023, help='chessboard square size') parser.add_argument('--width', type=int, default=9, help='chessboard width size, default is 9') parser.add_argument('--height', type=int, default=6, help='chessboard height size, default is 6') parser.add_argument('--save_file', type=str, default='IR_calibration_2.yml', help='YML file to save calibration matrices') args = parser.parse_args() ret, mtx, dist, rvecs, tvecs = calibrate(args.image_dir, args.prefix, args.image_format, args.square_size, args.width, args.height) save_coefficients(mtx, dist, args.save_file) print("Calibration is finished. 
RMS: ", ret)


================================================
FILE: src/tools/motor_test_script.py
================================================
import serial
import time
import signal
import sys

SERIAL_PORT = 'COM7'
SERIAL_BAUD = 115200
SERIAL_TIMEOUT = 1
# Message framing characters shared with the robot firmware.
START_CHAR = "<"
END_CHAR = ">"


# Copied and modified from RobotClient.TcpClient
class SerialClient:
    """Thin serial wrapper that frames messages with START_CHAR/END_CHAR."""

    def __init__(self, port, baud, timeout):
        self.ser = serial.Serial(port=port, baudrate=baud, timeout=timeout)
        # NOTE(review): returning a value from __init__ has no effect; a failed
        # serial.Serial call raises on its own.
        if not self.ser:
            return None

    def send(self, msg):
        """Frame the payload with start/end markers and write it as bytes."""
        msg = START_CHAR + msg + END_CHAR
        msg_bytes = msg.encode()
        self.ser.write(msg_bytes)

    # NOTE(review): "recieve" spelling kept as-is; callers in this file use it.
    def recieve(self, timeout=0.1):
        """Read bytes until a complete <...> framed message or the timeout.

        Returns the message payload (without frame chars), or "" on timeout.
        """
        new_msg = ""
        new_msg_ready = False
        start_time = time.time()
        while not new_msg_ready and time.time() - start_time < timeout:
            # Get new data
            data = self.ser.read()
            if data == b'':
                break

            # Decode data and parse into message
            new_str = data.decode(encoding='UTF-8',errors='strict')
            start_idx = new_str.find(START_CHAR)
            end_idx = new_str.find(END_CHAR)
            if start_idx != -1 and end_idx != -1:
                # Complete message within this chunk.
                new_msg = new_str[start_idx+1:end_idx]
                new_msg_ready = True
            elif start_idx != -1:
                # Message start seen; keep accumulating.
                new_msg += new_str[start_idx+1:]
                # NOTE(review): this local is assigned but never read anywhere.
                msg_rcv_in_progress = True
            elif end_idx != -1:
                # Message end seen; finish accumulating.
                new_msg += new_str[:end_idx]
                new_msg_ready = True
            else:
                new_msg += new_str

        if not new_msg_ready:
            new_msg = ""
        return new_msg


def power_on(s):
    """Request motor power on and print any response."""
    msg = "base:Power:ON"
    s.send(msg)
    check_response(s)


def power_off(s):
    """Request motor power off and print any response."""
    msg = "base:Power:OFF"
    s.send(msg)
    check_response(s)


def send_vel(s, vel):
    """Send a [vx, vy, va] cartesian velocity command to the base."""
    if len(vel) != 3:
        raise ValueError("Velocity should be 3 elements long")
    msg = "base:{:.3f},{:.3f},{:.3f}".format(vel[0], vel[1], vel[2])
    print("SND: {}".format(msg))
    s.send(msg)


def check_response(s):
    """Drain and print all pending responses from the robot."""
    msg = s.recieve()
    while msg:
        print("RCV: {}".format(msg))
        msg = s.recieve()


def move_vel_with_pause(s, vel, move_time):
    """Re-send the same velocity every 0.2 s for move_time seconds (keeps the
    firmware command timeout from tripping)."""
    start_time = time.time()
    while time.time() - start_time < move_time:
        send_vel(s, vel)
        check_response(s)
        time.sleep(0.2)


def timed_move(s, vel, move_time, pause_time):
    """Move at vel for move_time seconds, then hold zero velocity for pause_time."""
    print("Starting move")
    move_vel_with_pause(s, vel, move_time)
    print("Done with move")
    stop_vel = [0,0,0]
    move_vel_with_pause(s, stop_vel, pause_time)


if __name__ == '__main__':
    ser = SerialClient(SERIAL_PORT, SERIAL_BAUD, SERIAL_TIMEOUT)

    # Ctrl-C handler: stop and de-power the motors before exiting.
    def signal_handler(sig, frame):
        print('Gracefully powering down motors and exiting')
        send_vel(ser, [0,0,0])
        power_off(ser)
        sys.exit(0)
    signal.signal(signal.SIGINT, signal_handler)

    # Canned test velocities [vx, vy, va].
    fwd_vel = [ 0.2, 0, 0]
    bkwd_vel = [-0.2, 0, 0]
    left_vel = [0, 0.2, 0]
    right_vel = [0, -0.2, 0]
    ccw_spin_vel = [0, 0, 0.2]
    cw_spin_vel = [0, 0, -0.2]
    diag_fwd_vel = [0.1, 0.173, 0]
    diag_rev_vel = [-0.1, -0.173, 0]
    move_time = 2
    pause_time = 1

    print("Motors on")
    power_on(ser)

    # Loop forever; exit via Ctrl-C (signal_handler above).
    while(True):
        print("Move forward")
        timed_move(ser, fwd_vel, move_time, pause_time)
        print("Move backward")
        timed_move(ser, bkwd_vel, move_time, pause_time)
        # print("Move left")
        # timed_move(ser, left_vel, move_time, pause_time)
        # print("Move right")
        # timed_move(ser, right_vel, move_time, pause_time)
        # print("Move CCW")
        # timed_move(ser, ccw_spin_vel, move_time, pause_time)
        # print("Move CW")
        # timed_move(ser, cw_spin_vel, move_time, pause_time)
        # print("Move Diag Fwd")
        # timed_move(ser, diag_fwd_vel, move_time, pause_time)
        # print("Move Diag Rev")
        # timed_move(ser, diag_rev_vel, move_time, pause_time)

    print("Motors off")
    power_off(ser)


================================================
FILE: src/tools/plot_logs.py
================================================
import argparse
import os
import numpy as np
import json
import matplotlib.pyplot as plt
import PySimpleGUI as sg

DEFAULT_LOG_PATH = "C:\\Users\\alexb\\Documents\\Github\\DominoRobot\\log"


def get_value(line, path):
    """Find the float that follows the sequence of labels in `path` within `line`.

    Each label in `path` advances the search cursor past its own text, so
    nested keys like ['Target','Position','X'] resolve to the value after
    the final label.
    """
    idx = 0
    for p in path:
        idx = line.find(p, idx)
        idx += len(p)
    # Collect numeric characters after the last label, skipping ':' and spaces,
    # stopping at array/field delimiters.
    data = ""
    for c in line[idx:]:
        if c in ['[',']',',','\n']:
            break
        elif c in [':',' ']:
            continue
        else:
            data += c
    return float(data)


def get_array(line):
    """Extract the comma-separated floats between the first '[' and ']'."""
    start_idx = line.find('[')
    end_idx = line.find(']')
    array_txt = line[start_idx+1:end_idx]
    vals = array_txt.split(',')
    return
[float(v.strip()) for v in vals]
# NOTE(review): the line above is the severed tail of get_array's return
# statement from the previous extraction chunk.


class LogParser:
    """Parses a robot log file into numpy time series for plotting."""

    def __init__(self, path):
        self.path = path
        # All series are seeded with a single zero column; parse_logs appends
        # one column per matching log line.
        self.time = np.zeros((1,1))
        self.target_pos = np.zeros((3,1))
        self.target_vel = np.zeros((3,1))
        self.cmd_vel = np.zeros((3,1))
        self.est_vel = np.zeros((3,1))
        self.est_pos = np.zeros((3,1))
        self.motor_cmd_vel = np.zeros((4,1))
        self.motor_est_vel = np.zeros((4,1))
        self.motor_counts = np.zeros((4,1))
        self.cartesian_controller = np.zeros((3,1))
        # Per-motor debug traces, one dict of lists per motor.
        self.motor_info = [{"deltaRads": [0], "deltaMicros":[0], "pidOut": [0], "outputCmd": [0]} for i in range(4)]
        # Accumulated time offset so successive moves plot sequentially.
        self.t_offset = 0

    def parse_logs(self):
        """Scan the log file line by line, appending data to the matching series."""
        with open(self.path) as f:
            for line in f:
                if line.startswith("Target:"):
                    # time
                    self.time = np.append(self.time, get_value(line, ['Target', 'T']) + self.t_offset)
                    # target position
                    data = []
                    data.append(get_value(line, ['Target','Position','X']))
                    data.append(get_value(line, ['Target','Position','Y']))
                    data.append(get_value(line, ['Target','Position','A']))
                    arr = np.asarray(data).reshape((3,1))
                    self.target_pos = np.hstack((self.target_pos, arr))
                    # target velocity
                    data = []
                    data.append(get_value(line, ['Target','Velocity','X']))
                    data.append(get_value(line, ['Target','Velocity','Y']))
                    data.append(get_value(line, ['Target','Velocity','A']))
                    arr = np.asarray(data).reshape((3,1))
                    self.target_vel = np.hstack((self.target_vel, arr))
                elif line.startswith("CartesianControlX:"):
                    # control values
                    data = []
                    data.append(get_value(line, ['CartesianControlX','PosErr']))
                    data.append(get_value(line, ['CartesianControlX','VelErr']))
                    data.append(get_value(line, ['CartesianControlX','ErrSum']))
                    arr = np.asarray(data).reshape((3,1))
                    self.cartesian_controller = np.hstack((self.cartesian_controller, arr))
                elif line.startswith("CartVelCmd:"):
                    # command velocity
                    data = []
                    data.append(get_value(line, ['CartVelCmd','vx']))
                    data.append(get_value(line, ['CartVelCmd','vy']))
                    data.append(get_value(line, ['CartVelCmd','va']))
                    arr = np.asarray(data).reshape((3,1))
                    self.cmd_vel = np.hstack((self.cmd_vel, arr))
                elif line.startswith("MotorCommands:"):
                    # motor command vel
                    data = get_array(line)
                    arr = np.asarray(data).reshape((4,1))
                    self.motor_cmd_vel = np.hstack((self.motor_cmd_vel, arr))
                elif line.startswith("MotorMeasured:"):
                    # motor est vel
                    data = get_array(line)
                    arr = np.asarray(data).reshape((4,1))
                    self.motor_est_vel = np.hstack((self.motor_est_vel, arr))
                elif line.startswith("MotorCounts:"):
                    # motor encoder counts
                    data = get_array(line)
                    arr = np.asarray(data).reshape((4,1))
                    self.motor_counts = np.hstack((self.motor_counts, arr))
                elif line.startswith("Est Vel:"):
                    # est velocity
                    data = []
                    data.append(get_value(line, ['Est Vel','X']))
                    data.append(get_value(line, ['Est Vel','Y']))
                    data.append(get_value(line, ['Est Vel','A']))
                    arr = np.asarray(data).reshape((3,1))
                    self.est_vel = np.hstack((self.est_vel, arr))
                elif line.startswith("Est Pos:"):
                    # est position
                    data = []
                    data.append(get_value(line, ['Est Pos','X']))
                    data.append(get_value(line, ['Est Pos','Y']))
                    data.append(get_value(line, ['Est Pos','A']))
                    arr = np.asarray(data).reshape((3,1))
                    self.est_pos = np.hstack((self.est_pos, arr))
                elif line.startswith("Reached goal"):
                    # Drop extra target line if needed
                    if self.time.shape[0] > self.est_pos.shape[1]:
                        self.time = self.time[:-1]
                        self.target_pos = self.target_pos[:,:-1]
                        self.target_vel = self.target_vel[:,:-1]
                    # Update time offset
                    self.t_offset += self.time[-1] + 0.5 #some extra spacing
                elif line.startswith("Motor0:") or line.startswith("Motor1:") or \
                     line.startswith("Motor2:") or line.startswith("Motor3:"):
                    # Motor index is the single digit at position 5 of "MotorN:".
                    n = int(line[5])
                    name = "Motor{}".format(n)
                    if line[7:].strip():
                        self.motor_info[n]["deltaRads"].append(get_value(line, [name, "deltaRads"]))
                        self.motor_info[n]["deltaMicros"].append(get_value(line, [name, "deltaMicros"]))
                        self.motor_info[n]["pidOut"].append(get_value(line, [name, "pidOut"]))
                        self.motor_info[n]["outputCmd"].append(get_value(line, [name, "outputCmd"]))
                    else:
                        # Empty payload: insert a sentinel value to keep the
                        # series aligned in length.
                        self.motor_info[n]["deltaRads"].append(0.00123)
                        self.motor_info[n]["deltaMicros"].append(0.00123)
                        self.motor_info[n]["pidOut"].append(0.00123)
                        self.motor_info[n]["outputCmd"].append(0.00123)
                else:
                    continue

    def plot_pos(self):
        """Plot target vs. estimated cartesian position (x, y, a) over time."""
        fig = plt.figure()
        fig.canvas.set_window_title('Cartesian Position')
        labels = ['x', 'y', 'a']
        main_ax = None
        for i in range(3):
            if not main_ax:
                main_ax = fig.add_subplot(3,1,i+1)
                ax = main_ax
            else:
                ax = fig.add_subplot(3,1,i+1, sharex=main_ax)
            ax.plot(self.time,self.target_pos[i,:], '-b.', self.time,self.est_pos[i,:], '-r.')
            ax.legend(['Target', 'Estimate'])
            ax.set_ylabel("Position: {}".format(labels[i]))
        ax.set_xlabel('Time')

    def plot_x_control(self):
        """Plot the X-axis controller internals: position, velocity, PID terms."""
        fig = plt.figure()
        fig.canvas.set_window_title('Cartesian X Control')

        # X position
        main_ax = fig.add_subplot(3,1,1)
        ax = main_ax
        ax.plot(self.time,self.target_pos[0,:], '-b.', self.time,self.est_pos[0,:], '-r.')
        ax.legend(['Target', 'Estimate'])
        ax.set_ylabel("Position")

        # X velocity
        ax = fig.add_subplot(3,1,2, sharex=main_ax)
        ax.plot(self.time,self.target_vel[0,:], '-b.', self.time,self.est_vel[0,:], '-r.', self.time,self.cmd_vel[0,:], '-g.')
        ax.legend(['Target', 'Estimate', 'Command'])
        ax.set_ylabel("Velocity")

        # X control - gains just copied from constants.h
        kp = 2
        ki = 0.1
        kd = 0
        ax = fig.add_subplot(3,1,3, sharex=main_ax)
        ax.plot(self.time,self.cartesian_controller[0,:], '-b.', self.time,self.cartesian_controller[1,:], '-r.', self.time,self.cartesian_controller[2,:], '-g.')
        # Dashed lines show each error term scaled by its gain.
        ax.plot(self.time,kp*self.cartesian_controller[0,:], '--b', self.time, kd*self.cartesian_controller[1,:], '--r', self.time, ki*self.cartesian_controller[2,:], '--g')
        ax.legend(['PosErr', 'VelErr', 'ErrSum'])
        ax.set_ylabel("Controller")

    def plot_vel(self):
        """Plot target, estimated, and commanded cartesian velocities over time."""
        fig = plt.figure()
        fig.canvas.set_window_title('Cartesian Velocity')
        labels = ['x', 'y', 'a']
        main_ax = None
        for i in range(3):
            if not main_ax:
                main_ax = fig.add_subplot(3,1,i+1)
                ax = main_ax
            else:
                ax = fig.add_subplot(3,1,i+1, sharex=main_ax)
            ax.plot(self.time,self.target_vel[i,:], '-b.', self.time,self.est_vel[i,:], '-r.', self.time,self.cmd_vel[i,:], '-g.')
            ax.legend(['Target', 'Estimate', 'Command'])
            ax.set_ylabel("Velocity: {}".format(labels[i]))
        ax.set_xlabel('Time')

    def plot_motors(self):
        """Plot commanded vs. measured velocity for each of the four motors."""
        fig = plt.figure()
        fig.canvas.set_window_title('Motor Velocity')
        labels = ['0', '1', '2', '3']
        main_ax = None
        for i in range(4):
            if not main_ax:
                main_ax = fig.add_subplot(4,1,i+1)
                ax = main_ax
            else:
                ax = fig.add_subplot(4,1,i+1, sharex=main_ax)
            ax.plot(self.time,self.motor_cmd_vel[i,:], '-b.', self.time,self.motor_est_vel[i,:], '-r.')
            ax.legend(['Target', 'Estimate'])
            ax.set_ylabel("Motor Velocity: {}".format(labels[i]))
        ax.set_xlabel('Time')

    def plot_motor_counts(self):
        """Plot encoder counts for all four motors on a single axis."""
        fig = plt.figure()
        fig.canvas.set_window_title('Motor Counts')
        ax = fig.add_subplot(1,1,1)
        colors = ['r','g','b','k']
        for i in range(4):
            ax.plot(self.time,self.motor_counts[i,:], '-{}.'.format(colors[i]))
        labels = ['0', '1', '2', '3']
        ax.legend(labels)
        ax.set_ylabel("Motor counts")
        ax.set_xlabel('Time')

    def plot_motor_info(self):
        """Plot the per-motor PID output vs. final output command traces."""
        fig = plt.figure()
        fig.canvas.set_window_title('Motor Info')
        labels = ['0', '1', '2', '3']
        main_ax = None
        for i in range(4):
            if not main_ax:
                main_ax = fig.add_subplot(4,1,i+1)
                ax = main_ax
            else:
                ax = fig.add_subplot(4,1,i+1, sharex=main_ax)
            ax.plot(self.time,self.motor_info[i]["pidOut"], '-b.', self.time,self.motor_info[i]["outputCmd"], '-r.')
            ax.legend(['pidOut', 'outputCmd'])
            ax.set_ylabel("Motor Info: {}".format(labels[i]))
        ax.set_xlabel('Time')

    def plot_logs(self):
        """Generate the default set of figures and block on plt.show()."""
        self.plot_pos()
        self.plot_vel()
        self.plot_motors()
        plt.show()


def get_all_log_files():
    """Return the log-directory filenames, newest (reverse listing order) first."""
    files = [f for f in os.listdir(DEFAULT_LOG_PATH) if os.path.isfile(os.path.join(DEFAULT_LOG_PATH, f))]
    files.reverse()
    return files


class PlottingGui:
    """PySimpleGUI window for picking a log file and which plots to generate."""

    def __init__(self):
        sg.change_look_and_feel('DarkBlue')
        all_files = get_all_log_files()
        left_side = sg.Column(
            [[sg.Listbox(values=all_files, size=(50, 10), key='_FILES_')]]
        )
        pos = [sg.Checkbox('Plot Positions', default=True, key='_POS_')]
        vel = [sg.Checkbox('Plot Velocities', default=True, key='_VEL_')]
        motors = [sg.Checkbox('Plot Motors', default=True, key='_MOTOR_')]
        motor_counts = [sg.Checkbox('Plot Motor Counts', default=False, key='_MOTORCOUNTS_')]
        x_control = [sg.Checkbox('Plot X Controller', default=False, key='_XCONTROL_')]
        motor_info = [sg.Checkbox('Plot Motor Info', default=False, key='_MOTORINFO_')]
        plot_button = [sg.Button('PLOT')]
        right_side = sg.Column([
            pos,
            vel,
            motors,
            motor_counts,
            x_control,
            motor_info,
            plot_button
        ])
        layout = [[left_side, right_side]]
        self.window = sg.Window('Plotting Utility', layout)
        self.window.finalize()

    def loop(self):
        """Run the GUI event loop until the window is closed."""
        while True:  # Event Loop
            # Keep files in list updated
            all_files = get_all_log_files()
            self.window['_FILES_'].update(values=all_files)

            # Handle buttons
            event, values = self.window.read()
            if event is None or event == 'Exit':
                break
            if event == 'PLOT':
                # Get file
                vals = self.window['_FILES_'].get_list_values()
                idx = self.window['_FILES_'].get_indexes()
                fname = vals[idx[0]]
                fpath = os.path.join(DEFAULT_LOG_PATH, fname)
                print("Loading logs from {}".format(fpath))

                # Parse logs and generate plots for desired items
                lp = LogParser(fpath)
                lp.parse_logs()
                if self.window['_POS_'].Get():
                    lp.plot_pos()
                if self.window['_VEL_'].Get():
                    lp.plot_vel()
                if self.window['_MOTOR_'].Get():
                    lp.plot_motors()
                if self.window['_MOTORCOUNTS_'].Get():
                    lp.plot_motor_counts()
                if self.window['_XCONTROL_'].Get():
                    lp.plot_x_control()
                if self.window['_MOTORINFO_'].Get():
                    lp.plot_motor_info()

                # Show figures
                plt.show(block=False)

        self.window.close()


if __name__ == '__main__':
    pg = PlottingGui()
    pg.loop()