Repository: lp6m/yolov5s_android Branch: master Commit: cef737ceb9fb Files: 77 Total size: 62.9 MB Directory structure: gitextract_67cfm_6g/ ├── .gitmodules ├── LICENSE.md ├── README.md ├── app/ │ ├── README.md │ └── tflite_yolov5_test/ │ ├── .gitignore │ ├── .idea/ │ │ ├── .gitignore │ │ ├── compiler.xml │ │ ├── gradle.xml │ │ ├── jarRepositories.xml │ │ ├── misc.xml │ │ ├── runConfigurations.xml │ │ └── vcs.xml │ ├── app/ │ │ ├── .gitignore │ │ ├── CMakeLists.txt │ │ ├── build.gradle │ │ ├── proguard-rules.pro │ │ └── src/ │ │ ├── androidTest/ │ │ │ └── java/ │ │ │ └── com/ │ │ │ └── example/ │ │ │ └── tflite_yolov5_test/ │ │ │ └── ExampleInstrumentedTest.java │ │ ├── main/ │ │ │ ├── AndroidManifest.xml │ │ │ ├── cpp/ │ │ │ │ ├── nms.h │ │ │ │ └── postprocess.cpp │ │ │ ├── java/ │ │ │ │ └── com/ │ │ │ │ └── example/ │ │ │ │ └── tflite_yolov5_test/ │ │ │ │ ├── ImageProcess.java │ │ │ │ ├── MainActivity.java │ │ │ │ ├── PathUtils.java │ │ │ │ ├── TfliteRunMode.java │ │ │ │ ├── TfliteRunner.java │ │ │ │ ├── camera/ │ │ │ │ │ ├── CameraActivity.java │ │ │ │ │ ├── CameraConnectionFragment.java │ │ │ │ │ ├── DetectorActivity.java │ │ │ │ │ ├── LegacyCameraConnectionFragment.java │ │ │ │ │ ├── env/ │ │ │ │ │ │ ├── BorderedText.java │ │ │ │ │ │ └── ImageUtils.java │ │ │ │ │ └── tracker/ │ │ │ │ │ └── MultiBoxTracker.java │ │ │ │ └── customview/ │ │ │ │ ├── AutoFitTextureView.java │ │ │ │ └── OverlayView.java │ │ │ └── res/ │ │ │ ├── drawable/ │ │ │ │ ├── ic_dashboard_black_24dp.xml │ │ │ │ ├── ic_home_black_24dp.xml │ │ │ │ ├── ic_launcher_background.xml │ │ │ │ └── ic_notifications_black_24dp.xml │ │ │ ├── drawable-v24/ │ │ │ │ └── ic_launcher_foreground.xml │ │ │ ├── layout/ │ │ │ │ ├── activity_camera.xml │ │ │ │ ├── activity_main.xml │ │ │ │ └── tfe_od_camera_connection_fragment_tracking.xml │ │ │ ├── menu/ │ │ │ │ └── bottom_nav_menu.xml │ │ │ ├── mipmap-anydpi-v26/ │ │ │ │ ├── ic_launcher.xml │ │ │ │ └── ic_launcher_round.xml │ │ │ ├── navigation/ │ │ │ │ └── mobile_navigation.xml │ │ │ ├── values/ │ │ │ │ ├── colors.xml │ │ │ │ ├── dimens.xml │ │ │ │ ├── strings.xml │ │ │ │ └── themes.xml │ │ │ └── values-night/ │ │ │ └── themes.xml │ │ └── test/ │ │ └── java/ │ │ └── com/ │ │ └── example/ │ │ └── tflite_yolov5_test/ │ │ └── ExampleUnitTest.java │ ├── build.gradle │ ├── gradle/ │ │ └── wrapper/ │ │ ├── gradle-wrapper.jar │ │ └── gradle-wrapper.properties │ ├── gradle.properties │ ├── gradlew │ ├── gradlew.bat │ └── settings.gradle ├── benchmark/ │ └── README.md ├── convert_model/ │ ├── README.md │ └── quantize.py ├── docker/ │ └── Dockerfile ├── host/ │ ├── README.md │ ├── cococlass.txt │ ├── detect.py │ ├── detector_head.py │ ├── evaluate.py │ ├── metric.py │ ├── postprocess.py │ ├── results/ │ │ ├── android_result.json │ │ └── host_result.json │ └── runner.py └── tflite_model/ ├── yolov5s_fp32_320.tflite ├── yolov5s_fp32_640.tflite ├── yolov5s_int8_320.tflite └── yolov5s_int8_640.tflite ================================================ FILE CONTENTS ================================================ ================================================ FILE: .gitmodules ================================================ [submodule "yolov5"] path = yolov5 url = https://github.com/ultralytics/yolov5 ================================================ FILE: LICENSE.md ================================================ GNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007 Copyright (C) 2007 Free Software Foundation, Inc. 
Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The GNU General Public License is a free, copyleft license for software and other kinds of works. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. "This License" refers to version 3 of the GNU General Public License. 
"Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. 
For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. 
You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. 
If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. 
Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. 
Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's "contributor version". 
A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. "Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. 
Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. <one line to give the program's name and a brief idea of what it does.> Copyright (C) <year> <name of author> This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <https://www.gnu.org/licenses/>. Also add information on how to contact you by electronic and paper mail. If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: <program> Copyright (C) <year> <name of author> This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an "about box". You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see <https://www.gnu.org/licenses/>. The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read <https://www.gnu.org/philosophy/why-not-lgpl.html>.

================================================
FILE: README.md
================================================

# yolov5s_android:rocket:
The implementation of yolov5s on Android for the [yolov5s export contest](https://github.com/ultralytics/yolov5/discussions/3213).
Download the latest Android APK from [release](https://github.com/lp6m/yolov5s_android/releases) and install it on your device.

**UPDATE:rocket: 2022/06/25** Added a tutorial on how to integrate models trained with custom data: [Custom Model Integration Tutorial](https://github.com/lp6m/yolov5s_android/issues/14)

## Environment
- Host: Ubuntu 18.04
- Docker
  * TensorFlow 2.4.0
  * PyTorch 1.7.0
  * OpenVINO 2021.3
- Android App
  * Android Studio 4.2.1
  * minSdkVersion 28
  * targetSdkVersion 29
  * TfLite 2.4.0
- Android Device
  * Xiaomi Mi 11 (Storage 128GB / RAM 8GB)
  * OS MIUI 12.5.8

We use a Docker container for host evaluation and model conversion.
```sh
git clone --recursive https://github.com/lp6m/yolov5s_android
cd yolov5s_android
docker build ./ -f ./docker/Dockerfile -t yolov5s_android
docker run -it --gpus all -v `pwd`:/workspace yolov5s_android bash
```

## Files
- `./app`
  * Android application.
  * To build the application yourself, copy `./tflite_model/*.tflite` to `app/tflite_yolov5_test/app/src/main/assets/`, and build with Android Studio.
  * The app can perform inference with various configurations of input size, inference precision, and model precision.
  * In 'Open Directory Mode', the app saves the detected bounding box results as a JSON file in COCO format.
  * Realtime detection from the camera image (precision and input size are fixed to int8/320). It achieves about **15 FPS** on the Mi 11.
  * **NOTE** Please select the image/directory as an absolute path from 'Device'. The app does not support selecting an image/directory from 'Recent' on some devices.
- `./benchmark`
  * Benchmark script and results obtained with the [TFLite Model Benchmark Tool with C++ Binary](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/lite/tools/benchmark#profiling-model-operators).
- `./convert_model`
  * Model conversion guide and model quantization script.
- `./docker`
  * Dockerfile for the evaluation and model conversion environment.
- `./host`
  * `detect.py`: Run detection on an image with a TfLite model in the host environment.
  * `evaluate.py`: Run evaluation with the COCO validation dataset and inference results.
- `./tflite_model`
  * Converted TfLite models.

## Performance
### Latency
These results are measured on a `Xiaomi Mi 11`.
Please refer to [`benchmark/README.md`](https://github.com/lp6m/yolov5s_android/tree/master/benchmark) for details of the benchmark command.
The latency does not include pre/post-processing time or data transfer time.

#### float32 model
| delegate | 640x640 [ms] | 320x320 [ms] |
| :-------------------- | -----------: | -----------: |
| None (CPU) | 249 | 61 |
| NNAPI (qti-gpu, fp32) | 156 | 112 |
| NNAPI (qti-gpu, fp16) | 92 | 79 |

#### int8 model
We tried to accelerate inference by using `NNAPI (qti-dsp)` to offload computation to the Hexagon DSP, but it does not work for now. Please see [here](https://github.com/lp6m/yolov5s_android/tree/dev/benchmark#nnapi-qti-dsp-not-working) for details.

| delegate | 640x640 [ms] | 320x320 [ms] |
| :------------------- | -----------: | -----------: |
| None (CPU) | 95 | 23 |
| NNAPI (qti-default) | Not working | Not working |
| NNAPI (qti-dsp) | Not working | Not working |

## Accuracy
Please refer to [host/README.md](https://github.com/lp6m/yolov5s_android/tree/master/host#example2) for the evaluation method.
We set `conf_thresh=0.25` and `iou_thresh=0.45` as the NMS parameters.

| device, model, delegate | 640x640 mAP | 320x320 mAP |
| :-------------------------------- | ----------: | ----------: |
| host GPU (Tflite + PyTorch, fp32) | 27.8 | 26.6 |
| host CPU (Tflite + PyTorch, int8) | 26.6 | 25.5 |
| NNAPI (qti-gpu, fp16) | 28.5 | 26.8 |
| CPU (int8) | 27.2 | 25.8 |

## Model conversion
This project focuses on obtaining a tflite model by **model conversion from the PyTorch original implementation, rather than writing our own implementation in tflite**.
We convert the model in this order: `PyTorch -> ONNX -> OpenVINO -> TfLite`.
To convert the model from OpenVINO to TfLite, we use [openvino2tensorflow](https://github.com/PINTO0309/openvino2tensorflow).
Please refer to [convert_model/README.md](https://github.com/lp6m/yolov5s_android/tree/master/convert_model) for the model conversion.


================================================
FILE: app/README.md
================================================

# Android App
This application uses the [TFLite Android Support Library](https://www.tensorflow.org/lite/guide/android).

### TfliteRunner
[`TfliteRunner.java`](https://github.com/lp6m/yolov5s_android/blob/master/app/tflite_yolov5_test/app/src/main/java/com/example/tflite_yolov5_test/TfliteRunner.java) is the main class for running the TfLite model. It applies a delegate according to the specified running mode.

### postprocess.cpp
[`postprocess.cpp`](https://github.com/lp6m/yolov5s_android/blob/master/app/tflite_yolov5_test/app/src/main/cpp/postprocess.cpp) corresponds to the [detect layer module](https://github.com/ultralytics/yolov5/blob/master/models/yolo.py#L33) and `non_max_suppression` of the original yolov5. This C++ code is called by `TfliteRunner` via the JNI interface.

### Realtime inference
For realtime inference from the camera image, we copied much of the code from the [TensorFlow Lite Object Detection Android Demo](https://github.com/tensorflow/examples/tree/master/lite/examples/object_detection/android) into `src/main/java/com/example/tflite_yolov5_test/customview, camera`.
Realtime inference is not the main topic of the contest, so please forgive me for porting the code in a messy way!
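As a reference for the decode described above, here is a minimal NumPy sketch of what `postprocess.cpp` computes for a single detection head. It is a sketch under assumptions, not the app's actual API: the function name `decode_head`, the `[1, grid, grid, 3*85]` tensor layout, and the plain objectness threshold are illustrative, while the box equations themselves are the ones in the C++ source (and in yolov5's `Detect` module). The C++ code additionally avoids calling sigmoid on every candidate by comparing the raw objectness against `revsigmoid(conf_thresh)`, and the decoded candidates are then filtered by the batched NMS in `nms.h`.

```python
# Minimal sketch of the per-head decode done natively in postprocess.cpp.
# Assumes a raw (pre-sigmoid) output tensor of shape [1, grid, grid, 3*85];
# names and layout here are illustrative, not the app's actual interface.
import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def decode_head(out, stride, anchors, conf_thresh=0.25):
    """Return [x1, y1, x2, y2, conf, class] rows for one YOLOv5 head."""
    _, gh, gw, _ = out.shape
    out = out.reshape(1, gh, gw, 3, 85)  # 3 anchors x (4 box + 1 obj + 80 cls)
    boxes = []
    for gy in range(gh):
        for gx in range(gw):
            for a in range(3):
                p = out[0, gy, gx, a]
                obj = sigmoid(p[4])
                if obj < conf_thresh:  # postprocess.cpp does this pre-sigmoid
                    continue
                cls = sigmoid(p[5:])
                conf = obj * cls.max()
                # same equations as postprocess.cpp / yolov5's Detect module
                cx = (sigmoid(p[0]) * 2.0 - 0.5 + gx) * stride
                cy = (sigmoid(p[1]) * 2.0 - 0.5 + gy) * stride
                w = (sigmoid(p[2]) * 2.0) ** 2 * anchors[a][0]
                h = (sigmoid(p[3]) * 2.0) ** 2 * anchors[a][1]
                boxes.append([cx - w / 2, cy - h / 2,
                              cx + w / 2, cy + h / 2,
                              conf, int(cls.argmax())])
    return boxes
```

The three heads use strides 8/16/32 with the anchor sets listed in `postprocess.cpp`; running `decode_head` on each output and concatenating the rows reproduces the candidate list that is handed to NMS.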
================================================
FILE: app/tflite_yolov5_test/.gitignore
================================================

*.iml
.gradle
/local.properties
/.idea/caches
/.idea/libraries
/.idea/modules.xml
/.idea/workspace.xml
/.idea/navEditor.xml
/.idea/assetWizardSettings.xml
.DS_Store
/build
/captures
.externalNativeBuild
.cxx
local.properties
app/src/main/assets/*.tflite
app/release


================================================
FILE: app/tflite_yolov5_test/.idea/.gitignore
================================================

# Default ignored files
/shelf/
/workspace.xml


================================================
FILE: app/tflite_yolov5_test/.idea/compiler.xml
================================================


================================================
FILE: app/tflite_yolov5_test/.idea/gradle.xml
================================================


================================================
FILE: app/tflite_yolov5_test/.idea/jarRepositories.xml
================================================


================================================
FILE: app/tflite_yolov5_test/.idea/misc.xml
================================================


================================================
FILE: app/tflite_yolov5_test/.idea/runConfigurations.xml
================================================


================================================
FILE: app/tflite_yolov5_test/.idea/vcs.xml
================================================


================================================
FILE: app/tflite_yolov5_test/app/.gitignore
================================================

/build


================================================
FILE: app/tflite_yolov5_test/app/CMakeLists.txt
================================================

cmake_minimum_required(VERSION 3.4.1)

add_library( # Sets the name of the library.
             native-lib

             # Sets the library as a shared library.
             SHARED

             # Provides a relative path to your source file(s).
             src/main/cpp/postprocess.cpp )

# Searches for a specified prebuilt library and stores the path as a
# variable. Because CMake includes system libraries in the search path by
# default, you only need to specify the name of the public NDK library
# you want to add. CMake verifies that the library exists before
# completing its build.
find_library( # Sets the name of the path variable.
              log-lib

              # Specifies the name of the NDK library that
              # you want CMake to locate.
              log )

# Specifies libraries CMake should link to your target library. You
# can link multiple libraries, such as libraries you define in this
# build script, prebuilt third-party libraries, or system libraries.
target_link_libraries( # Specifies the target library.
                       native-lib

                       # Links the target library to the log library
                       # included in the NDK.
                       ${log-lib} )


================================================
FILE: app/tflite_yolov5_test/app/build.gradle
================================================

plugins {
    id 'com.android.application'
}

android {
    compileSdkVersion 29
    buildToolsVersion "30.0.3"

    defaultConfig {
        ndk {
            abiFilters 'armeabi-v7a', 'arm64-v8a'
        }
        applicationId "com.example.tflite_yolov5_test"
        minSdkVersion 28
        targetSdkVersion 29
        versionCode 1
        versionName "1.0"
        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
        externalNativeBuild {
            cmake {
                cppFlags "-O3"
            }
        }
    }

    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
        }
    }
    compileOptions {
        sourceCompatibility JavaVersion.VERSION_1_8
        targetCompatibility JavaVersion.VERSION_1_8
    }
    externalNativeBuild {
        cmake {
            path "CMakeLists.txt"
        }
    }
    buildFeatures {
        viewBinding true
    }
}

dependencies {
    implementation 'org.nanohttpd:nanohttpd:2.3.1'
    implementation 'androidx.appcompat:appcompat:1.2.0'
    implementation 'com.google.android.material:material:1.2.1'
    implementation 'androidx.constraintlayout:constraintlayout:2.0.1'
    implementation 'androidx.lifecycle:lifecycle-livedata-ktx:2.2.0'
    implementation 'androidx.lifecycle:lifecycle-viewmodel-ktx:2.2.0'
    implementation 'androidx.navigation:navigation-fragment:2.3.0'
    implementation 'androidx.navigation:navigation-ui:2.3.0'
    implementation 'org.tensorflow:tensorflow-lite:2.4.0'
    //implementation 'org.tensorflow:tensorflow-lite:0.0.0-nightly-SNAPSHOT'
    //implementation 'org.tensorflow:tensorflow-lite:0.0.0-nightly'
    //implementation 'org.tensorflow:tensorflow-lite-select-tf-ops:0.0.0-nightly'
    //implementation 'org.tensorflow:tensorflow-lite-gpu:0.0.0-nightly-SNAPSHOT'
    //implementation 'org.tensorflow:tensorflow-lite-support:0.0.0-nightly-SNAPSHOT'
    testImplementation 'junit:junit:4.+'
    androidTestImplementation 'androidx.test.ext:junit:1.1.2'
    androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0'
}


================================================
FILE: app/tflite_yolov5_test/app/proguard-rules.pro
================================================

# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
#   http://developer.android.com/guide/developing/tools/proguard.html

# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
#   public *;
#}

# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable

# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile


================================================
FILE: app/tflite_yolov5_test/app/src/androidTest/java/com/example/tflite_yolov5_test/ExampleInstrumentedTest.java
================================================

package com.example.tflite_yolov5_test;

import android.content.Context;

import androidx.test.platform.app.InstrumentationRegistry;
import androidx.test.ext.junit.runners.AndroidJUnit4;

import org.junit.Test;
import org.junit.runner.RunWith;

import static org.junit.Assert.*;

/**
 * Instrumented test, which will execute on an Android device.
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {
    @Test
    public void useAppContext() {
        // Context of the app under test.
        Context appContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
        assertEquals("com.example.tflite_yolov5_test", appContext.getPackageName());
    }
}


================================================
FILE: app/tflite_yolov5_test/app/src/main/AndroidManifest.xml
================================================


================================================
FILE: app/tflite_yolov5_test/app/src/main/cpp/nms.h
================================================

//https://github.com/martinkersner/non-maximum-suppression-cpp/blob/master/nms.cpp
#include <vector>
#include <numeric>
#include <algorithm>

using namespace std;

struct bbox{
    float x1;
    float y1;
    float x2;
    float y2;
    float conf;
    int class_idx;
    bbox(float x1, float y1, float x2, float y2, float conf, int class_idx) :
        x1(x1), y1(y1), x2(x2), y2(y2), conf(conf), class_idx(class_idx){
    }
};

#define Point_XMIN 0
#define Point_XMAX 1
#define Point_YMIN 2
#define Point_YMAX 3

vector<float> GetPointFromRect(const vector<bbox> &rect, const int pos) {
    vector<float> points;
    for (const auto & p: rect) {
        float point;
        if (pos == Point_XMIN) point = p.x1;
        else if (pos == Point_XMAX) point = p.x2;
        else if (pos == Point_YMIN) point = p.y1;
        else if (pos == Point_YMAX) point = p.y2;
        points.push_back(point);
    }
    return points;
}

vector<float> ComputeArea(const vector<float> & x1, const vector<float> & y1,
                          const vector<float> & x2, const vector<float> & y2) {
    vector<float> area;
    auto len = x1.size();
    for (decltype(len) idx = 0; idx < len; ++idx) {
        auto tmpArea = (x2[idx] - x1[idx] + 1) * (y2[idx] - y1[idx] + 1);
        area.push_back(tmpArea);
    }
    return area;
}

vector<int> argsort_byscore(const vector<bbox> & v) {
    // initialize original index locations
    vector<int> idx(v.size());
    std::iota(idx.begin(), idx.end(), 0);
    // sort indexes based on comparing values in v
    sort(idx.begin(), idx.end(),
         [&v](int i1, int i2) {return v[i1].conf < v[i2].conf;});
    return idx;
}

vector<float> Maximum(const float & num, const vector<float> & vec) {
    auto maxVec = vec;
    auto len = vec.size();
    for (decltype(len) idx = 0; idx < len; ++idx)
        if (vec[idx] < num) maxVec[idx] = num;
    return maxVec;
}

vector<float> Minimum(const float & num, const vector<float> & vec) {
    auto minVec = vec;
    auto len = vec.size();
    for (decltype(len) idx = 0; idx < len; ++idx)
        if (vec[idx] > num) minVec[idx] = num;
    return minVec;
}

vector<float> CopyByIndexes(const vector<float> & vec, const vector<int> & idxs) {
    vector<float> resultVec;
    for (const auto & idx : idxs)
        resultVec.push_back(vec[idx]);
    return resultVec;
}

vector<int> RemoveLast(const vector<int> & vec) {
    auto resultVec = vec;
    resultVec.erase(resultVec.end()-1);
    return resultVec;
}

vector<float> Subtract(const vector<float> & vec1, const vector<float> & vec2) {
    vector<float> result;
    auto len = vec1.size();
    for (decltype(len) idx = 0; idx < len; ++idx)
        result.push_back(vec1[idx] - vec2[idx] + 1);
    return result;
}

vector<float> Multiply(const vector<float> & vec1, const vector<float> & vec2) {
    vector<float> resultVec;
    auto len = vec1.size();
    for (decltype(len) idx = 0; idx < len; ++idx)
        resultVec.push_back(vec1[idx] * vec2[idx]);
    return resultVec;
}

vector<float> Divide(const vector<float> & vec1, const vector<float> & vec2) {
    vector<float> resultVec;
    auto len = vec1.size();
    for (decltype(len) idx = 0; idx < len; ++idx)
        resultVec.push_back(vec1[idx] / vec2[idx]);
    return resultVec;
}

vector<int> WhereLarger(const vector<float> & vec, const float & threshold) {
    vector<int> resultVec;
    auto len = vec.size();
    for (decltype(len) idx = 0; idx < len; ++idx)
        if (vec[idx] > threshold)
            resultVec.push_back(idx);
    return resultVec;
}

vector<int>
RemoveByIndexes(const vector<int> & vec, const vector<int> & idxs) {
    auto resultVec = vec;
    auto offset = 0;
    for (const auto & idx : idxs) {
        resultVec.erase(resultVec.begin() + idx + offset);
        offset -= 1;
    }
    return resultVec;
}

template <typename T>
vector<T> FilterVector(const vector<T> & vec, const vector<int> & idxs) {
    vector<T> resultVec;
    for (const auto & idx: idxs)
        resultVec.push_back(vec[idx]);
    return resultVec;
}

vector<bbox> nms(const vector<bbox> & candidates, const float &iou_threshold) {
    if (candidates.empty()) return vector<bbox>();

    // grab the coordinates of the bounding boxes
    auto x1 = GetPointFromRect(candidates, Point_XMIN);
    auto y1 = GetPointFromRect(candidates, Point_YMIN);
    auto x2 = GetPointFromRect(candidates, Point_XMAX);
    auto y2 = GetPointFromRect(candidates, Point_YMAX);

    // compute the area of the bounding boxes and sort the bounding
    // boxes by confidence score (ascending, so the last index is the best box)
    auto area = ComputeArea(x1, y1, x2, y2);
    auto idxs = argsort_byscore(candidates);

    int last;
    int i;
    vector<int> pick;

    // keep looping while some indexes still remain in the indexes list
    while (idxs.size() > 0) {
        // grab the last index in the indexes list and add the
        // index value to the list of picked indexes
        last = idxs.size() - 1;
        i = idxs[last];
        pick.push_back(i);

        // find the largest (x, y) coordinates for the start of
        // the bounding box and the smallest (x, y) coordinates
        // for the end of the bounding box
        auto idxsWoLast = RemoveLast(idxs);
        auto xx1 = Maximum(x1[i], CopyByIndexes(x1, idxsWoLast));
        auto yy1 = Maximum(y1[i], CopyByIndexes(y1, idxsWoLast));
        auto xx2 = Minimum(x2[i], CopyByIndexes(x2, idxsWoLast));
        auto yy2 = Minimum(y2[i], CopyByIndexes(y2, idxsWoLast));

        // compute the width and height of the bounding box
        auto w = Maximum(0, Subtract(xx2, xx1));
        auto h = Maximum(0, Subtract(yy2, yy1));

        // compute the ratio of overlap
        auto overlap = Divide(Multiply(w, h), CopyByIndexes(area, idxsWoLast));

        // delete all indexes from the index list that have an
        // overlap larger than the provided threshold
        auto deleteIdxs = WhereLarger(overlap, iou_threshold);
        deleteIdxs.push_back(last);
        idxs = RemoveByIndexes(idxs, deleteIdxs);
    }

    return FilterVector(candidates, pick);
}


================================================
FILE: app/tflite_yolov5_test/app/src/main/cpp/postprocess.cpp
================================================

#include <jni.h>
#include <string>
#include <vector>
#include <math.h>
#include "nms.h"

using namespace std;

float sigmoid(float f){
    return (float)(1.0f / (1.0f + exp(-f)));
}
float revsigmoid(float f){
    const float eps = 1e-8;
    return -1.0f * (float)log((1.0f / (f + eps)) - 1.0f);
}

#define CLASS_NUM 80
#define max_wh 4096

void detector(
        vector<bbox>* bbox_candidates,
        JNIEnv *env,
        jobjectArray input,
        const int gridnum,
        const int strides,
        const int anchorgrid[3][2],
        const float conf_thresh){
    float revsigmoid_conf = revsigmoid(conf_thresh);
    //Warning: For now, we assume batch_size is always 1.
    for(int bi = 0; bi < 1; bi++){
        jobjectArray ptr_d0 = (jobjectArray)env->GetObjectArrayElement(input, bi);
        for(int gy = 0; gy < gridnum; gy++){
            jobjectArray ptr_d1 = (jobjectArray)env->GetObjectArrayElement(ptr_d0, gy);
            for(int gx = 0; gx < gridnum; gx++){
                jobjectArray ptr_d2 = (jobjectArray)env->GetObjectArrayElement(ptr_d1, gx);
                auto elmptr = env->GetFloatArrayElements((jfloatArray)ptr_d2, nullptr);
                for(int ch = 0; ch < 3; ch++){
                    int offset = 85 * ch;
                    auto elmptr_ch = elmptr + offset;
                    //don't apply sigmoid to all bbox candidates for efficiency
                    float obj_conf_unsigmoid = elmptr_ch[4];
                    //if (sigmoid(obj_conf_unsigmoid) < conf_thresh) continue;
                    if (obj_conf_unsigmoid >= revsigmoid_conf) {
                        //get maximum conf class
                        float max_class_conf = elmptr_ch[5];
                        int max_class_idx = 0;
                        for(int class_idx = 1; class_idx < CLASS_NUM; class_idx++){
                            float class_conf = elmptr_ch[class_idx + 5];
                            if (class_conf > max_class_conf){
                                max_class_conf = class_conf;
                                max_class_idx = class_idx;
                            }
                        }
                        // class conf filter
                        float bbox_conf = sigmoid(max_class_conf) * sigmoid(obj_conf_unsigmoid);
                        //if (bbox_conf < conf_thresh) continue;
                        // xywh2xyxy
                        // batched nms (by adding class * max_wh to coordinates,
                        // we can get nms result for all classes by just one nms call)
                        //grid[gridnum][gy][gx][0] = gx
                        //grid[gridnum][gy][gx][1] = gy
                        float cx = ((sigmoid(elmptr_ch[0]) * 2.0f) - 0.5f + (float)gx) * (float)strides;
                        float cy = ((sigmoid(elmptr_ch[1]) * 2.0f) - 0.5f + (float)gy) * (float)strides;
                        float w = (sigmoid(elmptr_ch[2]) * sigmoid(elmptr_ch[2])) * 4.0f * (float)anchorgrid[ch][0];
                        float h = (sigmoid(elmptr_ch[3]) * sigmoid(elmptr_ch[3])) * 4.0f * (float)anchorgrid[ch][1];
                        float x1 = cx - w / 2.0f + max_wh * max_class_idx;
                        float y1 = cy - h / 2.0f + max_wh * max_class_idx;
                        float x2 = cx + w / 2.0f + max_wh * max_class_idx;
                        float y2 = cy + h / 2.0f + max_wh * max_class_idx;
                        bbox box = bbox(x1, y1, x2, y2, bbox_conf, max_class_idx);
                        bbox_candidates->push_back(box);
                    }
                }
                env->ReleaseFloatArrayElements((jfloatArray)ptr_d2, elmptr, 0);
                env->DeleteLocalRef(ptr_d2);
            }
            env->DeleteLocalRef(ptr_d1);
        }
        env->DeleteLocalRef(ptr_d0);
    }
    env->DeleteLocalRef(input);
}

extern "C"
jobjectArray Java_com_example_tflite_1yolov5_1test_TfliteRunner_postprocess (
        JNIEnv *env,
        jobject /* this */,
        jobjectArray input1, //80x80 or 40x40
        jobjectArray input2, //40x40 or 20x20
        jobjectArray input3, //20x20 or 10x10
        jint input_size,
        jfloat conf_thresh,
        jfloat iou_thresh){
    //conf
    const int anchorgrids[3][3][2] = {
            {{10, 13}, {16, 30}, {33, 23}},     //80
            {{30, 61}, {62, 45}, {59, 119}},    //40
            {{116, 90}, {156, 198}, {373, 326}} //20
    };
    const int strides[3] = {8, 16, 32};
    vector<bbox> bbox_candidates; //TODO: reserve
    //Detector
    detector(&bbox_candidates, env, input1, input_size / 8, strides[0], anchorgrids[0], conf_thresh);
    detector(&bbox_candidates, env, input2, input_size / 16, strides[1], anchorgrids[1], conf_thresh);
    detector(&bbox_candidates, env, input3, input_size / 32, strides[2], anchorgrids[2], conf_thresh);
    //non-max-suppression
    vector<bbox> nms_results = nms(bbox_candidates, iou_thresh);
    //return 2-dimension array [detected_box][6(x1, y1, x2, y2, conf, class)]
    jobjectArray objArray;
    jclass floatArray = env->FindClass("[F");
    if (floatArray == NULL) return NULL;
    int size = nms_results.size();
    objArray = env->NewObjectArray(size, floatArray, NULL);
    if (objArray == NULL) return NULL;
    for(int i = 0; i < nms_results.size(); i++){
        int class_idx = nms_results[i].class_idx;
        // undo the per-class offset that was added for batched nms
        float x1 = nms_results[i].x1 - class_idx * max_wh;
        float y1 = nms_results[i].y1 - class_idx * max_wh;
        float x1 = nms_results[i].x1 - class_idx * max_wh;
        float y1 = nms_results[i].y1 - class_idx * max_wh;
        float x2 = nms_results[i].x2 - class_idx * max_wh;
        float y2 = nms_results[i].y2 - class_idx * max_wh;
        float conf = nms_results[i].conf;
        float boxres[6] = {x1, y1, x2, y2, conf, (float)class_idx};
        jfloatArray iarr = env->NewFloatArray((jsize)6);
        if (iarr == NULL) return NULL;
        env->SetFloatArrayRegion(iarr, 0, 6, boxres);
        env->SetObjectArrayElement(objArray, i, iarr);
        env->DeleteLocalRef(iarr);
    }
    return objArray;
}

================================================
FILE: app/tflite_yolov5_test/app/src/main/java/com/example/tflite_yolov5_test/ImageProcess.java
================================================
package com.example.tflite_yolov5_test;

import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.RectF;
import android.text.TextUtils;
import com.example.tflite_yolov5_test.camera.env.BorderedText;
import java.util.List;

public class ImageProcess {
    private static final int[] COLORS = {
        Color.BLUE, Color.RED, Color.GREEN, Color.YELLOW, Color.CYAN, Color.MAGENTA, Color.WHITE,
        Color.parseColor("#55FF55"), Color.parseColor("#FFA500"), Color.parseColor("#FF8888"),
        Color.parseColor("#AAAAFF"), Color.parseColor("#FFFFAA"), Color.parseColor("#55AAAA"),
        Color.parseColor("#AA33AA"), Color.parseColor("#0D0068")
    };
    static public Bitmap drawBboxes(List<TfliteRunner.Recognition> bboxes, Bitmap bitmap, int inputSize) {
        Bitmap mutableBitmap = bitmap.copy(Bitmap.Config.ARGB_8888, true);
        bitmap.recycle();
        final Canvas canvas = new Canvas(mutableBitmap);
        final Paint paint = new Paint();
        BorderedText borderedText = new BorderedText(25);
        paint.setStyle(Paint.Style.STROKE);
        paint.setStrokeWidth(3.0f);
        for (TfliteRunner.Recognition bbox : bboxes) {
            int color_idx = bbox.getClass_idx() % COLORS.length;
            paint.setColor(COLORS[color_idx]);
            RectF location = bbox.getLocation();
            float left = location.left * bitmap.getWidth() / inputSize;
            float right = location.right * bitmap.getWidth() / inputSize;
            float top = location.top * bitmap.getHeight() / inputSize;
            float bottom = location.bottom * bitmap.getHeight() / inputSize;
            RectF drawBoxRect = new RectF(left, top, right, bottom);
            canvas.drawRect(drawBoxRect, paint);
            String labelString = String.format("%s %.2f", bbox.getTitle(), (100 * bbox.getConfidence()));
            borderedText.drawText(canvas, left - 10, top - 10, labelString, paint);
        }
        return mutableBitmap;
    }
}
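// Note (added for clarity): drawBboxes() takes boxes in model-input coordinates
// (inputSize x inputSize) and rescales them to the display bitmap's own
// dimensions before drawing. The input Bitmap is recycled right after being
// copied; Bitmap.getWidth()/getHeight() stay valid on a recycled bitmap, which
// is what the scaling expressions above rely on.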
================================================
FILE: app/tflite_yolov5_test/app/src/main/java/com/example/tflite_yolov5_test/MainActivity.java
================================================
package com.example.tflite_yolov5_test;

import android.content.Context;
import android.graphics.RectF;
import android.os.Bundle;
import com.example.tflite_yolov5_test.camera.DetectorActivity;
import androidx.appcompat.app.AppCompatActivity;
import android.Manifest;
import android.app.AlertDialog;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.Handler;
import android.os.HandlerThread;
import android.provider.DocumentsContract;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.RadioButton;
import android.widget.SeekBar;
import android.widget.TextView;
import android.widget.Toast;
import java.io.IOException;
import org.json.JSONArray;
//import org.tensorflow.lite.gpu.GpuDelegate;
import java.io.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import android.graphics.Bitmap;
import java.lang.Math;

public class MainActivity extends AppCompatActivity {
    final int REQUEST_OPEN_FILE = 1;
    final int REQUEST_OPEN_DIRECTORY = 9999;
    //permission
    private int inputSize = -1;
    private File[] process_files = null;
    private final int REQUEST_PERMISSION = 1000;
    private final String[] PERMISSIONS = {
        Manifest.permission.READ_EXTERNAL_STORAGE,
        Manifest.permission.WRITE_EXTERNAL_STORAGE,
    };
    //background task
    private boolean handler_stop_request;
    private Handler handler;
    private HandlerThread handlerThread;

    private void checkPermission(){
        if (!isGranted()){
            requestPermissions(PERMISSIONS, REQUEST_PERMISSION);
        }
    }
    @Override
    public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        Toast.makeText(this, "onRequestPermissionResult", Toast.LENGTH_LONG).show();
        if (requestCode == REQUEST_PERMISSION){
            boolean result = isGranted();
            Toast.makeText(getApplicationContext(), result ? "OK" : "NG", Toast.LENGTH_SHORT).show();
        }
    }
    private boolean isGranted(){
        for (int i = 0; i < PERMISSIONS.length; i++){
            if (checkSelfPermission(PERMISSIONS[i]) != PackageManager.PERMISSION_GRANTED) {
                if (shouldShowRequestPermissionRationale(PERMISSIONS[i])) {
                    Toast.makeText(this, "permission is required", Toast.LENGTH_LONG).show();
                }
                return false;
            }
        }
        return true;
    }
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        SeekBar conf_seekBar = (SeekBar)findViewById(R.id.conf_seekBar);
        conf_seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener(){
            @Override
            public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                TextView conf_textView = (TextView)findViewById(R.id.conf_TextView);
                conf_textView.setText(String.format("Confidence Threshold: %.2f", (float)progress / 100));
            }
            @Override
            public void onStopTrackingTouch(SeekBar seekBar) { }
            @Override
            public void onStartTrackingTouch(SeekBar seekBar) { }
        });
        conf_seekBar.setMax(100);
        conf_seekBar.setProgress(25); //0.25
        SeekBar iou_seekBar = (SeekBar)findViewById(R.id.iou_seekBar);
        iou_seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener(){
            @Override
            public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                TextView iou_textView = (TextView)findViewById(R.id.iou_TextView);
                iou_textView.setText(String.format("IoU Threshold: %.2f", (float)progress / 100));
            }
            @Override
            public void onStopTrackingTouch(SeekBar seekBar) { }
            @Override
            public void onStartTrackingTouch(SeekBar seekBar) { }
        });
        iou_seekBar.setMax(100);
        iou_seekBar.setProgress(45); //0.45
    }
    public void OnOpenImageButtonClick(View view){
        checkPermission();
        Intent intent = new Intent(Intent.ACTION_GET_CONTENT);
        intent.setType("image/*");
        intent.addCategory(Intent.CATEGORY_OPENABLE);
        startActivityForResult(Intent.createChooser(intent, "Open an image"), REQUEST_OPEN_FILE);
    }
    public void OnOpenDirButtonClick(View view){
        checkPermission();
        Intent intent = new Intent(Intent.ACTION_OPEN_DOCUMENT_TREE);
        intent.addCategory(Intent.CATEGORY_DEFAULT);
        startActivityForResult(Intent.createChooser(intent, "Open directory"), REQUEST_OPEN_DIRECTORY);
    }
    public void setResultImage(Bitmap bitmap){
        ImageView imageview = (ImageView)findViewById(R.id.resultImageView);
        imageview.setImageBitmap(bitmap);
    }
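    // Note (added for clarity): bboxesToMap() below emits one entry per box in
    // the COCO "detection results" format, so the saved result.json can be fed
    // to pycocotools-style evaluation (see host/evaluate.py). A single entry
    // looks roughly like (field names from the code; values are a made-up
    // example):
    //   {"image_id": 42, "bbox": [x, y, width, height],
    //    "score": 0.87, "category_id": 3}
    // Coordinates are clamped to the model input square and rescaled to the
    // original image size, and category_id uses the 91-class COCO ids.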
    ArrayList<HashMap<String, Object>> bboxesToMap(File file, List<TfliteRunner.Recognition> bboxes, int orig_h, int orig_w){
        ArrayList<HashMap<String, Object>> resList = new ArrayList<HashMap<String, Object>>();
        String basename = file.getName();
        basename = basename.substring(0, basename.lastIndexOf('.'));
        Object image_id;
        try{
            image_id = Integer.parseInt(basename);
        } catch (Exception e){
            image_id = basename;
        }
        for(TfliteRunner.Recognition bbox : bboxes){
            //clamp and scale to original image size
            RectF location = bbox.getLocation();
            float x1 = Math.min(Math.max(0, location.left), this.inputSize) * orig_w / (float)this.inputSize;
            float y1 = Math.min(Math.max(0, location.top), this.inputSize) * orig_h / (float)this.inputSize;
            float x2 = Math.min(Math.max(0, location.right), this.inputSize) * orig_w / (float)this.inputSize;
            float y2 = Math.min(Math.max(0, location.bottom), this.inputSize) * orig_h / (float)this.inputSize;
            float x = x1;
            float y = y1;
            float w = x2 - x1;
            float h = y2 - y1;
            float conf = bbox.getConfidence();
            int class_idx = TfliteRunner.get_coco91_from_coco80(bbox.getClass_idx());
            HashMap<String, Object> mapbox = new HashMap<>();
            mapbox.put("image_id", image_id);
            mapbox.put("bbox", new float[]{x, y, w, h});
            mapbox.put("score", conf);
            mapbox.put("category_id", class_idx);
            resList.add(mapbox);
        }
        return resList;
    }
    private boolean isBackgroundTaskRunning() {
        return this.handlerThread != null && this.handlerThread.isAlive();
    }
    public void OnInferenceTaskCompleted() {
        Button button = (Button) findViewById(R.id.runInferenceButton);
        button.setText("Run Inference");
        SeekBar conf_seekBar = (SeekBar) findViewById(R.id.conf_seekBar);
        conf_seekBar.setEnabled(true);
        SeekBar iou_seekBar = (SeekBar) findViewById(R.id.iou_seekBar);
        iou_seekBar.setEnabled(true);
    }
    public void OnInferenceTaskStart(){
        Button button = (Button)findViewById(R.id.runInferenceButton);
        button.setText("Stop Inference");
        SeekBar conf_seekBar = (SeekBar) findViewById(R.id.conf_seekBar);
        conf_seekBar.setEnabled(false);
        SeekBar iou_seekBar = (SeekBar) findViewById(R.id.iou_seekBar);
        iou_seekBar.setEnabled(false);
    }
    public float getConfThreshFromGUI(){ return ((float)((SeekBar)findViewById(R.id.conf_seekBar)).getProgress()) / 100.0f;}
    public float getIoUThreshFromGUI(){ return ((float)((SeekBar)findViewById(R.id.iou_seekBar)).getProgress()) / 100.0f;}
    public void OnRunInferenceButtonClick(View view){
        TfliteRunner runner;
        TfliteRunMode.Mode runmode = getRunModeFromGUI();
        this.inputSize = getInputSizeFromGUI();
        //validation
        if (this.process_files == null || this.process_files.length == 0){
            showErrorDialog("Please select image or directory.");
            return;
        }
        if (runmode == null) {
            showErrorDialog("Please select valid configurations.");
            return;
        }
        //open model
        try {
            Context context = getApplicationContext();
            runner = new TfliteRunner(context, runmode, this.inputSize, getConfThreshFromGUI(), getIoUThreshFromGUI());
        } catch (Exception e) {
            showErrorDialog("Model load failed: " + e.getMessage());
            return;
        }
        //check background task status
        if(isBackgroundTaskRunning()){
            //already inference is running, stop inference
            this.handler_stop_request = true;
            this.handlerThread.quitSafely();
            try {
                handlerThread.join();
                handlerThread = null;
                handler = null;
            } catch (final InterruptedException e) {
                addLog(e.getMessage() + "Exception!");
            }
            OnInferenceTaskCompleted();
            return;
        } else {
            //start inference task
            this.handler_stop_request = false;
            OnInferenceTaskStart();
        }
        //run inference in background
        this.handlerThread = new HandlerThread("inference");
        this.handlerThread.start();
        this.handler = new Handler(this.handlerThread.getLooper());
        ProgressBar pbar = (ProgressBar)findViewById(R.id.progressBar);
        File[] process_files = this.process_files;
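        // Note (added for clarity): a dedicated HandlerThread owns all TFLite
        // work; the posted Runnable below loops over the selected files, and
        // every UI mutation (progress bar, result image, dialogs) is marshalled
        // back with runOnUiThread(). handler_stop_request is polled once per
        // image, so pressing "Stop Inference" takes effect after the current
        // frame finishes.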
        pbar.setProgress(0);
        ArrayList<HashMap<String, Object>> resList = new ArrayList<>();
        runInBackground(
            new Runnable() {
                @Override
                public void run() {
                    try {
                        for(int i = 0; i < process_files.length; i++){
                            if (handler_stop_request) break;
                            File file = process_files[i];
                            InputStream is = new FileInputStream(file);
                            Bitmap bitmap = BitmapFactory.decodeStream(is);
                            Bitmap resized = TfliteRunner.getResizedImage(bitmap, inputSize);
                            runner.setInput(resized);
                            List<TfliteRunner.Recognition> bboxes = runner.runInference();
                            Bitmap resBitmap = ImageProcess.drawBboxes(bboxes, bitmap, getInputSizeFromGUI());
                            ArrayList<HashMap<String, Object>> bboxmaps = bboxesToMap(file, bboxes, bitmap.getHeight(), bitmap.getWidth());
                            resList.addAll(bboxmaps);
                            int ii = i;
                            runOnUiThread(
                                new Runnable() {
                                    @Override
                                    public void run () {
                                        pbar.setProgress(Math.min(100, (ii+1) * 100 / process_files.length));
                                        setResultImage(resBitmap);
                                    }
                                });
                            bitmap.recycle();
                        }
                    } catch (Exception e) {
                        runOnUiThread(
                            new Runnable() {
                                @Override
                                public void run() {
                                    showErrorDialog("Inference failed : " + e.getMessage());
                                }
                            }
                        );
                    }
                    //completed
                    runOnUiThread(
                        new Runnable() {
                            @Override
                            public void run() {
                                handler_stop_request = false;
                                OnInferenceTaskCompleted();
                                //output json if directory mode
                                if (process_files.length > 1) {
                                    try {
                                        String jsonpath = saveBboxesToJson(resList, process_files[0], "result.json");
                                        showInfoDialog("result json is saved : " + jsonpath);
                                    } catch (Exception e){
                                        showErrorDialog("json output failed : " + e.getMessage());
                                    }
                                }
                                addLog(runner.getLastElapsedTimeLog());
                            }
                        }
                    );
                }
            }
        );
    }
    String saveBboxesToJson(ArrayList<HashMap<String, Object>> resList, File file, String output_filename) throws org.json.JSONException, IOException{
        // HashMap[] resArr = (HashMap[])resList.toArray();
        JSONArray jarr = new JSONArray(resList);
        String jstr = jarr.toString();
        String filepath = file.getParent() + "/" + output_filename;
        FileOutputStream fileOutputStream = new FileOutputStream(filepath, false);
        fileOutputStream.write(jstr.getBytes());
        return filepath;
    }
    private void showErrorDialog(String text){ showDialog("Error", text);}
    private void showInfoDialog(String text){ showDialog("Info", text);}
    private void showDialog(String title, String text){
        new AlertDialog.Builder(this)
            .setTitle(title)
            .setMessage(text)
            .setPositiveButton("OK", null)
            .create().show();
    }
    private void addLog(String logtxt){
        TextView logtext = findViewById(R.id.logTextView);
        logtext.setText(logtext.getText() + logtxt + "\n");
    }
    private void setOneLineLog(String text){
        TextView onelinetextview = findViewById(R.id.oneLineLabel);
        onelinetextview.setText(text);
    }
    public void OnClearLogButton(View view) {
        TextView logtext = findViewById(R.id.logTextView);
        logtext.setText("");
    }
    void setImageView(Bitmap bitmap){
        ImageView imageview = (ImageView)findViewById(R.id.resultImageView);
        imageview.setImageBitmap(bitmap);
    }
    public void OnOpenCameraButtonClick(View view){
        if (isBackgroundTaskRunning()) {
            showErrorDialog("Please stop inference task");
            return;
        }
        Intent intent = new Intent(MainActivity.this, DetectorActivity.class);
        startActivity(intent);
    }
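    // Note (added for clarity): onActivityResult() below converts the returned
    // SAF content:// URIs to raw filesystem paths via PathUtils.getPath(), which
    // resolves DocumentsContract ids / the MediaStore "_data" column. This works
    // for the legacy external-storage layout this app targets, but raw-path
    // resolution like this is not guaranteed under scoped storage on newer
    // Android versions.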
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == REQUEST_OPEN_FILE) {
            // one image file is selected
            if (resultCode == RESULT_OK && data != null) {
                Uri uri = data.getData();
                if (uri != null) {
                    String fullpath = PathUtils.getPath(getApplicationContext(), uri);
                    this.process_files = new File[]{new File(fullpath)};
                }
            }
        } else if (requestCode == REQUEST_OPEN_DIRECTORY) {
            // image directory is selected
            if (resultCode == RESULT_OK && data != null) {
                Uri uri = data.getData();
                if (uri != null) {
                    Uri docUri = DocumentsContract.buildDocumentUriUsingTree(uri, DocumentsContract.getTreeDocumentId(uri));
                    String fullpath = PathUtils.getPath(getApplicationContext(), docUri);
                    File directory = new File(fullpath);
                    this.process_files = directory.listFiles(new ImageFilenameFilter());
                }
            }
        }
        if (this.process_files != null && this.process_files.length > 0){
            setOneLineLog(String.valueOf(this.process_files.length) + " images loaded.");
            try{
                InputStream is = new FileInputStream(this.process_files[0]);
                Bitmap bitmap = BitmapFactory.decodeStream(is);
                setResultImage(bitmap);
            } catch(Exception ex){
                setOneLineLog(ex.getMessage());
            }
        }
    }
    class ImageFilenameFilter implements FilenameFilter {
        public boolean accept(File dir, String name) {
            if (name.toLowerCase().matches(".*\\.jpg$|.*\\.jpeg$|.*\\.png$|.*\\.bmp$")) {
                return true;
            }
            return false;
        }
    }
    protected synchronized void runInBackground(final Runnable r) {
        if (this.handler != null) {
            this.handler.post(r);
        }
    }
    private TfliteRunMode.Mode getRunModeFromGUI(){
        boolean model_float = ((RadioButton)findViewById(R.id.radioButton_modelFloat)).isChecked();
        boolean model_int8 = ((RadioButton)findViewById(R.id.radioButton_modelInt)).isChecked();
        boolean precision_fp32 = ((RadioButton)findViewById(R.id.radioButton_runFP32)).isChecked();
        boolean precision_fp16 = ((RadioButton)findViewById(R.id.radioButton_runFP16)).isChecked();
        boolean precision_int8 = ((RadioButton)findViewById(R.id.radioButton_runInt8)).isChecked();
        boolean delegate_none = ((RadioButton)findViewById(R.id.radioButton_delegateNone)).isChecked();
        boolean delegate_nnapi = ((RadioButton)findViewById(R.id.radioButton_delegateNNAPI)).isChecked();
        boolean[] gui_selected = {model_float, model_int8, precision_fp32, precision_fp16, precision_int8, delegate_none, delegate_nnapi};
        final Map<TfliteRunMode.Mode, boolean[]> candidates = new HashMap<TfliteRunMode.Mode, boolean[]>(){{
            put(TfliteRunMode.Mode.NONE_FP32,      new boolean[]{true, false, true, false, false, true, false});
            put(TfliteRunMode.Mode.NONE_FP16,      new boolean[]{true, false, false, true, false, true, false});
            put(TfliteRunMode.Mode.NNAPI_GPU_FP32, new boolean[]{true, false, true, false, false, false, true});
            put(TfliteRunMode.Mode.NNAPI_GPU_FP16, new boolean[]{true, false, false, true, false, false, true});
            put(TfliteRunMode.Mode.NONE_INT8,      new boolean[]{false, true, false, false, true, true, false});
            put(TfliteRunMode.Mode.NNAPI_DSP_INT8, new boolean[]{false, true, false, false, true, false, true});
        }};
        for(Map.Entry<TfliteRunMode.Mode, boolean[]> entry : candidates.entrySet()){
            if (Arrays.equals(gui_selected, entry.getValue())) return entry.getKey();
        }
        //not found
        return null;
    }
    public int getInputSizeFromGUI(){
        RadioButton input_640 = findViewById(R.id.radioButton_640);
        if (input_640.isChecked()) return 640;
        else return 320;
    }
    //Eliminate infeasible run configurations(model, precision)
    public void onModelFloatClick(View view) {
        RadioButton precision_int8 = findViewById(R.id.radioButton_runInt8);
        if (precision_int8.isChecked()){
            RadioButton precision_fp32 = findViewById(R.id.radioButton_runFP32);
            precision_fp32.setChecked(true);
        }
    }
    public void onModelIntClick(View view) {
        RadioButton precision_fp32 = findViewById(R.id.radioButton_runFP32);
        RadioButton precision_fp16 = findViewById(R.id.radioButton_runFP16);
        if (precision_fp32.isChecked() || precision_fp16.isChecked()){
            RadioButton precision_int8 = findViewById(R.id.radioButton_runInt8);
            precision_int8.setChecked(true);
        }
    }
    public void onPrecisionFPClick(View view){
        RadioButton model_int = findViewById(R.id.radioButton_modelInt);
        if
(model_int.isChecked()) { RadioButton model_fp = findViewById(R.id.radioButton_modelFloat); model_fp.setChecked(true); } } public void onPrecisionIntClick(View view){ RadioButton model_fp = findViewById(R.id.radioButton_modelFloat); if (model_fp.isChecked()) { RadioButton model_int = findViewById(R.id.radioButton_modelInt); model_int.setChecked(true); } } } ================================================ FILE: app/tflite_yolov5_test/app/src/main/java/com/example/tflite_yolov5_test/PathUtils.java ================================================ package com.example.tflite_yolov5_test; import android.content.ContentUris; import android.content.Context; import android.database.Cursor; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Paint; import android.graphics.RectF; import android.net.Uri; import android.os.Build; import android.os.Environment; import android.provider.DocumentsContract; import android.provider.MediaStore; import java.util.List; public class PathUtils { public static String getPath(final Context context, final Uri uri) { // DocumentProvider if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT && DocumentsContract.isDocumentUri(context, uri)) { if (isExternalStorageDocument(uri)) {// ExternalStorageProvider final String docId = DocumentsContract.getDocumentId(uri); final String[] split = docId.split(":"); final String type = split[0]; String storageDefinition; if("primary".equalsIgnoreCase(type)){ return Environment.getExternalStorageDirectory() + "/" + split[1]; } else { if(Environment.isExternalStorageRemovable()){ storageDefinition = "EXTERNAL_STORAGE"; } else{ storageDefinition = "SECONDARY_STORAGE"; } return System.getenv(storageDefinition) + "/" + split[1]; } } else if (isDownloadsDocument(uri)) {// DownloadsProvider final String id = DocumentsContract.getDocumentId(uri); final Uri contentUri = ContentUris.withAppendedId( Uri.parse("content://downloads/public_downloads"), Long.valueOf(id)); return getDataColumn(context, contentUri, null, null); } else if (isMediaDocument(uri)) {// MediaProvider final String docId = DocumentsContract.getDocumentId(uri); final String[] split = docId.split(":"); final String type = split[0]; Uri contentUri = null; if ("image".equals(type)) { contentUri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI; } else if ("video".equals(type)) { contentUri = MediaStore.Video.Media.EXTERNAL_CONTENT_URI; } else if ("audio".equals(type)) { contentUri = MediaStore.Audio.Media.EXTERNAL_CONTENT_URI; } final String selection = "_id=?"; final String[] selectionArgs = new String[]{ split[1] }; return getDataColumn(context, contentUri, selection, selectionArgs); } } else if ("content".equalsIgnoreCase(uri.getScheme())) {// MediaStore (and general) // Return the remote address if (isGooglePhotosUri(uri)) return uri.getLastPathSegment(); return getDataColumn(context, uri, null, null); } else if ("file".equalsIgnoreCase(uri.getScheme())) {// File return uri.getPath(); } return null; } public static String getDataColumn(Context context, Uri uri, String selection, String[] selectionArgs) { Cursor cursor = null; final String column = "_data"; final String[] projection = { column }; try { cursor = context.getContentResolver().query(uri, projection, selection, selectionArgs, null); if (cursor != null && cursor.moveToFirst()) { final int column_index = cursor.getColumnIndexOrThrow(column); return cursor.getString(column_index); } } finally { if (cursor != null) cursor.close(); } return null; } 
    public static boolean isExternalStorageDocument(Uri uri) {
        return "com.android.externalstorage.documents".equals(uri.getAuthority());
    }
    public static boolean isDownloadsDocument(Uri uri) {
        return "com.android.providers.downloads.documents".equals(uri.getAuthority());
    }
    public static boolean isMediaDocument(Uri uri) {
        return "com.android.providers.media.documents".equals(uri.getAuthority());
    }
    public static boolean isGooglePhotosUri(Uri uri) {
        return "com.google.android.apps.photos.content".equals(uri.getAuthority());
    }
}

================================================
FILE: app/tflite_yolov5_test/app/src/main/java/com/example/tflite_yolov5_test/TfliteRunMode.java
================================================
package com.example.tflite_yolov5_test;

public class TfliteRunMode {
    public enum Mode{
        NONE_FP32,
        NONE_FP16,
        NONE_INT8,
        NNAPI_GPU_FP32,
        NNAPI_GPU_FP16,
        NNAPI_DSP_INT8
    }
    static public boolean isQuantizedMode(Mode mode){
        return mode == Mode.NONE_INT8 || mode == Mode.NNAPI_DSP_INT8;
    }
}

================================================
FILE: app/tflite_yolov5_test/app/src/main/java/com/example/tflite_yolov5_test/TfliteRunner.java
================================================
package com.example.tflite_yolov5_test;

import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.RectF;
import android.widget.ImageView;
import org.tensorflow.lite.Interpreter;
import org.tensorflow.lite.nnapi.NnApiDelegate;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.example.tflite_yolov5_test.TfliteRunMode.*;

public class TfliteRunner {
    final int numBytesPerChannel_float = 4;
    final int numBytesPerChannel_int = 1;
    static {
        System.loadLibrary("native-lib");
    }
    public native float[][] postprocess(float[][][][] out1, float[][][][] out2, float[][][][] out3, int inputSize, float conf_thresh, float iou_thresh);

    private Interpreter tfliteInterpreter;
    Mode runmode;
    int inputSize;
    class InferenceRawResult{
        public float[][][][] out1;
        public float[][][][] out2;
        public float[][][][] out3;
        public InferenceRawResult(int inputSize){
            this.out1 = new float[1][inputSize/8][inputSize/8][3*85];
            this.out2 = new float[1][inputSize/16][inputSize/16][3*85];
            this.out3 = new float[1][inputSize/32][inputSize/32][3*85];
        }
    }
    Object[] inputArray;
    Map<Integer, Object> outputMap;
    InferenceRawResult rawres;
    float conf_thresh;
    float iou_thresh;
    public TfliteRunner(Context context, Mode runmode, int inputSize, float conf_thresh, float iou_thresh) throws Exception{
        this.runmode = runmode;
        this.rawres = new InferenceRawResult(inputSize);
        this.inputSize = inputSize;
        this.conf_thresh = conf_thresh;
        this.iou_thresh = iou_thresh;
        loadModel(context, runmode, inputSize, 4);
    }
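    // Note (added for clarity): each output head above stores 3 anchors x 85
    // values (4 box coordinates + 1 objectness + 80 class scores) = 255
    // channels per grid cell, over grids of inputSize/8, inputSize/16 and
    // inputSize/32 -- e.g. for a 640x640 input: 1x80x80x255, 1x40x40x255 and
    // 1x20x20x255. These raw tensors are handed to the native postprocess().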
    private static MappedByteBuffer loadModelFile(AssetManager assets, String modelFilename) throws IOException {
        AssetFileDescriptor fileDescriptor = assets.openFd(modelFilename);
        FileInputStream inputStream = new FileInputStream(fileDescriptor.getFileDescriptor());
        FileChannel fileChannel = inputStream.getChannel();
        long startOffset = fileDescriptor.getStartOffset();
        long declaredLength = fileDescriptor.getDeclaredLength();
        return fileChannel.map(FileChannel.MapMode.READ_ONLY, startOffset, declaredLength);
    }
    public void loadModel(Context context, Mode runmode, int inputSize, int num_threads) throws Exception{
        Interpreter.Options options = new Interpreter.Options();
        NnApiDelegate.Options nnapi_options = new NnApiDelegate.Options();
        options.setNumThreads(num_threads);
        nnapi_options.setExecutionPreference(1);//sustained speed (note: 1 maps to FAST_SINGLE_ANSWER in NnApiDelegate.Options)
        switch (runmode){
            case NONE_FP32:
                options.setUseXNNPACK(true);
                break;
            case NONE_FP16:
                //TODO:deprecated?
                options.setAllowFp16PrecisionForFp32(true);
                break;
            case NNAPI_GPU_FP32:
                nnapi_options.setAcceleratorName("qti-gpu");
                nnapi_options.setAllowFp16(false);
                options.addDelegate(new NnApiDelegate(nnapi_options));
                break;
            case NNAPI_GPU_FP16:
                nnapi_options.setAcceleratorName("qti-gpu");
                nnapi_options.setAllowFp16(true);
                options.addDelegate(new NnApiDelegate(nnapi_options));
                break;
            case NONE_INT8:
                options.setUseXNNPACK(true);
                break;
            case NNAPI_DSP_INT8:
                nnapi_options.setAcceleratorName("qti-dsp");
                options.addDelegate(new NnApiDelegate(nnapi_options));
                break;
            default:
                throw new RuntimeException("Unknown runmode!");
        }
        boolean quantized_mode = TfliteRunMode.isQuantizedMode(runmode);
        String precision_str = quantized_mode ? "int8" : "fp32";
        String modelname = "yolov5s_" + precision_str + "_" + String.valueOf(inputSize) + ".tflite";
        MappedByteBuffer tflite_model_buf = TfliteRunner.loadModelFile(context.getAssets(), modelname);
        this.tfliteInterpreter = new Interpreter(tflite_model_buf, options);
    }
    static public Bitmap getResizedImage(Bitmap bitmap, int inputSize) {
        Bitmap resized = Bitmap.createScaledBitmap(bitmap, inputSize, inputSize, true);
        return resized;
    }
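    // Note (added for clarity): setInput() below converts ARGB_8888 pixels to
    // the model's input layout: NHWC, RGB channel order. The float path
    // normalizes each channel to [0, 1]; the quantized path feeds the raw
    // 0-255 bytes directly, i.e. it assumes the int8 model was exported with
    // an input quantization of roughly scale 1/255 and zero point 0.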
    public void setInput(Bitmap resizedbitmap){
        boolean quantized_mode = TfliteRunMode.isQuantizedMode(this.runmode);
        int numBytesPerChannel = quantized_mode ? numBytesPerChannel_int : numBytesPerChannel_float;
        ByteBuffer imgData = ByteBuffer.allocateDirect(1 * inputSize * inputSize * 3 * numBytesPerChannel);
        int[] intValues = new int[inputSize * inputSize];
        resizedbitmap.getPixels(intValues, 0, resizedbitmap.getWidth(), 0, 0, resizedbitmap.getWidth(), resizedbitmap.getHeight());
        imgData.order(ByteOrder.nativeOrder());
        imgData.rewind();
        for (int i = 0; i < inputSize; ++i) {
            for (int j = 0; j < inputSize; ++j) {
                int pixelValue = intValues[i * inputSize + j];
                if (quantized_mode) {
                    // Quantized model
                    imgData.put((byte) ((pixelValue >> 16) & 0xFF));
                    imgData.put((byte) ((pixelValue >> 8) & 0xFF));
                    imgData.put((byte) (pixelValue & 0xFF));
                } else {
                    // Float model
                    float r = (((pixelValue >> 16) & 0xFF)) / 255.0f;
                    float g = (((pixelValue >> 8) & 0xFF)) / 255.0f;
                    float b = ((pixelValue & 0xFF)) / 255.0f;
                    imgData.putFloat(r);
                    imgData.putFloat(g);
                    imgData.putFloat(b);
                }
            }
        }
        this.inputArray = new Object[]{imgData};
        this.outputMap = new HashMap<>();
        outputMap.put(0, this.rawres.out1);
        outputMap.put(1, this.rawres.out2);
        outputMap.put(2, this.rawres.out3);
    }
    private int inference_elapsed;
    private int postprocess_elapsed;
    public String getLastElapsedTimeLog() {
        return String.format("inference: %dms postprocess: %dms", this.inference_elapsed, this.postprocess_elapsed);
    }
    public List<Recognition> runInference(){
        List<Recognition> bboxes = new ArrayList<>();
        long start = System.currentTimeMillis();
        this.tfliteInterpreter.runForMultipleInputsOutputs(inputArray, outputMap);
        long end = System.currentTimeMillis();
        this.inference_elapsed = (int)(end - start);
        //float[bbox_num][6]
        // (x1, y1, x2, y2, conf, class_idx)
        float[][] bbox_arrs = postprocess(this.rawres.out1, this.rawres.out2, this.rawres.out3, this.inputSize, this.conf_thresh, this.iou_thresh);
        long end2 = System.currentTimeMillis();
        this.postprocess_elapsed = (int)(end2 - end);
        for(float[] bbox_arr: bbox_arrs){
            bboxes.add(new Recognition(bbox_arr));
        }
        return bboxes;
    }
    static int[] coco80_to_91class_map = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 27, 28, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 67, 70, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 84, 85, 86, 87, 88, 89, 90};
    static public int get_coco91_from_coco80(int idx){
        //assume idx < 80
        return coco80_to_91class_map[idx];
    }
    public void setConfThresh(float thresh){ this.conf_thresh = thresh;}
    public void setIoUThresh(float thresh) {this.iou_thresh = thresh;}
    //port from TfLite Object Detection example
    /** An immutable result returned by a Detector describing what was recognized.
*/ public class Recognition { private final String[] coco_class_names = new String[]{"person", "bicycle", "car", "motorbike", "aeroplane", "bus", "train", "truck", "boat", "traffic light", "fire hydrant", "stop sign", "parking meter", "bench", "bird", "cat", "dog", "horse", "sheep", "cow", "elephant", "bear", "zebra", "giraffe", "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee", "skis", "snowboard", "sports ball", "kite", "baseball bat", "baseball glove", "skateboard", "surfboard", "tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl", "banana", "apple", "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", "chair", "sofa", "pottedplant", "bed", "diningtable", "toilet", "tvmonitor", "laptop", "mouse", "remote", "keyboard", "cell phone", "microwave", "oven", "toaster", "sink", "refrigerator", "book", "clock", "vase", "scissors", "teddy bear", "hair drier", "toothbrush"}; private final Integer class_idx; /** * A unique identifier for what has been recognized. Specific to the class, not the instance of * the object. */ //private final String id; /** Display name for the recognition. */ private final String title; /** * A sortable score for how good the recognition is relative to others. Higher should be better. */ private final Float confidence; /** Optional location within the source image for the location of the recognized object. */ private RectF location; public Recognition( float[] bbox_array) { float x1 = bbox_array[0]; float y1 = bbox_array[1]; float x2 = bbox_array[2]; float y2 = bbox_array[3]; //this.id = (int)bbox_array[5]; int class_id = (int)bbox_array[5]; this.class_idx = class_id; this.title = coco_class_names[class_id]; this.confidence = bbox_array[4]; this.location = new RectF(x1, y1, x2, y2); } public Integer getClass_idx(){ return class_idx; } /*public String getId() { return id; }*/ public String getTitle() { return title; } public Float getConfidence() { return confidence; } public RectF getLocation() { return new RectF(location); } public void setLocation(RectF location) { this.location = location; } @Override public String toString() { String resultString = ""; /*if (id != null) { resultString += "[" + id + "] "; }*/ if (title != null) { resultString += title + " "; } if (confidence != null) { resultString += String.format("(%.1f%%) ", confidence * 100.0f); } if (location != null) { resultString += location + " "; } return resultString.trim(); } } } ================================================ FILE: app/tflite_yolov5_test/app/src/main/java/com/example/tflite_yolov5_test/camera/CameraActivity.java ================================================ package com.example.tflite_yolov5_test.camera; import androidx.appcompat.app.AppCompatActivity; import androidx.appcompat.widget.SwitchCompat; import android.Manifest; import android.app.Fragment; import android.content.Context; import android.content.pm.PackageManager; import android.hardware.Camera; import android.hardware.camera2.CameraAccessException; import android.hardware.camera2.CameraCharacteristics; import android.hardware.camera2.CameraManager; import android.hardware.camera2.params.StreamConfigurationMap; import android.media.Image; import android.media.ImageReader; import android.os.Build; import android.os.Bundle; import android.os.Handler; import android.os.HandlerThread; import android.os.Trace; import android.util.Size; import android.view.Surface; import android.view.View; import android.widget.CompoundButton; import android.widget.ImageView; 
import android.widget.LinearLayout;
import android.widget.SeekBar;
import android.widget.TextView;
import android.widget.Toast;
import com.example.tflite_yolov5_test.R;
import com.google.android.material.bottomsheet.BottomSheetBehavior;
import com.example.tflite_yolov5_test.camera.env.ImageUtils;
import java.nio.ByteBuffer;

public abstract class CameraActivity extends AppCompatActivity
    implements ImageReader.OnImageAvailableListener,
        Camera.PreviewCallback,
        CompoundButton.OnCheckedChangeListener,
        View.OnClickListener {
  private static final int PERMISSIONS_REQUEST = 1;
  private static final String PERMISSION_CAMERA = Manifest.permission.CAMERA;
  protected int previewWidth = 0;
  protected int previewHeight = 0;
  private boolean debug = false;
  private Handler handler;
  private HandlerThread handlerThread;
  private boolean useCamera2API;
  private boolean isProcessingFrame = false;
  private byte[][] yuvBytes = new byte[3][];
  private int[] rgbBytes = null;
  private int yRowStride;
  private Runnable postInferenceCallback;
  private Runnable imageConverter;
  private LinearLayout bottomSheetLayout;
  private LinearLayout gestureLayout;
  private BottomSheetBehavior<LinearLayout> sheetBehavior;
  protected TextView frameValueTextView, cropValueTextView, inferenceTimeTextView;
  protected ImageView bottomSheetArrowImageView;
  private ImageView plusImageView, minusImageView;
  private SwitchCompat apiSwitchCompat;
  private TextView threadsTextView;

  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_camera);
    if (hasPermission()) {
      setFragment();
    } else {
      requestPermission();
    }
  }

  private String chooseCamera() {
    final CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
    try {
      for (final String cameraId : manager.getCameraIdList()) {
        final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
        // We don't use a front facing camera in this sample.
        final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
        if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
          continue;
        }
        final StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        if (map == null) {
          continue;
        }
        // Fallback to camera1 API for internal cameras that don't have full support.
        // This should help with legacy situations where using the camera2 API causes
        // distorted or otherwise broken previews.
        useCamera2API = (facing == CameraCharacteristics.LENS_FACING_EXTERNAL)
            || isHardwareLevelSupported(characteristics, CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
        return cameraId;
      }
    } catch (CameraAccessException e) {
    }
    return null;
  }

  protected void fillBytes(final Image.Plane[] planes, final byte[][] yuvBytes) {
    // Because of the variable row stride it's not possible to know in
    // advance the actual necessary dimensions of the yuv planes.
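    // Note (added for clarity): YUV_420_888 images expose three planes, each
    // with its own row stride (and a pixel stride for the chroma planes), so
    // the buffers are sized from Buffer.capacity() at runtime rather than
    // computed from width x height.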
for (int i = 0; i < planes.length; ++i) { final ByteBuffer buffer = planes[i].getBuffer(); if (yuvBytes[i] == null) { yuvBytes[i] = new byte[buffer.capacity()]; } buffer.get(yuvBytes[i]); } } protected int getScreenOrientation() { switch (getWindowManager().getDefaultDisplay().getRotation()) { case Surface.ROTATION_270: return 270; case Surface.ROTATION_180: return 180; case Surface.ROTATION_90: return 90; default: return 0; } } /** Callback for android.hardware.Camera API */ @Override public void onPreviewFrame(final byte[] bytes, final Camera camera) { if (isProcessingFrame) { return; } try { // Initialize the storage bitmaps once when the resolution is known. if (rgbBytes == null) { Camera.Size previewSize = camera.getParameters().getPreviewSize(); previewHeight = previewSize.height; previewWidth = previewSize.width; rgbBytes = new int[previewWidth * previewHeight]; onPreviewSizeChosen(new Size(previewSize.width, previewSize.height), 90); } } catch (final Exception e) { return; } } /** Callback for Camera2 API */ @Override public void onImageAvailable(final ImageReader reader) { // We need wait until we have some size from onPreviewSizeChosen if (previewWidth == 0 || previewHeight == 0) { return; } if (rgbBytes == null) { rgbBytes = new int[previewWidth * previewHeight]; } try { final Image image = reader.acquireLatestImage(); if (image == null) { return; } if (isProcessingFrame) { image.close(); return; } isProcessingFrame = true; Trace.beginSection("imageAvailable"); final Image.Plane[] planes = image.getPlanes(); fillBytes(planes, yuvBytes); yRowStride = planes[0].getRowStride(); final int uvRowStride = planes[1].getRowStride(); final int uvPixelStride = planes[1].getPixelStride(); imageConverter = new Runnable() { @Override public void run() { ImageUtils.convertYUV420ToARGB8888( yuvBytes[0], yuvBytes[1], yuvBytes[2], previewWidth, previewHeight, yRowStride, uvRowStride, uvPixelStride, rgbBytes); } }; postInferenceCallback = new Runnable() { @Override public void run() { image.close(); isProcessingFrame = false; } }; processImage(); } catch (final Exception e) { Trace.endSection(); return; } Trace.endSection(); } @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { } @Override public void onClick(View v) { } @Override public synchronized void onResume() { super.onResume(); handlerThread = new HandlerThread("inference"); handlerThread.start(); handler = new Handler(handlerThread.getLooper()); } @Override public synchronized void onPause() { handlerThread.quitSafely(); try { handlerThread.join(); handlerThread = null; handler = null; } catch (final InterruptedException e) { } super.onPause(); } protected synchronized void runInBackground(final Runnable r) { if (handler != null) { handler.post(r); } } protected int[] getRgbBytes() { imageConverter.run(); return rgbBytes; } protected void readyForNextImage() { if (postInferenceCallback != null) { postInferenceCallback.run(); } } private boolean isHardwareLevelSupported( CameraCharacteristics characteristics, int requiredLevel) { int deviceLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) { return requiredLevel == deviceLevel; } // deviceLevel is not LEGACY, can use numerical sort return requiredLevel <= deviceLevel; } protected void setFragment() { String cameraId = chooseCamera(); Fragment fragment; if (useCamera2API) { CameraConnectionFragment camera2Fragment = 
CameraConnectionFragment.newInstance(
              new CameraConnectionFragment.ConnectionCallback() {
                @Override
                public void onPreviewSizeChosen(final Size size, final int rotation) {
                  previewHeight = size.getHeight();
                  previewWidth = size.getWidth();
                  CameraActivity.this.onPreviewSizeChosen(size, rotation);
                }
              },
              this,
              getLayoutId(),
              getDesiredPreviewFrameSize());
      camera2Fragment.setCamera(cameraId);
      fragment = camera2Fragment;
    } else {
      fragment = new LegacyCameraConnectionFragment(this, getLayoutId(), getDesiredPreviewFrameSize());
    }
    getFragmentManager().beginTransaction().replace(R.id.container, fragment).commit();
  }

  protected abstract void processImage();
  protected abstract void onPreviewSizeChosen(final Size size, final int rotation);
  protected abstract int getLayoutId();
  protected abstract Size getDesiredPreviewFrameSize();
  protected abstract void setNumThreads(int numThreads);
  protected abstract void setUseNNAPI(boolean isChecked);

  private static boolean allPermissionsGranted(final int[] grantResults) {
    for (int result : grantResults) {
      if (result != PackageManager.PERMISSION_GRANTED) {
        return false;
      }
    }
    return true;
  }

  private boolean hasPermission() {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
      return checkSelfPermission(PERMISSION_CAMERA) == PackageManager.PERMISSION_GRANTED;
    } else {
      return true;
    }
  }

  private void requestPermission() {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
      if (shouldShowRequestPermissionRationale(PERMISSION_CAMERA)) {
        Toast.makeText(
                CameraActivity.this,
                "Camera permission is required for this demo",
                Toast.LENGTH_LONG)
            .show();
      }
      requestPermissions(new String[] {PERMISSION_CAMERA}, PERMISSIONS_REQUEST);
    }
  }

  @Override
  public void onRequestPermissionsResult(
      final int requestCode, final String[] permissions, final int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    if (requestCode == PERMISSIONS_REQUEST) {
      if (allPermissionsGranted(grantResults)) {
        setFragment();
      } else {
        requestPermission();
      }
    }
  }
}

================================================
FILE: app/tflite_yolov5_test/app/src/main/java/com/example/tflite_yolov5_test/camera/CameraConnectionFragment.java
================================================
package com.example.tflite_yolov5_test.camera;

import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.DialogFragment;
import android.app.Fragment;
import android.content.Context;
import android.content.DialogInterface;
import android.content.res.Configuration;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.ImageReader;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import
android.widget.Toast; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import com.example.tflite_yolov5_test.R; import com.example.tflite_yolov5_test.customview.AutoFitTextureView; @SuppressLint("ValidFragment") public class CameraConnectionFragment extends Fragment { /** * The camera preview size will be chosen to be the smallest frame by pixel size capable of * containing a DESIRED_SIZE x DESIRED_SIZE square. */ private static final int MINIMUM_PREVIEW_SIZE = 320; /** Conversion from screen rotation to JPEG orientation. */ private static final SparseIntArray ORIENTATIONS = new SparseIntArray(); private static final String FRAGMENT_DIALOG = "dialog"; static { ORIENTATIONS.append(Surface.ROTATION_0, 90); ORIENTATIONS.append(Surface.ROTATION_90, 0); ORIENTATIONS.append(Surface.ROTATION_180, 270); ORIENTATIONS.append(Surface.ROTATION_270, 180); } /** A {@link Semaphore} to prevent the app from exiting before closing the camera. */ private final Semaphore cameraOpenCloseLock = new Semaphore(1); /** A {@link ImageReader.OnImageAvailableListener} to receive frames as they are available. */ private final ImageReader.OnImageAvailableListener imageListener; /** The input size in pixels desired by TensorFlow (width and height of a square bitmap). */ private final Size inputSize; /** The layout identifier to inflate for this Fragment. */ private final int layout; private final ConnectionCallback cameraConnectionCallback; private final CameraCaptureSession.CaptureCallback captureCallback = new CameraCaptureSession.CaptureCallback() { @Override public void onCaptureProgressed( final CameraCaptureSession session, final CaptureRequest request, final CaptureResult partialResult) {} @Override public void onCaptureCompleted( final CameraCaptureSession session, final CaptureRequest request, final TotalCaptureResult result) {} }; /** ID of the current {@link CameraDevice}. */ private String cameraId; /** An {@link AutoFitTextureView} for camera preview. */ private AutoFitTextureView textureView; /** A {@link CameraCaptureSession } for camera preview. */ private CameraCaptureSession captureSession; /** A reference to the opened {@link CameraDevice}. */ private CameraDevice cameraDevice; /** The rotation in degrees of the camera sensor from the display. */ private Integer sensorOrientation; /** The {@link Size} of camera preview. */ private Size previewSize; /** An additional thread for running tasks that shouldn't block the UI. */ private HandlerThread backgroundThread; /** A {@link Handler} for running tasks in the background. */ private Handler backgroundHandler; /** An {@link ImageReader} that handles preview frame capture. */ private ImageReader previewReader; /** {@link CaptureRequest.Builder} for the camera preview */ private CaptureRequest.Builder previewRequestBuilder; /** {@link CaptureRequest} generated by {@link #previewRequestBuilder} */ private CaptureRequest previewRequest; /** {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its state. */ private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() { @Override public void onOpened(final CameraDevice cd) { // This method is called when the camera is opened. We start camera preview here. 
          cameraOpenCloseLock.release();
          cameraDevice = cd;
          createCameraPreviewSession();
        }

        @Override
        public void onDisconnected(final CameraDevice cd) {
          cameraOpenCloseLock.release();
          cd.close();
          cameraDevice = null;
        }

        @Override
        public void onError(final CameraDevice cd, final int error) {
          cameraOpenCloseLock.release();
          cd.close();
          cameraDevice = null;
          final Activity activity = getActivity();
          if (null != activity) {
            activity.finish();
          }
        }
      };

  /**
   * {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a {@link
   * TextureView}.
   */
  private final TextureView.SurfaceTextureListener surfaceTextureListener =
      new TextureView.SurfaceTextureListener() {
        @Override
        public void onSurfaceTextureAvailable(
            final SurfaceTexture texture, final int width, final int height) {
          openCamera(width, height);
        }

        @Override
        public void onSurfaceTextureSizeChanged(
            final SurfaceTexture texture, final int width, final int height) {
          configureTransform(width, height);
        }

        @Override
        public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
          return true;
        }

        @Override
        public void onSurfaceTextureUpdated(final SurfaceTexture texture) {}
      };

  private CameraConnectionFragment(
      final ConnectionCallback connectionCallback,
      final ImageReader.OnImageAvailableListener imageListener,
      final int layout,
      final Size inputSize) {
    this.cameraConnectionCallback = connectionCallback;
    this.imageListener = imageListener;
    this.layout = layout;
    this.inputSize = inputSize;
  }

  /**
   * Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
   * width and height are at least as large as the minimum of both, or an exact match if possible.
   *
   * @param choices The list of sizes that the camera supports for the intended output class
   * @param width The minimum desired width
   * @param height The minimum desired height
   * @return The optimal {@code Size}, or an arbitrary one if none were big enough
   */
  protected static Size chooseOptimalSize(final Size[] choices, final int width, final int height) {
    final int minSize = Math.max(Math.min(width, height), MINIMUM_PREVIEW_SIZE);
    final Size desiredSize = new Size(width, height);
    // Collect the supported resolutions that are at least as big as the preview Surface
    boolean exactSizeFound = false;
    final List<Size> bigEnough = new ArrayList<Size>();
    final List<Size> tooSmall = new ArrayList<Size>();
    for (final Size option : choices) {
      if (option.equals(desiredSize)) {
        // Set the size but don't return yet so that remaining sizes will still be logged.
        exactSizeFound = true;
      }
      if (option.getHeight() >= minSize && option.getWidth() >= minSize) {
        bigEnough.add(option);
      } else {
        tooSmall.add(option);
      }
    }
    if (exactSizeFound) {
      return desiredSize;
    }
    // Pick the smallest of those, assuming we found any
    if (bigEnough.size() > 0) {
      final Size chosenSize = Collections.min(bigEnough, new CompareSizesByArea());
      return chosenSize;
    } else {
      return choices[0];
    }
  }

  public static CameraConnectionFragment newInstance(
      final ConnectionCallback callback,
      final ImageReader.OnImageAvailableListener imageListener,
      final int layout,
      final Size inputSize) {
    return new CameraConnectionFragment(callback, imageListener, layout, inputSize);
  }

  /** Shows a {@link Toast} on the UI thread.
* * @param text The message to show */ private void showToast(final String text) { final Activity activity = getActivity(); if (activity != null) { activity.runOnUiThread( new Runnable() { @Override public void run() { Toast.makeText(activity, text, Toast.LENGTH_SHORT).show(); } }); } } @Override public View onCreateView( final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) { return inflater.inflate(layout, container, false); } @Override public void onViewCreated(final View view, final Bundle savedInstanceState) { textureView = (AutoFitTextureView) view.findViewById(R.id.texture); } @Override public void onActivityCreated(final Bundle savedInstanceState) { super.onActivityCreated(savedInstanceState); } @Override public void onResume() { super.onResume(); startBackgroundThread(); // When the screen is turned off and turned back on, the SurfaceTexture is already // available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open // a camera and start preview from here (otherwise, we wait until the surface is ready in // the SurfaceTextureListener). if (textureView.isAvailable()) { openCamera(textureView.getWidth(), textureView.getHeight()); } else { textureView.setSurfaceTextureListener(surfaceTextureListener); } } @Override public void onPause() { closeCamera(); stopBackgroundThread(); super.onPause(); } public void setCamera(String cameraId) { this.cameraId = cameraId; } /** Sets up member variables related to camera. */ private void setUpCameraOutputs() { final Activity activity = getActivity(); final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE); try { final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId); final StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION); // Danger, W.R.! Attempting to use too large a preview size could exceed the camera // bus' bandwidth limitation, resulting in gorgeous previews but the storage of // garbage capture data. previewSize = chooseOptimalSize( map.getOutputSizes(SurfaceTexture.class), inputSize.getWidth(), inputSize.getHeight()); // We fit the aspect ratio of TextureView to the size of preview we picked. final int orientation = getResources().getConfiguration().orientation; if (orientation == Configuration.ORIENTATION_LANDSCAPE) { textureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight()); } else { textureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth()); } } catch (final CameraAccessException e) { } catch (final NullPointerException e) { // Currently an NPE is thrown when the Camera2API is used but not supported on the // device this code runs. // ErrorDialog.newInstance(getString(R.string.tfe_od_camera_error)) // .show(getChildFragmentManager(), FRAGMENT_DIALOG); // throw new IllegalStateException(getString(R.string.tfe_od_camera_error)); } cameraConnectionCallback.onPreviewSizeChosen(previewSize, sensorOrientation); } /** Opens the camera specified by {@link CameraConnectionFragment#cameraId}. 
*/ @SuppressLint("MissingPermission") private void openCamera(final int width, final int height) { setUpCameraOutputs(); configureTransform(width, height); final Activity activity = getActivity(); final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE); try { if (!cameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) { throw new RuntimeException("Time out waiting to lock camera opening."); } manager.openCamera(cameraId, stateCallback, backgroundHandler); } catch (final CameraAccessException e) { } catch (final InterruptedException e) { throw new RuntimeException("Interrupted while trying to lock camera opening.", e); } } /** Closes the current {@link CameraDevice}. */ private void closeCamera() { try { cameraOpenCloseLock.acquire(); if (null != captureSession) { captureSession.close(); captureSession = null; } if (null != cameraDevice) { cameraDevice.close(); cameraDevice = null; } if (null != previewReader) { previewReader.close(); previewReader = null; } } catch (final InterruptedException e) { throw new RuntimeException("Interrupted while trying to lock camera closing.", e); } finally { cameraOpenCloseLock.release(); } } /** Starts a background thread and its {@link Handler}. */ private void startBackgroundThread() { backgroundThread = new HandlerThread("ImageListener"); backgroundThread.start(); backgroundHandler = new Handler(backgroundThread.getLooper()); } /** Stops the background thread and its {@link Handler}. */ private void stopBackgroundThread() { backgroundThread.quitSafely(); try { backgroundThread.join(); backgroundThread = null; backgroundHandler = null; } catch (final InterruptedException e) { } } /** Creates a new {@link CameraCaptureSession} for camera preview. */ private void createCameraPreviewSession() { try { final SurfaceTexture texture = textureView.getSurfaceTexture(); assert texture != null; // We configure the size of default buffer to be the size of camera preview we want. texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight()); // This is the output Surface we need to start preview. final Surface surface = new Surface(texture); // We set up a CaptureRequest.Builder with the output Surface. previewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); previewRequestBuilder.addTarget(surface); // Create the reader for the preview frames. previewReader = ImageReader.newInstance( previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 2); previewReader.setOnImageAvailableListener(imageListener, backgroundHandler); previewRequestBuilder.addTarget(previewReader.getSurface()); // Here, we create a CameraCaptureSession for camera preview. cameraDevice.createCaptureSession( Arrays.asList(surface, previewReader.getSurface()), new CameraCaptureSession.StateCallback() { @Override public void onConfigured(final CameraCaptureSession cameraCaptureSession) { // The camera is already closed if (null == cameraDevice) { return; } // When the session is ready, we start displaying the preview. captureSession = cameraCaptureSession; try { // Auto focus should be continuous for camera preview. previewRequestBuilder.set( CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE); // Flash is automatically enabled when necessary. previewRequestBuilder.set( CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH); // Finally, we start displaying the camera preview. 
                previewRequest = previewRequestBuilder.build();
                captureSession.setRepeatingRequest(previewRequest, captureCallback, backgroundHandler);
              } catch (final CameraAccessException e) {
              }
            }

            @Override
            public void onConfigureFailed(final CameraCaptureSession cameraCaptureSession) {
              showToast("Failed");
            }
          },
          null);
    } catch (final CameraAccessException e) {
    }
  }

  /**
   * Configures the necessary {@link Matrix} transformation to `mTextureView`. This method should be
   * called after the camera preview size is determined in setUpCameraOutputs and also the size of
   * `mTextureView` is fixed.
   *
   * @param viewWidth The width of `mTextureView`
   * @param viewHeight The height of `mTextureView`
   */
  private void configureTransform(final int viewWidth, final int viewHeight) {
    final Activity activity = getActivity();
    if (null == textureView || null == previewSize || null == activity) {
      return;
    }
    final int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
    final Matrix matrix = new Matrix();
    final RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
    final RectF bufferRect = new RectF(0, 0, previewSize.getHeight(), previewSize.getWidth());
    final float centerX = viewRect.centerX();
    final float centerY = viewRect.centerY();
    if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
      bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
      matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
      final float scale =
          Math.max(
              (float) viewHeight / previewSize.getHeight(),
              (float) viewWidth / previewSize.getWidth());
      matrix.postScale(scale, scale, centerX, centerY);
      matrix.postRotate(90 * (rotation - 2), centerX, centerY);
    } else if (Surface.ROTATION_180 == rotation) {
      matrix.postRotate(180, centerX, centerY);
    }
    textureView.setTransform(matrix);
  }

  /**
   * Callback for Activities to use to initialize their data once the selected preview size is
   * known.
   */
  public interface ConnectionCallback {
    void onPreviewSizeChosen(Size size, int cameraRotation);
  }

  /** Compares two {@code Size}s based on their areas. */
  static class CompareSizesByArea implements Comparator<Size> {
    @Override
    public int compare(final Size lhs, final Size rhs) {
      // We cast here to ensure the multiplications won't overflow
      return Long.signum(
          (long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
    }
  }

  /** Shows an error message dialog.
================================================
FILE: app/tflite_yolov5_test/app/src/main/java/com/example/tflite_yolov5_test/camera/LegacyCameraConnectionFragment.java
================================================
package com.example.tflite_yolov5_test.camera;

import android.annotation.SuppressLint;
import android.app.Fragment;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import java.io.IOException;
import java.util.List;
import com.example.tflite_yolov5_test.R;
import com.example.tflite_yolov5_test.customview.AutoFitTextureView;
import com.example.tflite_yolov5_test.camera.env.ImageUtils;

public class LegacyCameraConnectionFragment extends Fragment {
  /** Conversion from screen rotation to JPEG orientation. */
  private static final SparseIntArray ORIENTATIONS = new SparseIntArray();

  static {
    ORIENTATIONS.append(Surface.ROTATION_0, 90);
    ORIENTATIONS.append(Surface.ROTATION_90, 0);
    ORIENTATIONS.append(Surface.ROTATION_180, 270);
    ORIENTATIONS.append(Surface.ROTATION_270, 180);
  }

  private Camera camera;
  private Camera.PreviewCallback imageListener;
  private Size desiredSize;
  /** The layout identifier to inflate for this Fragment. */
  private int layout;
  /** An {@link AutoFitTextureView} for camera preview. */
  private AutoFitTextureView textureView;
  private SurfaceTexture availableSurfaceTexture = null;

  /**
   * {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a {@link
   * TextureView}.
   */
  private final TextureView.SurfaceTextureListener surfaceTextureListener =
      new TextureView.SurfaceTextureListener() {
        @Override
        public void onSurfaceTextureAvailable(
            final SurfaceTexture texture, final int width, final int height) {
          availableSurfaceTexture = texture;
          startCamera();
        }

        @Override
        public void onSurfaceTextureSizeChanged(
            final SurfaceTexture texture, final int width, final int height) {}

        @Override
        public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
          return true;
        }

        @Override
        public void onSurfaceTextureUpdated(final SurfaceTexture texture) {}
      };

  /** An additional thread for running tasks that shouldn't block the UI. */
  private HandlerThread backgroundThread;

  @SuppressLint("ValidFragment")
  public LegacyCameraConnectionFragment(
      final Camera.PreviewCallback imageListener, final int layout, final Size desiredSize) {
    this.imageListener = imageListener;
    this.layout = layout;
    this.desiredSize = desiredSize;
  }

  @Override
  public View onCreateView(
      final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) {
    return inflater.inflate(layout, container, false);
  }

  @Override
  public void onViewCreated(final View view, final Bundle savedInstanceState) {
    textureView = (AutoFitTextureView) view.findViewById(R.id.texture);
  }

  @Override
  public void onActivityCreated(final Bundle savedInstanceState) {
    super.onActivityCreated(savedInstanceState);
  }

  @Override
  public void onResume() {
    super.onResume();
    startBackgroundThread();
    // When the screen is turned off and turned back on, the SurfaceTexture is already
    // available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
    // a camera and start preview from here (otherwise, we wait until the surface is ready in
    // the SurfaceTextureListener).
    if (textureView.isAvailable()) {
      startCamera();
    } else {
      textureView.setSurfaceTextureListener(surfaceTextureListener);
    }
  }

  @Override
  public void onPause() {
    stopCamera();
    stopBackgroundThread();
    super.onPause();
  }

  /** Starts a background thread and its {@link Handler}. */
  private void startBackgroundThread() {
    backgroundThread = new HandlerThread("CameraBackground");
    backgroundThread.start();
  }

  /** Stops the background thread and its {@link Handler}. */
  private void stopBackgroundThread() {
    backgroundThread.quitSafely();
    try {
      backgroundThread.join();
      backgroundThread = null;
    } catch (final InterruptedException e) {
      // Interrupted while shutting down; the thread is exiting anyway.
    }
  }

  private void startCamera() {
    int index = getCameraId();
    camera = Camera.open(index);
    try {
      Camera.Parameters parameters = camera.getParameters();
      List<String> focusModes = parameters.getSupportedFocusModes();
      if (focusModes != null
          && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
        parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
      }
      List<Camera.Size> cameraSizes = parameters.getSupportedPreviewSizes();
      Size[] sizes = new Size[cameraSizes.size()];
      int i = 0;
      for (Camera.Size size : cameraSizes) {
        sizes[i++] = new Size(size.width, size.height);
      }
      Size previewSize =
          CameraConnectionFragment.chooseOptimalSize(
              sizes, desiredSize.getWidth(), desiredSize.getHeight());
      parameters.setPreviewSize(previewSize.getWidth(), previewSize.getHeight());
      camera.setDisplayOrientation(90);
      camera.setParameters(parameters);
      camera.setPreviewTexture(availableSurfaceTexture);
    } catch (IOException exception) {
      // The preview texture could not be attached; bail out here instead of
      // touching the released camera below.
      camera.release();
      camera = null;
      return;
    }

    camera.setPreviewCallbackWithBuffer(imageListener);
    Camera.Size s = camera.getParameters().getPreviewSize();
    camera.addCallbackBuffer(new byte[ImageUtils.getYUVByteSize(s.height, s.width)]);

    textureView.setAspectRatio(s.height, s.width);

    camera.startPreview();
  }

  protected void stopCamera() {
    if (camera != null) {
      camera.stopPreview();
      camera.setPreviewCallback(null);
      camera.release();
      camera = null;
    }
  }

  private int getCameraId() {
    Camera.CameraInfo ci = new Camera.CameraInfo();
    for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
      Camera.getCameraInfo(i, ci);
      if (ci.facing == Camera.CameraInfo.CAMERA_FACING_BACK) return i;
    }
    return -1; // No camera found
  }
}
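// Editor's sketch (not in the original source): startCamera() above hands the
// camera exactly one reusable NV21 buffer via addCallbackBuffer, so the preview
// callback must re-queue that buffer after each frame or delivery stops. A
// hypothetical listener showing the contract:
class PreviewBufferExample implements Camera.PreviewCallback {
  @Override
  public void onPreviewFrame(final byte[] data, final Camera camera) {
    // ... consume the NV21 bytes (length == ImageUtils.getYUVByteSize(h, w)) ...
    camera.addCallbackBuffer(data); // return the buffer for the next frame
  }
}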
================================================
FILE: app/tflite_yolov5_test/app/src/main/java/com/example/tflite_yolov5_test/camera/env/BorderedText.java
================================================
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

package com.example.tflite_yolov5_test.camera.env;

import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Paint.Align;
import android.graphics.Paint.Style;
import android.graphics.Rect;
import android.graphics.Typeface;
import java.util.Vector;

/** A class that encapsulates the tedious bits of rendering legible, bordered text onto a canvas. */
public class BorderedText {
  private final Paint interiorPaint;
  private final Paint exteriorPaint;

  private final float textSize;

  /**
   * Creates a left-aligned bordered text object with a white interior, and a black exterior with
   * the specified text size.
   *
   * @param textSize text size in pixels
   */
  public BorderedText(final float textSize) {
    this(Color.WHITE, Color.BLACK, textSize);
  }

  /**
   * Create a bordered text object with the specified interior and exterior colors, text size and
   * alignment.
   *
   * @param interiorColor the interior text color
   * @param exteriorColor the exterior text color
   * @param textSize text size in pixels
   */
  public BorderedText(final int interiorColor, final int exteriorColor, final float textSize) {
    interiorPaint = new Paint();
    interiorPaint.setTextSize(textSize);
    interiorPaint.setColor(interiorColor);
    interiorPaint.setStyle(Style.FILL);
    interiorPaint.setAntiAlias(false);
    interiorPaint.setAlpha(255);

    exteriorPaint = new Paint();
    exteriorPaint.setTextSize(textSize);
    exteriorPaint.setColor(exteriorColor);
    exteriorPaint.setStyle(Style.FILL_AND_STROKE);
    exteriorPaint.setStrokeWidth(textSize / 8);
    exteriorPaint.setAntiAlias(false);
    exteriorPaint.setAlpha(255);

    this.textSize = textSize;
  }

  public void setTypeface(Typeface typeface) {
    interiorPaint.setTypeface(typeface);
    exteriorPaint.setTypeface(typeface);
  }

  public void drawText(final Canvas canvas, final float posX, final float posY, final String text) {
    canvas.drawText(text, posX, posY, exteriorPaint);
    canvas.drawText(text, posX, posY, interiorPaint);
  }

  public void drawText(
      final Canvas canvas, final float posX, final float posY, final String text, Paint bgPaint) {
    float width = exteriorPaint.measureText(text);
    float textSize = exteriorPaint.getTextSize();
    Paint paint = new Paint(bgPaint);
    paint.setStyle(Paint.Style.FILL);
    paint.setAlpha(160);
    canvas.drawRect(posX, (posY + (int) (textSize)), (posX + (int) (width)), posY, paint);

    canvas.drawText(text, posX, (posY + textSize), interiorPaint);
  }

  public void drawLines(Canvas canvas, final float posX, final float posY, Vector<String> lines) {
    int lineNum = 0;
    for (final String line : lines) {
      drawText(canvas, posX, posY - getTextSize() * (lines.size() - lineNum - 1), line);
      ++lineNum;
    }
  }

  public void setInteriorColor(final int color) {
    interiorPaint.setColor(color);
  }

  public void setExteriorColor(final int color) {
    exteriorPaint.setColor(color);
  }

  public float getTextSize() {
    return textSize;
  }

  public void setAlpha(final int alpha) {
    interiorPaint.setAlpha(alpha);
    exteriorPaint.setAlpha(alpha);
  }

  public void getTextBounds(
      final String line, final int index, final int count, final Rect lineBounds) {
    interiorPaint.getTextBounds(line, index, count, lineBounds);
  }

  public void setTextAlign(final Align align) {
    interiorPaint.setTextAlign(align);
    exteriorPaint.setTextAlign(align);
  }
}
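// Editor's sketch (not in the original source): typical two-pass usage -- the
// thicker FILL_AND_STROKE exterior is drawn first, then the interior fill lands
// on top of it, which keeps labels readable over any background:
class BorderedTextUsageExample {
  static void drawLabel(final Canvas canvas, final float x, final float y) {
    final BorderedText bt = new BorderedText(24.0f); // white text, black border, 24 px
    bt.setTypeface(Typeface.MONOSPACE);
    bt.drawText(canvas, x, y, "person 0.87");
  }
}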
================================================
FILE: app/tflite_yolov5_test/app/src/main/java/com/example/tflite_yolov5_test/camera/env/ImageUtils.java
================================================
package com.example.tflite_yolov5_test.camera.env;

import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.os.Environment;
import java.io.File;
import java.io.FileOutputStream;

/** Utility class for manipulating images. */
public class ImageUtils {
  // This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their ranges
  // are normalized to eight bits.
  static final int kMaxChannelValue = 262143;

  /**
   * Utility method to compute the allocated size in bytes of a YUV420SP image of the given
   * dimensions.
   */
  public static int getYUVByteSize(final int width, final int height) {
    // The luminance plane requires 1 byte per pixel.
    final int ySize = width * height;

    // The UV plane works on 2x2 blocks, so dimensions with odd size must be rounded up.
    // Each 2x2 block takes 2 bytes to encode, one each for U and V.
    final int uvSize = ((width + 1) / 2) * ((height + 1) / 2) * 2;

    return ySize + uvSize;
  }

  /**
   * Saves a Bitmap object to disk for analysis.
   *
   * @param bitmap The bitmap to save.
   */
  public static void saveBitmap(final Bitmap bitmap) {
    saveBitmap(bitmap, "preview.png");
  }

  /**
   * Saves a Bitmap object to disk for analysis.
   *
   * @param bitmap The bitmap to save.
   * @param filename The location to save the bitmap to.
   */
  public static void saveBitmap(final Bitmap bitmap, final String filename) {
    final String root =
        Environment.getExternalStorageDirectory().getAbsolutePath()
            + File.separator
            + "tensorflow";
    final File myDir = new File(root);

    if (!myDir.mkdirs()) {
      // The directory already exists (or could not be created); attempt the write anyway.
    }

    final String fname = filename;
    final File file = new File(myDir, fname);
    if (file.exists()) {
      file.delete();
    }
    try {
      final FileOutputStream out = new FileOutputStream(file);
      bitmap.compress(Bitmap.CompressFormat.PNG, 99, out);
      out.flush();
      out.close();
    } catch (final Exception e) {
      // Saving is best-effort debug output; failures are ignored.
    }
  }

  public static void convertYUV420SPToARGB8888(byte[] input, int width, int height, int[] output) {
    final int frameSize = width * height;
    for (int j = 0, yp = 0; j < height; j++) {
      int uvp = frameSize + (j >> 1) * width;
      int u = 0;
      int v = 0;

      for (int i = 0; i < width; i++, yp++) {
        int y = 0xff & input[yp];
        if ((i & 1) == 0) {
          v = 0xff & input[uvp++];
          u = 0xff & input[uvp++];
        }

        output[yp] = YUV2RGB(y, u, v);
      }
    }
  }

  private static int YUV2RGB(int y, int u, int v) {
    // Adjust and check YUV values
    y = (y - 16) < 0 ? 0 : (y - 16);
    u -= 128;
    v -= 128;

    // This is the floating point equivalent. We do the conversion in integer
    // because some Android devices do not have floating point in hardware.
    // nR = (int)(1.164 * nY + 1.596 * nV);
    // nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU);
    // nB = (int)(1.164 * nY + 2.018 * nU);
    int y1192 = 1192 * y;
    int r = (y1192 + 1634 * v);
    int g = (y1192 - 833 * v - 400 * u);
    int b = (y1192 + 2066 * u);

    // Clipping RGB values to be inside boundaries [ 0 , kMaxChannelValue ]
    r = r > kMaxChannelValue ? kMaxChannelValue : (r < 0 ? 0 : r);
    g = g > kMaxChannelValue ? kMaxChannelValue : (g < 0 ? 0 : g);
    b = b > kMaxChannelValue ? kMaxChannelValue : (b < 0 ? 0 : b);

    return 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
  }
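  // Editor's note (not in the original source): a worked example of the size
  // formula above. For a 640x480 YUV420SP frame the Y plane is 640*480 = 307200
  // bytes and the interleaved UV plane is (640/2)*(480/2)*2 = 153600 bytes, so
  // getYUVByteSize(640, 480) == 460800. The hypothetical check below makes that
  // explicit:
  static void yuvByteSizeWorkedExample() {
    final int expected = 640 * 480 + (640 / 2) * (480 / 2) * 2; // 460800
    if (getYUVByteSize(640, 480) != expected) {
      throw new AssertionError("YUV420SP size formula mismatch");
    }
  }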
  public static void convertYUV420ToARGB8888(
      byte[] yData,
      byte[] uData,
      byte[] vData,
      int width,
      int height,
      int yRowStride,
      int uvRowStride,
      int uvPixelStride,
      int[] out) {
    int yp = 0;
    for (int j = 0; j < height; j++) {
      int pY = yRowStride * j;
      int pUV = uvRowStride * (j >> 1);

      for (int i = 0; i < width; i++) {
        int uv_offset = pUV + (i >> 1) * uvPixelStride;

        out[yp++] =
            YUV2RGB(0xff & yData[pY + i], 0xff & uData[uv_offset], 0xff & vData[uv_offset]);
      }
    }
  }

  /**
   * Returns a transformation matrix from one reference frame into another. Handles cropping (if
   * maintaining aspect ratio is desired) and rotation.
   *
   * @param srcWidth Width of source frame.
   * @param srcHeight Height of source frame.
   * @param dstWidth Width of destination frame.
   * @param dstHeight Height of destination frame.
   * @param applyRotation Amount of rotation to apply from one frame to another. Must be a multiple
   *     of 90.
   * @param maintainAspectRatio If true, will ensure that scaling in x and y remains constant,
   *     cropping the image if necessary.
   * @return The transformation fulfilling the desired requirements.
   */
  public static Matrix getTransformationMatrix(
      final int srcWidth,
      final int srcHeight,
      final int dstWidth,
      final int dstHeight,
      final int applyRotation,
      final boolean maintainAspectRatio) {
    final Matrix matrix = new Matrix();

    if (applyRotation != 0) {
      if (applyRotation % 90 != 0) {
        // Rotations that are not multiples of 90 degrees are unsupported;
        // the caller gets a best-effort transform.
      }

      // Translate so center of image is at origin.
      matrix.postTranslate(-srcWidth / 2.0f, -srcHeight / 2.0f);

      // Rotate around origin.
      matrix.postRotate(applyRotation);
    }

    // Account for the already applied rotation, if any, and then determine how
    // much scaling is needed for each axis.
    final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0;

    final int inWidth = transpose ? srcHeight : srcWidth;
    final int inHeight = transpose ? srcWidth : srcHeight;

    // Apply scaling if necessary.
    if (inWidth != dstWidth || inHeight != dstHeight) {
      final float scaleFactorX = dstWidth / (float) inWidth;
      final float scaleFactorY = dstHeight / (float) inHeight;

      if (maintainAspectRatio) {
        // Scale by minimum factor so that dst is filled completely while
        // maintaining the aspect ratio. Some image may fall off the edge.
        final float scaleFactor = Math.max(scaleFactorX, scaleFactorY);
        matrix.postScale(scaleFactor, scaleFactor);
      } else {
        // Scale exactly to fill dst from src.
        matrix.postScale(scaleFactorX, scaleFactorY);
      }
    }

    if (applyRotation != 0) {
      // Translate back from origin centered reference to destination frame.
      matrix.postTranslate(dstWidth / 2.0f, dstHeight / 2.0f);
    }

    return matrix;
  }
}
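// Editor's sketch (not in the original source): how DetectorActivity consumes the
// matrix above -- the forward transform warps the camera frame into the square
// detector input, and its inverse maps detections back onto the frame. Names
// are illustrative:
class TransformUsageExample {
  static Matrix[] buildTransforms(
      final int previewW, final int previewH, final int cropSize, final int rotationDegrees) {
    final Matrix frameToCrop =
        ImageUtils.getTransformationMatrix(
            previewW, previewH, cropSize, cropSize, rotationDegrees, false);
    final Matrix cropToFrame = new Matrix();
    frameToCrop.invert(cropToFrame); // map detector-space boxes back to the frame
    return new Matrix[] {frameToCrop, cropToFrame};
  }
}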
================================================
FILE: app/tflite_yolov5_test/app/src/main/java/com/example/tflite_yolov5_test/camera/tracker/MultiBoxTracker.java
================================================
package com.example.tflite_yolov5_test.camera.tracker;

import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Paint.Cap;
import android.graphics.Paint.Join;
import android.graphics.Paint.Style;
import android.graphics.RectF;
import android.text.TextUtils;
import android.util.Log;
import android.util.Pair;
import android.util.Size;
import android.util.TypedValue;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import com.example.tflite_yolov5_test.camera.env.BorderedText;
import com.example.tflite_yolov5_test.camera.env.ImageUtils;
import com.example.tflite_yolov5_test.TfliteRunner.Recognition;

/** A tracker that handles non-max suppression and matches existing objects to new detections. */
public class MultiBoxTracker {
  private static final float TEXT_SIZE_DIP = 18;
  private static final float MIN_SIZE = 16.0f;
  private static final int[] COLORS = {
    Color.BLUE,
    Color.RED,
    Color.GREEN,
    Color.YELLOW,
    Color.CYAN,
    Color.MAGENTA,
    Color.WHITE,
    Color.parseColor("#55FF55"),
    Color.parseColor("#FFA500"),
    Color.parseColor("#FF8888"),
    Color.parseColor("#AAAAFF"),
    Color.parseColor("#FFFFAA"),
    Color.parseColor("#55AAAA"),
    Color.parseColor("#AA33AA"),
    Color.parseColor("#0D0068")
  };
  final List<Pair<Float, RectF>> screenRects = new LinkedList<Pair<Float, RectF>>();
  private final Queue<Integer> availableColors = new LinkedList<Integer>();
  private final List<TrackedRecognition> trackedObjects = new LinkedList<TrackedRecognition>();
  private final Paint boxPaint = new Paint();
  private final float textSizePx;
  private final BorderedText borderedText;
  private Matrix frameToCanvasMatrix;
  private int detectorInputSize;
  private int frameWidth;
  private int frameHeight;
  private int sensorOrientation;

  public MultiBoxTracker(final Context context) {
    for (final int color : COLORS) {
      availableColors.add(color);
    }

    boxPaint.setColor(Color.RED);
    boxPaint.setStyle(Paint.Style.STROKE);
    boxPaint.setStrokeWidth(10.0f);
    boxPaint.setStrokeCap(Paint.Cap.ROUND);
    boxPaint.setStrokeJoin(Paint.Join.ROUND);
    boxPaint.setStrokeMiter(100);

    textSizePx =
        TypedValue.applyDimension(
            TypedValue.COMPLEX_UNIT_DIP,
            TEXT_SIZE_DIP,
            context.getResources().getDisplayMetrics());
    borderedText = new BorderedText(textSizePx);
  }

  public synchronized void setFrameConfiguration(
      final Size frameSize, final int detectorInputSize, final int sensorOrientation) {
    this.detectorInputSize = detectorInputSize;
    // Reversed on purpose: the sensor image arrives rotated relative to the display.
    frameWidth = frameSize.getHeight();
    frameHeight = frameSize.getWidth();
    this.sensorOrientation = sensorOrientation;
  }

  public synchronized void drawDebug(final Canvas canvas) {
    final Paint textPaint = new Paint();
    textPaint.setColor(Color.WHITE);
    textPaint.setTextSize(60.0f);

    final Paint boxPaint = new Paint();
    boxPaint.setColor(Color.RED);
    boxPaint.setAlpha(200);
    boxPaint.setStyle(Paint.Style.STROKE);

    for (final Pair<Float, RectF> detection : screenRects) {
      final RectF rect = detection.second;
      canvas.drawRect(rect, boxPaint);
      canvas.drawText("" + detection.first, rect.left, rect.top, textPaint);
      borderedText.drawText(canvas, rect.centerX(), rect.centerY(), "" + detection.first);
    }
  }

  public synchronized void trackResults(final List<Recognition> results) {
    processResults(results);
  }

  private Matrix getFrameToCanvasMatrix() {
    return frameToCanvasMatrix;
  }

  public synchronized void draw(final Canvas canvas) {
    // WARNING(from lp6m): I don't understand about sensorOrientation, so fix to 0 for now...
    for (final TrackedRecognition recognition : trackedObjects) {
      float frameToCanvasScale =
          Math.min(
              (float) canvas.getHeight() / frameHeight, (float) canvas.getWidth() / frameWidth);
      float scale_width = frameToCanvasScale * ((float) frameWidth / detectorInputSize);
      float scale_height = frameToCanvasScale * ((float) frameHeight / detectorInputSize);
      float x1 = recognition.location.left * scale_width;
      float y1 = recognition.location.top * scale_height;
      float x2 = recognition.location.right * scale_width;
      float y2 = recognition.location.bottom * scale_height;
      final RectF trackedPos = new RectF(x1, y1, x2, y2);

      boxPaint.setColor(recognition.color);

      float cornerSize = Math.min(trackedPos.width(), trackedPos.height()) / 8.0f;
      canvas.drawRoundRect(trackedPos, cornerSize, cornerSize, boxPaint);

      final String labelString =
          !TextUtils.isEmpty(recognition.title)
              ? String.format(
                  "%s %.2f", recognition.title, (100 * recognition.detectionConfidence))
              : String.format("%.2f", (100 * recognition.detectionConfidence));
      // borderedText.drawText(canvas, trackedPos.left + cornerSize, trackedPos.top,
      //     labelString);
      borderedText.drawText(
          canvas, trackedPos.left + cornerSize, trackedPos.top, labelString + "%", boxPaint);
    }
  }

  private void processResults(final List<Recognition> results) {
    final List<Pair<Float, Recognition>> rectsToTrack =
        new LinkedList<Pair<Float, Recognition>>();

    screenRects.clear();
    final Matrix rgbFrameToScreen = new Matrix(getFrameToCanvasMatrix());

    for (final Recognition result : results) {
      if (result.getLocation() == null) {
        continue;
      }
      final RectF detectionFrameRect = new RectF(result.getLocation());

      final RectF detectionScreenRect = new RectF();
      rgbFrameToScreen.mapRect(detectionScreenRect, detectionFrameRect);

      screenRects.add(new Pair<Float, RectF>(result.getConfidence(), detectionScreenRect));

      if (detectionFrameRect.width() < MIN_SIZE || detectionFrameRect.height() < MIN_SIZE) {
        continue;
      }

      rectsToTrack.add(new Pair<Float, Recognition>(result.getConfidence(), result));
    }

    trackedObjects.clear();
    if (rectsToTrack.isEmpty()) {
      return;
    }

    for (final Pair<Float, Recognition> potential : rectsToTrack) {
      final TrackedRecognition trackedRecognition = new TrackedRecognition();
      trackedRecognition.detectionConfidence = potential.first;
      trackedRecognition.location = new RectF(potential.second.getLocation());
      trackedRecognition.title = potential.second.getTitle();
      trackedRecognition.color = COLORS[trackedObjects.size()];
      trackedObjects.add(trackedRecognition);

      if (trackedObjects.size() >= COLORS.length) {
        break;
      }
    }
  }

  private static class TrackedRecognition {
    RectF location;
    float detectionConfidence;
    int color;
    String title;
  }
}
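// Editor's sketch (not in the original source): the scale factors in draw()
// first letterbox the (rotated) frame into the canvas, then undo the resize
// from the frame to the square detector input. Worked numerically for a
// 480x640 frame, a 320-pixel detector input and a 1080x1440 canvas:
class BoxScaleExample {
  static float[] scales(
      final int frameW, final int frameH, final int inputSize, final int canvasW, final int canvasH) {
    final float frameToCanvas =
        Math.min((float) canvasH / frameH, (float) canvasW / frameW);
    // e.g. min(1440/640, 1080/480) = 2.25
    final float scaleW = frameToCanvas * ((float) frameW / inputSize); // 2.25 * 480/320 = 3.375
    final float scaleH = frameToCanvas * ((float) frameH / inputSize); // 2.25 * 640/320 = 4.5
    return new float[] {scaleW, scaleH};
  }
}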
================================================
FILE: app/tflite_yolov5_test/app/src/main/java/com/example/tflite_yolov5_test/customview/AutoFitTextureView.java
================================================
package com.example.tflite_yolov5_test.customview;

import android.content.Context;
import android.util.AttributeSet;
import android.view.TextureView;

/** A {@link TextureView} that can be adjusted to a specified aspect ratio. */
public class AutoFitTextureView extends TextureView {
  private int ratioWidth = 0;
  private int ratioHeight = 0;

  public AutoFitTextureView(final Context context) {
    this(context, null);
  }

  public AutoFitTextureView(final Context context, final AttributeSet attrs) {
    this(context, attrs, 0);
  }

  public AutoFitTextureView(final Context context, final AttributeSet attrs, final int defStyle) {
    super(context, attrs, defStyle);
  }

  /**
   * Sets the aspect ratio for this view. The size of the view will be measured based on the ratio
   * calculated from the parameters. Note that the actual sizes of parameters don't matter, that
   * is, calling setAspectRatio(2, 3) and setAspectRatio(4, 6) make the same result.
   *
   * @param width Relative horizontal size
   * @param height Relative vertical size
   */
  public void setAspectRatio(final int width, final int height) {
    if (width < 0 || height < 0) {
      throw new IllegalArgumentException("Size cannot be negative.");
    }
    ratioWidth = width;
    ratioHeight = height;
    requestLayout();
  }

  @Override
  protected void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) {
    super.onMeasure(widthMeasureSpec, heightMeasureSpec);
    final int width = MeasureSpec.getSize(widthMeasureSpec);
    final int height = MeasureSpec.getSize(heightMeasureSpec);
    if (0 == ratioWidth || 0 == ratioHeight) {
      setMeasuredDimension(width, height);
    } else {
      if (width < height * ratioWidth / ratioHeight) {
        setMeasuredDimension(width, width * ratioHeight / ratioWidth);
      } else {
        setMeasuredDimension(height * ratioWidth / ratioHeight, height);
      }
    }
  }
}
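// Editor's sketch (not in the original source): what onMeasure() above computes,
// factored out with hypothetical names. For a 3:4 ratio inside a 1080x1920
// parent, 1080 < 1920*3/4 = 1440, so the view measures 1080x1440 -- it fits the
// width and letterboxes vertically:
class AspectFitExample {
  // Returns {measuredWidth, measuredHeight} for ratio rw:rh inside a w x h parent.
  static int[] measure(final int w, final int h, final int rw, final int rh) {
    if (w < h * rw / rh) {
      return new int[] {w, w * rh / rw}; // e.g. 1080x1920 @ 3:4 -> 1080x1440
    }
    return new int[] {h * rw / rh, h};
  }
}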
================================================
FILE: app/tflite_yolov5_test/app/src/main/java/com/example/tflite_yolov5_test/customview/OverlayView.java
================================================
package com.example.tflite_yolov5_test.customview;

import android.content.Context;
import android.graphics.Canvas;
import android.util.AttributeSet;
import android.view.View;
import java.util.LinkedList;
import java.util.List;

/** A simple View providing a render callback to other classes. */
public class OverlayView extends View {
  private final List<DrawCallback> callbacks = new LinkedList<DrawCallback>();

  public OverlayView(final Context context, final AttributeSet attrs) {
    super(context, attrs);
  }

  public void addCallback(final DrawCallback callback) {
    callbacks.add(callback);
  }

  @Override
  public synchronized void draw(final Canvas canvas) {
    for (final DrawCallback callback : callbacks) {
      callback.drawCallback(canvas);
    }
  }

  /** Interface defining the callback for client classes. */
  public interface DrawCallback {
    void drawCallback(final Canvas canvas);
  }
}
================================================
FILE: app/tflite_yolov5_test/app/src/main/res/drawable/ic_dashboard_black_24dp.xml
================================================

================================================
FILE: app/tflite_yolov5_test/app/src/main/res/drawable/ic_home_black_24dp.xml
================================================

================================================
FILE: app/tflite_yolov5_test/app/src/main/res/drawable/ic_launcher_background.xml
================================================

================================================
FILE: app/tflite_yolov5_test/app/src/main/res/drawable/ic_notifications_black_24dp.xml
================================================

================================================
FILE: app/tflite_yolov5_test/app/src/main/res/drawable-v24/ic_launcher_foreground.xml
================================================

================================================
FILE: app/tflite_yolov5_test/app/src/main/res/layout/activity_camera.xml
================================================

================================================
FILE: app/tflite_yolov5_test/app/src/main/res/layout/activity_main.xml
================================================