[
  {
    "path": ".gitignore",
    "content": "# ------------------------\r\n# Python\r\n# ------------------------\r\n\r\n# Byte‐compiled / optimized / DLL files\r\n__pycache__/\r\n*.py[cod]\r\n*$py.class\r\n\r\n# C extensions\r\n*.so\r\n\r\n# pytorch models\r\n*.pt\r\n\r\n# Distribution / packaging\r\n.Python\r\nbuild/\r\ndevelop-eggs/\r\ndist/\r\ndownloads/\r\neggs/\r\n.eggs/\r\nlib/\r\nlib64/\r\nparts/\r\nsdist/\r\nvar/\r\nwheels/\r\n*.egg-info/\r\n.installed.cfg\r\n*.egg\r\nMANIFEST\r\n\r\n# Installer logs\r\npip-log.txt\r\npip-delete-this-directory.txt\r\n\r\n# Unit test / coverage reports\r\nhtmlcov/\r\n.coverage\r\n.coverage.*\r\n.cache\r\nnosetests.xml\r\ncoverage.xml\r\n*.cover\r\n*.py,cover\r\n.hypothesis/\r\n\r\n# Translations\r\n*.mo\r\n*.pot\r\n\r\n# Django (if ever)\r\nlocal_settings.py\r\ndb.sqlite3\r\n\r\n# Flask\r\ninstance/\r\n.webassets-cache\r\n\r\n# Sphinx docs\r\ndocs/_build/\r\n\r\n# PyBuilder\r\ntarget/\r\n\r\n# Jupyter\r\n.ipynb_checkpoints\r\n\r\n# IPython\r\nprofile_default/\r\nipython_config.py\r\n\r\n# pyenv\r\n.python-version\r\n\r\n# pipenv / poetry\r\nPipfile.lock\r\npoetry.lock\r\n\r\n# PEP 582\r\n__pypackages__/\r\n\r\n# Virtual environments\r\n.env\r\n.venv/\r\nenv/\r\nvenv/\r\nENV/\r\nenv.bak/\r\nvenv.bak/\r\n\r\n# Spyder\r\n.spyderproject\r\n.spyproject\r\n\r\n# Rope\r\n.ropeproject\r\n\r\n# mkdocs\r\n/site\r\n\r\n# mypy\r\n.mypy_cache/\r\n\r\n# pytest\r\n.pytest_cache/\r\n\r\n# IDEs / editors\r\n.vscode/\r\n.idea/\r\n*.iml\r\n\r\n# OS files\r\n.DS_Store\r\nThumbs.db\r\nDesktop.ini\r\nehthumbs.db\r\n\r\n# ------------------------\r\n# Node / Frontend (React/TS/Tailwind)\r\n# ------------------------\r\n\r\n# dependencies\r\nnode_modules/\r\n\r\n# logs\r\nnpm-debug.log*\r\nyarn-debug.log*\r\nyarn-error.log*\r\npnpm-debug.log*\r\n\r\n# lockfiles (if you prefer not to commit them)\r\n# Uncomment if you don't commit lockfiles\r\n# package-lock.json\r\n# yarn.lock\r\n# pnpm-lock.yaml\r\n\r\n# build outputs\r\nbuild/\r\ndist/\r\n.next/\r\nout/\r\npublic/\r\n\r\n# 
caches\r\n.cache/\r\n.parcel-cache/\r\n\r\n# tailwind\r\n.tailwind-cache/\r\n\r\n# TypeScript\r\n*.tsbuildinfo\r\n\r\n# environment\r\n.env.local\r\n.env.development.local\r\n.env.test.local\r\n.env.production.local\r\n\r\n# framework-specific (if you ever add)\r\n.serverless/\r\n.vuepress/dist/\r\n\r\n# assets\r\nbackend/plugins/follow/assets/*\r\n"
  },
  {
    "path": "LICENSE",
    "content": "                                 Apache License\r\n                           Version 2.0, January 2004\r\n                        http://www.apache.org/licenses/\r\n\r\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\r\n\r\n   1. Definitions.\r\n\r\n      \"License\" shall mean the terms and conditions for use, reproduction,\r\n      and distribution as defined by Sections 1 through 9 of this document.\r\n\r\n      \"Licensor\" shall mean the copyright owner or entity authorized by\r\n      the copyright owner that is granting the License.\r\n\r\n      \"Legal Entity\" shall mean the union of the acting entity and all\r\n      other entities that control, are controlled by, or are under common\r\n      control with that entity. For the purposes of this definition,\r\n      \"control\" means (i) the power, direct or indirect, to cause the\r\n      direction or management of such entity, whether by contract or\r\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\r\n      outstanding shares, or (iii) beneficial ownership of such entity.\r\n\r\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\r\n      exercising permissions granted by this License.\r\n\r\n      \"Source\" form shall mean the preferred form for making modifications,\r\n      including but not limited to software source code, documentation\r\n      source, and configuration files.\r\n\r\n      \"Object\" form shall mean any form resulting from mechanical\r\n      transformation or translation of a Source form, including but\r\n      not limited to compiled object code, generated documentation,\r\n      and conversions to other media types.\r\n\r\n      \"Work\" shall mean the work of authorship, whether in Source or\r\n      Object form, made available under the License, as indicated by a\r\n      copyright notice that is included in or attached to the work\r\n      (an example is provided in the Appendix below).\r\n\r\n      
\"Derivative Works\" shall mean any work, whether in Source or Object\r\n      form, that is based on (or derived from) the Work and for which the\r\n      editorial revisions, annotations, elaborations, or other modifications\r\n      represent, as a whole, an original work of authorship. For the purposes\r\n      of this License, Derivative Works shall not include works that remain\r\n      separable from, or merely link (or bind by name) to the interfaces of,\r\n      the Work and Derivative Works thereof.\r\n\r\n      \"Contribution\" shall mean any work of authorship, including\r\n      the original version of the Work and any modifications or additions\r\n      to that Work or Derivative Works thereof, that is intentionally\r\n      submitted to Licensor for inclusion in the Work by the copyright owner\r\n      or by an individual or Legal Entity authorized to submit on behalf of\r\n      the copyright owner. For the purposes of this definition, \"submitted\"\r\n      means any form of electronic, verbal, or written communication sent\r\n      to the Licensor or its representatives, including but not limited to\r\n      communication on electronic mailing lists, source code control systems,\r\n      and issue tracking systems that are managed by, or on behalf of, the\r\n      Licensor for the purpose of discussing and improving the Work, but\r\n      excluding communication that is conspicuously marked or otherwise\r\n      designated in writing by the copyright owner as \"Not a Contribution.\"\r\n\r\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\r\n      on behalf of whom a Contribution has been received by Licensor and\r\n      subsequently incorporated within the Work.\r\n\r\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\r\n      this License, each Contributor hereby grants to You a perpetual,\r\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\r\n      copyright license to reproduce, prepare Derivative Works of,\r\n      publicly display, publicly perform, sublicense, and distribute the\r\n      Work and such Derivative Works in Source or Object form.\r\n\r\n   3. Grant of Patent License. Subject to the terms and conditions of\r\n      this License, each Contributor hereby grants to You a perpetual,\r\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\r\n      (except as stated in this section) patent license to make, have made,\r\n      use, offer to sell, sell, import, and otherwise transfer the Work,\r\n      where such license applies only to those patent claims licensable\r\n      by such Contributor that are necessarily infringed by their\r\n      Contribution(s) alone or by combination of their Contribution(s)\r\n      with the Work to which such Contribution(s) was submitted. If You\r\n      institute patent litigation against any entity (including a\r\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\r\n      or a Contribution incorporated within the Work constitutes direct\r\n      or contributory patent infringement, then any patent licenses\r\n      granted to You under this License for that Work shall terminate\r\n      as of the date such litigation is filed.\r\n\r\n   4. Redistribution. 
You may reproduce and distribute copies of the\r\n      Work or Derivative Works thereof in any medium, with or without\r\n      modifications, and in Source or Object form, provided that You\r\n      meet the following conditions:\r\n\r\n      (a) You must give any other recipients of the Work or\r\n          Derivative Works a copy of this License; and\r\n\r\n      (b) You must cause any modified files to carry prominent notices\r\n          stating that You changed the files; and\r\n\r\n      (c) You must retain, in the Source form of any Derivative Works\r\n          that You distribute, all copyright, patent, trademark, and\r\n          attribution notices from the Source form of the Work,\r\n          excluding those notices that do not pertain to any part of\r\n          the Derivative Works; and\r\n\r\n      (d) If the Work includes a \"NOTICE\" text file as part of its\r\n          distribution, then any Derivative Works that You distribute must\r\n          include a readable copy of the attribution notices contained\r\n          within such NOTICE file, excluding those notices that do not\r\n          pertain to any part of the Derivative Works, in at least one\r\n          of the following places: within a NOTICE text file distributed\r\n          as part of the Derivative Works; within the Source form or\r\n          documentation, if provided along with the Derivative Works; or,\r\n          within a display generated by the Derivative Works, if and\r\n          wherever such third-party notices normally appear. The contents\r\n          of the NOTICE file are for informational purposes only and\r\n          do not modify the License. 
You may add Your own attribution\r\n          notices within Derivative Works that You distribute, alongside\r\n          or as an addendum to the NOTICE text from the Work, provided\r\n          that such additional attribution notices cannot be construed\r\n          as modifying the License.\r\n\r\n      You may add Your own copyright statement to Your modifications and\r\n      may provide additional or different license terms and conditions\r\n      for use, reproduction, or distribution of Your modifications, or\r\n      for any such Derivative Works as a whole, provided Your use,\r\n      reproduction, and distribution of the Work otherwise complies with\r\n      the conditions stated in this License.\r\n\r\n   5. Submission of Contributions. Unless You explicitly state otherwise,\r\n      any Contribution intentionally submitted for inclusion in the Work\r\n      by You to the Licensor shall be under the terms and conditions of\r\n      this License, without any additional terms or conditions.\r\n      Notwithstanding the above, nothing herein shall supersede or modify\r\n      the terms of any separate license agreement you may have executed\r\n      with Licensor regarding such Contributions.\r\n\r\n   6. Trademarks. This License does not grant permission to use the trade\r\n      names, trademarks, service marks, or product names of the Licensor,\r\n      except as required for reasonable and customary use in describing the\r\n      origin of the Work and reproducing the content of the NOTICE file.\r\n\r\n   7. Disclaimer of Warranty. Unless required by applicable law or\r\n      agreed to in writing, Licensor provides the Work (and each\r\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\r\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\r\n      implied, including, without limitation, any warranties or conditions\r\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\r\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\r\n      appropriateness of using or redistributing the Work and assume any\r\n      risks associated with Your exercise of permissions under this License.\r\n\r\n   8. Limitation of Liability. In no event and under no legal theory,\r\n      whether in tort (including negligence), contract, or otherwise,\r\n      unless required by applicable law (such as deliberate and grossly\r\n      negligent acts) or agreed to in writing, shall any Contributor be\r\n      liable to You for damages, including any direct, indirect, special,\r\n      incidental, or consequential damages of any character arising as a\r\n      result of this License or out of the use or inability to use the\r\n      Work (including but not limited to damages for loss of goodwill,\r\n      work stoppage, computer failure or malfunction, or any and all\r\n      other commercial damages or losses), even if such Contributor\r\n      has been advised of the possibility of such damages.\r\n\r\n   9. Accepting Warranty or Additional Liability. While redistributing\r\n      the Work or Derivative Works thereof, You may choose to offer,\r\n      and charge a fee for, acceptance of support, warranty, indemnity,\r\n      or other liability obligations and/or rights consistent with this\r\n      License. 
However, in accepting such obligations, You may act only\r\n      on Your own behalf and on Your sole responsibility, not on behalf\r\n      of any other Contributor, and only if You agree to indemnify,\r\n      defend, and hold each Contributor harmless for any liability\r\n      incurred by, or claims asserted against, such Contributor by reason\r\n      of your accepting any such warranty or additional liability.\r\n\r\n   END OF TERMS AND CONDITIONS\r\n\r\n   APPENDIX: How to apply the Apache License to your work.\r\n\r\n      To apply the Apache License to your work, attach the following\r\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\r\n      replaced with your own identifying information. (Don't include\r\n      the brackets!)  The text should be enclosed in the appropriate\r\n      comment syntax for the file format. We also recommend that a\r\n      file or class name and description of purpose be included on the\r\n      same \"printed page\" as the copyright notice for easier\r\n      identification within third-party archives.\r\n\r\n   Copyright 2025 Marshall Richards\r\n\r\n   Licensed under the Apache License, Version 2.0 (the \"License\");\r\n   you may not use this file except in compliance with the License.\r\n   You may obtain a copy of the License at\r\n\r\n       http://www.apache.org/licenses/LICENSE-2.0\r\n\r\n   Unless required by applicable law or agreed to in writing, software\r\n   distributed under the License is distributed on an \"AS IS\" BASIS,\r\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n   See the License for the specific language governing permissions and\r\n   limitations under the License.\r\n"
  },
  {
    "path": "NOTICE",
    "content": "turbodrone — reverse-engineered control stack for cheap toy drones\r\nCopyright © 2025 Marshall Richards\r\n\r\nThis product includes software developed by Marshall Richards\r\n(turbodrone, https://github.com/marshallrichards/turbodrone).\r\n\r\nPortions of this software are based on, or otherwise incorporate, the turbodrone project.\r\n\r\nLicensed under the Apache License, Version 2.0. You may obtain a copy of\r\nthe license in the LICENSE file or at <https://www.apache.org/licenses/LICENSE-2.0>.\r\n\r\n--------------------------------------------------------------------\r\nYou may add your own attribution notices below; this block must remain.\r\n--------------------------------------------------------------------"
  },
  {
    "path": "README.md",
    "content": "# Turbodrone\r\nReverse-engineered API and client for controlling some of the best-selling ~$50 \"toy\" drones on Amazon from a computer replacing the closed-source mobile apps they come with.\r\n\r\n![S20 Drone Short Clip](docs/images/s20-drone-short-clip-small.gif)\r\n\r\n## Introduction\r\nNowadays, there are incredibly cheap \"toy\" drones available on Amazon that are basically pared-down clones of the early DJI mavic. Only ~$50 to have a 1080p camera for FPV and recording, tiny downward-facing optical flow sensor for position and altitude hold, and a well-tuned flight profile out-of-the-box. The only problem with drones like this is that they run closed-source firmware and are locked to only being controlled by a custom mobile app. I thought it would be cool to free some of these drones from their \"jail\" and write an API and client for accessing the video feed and sending control commands down to the drone by reverse-engineering how the mobile apps work. That way you can turn a highly capable $50 \"toy\" drone into something that can be programmatically controlled and used for all sorts of applications and experiments.\r\n\r\n## Hardware\r\n* WiFi Camera Drones (ranked in order of recommendation):\r\n\r\n    | Brand      | Model Number(s) | Compatibility | Implementation | Motor      | Purchase Link                                               | Notes |\r\n    |------------|-----------------|---------------|----------------|------------|-------------------------------------------------------------|-------|\r\n    | Generic        | M10           | Tested    | `cooingdv` | Brushless | [Aliexpress](https://www.aliexpress.us/item/3256809708895605.html) | My current favorite. RTSP video stream is very reliable. |\r\n    | Plegble        | PL-515        | Tested | `s2x`      | Brushless | [Amazon](https://www.amazon.com/dp/B0D2B25YQQ) | Same airframe as the S2x models and takes the same batteries, but upgraded to brushless motors. 
|\r\n    | Karuisrc       | K417          | Tested    | `wifi_uav` | Brushless | [Amazon](https://www.amazon.com/Electric-Adjustable-AIdrones-Quadcopter-Beginners/dp/B0CYPSJ34H/) | First model to be supported with brushless motors. Build quality is great. |\r\n    | Loiley         | S29           | Tested    | `s2x`      | Brushed   | Can't find link anymore | Great build quality, has servo for tilting camera (_not implemented in API yet_) |\r\n    | Hiturbo        | S20           | Tested    | `s2x`      | Brushed   | [Amazon](https://www.amazon.com/dp/B0BBVZ849G), [Alternate Amazon Listing](https://www.amazon.com/Beginners-Foldable-Quadcopter-Gestures-Batteries/dp/B0D8LK1KJ3) | Original test platform, great build quality |\r\n    | FlyVista       | V88           | Tested    | `wifi_uav` | Brushed   | [Amazon](https://www.amazon.com/dp/B0D5CXY6X8) | |\r\n    | ?              | D16/GT3/V66   | Tested    | `wifi_uav` | Brushed   | cheapest on [Aliexpress](https://www.aliexpress.us/item/3256808590663347.html), [Amazon](https://www.amazon.com/AUHIFVAX-Intelligent-Avoidance-Christmas-Thanksgiving/dp/B0FJRVH76T) | 20% smaller DJI Neo clone. Only really good for indoor flight. 
|\r\n    | Several Brands | E58           | Tested    | Unknown    | Brushed   | [Amazon](https://www.amazon.com/Foldable-Quadcopter-Beginners-Batteries-Waypoints/dp/B09KV8L7WN/) | |\r\n    | Several Brands | E88           | Suspected | `cooingdv` | Brushed   | [Amazon](https://www.amazon.com/Beginners-Foldable-Quadcopter-Real-Time-Rechargable/dp/B0FKNH6Q4T) | |\r\n    | Several Brands | E88 Pro       | Suspected | Unknown    | Brushed   | [Amazon](https://www.amazon.com/Beginners-Foldable-Quadcopter-Real-Time-Rechargable/dp/B0FKNH6Q4T) | |\r\n    | Several Brands | E99           | Suspected | `cooingdv` | Brushed   | [Amazon](https://www.amazon.com/LJN53-Foldable-Drone-Dual-Cameras/dp/B0DRH9C6RF) | |\r\n    | Several Brands | E99 Pro       | Suspected | Unknown    | Brushed   | [Amazon](https://www.amazon.com/LJN53-Foldable-Drone-Dual-Cameras/dp/B0DRH9C6RF) | |\r\n    | Swifsen        | A35           | Suspected | Unknown    | Brushed   | [Amazon](https://a.co/d/bqKvloz) | Very small \"toy\" drone |\r\n    | Unknown        | LSRC-S1S      | Tested | `wifi_uav` | Brushed   | | mentioned in another reverse-engineering effort for the WiFi UAV app |\r\n    | Velcase        | S101          | TODO      | Unknown      | Brushed   | [Amazon](https://www.amazon.com/Foldable-Beginners-Quadcopter-Carrying-Positioning/dp/B0CH341G5F/) | lower quality build, smaller battery and props than S29 & S20 |\r\n    | Redrie         | X29           | Tested      | `s2x`    | Brushed   | [Amazon](https://www.amazon.com/Adults-1080P-Foldable-Altitude-Auto-Follow-Batteries/dp/B0CZQKNYL5) | Average build quality |\r\n\r\n    _**Tested** means the drone has been physically run with turbodrone to ensure its compatibility._\r\n\r\n  _**Suspected** means the APK for the drone appears to use the exact same packages and libraries as one of the tested drones._\r\n\r\n  _**TODO** means the APK operates with different byte packets and protocols and will have to be added as a new implementation in 
the API._\r\n\r\n* WiFi Dongle ([recommend ALFA Network AWUS036ACM](https://www.amazon.com/Network-AWUS036ACM-Long-Range-Wide-Coverage-High-Sensitivity/dp/B08BJS8FXD) or similar) \r\n  * Drone broadcasts its own WiFi network so your computer will have to connect to it.\r\n  * Not strictly necessary because you can use your computer's built-in WiFi radio to connect to the drone's network, but nice to have that way you can stay connected to the internet while flying the drone.\r\n\r\n\r\n## Setup\r\nMove to the `backend` directory\r\n```\r\ncd backend\r\n```\r\n\r\nAdd venv\r\n```\r\npython -m venv venv\r\nsource venv/bin/activate\r\n```\r\n\r\nInstall the dependencies\r\n```\r\npip install -r requirements.txt\r\n```\r\n\r\n_If_ you are on Windows, you will need to manually install the `curses` library.\r\n```\r\npip install windows-curses\r\n```\r\n\r\nOpen a new terminal window and install the dependencies for the frontend.\r\n_Make sure you have Node.js 20+ installed._\r\n```\r\ncd frontend\r\nnpm install\r\n```\r\n\r\nMake sure WiFi Dongle is plugged in, drone is turned on, connect to the \"BRAND-MODEL-XXXXXX\" network before proceeding.\r\n\r\nCreate a `.env` file in the `backend` directory. 
Add a DRONE_TYPE based on which drone you have:\r\n```\r\n# For \"com.vison.macrochip\" (s2x) based drones like S20 and S29:\r\nDRONE_TYPE=s2x\r\n# For WiFi UAV-based drones like V88 and D16:\r\n# DRONE_TYPE=wifi_uav\r\n# For cooingdv publisher drone apps like RC UFO and KY UFO:\r\n# DRONE_TYPE=cooingdv\r\n# For KY FPV / Jieli CTP control with first-pass RTP/JPEG video:\r\n# DRONE_TYPE=cooingdv_jieli\r\n# For WiFi_CAM native UDP apps:\r\n# DRONE_TYPE=wifi_cam\r\n```\r\n\r\nLaunch the backend: \r\n```\r\nuvicorn web_server:app\r\n```\r\n\r\nIn a separate terminal, launch the frontend web client:\r\n```\r\nnpm run dev\r\n```\r\n\r\nOpen the web client which will be at `http://localhost:5173` and you should see the drone video feed and be able to control it.\r\n\r\nTo control via a gaming controller, plug it in and move the sticks around for it to be detected and then push the toggle button to switch between keyboard and controller control.\r\n\r\nMake sure to fly in a safe area, preferably outdoors with little wind. And note that the \"Land\" button _currently_ is more of an E-stop button that will stop the drone motors immediately.\r\n\r\n\r\n## Status\r\nReconnection logic was solved recently.\r\n\r\nVideo feed: solid.\r\n\r\nControls: improved greatly via the web client. The implementation for WiFi UAV-based drones could use some fine-tuning.\r\n\r\nWeb Client: support for various inputs like keyboard, gamepad controllers, and ThinkPad TrackPoint mouse(lol).\r\n\r\nWorking on adding support for more drones from [Amazon's best-selling drone list](https://www.amazon.com/best-selling-drones/s?k=best+selling+drones).\r\n\r\n\r\n## Contribute\r\nTo contribute support for a new \"toy\" drone, download the APK the drone uses on a mirror site and start reverse engineering it by decompiling to Java files with [jadx](https://github.com/skylot/jadx).\r\nFrom there, look at the `AndroidManifest.xml` and see if you can find the classes that are entry points for the app. 
Look for port usage or protocol usage explicitly mentioned like TCP or UDP. Most of these apps will do the actual communication and video feed processing in native C++ libraries that will be embedded inside the APK. You can use a tool like Ghidra to decompile the native libraries and see if you can discover anything useful. For video feed processing you want to figure out what format it uses e.g. JPEG, YUV, etc. and also if it uses compression and what the byte structure looks like when it's reforming an image frame from packets.\r\nAdditionally, Wireshark is your friend for understanding the raw data packets being sent and received by the app. Watch this [video](https://x.com/marshallrichrds/status/1923165437698670818) for an overview of the reverse engineering process used for adding support for the Hiturbo S20 drone.\r\n\r\nOnce you have the protocols and processing for RC and video figured out, make a small test program and add it to the `experimental` directory at that point if you'd like others to be able to try it out.\r\nAfter that, you can work on an implementation that is compatible with the existing back-end architecture; examples of this are the `s2x` and `wifi_uav` reverse-engineered implementations.\r\n\r\n\r\n## Experimental Support\r\nFor drones and apps with limited support which are not yet fully integrated into Turbodrone, see the `experimental` directory.\r\n"
  },
  {
    "path": "backend/control/strategies.py",
    "content": "from abc import ABC, abstractmethod\r\nfrom typing import Dict\r\n\r\nclass ControlStrategy(ABC):\r\n    \"\"\"Maps user intent → raw stick values inside the RC model.\"\"\"\r\n\r\n    @abstractmethod\r\n    def update(self, model, dt: float, axes: Dict[str, float]) -> None:\r\n        \"\"\"`axes` always ranges  -1 … +1  for throttle, yaw, pitch, roll\"\"\"\r\n        ...\r\n\r\n# 1. Same behaviour you already have for the CLI keyboard\r\nclass IncrementalStrategy(ControlStrategy):\r\n    def update(self, model, dt, axes):\r\n        # axes entries are  -1, 0, +1  (discrete keys)\r\n        model._update_axes_incremental(dt, axes)\r\n\r\n# 2. Direct mapping for joysticks / Gamepad API\r\nclass DirectStrategy(ControlStrategy):\r\n    \"\"\"\r\n    Absolute mode: normalised stick position is mapped directly\r\n    to the drone's raw range (optionally with expo curve).\r\n    \"\"\"\r\n    def update(self, model, dt, axes):\r\n        expo = getattr(model, \"expo_factor\", 0.0)\r\n        for axis, v in axes.items():\r\n            # optional expo curve\r\n            if expo:\r\n                sign = 1 if v >= 0 else -1\r\n                v = sign * (abs(v) ** (1 + expo))\r\n            setattr(model, axis, model._scale_normalised(v)) "
  },
  {
    "path": "backend/main.py",
    "content": "#!/usr/bin/env python3\r\nimport logging\r\nimport os\r\n\r\nfrom utils.logging_config import bootstrap_runtime, configure_logging\r\n\r\nbootstrap_runtime()\r\n\r\nimport argparse\r\nimport threading\r\nimport queue\r\nimport signal\r\nimport sys\r\n\r\nfrom models.s2x_rc import S2xDroneModel\r\nfrom protocols.s2x_rc_protocol_adapter import S2xRCProtocolAdapter\r\nfrom protocols.s2x_video_protocol import S2xVideoProtocolAdapter\r\n\r\nfrom models.wifi_uav_rc import WifiUavRcModel\r\nfrom protocols.wifi_uav_rc_protocol_adapter import WifiUavRcProtocolAdapter\r\nfrom protocols.wifi_uav_video_protocol import WifiUavVideoProtocolAdapter\r\n\r\nfrom models.cooingdv_rc import CooingdvRcModel\r\nfrom protocols.cooingdv_rc_protocol_adapter import CooingdvRcProtocolAdapter\r\nfrom protocols.cooingdv_jieli_rc_protocol_adapter import CooingdvJieliRcProtocolAdapter\r\nfrom protocols.cooingdv_jieli_video_protocol import CooingdvJieliVideoProtocolAdapter\r\nfrom protocols.cooingdv_video_protocol import CooingdvVideoProtocolAdapter\r\nfrom models.wifi_cam_rc import WifiCamRcModel\r\nfrom protocols.wifi_cam_rc_protocol_adapter import WifiCamRcProtocolAdapter\r\nfrom protocols.wifi_cam_video_protocol import WifiCamVideoProtocolAdapter\r\n\r\nfrom services.flight_controller import FlightController\r\nfrom services.video_receiver import VideoReceiverService\r\nfrom utils.wifi_uav_variants import WIFI_UAV_DRONE_TYPES, resolve_wifi_uav_variant\r\nfrom views.cli_rc import CLIView\r\nfrom views.opencv_video_view import OpenCVVideoView\r\n\r\nCOOINGDV_DRONE_TYPES = {\"cooingdv\", \"cooingdv_jieli\"}\r\n\r\n\r\nconfigure_logging()\r\nlogger = logging.getLogger(__name__)\r\n\r\ndef main():\r\n    parser = argparse.ArgumentParser(description=\"Drone teleoperation interface\")\r\n    parser.add_argument(\"--drone-type\", type=str, default=\"s2x\",\r\n                        choices=[\"s2x\", \"wifi_uav\", \"wifi_uav_fld\", \"wifi_uav_uav\", \"cooingdv\", \"cooingdv_jieli\", 
\"wifi_cam\"],\r\n                        help=\"Type of drone to control (s2x, wifi_uav, wifi_uav_fld, wifi_uav_uav, cooingdv, cooingdv_jieli, or wifi_cam, default: s2x)\")\r\n    parser.add_argument(\"--drone-ip\", type=str,\r\n                        help=\"Drone UDP IP address (default: s2x=172.16.10.1, wifi_uav=192.168.169.1, cooingdv=192.168.1.1, wifi_cam=192.168.4.153)\")\r\n    parser.add_argument(\"--control-port\", type=int,\r\n                        help=\"Drone control port (default: s2x=8080, wifi_uav=8800)\")\r\n    parser.add_argument(\"--video-port\", type=int,\r\n                        help=\"Drone video port (default: s2x=8888, wifi_uav=8800)\")\r\n    parser.add_argument(\"--rate\", type=float, default=None,\r\n                        help=\"Control packets per second (default depends on drone type)\")\r\n    parser.add_argument(\"--with-video\", action=\"store_true\",\r\n                        help=\"Enable video feed\")\r\n    parser.add_argument(\"--dump-frames\", action=\"store_true\",\r\n                        help=\"Dump video frames to files\")\r\n    parser.add_argument(\"--dump-packets\", action=\"store_true\",\r\n                        help=\"Dump raw video packets to files\")\r\n    args = parser.parse_args()\r\n\r\n    # Create model, protocol adapter, and controller\r\n    if args.drone_type == \"s2x\":\r\n        logger.info(\"[main] Using S2X drone implementation.\")\r\n        default_ip = \"172.16.10.1\"\r\n        default_control_port = 8080\r\n        default_video_port = 8888\r\n        default_rate = 80.0\r\n        \r\n        drone_ip = args.drone_ip if args.drone_ip else default_ip\r\n        control_port = args.control_port if args.control_port else default_control_port\r\n        video_port = args.video_port if args.video_port else default_video_port\r\n\r\n        drone_model = S2xDroneModel()\r\n        protocol_adapter = S2xRCProtocolAdapter(drone_ip, control_port)\r\n        video_protocol_adapter_class = 
S2xVideoProtocolAdapter\r\n    elif args.drone_type in WIFI_UAV_DRONE_TYPES:\r\n        wifi_uav_variant = resolve_wifi_uav_variant(args.drone_type)\r\n        logger.info(\"[main] Using WiFi UAV drone implementation (variant=%s).\", wifi_uav_variant)\r\n        default_ip = \"192.168.169.1\"\r\n        default_control_port = 8800\r\n        default_video_port = 8800 # For WifiUAV, control and video often use the same port\r\n        default_rate = 80.0\r\n\r\n        drone_ip = args.drone_ip if args.drone_ip else default_ip\r\n        control_port = args.control_port if args.control_port else default_control_port\r\n        video_port = args.video_port if args.video_port else default_video_port\r\n\r\n        drone_model = WifiUavRcModel()\r\n        protocol_adapter = WifiUavRcProtocolAdapter(drone_ip, control_port, variant=wifi_uav_variant)\r\n        video_protocol_adapter_class = WifiUavVideoProtocolAdapter\r\n    elif args.drone_type in COOINGDV_DRONE_TYPES:\r\n        logger.info(\"[main] Using Cooingdv drone implementation (%s).\", args.drone_type)\r\n        if args.drone_type == \"cooingdv_jieli\":\r\n            default_ip = \"192.168.8.15\"\r\n            default_control_port = 2228\r\n            default_video_port = 0\r\n        else:\r\n            default_ip = \"192.168.1.1\"\r\n            default_control_port = 7099\r\n            default_video_port = 7070  # RTSP port for video streaming\r\n        default_rate = 20.0\r\n\r\n        drone_ip = args.drone_ip if args.drone_ip else default_ip\r\n        control_port = args.control_port if args.control_port else default_control_port\r\n        video_port = args.video_port if args.video_port else default_video_port\r\n\r\n        drone_model = CooingdvRcModel()\r\n        if args.drone_type == \"cooingdv_jieli\":\r\n            protocol_adapter = CooingdvJieliRcProtocolAdapter(drone_ip, control_port)\r\n            video_protocol_adapter_class = CooingdvJieliVideoProtocolAdapter\r\n        else:\r\n   
         protocol_adapter = CooingdvRcProtocolAdapter(drone_ip, control_port)\r\n            video_protocol_adapter_class = CooingdvVideoProtocolAdapter\r\n    elif args.drone_type == \"wifi_cam\":\r\n        logger.info(\"[main] Using WiFi_CAM native UDP implementation.\")\r\n        default_ip = \"192.168.4.153\"\r\n        default_control_port = 8090\r\n        default_video_port = 8080\r\n        default_rate = 25.0\r\n\r\n        drone_ip = args.drone_ip if args.drone_ip else default_ip\r\n        control_port = args.control_port if args.control_port else default_control_port\r\n        video_port = args.video_port if args.video_port else default_video_port\r\n\r\n        drone_model = WifiCamRcModel()\r\n        protocol_adapter = WifiCamRcProtocolAdapter(\r\n            drone_ip,\r\n            control_port,\r\n            command_mode=os.getenv(\"WIFI_CAM_COMMAND_MODE\", \"auto\"),\r\n        )\r\n        video_protocol_adapter_class = WifiCamVideoProtocolAdapter\r\n    else:\r\n        # Should not happen due to choices in argparse\r\n        logger.error(\"[main] Unknown drone type: %s\", args.drone_type)\r\n        sys.exit(1)\r\n\r\n    control_rate = args.rate if args.rate is not None else default_rate\r\n    controller = FlightController(drone_model, protocol_adapter, control_rate)\r\n    \r\n\r\n    \r\n    # Start video if requested\r\n    video_view = None\r\n    video_receiver = None\r\n    video_thread = None\r\n    \r\n    if args.with_video:\r\n        # Define the blueprint for the video protocol adapter.\r\n        # The VideoReceiverService will create and manage the instance.\r\n        if args.drone_type == \"s2x\":\r\n            video_protocol_args = {\r\n                \"drone_ip\": drone_ip,\r\n                \"control_port\": control_port,\r\n                \"video_port\": video_port\r\n            }\r\n        elif args.drone_type in WIFI_UAV_DRONE_TYPES:\r\n            video_protocol_args = {\r\n                \"drone_ip\": 
drone_ip,\r\n                \"control_port\": control_port,\r\n                \"video_port\": video_port,\r\n                \"variant\": wifi_uav_variant,\r\n            }\r\n        elif args.drone_type in COOINGDV_DRONE_TYPES:\r\n            video_protocol_args = {\r\n                \"drone_ip\": drone_ip,\r\n                \"control_port\": control_port,\r\n                \"video_port\": video_port\r\n            }\r\n            if args.drone_type == \"cooingdv_jieli\":\r\n                video_protocol_args[\"video_port\"] = video_port or 6666\r\n        elif args.drone_type == \"wifi_cam\":\r\n            video_protocol_args = {\r\n                \"drone_ip\": drone_ip,\r\n                \"control_port\": control_port,\r\n                \"video_port\": video_port,\r\n            }\r\n        \r\n        frame_queue = queue.Queue(maxsize=100)\r\n        video_receiver = VideoReceiverService(\r\n            video_protocol_adapter_class, # The class to instantiate\r\n            video_protocol_args,          # The arguments for it\r\n            frame_queue,\r\n            dump_frames=args.dump_frames,\r\n            dump_packets=args.dump_packets,\r\n            rc_adapter=protocol_adapter if args.drone_type in WIFI_UAV_DRONE_TYPES or args.drone_type == \"wifi_cam\" else None,\r\n        )\r\n        video_view = OpenCVVideoView(frame_queue)\r\n        \r\n        # Start video receiver service. 
It now handles the protocol's lifecycle.\r\n        video_receiver.start()\r\n        \r\n        # Run HighGUI in its own, non-daemon thread\r\n        video_thread = threading.Thread(\r\n            target=video_view.run,\r\n            name=\"OpenCVVideoThread\"\r\n        )\r\n        video_thread.start()\r\n\r\n    # Start controller\r\n    controller.start()\r\n    \r\n    # Set up signal handler for clean shutdown\r\n    def signal_handler(sig, frame):\r\n        logger.info(\"[main] Caught signal, shutting down...\")\r\n        \r\n        # First stop video components\r\n        if video_receiver:\r\n            video_receiver.stop()\r\n        if video_view:\r\n            video_view.stop()\r\n        if video_thread:\r\n            video_thread.join(timeout=1.0)\r\n        \r\n        # Then stop controller\r\n        controller.stop()\r\n        \r\n        # Exit more forcefully, but only if threads haven't cleaned up\r\n        if video_thread and video_thread.is_alive():\r\n            logger.warning(\"[main] Forcing exit due to lingering threads\")\r\n            os._exit(0)\r\n        else:\r\n            # Normal exit\r\n            sys.exit(0)\r\n    \r\n    signal.signal(signal.SIGINT, signal_handler)\r\n    signal.signal(signal.SIGTERM, signal_handler)\r\n    \r\n    # Start CLI view\r\n    try:\r\n        view = CLIView(controller)\r\n        view.run()\r\n    except KeyboardInterrupt:\r\n        pass\r\n    finally:\r\n        # Clean up in reverse order of creation\r\n        controller.stop()\r\n        \r\n        # Clean up video components\r\n        if video_view:\r\n            video_view.stop()\r\n        if video_receiver:\r\n            video_receiver.stop()\r\n        if video_thread:\r\n            video_thread.join()          # wait until the window thread exits\r\n\r\nif __name__ == \"__main__\":\r\n    main()"
  },
  {
    "path": "backend/models/__init__.py",
    "content": "from .s2x_rc import S2xDroneModel\r\n\r\n__all__ = ['S2xDroneModel']\r\n"
  },
  {
    "path": "backend/models/base_rc.py",
    "content": "from __future__ import annotations\r\n\r\nfrom abc import ABC, abstractmethod\r\nfrom typing import ClassVar, Dict, List, Union, Optional\r\n\r\nfrom models.stick_range import StickRange\r\nfrom models.control_profile import ControlProfile\r\n\r\n\r\nclass BaseRCModel(ABC):\r\n    \"\"\"Common logic for every RC model / protocol implementation.\"\"\"\r\n\r\n    # Sub-classes **must** override or pass a StickRange explicitly.\r\n    STICK_RANGE: ClassVar[Optional[StickRange]] = None\r\n\r\n    # Generic fall-back presets – drones override with their own set\r\n    PRESETS: ClassVar[Dict[str, ControlProfile]] = {\r\n        \"normal\":     ControlProfile(\"normal\",     1.5,  2.5, 0.5, 0.02),\r\n        \"precise\":    ControlProfile(\"precise\",    1.0,  3.0, 0.3, 0.01),\r\n        \"aggressive\": ControlProfile(\"aggressive\", 3.0,  2.0, 1.5, 0.10),\r\n    }\r\n    SENSITIVITY_SEQ: ClassVar[List[str]] = [\"normal\", \"precise\", \"aggressive\"]\r\n\r\n    # -----------------------------------------------------------------\r\n    def __init__(\r\n        self,\r\n        stick_range: Optional[StickRange] = None,\r\n        profile: Union[str, ControlProfile] = \"normal\",\r\n    ) -> None:\r\n        # ----- enforce STICK_RANGE -----------------------------------\r\n        if stick_range is None:\r\n            stick_range = self.__class__.STICK_RANGE\r\n        if stick_range is None:\r\n            raise TypeError(\r\n                f\"{self.__class__.__name__} must define STICK_RANGE \"\r\n                \"or pass stick_range to BaseRCModel.__init__()\"\r\n            )\r\n        # -------------------------------------------------------------\r\n\r\n        if isinstance(profile, str):\r\n            if profile not in self.PRESETS:\r\n                raise ValueError(f\"Unknown profile '{profile}'\")\r\n            profile = self.PRESETS[profile]\r\n\r\n        self.range = stick_range\r\n        self.min_control_value = 
float(stick_range.min_val)\r\n        self.center_value      = float(stick_range.mid_val)\r\n        self.max_control_value = float(stick_range.max_val)\r\n\r\n        self._apply_profile(profile)\r\n\r\n        # axes start centred\r\n        self.throttle = self.yaw = self.pitch = self.roll = self.center_value\r\n\r\n    # ----- API that concrete models MUST still implement --------------\r\n    @abstractmethod\r\n    def update(self, dt, axes): ...\r\n    @abstractmethod\r\n    def takeoff(self): ...\r\n    @abstractmethod\r\n    def land(self): ...\r\n    @abstractmethod\r\n    def get_control_state(self): ...\r\n\r\n    # ----- shared helpers ---------------------------------------------\r\n    def set_profile(self, name: str) -> None:\r\n        if name not in self.PRESETS:\r\n            raise ValueError(f\"Unknown profile '{name}'\")\r\n        self._apply_profile(self.PRESETS[name])\r\n\r\n    def set_sensitivity(self, preset: int) -> None:\r\n        idx = preset % len(self.SENSITIVITY_SEQ)\r\n        self.set_profile(self.SENSITIVITY_SEQ[idx])\r\n\r\n    def set_strategy(self, strategy) -> None:\r\n        self.strategy = strategy\r\n\r\n    # -----------------------------------------------------------------\r\n    def _apply_profile(self, profile: ControlProfile) -> None:\r\n        half_range = self.max_control_value - self.center_value\r\n        full_range = self.max_control_value - self.min_control_value\r\n\r\n        self.profile            = profile\r\n        self.accel_rate         = profile.accel_ratio     * half_range\r\n        self.decel_rate         = profile.decel_ratio     * half_range\r\n        self.expo_factor        = profile.expo_factor\r\n        self.immediate_response = profile.immediate_ratio * full_range\r\n\r\n    # existing helper (unchanged)\r\n    def _scale_normalised(self, value: float) -> float:\r\n        \"\"\"\r\n        Map a normalised [-1 … +1] input to raw protocol units using\r\n        the model's 
StickRange.\r\n        \"\"\"\r\n        if value >= 0:\r\n            return self.center_value + value * (self.max_control_value - self.center_value)\r\n        return self.center_value + value * (self.center_value - self.min_control_value)\r\n\r\n    def _update_axes_incremental(self, dt, axes):\r\n        self.update_axes(\r\n            dt,\r\n            axes.get(\"throttle\", 0),\r\n            axes.get(\"yaw\", 0),\r\n            axes.get(\"pitch\", 0),\r\n            axes.get(\"roll\", 0),\r\n        )\r\n\r\n    def update_axes(self, dt, throttle_dir, yaw_dir, pitch_dir, roll_dir):\r\n        \"\"\"\r\n        Apply the shared incremental stick logic used by keyboard-style controls.\r\n\r\n        Pitch and roll get a small immediate boost when the pilot reverses\r\n        direction so the craft feels less sluggish during lateral movement.\r\n        \"\"\"\r\n        for attr, direction, boost_enabled in (\r\n            (\"throttle\", throttle_dir, False),\r\n            (\"yaw\", yaw_dir, False),\r\n            (\"pitch\", pitch_dir, True),\r\n            (\"roll\", roll_dir, True),\r\n        ):\r\n            cur = getattr(self, attr)\r\n            last_dir_attr = f\"last_{attr}_dir\"\r\n            last_dir = getattr(self, last_dir_attr, 0)\r\n\r\n            if direction > 0:\r\n                if boost_enabled and last_dir <= 0:\r\n                    cur += min(\r\n                        self.max_control_value - cur,\r\n                        self.immediate_response,\r\n                    )\r\n                dist = self.max_control_value - cur\r\n                accel = self.accel_rate * dt * (\r\n                    1 + self.expo_factor * dist\r\n                    / (self.max_control_value - self.center_value)\r\n                )\r\n                new = min(self.max_control_value, cur + accel)\r\n\r\n            elif direction < 0:\r\n                if boost_enabled and last_dir >= 0:\r\n                    cur -= min(\r\n              
          cur - self.min_control_value,\r\n                        self.immediate_response,\r\n                    )\r\n                dist = cur - self.min_control_value\r\n                accel = self.accel_rate * dt * (\r\n                    1 + self.expo_factor * dist\r\n                    / (self.center_value - self.min_control_value)\r\n                )\r\n                new = max(self.min_control_value, cur - accel)\r\n\r\n            else:\r\n                if cur > self.center_value:\r\n                    dist = cur - self.center_value\r\n                    decel = self.decel_rate * dt * (\r\n                        1 + 0.5 * dist\r\n                        / (self.max_control_value - self.center_value)\r\n                    )\r\n                    new = max(self.center_value, cur - decel)\r\n                elif cur < self.center_value:\r\n                    dist = self.center_value - cur\r\n                    decel = self.decel_rate * dt * (\r\n                        1 + 0.5 * dist\r\n                        / (self.center_value - self.min_control_value)\r\n                    )\r\n                    new = min(self.center_value, cur + decel)\r\n                else:\r\n                    new = cur\r\n\r\n            setattr(self, attr, new)\r\n            setattr(self, last_dir_attr, direction)\r\n\r\n    def _update_axes_direct(self, axes):\r\n        expo = getattr(self, \"expo_factor\", 0.0)\r\n        for attr, value in axes.items():\r\n            if expo:                              # optional expo curve\r\n                sign  = 1 if value >= 0 else -1\r\n                value = sign * (abs(value) ** (1 + expo))\r\n            setattr(self, attr, self._scale_normalised(value))\r\n"
  },
  {
    "path": "backend/models/base_video_model.py",
    "content": "from abc import ABC, abstractmethod\r\nfrom typing import Optional\r\nfrom models.video_frame import VideoFrame\r\n\r\nclass BaseVideoModel(ABC):\r\n    \"\"\"\r\n    Stateless interface that turns *chunks* (whatever the protocol\r\n    thinks a chunk is: a JPEG slice, a whole JPEG, a H.264 NALU …)\r\n    into complete VideoFrame objects.\r\n    \"\"\"\r\n\r\n    @abstractmethod\r\n    def ingest_chunk(\r\n        self,\r\n        *,\r\n        stream_id: int | None = None,\r\n        chunk_id: int | None = None,\r\n        payload: bytes,\r\n    ) -> Optional[VideoFrame]:\r\n        \"\"\"\r\n        Feed one chunk into the model.\r\n\r\n        Parameters\r\n        ----------\r\n        stream_id : int | None\r\n            Identifier of the logical stream / frame (e.g. frame number).\r\n        chunk_id  : int | None\r\n            Sequential id of this chunk inside the stream (e.g. slice index).\r\n        payload   : bytes\r\n            Raw codec payload (JPEG slice, NALU, etc.).\r\n\r\n        Returns\r\n        -------\r\n        Optional[VideoFrame]\r\n            • VideoFrame when a frame is complete\r\n            • None if more data is required\r\n        \"\"\"\r\n        raise NotImplementedError"
  },
  {
    "path": "backend/models/control_profile.py",
    "content": "from dataclasses import dataclass\r\n\r\n@dataclass(frozen=True)\r\nclass ControlProfile:\r\n    \"\"\"\r\n    Profile parameters expressed as *ratios* of the stick range.\r\n      accel_ratio       – fraction of half-range per second\r\n      decel_ratio       – fraction of half-range per second\r\n      expo_factor       – dimension-less (unchanged)\r\n      immediate_ratio   – fraction of full range for one-shot boost\r\n    \"\"\"\r\n    name: str\r\n    accel_ratio: float\r\n    decel_ratio: float\r\n    expo_factor: float = 0.0\r\n    immediate_ratio: float = 0.0\r\n"
  },
  {
    "path": "backend/models/cooingdv_rc.py",
    "content": "\"\"\"\r\nRC Model for Cooingdv drones (RC UFO, KY UFO, E88 Pro, etc.)\r\n\r\nThese drones use the cooingdv publisher's mobile apps and communicate\r\nvia UDP on port 7099 with RTSP video on port 7070.\r\n\r\nKey features:\r\n- Soft landing (distinct from emergency stop)\r\n- Headless mode\r\n- Flip/somersault capability\r\n- Gyro calibration\r\n\"\"\"\r\n\r\nfrom __future__ import annotations\r\n\r\nfrom models.base_rc import BaseRCModel\r\nfrom models.control_profile import ControlProfile\r\nfrom models.stick_range import StickRange\r\nfrom control.strategies import IncrementalStrategy\r\n\r\n\r\nclass CooingdvRcModel(BaseRCModel):\r\n    \"\"\"\r\n    RC model for drones using cooingdv publisher apps (RC UFO, KY UFO, E88 Pro).\r\n\r\n    Protocol details from decompiled apps:\r\n    - Stick center: 128 (0x80)\r\n    - Safe operating range: 50-200 (apps use these bounds)\r\n    - The stock apps send control updates every 50 ms (about 20 Hz)\r\n    - The CooingDV family has at least two packet variants: short \"TC\" and\r\n      extended \"GL\". 
This model exposes the shared high-level commands while\r\n      the protocol adapter maps them onto the correct flag layout.\r\n    \"\"\"\r\n\r\n    # Stick range from decompiled FlyController.java\r\n    # Center at 128, safe bounds 50-200 (apps clamp to these)\r\n    STICK_RANGE = StickRange(50, 128, 200)\r\n\r\n    PRESETS = {\r\n        # name         accel   decel  expo  immediate-boost\r\n        \"normal\":     ControlProfile(\"normal\",     2.0, 4.0, 0.5, 0.02),\r\n        \"precise\":    ControlProfile(\"precise\",    1.2, 5.0, 0.3, 0.01),\r\n        \"aggressive\": ControlProfile(\"aggressive\", 4.0, 3.0, 1.2, 0.10),\r\n    }\r\n\r\n    def __init__(self, profile: str | ControlProfile = \"normal\") -> None:\r\n        super().__init__(stick_range=self.STICK_RANGE, profile=profile)\r\n\r\n        self.strategy = IncrementalStrategy()\r\n\r\n        # One-shot command flags\r\n        self.takeoff_flag = False\r\n        self.land_flag = False          # Soft landing (0x02)\r\n        self.stop_flag = False          # Emergency stop (0x04)\r\n        self.flip_flag = False          # Flip/somersault (0x08)\r\n        self.headless_flag = False      # Headless mode (0x10) - toggle state\r\n        self.calibration_flag = False   # Gyro calibration (0x80)\r\n\r\n        # Track last motion direction for each axis\r\n        self.last_throttle_dir = 0\r\n        self.last_yaw_dir = 0\r\n        self.last_pitch_dir = 0\r\n        self.last_roll_dir = 0\r\n\r\n    # ------------------------------------------------------------------ #\r\n    # BaseRCModel API\r\n    # ------------------------------------------------------------------ #\r\n    def update(self, dt, axes):\r\n        self.strategy.update(self, dt, axes)\r\n\r\n    def takeoff(self):\r\n        \"\"\"Initiate takeoff sequence.\"\"\"\r\n        self.takeoff_flag = True\r\n\r\n    def land(self):\r\n        \"\"\"\r\n        Initiate soft landing - gradual descent.\r\n        \r\n        This is 
distinct from emergency_stop() which cuts motors immediately.\r\n        The drone will descend gracefully and land.\r\n        \"\"\"\r\n        self.land_flag = True\r\n\r\n    def emergency_stop(self):\r\n        \"\"\"\r\n        Emergency motor cutoff - immediate stop.\r\n        \r\n        WARNING: This will cause the drone to fall from the sky!\r\n        Use land() for normal landing operations.\r\n        \"\"\"\r\n        self.stop_flag = True\r\n\r\n    def flip(self):\r\n        \"\"\"Execute a 360-degree flip/somersault.\"\"\"\r\n        self.flip_flag = True\r\n\r\n    def toggle_headless(self):\r\n        \"\"\"Toggle headless mode on/off.\"\"\"\r\n        self.headless_flag = not self.headless_flag\r\n\r\n    def calibrate_gyro(self):\r\n        \"\"\"Initiate gyroscope calibration. Drone should be on flat surface.\"\"\"\r\n        self.calibration_flag = True\r\n\r\n    def get_control_state(self):\r\n        return {\r\n            \"throttle\": self.throttle,\r\n            \"yaw\": self.yaw,\r\n            \"pitch\": self.pitch,\r\n            \"roll\": self.roll,\r\n            \"headless\": self.headless_flag,\r\n        }\r\n\r\n    def set_strategy(self, strategy) -> None:\r\n        self.strategy = strategy\r\n\r\n"
  },
  {
    "path": "backend/models/cooingdv_video_model.py",
    "content": "\"\"\"\r\nVideo Model for Cooingdv drones.\r\n\r\nThis model is intentionally simple because cooingdv drones use RTSP streaming,\r\nwhich delivers complete frames directly. Unlike the S2x and WiFi UAV protocols\r\nthat require assembling frames from UDP chunks, RTSP gives us ready-to-use\r\nframes via OpenCV.\r\n\r\nThe model exists primarily for API consistency with other implementations.\r\nMost of the heavy lifting is done in the video protocol adapter.\r\n\"\"\"\r\n\r\nfrom typing import Optional\r\nfrom models.base_video_model import BaseVideoModel\r\nfrom models.video_frame import VideoFrame\r\n\r\n\r\nclass CooingdvVideoModel(BaseVideoModel):\r\n    \"\"\"\r\n    Video model for cooingdv drones using RTSP streaming.\r\n    \r\n    Since RTSP provides complete frames, this model simply wraps the\r\n    incoming data into VideoFrame objects. No assembly or decoding\r\n    is required - OpenCV handles all of that.\r\n    \"\"\"\r\n\r\n    def __init__(self):\r\n        self._frame_counter = 0\r\n\r\n    def ingest_chunk(\r\n        self,\r\n        *,\r\n        stream_id: int | None = None,\r\n        chunk_id: int | None = None,\r\n        payload: bytes,\r\n    ) -> Optional[VideoFrame]:\r\n        \"\"\"\r\n        Process incoming video data.\r\n        \r\n        For RTSP streams, each 'chunk' is actually a complete frame,\r\n        so we simply wrap it in a VideoFrame and return it immediately.\r\n        \r\n        Parameters\r\n        ----------\r\n        stream_id : int | None\r\n            Frame identifier (from RTSP stream)\r\n        chunk_id : int | None\r\n            Not used for RTSP (frames are not chunked)\r\n        payload : bytes\r\n            Complete JPEG frame data\r\n            \r\n        Returns\r\n        -------\r\n        VideoFrame\r\n            Always returns a VideoFrame since RTSP delivers complete frames\r\n        \"\"\"\r\n        self._frame_counter += 1\r\n        \r\n        # Use provided 
stream_id or auto-increment\r\n        frame_id = stream_id if stream_id is not None else self._frame_counter\r\n        \r\n        return VideoFrame(\r\n            frame_id=frame_id,\r\n            data=payload,\r\n        )\r\n\r\n    def reset(self) -> None:\r\n        \"\"\"Reset the model state.\"\"\"\r\n        self._frame_counter = 0\r\n\r\n"
  },
  {
    "path": "backend/models/debug_rc.py",
    "content": "from __future__ import annotations\r\nimport logging\r\nfrom models.base_rc import BaseRCModel\r\nfrom models.stick_range import StickRange\r\n\r\nlog = logging.getLogger(__name__)\r\n\r\nclass DebugRcModel(BaseRCModel):\r\n    \"\"\"Dummy RC model for debugging purposes.\"\"\"\r\n\r\n    def __init__(self):\r\n        # Using a default stick range, can be customized if needed\r\n        super().__init__(stick_range=StickRange(min_val=0, mid_val=128, max_val=255))\r\n        log.info(\"Debug RC model initialized.\")\r\n        self.throttle = self.yaw = self.pitch = self.roll = 128\r\n\r\n    def update(self, dt, axes):\r\n        # This model doesn't need to update state over time, but it must\r\n        # be implemented to satisfy the abstract base class.\r\n        pass\r\n\r\n    def get_control_state(self):\r\n        # Return a dictionary of the current control values.\r\n        # This is what gets sent to the protocol adapter.\r\n        return {\r\n            \"throttle\": self.throttle,\r\n            \"yaw\": self.yaw,\r\n            \"pitch\": self.pitch,\r\n            \"roll\": self.roll,\r\n        }\r\n\r\n    def set_throttle(self, value: int):\r\n        log.debug(f\"Debug: set_throttle({value})\")\r\n        self.throttle = value\r\n\r\n    def set_yaw(self, value: int):\r\n        log.debug(f\"Debug: set_yaw({value})\")\r\n        self.yaw = value\r\n\r\n    def set_pitch(self, value: int):\r\n        log.debug(f\"Debug: set_pitch({value})\")\r\n        self.pitch = value\r\n\r\n    def set_roll(self, value: int):\r\n        log.debug(f\"Debug: set_roll({value})\")\r\n        self.roll = value\r\n\r\n    def takeoff(self):\r\n        log.info(\"Debug: takeoff()\")\r\n\r\n    def land(self):\r\n        log.info(\"Debug: land()\") "
  },
  {
    "path": "backend/models/s2x_rc.py",
    "content": "# S20 & S29 Controller model\n\nfrom models.base_rc import BaseRCModel\nfrom models.control_profile import ControlProfile\nfrom control.strategies import IncrementalStrategy\nfrom models.stick_range import StickRange\n\nclass S2xDroneModel(BaseRCModel):\n    \"\"\"Model for S2x protocol drones (S20, S29)\"\"\"\n    \n    STICK_RANGE = StickRange(60, 128, 200)   # ← tailorable per drone\n\n    PRESETS = {\n        \"normal\":     ControlProfile(\"normal\",     2.08, 4.86, 0.5, 0.02),\n        \"precise\":    ControlProfile(\"precise\",    1.39, 5.56, 0.3, 0.01),\n        \"aggressive\": ControlProfile(\"aggressive\", 4.17, 3.89, 1.5, 0.11),\n    }\n\n    def __init__(self, profile: str | ControlProfile = \"normal\"):\n        # BaseRCModel handles STICK_RANGE, presets and profile application\n        super().__init__(stick_range=self.STICK_RANGE, profile=profile)\n\n        self.strategy = IncrementalStrategy()   # default\n\n        # one-shot flags\n        self.takeoff_flag = False\n        self.land_flag = False\n        self.stop_flag = False\n        self.headless_flag = False\n        self.calibration_flag = False\n\n        # misc\n        self.speed = 20    # matches 0x14 from dumps\n        self.speed_index = 2\n\n        # Track last direction for each axis\n        self.last_throttle_dir = 0\n        self.last_yaw_dir = 0\n        self.last_pitch_dir = 0\n        self.last_roll_dir = 0\n    \n    def update(self, dt, axes):\n        self.strategy.update(self, dt, axes)\n    \n    def takeoff(self):\n        \"\"\"Set takeoff flag\"\"\"\n        self.takeoff_flag = True\n    \n    def land(self):\n        \"\"\"\n        Request the stock app's land action.\n\n        In the HiTurbo app this shares the same one-shot flight bit as takeoff,\n        so the protocol adapter maps the semantic intent rather than a distinct\n        dedicated land bit.\n        \"\"\"\n        self.land_flag = True\n\n    def emergency_stop(self):\n        
\"\"\"Set the emergency stop flag.\"\"\"\n        self.stop_flag = True\n\n    def set_speed_index(self, speed_index: int) -> None:\n        \"\"\"Set the Macrochip app speed tier: 0=low, 1=medium, 2=full.\"\"\"\n        self.speed_index = max(0, min(2, int(speed_index)))\n    \n    def get_control_state(self):\n        \"\"\"Get current control state as a dict\"\"\"\n        return {\n            \"throttle\":  self.throttle,\n            \"yaw\":       self.yaw,\n            \"pitch\":     self.pitch,\n            \"roll\":      self.roll,\n            \"speed_index\": self.speed_index,\n        }\n\n    def set_strategy(self, strategy) -> None:\n        self.strategy = strategy\n"
  },
  {
    "path": "backend/models/s2x_video_model.py",
    "content": "from typing import Dict, Optional\n\nfrom models.video_frame import VideoFrame\nfrom models.base_video_model import BaseVideoModel\n\n\nclass S2xVideoModel(BaseVideoModel):\n    \"\"\"\n    Reassembles sliced JPEG frames used by S2x drones.\n\n    • Ignores the unreliable \"is-last-slice\" flag.\n    • Finishes a frame when the frame-id rolls over.\n    \"\"\"\n\n    SOI_MARKER = b\"\\xFF\\xD8\"\n    EOI_MARKER = b\"\\xFF\\xD9\"\n\n    def __init__(self) -> None:\n        self._cur_fid: Optional[int] = None\n        self._frags: Dict[int, bytes] = {}\n\n    # ──────────────────────────────────────────────────────────\n    # BaseVideoModel interface\n    # ──────────────────────────────────────────────────────────\n    def ingest_chunk(\n        self,\n        *,\n        stream_id: int | None = None,\n        chunk_id: int | None = None,\n        total_chunks: int | None = None,\n        payload: bytes,\n    ) -> Optional[VideoFrame]:\n\n        if stream_id is None or chunk_id is None:\n            return None  # S2x packets always carry both ids\n\n        # frame-id changed? -> finish previous frame. 
Native VNDK emits as soon\n        # as all declared chunks arrive, but this fallback preserves older\n        # rollover-based behavior for captures without a usable chunk count.\n        completed: Optional[VideoFrame] = None\n        if self._cur_fid is None:\n            self._cur_fid = stream_id\n        elif stream_id != self._cur_fid:\n            completed = self._assemble_current()      # may be None\n            self._reset(stream_id)\n\n        # stash slice (ignore duplicates)\n        self._frags.setdefault(chunk_id, payload)\n\n        if completed is not None:\n            return completed\n\n        if total_chunks is not None and 0 < total_chunks <= 100:\n            expected = set(range(total_chunks))\n            if expected.issubset(self._frags):\n                return self._assemble_current(expected)\n\n        return completed\n\n    # ──────────────────────────────────────────────────────────\n    # helpers\n    # ──────────────────────────────────────────────────────────\n    def _reset(self, new_fid: Optional[int]) -> None:\n        self._cur_fid = new_fid\n        self._frags.clear()\n\n    def _assemble_current(self, keys_to_use: set[int] | None = None) -> Optional[VideoFrame]:\n        if not self._frags:\n            return None\n\n        if keys_to_use is None:\n            keys = sorted(self._frags)\n            complete = len(keys) == keys[-1] - keys[0] + 1\n            if not complete:\n                missing = (keys[-1] - keys[0] + 1) - len(keys)\n               # print(f\"[s2x-model] Dropping frame {self._cur_fid}: {missing} slices missing\")\n                return None\n        else:\n            keys = sorted(keys_to_use)\n\n        data = b\"\".join(self._frags[k] for k in keys)\n\n        start = data.find(self.SOI_MARKER)\n        end   = data.rfind(self.EOI_MARKER)\n        if start < 0 or end < 0 or end <= start:\n            #print(f\"[s2x-model] JPEG markers not found on frame {self._cur_fid}\")\n            return 
None\n\n        jpeg = data[start : end + len(self.EOI_MARKER)]\n        #print(f\"[s2x-model] Frame {self._cur_fid} OK \"\n        #      f\"({len(jpeg)} bytes, {len(keys)} slices)\")\n        frame = VideoFrame(self._cur_fid, jpeg, \"jpeg\")\n\n        self._reset(None)          # prepare for next frame\n        return frame"
  },
  {
    "path": "backend/models/stick_range.py",
    "content": "from dataclasses import dataclass\r\n\r\n@dataclass(frozen=True)\r\nclass StickRange:\r\n    \"\"\"Drone-specific raw protocol limits.\"\"\"\r\n    min_val: float   # e.g. 60\r\n    mid_val: float   # e.g. 128\r\n    max_val: float   # e.g. 200"
  },
  {
    "path": "backend/models/video_frame.py",
    "content": "import time\r\n\r\nclass VideoFrame:\r\n    \"\"\"Model representing a video frame from the drone\"\"\"\r\n    \r\n    def __init__(self, frame_id, data, format_type=\"jpeg\", timestamp=None):\r\n        self.frame_id = frame_id\r\n        self.data = data\r\n        self.format = format_type  # jpeg, h264, h265, yuv, etc.\r\n        self.timestamp = timestamp or time.time()\r\n        self.width = None\r\n        self.height = None\r\n        self.size = len(data) if data else 0\r\n        \r\n    def __repr__(self):\r\n        return f\"VideoFrame(id={self.frame_id}, format={self.format}, size={self.size})\" "
  },
  {
    "path": "backend/models/wifi_cam_rc.py",
    "content": "\"\"\"\r\nRC model for WiFi_CAM native UDP drones.\r\n\r\nThe stock app uses the same centered 0x80 stick semantics as the CooingDV\r\nfamily, but sends raw command bytes to a separate UDP command port.\r\n\"\"\"\r\n\r\nfrom __future__ import annotations\r\n\r\nfrom control.strategies import IncrementalStrategy\r\nfrom models.base_rc import BaseRCModel\r\nfrom models.control_profile import ControlProfile\r\nfrom models.stick_range import StickRange\r\n\r\n\r\nclass WifiCamRcModel(BaseRCModel):\r\n    \"\"\"High-level control state for WiFi_CAM drones.\"\"\"\r\n\r\n    STICK_RANGE = StickRange(50, 128, 200)\r\n\r\n    PRESETS = {\r\n        \"normal\": ControlProfile(\"normal\", 2.0, 4.0, 0.5, 0.02),\r\n        \"precise\": ControlProfile(\"precise\", 1.2, 5.0, 0.3, 0.01),\r\n        \"aggressive\": ControlProfile(\"aggressive\", 4.0, 3.0, 1.2, 0.10),\r\n    }\r\n\r\n    def __init__(self, profile: str | ControlProfile = \"normal\") -> None:\r\n        super().__init__(stick_range=self.STICK_RANGE, profile=profile)\r\n        self.strategy = IncrementalStrategy()\r\n\r\n        self.takeoff_flag = False\r\n        self.land_flag = False\r\n        self.stop_flag = False\r\n        self.flip_flag = False\r\n        self.headless_flag = False\r\n        self.altitude_hold_flag = False\r\n        self.calibration_flag = False\r\n\r\n        self.last_throttle_dir = 0\r\n        self.last_yaw_dir = 0\r\n        self.last_pitch_dir = 0\r\n        self.last_roll_dir = 0\r\n\r\n    def update(self, dt, axes):\r\n        self.strategy.update(self, dt, axes)\r\n\r\n    def takeoff(self):\r\n        self.takeoff_flag = True\r\n\r\n    def land(self):\r\n        self.land_flag = True\r\n\r\n    def emergency_stop(self):\r\n        self.stop_flag = True\r\n\r\n    def flip(self):\r\n        self.flip_flag = True\r\n\r\n    def toggle_headless(self):\r\n        self.headless_flag = not self.headless_flag\r\n\r\n    def toggle_altitude_hold(self):\r\n        
self.altitude_hold_flag = not self.altitude_hold_flag\r\n\r\n    def calibrate_gyro(self):\r\n        self.calibration_flag = True\r\n\r\n    def get_control_state(self):\r\n        return {\r\n            \"throttle\": self.throttle,\r\n            \"yaw\": self.yaw,\r\n            \"pitch\": self.pitch,\r\n            \"roll\": self.roll,\r\n            \"headless\": self.headless_flag,\r\n            \"altitude_hold\": self.altitude_hold_flag,\r\n        }\r\n\r\n    def set_strategy(self, strategy) -> None:\r\n        self.strategy = strategy\r\n"
  },
  {
    "path": "backend/models/wifi_uav_rc.py",
    "content": "from __future__ import annotations\r\n\r\nfrom models.base_rc import BaseRCModel\r\nfrom models.control_profile import ControlProfile\r\nfrom models.stick_range import StickRange\r\nfrom control.strategies import IncrementalStrategy\r\n\r\n\r\nclass WifiUavRcModel(BaseRCModel):\r\n    \"\"\"\r\n    RC model for toy drones that use the \"WiFi UAV\" mobile app (E58, LH-X20, …).\r\n\r\n    RC rate needs to be 50 - 80 Hz to work well.\r\n\r\n    Observations from packet captures:\r\n\r\n    • All 4 stick axes sit at 0x7F (127) when centred.\r\n    • Min / max values hover around 0x3F (63) and 0xBF (191).\r\n      That is the default range we expose to the user code, but it can\r\n      be tuned per drone via STICK_RANGE.\r\n    \"\"\"\r\n\r\n    #            min, mid, max\r\n    # STICK_RANGE = StickRange(0, 128, 255)\r\n    STICK_RANGE = StickRange(40, 128, 220)\r\n\r\n    PRESETS = {\r\n        # name         accel   decel  expo  immediate-boost\r\n        \"normal\":     ControlProfile(\"normal\",     2.0, 4.0, 0.5, 0.02),\r\n        \"precise\":    ControlProfile(\"precise\",    1.2, 5.0, 0.3, 0.01),\r\n        \"aggressive\": ControlProfile(\"aggressive\", 4.0, 3.0, 1.2, 0.10),\r\n    }\r\n\r\n    def __init__(self, profile: str | ControlProfile = \"normal\") -> None:\r\n        super().__init__(stick_range=self.STICK_RANGE, profile=profile)\r\n\r\n        self.strategy = IncrementalStrategy()\r\n\r\n        # one-shot flags\r\n        self.takeoff_flag     = False\r\n        self.land_flag        = False\r\n        self.stop_flag        = False\r\n        self.calibration_flag = False\r\n        self.headless_flag    = False\r\n        self.flip_flag        = False\r\n        self.speed_index      = 2\r\n        self.camera_tilt_state = 0\r\n\r\n        # track last motion direction for each axis\r\n        self.last_throttle_dir = 0\r\n        self.last_yaw_dir      = 0\r\n        self.last_pitch_dir    = 0\r\n        self.last_roll_dir     = 
0\r\n\r\n    # ------------------------------------------------------------------ #\r\n    # BaseRCModel API\r\n    # ------------------------------------------------------------------ #\r\n    def update(self, dt, axes):          # type: ignore[override]\r\n        self.strategy.update(self, dt, axes)\r\n\r\n    def takeoff(self):\r\n        self.takeoff_flag = True\r\n\r\n    def land(self):\r\n        \"\"\"Request the app's normal land / descend action.\"\"\"\r\n        self.land_flag = True\r\n\r\n    def emergency_stop(self):\r\n        \"\"\"Immediate motor stop, distinct from the normal land action.\"\"\"\r\n        self.stop_flag = True\r\n\r\n    def flip(self):\r\n        \"\"\"Request a flip / roll action when supported by the active variant.\"\"\"\r\n        self.flip_flag = True\r\n\r\n    def set_speed_index(self, speed_index: int) -> None:\r\n        \"\"\"Set WiFi-UAV app speed tier: 0=30%, 1=60%, 2=100%, 3=25%.\"\"\"\r\n        self.speed_index = max(0, min(3, int(speed_index)))\r\n\r\n    def set_camera_tilt_state(self, tilt_state: int) -> None:\r\n        \"\"\"Set camera tilt command state: 0=neutral, 1/2=opposite tilt directions.\"\"\"\r\n        self.camera_tilt_state = max(0, min(2, int(tilt_state)))\r\n\r\n    # unsupported – always returns 0\r\n    def toggle_record(self):             # type: ignore[override]\r\n        return 0\r\n\r\n    def get_control_state(self):\r\n        return {\r\n            \"throttle\":  self.throttle,\r\n            \"yaw\":       self.yaw,\r\n            \"pitch\":     self.pitch,\r\n            \"roll\":      self.roll,\r\n            \"headless\":  self.headless_flag,\r\n            \"speed_index\": self.speed_index,\r\n            \"camera_tilt\": self.camera_tilt_state,\r\n        }\r\n\r\n    def set_strategy(self, strategy) -> None:\r\n        self.strategy = strategy\r\n"
  },
  {
    "path": "backend/models/wifi_uav_video_model.py",
    "content": "from collections import defaultdict\r\nfrom typing import Dict, Optional\r\n\r\nfrom models.video_frame import VideoFrame\r\nfrom utils.wifi_uav_jpeg import generate_jpeg_headers, EOI\r\n\r\n\r\nclass WifiUavVideoModel:\r\n    \"\"\"\r\n    Re-assembles one JPEG frame from the WiFi-UAV stream.\r\n\r\n    The drone sends many UDP packets per frame.  Every packet contains a\r\n    56-byte proprietary header and a JPEG fragment:\r\n\r\n        • payload[1] == 0x01 … «JPEG» packet marker\r\n        • payload[2] == 0x38 … *not* last fragment\r\n                     != 0x38 … last fragment\r\n        • payload[16:18]      … little-endian frame counter\r\n        • payload[32:34]      … little-endian fragment index\r\n        • payload[56:]        … actual JPEG bytes\r\n    \"\"\"\r\n\r\n    HEADER_LEN = 56\r\n\r\n    def __init__(self, width: int = 640, height: int = 360, num_components: int = 3):\r\n        self._jpeg_header = generate_jpeg_headers(width, height, num_components)\r\n\r\n        self._current_frame_id: Optional[int] = None\r\n        self._expected_fragments: Optional[int] = None\r\n        self._fragments: Dict[int, bytes] = defaultdict(bytes)\r\n\r\n    # --------------------------------------------------------------------- #\r\n    # public API\r\n    # --------------------------------------------------------------------- #\r\n    def ingest_chunk(self, payload: bytes) -> Optional[VideoFrame]:\r\n        \"\"\"\r\n        Returns a complete `VideoFrame` when all fragments of one JPEG\r\n        have been received; otherwise returns None.\r\n        \"\"\"\r\n\r\n        # 1. 
Validate basic markers\r\n        if len(payload) <= self.HEADER_LEN or payload[1] != 0x01:\r\n            return None  # not a JPEG packet – ignore\r\n\r\n        last_fragment = payload[2] != 0x38\r\n        frame_id = int.from_bytes(payload[16:18], \"little\")\r\n        fragment_id = int.from_bytes(payload[32:34], \"little\")\r\n        jpeg_slice = payload[self.HEADER_LEN :]\r\n\r\n        # 2. Start a new frame if necessary\r\n        if self._current_frame_id != frame_id:\r\n            self._reset_state(frame_id)\r\n\r\n        # 3. Store slice\r\n        self._fragments[fragment_id] = jpeg_slice\r\n        if last_fragment:\r\n            self._expected_fragments = fragment_id + 1\r\n\r\n        # 4. Assemble when complete\r\n        if (\r\n            self._expected_fragments is not None\r\n            and len(self._fragments) == self._expected_fragments\r\n        ):\r\n            ordered = (self._fragments[i] for i in range(self._expected_fragments))\r\n            full_jpeg = (\r\n                self._jpeg_header + b\"\".join(ordered) + bytes(EOI)\r\n            )  # ensure immutable bytes\r\n\r\n            # Prepare next frame\r\n            self._reset_state(None)\r\n\r\n            return VideoFrame(frame_id, full_jpeg)\r\n\r\n        return None\r\n\r\n    # ------------------------------------------------------------------ #\r\n    # private helpers\r\n    # ------------------------------------------------------------------ #\r\n    def _reset_state(self, new_frame_id: Optional[int]):\r\n        self._current_frame_id = new_frame_id\r\n        self._expected_fragments = None\r\n        self._fragments.clear()\r\n"
  },
  {
    "path": "backend/plugins/__init__.py",
    "content": ""
  },
  {
    "path": "backend/plugins/base.py",
    "content": "from abc import ABC, abstractmethod\r\nfrom typing import Iterator\r\nfrom services.flight_controller import FlightController\r\nfrom models.video_frame import VideoFrame\r\n\r\nclass Plugin(ABC):\r\n    \"\"\"\r\n    Base class all runtime plug-ins must inherit from.\r\n    `frame_source` is ANY iterator that yields either:\r\n      • backend.models.video_frame.VideoFrame  (format == \"jpeg\"),\r\n      • or an np.ndarray BGR/RGB image.\r\n    \"\"\"\r\n\r\n    def __init__(self,\r\n                 name: str,\r\n                 flight_controller: FlightController,\r\n                 frame_source: Iterator,\r\n                 overlay_queue = None,\r\n                 **kwargs):\r\n        self.name   = name\r\n        self.fc     = flight_controller\r\n        self.frames = frame_source\r\n        self.overlays = overlay_queue\r\n        self.running = False\r\n        # Lifecycle guards: make stop() idempotent and ensure cleanup runs\r\n        # even if subclasses flip `running` directly.\r\n        self._started = False\r\n        self._stopped = False\r\n        self.loop_thread = None\r\n\r\n    def start(self):\r\n        if self.running:\r\n            return\r\n        self.running = True\r\n        self._started = True\r\n        self._stopped = False\r\n        self._on_start()\r\n\r\n    def stop(self):\r\n        # Idempotent stop: allow cleanup to run once even if `running` was\r\n        # already set False by a subclass or background thread.\r\n        if self._stopped:\r\n            return\r\n        self.running = False\r\n        self._stopped = True\r\n        if self._started:\r\n            self._on_stop()\r\n\r\n    @abstractmethod\r\n    def _on_start(self):\r\n        ...\r\n\r\n    def _on_stop(self):\r\n        pass\r\n\r\n    def send_overlay(self, data: list):\r\n        if self.overlays:\r\n            try:\r\n                self.overlays.put_nowait(data)\r\n            except:\r\n                pass "
  },
  {
    "path": "backend/plugins/follow/__init__.py",
    "content": ""
  },
  {
    "path": "backend/plugins/follow/follow_controller.py",
    "content": "class FollowController:\r\n    \"\"\"\r\n    Calculates yaw/pitch commands to keep a target centered and at a stable distance.\r\n    \r\n    Uses constant-rate (bang-bang) control: outputs fixed command values when\r\n    the target is outside the deadzone, zero when inside.\r\n    \"\"\"\r\n\r\n    def __init__(\r\n        self,\r\n        yaw_deadzone: float = 0.15,\r\n        pitch_deadzone: float = 0.02,\r\n        min_box_width: float = 0.30,\r\n        max_box_width: float = 0.80,\r\n        invert_yaw: bool = False,\r\n        invert_pitch: bool = False,\r\n        yaw_speed: float = 20.0,\r\n        pitch_speed: float = 20.0,\r\n    ):\r\n        self.yaw_deadzone = yaw_deadzone\r\n        self.pitch_deadzone = pitch_deadzone\r\n        self.min_box_width = min_box_width\r\n        self.max_box_width = max_box_width\r\n        self.invert_yaw = invert_yaw\r\n        self.invert_pitch = invert_pitch\r\n        self.yaw_speed = min(100.0, max(0.0, yaw_speed))\r\n        self.pitch_speed = min(100.0, max(0.0, pitch_speed))\r\n\r\n    def compute(self, box_center_x: float, box_width: float) -> tuple[float, float]:\r\n        \"\"\"\r\n        Compute yaw and pitch commands.\r\n        \r\n        Args:\r\n            box_center_x: Normalized x position of box center (0.0 = left, 1.0 = right)\r\n            box_width: Normalized width of box (0.0 to 1.0)\r\n        \r\n        Returns:\r\n            (yaw, pitch) commands in range -100 to 100\r\n        \"\"\"\r\n        # Yaw: rotate to center the target horizontally\r\n        yaw = 0.0\r\n        error_x = box_center_x - 0.5\r\n        if abs(error_x) > self.yaw_deadzone:\r\n            yaw = self.yaw_speed if error_x > 0 else -self.yaw_speed\r\n            if self.invert_yaw:\r\n                yaw = -yaw\r\n\r\n        # Pitch: move forward/backward to keep target at desired size\r\n        pitch = 0.0\r\n        if box_width < (self.min_box_width - self.pitch_deadzone):\r\n            
pitch = self.pitch_speed  # too far, move forward\r\n        elif box_width > (self.max_box_width + self.pitch_deadzone):\r\n            pitch = -self.pitch_speed  # too close, move backward\r\n        if self.invert_pitch:\r\n            pitch = -pitch\r\n\r\n        return yaw, pitch\r\n"
  },
  {
    "path": "backend/plugins/follow/follow_plugin.py",
    "content": "import threading\r\nimport cv2\r\nimport numpy as np\r\nfrom ultralytics import YOLO\r\nimport time\r\nimport os\r\nimport logging\r\n\r\nfrom ..base import Plugin\r\nfrom .follow_controller import FollowController\r\nfrom control.strategies import DirectStrategy\r\n\r\nlogger = logging.getLogger(__name__)\r\n\r\n\r\nclass FollowPlugin(Plugin):\r\n    \"\"\"\r\n    Person-following plugin using YOLOv10 for detection.\r\n    Detects people in video frames and sends yaw/pitch commands to keep them centered.\r\n    \"\"\"\r\n\r\n    def _on_start(self):\r\n        # ---- Thread caps for better CPU behavior ----\r\n        try:\r\n            import torch\r\n            torch.set_num_threads(int(os.getenv(\"TORCH_NUM_THREADS\", \"2\")))\r\n            torch.set_num_interop_threads(1)\r\n        except Exception:\r\n            pass\r\n\r\n        try:\r\n            cv2.setNumThreads(1)\r\n        except Exception:\r\n            pass\r\n\r\n        os.environ.setdefault(\"OMP_NUM_THREADS\", \"2\")\r\n        os.environ.setdefault(\"MKL_NUM_THREADS\", \"2\")\r\n\r\n        # ---- Configuration ----\r\n        self.frame_rate = int(os.getenv(\"FOLLOW_FPS\", \"20\"))\r\n        self.img_size = int(os.getenv(\"YOLO_IMG_SIZE\", \"320\"))\r\n        self.confidence = float(os.getenv(\"YOLO_CONFIDENCE\", \"0.65\"))\r\n        self.log_interval = float(os.getenv(\"FOLLOW_LOG_INTERVAL\", \"2.0\"))\r\n\r\n        # ---- Load YOLO model ----\r\n        weights_env = os.getenv(\"YOLO_WEIGHTS\")\r\n        if weights_env and os.path.exists(weights_env):\r\n            weights_path = weights_env\r\n        else:\r\n            repo_root = os.path.abspath(os.path.join(os.path.dirname(__file__), \"..\", \"..\"))\r\n            default_weights = os.path.join(repo_root, \"yolov10n.pt\")\r\n            weights_path = default_weights if os.path.exists(default_weights) else \"yolov10n.pt\"\r\n\r\n        self.model = YOLO(weights_path)\r\n\r\n        # ---- Follow 
controller ----\r\n        self.ctrl = FollowController(\r\n            yaw_deadzone=float(os.getenv(\"FOLLOW_CENTER_DEADZONE\", \"0.15\")),\r\n            pitch_deadzone=float(os.getenv(\"FOLLOW_PITCH_DEADZONE\", \"0.02\")),\r\n            min_box_width=float(os.getenv(\"FOLLOW_MIN_BOX_WIDTH\", \"0.30\")),\r\n            max_box_width=float(os.getenv(\"FOLLOW_MAX_BOX_WIDTH\", \"0.80\")),\r\n            invert_yaw=os.getenv(\"FOLLOW_INVERT_YAW\", \"false\").lower() in (\"1\", \"true\", \"yes\"),\r\n            invert_pitch=os.getenv(\"FOLLOW_INVERT_PITCH\", \"false\").lower() in (\"1\", \"true\", \"yes\"),\r\n            yaw_speed=float(os.getenv(\"FOLLOW_YAW_SPEED\", \"20.0\")),\r\n            pitch_speed=float(os.getenv(\"FOLLOW_PITCH_SPEED\", \"20.0\")),\r\n        )\r\n\r\n        # ---- Set DirectStrategy for follow control ----\r\n        self._prev_strategy = getattr(self.fc.model, \"strategy\", None)\r\n        self._prev_expo = getattr(self.fc.model, \"expo_factor\", None)\r\n\r\n        try:\r\n            self.fc.model.set_strategy(DirectStrategy())\r\n            self.fc.model.expo_factor = 0.0\r\n            logger.info(\"[FollowPlugin] Started with DirectStrategy, expo=0\")\r\n        except Exception as e:\r\n            logger.warning(\"[FollowPlugin] Warning: %s\", e)\r\n\r\n        self.loop_thread = threading.Thread(target=self._loop, daemon=True)\r\n        self.loop_thread.start()\r\n\r\n    def _on_stop(self):\r\n        # Restore previous strategy\r\n        try:\r\n            if self._prev_strategy is not None:\r\n                self.fc.model.set_strategy(self._prev_strategy)\r\n            if self._prev_expo is not None:\r\n                self.fc.model.expo_factor = self._prev_expo\r\n        except Exception:\r\n            pass\r\n\r\n        if self.loop_thread:\r\n            self.loop_thread.join(timeout=1.0)\r\n\r\n    def _loop(self):\r\n        logger.info(\"[FollowPlugin] Loop started. 
Waiting for frames...\")\r\n        frame_interval = 1.0 / self.frame_rate\r\n        last_frame_time = 0\r\n        last_log_time = 0\r\n\r\n        for frame in self.frames:\r\n            # Rate limit\r\n            now = time.time()\r\n            if now - last_frame_time < frame_interval:\r\n                continue\r\n            last_frame_time = now\r\n\r\n            if not self.running:\r\n                break\r\n\r\n            # Decode frame\r\n            if hasattr(frame, \"format\") and frame.format == \"jpeg\":\r\n                img = cv2.imdecode(np.frombuffer(frame.data, np.uint8), cv2.IMREAD_COLOR)\r\n                if img is None:\r\n                    continue\r\n            elif isinstance(frame, np.ndarray):\r\n                img = frame\r\n            else:\r\n                continue\r\n\r\n            # Run YOLO detection\r\n            persons = []\r\n            try:\r\n                results = self.model(\r\n                    img,\r\n                    stream=True,\r\n                    verbose=False,\r\n                    classes=[0],  # person class only\r\n                    imgsz=self.img_size,\r\n                    conf=self.confidence,\r\n                )\r\n                for r in results:\r\n                    for box in r.boxes or []:\r\n                        xyxy = box.xyxy[0].tolist()\r\n                        persons.append(xyxy)\r\n            except Exception:\r\n                continue\r\n\r\n            if not persons:\r\n                # No detection - stop moving and clear overlay\r\n                self.fc.set_axes(throttle=0, yaw=0, pitch=0, roll=0)\r\n                self.send_overlay([])\r\n                continue\r\n\r\n            # Pick largest person (by area)\r\n            x1, y1, x2, y2 = max(persons, key=lambda b: (b[2] - b[0]) * (b[3] - b[1]))\r\n            box_w, box_h = x2 - x1, y2 - y1\r\n            frame_h, frame_w = img.shape[:2]\r\n\r\n            # Send overlay\r\n            
overlay = [\r\n                {\r\n                    \"type\": \"rect\",\r\n                    \"coords\": [x1 / frame_w, y1 / frame_h, x2 / frame_w, y2 / frame_h],\r\n                    \"color\": \"lime\",\r\n                }\r\n            ]\r\n            self.send_overlay(overlay)\r\n\r\n            # Calculate and send commands\r\n            yaw, pitch = self.ctrl.compute(\r\n                box_center_x=(x1 + box_w / 2) / frame_w,\r\n                box_width=box_w / frame_w,\r\n            )\r\n            self.fc.set_axes_from(\"follow\", throttle=0, yaw=yaw / 100.0, pitch=pitch / 100.0, roll=0)\r\n\r\n            # Periodic logging\r\n            if now - last_log_time >= self.log_interval:\r\n                center_offset = ((x1 + box_w / 2) / frame_w - 0.5) * 100\r\n                logger.debug(\r\n                    \"[FollowPlugin] offset=%+.1f%% box=%.1f%% yaw=%.0f pitch=%.0f\",\r\n                    center_offset,\r\n                    box_w / frame_w * 100,\r\n                    yaw,\r\n                    pitch,\r\n                )\r\n                last_log_time = now\r\n\r\n"
  },
  {
    "path": "backend/plugins/follow/test_yolo.py",
    "content": "import cv2\r\nfrom ultralytics import YOLO\r\n\r\ndef main():\r\n    # Load the YOLOv10 model\r\n    model = YOLO(\"yolov10n.pt\")\r\n\r\n    # Open a connection to the default webcam (0)\r\n    cap = cv2.VideoCapture(0)\r\n\r\n    if not cap.isOpened():\r\n        print(\"Error: Could not open webcam.\")\r\n        return\r\n\r\n    while True:\r\n        # Capture frame-by-frame\r\n        ret, frame = cap.read()\r\n        if not ret:\r\n            print(\"Error: Can't receive frame (stream end?). Exiting ...\")\r\n            break\r\n\r\n        # Run YOLOv10 inference on the frame\r\n        results = model(frame, verbose=False)\r\n\r\n        # Draw bounding boxes and labels on the frame\r\n        for r in results:\r\n            for box in r.boxes:\r\n                if box.cls == 0:  # Class for 'person'\r\n                    x1, y1, x2, y2 = map(int, box.xyxy[0])\r\n                    confidence = box.conf[0]\r\n                    \r\n                    # Draw rectangle\r\n                    cv2.rectangle(frame, (x1, y1), (x2, y2), (0, 255, 0), 2)\r\n                    \r\n                    # Prepare label text\r\n                    label = f\"Person: {confidence:.2f}\"\r\n                    \r\n                    # Put label on the frame\r\n                    cv2.putText(frame, label, (x1, y1 - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 2)\r\n\r\n        # Display the resulting frame\r\n        cv2.imshow('YOLOv10 Webcam Test', frame)\r\n\r\n        # Break the loop on 'q' key press\r\n        if cv2.waitKey(1) == ord('q'):\r\n            break\r\n\r\n    # When everything done, release the capture and destroy windows\r\n    cap.release()\r\n    cv2.destroyAllWindows()\r\n\r\nif __name__ == '__main__':\r\n    main()\r\n"
  },
  {
    "path": "backend/plugins/manager.py",
    "content": "import importlib\r\nimport inspect\r\nimport logging\r\nimport pkgutil\r\nimport queue\r\nimport threading\r\nfrom typing import Dict, Iterator, Type\r\nfrom services.flight_controller import FlightController\r\nfrom .base import Plugin\r\n\r\nlogger = logging.getLogger(__name__)\r\n\r\nclass PluginManager:\r\n    def __init__(self,\r\n                 flight_controller: FlightController,\r\n                 frame_queue: queue.Queue,\r\n                 overlay_queue: queue.Queue):\r\n        self._fc      = flight_controller\r\n        self._frames_q  = frame_queue\r\n        self._overlay_q = overlay_queue\r\n        self._registry: Dict[str, Type[Plugin]] = {}\r\n        self._pool: Dict[str, Plugin] = {}\r\n        self._frame_stop_events: Dict[str, threading.Event] = {}\r\n        self._discover_plugins()\r\n\r\n    def available(self) -> list[str]:\r\n        return list(self._registry.keys())\r\n\r\n    def running(self) -> list[str]:\r\n        return list(self._pool.keys())\r\n\r\n    def clear_overlays(self) -> None:\r\n        \"\"\"\r\n        Clears any currently displayed overlays (frontend will render none).\r\n        \"\"\"\r\n        if not self._overlay_q:\r\n            return\r\n        try:\r\n            self._overlay_q.put_nowait([])\r\n        except Exception:\r\n            pass\r\n\r\n    def start(self, name: str) -> bool:\r\n        \"\"\"\r\n        Starts a plugin by name.\r\n\r\n        Returns True if started, False if already running.\r\n        Raises ValueError if the plugin is unknown.\r\n        \"\"\"\r\n        if name not in self._registry:\r\n            raise ValueError(f\"Unknown plugin: {name}\")\r\n        if name in self._pool:\r\n            return False\r\n        \r\n        logger.info(\"[PluginManager] Starting plugin: %s\", name)\r\n        cls = self._registry[name]\r\n\r\n        stop_event = threading.Event()\r\n        self._frame_stop_events[name] = stop_event\r\n\r\n        def 
frame_iterator() -> Iterator:\r\n            \"\"\"\r\n            Yield frames from the shared queue, but remain stoppable.\r\n\r\n            Important: do NOT block indefinitely on Queue.get() or plugin threads\r\n            consuming this iterator can hang forever during shutdown.\r\n            \"\"\"\r\n            while not stop_event.is_set():\r\n                try:\r\n                    # Keep the block bounded so we can observe stop_event.\r\n                    yield self._frames_q.get(timeout=0.2)\r\n                except queue.Empty:\r\n                    continue\r\n\r\n        try:\r\n            # Pass a new, unique generator instance and the overlay queue to the plugin\r\n            inst = cls(name=name,\r\n                       flight_controller=self._fc,\r\n                       frame_source=frame_iterator(),\r\n                       overlay_queue=self._overlay_q)\r\n            inst.start()\r\n            self._pool[name] = inst\r\n            return True\r\n        except Exception as e:\r\n            logger.exception(\"[PluginManager] Error starting plugin %s: %s\", name, e)\r\n            # Ensure we don't leak stop events on failed startup.\r\n            self._frame_stop_events.pop(name, None)\r\n            raise\r\n\r\n    def stop(self, name: str) -> bool:\r\n        \"\"\"\r\n        Stops a plugin by name.\r\n\r\n        Returns True if stopped, False if it wasn't running.\r\n        Raises ValueError if the plugin is unknown.\r\n        \"\"\"\r\n        if name not in self._registry:\r\n            raise ValueError(f\"Unknown plugin: {name}\")\r\n\r\n        inst = self._pool.pop(name, None)\r\n        if not inst:\r\n            return False\r\n\r\n        logger.info(\"[PluginManager] Stopping plugin: %s\", name)\r\n\r\n        # Unblock any plugin thread currently waiting on the frame iterator.\r\n        stop_evt = self._frame_stop_events.pop(name, None)\r\n        if stop_evt:\r\n            stop_evt.set()\r\n\r\n      
  inst.stop()\r\n\r\n        # If we just stopped the last plugin, clear overlays so stale UI doesn't linger.\r\n        if not self._pool:\r\n            self.clear_overlays()\r\n        return True\r\n\r\n    def stop_all(self):\r\n        for name in list(self._pool.keys()):\r\n            self.stop(name)\r\n\r\n    def _discover_plugins(self):\r\n        \"\"\"Finds all Plugin subclasses in the 'plugins' package.\"\"\"\r\n        import plugins\r\n        \r\n        plugin_pkg_path = plugins.__path__\r\n        plugin_pkg_name = plugins.__name__\r\n\r\n        for _, mod_name, _ in pkgutil.walk_packages(path=plugin_pkg_path, prefix=f\"{plugin_pkg_name}.\"):\r\n            module = importlib.import_module(mod_name)\r\n            for _, obj in inspect.getmembers(module, inspect.isclass):\r\n                # Ensure it's a subclass (direct or indirect) of Plugin and not Plugin itself\r\n                if issubclass(obj, Plugin) and obj is not Plugin:\r\n                    self._registry[obj.__name__] = obj\r\n                    logger.debug(\"[PluginManager] Discovered plugin: %s\", obj.__name__)"
  },
  {
    "path": "backend/plugins/test_yaw_plugin.py",
    "content": "import threading\nimport time\nimport os\n\nfrom .base import Plugin\nfrom control.strategies import DirectStrategy\n\n\nclass TestYawPlugin(Plugin):\n    \"\"\"\n    Simple plugin to apply a small absolute yaw deflection using DirectStrategy\n    for a short, fixed duration. Intended for isolating yaw behaviour on S2x.\n\n    Behaviour:\n    - Switch model to DirectStrategy (do NOT change expo)\n    - Apply yaw = TEST_YAW_PCT (default +0.30), others 0\n    - Maintain for TEST_YAW_DURATION_S seconds (default 5s) or until stopped\n    - Then stop sending and restore previous strategy\n    \"\"\"\n\n    def _on_start(self):\n        # Read tunables from environment\n        try:\n            self._yaw_pct = float(os.getenv(\"TEST_YAW_PCT\", \"0.30\"))\n        except Exception:\n            self._yaw_pct = 0.30\n        try:\n            self._duration_s = float(os.getenv(\"TEST_YAW_DURATION_S\", \"5.0\"))\n        except Exception:\n            self._duration_s = 5.0\n\n        # Remember previous strategy to restore on stop\n        self._prev_strategy = getattr(self.fc.model, \"strategy\", None)\n\n        # Force DirectStrategy for absolute mapping; keep expo as-is\n        try:\n            self.fc.model.set_strategy(DirectStrategy())\n            print(f\"[TestYawPlugin] Using DirectStrategy, yaw={self._yaw_pct:+.2f} for {self._duration_s:.1f}s\")\n        except Exception as e:\n            print(f\"[TestYawPlugin] Failed to set DirectStrategy: {e}\")\n\n        # Start worker thread\n        self._thread = threading.Thread(target=self._run, daemon=True)\n        self._thread.start()\n\n    def _on_stop(self):\n        # Stop worker\n        if hasattr(self, \"_thread\") and self._thread:\n            # Let the run loop exit naturally by checking self.running.\n            # If stop() is called from inside the worker thread (auto-stop),\n            # do NOT join ourselves.\n            try:\n                if threading.current_thread() is not 
self._thread:\n                    self._thread.join(timeout=1.0)\n            except RuntimeError:\n                # Defensive: joining current thread raises at runtime.\n                pass\n\n        # Send zeros once\n        try:\n            self.fc.set_axes(throttle=0, yaw=0, pitch=0, roll=0)\n        except Exception:\n            pass\n\n        # Restore previous strategy\n        try:\n            if hasattr(self, \"_prev_strategy\") and self._prev_strategy is not None:\n                self.fc.model.set_strategy(self._prev_strategy)\n        except Exception:\n            pass\n\n    def _run(self):\n        start = time.time()\n        # Send commands at ~30 Hz; FC loop will sample the latest values\n        interval = 1.0 / 30.0\n\n        while self.running:\n            now = time.time()\n            elapsed = now - start\n            if elapsed >= self._duration_s:\n                # Auto-stop after duration\n                try:\n                    print(\"[TestYawPlugin] Duration elapsed; stopping\")\n                finally:\n                    # Route through stop() so _on_stop() runs (zeros + restore strategy)\n                    self.stop()\n                    return\n\n            # Apply yaw deflection only\n            try:\n                self.fc.set_axes_from(\"test_yaw\", throttle=0.0, yaw=self._yaw_pct, pitch=0.0, roll=0.0)\n            except Exception:\n                pass\n\n            time.sleep(interval)\n\n\n"
  },
  {
    "path": "backend/protocols/__init__.py",
    "content": "from .s2x_rc_protocol_adapter import S2xRCProtocolAdapter\r\n\r\n__all__ = ['S2xRCProtocolAdapter']\r\n"
  },
  {
    "path": "backend/protocols/base_protocol_adapter.py",
    "content": "from abc import ABC, abstractmethod\r\n\r\nclass BaseProtocolAdapter(ABC):\r\n    \"\"\"Base abstract class for drone protocol adapters\"\"\"\r\n    \r\n    @abstractmethod\r\n    def build_control_packet(self, drone_model):\r\n        \"\"\"Build a control packet for the specific drone protocol\"\"\"\r\n        pass\r\n        \r\n    @abstractmethod\r\n    def send_control_packet(self, packet):\r\n        \"\"\"Send the control packet to the drone\"\"\"\r\n        pass\r\n        \r\n    @abstractmethod\r\n    def toggle_debug(self):\r\n        \"\"\"Toggle debug packet logging\"\"\"\r\n        pass"
  },
  {
    "path": "backend/protocols/base_video_protocol.py",
    "content": "from abc import ABC, abstractmethod\r\nimport socket\r\nimport threading\r\nfrom typing import Optional\r\n\r\nfrom models.video_frame import VideoFrame\r\n\r\n\r\nclass BaseVideoProtocolAdapter(ABC):\r\n    \"\"\"\r\n    Owns transport (UDP or TCP socket, keep-alives) and converts\r\n    raw payloads into VideoFrame objects via an inner VideoModel.\r\n    \"\"\"\r\n\r\n    def __init__(self, drone_ip: str, control_port: int, video_port: int):\r\n        self.drone_ip = drone_ip\r\n        self.control_port = control_port\r\n        self.video_port = video_port\r\n        self._keepalive_thread: Optional[threading.Thread] = None\r\n\r\n    # ────────── keep-alive helpers ────────── #\r\n    def start_keepalive(self, interval: float = 1.0) -> None:\r\n        if self._keepalive_thread and self._keepalive_thread.is_alive():\r\n            return\r\n\r\n        self._stop_evt = threading.Event()\r\n        self._keepalive_thread = threading.Thread(\r\n            target=self._keepalive_loop,\r\n            args=(interval,),\r\n            daemon=True,\r\n        )\r\n        self._keepalive_thread.start()\r\n\r\n    def stop_keepalive(self) -> None:\r\n        if hasattr(self, \"_stop_evt\"):\r\n            self._stop_evt.set()\r\n        if self._keepalive_thread:\r\n            self._keepalive_thread.join()\r\n\r\n    def _keepalive_loop(self, interval: float) -> None:\r\n        while not self._stop_evt.is_set():\r\n            self.send_start_command()\r\n            self._stop_evt.wait(interval)\r\n\r\n    # ────────── transport helpers ────────── #\r\n    def recv_from_socket(self, sock) -> Optional[bytes]:\r\n        \"\"\"\r\n        Read one payload chunk from `sock`.\r\n\r\n        The default implementation assumes UDP; override for TCP.\r\n        \"\"\"\r\n        try:\r\n            pkt, _ = sock.recvfrom(4096)\r\n            return pkt\r\n        except socket.timeout:\r\n            return None\r\n\r\n    # ────────── abstract API 
────────── #\r\n    @abstractmethod\r\n    def send_start_command(self) -> None:\r\n        \"\"\"Tell the drone to start/continue sending video.\"\"\"\r\n        raise NotImplementedError\r\n\r\n    @abstractmethod\r\n    def create_receiver_socket(self) -> socket.socket:\r\n        \"\"\"Return a configured socket ready for recv().\"\"\"\r\n        raise NotImplementedError\r\n\r\n    @abstractmethod\r\n    def handle_payload(self, payload: bytes) -> Optional[VideoFrame]:\r\n        \"\"\"\r\n        Convert one transport payload into a VideoFrame or return None\r\n        if the frame is not yet complete.\r\n        \"\"\"\r\n        raise NotImplementedError"
  },
  {
    "path": "backend/protocols/cooingdv_jieli_rc_protocol_adapter.py",
    "content": "\"\"\"\r\nRC protocol adapter for the CooingDV/Jieli CTP backend.\r\n\r\nThis is the backend used by KY FPV's Jieli DeviceClient path. It keeps the\r\nfamiliar CooingDV 8-byte TC stick payload, but sends it as decimal BYTE0..BYTE7\r\nfields inside a CTP JSON packet over UDP instead of sending the raw 0x03/0x66\r\ndatagram used by the classic CooingDV apps.\r\n\"\"\"\r\n\r\nfrom __future__ import annotations\r\n\r\nimport logging\r\nimport socket\r\nimport threading\r\nfrom typing import Final, Optional\r\n\r\nfrom models.cooingdv_rc import CooingdvRcModel\r\nfrom protocols.base_protocol_adapter import BaseProtocolAdapter\r\nfrom utils.cooingdv_jieli_ctp import build_ctp_packet\r\n\r\nlogger = logging.getLogger(__name__)\r\n\r\n\r\nclass CooingdvJieliRcProtocolAdapter(BaseProtocolAdapter):\r\n    \"\"\"CooingDV Jieli/CTP UDP adapter used by KY FPV Jieli devices.\"\"\"\r\n\r\n    DEFAULT_DRONE_IP: Final = \"192.168.8.15\"\r\n    DEFAULT_PORT: Final = 2228\r\n    HEARTBEAT_INTERVAL: Final = 1.0\r\n\r\n    START_MARKER: Final = 0x66\r\n    END_MARKER: Final = 0x99\r\n\r\n    TOPIC_CONTROL_MODE: Final = \"CONTROL_MODE\"\r\n    TOPIC_FLYING_CTRL: Final = \"FLYING_CTRL\"\r\n    TOPIC_KEEP_ALIVE: Final = \"CTP_KEEP_ALIVE\"\r\n\r\n    OP_PUT: Final = \"PUT\"\r\n\r\n    FLAG_FAST_FLY: Final = 0x01\r\n    FLAG_FAST_DROP: Final = 0x02\r\n    FLAG_EMERGENCY_STOP: Final = 0x04\r\n    FLAG_CIRCLE_TURN_END: Final = 0x08\r\n    FLAG_HEADLESS: Final = 0x10\r\n    FLAG_UNLOCK_OR_RETURN: Final = 0x20\r\n    FLAG_LIGHT: Final = 0x40\r\n    FLAG_GYRO: Final = 0x80\r\n\r\n    def __init__(\r\n        self,\r\n        drone_ip: str = DEFAULT_DRONE_IP,\r\n        control_port: int = DEFAULT_PORT,\r\n        *,\r\n        bind_port: Optional[int] = DEFAULT_PORT,\r\n    ) -> None:\r\n        self.drone_ip = drone_ip\r\n        self.control_port = control_port\r\n        self.debug_packets = False\r\n        self._pkt_counter = 0\r\n        self._heartbeat_running = False\r\n  
      self._heartbeat_stop = threading.Event()\r\n        self._heartbeat_thread: Optional[threading.Thread] = None\r\n\r\n        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\r\n        # The Android UDPClientImpl binds DatagramSocket(2228). If that port is\r\n        # busy on a development machine, fall back to an ephemeral local port so\r\n        # packet sending still works.\r\n        if bind_port is not None:\r\n            try:\r\n                self.sock.bind((\"\", bind_port))\r\n            except OSError as exc:\r\n                logger.warning(\r\n                    \"[cooingdv-jieli] Could not bind local UDP port %s (%s); using ephemeral port.\",\r\n                    bind_port,\r\n                    exc,\r\n                )\r\n                self.sock.bind((\"\", 0))\r\n        else:\r\n            self.sock.bind((\"\", 0))\r\n\r\n        self.start_heartbeat()\r\n        self._send_control_mode(True)\r\n        logger.info(\r\n            \"[cooingdv-jieli] RC adapter initialized for %s:%s (local %s)\",\r\n            drone_ip,\r\n            control_port,\r\n            self.sock.getsockname()[1],\r\n        )\r\n\r\n    def build_control_packet(self, drone_model: CooingdvRcModel) -> bytes:\r\n        \"\"\"Build a CTP FLYING_CTRL packet from the shared CooingDV model.\"\"\"\r\n        payload = self._build_flying_payload(drone_model)\r\n        self._clear_one_shot_flags(drone_model)\r\n        params = {f\"BYTE{i}\": str(value) for i, value in enumerate(payload)}\r\n        return build_ctp_packet(self.TOPIC_FLYING_CTRL, params)\r\n\r\n    def send_control_packet(self, packet: bytes) -> None:\r\n        try:\r\n            self.sock.sendto(packet, (self.drone_ip, self.control_port))\r\n        except OSError:\r\n            return\r\n\r\n        if self.debug_packets:\r\n            self._pkt_counter += 1\r\n            logger.debug(\r\n                \"[cooingdv-jieli] #%05d %s\",\r\n                
self._pkt_counter,\r\n                packet.hex(\" \"),\r\n            )\r\n\r\n    def toggle_debug(self) -> bool:\r\n        self.debug_packets = not self.debug_packets\r\n        logger.info(\"[cooingdv-jieli] debug %s\", \"ON\" if self.debug_packets else \"OFF\")\r\n        return self.debug_packets\r\n\r\n    def stop(self) -> None:\r\n        self.stop_heartbeat()\r\n        self._send_control_mode(False)\r\n        try:\r\n            self.sock.close()\r\n        except OSError:\r\n            pass\r\n\r\n    def start_heartbeat(self) -> None:\r\n        if self._heartbeat_thread and self._heartbeat_thread.is_alive():\r\n            return\r\n\r\n        self._heartbeat_stop.clear()\r\n        self._heartbeat_running = True\r\n        self._heartbeat_thread = threading.Thread(\r\n            target=self._heartbeat_loop,\r\n            daemon=True,\r\n            name=\"CooingdvJieliHeartbeat\",\r\n        )\r\n        self._heartbeat_thread.start()\r\n\r\n    def stop_heartbeat(self) -> None:\r\n        self._heartbeat_running = False\r\n        self._heartbeat_stop.set()\r\n        if self._heartbeat_thread:\r\n            self._heartbeat_thread.join(timeout=2.0)\r\n            self._heartbeat_thread = None\r\n\r\n    def _heartbeat_loop(self) -> None:\r\n        while self._heartbeat_running and not self._heartbeat_stop.is_set():\r\n            self._send_ctp(self.TOPIC_KEEP_ALIVE, {})\r\n            self._heartbeat_stop.wait(self.HEARTBEAT_INTERVAL)\r\n\r\n    def _send_control_mode(self, enabled: bool) -> None:\r\n        self._send_ctp(self.TOPIC_CONTROL_MODE, {\"state\": \"1\" if enabled else \"0\"})\r\n\r\n    def _send_ctp(self, topic: str, params: dict[str, str]) -> None:\r\n        packet = build_ctp_packet(topic, params)\r\n        try:\r\n            self.sock.sendto(packet, (self.drone_ip, self.control_port))\r\n        except OSError:\r\n            pass\r\n\r\n    def _build_flying_payload(self, model: CooingdvRcModel) -> list[int]:\r\n       
 b1 = self._clamp_axis(model.roll)\r\n        b2 = self._clamp_axis(model.pitch)\r\n        acc = self._clamp_axis(model.throttle)\r\n        if acc == 1:\r\n            acc = 0\r\n        turn = self._clamp_axis(model.yaw)\r\n        flags = self._build_flags(model)\r\n        checksum = (b1 ^ b2 ^ acc ^ turn ^ flags) & 0xFF\r\n        return [self.START_MARKER, b1, b2, acc, turn, flags, checksum, self.END_MARKER]\r\n\r\n    def _build_flags(self, model: CooingdvRcModel) -> int:\r\n        flags = 0\r\n        if model.takeoff_flag:\r\n            flags |= self.FLAG_FAST_FLY\r\n        if model.land_flag:\r\n            flags |= self.FLAG_FAST_DROP\r\n        if model.stop_flag:\r\n            flags |= self.FLAG_EMERGENCY_STOP\r\n        if model.flip_flag:\r\n            flags |= self.FLAG_CIRCLE_TURN_END\r\n        if model.headless_flag:\r\n            flags |= self.FLAG_HEADLESS\r\n        if model.calibration_flag:\r\n            flags |= self.FLAG_GYRO\r\n        return flags & 0xFF\r\n\r\n    def _clear_one_shot_flags(self, model: CooingdvRcModel) -> None:\r\n        model.takeoff_flag = False\r\n        model.land_flag = False\r\n        model.stop_flag = False\r\n        model.flip_flag = False\r\n        model.calibration_flag = False\r\n\r\n    def _clamp_axis(self, value: float) -> int:\r\n        return max(1, min(255, int(value))) & 0xFF\r\n"
  },
  {
    "path": "backend/protocols/cooingdv_jieli_video_protocol.py",
    "content": "\"\"\"\r\nFirst-pass video adapter for the CooingDV/Jieli backend.\r\n\r\nKY FPV's Jieli path requests video with CTP JSON commands and receives an RTP\r\nstream on local UDP ports. This adapter implements the JPEG RTP path, which is\r\nthe safest initial target because the frontend expects JPEG frames.\r\n\"\"\"\r\n\r\nfrom __future__ import annotations\r\n\r\nimport logging\r\nimport queue\r\nimport socket\r\nimport threading\r\nimport time\r\nfrom typing import Final, Optional\r\n\r\nfrom models.video_frame import VideoFrame\r\nfrom protocols.base_video_protocol import BaseVideoProtocolAdapter\r\nfrom utils.cooingdv_jieli_ctp import build_ctp_packet\r\nfrom utils.wifi_uav_jpeg import EOI, generate_jpeg_headers\r\n\r\nlogger = logging.getLogger(__name__)\r\n\r\n\r\nclass CooingdvJieliVideoProtocolAdapter(BaseVideoProtocolAdapter):\r\n    DEFAULT_DRONE_IP: Final = \"192.168.8.15\"\r\n    DEFAULT_CONTROL_PORT: Final = 2228\r\n    DEFAULT_VIDEO_PORT: Final = 6666\r\n    DEFAULT_AUDIO_PORT: Final = 1234\r\n    DEFAULT_SDP_PORT: Final = 6789\r\n\r\n    TOPIC_OPEN_FRONT_RTS: Final = \"OPEN_RT_STREAM\"\r\n    TOPIC_CLOSE_FRONT_RTS: Final = \"CLOSE_RT_STREAM\"\r\n\r\n    FORMAT_JPEG: Final = 0\r\n    FORMAT_H264: Final = 1\r\n\r\n    def __init__(\r\n        self,\r\n        drone_ip: str = DEFAULT_DRONE_IP,\r\n        control_port: int = DEFAULT_CONTROL_PORT,\r\n        video_port: int = DEFAULT_VIDEO_PORT,\r\n        *,\r\n        audio_port: int = DEFAULT_AUDIO_PORT,\r\n        sdp_port: int = DEFAULT_SDP_PORT,\r\n        width: int = 640,\r\n        height: int = 360,\r\n        fps: int = 30,\r\n        debug: bool = False,\r\n    ) -> None:\r\n        super().__init__(drone_ip, control_port, video_port)\r\n        self.audio_port = audio_port\r\n        self.sdp_port = sdp_port\r\n        self.width = width\r\n        self.height = height\r\n        self.fps = fps\r\n        self.debug = debug\r\n\r\n        self._running = threading.Event()\r\n     
   self._rx_thread: Optional[threading.Thread] = None\r\n        self._sdp_thread: Optional[threading.Thread] = None\r\n        self._rx_sock: Optional[socket.socket] = None\r\n        self._sdp_sock: Optional[socket.socket] = None\r\n        self._control_sock: Optional[socket.socket] = None\r\n        self._frame_q: \"queue.Queue[VideoFrame]\" = queue.Queue(maxsize=2)\r\n        self._frame_id = 0\r\n        self._rtp_jpeg_buffers: dict[int, bytearray] = {}\r\n\r\n    def start(self) -> None:\r\n        if self.is_running():\r\n            return\r\n        self._running.set()\r\n        self._control_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\r\n        self._control_sock.bind((\"\", 0))\r\n        self._start_sdp_server()\r\n        self._send_open_stream()\r\n        self._rx_thread = threading.Thread(\r\n            target=self._rx_loop,\r\n            daemon=True,\r\n            name=\"CooingdvJieliVideoRx\",\r\n        )\r\n        self._rx_thread.start()\r\n        logger.info(\r\n            \"[cooingdv-jieli-video] listening for RTP/JPEG on UDP %s; SDP on TCP %s\",\r\n            self.video_port,\r\n            self.sdp_port,\r\n        )\r\n\r\n    def stop(self) -> None:\r\n        self._running.clear()\r\n        self._send_close_stream()\r\n        for sock in (self._rx_sock, self._sdp_sock, self._control_sock):\r\n            if sock:\r\n                try:\r\n                    sock.close()\r\n                except OSError:\r\n                    pass\r\n        if self._rx_thread:\r\n            self._rx_thread.join(timeout=1.0)\r\n        if self._sdp_thread:\r\n            self._sdp_thread.join(timeout=1.0)\r\n\r\n    def is_running(self) -> bool:\r\n        return self._running.is_set() and self._rx_thread is not None and self._rx_thread.is_alive()\r\n\r\n    def get_frame(self, timeout: float = 1.0) -> Optional[VideoFrame]:\r\n        try:\r\n            return self._frame_q.get(timeout=timeout)\r\n        except 
queue.Empty:\r\n            return None\r\n\r\n    def get_packets(self) -> list[bytes]:\r\n        return []\r\n\r\n    def send_start_command(self) -> None:\r\n        self._send_open_stream()\r\n\r\n    def create_receiver_socket(self):\r\n        return None\r\n\r\n    def handle_payload(self, payload: bytes) -> Optional[VideoFrame]:\r\n        self._frame_id = (self._frame_id + 1) & 0xFFFF\r\n        return VideoFrame(self._frame_id, payload, format_type=\"jpeg\")\r\n\r\n    def _send_open_stream(self) -> None:\r\n        self._send_ctp(\r\n            self.TOPIC_OPEN_FRONT_RTS,\r\n            {\r\n                \"format\": str(self.FORMAT_JPEG),\r\n                \"w\": str(self.width),\r\n                \"h\": str(self.height),\r\n                \"fps\": str(self.fps),\r\n            },\r\n        )\r\n\r\n    def _send_close_stream(self) -> None:\r\n        self._send_ctp(self.TOPIC_CLOSE_FRONT_RTS, {\"status\": \"1\"})\r\n\r\n    def _send_ctp(self, topic: str, params: dict[str, str]) -> None:\r\n        sock = self._control_sock\r\n        if sock is None:\r\n            return\r\n        packet = build_ctp_packet(topic, params)\r\n        try:\r\n            sock.sendto(packet, (self.drone_ip, self.control_port))\r\n        except OSError:\r\n            pass\r\n\r\n    def _start_sdp_server(self) -> None:\r\n        self._sdp_thread = threading.Thread(\r\n            target=self._sdp_loop,\r\n            daemon=True,\r\n            name=\"CooingdvJieliSdpServer\",\r\n        )\r\n        self._sdp_thread.start()\r\n\r\n    def _sdp_loop(self) -> None:\r\n        sdp = (\r\n            \"c=IN IP4 127.0.0.1\\n\"\r\n            f\"m=audio {self.audio_port} RTP/AVP 97\\n\"\r\n            \"a=rtpmap:97 L16/8000/1\\n\"\r\n            \"a=ptime:20\\n\"\r\n            f\"m=video {self.video_port} RTP/AVP 26\\n\"\r\n            \"a=rtpmap:26 JPEG/90000\\n\"\r\n            f\"a=framerate:{self.fps}\"\r\n        ).encode(\"utf-8\")\r\n\r\n        try:\r\n     
       sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\r\n            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\r\n            sock.bind((\"\", self.sdp_port))\r\n            sock.listen(1)\r\n            sock.settimeout(0.5)\r\n            self._sdp_sock = sock\r\n        except OSError as exc:\r\n            logger.warning(\"[cooingdv-jieli-video] SDP server unavailable: %s\", exc)\r\n            return\r\n\r\n        while self._running.is_set():\r\n            try:\r\n                conn, _ = sock.accept()\r\n            except socket.timeout:\r\n                continue\r\n            except OSError:\r\n                break\r\n            with conn:\r\n                try:\r\n                    conn.sendall(sdp)\r\n                except OSError:\r\n                    pass\r\n\r\n    def _rx_loop(self) -> None:\r\n        try:\r\n            sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\r\n            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\r\n            sock.bind((\"\", self.video_port))\r\n            sock.settimeout(0.5)\r\n            self._rx_sock = sock\r\n        except OSError as exc:\r\n            logger.warning(\"[cooingdv-jieli-video] RTP socket unavailable: %s\", exc)\r\n            self._running.clear()\r\n            return\r\n\r\n        while self._running.is_set():\r\n            try:\r\n                packet, _ = sock.recvfrom(65535)\r\n            except socket.timeout:\r\n                continue\r\n            except OSError:\r\n                break\r\n\r\n            frame = self._handle_rtp_packet(packet)\r\n            if frame is None:\r\n                continue\r\n            try:\r\n                self._frame_q.put(frame, timeout=0.1)\r\n            except queue.Full:\r\n                try:\r\n                    self._frame_q.get_nowait()\r\n                    self._frame_q.put(frame, timeout=0.1)\r\n                except (queue.Empty, queue.Full):\r\n               
     pass\r\n\r\n    def _handle_rtp_packet(self, packet: bytes) -> Optional[VideoFrame]:\r\n        if len(packet) < 12 or (packet[0] >> 6) != 2:\r\n            return self._handle_possible_raw_jpeg(packet)\r\n\r\n        csrc_count = packet[0] & 0x0F\r\n        marker = bool(packet[1] & 0x80)\r\n        payload_type = packet[1] & 0x7F\r\n        timestamp = int.from_bytes(packet[4:8], \"big\")\r\n        offset = 12 + (csrc_count * 4)\r\n        if packet[0] & 0x10:\r\n            if len(packet) < offset + 4:\r\n                return None\r\n            ext_len = int.from_bytes(packet[offset + 2 : offset + 4], \"big\") * 4\r\n            offset += 4 + ext_len\r\n        payload = packet[offset:]\r\n\r\n        if payload_type == 26:\r\n            return self._handle_rtp_jpeg(timestamp, payload, marker)\r\n        return self._handle_possible_raw_jpeg(payload)\r\n\r\n    def _handle_rtp_jpeg(self, timestamp: int, payload: bytes, marker: bool) -> Optional[VideoFrame]:\r\n        if len(payload) < 8:\r\n            return None\r\n        fragment_offset = int.from_bytes(payload[1:4], \"big\")\r\n        width = payload[6] * 8 or self.width\r\n        height = payload[7] * 8 or self.height\r\n        scan = payload[8:]\r\n\r\n        if fragment_offset == 0:\r\n            self._rtp_jpeg_buffers[timestamp] = bytearray(generate_jpeg_headers(width, height))\r\n\r\n        buf = self._rtp_jpeg_buffers.setdefault(timestamp, bytearray())\r\n        buf.extend(scan)\r\n        if not marker:\r\n            return None\r\n\r\n        data = bytes(buf)\r\n        self._rtp_jpeg_buffers.pop(timestamp, None)\r\n        if not data.endswith(EOI):\r\n            data += bytes(EOI)\r\n        return self.handle_payload(data)\r\n\r\n    def _handle_possible_raw_jpeg(self, payload: bytes) -> Optional[VideoFrame]:\r\n        start = payload.find(b\"\\xff\\xd8\")\r\n        end = payload.rfind(b\"\\xff\\xd9\")\r\n        if start >= 0 and end > start:\r\n            return 
self.handle_payload(payload[start : end + 2])\r\n        if self.debug and payload:\r\n            logger.debug(\"[cooingdv-jieli-video] ignored payload %sB\", len(payload))\r\n        return None\r\n"
  },
  {
    "path": "backend/protocols/cooingdv_rc_protocol_adapter.py",
    "content": "\"\"\"\r\nRC Protocol Adapter for CooingDV drones.\r\n\r\nThe CooingDV apps share a UDP control plane on port 7099, but they do not all\r\nuse the same control packet layout. The apps listen for UDP telemetry, classify\r\nthe drone as either a short \"TC\" packet variant or an extended \"GL\" packet\r\nvariant, and then switch the encoder accordingly.\r\n\r\nShared behavior observed in the apps:\r\n  - Heartbeat {0x01, 0x01} every second\r\n  - RTSP preview at rtsp://192.168.1.1:7070/webcam (handled elsewhere)\r\n  - UDP telemetry on the same socket used for control\r\n\r\nImportant: {0x08, 0x01} is used by the apps when leaving control mode. It is\r\nnot a startup \"init\" packet.\r\n\"\"\"\r\n\r\nimport os\r\nimport socket\r\nimport threading\r\nimport logging\r\nfrom typing import Final, Optional\r\n\r\nfrom models.cooingdv_rc import CooingdvRcModel\r\nfrom protocols.base_protocol_adapter import BaseProtocolAdapter\r\n\r\nlogger = logging.getLogger(__name__)\r\n\r\n\r\nclass CooingdvRcProtocolAdapter(BaseProtocolAdapter):\r\n    \"\"\"Protocol adapter for the CooingDV drone family.\"\"\"\r\n\r\n    DEFAULT_DRONE_IP: Final = \"192.168.1.1\"\r\n    DEFAULT_PORT: Final = 7099\r\n    HEARTBEAT_INTERVAL: Final = 1.0\r\n    RECV_TIMEOUT: Final = 0.25\r\n    RECV_BUFFER_SIZE: Final = 256\r\n\r\n    # Packet markers\r\n    PREFIX: Final = 0x03\r\n    START_MARKER: Final = 0x66\r\n    EXTENDED_MARKER: Final = 0x14\r\n    END_MARKER: Final = 0x99\r\n\r\n    # Discrete UDP commands\r\n    HEARTBEAT_COMMAND: Final = bytes([0x01, 0x01])\r\n    STOP_COMMAND: Final = bytes([0x08, 0x01])\r\n\r\n    # Variant names\r\n    DEVICE_UNKNOWN: Final = \"unknown\"\r\n    DEVICE_TC: Final = \"tc\"\r\n    DEVICE_GL: Final = \"gl\"\r\n\r\n    # TC packet flag bits\r\n    FLAG_TAKEOFF: Final = 0x01\r\n    FLAG_LAND: Final = 0x02\r\n    FLAG_STOP: Final = 0x04\r\n    FLAG_FLIP: Final = 0x08\r\n    FLAG_HEADLESS: Final = 0x10\r\n    FLAG_CALIBRATE: Final = 0x80\r\n\r\n    # 
GL packet flag groups\r\n    GL_FLAG_ONE_KEY_ACTION: Final = 0x01\r\n    GL_FLAG_STOP: Final = 0x02\r\n    GL_FLAG_CALIBRATE: Final = 0x04\r\n    GL_FLAG_FLIP: Final = 0x08\r\n    GL_FLAG_HEADLESS: Final = 0x01\r\n\r\n    # Resolution / capability IDs seen in the CooingDV apps. The apps use the\r\n    # first byte of received UDP telemetry to decide whether the drone belongs\r\n    # to the GL or TC family.\r\n    GL_RESOLUTION_IDS: Final = frozenset(set(range(90, 102)) | {82, 85, 103, 105})\r\n    KNOWN_RESOLUTION_IDS: Final = frozenset({\r\n        3, 5, 9, 11, 12, 19, 20, 21, 23, 24, 26, 27, 29, 30, 31,\r\n        41, 43, 44, 45, 51, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72,\r\n        80, 81, 82, 83, 84, 85, 86, 87, 90, 91, 92, 93, 94, 95, 96,\r\n        97, 98, 99, 100, 101, 103, 105,\r\n    })\r\n\r\n    def __init__(\r\n        self,\r\n        drone_ip: str = DEFAULT_DRONE_IP,\r\n        control_port: int = DEFAULT_PORT,\r\n        variant: Optional[str] = None,\r\n    ) -> None:\r\n        self.drone_ip = drone_ip\r\n        self.control_port = control_port\r\n\r\n        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\r\n        # Match Java DatagramSocket() semantics by binding to an ephemeral port\r\n        # so replies return to the same socket we use for telemetry detection.\r\n        self.sock.bind((\"\", 0))\r\n        self.sock.settimeout(self.RECV_TIMEOUT)\r\n\r\n        self.debug_packets = False\r\n        self._pkt_counter = 0\r\n\r\n        env_variant = os.getenv(\"COOINGDV_VARIANT\")\r\n        self._variant_override = self._normalise_variant(variant or env_variant)\r\n        self._detected_variant: Optional[str] = None\r\n        self.detected_resolution_id: Optional[int] = None\r\n        self.last_rx_packet: bytes = b\"\"\r\n        self.last_rx_addr: Optional[tuple[str, int]] = None\r\n        self._override_mismatch_reported = False\r\n\r\n        # Heartbeat thread\r\n        self._heartbeat_running = False\r\n        
self._heartbeat_thread: Optional[threading.Thread] = None\r\n        self._heartbeat_stop = threading.Event()\r\n\r\n        # Receive thread\r\n        self._receiver_running = False\r\n        self._receiver_thread: Optional[threading.Thread] = None\r\n        self._receiver_stop = threading.Event()\r\n\r\n        self.start_receiver()\r\n        self.start_heartbeat()\r\n\r\n        active_variant = self._active_variant()\r\n        active_label = active_variant.upper()\r\n        if self._variant_override:\r\n            logger.info(\r\n                \"[cooingdv] RC adapter initialized for %s:%s (forced %s, local %s)\",\r\n                drone_ip,\r\n                control_port,\r\n                active_label,\r\n                self.sock.getsockname()[1],\r\n            )\r\n        else:\r\n            logger.info(\r\n                \"[cooingdv] RC adapter initialized for %s:%s (auto-detect, TC fallback, local %s)\",\r\n                drone_ip,\r\n                control_port,\r\n                self.sock.getsockname()[1],\r\n            )\r\n\r\n    def start_heartbeat(self) -> None:\r\n        \"\"\"Start the heartbeat thread to keep the UDP session alive.\"\"\"\r\n        if self._heartbeat_thread and self._heartbeat_thread.is_alive():\r\n            return\r\n\r\n        self._heartbeat_stop.clear()\r\n        self._heartbeat_running = True\r\n        self._heartbeat_thread = threading.Thread(\r\n            target=self._heartbeat_loop,\r\n            daemon=True,\r\n            name=\"CooingdvHeartbeat\",\r\n        )\r\n        self._heartbeat_thread.start()\r\n\r\n    def stop_heartbeat(self) -> None:\r\n        \"\"\"Stop the heartbeat thread.\"\"\"\r\n        self._heartbeat_running = False\r\n        self._heartbeat_stop.set()\r\n        if self._heartbeat_thread:\r\n            self._heartbeat_thread.join(timeout=2.0)\r\n            self._heartbeat_thread = None\r\n\r\n    def _heartbeat_loop(self) -> None:\r\n        \"\"\"Send heartbeat 
packets every HEARTBEAT_INTERVAL seconds.\"\"\"\r\n        while self._heartbeat_running and not self._heartbeat_stop.is_set():\r\n            try:\r\n                self.sock.sendto(self.HEARTBEAT_COMMAND, (self.drone_ip, self.control_port))\r\n                if self.debug_packets:\r\n                    logger.debug(\"[cooingdv] heartbeat sent: %s\", self.HEARTBEAT_COMMAND.hex())\r\n            except OSError:\r\n                pass\r\n            self._heartbeat_stop.wait(self.HEARTBEAT_INTERVAL)\r\n\r\n    def start_receiver(self) -> None:\r\n        \"\"\"Start the telemetry receiver used for TC/GL auto-detection.\"\"\"\r\n        if self._receiver_thread and self._receiver_thread.is_alive():\r\n            return\r\n\r\n        self._receiver_stop.clear()\r\n        self._receiver_running = True\r\n        self._receiver_thread = threading.Thread(\r\n            target=self._receiver_loop,\r\n            daemon=True,\r\n            name=\"CooingdvUdpRx\",\r\n        )\r\n        self._receiver_thread.start()\r\n\r\n    def stop_receiver(self) -> None:\r\n        \"\"\"Stop the telemetry receiver thread.\"\"\"\r\n        self._receiver_running = False\r\n        self._receiver_stop.set()\r\n        if self._receiver_thread:\r\n            self._receiver_thread.join(timeout=2.0)\r\n            self._receiver_thread = None\r\n\r\n    def _receiver_loop(self) -> None:\r\n        while self._receiver_running and not self._receiver_stop.is_set():\r\n            try:\r\n                packet, addr = self.sock.recvfrom(self.RECV_BUFFER_SIZE)\r\n            except socket.timeout:\r\n                continue\r\n            except OSError:\r\n                if self._receiver_stop.is_set():\r\n                    break\r\n                continue\r\n\r\n            if not packet:\r\n                continue\r\n\r\n            self.last_rx_packet = packet\r\n            self.last_rx_addr = addr\r\n            self._process_received_packet(packet, addr)\r\n\r\n    def 
_process_received_packet(self, packet: bytes, addr: tuple[str, int]) -> None:\r\n        \"\"\"Track UDP telemetry and auto-detect the control variant.\"\"\"\r\n        if self.debug_packets:\r\n            logger.debug(\r\n                \"[cooingdv] udp recv %sB from %s:%s: %s\",\r\n                len(packet),\r\n                addr[0],\r\n                addr[1],\r\n                packet.hex(),\r\n            )\r\n\r\n        variant = self._detect_variant(packet)\r\n        if variant is None:\r\n            return\r\n\r\n        resolution_id = packet[0]\r\n        if resolution_id in self.KNOWN_RESOLUTION_IDS:\r\n            self.detected_resolution_id = resolution_id\r\n\r\n        if self._variant_override:\r\n            if variant != self._variant_override and not self._override_mismatch_reported:\r\n                logger.warning(\r\n                    \"[cooingdv] Ignoring auto-detected %s telemetry; using override %s.\",\r\n                    variant.upper(),\r\n                    self._variant_override.upper(),\r\n                )\r\n                self._override_mismatch_reported = True\r\n            return\r\n\r\n        if variant == self._detected_variant:\r\n            return\r\n\r\n        self._detected_variant = variant\r\n        resolution_text = (\r\n            f\" resolution-id={self.detected_resolution_id}\"\r\n            if self.detected_resolution_id is not None\r\n            else \"\"\r\n        )\r\n        logger.info(\"[cooingdv] Auto-detected %s control variant.%s\", variant.upper(), resolution_text)\r\n\r\n    def stop(self) -> None:\r\n        \"\"\"Clean shutdown of the adapter.\"\"\"\r\n        self.stop_heartbeat()\r\n        try:\r\n            self.sock.sendto(self.STOP_COMMAND, (self.drone_ip, self.control_port))\r\n        except OSError:\r\n            pass\r\n        self.stop_receiver()\r\n        try:\r\n            self.sock.close()\r\n        except Exception:\r\n            pass\r\n\r\n    # 
------------------------------------------------------------------ #\r\n    # BaseProtocolAdapter interface\r\n    # ------------------------------------------------------------------ #\r\n    def build_control_packet(self, drone_model: CooingdvRcModel) -> bytes:\r\n        \"\"\"Build a control packet for the detected CooingDV variant.\"\"\"\r\n        variant = self._active_variant()\r\n        if variant == self.DEVICE_GL:\r\n            packet = self._build_gl_control_packet(drone_model)\r\n        else:\r\n            packet = self._build_tc_control_packet(drone_model)\r\n\r\n        self._clear_one_shot_flags(drone_model)\r\n        return packet\r\n\r\n    def send_control_packet(self, packet: bytes) -> None:\r\n        \"\"\"Send a control packet to the drone via UDP.\"\"\"\r\n        try:\r\n            self.sock.sendto(packet, (self.drone_ip, self.control_port))\r\n        except OSError:\r\n            return\r\n\r\n        if self.debug_packets:\r\n            self._pkt_counter += 1\r\n            variant = self._active_variant().upper()\r\n            hex_dump = \" \".join(f\"{b:02x}\" for b in packet)\r\n            logger.debug(\"[cooingdv] #%05d [%s] %s\", self._pkt_counter, variant, hex_dump)\r\n\r\n            if len(packet) == 9:\r\n                roll, pitch, throttle, yaw = packet[2], packet[3], packet[4], packet[5]\r\n                flags = packet[6]\r\n                logger.debug(\"  Controls: R:%s P:%s T:%s Y:%s\", roll, pitch, throttle, yaw)\r\n                flag_names = self._decode_tc_flags(flags)\r\n                if flag_names:\r\n                    logger.debug(\"  Flags: %s\", \", \".join(flag_names))\r\n            elif len(packet) == 21:\r\n                roll, pitch, throttle, yaw = packet[3], packet[4], packet[5], packet[6]\r\n                flags1, flags2 = packet[7], packet[8]\r\n                logger.debug(\"  Controls: R:%s P:%s T:%s Y:%s\", roll, pitch, throttle, yaw)\r\n                flag_names = 
self._decode_gl_flags(flags1, flags2)\r\n                if flag_names:\r\n                    logger.debug(\"  Flags: %s\", \", \".join(flag_names))\r\n\r\n    def toggle_debug(self) -> bool:\r\n        \"\"\"Toggle debug packet logging on/off.\"\"\"\r\n        self.debug_packets = not self.debug_packets\r\n        state = \"ON\" if self.debug_packets else \"OFF\"\r\n        logger.info(\"[cooingdv] debug %s\", state)\r\n        return self.debug_packets\r\n\r\n    # ------------------------------------------------------------------ #\r\n    # Helper methods\r\n    # ------------------------------------------------------------------ #\r\n    def _active_variant(self) -> str:\r\n        \"\"\"Return the currently active control variant.\"\"\"\r\n        if self._variant_override:\r\n            return self._variant_override\r\n        if self._detected_variant:\r\n            return self._detected_variant\r\n        return self.DEVICE_TC\r\n\r\n    def _normalise_variant(self, value: Optional[str]) -> Optional[str]:\r\n        if value is None:\r\n            return None\r\n\r\n        raw = value.strip().lower()\r\n        if raw in (\"\", \"auto\", \"detect\", \"autodetect\"):\r\n            return None\r\n        if raw in (\"tc\", \"e88\", \"short\"):\r\n            return self.DEVICE_TC\r\n        if raw in (\"gl\", \"flow\", \"extended\"):\r\n            return self.DEVICE_GL\r\n\r\n        logger.warning(\"[cooingdv] Unknown variant override '%s', using auto-detect.\", value)\r\n        return None\r\n\r\n    def _detect_variant(self, packet: bytes) -> Optional[str]:\r\n        \"\"\"Infer the control family from the first telemetry byte.\"\"\"\r\n        first_byte = packet[0]\r\n        if first_byte in self.GL_RESOLUTION_IDS:\r\n            return self.DEVICE_GL\r\n        if first_byte in self.KNOWN_RESOLUTION_IDS:\r\n            return self.DEVICE_TC\r\n        return None\r\n\r\n    def _build_tc_control_packet(self, model: CooingdvRcModel) -> 
bytes:\r\n        pkt = bytearray(9)\r\n        pkt[0] = self.PREFIX\r\n        pkt[1] = self.START_MARKER\r\n        pkt[2] = self._clamp_axis(model.roll)\r\n        pkt[3] = self._clamp_axis(model.pitch)\r\n        pkt[4] = self._clamp_axis(model.throttle)\r\n        pkt[5] = self._clamp_axis(model.yaw)\r\n        pkt[6] = self._build_tc_flags(model)\r\n        pkt[7] = self._calculate_checksum(pkt[2:7])\r\n        pkt[8] = self.END_MARKER\r\n        return bytes(pkt)\r\n\r\n    def _build_gl_control_packet(self, model: CooingdvRcModel) -> bytes:\r\n        roll = self._clamp_axis(model.roll)\r\n        pitch = self._clamp_axis(model.pitch)\r\n        throttle = self._clamp_axis(model.throttle)\r\n        yaw = self._clamp_axis(model.yaw)\r\n        flags1, flags2 = self._build_gl_flags(model)\r\n\r\n        inner = bytearray(20)\r\n        inner[0] = self.START_MARKER\r\n        inner[1] = self.EXTENDED_MARKER\r\n        inner[2] = roll\r\n        inner[3] = pitch\r\n        inner[4] = throttle\r\n        inner[5] = yaw\r\n        inner[6] = flags1\r\n        inner[7] = flags2\r\n        inner[18] = self._calculate_checksum(\r\n            bytes([roll, pitch, throttle, yaw, flags1, flags2])\r\n        )\r\n        inner[19] = self.END_MARKER\r\n\r\n        return bytes([self.PREFIX]) + bytes(inner)\r\n\r\n    def _clamp_axis(self, value: float) -> int:\r\n        \"\"\"Clamp axis values into the byte range used by the apps.\"\"\"\r\n        clamped = max(1, min(255, int(value)))\r\n        return clamped & 0xFF\r\n\r\n    def _build_tc_flags(self, model: CooingdvRcModel) -> int:\r\n        flags = 0\r\n\r\n        if model.takeoff_flag:\r\n            flags |= self.FLAG_TAKEOFF\r\n        if model.land_flag:\r\n            flags |= self.FLAG_LAND\r\n        if model.stop_flag:\r\n            flags |= self.FLAG_STOP\r\n        if model.flip_flag:\r\n            flags |= self.FLAG_FLIP\r\n        if model.headless_flag:\r\n            flags |= 
self.FLAG_HEADLESS\r\n        if model.calibration_flag:\r\n            flags |= self.FLAG_CALIBRATE\r\n\r\n        return flags & 0xFF\r\n\r\n    def _build_gl_flags(self, model: CooingdvRcModel) -> tuple[int, int]:\r\n        \"\"\"\r\n        Build the extended GL flag groups.\r\n\r\n        In the decompiled apps, the GL family uses two flag bytes and does not\r\n        expose separate takeoff/land bits. The one-key action bit is reused for\r\n        either button, so we mirror that behavior here.\r\n        \"\"\"\r\n        flags1 = 0\r\n        flags2 = 0\r\n\r\n        if model.takeoff_flag or model.land_flag:\r\n            flags1 |= self.GL_FLAG_ONE_KEY_ACTION\r\n        if model.stop_flag:\r\n            flags1 |= self.GL_FLAG_STOP\r\n        if model.calibration_flag:\r\n            flags1 |= self.GL_FLAG_CALIBRATE\r\n        if model.flip_flag:\r\n            flags1 |= self.GL_FLAG_FLIP\r\n        if model.headless_flag:\r\n            flags2 |= self.GL_FLAG_HEADLESS\r\n\r\n        return flags1 & 0xFF, flags2 & 0xFF\r\n\r\n    def _clear_one_shot_flags(self, model: CooingdvRcModel) -> None:\r\n        model.takeoff_flag = False\r\n        model.land_flag = False\r\n        model.stop_flag = False\r\n        model.flip_flag = False\r\n        model.calibration_flag = False\r\n\r\n    def _decode_tc_flags(self, flags: int) -> list[str]:\r\n        names: list[str] = []\r\n        if flags & self.FLAG_TAKEOFF:\r\n            names.append(\"TAKEOFF\")\r\n        if flags & self.FLAG_LAND:\r\n            names.append(\"LAND\")\r\n        if flags & self.FLAG_STOP:\r\n            names.append(\"STOP\")\r\n        if flags & self.FLAG_FLIP:\r\n            names.append(\"FLIP\")\r\n        if flags & self.FLAG_HEADLESS:\r\n            names.append(\"HEADLESS\")\r\n        if flags & self.FLAG_CALIBRATE:\r\n            names.append(\"CALIBRATE\")\r\n        return names\r\n\r\n    def _decode_gl_flags(self, flags1: int, flags2: int) -> list[str]:\r\n        
names: list[str] = []\r\n        if flags1 & self.GL_FLAG_ONE_KEY_ACTION:\r\n            names.append(\"ONE_KEY\")\r\n        if flags1 & self.GL_FLAG_STOP:\r\n            names.append(\"STOP\")\r\n        if flags1 & self.GL_FLAG_CALIBRATE:\r\n            names.append(\"CALIBRATE\")\r\n        if flags1 & self.GL_FLAG_FLIP:\r\n            names.append(\"FLIP\")\r\n        if flags2 & self.GL_FLAG_HEADLESS:\r\n            names.append(\"HEADLESS\")\r\n        return names\r\n\r\n    def _calculate_checksum(self, data: bytes) -> int:\r\n        checksum = 0\r\n        for b in data:\r\n            checksum ^= b\r\n        return checksum & 0xFF\r\n\r\n"
  },
  {
    "path": "backend/protocols/cooingdv_video_protocol.py",
    "content": "\"\"\"\r\nVideo Protocol Adapter for Cooingdv drones.\r\n\r\nUses RTSP streaming instead of custom UDP protocol - much simpler than \r\nthe S2x and WiFi UAV implementations.\r\n\r\nThe drone broadcasts video at: rtsp://192.168.1.1:7070/webcam\r\n\r\nThis adapter uses OpenCV's VideoCapture to connect to the RTSP stream\r\nand provides frames through the standard turbodrone interface.\r\n\"\"\"\r\n\r\nimport logging\r\nimport cv2\r\nimport queue\r\nimport threading\r\nimport time\r\nfrom typing import Final, Optional, List\r\n\r\nfrom models.cooingdv_video_model import CooingdvVideoModel\r\nfrom models.video_frame import VideoFrame\r\nfrom protocols.base_video_protocol import BaseVideoProtocolAdapter\r\n\r\nlogger = logging.getLogger(__name__)\r\n\r\n\r\nclass CooingdvVideoProtocolAdapter(BaseVideoProtocolAdapter):\r\n    \"\"\"\r\n    Video protocol adapter for cooingdv drones using RTSP streaming.\r\n    \r\n    Unlike the UDP-based adapters for S2x and WiFi UAV drones, this adapter\r\n    connects to a standard RTSP stream, making it much simpler and more\r\n    reliable.\r\n    \r\n    Features:\r\n    - Automatic reconnection on stream failure\r\n    - Frame rate limiting to prevent buffer overflow\r\n    - Thread-safe frame queue\r\n    \"\"\"\r\n\r\n    DEFAULT_DRONE_IP: Final = \"192.168.1.1\"\r\n    DEFAULT_RTSP_PORT: Final = 7070\r\n    DEFAULT_CONTROL_PORT: Final = 7099\r\n    \r\n    RTSP_PATH: Final = \"/webcam\"\r\n    \r\n    # Reconnection settings\r\n    RECONNECT_DELAY: Final = 2.0  # seconds\r\n    MAX_RECONNECT_ATTEMPTS: Final = 10\r\n    \r\n    # Frame capture settings\r\n    FRAME_TIMEOUT: Final = 5.0  # seconds without frame triggers reconnect\r\n    READ_FAILURE_BACKOFF: Final = 0.05  # seconds between failed reads\r\n\r\n    def __init__(\r\n        self,\r\n        drone_ip: str = DEFAULT_DRONE_IP,\r\n        control_port: int = DEFAULT_CONTROL_PORT,\r\n        video_port: int = DEFAULT_RTSP_PORT,\r\n        *,\r\n        
debug: bool = False,\r\n    ):\r\n        super().__init__(drone_ip, control_port, video_port)\r\n        self.model = CooingdvVideoModel()\r\n        \r\n        self.debug = debug or logger.isEnabledFor(logging.DEBUG)\r\n        self._dbg = logger.debug if self.debug else (lambda *a, **k: None)\r\n        \r\n        # Build RTSP URL\r\n        self.rtsp_url = f\"rtsp://{drone_ip}:{video_port}{self.RTSP_PATH}\"\r\n        self._dbg(f\"[cooingdv-video] RTSP URL: {self.rtsp_url}\")\r\n        \r\n        # OpenCV capture object\r\n        self._cap: Optional[cv2.VideoCapture] = None\r\n        self._cap_lock = threading.Lock()\r\n        \r\n        # Threading\r\n        self._running = False\r\n        self._rx_thread: Optional[threading.Thread] = None\r\n        self._frame_q: \"queue.Queue[VideoFrame]\" = queue.Queue(maxsize=2)\r\n        \r\n        # Packet buffer for compatibility with existing interface\r\n        self._pkt_lock = threading.Lock()\r\n        self._pkt_buffer: List[bytes] = []\r\n        \r\n        # Statistics\r\n        self.frames_ok = 0\r\n        self.frames_dropped = 0\r\n        self.reconnect_count = 0\r\n        self._last_frame_time = time.time()\r\n\r\n    # ------------------------------------------------------------------ #\r\n    # RTSP Connection Management\r\n    # ------------------------------------------------------------------ #\r\n    def _open_stream(self) -> bool:\r\n        \"\"\"\r\n        Open the RTSP stream. 
Returns True on success.\r\n        \r\n        Uses specific OpenCV settings optimized for RTSP:\r\n        - CAP_FFMPEG backend for better RTSP support\r\n        - Low buffer size to reduce latency\r\n        \"\"\"\r\n        with self._cap_lock:\r\n            if self._cap is not None:\r\n                self._cap.release()\r\n            \r\n            self._dbg(f\"[cooingdv-video] Opening RTSP stream: {self.rtsp_url}\")\r\n            \r\n            # Create capture with FFMPEG backend for better RTSP handling\r\n            self._cap = cv2.VideoCapture(self.rtsp_url, cv2.CAP_FFMPEG)\r\n            \r\n            # Configure for low latency\r\n            self._cap.set(cv2.CAP_PROP_BUFFERSIZE, 1)\r\n            \r\n            if self._cap.isOpened():\r\n                self._dbg(\"[cooingdv-video] Stream opened successfully\")\r\n                self._last_frame_time = time.time()\r\n                return True\r\n            else:\r\n                self._dbg(\"[cooingdv-video] Failed to open stream\")\r\n                self._cap = None\r\n                return False\r\n\r\n    def _close_stream(self) -> None:\r\n        \"\"\"Close the RTSP stream.\"\"\"\r\n        with self._cap_lock:\r\n            if self._cap is not None:\r\n                self._cap.release()\r\n                self._cap = None\r\n                self._dbg(\"[cooingdv-video] Stream closed\")\r\n\r\n    def _reconnect(self) -> bool:\r\n        \"\"\"Attempt to reconnect to the stream.\"\"\"\r\n        self.reconnect_count += 1\r\n        self._dbg(f\"[cooingdv-video] Reconnection attempt #{self.reconnect_count}\")\r\n        \r\n        self._close_stream()\r\n        time.sleep(self.RECONNECT_DELAY)\r\n        \r\n        for attempt in range(self.MAX_RECONNECT_ATTEMPTS):\r\n            if not self._running:\r\n                return False\r\n            \r\n            if self._open_stream():\r\n                return True\r\n            \r\n            
self._dbg(f\"[cooingdv-video] Reconnect attempt {attempt + 1}/{self.MAX_RECONNECT_ATTEMPTS} failed\")\r\n            time.sleep(self.RECONNECT_DELAY)\r\n        \r\n        return False\r\n\r\n    # ------------------------------------------------------------------ #\r\n    # BaseVideoProtocolAdapter interface\r\n    # ------------------------------------------------------------------ #\r\n    def send_start_command(self) -> None:\r\n        \"\"\"\r\n        For RTSP, we don't need to send a start command.\r\n        The stream starts when we connect.\r\n        \"\"\"\r\n        pass\r\n\r\n    def create_receiver_socket(self):\r\n        \"\"\"\r\n        Not used for RTSP - OpenCV handles the connection.\r\n        Returns None for compatibility.\r\n        \"\"\"\r\n        return None\r\n\r\n    def handle_payload(self, payload: bytes) -> Optional[VideoFrame]:\r\n        \"\"\"\r\n        Wrap one encoded RTSP frame using the shared video-model interface.\r\n        \"\"\"\r\n        return self.model.ingest_chunk(payload=payload)\r\n\r\n    # ------------------------------------------------------------------ #\r\n    # Receiver Thread API (matches existing interface)\r\n    # ------------------------------------------------------------------ #\r\n    def start(self) -> None:\r\n        \"\"\"Start the video receiver thread.\"\"\"\r\n        if self._rx_thread and self._rx_thread.is_alive():\r\n            return\r\n        \r\n        self._running = True\r\n        self._frame_q = queue.Queue(maxsize=2)\r\n        \r\n        # Open initial connection\r\n        if not self._open_stream():\r\n            self._dbg(\"[cooingdv-video] Warning: Could not open stream on start\")\r\n        \r\n        # Start receiver thread\r\n        self._rx_thread = threading.Thread(\r\n            target=self._rx_loop,\r\n            daemon=True,\r\n            name=\"CooingdvVideoRx\",\r\n        )\r\n        self._rx_thread.start()\r\n        self._dbg(\"[cooingdv-video] 
Receiver thread started\")\r\n\r\n    def _rx_loop(self) -> None:\r\n        \"\"\"\r\n        Main receiver loop - reads frames from RTSP stream.\r\n        \"\"\"\r\n        while self._running:\r\n            # Check if we have a valid capture\r\n            with self._cap_lock:\r\n                cap = self._cap\r\n            \r\n            if cap is None or not cap.isOpened():\r\n                if not self._reconnect():\r\n                    self._dbg(\"[cooingdv-video] Failed to reconnect, retrying...\")\r\n                    time.sleep(self.RECONNECT_DELAY)\r\n                continue\r\n            \r\n            # Try to read a frame\r\n            try:\r\n                ret, frame = cap.read()\r\n                \r\n                if not ret or frame is None:\r\n                    # Check for timeout\r\n                    if time.time() - self._last_frame_time > self.FRAME_TIMEOUT:\r\n                        self._dbg(\"[cooingdv-video] Frame timeout, reconnecting...\")\r\n                        self._reconnect()\r\n                    else:\r\n                        time.sleep(self.READ_FAILURE_BACKOFF)\r\n                    continue\r\n                \r\n                self._last_frame_time = time.time()\r\n                \r\n                # Encode as JPEG (OpenCV handles BGR→YCbCr conversion internally)\r\n                encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 85]\r\n                _, jpeg_data = cv2.imencode('.jpg', frame, encode_param)\r\n                \r\n                video_frame = self.handle_payload(jpeg_data.tobytes())\r\n                if video_frame is None:\r\n                    continue\r\n\r\n                self.frames_ok += 1\r\n                \r\n                # Add to queue (drop if full)\r\n                try:\r\n                    self._frame_q.put(video_frame, timeout=0.1)\r\n                except queue.Full:\r\n                    self.frames_dropped += 1\r\n                    # Drop oldest 
frame and add new one\r\n                    try:\r\n                        self._frame_q.get_nowait()\r\n                        self._frame_q.put(video_frame, timeout=0.1)\r\n                    except (queue.Empty, queue.Full):\r\n                        pass\r\n                \r\n            except cv2.error as e:\r\n                self._dbg(f\"[cooingdv-video] OpenCV error: {e}\")\r\n                self._reconnect()\r\n            except Exception as e:\r\n                self._dbg(f\"[cooingdv-video] Unexpected error: {e}\")\r\n                time.sleep(0.1)\r\n\r\n    def stop(self) -> None:\r\n        \"\"\"Stop the video receiver and clean up.\"\"\"\r\n        self._dbg(\"[cooingdv-video] Stopping...\")\r\n        self._running = False\r\n        \r\n        # Wait for thread to finish\r\n        if self._rx_thread and self._rx_thread.is_alive():\r\n            self._rx_thread.join(timeout=2.0)\r\n        \r\n        # Close stream\r\n        self._close_stream()\r\n        \r\n        self._dbg(\r\n            f\"[cooingdv-video] Stopped. 
Stats: \"\r\n            f\"frames_ok={self.frames_ok}, \"\r\n            f\"frames_dropped={self.frames_dropped}, \"\r\n            f\"reconnects={self.reconnect_count}\"\r\n        )\r\n\r\n    def is_running(self) -> bool:\r\n        \"\"\"Check if the receiver is running.\"\"\"\r\n        return self._running and self._rx_thread is not None and self._rx_thread.is_alive()\r\n\r\n    def get_frame(self, timeout: float = 1.0) -> Optional[VideoFrame]:\r\n        \"\"\"Get the next available frame from the queue.\"\"\"\r\n        try:\r\n            return self._frame_q.get(timeout=timeout)\r\n        except queue.Empty:\r\n            return None\r\n\r\n    def get_packets(self) -> List[bytes]:\r\n        \"\"\"\r\n        Get raw packets - not really applicable for RTSP.\r\n        Returns empty list for compatibility.\r\n        \"\"\"\r\n        return []\r\n\r\n    # ------------------------------------------------------------------ #\r\n    # Keep-alive (not needed for RTSP, but required by base class)\r\n    # ------------------------------------------------------------------ #\r\n    def start_keepalive(self, interval: float = 1.0) -> None:\r\n        \"\"\"RTSP doesn't need keep-alive packets.\"\"\"\r\n        pass\r\n\r\n    def stop_keepalive(self) -> None:\r\n        \"\"\"RTSP doesn't need keep-alive packets.\"\"\"\r\n        pass\r\n\r\n"
  },
  {
    "path": "backend/protocols/debug_rc_protocol_adapter.py",
    "content": "import logging\r\n\r\nlog = logging.getLogger(__name__)\r\n\r\nclass DebugRcProtocolAdapter:\r\n    \"\"\"Dummy RC protocol adapter for debugging.\"\"\"\r\n\r\n    def __init__(self):\r\n        log.info(\"Debug RC protocol adapter initialized.\")\r\n\r\n    def send_control_data(self, data: bytes):\r\n        log.debug(f\"Debug: send_control_data({data.hex() if isinstance(data, bytes) else data})\") "
  },
  {
    "path": "backend/protocols/debug_video_protocol.py",
    "content": "import cv2\r\nimport logging\r\nimport time\r\nimport threading\r\nimport queue\r\nfrom typing import Optional, List\r\n\r\nfrom models.video_frame import VideoFrame\r\nfrom protocols.base_video_protocol import BaseVideoProtocolAdapter\r\nfrom utils.dropping_queue import DroppingQueue\r\n\r\nlog = logging.getLogger(__name__)\r\n\r\n\r\nclass DebugVideoProtocolAdapter(BaseVideoProtocolAdapter):\r\n    \"\"\"\r\n    Drop-in video protocol adapter that fetches frames from the local\r\n    webcam instead of a network socket.\r\n    \"\"\"\r\n    def __init__(self, camera_index: int = 0, debug: bool = False, max_queue_size=100):\r\n        super().__init__(drone_ip=\"localhost\", control_port=0, video_port=0)\r\n\r\n        self.camera_index = camera_index\r\n        self.debug = debug\r\n        self._cap = None\r\n        self._frame_id = 0\r\n        self._running = threading.Event()\r\n        self._thread: Optional[threading.Thread] = None\r\n        self.frame_queue = DroppingQueue(maxsize=max_queue_size)\r\n\r\n    def start(self):\r\n        self._cap = cv2.VideoCapture(self.camera_index)\r\n        if not self._cap.isOpened():\r\n            raise RuntimeError(f\"Cannot open local camera #{self.camera_index}\")\r\n\r\n        self._running.set()\r\n        self._thread = threading.Thread(target=self._capture_loop, daemon=True)\r\n        self._thread.start()\r\n        log.info(f\"[debug-video] webcam #{self.camera_index} opened\")\r\n\r\n    def stop(self):\r\n        if not self._running.is_set():\r\n            return\r\n        \r\n        self._running.clear()\r\n        if self._thread:\r\n            self._thread.join(timeout=1.0)\r\n        \r\n        if self._cap and self._cap.isOpened():\r\n            self._cap.release()\r\n        \r\n        log.info(\"[debug-video] webcam released\")\r\n\r\n    def is_running(self) -> bool:\r\n        return self._running.is_set()\r\n\r\n    def get_frame(self, timeout: float = 1.0) -> 
Optional[VideoFrame]:\r\n        try:\r\n            return self.frame_queue.get(timeout=timeout)\r\n        except queue.Empty:\r\n            return None\r\n    \r\n    def get_packets(self) -> List[bytes]:\r\n        # Not applicable for webcam, but required by the service\r\n        return []\r\n\r\n    def _capture_loop(self):\r\n        while self._running.is_set():\r\n            if not self._cap or not self._cap.isOpened():\r\n                log.error(\"[debug-video] camera is not open.\")\r\n                self._running.clear()\r\n                break\r\n\r\n            ret, frame_bgr = self._cap.read()\r\n            if not ret:\r\n                log.warning(\"[debug-video] failed to grab frame\")\r\n                time.sleep(0.1)\r\n                continue\r\n\r\n            ok, jpg = cv2.imencode(\".jpg\", frame_bgr, [int(cv2.IMWRITE_JPEG_QUALITY), 85])\r\n            if not ok:\r\n                log.warning(\"[debug-video] JPEG encode failed\")\r\n                continue\r\n\r\n            self._frame_id = (self._frame_id + 1) & 0xFFFF\r\n            video_frame = VideoFrame(\r\n                frame_id=self._frame_id,\r\n                data=jpg.tobytes(),\r\n                format_type=\"jpeg\",\r\n            )\r\n            \r\n            try:\r\n                self.frame_queue.put(video_frame)\r\n            except queue.Full:\r\n                # This is expected if the queue is full\r\n                pass\r\n\r\n        log.info(\"[debug-video] capture loop stopped.\")\r\n\r\n    # --- Stubs for methods that are not used by this adapter ---\r\n    def create_receiver_socket(self):\r\n        return None\r\n\r\n    def send_start_command(self):\r\n        pass\r\n\r\n    def handle_payload(self, payload: bytes) -> Optional[VideoFrame]:\r\n        return None\r\n"
  },
  {
    "path": "backend/protocols/no_video_protocol.py",
    "content": "import logging\r\nimport queue\r\nimport threading\r\nimport time\r\nfrom typing import Optional\r\n\r\nfrom models.video_frame import VideoFrame\r\nfrom protocols.base_video_protocol import BaseVideoProtocolAdapter\r\n\r\nlogger = logging.getLogger(__name__)\r\n\r\n\r\nclass NoVideoProtocolAdapter(BaseVideoProtocolAdapter):\r\n    \"\"\"\r\n    Placeholder video adapter for RC backends whose video transport is not yet\r\n    implemented.\r\n\r\n    It keeps the web service alive without pretending to decode a stream. The\r\n    logs tell users that the selected drone type currently has RC-only support.\r\n    \"\"\"\r\n\r\n    def __init__(\r\n        self,\r\n        drone_ip: str = \"0.0.0.0\",\r\n        control_port: int = 0,\r\n        video_port: int = 0,\r\n        *,\r\n        reason: str = \"video transport not implemented\",\r\n    ) -> None:\r\n        super().__init__(drone_ip, control_port, video_port)\r\n        self.reason = reason\r\n        self._running = threading.Event()\r\n\r\n    def start(self) -> None:\r\n        logger.warning(\"[no-video] %s\", self.reason)\r\n        self._running.set()\r\n\r\n    def stop(self) -> None:\r\n        self._running.clear()\r\n\r\n    def is_running(self) -> bool:\r\n        return self._running.is_set()\r\n\r\n    def get_frame(self, timeout: float = 1.0) -> Optional[VideoFrame]:\r\n        if self._running.is_set():\r\n            time.sleep(timeout)\r\n        raise queue.Empty\r\n\r\n    def get_packets(self) -> list[bytes]:\r\n        return []\r\n\r\n    def send_start_command(self) -> None:\r\n        pass\r\n\r\n    def create_receiver_socket(self):\r\n        return None\r\n\r\n    def handle_payload(self, payload: bytes) -> Optional[VideoFrame]:\r\n        return None\r\n"
  },
  {
    "path": "backend/protocols/s2x_rc_protocol_adapter.py",
    "content": "from protocols.base_protocol_adapter import BaseProtocolAdapter\nimport socket\n\nclass S2xRCProtocolAdapter(BaseProtocolAdapter):\n    \"\"\"Protocol adapter for S2x drones (S20, S29)\"\"\"\n    \n    def __init__(self, drone_ip, control_port=8080):\n        self.drone_ip = drone_ip\n        self.control_port = control_port\n        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n        self.debug_packets = False\n        self.packet_counter = 0\n        self.swap_yaw_roll = False\n        # Stock Macrochip HY packets scale roll/pitch for app speed tiers.\n        # Keep full-scale as the default so existing S2X behavior is unchanged.\n        self.speed_scale_by_index = {\n            0: 0.7,  # PL FPV uses 0.7; older HiTurbo builds use 0.6.\n            1: 0.8,\n            2: 1.0,\n        }\n        \n    def build_control_packet(self, drone_model):\n        \"\"\"Build a control packet for the S2x protocol\"\"\"\n        pkt = bytearray(20)\n        pkt[0] = 0x66\n        pkt[1] = drone_model.speed & 0xFF\n\n        roll = drone_model.roll\n        yaw = drone_model.yaw\n        if self.swap_yaw_roll:\n            roll, yaw = yaw, roll\n\n        speed_scale = self.speed_scale_by_index.get(getattr(drone_model, \"speed_index\", 2), 1.0)\n\n        # Remap from constrained range to full 0-255 range.\n        # Macrochip HY control only speed-scales roll/pitch, not throttle/yaw.\n        pkt[2] = int(self._remap_to_full_range(self._scale_axis(roll, drone_model, speed_scale), drone_model)) & 0xFF\n        pkt[3] = int(self._remap_to_full_range(self._scale_axis(drone_model.pitch, drone_model, speed_scale), drone_model)) & 0xFF\n        pkt[4] = int(self._remap_to_full_range(drone_model.throttle, drone_model)) & 0xFF\n        pkt[5] = int(self._remap_to_full_range(yaw, drone_model)) & 0xFF\n\n        # Byte 6 for command flags\n        pkt[6] = 0x00\n        \n        # The stock HiTurbo app uses the same one-shot bit for both takeoff 
and\n        # land, while emergency stop is a separate bit.\n        if drone_model.takeoff_flag:\n            pkt[6] |= 0x01\n        if drone_model.land_flag:\n            pkt[6] |= 0x01\n        if drone_model.stop_flag:\n            pkt[6] |= 0x02\n        if drone_model.calibration_flag:\n            pkt[6] |= 0x04\n\n        # Byte 7 - historical TurboDrone base mode bits plus optional headless\n        # flag. Stock apps commonly send 0x02, but 0x0a is proven on hardware.\n        pkt[7] = 0x0a\n        if drone_model.headless_flag:\n            pkt[7] |= 0x01\n        \n        # bytes 8-17 are zero-filled\n\n        # Calculate checksum (bytes 2-17)\n        chk = 0\n        for i in range(2, 18):\n            chk ^= pkt[i]\n        pkt[18] = chk & 0xFF\n        pkt[19] = 0x99\n\n        # Clear one-shot flags after building packet\n        drone_model.takeoff_flag = False\n        drone_model.land_flag = False\n        drone_model.stop_flag = False\n        drone_model.calibration_flag = False\n\n        return bytes(pkt)\n        \n    def send_control_packet(self, packet):\n        \"\"\"Send the control packet to the drone\"\"\"\n        self.sock.sendto(packet, (self.drone_ip, self.control_port))\n        \n        # Log packet details if debug is enabled\n        if self.debug_packets:\n            self.packet_counter += 1\n            \n            # Print full packet hex dump\n            hex_dump = ' '.join(f'{b:02x}' for b in packet)\n            print(f\"Packet #{self.packet_counter}: {hex_dump}\")\n            \n            # Print decoded controls\n            print(f\"  Controls: R:{packet[2]} P:{packet[3]} T:{packet[4]} Y:{packet[5]}\")\n            \n            # Print flags\n            flags6 = packet[6]\n            flags7 = packet[7]\n            flags_desc = []\n            if flags6 & 0x01: flags_desc.append(\"FLY_OR_LAND\")\n            if flags6 & 0x02: flags_desc.append(\"STOP\")\n            if flags6 & 0x04: 
flags_desc.append(\"CALIBRATE\")\n            if flags7 & 0x01: flags_desc.append(\"HEADLESS\")\n            \n            print(f\"  Flags: {flags_desc}\")\n            print(f\"  Checksum: 0x{packet[18]:02x}\")\n            print()\n    \n    def toggle_debug(self):\n        \"\"\"Toggle debug packet logging\"\"\"\n        self.debug_packets = not self.debug_packets\n        return self.debug_packets\n        \n    def _remap_to_full_range(self, value, model):\n        \"\"\"Remap value from constrained range to full 0-255 range for sending to drone\"\"\"\n        if value >= model.center_value:\n            # Map center...max_control to 128...255\n            return 128.0 + (value - model.center_value) * (255.0 - 128.0) / (model.max_control_value - model.center_value)\n        else:\n            # Map min_control...center to 0...128\n            return (value - model.min_control_value) * 128.0 / (model.center_value - model.min_control_value)\n\n    def _scale_axis(self, value, model, scale):\n        \"\"\"Scale a raw axis around center using the stock app speed tier.\"\"\"\n        if scale >= 1.0:\n            return value\n        return model.center_value + ((value - model.center_value) * scale)"
  },
  {
    "path": "backend/protocols/s2x_video_protocol.py",
    "content": "import ipaddress\nimport socket\nimport threading\nimport queue\nfrom typing import Optional, List\n\nfrom models.s2x_video_model import S2xVideoModel\nfrom models.video_frame import VideoFrame\nfrom protocols.base_video_protocol import BaseVideoProtocolAdapter\n\n\nclass S2xVideoProtocolAdapter(BaseVideoProtocolAdapter):\n    \"\"\"Transport + header parser for S2x JPEG stream\"\"\"\n\n    SYNC_BYTES = b\"\\x40\\x40\"\n    EOS_MARKER = b\"\\x23\\x23\"\n    HEADER_LEN = 8        # S2x packets always use an 8-byte header\n    LINK_DEAD_TIMEOUT = 8.0   # camera can stay silent for ~5 s on boot\n\n    def __init__(\n        self,\n        drone_ip: str = \"172.16.10.1\",\n        control_port: int = 8080,\n        video_port: int = 8888,\n        debug: bool = False,\n    ):\n        super().__init__(drone_ip, control_port, video_port)\n        self.model = S2xVideoModel()\n        self.local_ip = self._discover_local_ip()\n        self._sock_lock = threading.Lock()\n        self._sock = self.create_receiver_socket()\n        self._keepalive_thread: Optional[threading.Thread] = None\n        self._keepalive_stop: Optional[threading.Event] = None\n        if debug:\n            print(f\"[s2x] Video socket on *:{self._sock.getsockname()[1]}\")\n        self._running = threading.Event()\n        self._rx_thread: Optional[threading.Thread] = None\n        self._frame_q: \"queue.Queue[VideoFrame]\" = queue.Queue(maxsize=2)\n        self._pkt_lock = threading.Lock()\n        self._pkt_buffer: List[bytes] = []\n\n    # ────────── BaseVideoProtocolAdapter ────────── #\n    def send_start_command(self) -> None:\n        payload = b\"\\x08\" + ipaddress.IPv4Address(self.local_ip).packed\n        with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock:\n            sock.sendto(payload, (self.drone_ip, self.control_port))\n        print(f\"[video] Start command sent ({payload.hex(' ')})\")\n\n    def start_keepalive(self, interval: float = 2.0) -> None:\n    
    \"\"\"Starts a thread to periodically send the start command.\"\"\"\n        if self._keepalive_thread is None:\n            self._keepalive_stop = threading.Event()\n            self._keepalive_thread = threading.Thread(\n                target=self._keepalive_loop,\n                args=(interval, self._keepalive_stop),\n                daemon=True,\n                name=\"S2xVideoKeepAlive\",\n            )\n            self._keepalive_thread.start()\n\n    def stop_keepalive(self) -> None:\n        \"\"\"Stops the keepalive thread.\"\"\"\n        if self._keepalive_stop:\n            self._keepalive_stop.set()\n        if self._keepalive_thread:\n            self._keepalive_thread.join(timeout=1.0)\n            self._keepalive_thread = None\n\n    def create_receiver_socket(self) -> socket.socket:\n        \"\"\"UDP socket bound to the drone's video port.\"\"\"\n        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\n        sock.bind((\"0.0.0.0\", self.video_port))\n        sock.settimeout(1.0)\n        return sock\n\n    def get_receiver_socket(self) -> socket.socket:\n        \"\"\"Returns the main data socket, required by the new VideoReceiverService.\"\"\"\n        with self._sock_lock:\n            return self._sock\n\n    def recv_from_socket(self, sock: socket.socket) -> Optional[bytes]:\n        \"\"\"Receives from the socket and handles timeouts.\"\"\"\n        try:\n            return sock.recv(4096)  # Use a reasonable buffer size\n        except socket.timeout:\n            return None\n\n    def handle_payload(self, payload: bytes) -> Optional[VideoFrame]:\n        \"\"\"\n        1. Validate & strip the native 8-byte S2x header\n        2. 
Forward the slice payload to the model\n\n        PL FPV's native `libvison_main.so` parser treats the header as:\n        0-1 sync `0x40 0x40`, 2-3 little-endian frame id, 4 total chunks,\n        5 chunk id, 6-7 little-endian datagram length.\n        \"\"\"\n        if len(payload) <= self.HEADER_LEN or payload[:2] != self.SYNC_BYTES:\n            return None\n\n        frame_id = int.from_bytes(payload[2:4], \"little\")\n        total_chunks = payload[4]\n        slice_id_raw = payload[5]\n        declared_len = int.from_bytes(payload[6:8], \"little\")\n\n        if total_chunks == 0 or total_chunks > 100:\n            return None\n        if slice_id_raw >= total_chunks:\n            return None\n        if declared_len != len(payload):\n            return None\n\n        # Native 872 parsing copies declared_len - 10 bytes from offset 8,\n        # effectively dropping the 2-byte datagram trailer.\n        body_end = declared_len\n        if declared_len >= self.HEADER_LEN + len(self.EOS_MARKER) and payload[\n            declared_len - len(self.EOS_MARKER) : declared_len\n        ] == self.EOS_MARKER:\n            body_end -= len(self.EOS_MARKER)\n\n        body = payload[self.HEADER_LEN:body_end]\n\n        return self.model.ingest_chunk(\n            stream_id=frame_id,\n            chunk_id=slice_id_raw,\n            total_chunks=total_chunks,\n            payload=body,\n        )\n\n    def stop(self) -> None:\n        \"\"\"Shuts down the adapter, required by the new VideoReceiverService.\"\"\"\n        print(\"[s2x] Stopping protocol adapter.\")\n        self.stop_keepalive()\n        self._running.clear()\n        if self._rx_thread and self._rx_thread.is_alive():\n            self._rx_thread.join(timeout=1.0)\n        try:\n            self._sock.close()\n        except Exception:\n            pass\n\n    # ────────── helpers ────────── #\n    def _keepalive_loop(self, interval: float, stop_event: threading.Event) -> None:\n        while not 
stop_event.is_set():\n            self.send_start_command()\n            stop_event.wait(interval)\n\n    # ────────── Receiver thread API (service expects) ────────── #\n    def start(self) -> None:\n        if self._rx_thread and self._rx_thread.is_alive():\n            return\n        self._running.set()\n        self.start_keepalive(2.0)\n\n        def _rx_loop() -> None:\n            sock = self.get_receiver_socket()\n            while self._running.is_set():\n                try:\n                    payload = self.recv_from_socket(sock)\n                    if not payload:\n                        continue\n                    with self._pkt_lock:\n                        self._pkt_buffer.append(payload)\n                    frame = self.handle_payload(payload)\n                    if frame is not None:\n                        try:\n                            self._frame_q.put(frame, timeout=0.2)\n                        except queue.Full:\n                            pass\n                except OSError:\n                    break\n                except Exception:\n                    continue\n\n        self._rx_thread = threading.Thread(target=_rx_loop, daemon=True, name=\"S2xVideoRx\")\n        self._rx_thread.start()\n\n    def is_running(self) -> bool:\n        return self._running.is_set() and self._rx_thread is not None and self._rx_thread.is_alive()\n\n    def get_frame(self, timeout: float = 1.0) -> Optional[VideoFrame]:\n        try:\n            return self._frame_q.get(timeout=timeout)\n        except queue.Empty:\n            return None\n\n    def get_packets(self) -> List[bytes]:\n        with self._pkt_lock:\n            packets = self._pkt_buffer\n            self._pkt_buffer = []\n            return packets\n\n    def _discover_local_ip(self) -> str:\n        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n        try:\n            s.connect((self.drone_ip, 1))\n            return s.getsockname()[0]\n        finally:\n            
s.close()\n"
  },
  {
    "path": "backend/protocols/wifi_cam_rc_protocol_adapter.py",
    "content": "\"\"\"RC protocol adapter for WiFi_CAM native UDP drones.\"\"\"\r\n\r\nfrom __future__ import annotations\r\n\r\nimport logging\r\nimport socket\r\nfrom typing import Final, Literal\r\n\r\nfrom models.wifi_cam_rc import WifiCamRcModel\r\nfrom protocols.base_protocol_adapter import BaseProtocolAdapter\r\n\r\nlogger = logging.getLogger(__name__)\r\n\r\nCommandMode = Literal[\"auto\", \"short\", \"extended\"]\r\n\r\n\r\nclass WifiCamRcProtocolAdapter(BaseProtocolAdapter):\r\n    \"\"\"Send raw WiFi_CAM stick packets to the native command socket.\"\"\"\r\n\r\n    DEFAULT_DRONE_IP: Final = \"192.168.4.153\"\r\n    DEFAULT_COMMAND_PORT: Final = 8090\r\n\r\n    START_MARKER: Final = 0x66\r\n    END_MARKER: Final = 0x99\r\n    EXTENDED_LENGTH: Final = 0x14\r\n\r\n    FLAG_TAKEOFF: Final = 0x01\r\n    FLAG_LAND: Final = 0x02\r\n    FLAG_EMERGENCY: Final = 0x04\r\n    FLAG_ROTATE: Final = 0x08\r\n    FLAG_HEADLESS: Final = 0x10\r\n    FLAG_CALIBRATE: Final = 0x80\r\n\r\n    EXT_FLAG_TAKEOFF_OR_LAND: Final = 0x01\r\n    EXT_FLAG_EMERGENCY: Final = 0x02\r\n    EXT_FLAG_CALIBRATE: Final = 0x04\r\n    EXT_FLAG_ROTATE: Final = 0x08\r\n    EXT_FLAG_HEADLESS: Final = 0x01\r\n    EXT_FLAG_ALTITUDE_HOLD: Final = 0x02\r\n\r\n    def __init__(\r\n        self,\r\n        drone_ip: str = DEFAULT_DRONE_IP,\r\n        command_port: int = DEFAULT_COMMAND_PORT,\r\n        *,\r\n        command_mode: CommandMode = \"auto\",\r\n    ) -> None:\r\n        self.drone_ip = drone_ip\r\n        self.command_port = command_port\r\n        self.command_mode = self._normalize_mode(command_mode)\r\n        self.camera_type = 0\r\n        self.debug_packets = False\r\n        self.packet_counter = 0\r\n        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\r\n        self.sock.bind((\"\", 0))\r\n\r\n    def build_control_packet(self, drone_model: WifiCamRcModel) -> bytes:\r\n        if self._uses_extended_mode():\r\n            packet = 
self._build_extended_packet(drone_model)\r\n        else:\r\n            packet = self._build_short_packet(drone_model)\r\n        self._clear_one_shot_flags(drone_model)\r\n        return packet\r\n\r\n    def send_control_packet(self, packet: bytes) -> None:\r\n        try:\r\n            self.sock.sendto(packet, (self.drone_ip, self.command_port))\r\n        except OSError:\r\n            return\r\n\r\n        if self.debug_packets:\r\n            self.packet_counter += 1\r\n            logger.debug(\"[wifi-cam-rc] #%05d %s\", self.packet_counter, packet.hex(\" \"))\r\n\r\n    def set_camera_type(self, camera_type: int) -> None:\r\n        self.camera_type = int(camera_type)\r\n        if self.command_mode == \"auto\":\r\n            logger.info(\r\n                \"[wifi-cam-rc] camera type %s selected %s command mode\",\r\n                camera_type,\r\n                \"extended\" if self._uses_extended_mode() else \"short\",\r\n            )\r\n\r\n    def stop(self) -> None:\r\n        try:\r\n            self.sock.close()\r\n        except OSError:\r\n            pass\r\n\r\n    def toggle_debug(self) -> bool:\r\n        self.debug_packets = not self.debug_packets\r\n        logger.info(\"[wifi-cam-rc] debug %s\", \"ON\" if self.debug_packets else \"OFF\")\r\n        return self.debug_packets\r\n\r\n    def _build_short_packet(self, model: WifiCamRcModel) -> bytes:\r\n        packet = bytearray(8)\r\n        packet[0] = self.START_MARKER\r\n        packet[1] = self._right_data(self._axis(model.roll))\r\n        packet[2] = self._right_data(self._axis(model.pitch))\r\n        packet[3] = self._right_data(self._axis(model.throttle))\r\n        packet[4] = self._right_data(self._axis(model.yaw))\r\n        packet[5] = self._build_short_flags(model)\r\n        packet[6] = self._right_data(self._xor(packet[1:6]))\r\n        packet[7] = self.END_MARKER\r\n        return bytes(packet)\r\n\r\n    def _build_extended_packet(self, model: WifiCamRcModel) -> 
bytes:\r\n        packet = bytearray(20)\r\n        packet[0] = self.START_MARKER\r\n        packet[1] = self.EXTENDED_LENGTH\r\n        packet[2] = self._right_data(self._axis(model.roll))\r\n        packet[3] = self._right_data(self._axis(model.pitch))\r\n        packet[4] = self._right_data(self._axis(model.throttle))\r\n        packet[5] = self._right_data(self._axis(model.yaw))\r\n        flags1, flags2 = self._build_extended_flags(model)\r\n        packet[6] = flags1\r\n        packet[7] = flags2\r\n        packet[18] = self._right_data(self._xor(packet[2:18]))\r\n        packet[19] = self.END_MARKER\r\n        return bytes(packet)\r\n\r\n    def _build_short_flags(self, model: WifiCamRcModel) -> int:\r\n        flags = 0\r\n        if model.takeoff_flag:\r\n            flags |= self.FLAG_TAKEOFF\r\n        if model.land_flag:\r\n            flags |= self.FLAG_LAND\r\n        if model.stop_flag:\r\n            flags |= self.FLAG_EMERGENCY\r\n        if model.flip_flag:\r\n            flags |= self.FLAG_ROTATE\r\n        if model.headless_flag:\r\n            flags |= self.FLAG_HEADLESS\r\n        if model.calibration_flag:\r\n            flags |= self.FLAG_CALIBRATE\r\n        return flags & 0xFF\r\n\r\n    def _build_extended_flags(self, model: WifiCamRcModel) -> tuple[int, int]:\r\n        flags1 = 0\r\n        flags2 = 0\r\n        # The decompiled app maps both one-key fly and land to the same bit.\r\n        if model.takeoff_flag or model.land_flag:\r\n            flags1 |= self.EXT_FLAG_TAKEOFF_OR_LAND\r\n        if model.stop_flag:\r\n            flags1 |= self.EXT_FLAG_EMERGENCY\r\n        if model.calibration_flag:\r\n            flags1 |= self.EXT_FLAG_CALIBRATE\r\n        if model.flip_flag:\r\n            flags1 |= self.EXT_FLAG_ROTATE\r\n        if model.headless_flag:\r\n            flags2 |= self.EXT_FLAG_HEADLESS\r\n        if model.altitude_hold_flag:\r\n            flags2 |= self.EXT_FLAG_ALTITUDE_HOLD\r\n        return flags1 & 0xFF, flags2 
& 0xFF\r\n\r\n    def _clear_one_shot_flags(self, model: WifiCamRcModel) -> None:\r\n        model.takeoff_flag = False\r\n        model.land_flag = False\r\n        model.stop_flag = False\r\n        model.flip_flag = False\r\n        model.calibration_flag = False\r\n\r\n    def _uses_extended_mode(self) -> bool:\r\n        if self.command_mode == \"extended\":\r\n            return True\r\n        if self.command_mode == \"short\":\r\n            return False\r\n        return self.camera_type == 2\r\n\r\n    def _normalize_mode(self, command_mode: str) -> CommandMode:\r\n        mode = (command_mode or \"auto\").strip().lower()\r\n        if mode not in {\"auto\", \"short\", \"extended\"}:\r\n            logger.warning(\"[wifi-cam-rc] Unknown command mode %r; using auto.\", command_mode)\r\n            return \"auto\"\r\n        return mode  # type: ignore[return-value]\r\n\r\n    def _axis(self, value: float) -> int:\r\n        return max(0, min(255, int(value))) & 0xFF\r\n\r\n    def _right_data(self, value: int) -> int:\r\n        value &= 0xFF\r\n        if value in (self.START_MARKER, self.END_MARKER):\r\n            value = (value + 1) & 0xFF\r\n        return value\r\n\r\n    def _xor(self, values: bytes | bytearray) -> int:\r\n        checksum = 0\r\n        for value in values:\r\n            checksum ^= value\r\n        return checksum & 0xFF\r\n"
  },
  {
    "path": "backend/protocols/wifi_cam_video_protocol.py",
    "content": "\"\"\"Video protocol adapter for WiFi_CAM native UDP drones.\"\"\"\r\n\r\nfrom __future__ import annotations\r\n\r\nimport logging\r\nimport queue\r\nimport socket\r\nimport threading\r\nfrom typing import Final, Optional\r\n\r\nfrom models.video_frame import VideoFrame\r\nfrom protocols.base_video_protocol import BaseVideoProtocolAdapter\r\n\r\nlogger = logging.getLogger(__name__)\r\n\r\n\r\nclass WifiCamVideoProtocolAdapter(BaseVideoProtocolAdapter):\r\n    \"\"\"Start the WiFi_CAM MJPEG stream and reassemble native JPEG chunks.\"\"\"\r\n\r\n    DEFAULT_DRONE_IP: Final = \"192.168.4.153\"\r\n    DEFAULT_COMMAND_PORT: Final = 8090\r\n    DEFAULT_VIDEO_PORT: Final = 8080\r\n\r\n    START_STREAM: Final = b\"\\x42\\x76\"\r\n    STOP_STREAM: Final = b\"\\x42\\x77\"\r\n    ROTATE: Final = b\"\\x42\\x78\"\r\n    CAMERA_SWITCH: Final = b\"\\x42\\x79\"\r\n\r\n    NATIVE_PACKET_SIZE: Final = 0x5C0\r\n    HEADER_SIZE: Final = 8\r\n    CHUNK_SIZE: Final = 0x5B8\r\n    SOI: Final = b\"\\xff\\xd8\"\r\n    EOI: Final = b\"\\xff\\xd9\"\r\n\r\n    def __init__(\r\n        self,\r\n        drone_ip: str = DEFAULT_DRONE_IP,\r\n        control_port: int = DEFAULT_COMMAND_PORT,\r\n        video_port: int = DEFAULT_VIDEO_PORT,\r\n        *,\r\n        debug: bool = False,\r\n    ) -> None:\r\n        super().__init__(drone_ip, control_port, video_port)\r\n        self.debug = debug\r\n        self._running = threading.Event()\r\n        self._rx_thread: Optional[threading.Thread] = None\r\n        self._sock: Optional[socket.socket] = None\r\n        self._frame_q: \"queue.Queue[VideoFrame]\" = queue.Queue(maxsize=2)\r\n        self._pkt_lock = threading.Lock()\r\n        self._pkt_buffer: list[bytes] = []\r\n        self._rc_adapter = None\r\n\r\n        self._current_frame_id: Optional[int] = None\r\n        self._current_chunk_index = 0\r\n        self._frame_buffer = bytearray()\r\n        self._frame_counter = 0\r\n        self.camera_type = 0\r\n\r\n    def 
set_rc_adapter(self, rc_adapter) -> None:\r\n        self._rc_adapter = rc_adapter\r\n        if self.camera_type and hasattr(rc_adapter, \"set_camera_type\"):\r\n            rc_adapter.set_camera_type(self.camera_type)\r\n\r\n    def start(self) -> None:\r\n        if self.is_running():\r\n            return\r\n        self._sock = self.create_receiver_socket()\r\n        self._running.set()\r\n        self.send_start_command()\r\n        self._rx_thread = threading.Thread(\r\n            target=self._rx_loop,\r\n            daemon=True,\r\n            name=\"WifiCamVideoRx\",\r\n        )\r\n        self._rx_thread.start()\r\n        logger.info(\"[wifi-cam-video] listening on local UDP %s\", self._sock.getsockname()[1])\r\n\r\n    def stop(self) -> None:\r\n        self._running.clear()\r\n        self._send_command(self.STOP_STREAM)\r\n        sock = self._sock\r\n        if sock:\r\n            try:\r\n                sock.close()\r\n            except OSError:\r\n                pass\r\n        if self._rx_thread:\r\n            self._rx_thread.join(timeout=1.0)\r\n        self._rx_thread = None\r\n        self._sock = None\r\n\r\n    def is_running(self) -> bool:\r\n        return self._running.is_set() and self._rx_thread is not None and self._rx_thread.is_alive()\r\n\r\n    def get_frame(self, timeout: float = 1.0) -> Optional[VideoFrame]:\r\n        try:\r\n            return self._frame_q.get(timeout=timeout)\r\n        except queue.Empty:\r\n            return None\r\n\r\n    def get_packets(self) -> list[bytes]:\r\n        with self._pkt_lock:\r\n            packets = self._pkt_buffer\r\n            self._pkt_buffer = []\r\n            return packets\r\n\r\n    def send_start_command(self) -> None:\r\n        self._send_command(self.START_STREAM)\r\n\r\n    def create_receiver_socket(self) -> socket.socket:\r\n        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\r\n        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\r\n        
# The Android app does not bind to 8080; it receives replies on the\r\n        # same ephemeral local UDP port used to send 42 76 to the drone.\r\n        sock.bind((\"\", 0))\r\n        sock.settimeout(0.5)\r\n        return sock\r\n\r\n    def handle_payload(self, payload: bytes) -> Optional[VideoFrame]:\r\n        camera_type = self._parse_camera_type(payload)\r\n        if camera_type is not None:\r\n            self._set_camera_type(camera_type)\r\n            return None\r\n\r\n        if len(payload) < self.HEADER_SIZE:\r\n            return None\r\n\r\n        frame_id = payload[0]\r\n        final_marker = payload[1]\r\n        total_chunks = payload[2]\r\n        resolution = payload[3]\r\n        retain = payload[7]\r\n        chunk = payload[self.HEADER_SIZE :]\r\n\r\n        if self._current_frame_id != frame_id:\r\n            self._current_frame_id = frame_id\r\n            self._current_chunk_index = 0\r\n            self._frame_buffer = bytearray()\r\n\r\n        chunk_index = self._current_chunk_index\r\n        self._frame_buffer.extend(chunk)\r\n\r\n        frame = None\r\n        is_final = final_marker == 1 and total_chunks > 0 and chunk_index + 1 == total_chunks\r\n        if is_final:\r\n            frame = self._finish_frame(chunk, chunk_index, resolution, retain)\r\n            self._current_chunk_index = 0\r\n            self._current_frame_id = None\r\n            self._frame_buffer = bytearray()\r\n        else:\r\n            self._current_chunk_index += 1\r\n\r\n        return frame\r\n\r\n    def switch_camera(self) -> None:\r\n        self._send_command(self.CAMERA_SWITCH)\r\n\r\n    def rotate(self) -> None:\r\n        self._send_command(self.ROTATE)\r\n\r\n    def _rx_loop(self) -> None:\r\n        sock = self._sock\r\n        if sock is None:\r\n            return\r\n        while self._running.is_set():\r\n            try:\r\n                packet, _ = sock.recvfrom(self.NATIVE_PACKET_SIZE)\r\n            except 
socket.timeout:\r\n                continue\r\n            except OSError:\r\n                break\r\n\r\n            with self._pkt_lock:\r\n                self._pkt_buffer.append(packet)\r\n\r\n            frame = self.handle_payload(packet)\r\n            if frame is None:\r\n                continue\r\n            try:\r\n                self._frame_q.put(frame, timeout=0.1)\r\n            except queue.Full:\r\n                try:\r\n                    self._frame_q.get_nowait()\r\n                    self._frame_q.put(frame, timeout=0.1)\r\n                except (queue.Empty, queue.Full):\r\n                    pass\r\n\r\n    def _send_command(self, command: bytes) -> None:\r\n        sock = self._sock\r\n        if sock is None:\r\n            return\r\n        try:\r\n            sock.sendto(command, (self.drone_ip, self.video_port))\r\n        except OSError:\r\n            pass\r\n\r\n    def _parse_camera_type(self, payload: bytes) -> Optional[int]:\r\n        if len(payload) != 8 or payload[0] != 0x55 or payload[7] != 0x99:\r\n            return None\r\n        if payload == b\"\\x55\\x00\\x01\\x00\\x00\\x00\\x01\\x99\":\r\n            return 1\r\n        if payload == b\"\\x55\\x00\\x02\\x00\\x00\\x00\\x02\\x99\":\r\n            return 2\r\n        return 0\r\n\r\n    def _set_camera_type(self, camera_type: int) -> None:\r\n        if camera_type == self.camera_type:\r\n            return\r\n        self.camera_type = camera_type\r\n        logger.info(\"[wifi-cam-video] detected camera type %s\", camera_type)\r\n        if self._rc_adapter is not None and hasattr(self._rc_adapter, \"set_camera_type\"):\r\n            self._rc_adapter.set_camera_type(camera_type)\r\n\r\n    def _finish_frame(\r\n        self,\r\n        final_chunk: bytes,\r\n        final_chunk_index: int,\r\n        resolution: int,\r\n        retain: int,\r\n    ) -> Optional[VideoFrame]:\r\n        eoi = final_chunk.find(self.EOI)\r\n        if eoi < 0:\r\n            return 
None\r\n\r\n        frame_len = final_chunk_index * self.CHUNK_SIZE + eoi + len(self.EOI)\r\n        data = bytes(self._frame_buffer[:frame_len])\r\n        if not data.startswith(self.SOI) or not data.endswith(self.EOI):\r\n            if self.debug:\r\n                logger.debug(\"[wifi-cam-video] rejected malformed JPEG frame\")\r\n            return None\r\n\r\n        self._frame_counter = (self._frame_counter + 1) & 0xFFFF\r\n        frame = VideoFrame(self._frame_counter, data, format_type=\"jpeg\")\r\n        frame.resolution = resolution  # type: ignore[attr-defined]\r\n        frame.retain = retain  # type: ignore[attr-defined]\r\n        return frame\r\n"
  },
  {
    "path": "backend/protocols/wifi_uav_rc_protocol_adapter.py",
    "content": "import socket\r\nfrom typing import Final, List, Optional\r\n\r\nfrom protocols.base_protocol_adapter import BaseProtocolAdapter\r\nfrom models.wifi_uav_rc import WifiUavRcModel\r\nfrom utils.wifi_uav_variants import get_wifi_uav_capabilities\r\n\r\n\r\nclass WifiUavRcProtocolAdapter(BaseProtocolAdapter):\r\n    \"\"\"\r\n    Builds and transmits control packets for the WiFi-UAV family.\r\n    Packet layout derived from reverse-engineered Android app traces.\r\n\r\n    Turbodrone uses the extended WiFi-UAV command layout (`66 14 ...`). In\r\n    that layout the app maps takeoff and land onto the same one-key action bit,\r\n    while emergency stop is a separate bit.\r\n    \"\"\"\r\n\r\n    DEFAULT_DRONE_IP: Final = \"192.168.169.1\"\r\n    DEFAULT_PORT:     Final = 8800\r\n\r\n    # ──────────────────────────────────────────────────────────\r\n    # Static parts (taken 1:1 from packet dumps)\r\n    # ──────────────────────────────────────────────────────────\r\n    _HEADER         = bytes([0xef, 0x02, 0x7c, 0x00, 0x02, 0x02,\r\n                             0x00, 0x01, 0x02, 0x00, 0x00, 0x00])\r\n\r\n    _COUNTER1_SUFFIX = bytes([0x00, 0x00, 0x14, 0x00, 0x66, 0x14])\r\n    _CONTROL_SUFFIX  = bytes(10)                            # 10 × 0x00\r\n\r\n    _CHECKSUM_SUFFIX = bytes([0x99]) + bytes(44) + bytes([0x32, 0x4b, 0x14, 0x2d, 0x00, 0x00])\r\n\r\n    _COUNTER2_SUFFIX = bytes([\r\n        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,\r\n        0x00, 0x00, 0x14, 0x00, 0x00, 0x00,\r\n        0xff, 0xff, 0xff, 0xff\r\n    ])\r\n\r\n    _COUNTER3_SUFFIX = bytes([\r\n        0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\r\n        0x03, 0x00, 0x00, 0x00, 0x10, 0x00,\r\n        0x00, 0x00\r\n    ])\r\n\r\n    FLAG_TAKEOFF_OR_LAND = 0x01\r\n    FLAG_STOP = 0x02\r\n    FLAG_CALIBRATION = 0x04\r\n    FLAG_FLIP = 0x08\r\n\r\n    _SPEED_SCALES: Final[tuple[float, ...]] = (0.30, 0.60, 1.00, 0.25)\r\n\r\n    # 
------------------------------------------------------------------ #\r\n    def __init__(self,\r\n                 drone_ip: str = DEFAULT_DRONE_IP,\r\n                 control_port: int = DEFAULT_PORT,\r\n                 shared_sock: Optional[socket.socket] = None,\r\n                 variant: str = \"auto\") -> None:\r\n        self.drone_ip = drone_ip\r\n        self.control_port = control_port\r\n        self.variant = (variant or \"auto\").strip().lower()\r\n        self.capabilities = get_wifi_uav_capabilities(self.variant)\r\n        self._target_ports = self._resolve_target_ports(control_port)\r\n\r\n        self.sock = shared_sock or socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\r\n        self._is_shared_sock = shared_sock is not None\r\n        self.debug_packets = False\r\n        self._pkt_counter = 0\r\n\r\n        # rolling 16-bit counters found in the original protocol\r\n        self._ctr1 = 0x0000\r\n        self._ctr2 = 0x0001\r\n        self._ctr3 = 0x0002\r\n\r\n    def set_socket(self, sock: socket.socket) -> None:\r\n        \"\"\"Use an externally managed socket instead of the internal one.\"\"\"\r\n        # Don't close the old socket if it was created here\r\n        if self.sock and not self._is_shared_sock:\r\n            self.sock.close()\r\n        \r\n        self.sock = sock\r\n        self._is_shared_sock = True\r\n\r\n    def stop(self) -> None:\r\n        \"\"\"Close the socket if it's not shared.\"\"\"\r\n        if self.sock and not self._is_shared_sock:\r\n            try:\r\n                self.sock.close()\r\n            except Exception:\r\n                pass # Ignore errors on shutdown\r\n\r\n    # ------------------------------------------------------------------ #\r\n    # BaseProtocolAdapter\r\n    # ------------------------------------------------------------------ #\r\n    def build_control_packet(self, drone_model: WifiUavRcModel) -> bytes:  # type: ignore[override]\r\n        # ----- counters 
-------------------------------------------------\r\n        c1 = self._ctr1.to_bytes(2, \"little\")\r\n        c2 = self._ctr2.to_bytes(2, \"little\")\r\n        c3 = self._ctr3.to_bytes(2, \"little\")\r\n\r\n        # advance for next call\r\n        self._ctr1 = (self._ctr1 + 1) & 0xFFFF\r\n        self._ctr2 = (self._ctr2 + 1) & 0xFFFF\r\n        self._ctr3 = (self._ctr3 + 1) & 0xFFFF\r\n\r\n        # ----- command / headless --------------------------------------\r\n        command = 0x00\r\n        if drone_model.takeoff_flag or drone_model.land_flag:\r\n            command |= self.FLAG_TAKEOFF_OR_LAND\r\n        if drone_model.land_flag:\r\n            self._last_command_intent = \"LAND\"\r\n        elif drone_model.takeoff_flag:\r\n            self._last_command_intent = \"TAKEOFF\"\r\n        elif drone_model.stop_flag:\r\n            self._last_command_intent = \"STOP\"\r\n        elif drone_model.calibration_flag:\r\n            self._last_command_intent = \"CALIBRATE\"\r\n        else:\r\n            self._last_command_intent = None\r\n        if drone_model.stop_flag:\r\n            command |= self.FLAG_STOP\r\n        if drone_model.calibration_flag:\r\n            command |= self.FLAG_CALIBRATION\r\n        if getattr(drone_model, \"flip_flag\", False):\r\n            command |= self.FLAG_FLIP\r\n        camera_tilt_state = max(0, min(2, int(getattr(drone_model, \"camera_tilt_state\", 0))))\r\n        command |= (camera_tilt_state & 0x03) << 6\r\n\r\n        headless = 0x03 if drone_model.headless_flag else 0x02\r\n\r\n        # ----- controls -------------------------------------------------\r\n        speed_index = max(0, min(3, int(getattr(drone_model, \"speed_index\", 2))))\r\n        controls: List[int] = [\r\n            self._apply_speed_scale(drone_model.roll, speed_index),\r\n            self._apply_speed_scale(drone_model.pitch, speed_index),\r\n            self._apply_speed_scale(drone_model.throttle, speed_index),\r\n            
self._apply_speed_scale(drone_model.yaw, speed_index),\r\n            command & 0xFF,\r\n            headless & 0xFF,\r\n        ]\r\n\r\n        # Stash for debug printing at send time (roll, pitch, throttle, yaw)\r\n        try:\r\n            self._last_controls = tuple(controls[:4])\r\n            self._last_command = command & 0xFF\r\n            self._last_headless = headless & 0xFF\r\n        except Exception:\r\n            pass\r\n\r\n        checksum = 0\r\n        for b in controls:\r\n            checksum ^= b\r\n\r\n        # ----- assemble -------------------------------------------------\r\n        pkt = bytearray()\r\n        pkt += self._HEADER\r\n        pkt += c1 + self._COUNTER1_SUFFIX\r\n        pkt += bytes(controls)\r\n        pkt += self._CONTROL_SUFFIX\r\n        pkt.append(checksum)\r\n        pkt += self._CHECKSUM_SUFFIX\r\n        pkt += c2 + self._COUNTER2_SUFFIX\r\n        pkt += c3 + self._COUNTER3_SUFFIX\r\n\r\n        # one-shot flags → clear\r\n        drone_model.takeoff_flag = False\r\n        drone_model.land_flag = False\r\n        drone_model.stop_flag = False\r\n        drone_model.calibration_flag = False\r\n        drone_model.flip_flag = False\r\n        drone_model.camera_tilt_state = 0\r\n\r\n        return bytes(pkt)\r\n\r\n    def send_control_packet(self, packet: bytes):  # type: ignore[override]\r\n        \"\"\"\r\n        Transmit one RC packet.\r\n        If the video layer has just torn the shared socket down, the send\r\n        will raise OSError(EBADF).  Swallow it and wait until the receiver\r\n        hands us the fresh socket.\r\n        \"\"\"\r\n        try:\r\n            for port in self._target_ports:\r\n                self.sock.sendto(packet, (self.drone_ip, port))\r\n        except OSError:\r\n            # Socket was closed during video-reconnect window.\r\n            # Wait for VideoReceiverService to call set_socket(…) with the\r\n            # new descriptor.  
Until then we just skip transmitting.\r\n            return\r\n\r\n        if self.debug_packets:\r\n            self._pkt_counter += 1\r\n            print(f\"[wifi-uav] #{self._pkt_counter:05d}   \"\r\n                  f\"{' '.join(f'{b:02x}' for b in packet[:40])} …\")\r\n            try:\r\n                r, p, t, y = getattr(self, \"_last_controls\", (None, None, None, None))\r\n                if None not in (r, p, t, y):\r\n                    print(f\"[wifi-uav] controls R:{r} P:{p} T:{t} Y:{y}\")\r\n                command = getattr(self, \"_last_command\", None)\r\n                if command is not None:\r\n                    flags = []\r\n                    if command & self.FLAG_TAKEOFF_OR_LAND:\r\n                        flags.append(getattr(self, \"_last_command_intent\", None) or \"TAKEOFF_OR_LAND\")\r\n                    if command & self.FLAG_STOP:\r\n                        flags.append(\"STOP\")\r\n                    if command & self.FLAG_CALIBRATION:\r\n                        flags.append(\"CALIBRATE\")\r\n                    if command & self.FLAG_FLIP:\r\n                        flags.append(\"FLIP\")\r\n                    tilt = (command >> 6) & 0x03\r\n                    if tilt:\r\n                        flags.append(f\"TILT:{tilt}\")\r\n                    if flags:\r\n                        print(f\"[wifi-uav] command flags: {', '.join(flags)}\")\r\n            except Exception:\r\n                pass\r\n\r\n    def toggle_debug(self) -> bool:                # type: ignore[override]\r\n        self.debug_packets = not self.debug_packets\r\n        state = \"ON\" if self.debug_packets else \"OFF\"\r\n        print(f\"[wifi-uav] debug {state}\")\r\n        return self.debug_packets\r\n\r\n    def _resolve_target_ports(self, control_port: int) -> tuple[int, ...]:\r\n        if self.capabilities.transport == \"uav_dual_port\":\r\n            return (control_port, control_port + 1)\r\n        return (control_port,)\r\n\r\n    def 
_apply_speed_scale(self, value: float, speed_index: int) -> int:\r\n        \"\"\"Scale a raw model axis around its center using the app's speed table.\"\"\"\r\n        scale = self._SPEED_SCALES[speed_index]\r\n        centered = float(value) - 128.0\r\n        return max(0, min(255, round(128.0 + centered * scale)))\r\n"
  },
  {
    "path": "backend/protocols/wifi_uav_video_protocol.py",
    "content": "import logging\nimport socket\nimport queue\nimport threading\nimport time\nfrom typing import Optional\n\nfrom models.video_frame import VideoFrame\nfrom protocols.base_video_protocol import BaseVideoProtocolAdapter\nfrom utils.wifi_uav_ack_state import WifiUavAckState\nfrom utils.wifi_uav_packets import (\n    START_STREAM,\n    build_native_ack_packet,\n)\nfrom utils.wifi_uav_jpeg import generate_jpeg_headers, EOI\nfrom utils.wifi_uav_variants import get_wifi_uav_capabilities\n\nlogger = logging.getLogger(__name__)\n\n\nclass WifiUavVideoProtocolAdapter(BaseVideoProtocolAdapter):\n    \"\"\"\n    Protocol adapter for the inexpensive \"WiFi UAV\" drones.\n\n    Differences to the S2x family:\n      • A single duplex UDP socket is used for tx/rx.\n      • The drone stops streaming unless it receives two custom\n        *frame-request* packets (REQUEST_A / REQUEST_B) for every JPEG.\n      • Each UDP datagram has a 56-byte proprietary header that must be\n        stripped; the JPEG SOI/APPx headers are completely absent and are\n        generated on the client.\n    \"\"\"\n\n    DEFAULT_DRONE_IP = \"192.168.169.1\"\n\n    REQUEST_A_OFFSETS = (12, 13)          # two-byte LE frame counter\n    REQUEST_B_OFFSETS = (12, 13, 88, 89, 107, 108)\n\n    FRAME_TIMEOUT = 0.08          # 80 ms without a full frame → retry sooner\n    MAX_RETRIES = 3              # allow one more retry for first-frame reliability\n    WATCHDOG_SLEEP = 0.05          # 50 ms between watchdog checks\n    INITIAL_STREAM_TIMEOUT = 8.0  # give warmup some time before forcing restart\n    LINK_STALL_TIMEOUT = 4.0      # no packets for this long -> recreate adapter\n\n    # ------------------------------------------------------------------ #\n    # life-cycle helpers\n    # ------------------------------------------------------------------ #\n    def __init__(\n        self,\n        drone_ip: str = DEFAULT_DRONE_IP,\n        control_port: int = 8800,\n        video_port: int = 8800,\n 
       jpeg_width: int = 640,\n        jpeg_height: int = 360,\n        components: int = 3,\n        *,\n        variant: str = \"auto\",\n        debug: bool = False,\n    ):\n        super().__init__(drone_ip, control_port, video_port)\n\n        self.variant = (variant or \"auto\").strip().lower()\n        self.capabilities = get_wifi_uav_capabilities(self.variant)\n        self.debug = debug or logger.isEnabledFor(logging.DEBUG)\n        self._dbg = logger.debug if self.debug else (lambda *a, **k: None)\n        self._sock_lock = threading.Lock()\n        self._pkt_lock = threading.Lock()\n        self._pkt_buffer: list[bytes] = []\n        self._warmup_stop = threading.Event()\n        self._warmup_thread: Optional[threading.Thread] = None\n        self._rx_thread: Optional[threading.Thread] = None\n        self._target_ports = self._resolve_target_ports(control_port)\n\n        self._sock = self._create_duplex_socket()\n\n        # Pre-built JPEG header (SOI + quant tables + SOF0 + …)\n        self._jpeg_header = generate_jpeg_headers(jpeg_width, jpeg_height, components)\n\n        # State for the current frame being assembled\n        # If I send 0 it sends 1, starting with 1 is more reliable.\n        self._current_fid: int = 1\n        self._ack_state = WifiUavAckState()\n        self._last_req_ts = time.time()\n        self._last_rx_ts = time.time()\n        self._stream_started = False\n        self._started_once = False\n\n        # Stats\n        self.frames_ok = 0\n        self.frames_dropped = 0\n        self._dbg(f\"[init] adapter ready (control:{control_port}  video:{video_port})\")\n        if self.capabilities.transport == \"uav_dual_port\":\n            logger.info(\"[wifi-uav] UAV/FLOW variant selected; probing UDP ports %s\", self._target_ports)\n\n        # Watchdog for per-frame timeouts\n        self._running = True\n        self._watchdog = threading.Thread(\n            target=self._watchdog_loop, daemon=True, name=\"FrameWatchdog\"\n    
    )\n        self._watchdog.start()\n\n        self._retry_cnt       = 0          # retries for *current* frame\n        self._had_retry       = False      # did we already retry this frame?\n        self.retry_attempts   = 0          # global counter\n        self.retry_successes  = 0          # global counter\n\n        self._dbg(f\"Main UDP socket created, listening on *:{self._sock.getsockname()[1]}\")\n\n    # ------------------------------------------------------------------ #\n    # disable keep-alive – one start command is enough for this drone\n    # ------------------------------------------------------------------ #\n    def start_keepalive(self, interval: float = 1.0) -> None:  # type: ignore[override]\n        return\n\n    def stop_keepalive(self) -> None:  # type: ignore[override]\n        return\n\n    # ------------------------------------------------------------------ #\n    # Base-class hooks\n    # ------------------------------------------------------------------ #\n    def create_receiver_socket(self) -> socket.socket:\n        return self._sock\n\n    def send_start_command(self) -> None:\n        for port in self._target_ports:\n            self._sock.sendto(START_STREAM, (self.drone_ip, port))\n        self._dbg(\"[wifi-uav] START_STREAM sent to %s\", self._target_ports)\n\n    def handle_payload(self, payload: bytes) -> Optional[VideoFrame]:\n        \"\"\"\n        Collect slices belonging to the requested frame.\n\n        Native packet layout:\n        byte 0        : 0x93\n        byte 1        : message type; 0x01 means JPEG fragment\n        bytes 2..3    : total packet length\n        bytes 8..15   : image sequence\n        bytes 32..35  : fragment index\n        bytes 36..39  : fragment count\n        bytes 56+     : JPEG payload\n        \"\"\"\n        parsed = self._parse_fragment_header(payload)\n        if parsed is None:\n            return None\n\n        frame_id, frag_id, fragment_total, frame_body_len, quality, 
jpeg_payload = parsed\n        self._stream_started = True\n        self._last_rx_ts = time.time()\n        self._retry_cnt = 0\n\n        # resynchronise if the drone skipped ahead\n        if frame_id != self._current_fid:\n            self.frames_dropped += 1\n            self._dbg(f\"⚠ skip   expected {self._current_fid:04x} \"\n                      f\"got {frame_id:04x}\")\n            self._current_fid = frame_id\n\n        slot = self._ack_state.ingest_fragment(\n            frame_id,\n            frag_id,\n            fragment_total,\n            jpeg_payload,\n            frame_body_len=frame_body_len,\n            quality=quality,\n        )\n        self._dbg(f\"← FID:{frame_id:04x} FRAG:{frag_id:04x}\")\n\n        if slot is None:\n            received = len(self._ack_state._slot_for_seq(frame_id).received_fragments)\n            if fragment_total > 0:\n                missing = fragment_total - received\n                self._dbg(\n                    f\"⚠ incomplete FID:{frame_id:04x} \"\n                    f\"missing {missing} fragment(s); waiting for retry\"\n                )\n            else:\n                self._dbg(\n                    f\"⚠ incomplete FID:{frame_id:04x} \"\n                    f\"received {received} fragment(s); waiting for tail\"\n                )\n            return None\n\n        # Only emit a frame once every fragment up to the announced tail exists.\n        jpeg = self._jpeg_header + slot.ordered_payload() + EOI\n        frame = VideoFrame(frame_id=frame_id, data=jpeg)\n\n        self.frames_ok += 1\n\n        # ── was this frame finished thanks to a retry? ───────────\n        if self._had_retry:\n            self.retry_successes += 1\n            self._dbg(f\"✓ recovery! 
{frame_id:04x}  \"\n                      f\"SUC:{self.retry_successes}  \"\n                      f\"ATT:{self.retry_attempts}\")\n            self._had_retry = False\n        # ──────────────────────────────────────────────────────────────\n\n        self._dbg(f\"✓ {frame_id:04x} ({slot.fragment_total} frags)  \"\n                  f\"OK:{self.frames_ok}  DROP:{self.frames_dropped}\")\n\n        # prepare next iteration\n        self._ack_state.mark_delivered(frame_id)\n        self._send_frame_request(frame_id)            # ask for next\n        self._current_fid = frame_id + 1\n        self._last_rx_ts = self._last_req_ts = time.time()\n\n        return frame\n\n    # ------------------------------------------------------------------ #\n    # helpers\n    # ------------------------------------------------------------------ #\n    def _warmup_loop(self) -> None:\n        \"\"\"During warmup, periodically resend START_STREAM + frame request\n        until the first frame is observed, then exit.\"\"\"\n        while getattr(self, \"_first_frame\", False) and not self._warmup_stop.is_set():\n            try:\n                self.send_start_command()\n                # Ask for the previous frame id; the drone will respond with next\n                self._send_frame_request(max(0, self._current_fid - 1))\n            except Exception:\n                pass\n            self._warmup_stop.wait(0.2)\n    def _send_frame_request(self, frame_id: int) -> None:\n        command_seq = frame_id & 0xFFFFFFFF\n\n        rqst_a = build_native_ack_packet(command_seq, [])\n        rqst_b = build_native_ack_packet(command_seq, self._build_ack_slots(frame_id))\n\n        for port in self._target_ports:\n            self._sock.sendto(rqst_a, (self.drone_ip, port))\n            self._sock.sendto(rqst_b, (self.drone_ip, port))\n        self._last_req_ts = time.time()\n        self._dbg(\"→ REQ %04x to %s\", frame_id, self._target_ports)\n\n    def _build_ack_slots(self, seq: int) -> 
list[bytes]:\n        return self._ack_state.build_ack_slots(seq)\n\n    def _parse_fragment_header(self, payload: bytes) -> Optional[tuple[int, int, int, int, int, bytes]]:\n        if len(payload) < 56 or payload[0] != 0x93 or payload[1] != 0x01:\n            return None\n\n        declared_len = int.from_bytes(payload[2:4], \"little\")\n        native_layout = declared_len == len(payload)\n        if native_layout:\n            frame_id = int.from_bytes(payload[8:16], \"little\")\n            frag_id = int.from_bytes(payload[32:36], \"little\")\n            fragment_total = int.from_bytes(payload[36:40], \"little\")\n            frame_body_len = int.from_bytes(payload[40:44], \"little\")\n            quality = payload[48]\n            if fragment_total > 0 and frag_id < fragment_total:\n                return frame_id, frag_id, fragment_total, frame_body_len, quality, payload[56:]\n\n        # Compatibility fallback for older captures/comments that only used\n        # 16-bit counters and inferred the last fragment from packet length.\n        frame_id = int.from_bytes(payload[16:18], \"little\")\n        frag_id = int.from_bytes(payload[32:34], \"little\")\n        # Legacy packets only reveal the total when the tail fragment arrives.\n        fragment_total = frag_id + 1 if payload[2] != 0x38 else 0\n        return frame_id, frag_id, fragment_total, 0, 0, payload[56:]\n\n    def _resolve_target_ports(self, control_port: int) -> tuple[int, ...]:\n        \"\"\"\n        The native UAVSDK starts the legacy backend on 8800 and the BL618\n        backend on 8801. For the explicit UAV/FLOW variant, probe both just as\n        nativeStart() does. 
The FLD/legacy path keeps the single-port behavior.\n        \"\"\"\n        if self.capabilities.transport == \"uav_dual_port\":\n            return (control_port, control_port + 1)\n        return (control_port,)\n\n    def _create_duplex_socket(self) -> socket.socket:\n        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\n        sock.bind((\"\", 0))          # let OS choose a free local port\n        sock.settimeout(1.0)\n        self._dbg(f\"Main UDP socket created, listening on *:{sock.getsockname()[1]}\")\n        return sock\n\n    def get_receiver_socket(self) -> socket.socket:\n        \"\"\"Returns the main socket for the receiver thread to use.\"\"\"\n        with self._sock_lock:\n            return self._sock\n\n    def set_rc_adapter(self, rc_adapter) -> None:\n        \"\"\"Provide the RC adapter with our shared UDP socket.\"\"\"\n        try:\n            rc_adapter.set_socket(self._sock)\n            self._dbg(\"[wifi-uav] RC adapter socket shared\")\n        except Exception:\n            # If the RC adapter is not ready or doesn't support socket injection,\n            # ignore and continue – the receiver loop will still function.\n            pass\n\n    # ------------------------------------------------------------------ #\n    # Receiver thread API expected by VideoReceiverService\n    # ------------------------------------------------------------------ #\n    def start(self) -> None:\n        if hasattr(self, \"_rx_thread\") and self._rx_thread and self._rx_thread.is_alive():\n            return\n        # Small frame buffer; upstream will drop if slow\n        self._frame_q: \"queue.Queue[VideoFrame]\" = queue.Queue(maxsize=2)\n        self._last_rx_ts = time.time()\n        with self._pkt_lock:\n            self._pkt_buffer = []\n\n        def _rx_loop() -> None:\n            sock = self.get_receiver_socket()\n            while self._running:\n                
try:\n                    payload = self.recv_from_socket(sock)\n                    if not payload:\n                        continue\n                    # Collect raw packet bytes for optional dumping\n                    with self._pkt_lock:\n                        self._pkt_buffer.append(payload)\n                    # Try to assemble a frame\n                    frame = self.handle_payload(payload)\n                    if frame is not None:\n                        try:\n                            self._frame_q.put(frame, timeout=0.2)\n                        except queue.Full:\n                            # Drop frame if consumer is slow\n                            pass\n                except OSError:\n                    # Socket likely closed during stop(); exit loop\n                    break\n                except Exception as e:\n                    self._dbg(f\"[wifi-uav] rx error: {e}\")\n                    continue\n\n        self._rx_thread = threading.Thread(target=_rx_loop, daemon=True, name=\"WifiUavVideoRx\")\n        self._rx_thread.start()\n\n        if not self._started_once:\n            self._started_once = True\n            # Kick off the stream only after the receiver is already draining\n            # the UDP socket. 
K417 responds very quickly and can otherwise dump\n            # the first frame burst before the RX loop is active.\n            self.send_start_command()\n            self._send_frame_request(0)\n            self._first_frame = True\n            self._warmup_thread = threading.Thread(\n                target=self._warmup_loop,\n                daemon=True,\n                name=\"Warmup\",\n            )\n            self._warmup_thread.start()\n\n    def is_running(self) -> bool:\n        rx_thread = getattr(self, \"_rx_thread\", None)\n        if not (self._running and rx_thread and rx_thread.is_alive()):\n            return False\n\n        now = time.time()\n        if self._stream_started:\n            return (now - self._last_rx_ts) < self.LINK_STALL_TIMEOUT\n\n        return (now - self._last_rx_ts) < self.INITIAL_STREAM_TIMEOUT\n\n    def get_frame(self, timeout: float = 1.0) -> Optional[VideoFrame]:\n        try:\n            frame = self._frame_q.get(timeout=timeout)\n            # mark warmup complete on first delivered frame\n            if getattr(self, \"_first_frame\", False):\n                self._first_frame = False\n            return frame\n        except queue.Empty:\n            return None\n\n    def get_packets(self) -> list[bytes]:\n        with self._pkt_lock:\n            packets = self._pkt_buffer\n            self._pkt_buffer = []\n            return packets\n\n    # ------------------------------------------------------------------ #\n    # watchdog\n    # ------------------------------------------------------------------ #\n    def _watchdog_loop(self) -> None:\n        \"\"\"\n        Runs in a daemon thread. 
If the current frame doesn't finish within\n        FRAME_TIMEOUT seconds, resend the request for that frame.\n        Link-level reconnection is handled by the VideoReceiverService.\n        \"\"\"\n        self._dbg(\"Watchdog started for per-frame timeouts.\")\n        while self._running:\n            time.sleep(self.WATCHDOG_SLEEP)\n            now = time.time()\n\n            if self._stream_started and now - self._last_rx_ts >= self.LINK_STALL_TIMEOUT:\n                self._dbg(\"[wifi-uav] link stalled; forcing adapter restart\")\n                self._running = False\n                self._first_frame = False\n                self._warmup_stop.set()\n                try:\n                    self._sock.close()\n                except OSError:\n                    pass\n                break\n\n            if now - self._last_req_ts < self.FRAME_TIMEOUT:\n                continue                    # still waiting → nothing to do\n\n            # ----------------------------------------------------------\n            # retry or drop?\n            # ----------------------------------------------------------\n            if self._retry_cnt < self.MAX_RETRIES:\n                self._dbg(f\"⚠ timeout FID {self._current_fid:04x} – retry \"\n                          f\"({self._retry_cnt +1}/{self.MAX_RETRIES})\")\n                self._send_frame_request(max(0, self._current_fid - 1))\n                self._retry_cnt += 1\n                self.retry_attempts += 1\n                self._had_retry = True\n            else:\n                self.frames_dropped += 1\n                self._dbg(f\"✗ drop   FID {self._current_fid:04x} \"\n                          f\"(after {self._retry_cnt} retries)  \"\n                          f\"OK:{self.frames_ok}  DROP:{self.frames_dropped}\")\n                self._ack_state.mark_dropped(self._current_fid)\n                self._retry_cnt  = 0\n                self._current_fid += 1\n                
self._send_frame_request(max(0, self._current_fid - 1))\n                self._had_retry = False\n\n    def stop(self) -> None:\n        \"\"\"Gracefully shut down the adapter and its threads.\"\"\"\n        self._dbg(f\"Stopping protocol adapter instance...\")\n        self._running = False\n        self._first_frame = False\n        self._warmup_stop.set()\n        for thread in (self._warmup_thread, self._watchdog, self._rx_thread):\n            try:\n                if thread and thread.is_alive():\n                    thread.join(timeout=0.5)\n            except Exception as e:\n                self._dbg(f\"Ignoring thread shutdown error: {e}\")\n        try:\n            self._sock.close()\n        except Exception as e:\n            self._dbg(f\"Ignoring socket shutdown error: {e}\")\n\n        self._dbg(\n            f\"[stats] ok:{self.frames_ok}  dropped:{self.frames_dropped}  \"\n            f\"retry_att:{self.retry_attempts}  retry_suc:{self.retry_successes}\"\n        )\n\n"
  },
  {
    "path": "backend/receive_video.py",
    "content": "import argparse\r\nimport ipaddress\r\nimport queue\r\nimport socket\r\nimport threading\r\nimport time\r\nimport os\r\nfrom datetime import datetime\r\n\r\nimport cv2\r\nimport numpy as np\r\n\r\n###############################################################################\r\n# Constants\r\n###############################################################################\r\nDRONE_IP        = \"172.16.10.1\"\r\nCONTROL_PORT    = 8080        # where we send the 5-byte \"start\" cmd\r\nVIDEO_PORT      = 8888        # where the drone sends JPEG slices\r\nSOI_MARKER      = b\"\\xFF\\xD8\"\r\nEOI_MARKER      = b\"\\xFF\\xD9\"\r\nSYNC_BYTES      = b\"\\x40\\x40\"\r\n\r\n# --------------------------------------------------------------------------- #\r\n# The drone's header (derived from packet dumps)\r\n#\r\n#   0   1   2   3   4   5   6  7    ← byte index\r\n# +---+---+---+---+---+---+---+---+\r\n# |40|40|FID| 2 |22|SID| 78 | 05    <- 0x40 0x40 | frame id | slice id | 78 05 (usually present) | \r\n# +---+---+---+---+---+---+---+---+\r\n#\r\n# • FID          … increments once per JPEG frame   (0-255, wraps)\r\n# • SID bits 0-3 … slice number inside that frame   (1,2,3…)\r\n#   SID bit  4   … LAST-SLICE flag (1 == this is the final fragment)\r\n#   SID bits 5-7 … always 0\r\n# • bytes 6 & 7    … static header piece (usually starts with 0x78 0x05 )\r\n# • bytes 8+      … payload\r\n# --------------------------------------------------------------------------- #\r\nHEADER_LEN = 8\r\n\r\n###############################################################################\r\n# Small helper to discover the IP address of the interface that can reach\r\n# 172.16.10.1  (works on Windows, macOS, Linux)\r\n###############################################################################\r\ndef discover_local_ip(remote_ip=DRONE_IP):\r\n    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\r\n    try:\r\n        # We never actually send anything – connect() is enough for 
the OS to\r\n        # choose the right outgoing interface.\r\n        s.connect((remote_ip, 1))\r\n        return s.getsockname()[0]\r\n    finally:\r\n        s.close()\r\n\r\n###############################################################################\r\n# 1. Control channel – send the 5-byte \"start video\" command\r\n###############################################################################\r\ndef send_start_command(drone_ip: str, my_ip: str):\r\n    \"\"\"\r\n    Build and send the 5-byte payload:\r\n        0x08 <my_ip as 4 bytes>\r\n    \"\"\"\r\n    payload = b\"\\x08\" + ipaddress.IPv4Address(my_ip).packed\r\n    with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock:\r\n        sock.sendto(payload, (drone_ip, CONTROL_PORT))\r\n    print(f\"[control] start-cmd sent   ({payload.hex(' ')})\")\r\n\r\nclass ControlKeepAlive(threading.Thread):\r\n    \"\"\"\r\n    Periodically re-send the start-video command so the drone keeps streaming.\r\n    \"\"\"\r\n    def __init__(self, drone_ip, my_ip, interval=1.0):\r\n        super().__init__(daemon=True)\r\n        self.drone_ip = drone_ip\r\n        self.my_ip    = my_ip\r\n        self.interval = interval\r\n        self._stop    = threading.Event()\r\n\r\n    def run(self):\r\n        while not self._stop.is_set():\r\n            send_start_command(self.drone_ip, self.my_ip)\r\n            time.sleep(self.interval)\r\n\r\n    def stop(self):\r\n        self._stop.set()\r\n\r\n###############################################################################\r\n# 2. 
Video receiver thread – re-assemble slices into full JPEG frames\r\n###############################################################################\r\nclass VideoReceiver(threading.Thread):\r\n    def __init__(\r\n        self,\r\n        frame_queue:  queue.Queue,\r\n        port:          int  = VIDEO_PORT,\r\n        dump_frames:   bool = False,\r\n        dump_packets:  bool = False,\r\n    ):\r\n        super().__init__(daemon=True)\r\n        self.frame_q      = frame_queue\r\n        self.port         = port\r\n        self.dump_frames  = dump_frames\r\n        self.dump_packets = dump_packets\r\n\r\n        # control flag for run()\r\n        self.running      = threading.Event()\r\n        self.running.set()\r\n\r\n        # assembly state\r\n        self._cur_fid     = None\r\n        self._fragments   = {}     # sid_raw:int -> payload:bytes\r\n\r\n        if self.dump_packets:\r\n            ts = int(time.time()*1000)\r\n            self._pktlog = open(f\"logged_packets_{ts}.bin\", \"wb\")\r\n\r\n    def stop(self):\r\n        self.running.clear()\r\n\r\n    def _reset_frame(self, new_fid):\r\n        \"\"\"Forget the old frame and start a fresh one.\"\"\"\r\n        self._cur_fid   = new_fid\r\n        self._fragments.clear()\r\n\r\n    def _finalise_frame(self, fid, fragments):\r\n        # 1) stitch slices together in ascending order\r\n        data = b\"\".join(fragments[i] for i in sorted(fragments))\r\n\r\n        # 2) find the real JPEG in the bytes\r\n        start = data.find(SOI_MARKER)\r\n        end   = data.rfind(EOI_MARKER)\r\n        if start < 0 or end < 0 or end <= start:\r\n            print(f\"[receiver] JPEG markers missing on frame {fid}\")\r\n            return\r\n\r\n        jpeg = data[start : end + 2]\r\n\r\n        # 3) dump & push\r\n        if self.dump_frames:\r\n            ts = int(time.time() * 1000)\r\n            with open(f\"frame_{fid:02x}_{ts}.jpg\", \"wb\") as f:\r\n                f.write(jpeg)\r\n\r\n        
print(f\"[receiver] frame {fid} complete – {len(jpeg)} bytes\")\r\n        self.frame_q.put(jpeg)\r\n\r\n    def run(self):\r\n        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\r\n        sock.bind((\"0.0.0.0\", self.port))\r\n        sock.settimeout(1.0)\r\n        print(f\"[receiver] listening on UDP/*:{self.port}\")\r\n\r\n        try:\r\n            while self.running.is_set():\r\n                try:\r\n                    pkt, addr = sock.recvfrom(2048)\r\n                except socket.timeout:\r\n                    continue\r\n\r\n                if self.dump_packets:\r\n                    self._pktlog.write(pkt)\r\n\r\n                # sanity‐check\r\n                if len(pkt) <= HEADER_LEN or pkt[:2] != SYNC_BYTES:\r\n                    continue\r\n\r\n                fid     = pkt[2]\r\n                sid_raw = pkt[5]\r\n                # if packet byte 7 and 8 are 0x78 and 0x05 respectively, then strip the 8 bytes\r\n                if pkt[6] == 0x78 and pkt[7] == 0x05:\r\n                    payload = pkt[8:]\r\n                else:\r\n                  payload = pkt[6:]\r\n\r\n                # strip trailing 0x23 0x23 if present\r\n                if payload.endswith(b\"\\x23\\x23\"):\r\n                    payload = payload[:-2]\r\n\r\n                if sid_raw % 20 == 0:   # throttle the spam\r\n                    head = payload[:8].hex()\r\n                    ascii_payload = payload[:8].decode('ascii', errors='replace')\r\n                    print(f\"[slice] FID=0x{fid:02x} SID={sid_raw:3d} \"\r\n                          f\"head={head} ascii={ascii_payload!r}\")\r\n\r\n                # new frame detected?\r\n                if self._cur_fid is None:\r\n                    self._reset_frame(fid)\r\n\r\n                elif fid != self._cur_fid:\r\n                    if self._fragments:\r\n                        keys = sorted(self._fragments)\r\n                        # simple completeness check\r\n                      
  if len(keys) == (keys[-1] - keys[0] + 1):\r\n                            self._finalise_frame(self._cur_fid, self._fragments)\r\n                        else:\r\n                            print(f\"[receiver] dropping frame {self._cur_fid}, \"\r\n                                  f\"slices {keys[0]}..{keys[-1]} missing \"\r\n                                  f\"{(keys[-1]-keys[0]+1) - len(keys)}\")\r\n\r\n                    self._reset_frame(fid)\r\n\r\n                # stash this slice (ignore dupes)\r\n                if sid_raw not in self._fragments:\r\n                    self._fragments[sid_raw] = payload\r\n\r\n        finally:\r\n            sock.close()\r\n            if self.dump_packets:\r\n                self._pktlog.close()\r\n            print(\"[receiver] stopped\")\r\n\r\n###############################################################################\r\n# 3. Display loop (main thread) – show frames with OpenCV\r\n###############################################################################\r\ndef display_frames(frame_q: queue.Queue):\r\n    cv2.namedWindow(\"Drone\", cv2.WINDOW_NORMAL)\r\n\r\n    # build a single placeholder image (black + red warning text)\r\n    placeholder_h, placeholder_w = 480, 640\r\n    placeholder = np.zeros((placeholder_h, placeholder_w, 3), np.uint8)\r\n    txt = \"No JPEG frames reconstructed yet\"\r\n    font, scale, th = cv2.FONT_HERSHEY_SIMPLEX, 0.6, 2\r\n    (tw, th_), _ = cv2.getTextSize(txt, font, scale, th)\r\n    x = (placeholder_w - tw) // 2\r\n    y = (placeholder_h + th_) // 2\r\n    cv2.putText(placeholder, txt, (x, y), font, scale, (0, 0, 255), th)\r\n\r\n    fps_timer = time.time()\r\n    frame_count = 0\r\n\r\n    while True:\r\n        jpeg = None\r\n        try:\r\n            jpeg = frame_q.get(timeout=1.0)\r\n        except queue.Empty:\r\n            pass\r\n\r\n        if jpeg is None:\r\n            img = placeholder\r\n            is_real = False\r\n        else:\r\n            arr = 
np.frombuffer(jpeg, dtype=np.uint8)\r\n            frame = cv2.imdecode(arr, cv2.IMREAD_COLOR)\r\n            if frame is None:\r\n                print(f\"[display] ⚠ imdecode failed ({len(arr)} bytes)\")\r\n                img, is_real = placeholder, False\r\n            else:\r\n                print(f\"[display] decoded frame {frame.shape}\")\r\n                img, is_real = frame, True\r\n\r\n        cv2.imshow(\"Drone\", img)\r\n        key = cv2.waitKey(1) & 0xFF\r\n        if key == ord(\"q\"):\r\n            break\r\n\r\n        # only count toward FPS when we had a real frame\r\n        if is_real:\r\n            frame_count += 1\r\n            if frame_count % 60 == 0:\r\n                now = time.time()\r\n                print(f\"[display] ~{frame_count/(now-fps_timer):4.1f} fps\")\r\n                fps_timer, frame_count = now, 0\r\n\r\n    cv2.destroyAllWindows()\r\n\r\n###############################################################################\r\n# Entry-point\r\n###############################################################################\r\ndef main():\r\n    parser = argparse.ArgumentParser(\r\n        description=\"Simple UDP/MJPEG client for FH-style drone\"\r\n    )\r\n    parser.add_argument(\"--drone-ip\",   default=DRONE_IP,  help=\"Drone host address\")\r\n    parser.add_argument(\"--video-port\", type=int,      default=VIDEO_PORT)\r\n    parser.add_argument(\"--control-port\", type=int,    default=CONTROL_PORT)\r\n    parser.add_argument(\r\n        \"--keepalive\", type=float, default=1.0,\r\n        help=\"Re-send start-video every N seconds\"\r\n    )\r\n    parser.add_argument(\r\n        \"--dump-frames\", action=\"store_true\",\r\n        help=\"Dump every reassembled JPEG to ./dumped_frames/\"\r\n    )\r\n    parser.add_argument(\r\n        \"--dump-packets\", action=\"store_true\",\r\n        help=\"Dump every raw UDP packet to ./dumped_packets/\"\r\n    )\r\n    args = parser.parse_args()\r\n\r\n    my_ip = 
discover_local_ip(args.drone_ip)\r\n    print(f\"[info] local IP that reaches the drone: {my_ip}\")\r\n\r\n    # 1. Tell the drone to start sending\r\n    send_start_command(args.drone_ip, my_ip)\r\n\r\n    # 2. Start keep-alive sender\r\n    keepalive = ControlKeepAlive(args.drone_ip, my_ip, interval=args.keepalive)\r\n    keepalive.start()\r\n\r\n    # 3. Start receiver thread (pass dump flags)\r\n    frame_q = queue.Queue(maxsize=100)\r\n    receiver = VideoReceiver(\r\n        frame_q,\r\n        port=args.video_port,\r\n        dump_frames=args.dump_frames,\r\n        dump_packets=args.dump_packets\r\n    )\r\n    receiver.start()\r\n\r\n    # 4. UI loop\r\n    try:\r\n        display_frames(frame_q)\r\n    finally:\r\n        print(\"[main] shutting down …\")\r\n        keepalive.stop()\r\n        receiver.stop()\r\n        receiver.join()\r\n\r\nif __name__ == \"__main__\":\r\n    main()\r\n"
  },
  {
    "path": "backend/remote_control.py",
    "content": "#!/usr/bin/env python3\r\nimport socket\r\nimport threading\r\nimport time\r\nimport argparse\r\nimport curses\r\n\r\nclass DroneController:\r\n    def __init__(self, drone_ip, control_port):\r\n        self.drone_ip     = drone_ip\r\n        self.control_port = control_port\r\n        self.sock         = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\r\n\r\n        # stick midpoints = 128.0\r\n        self.yaw      = 128.0\r\n        self.throttle = 128.0\r\n        self.pitch    = 128.0\r\n        self.roll     = 128.0\r\n\r\n        # one-shot flags\r\n        self.takeoff     = False\r\n        self.land        = False\r\n        self.stop        = False\r\n        self.headless    = False\r\n        self.calibration = False\r\n\r\n        # misc\r\n        self.speed   = 20    # matches 0x14 from dumps\r\n        self.record  = 0     # bit 2 in byte 7\r\n        self.rocker  = 0     # bit 3 in byte 7\r\n\r\n        self.running = True\r\n\r\n        # how fast to move stick inputs (units/sec)\r\n        self.accel_rate = 200.0\r\n        self.decel_rate = 300.0\r\n        \r\n        # Control range limits (instead of 0-255)\r\n        self.min_control_value = 60.0\r\n        self.max_control_value = 200.0\r\n        self.center_value = 128.0\r\n        \r\n        # Immediate response boost factor\r\n        self.immediate_response = 5.0\r\n        \r\n        # Exponential control factor (higher values = more aggressive response)\r\n        self.expo_factor = 0.8\r\n\r\n    def update_axes(self, dt, throttle_dir, yaw_dir, pitch_dir, roll_dir):\r\n        \"\"\"Apply acceleration or deceleration for each axis.\"\"\"\r\n        for attr, direction, boost_enabled in (\r\n            ('throttle', throttle_dir, False),\r\n            ('yaw',      yaw_dir,      False),\r\n            ('pitch',    pitch_dir,    True),  \r\n            ('roll',     roll_dir,     True),  # Enable boost for roll and pitch\r\n        ):\r\n            cur = 
getattr(self, attr)\r\n            \r\n            # Handle exponential control mapping\r\n            if direction > 0:\r\n                # Apply immediate boost on direction change\r\n                if boost_enabled and getattr(self, f\"last_{attr}_dir\", 0) <= 0:\r\n                    jump = min(self.max_control_value - cur, self.immediate_response)\r\n                    cur += jump\r\n                \r\n                # Calculate acceleration with exponential factor\r\n                distance_to_max = self.max_control_value - cur\r\n                accel = self.accel_rate * dt * (1 + self.expo_factor * distance_to_max / (self.max_control_value - self.center_value))\r\n                new = min(self.max_control_value, cur + accel)\r\n                \r\n            elif direction < 0:\r\n                # Apply immediate boost on direction change\r\n                if boost_enabled and getattr(self, f\"last_{attr}_dir\", 0) >= 0:\r\n                    jump = min(cur - self.min_control_value, self.immediate_response)\r\n                    cur -= jump\r\n                \r\n                # Calculate acceleration with exponential factor\r\n                distance_to_min = cur - self.min_control_value\r\n                accel = self.accel_rate * dt * (1 + self.expo_factor * distance_to_min / (self.center_value - self.min_control_value))\r\n                new = max(self.min_control_value, cur - accel)\r\n                \r\n            else:\r\n                # Return to center faster from extremes\r\n                if cur > self.center_value:\r\n                    # Exponential return to center\r\n                    distance_from_center = cur - self.center_value\r\n                    decel = self.decel_rate * dt * (1 + 0.5 * distance_from_center / (self.max_control_value - self.center_value))\r\n                    new = max(self.center_value, cur - decel)\r\n                elif cur < self.center_value:\r\n                    # Exponential return 
to center\r\n                    distance_from_center = self.center_value - cur\r\n                    decel = self.decel_rate * dt * (1 + 0.5 * distance_from_center / (self.center_value - self.min_control_value))\r\n                    new = min(self.center_value, cur + decel)\r\n                else:\r\n                    new = cur\r\n                    \r\n            # Store last direction for detecting direction changes\r\n            setattr(self, f\"last_{attr}_dir\", direction)\r\n            setattr(self, attr, new)\r\n\r\n    def remap_to_full_range(self, value):\r\n        \"\"\"Remap value from constrained range to full 0-255 range for sending to drone\"\"\"\r\n        if value >= self.center_value:\r\n            # Map center...max_control to 128...255\r\n            return 128.0 + (value - self.center_value) * (255.0 - 128.0) / (self.max_control_value - self.center_value)\r\n        else:\r\n            # Map min_control...center to 0...128\r\n            return (value - self.min_control_value) * 128.0 / (self.center_value - self.min_control_value)\r\n\r\n    def build_packet_hy(self):\r\n        pkt = bytearray(20)\r\n        pkt[0] = 0x66\r\n        pkt[1] = self.speed & 0xFF\r\n\r\n        # Cast floats back to ints with CORRECTED ORDER\r\n        # Remap from our constrained range to full 0-255 range\r\n        pkt[2] = int(self.remap_to_full_range(self.roll))     & 0xFF\r\n        pkt[3] = int(self.remap_to_full_range(self.pitch))    & 0xFF  \r\n        pkt[4] = int(self.remap_to_full_range(self.throttle)) & 0xFF\r\n        pkt[5] = int(self.remap_to_full_range(self.yaw))      & 0xFF\r\n\r\n        # FIXED: flags in byte 6 and 7 were reversed compared to mobile app\r\n        # Byte 6 should be 0x00\r\n        pkt[6] = 0x00\r\n        \r\n        # Handle one-shot flags\r\n        if self.takeoff:\r\n            pkt[6] |= 0x01\r\n        if self.land:\r\n            pkt[6] |= 0x02\r\n        if self.stop:\r\n            pkt[6] |= 0x04\r\n\r\n   
     # Byte 7 should be 0x0a\r\n        pkt[7] = 0x0a  # Base value is 0x0a\r\n        \r\n        # record flag\r\n        if self.record:\r\n            pkt[7] |= (self.record << 2)\r\n\r\n        # bytes 8-17 = 0 (zero-filled)\r\n\r\n        # checksum over bytes 2-17\r\n        chk = 0\r\n        for i in range(2, 18):\r\n            chk ^= pkt[i]\r\n        pkt[18] = chk & 0xFF\r\n        pkt[19] = 0x99\r\n\r\n        # clear one-shots\r\n        self.takeoff = self.land = self.stop = False\r\n\r\n        return pkt\r\n\r\n    def send_loop(self, interval=0.03):\r\n        # debug flag\r\n        self.debug_packets = False\r\n        packet_counter = 0\r\n        \r\n        while self.running:\r\n            buf = self.build_packet_hy()\r\n            self.sock.sendto(buf, (self.drone_ip, self.control_port))\r\n            \r\n            # Log packet details if debug is enabled\r\n            if self.debug_packets:\r\n                packet_counter += 1\r\n                \r\n                # Print full packet hex dump\r\n                hex_dump = ' '.join(f'{b:02x}' for b in buf)\r\n                print(f\"Packet #{packet_counter}: {hex_dump}\")\r\n                \r\n                # Print decoded controls\r\n                print(f\"  Controls: R:{buf[2]} P:{buf[3]} T:{buf[4]} Y:{buf[5]}\")\r\n                \r\n                # Print flags\r\n                flags6 = buf[6]\r\n                flags7 = buf[7]\r\n                flags_desc = []\r\n                if flags6 & 0x01: flags_desc.append(\"TAKEOFF\")\r\n                if flags6 & 0x02: flags_desc.append(\"LAND\")\r\n                if flags6 & 0x04: flags_desc.append(\"STOP\")\r\n                if flags7 & 0x01: flags_desc.append(\"HEADLESS\")\r\n                if flags7 & 0x04: flags_desc.append(\"RECORD\")\r\n                \r\n                print(f\"  Flags: {flags_desc}\")\r\n                print(f\"  Checksum: 0x{buf[18]:02x}\")\r\n                print()\r\n                
\r\n            time.sleep(interval)\r\n\r\n    def stop_loop(self):\r\n        self.running = False\r\n\r\n    def toggle_debug(self):\r\n        \"\"\"Toggle debug packet logging\"\"\"\r\n        self.debug_packets = not self.debug_packets\r\n        return self.debug_packets\r\n\r\n\r\ndef ui_loop(stdscr, controller):\r\n    curses.curs_set(0)\r\n    stdscr.nodelay(True)\r\n    stdscr.keypad(True)\r\n    help_msg = \"W/S=throttle  A/D=yaw  Arrows=pitch/roll  T=takeoff  L=land  Q=quit\"\r\n    help_msg2 = \"R=record  F=debug packets  X=sensitivity mode\"\r\n\r\n    # direction states and last-press timestamps\r\n    throttle_dir = yaw_dir = pitch_dir = roll_dir = 0\r\n    throttle_ts = yaw_ts = pitch_ts = roll_ts = 0.0\r\n    PRESS_THRESHOLD = 0.4  # threshold for key being held (increased from 0.2)\r\n\r\n    prev_time = time.time()\r\n    debug_enabled = False\r\n    sensitivity_mode = 0  # 0=normal, 1=precise, 2=aggressive\r\n    sensitivity_labels = [\"Normal\", \"Precise\", \"Aggressive\"]\r\n\r\n    while controller.running:\r\n        now = time.time()\r\n        dt  = now - prev_time\r\n        prev_time = now\r\n\r\n        c = stdscr.getch()\r\n        if c in (ord('q'), ord('Q')):\r\n            controller.stop_loop()\r\n            break\r\n\r\n        elif c in (ord('t'), ord('T')):\r\n            controller.takeoff = True\r\n        elif c in (ord('l'), ord('L')):\r\n            controller.land = True\r\n        elif c in (ord('r'), ord('R')):\r\n            controller.record = 1 if controller.record == 0 else 0\r\n        elif c in (ord('f'), ord('F')):\r\n            debug_enabled = controller.toggle_debug()\r\n        elif c in (ord('x'), ord('X')):\r\n            # Toggle sensitivity mode\r\n            sensitivity_mode = (sensitivity_mode + 1) % 3\r\n            \r\n            # Apply sensitivity settings\r\n            if sensitivity_mode == 0:  # Normal (using previous Precise settings)\r\n                controller.accel_rate = 150.0\r\n    
            controller.decel_rate = 350.0\r\n                controller.expo_factor = 0.5\r\n                controller.immediate_response = 3.0\r\n            elif sensitivity_mode == 1:  # Very Precise (even slower/more precise)\r\n                controller.accel_rate = 100.0\r\n                controller.decel_rate = 400.0\r\n                controller.expo_factor = 0.3\r\n                controller.immediate_response = 1.5\r\n            else:  # Aggressive (unchanged)\r\n                controller.accel_rate = 300.0\r\n                controller.decel_rate = 280.0\r\n                controller.expo_factor = 1.5\r\n                controller.immediate_response = 15.0\r\n\r\n        # throttle\r\n        elif c in (ord('w'), ord('W')):\r\n            throttle_dir = +1; throttle_ts = now\r\n        elif c in (ord('s'), ord('S')):\r\n            throttle_dir = -1; throttle_ts = now\r\n\r\n        # yaw\r\n        elif c in (ord('a'), ord('A')):\r\n            yaw_dir = -1; yaw_ts = now\r\n        elif c in (ord('d'), ord('D')):\r\n            yaw_dir = +1; yaw_ts = now\r\n\r\n        # pitch\r\n        elif c == curses.KEY_UP:\r\n            pitch_dir = +1; pitch_ts = now\r\n        elif c == curses.KEY_DOWN:\r\n            pitch_dir = -1; pitch_ts = now\r\n\r\n        # roll\r\n        elif c == curses.KEY_LEFT:\r\n            roll_dir = -1; roll_ts = now\r\n        elif c == curses.KEY_RIGHT:\r\n            roll_dir = +1; roll_ts = now\r\n\r\n        # decide if each axis is \"still held\"\r\n        active_throttle = throttle_dir if (now - throttle_ts) < PRESS_THRESHOLD else 0\r\n        active_yaw      = yaw_dir      if (now - yaw_ts)      < PRESS_THRESHOLD else 0\r\n        active_pitch    = pitch_dir    if (now - pitch_ts)    < PRESS_THRESHOLD else 0\r\n        active_roll     = roll_dir     if (now - roll_ts)     < PRESS_THRESHOLD else 0\r\n\r\n        # apply acceleration / deceleration\r\n        controller.update_axes(\r\n            dt,\r\n            
active_throttle,\r\n            active_yaw,\r\n            active_pitch,\r\n            active_roll\r\n        )\r\n\r\n        # Update the UI\r\n        stdscr.clear()\r\n        stdscr.addstr(0, 0,\r\n            f\"Throttle: {int(controller.throttle):3d}    \"\r\n            f\"Yaw:      {int(controller.yaw):3d}\")\r\n        stdscr.addstr(1, 0,\r\n            f\" Pitch:   {int(controller.pitch):3d}    \"\r\n            f\"Roll:     {int(controller.roll):3d}\")\r\n            \r\n        # Add status flags to UI\r\n        status_flags = [f\"Mode: {sensitivity_labels[sensitivity_mode]}\"]\r\n        if controller.record: status_flags.append(\"RECORD\")\r\n        if debug_enabled: status_flags.append(\"DEBUG\")\r\n        status_str = \" | \".join(status_flags)\r\n        stdscr.addstr(2, 0, f\"Status: {status_str}\")\r\n            \r\n        stdscr.addstr(4, 0, help_msg)\r\n        stdscr.addstr(5, 0, help_msg2)\r\n        stdscr.refresh()\r\n\r\n        # small sleep to cap UI frame-rate\r\n        time.sleep(0.02)\r\n\r\n\r\ndef main():\r\n    parser = argparse.ArgumentParser(description=\"FH-drone teleop interface\")\r\n    parser.add_argument(\"--drone-ip\",    type=str, default=\"172.16.10.1\", help=\"Drone UDP IP address\")\r\n    parser.add_argument(\"--control-port\", type=int, default=8080, help=\"Drone control port\")\r\n    parser.add_argument(\"--rate\",         type=float, default=20.0, help=\"Control packets per second\")\r\n    args = parser.parse_args()\r\n\r\n    controller = DroneController(args.drone_ip, args.control_port)\r\n    sender = threading.Thread(\r\n        target=controller.send_loop,\r\n        args=(1.0 / args.rate,),\r\n        daemon=True\r\n    )\r\n    sender.start()\r\n\r\n    try:\r\n        curses.wrapper(ui_loop, controller)\r\n    except KeyboardInterrupt:\r\n        pass\r\n\r\n    controller.stop_loop()\r\n    sender.join()\r\n\r\n\r\nif __name__ == \"__main__\":\r\n    main()\r\n"
  },
  {
    "path": "backend/requirements.txt",
    "content": "# Numerical array handling\r\nnumpy>=1.19.2\r\n\r\n# OpenCV bindings (for cv2)\r\nopencv-python>=4.5.1.48\r\n\r\n# FastAPI\r\nfastapi\r\n\r\n# Uvicorn\r\nuvicorn[standard]\r\n\r\n# Python-multipart\r\npython-multipart\r\n\r\n# Starlette\r\nstarlette\r\n\r\n# python-dotenv\r\npython-dotenv\r\n\r\n# YOLOv10\r\nultralytics\r\n"
  },
  {
    "path": "backend/services/__init__.py",
    "content": "from .flight_controller import FlightController\r\n\r\n__all__ = ['FlightController']\r\n\r\n"
  },
  {
    "path": "backend/services/flight_controller.py",
    "content": "import threading\r\nimport time\r\nimport os\r\nimport logging\r\n\r\nlogger = logging.getLogger(__name__)\r\n\r\nclass FlightController:\r\n    \"\"\"Core service that manages drone flight operations\"\"\"\r\n    \r\n    def __init__(self, drone_model, protocol_adapter, update_rate=80.0):\r\n        self.model = drone_model\r\n        self.protocol = protocol_adapter\r\n        self.update_interval = 1.0 / update_rate\r\n        self.running = True\r\n        \r\n        # Input state\r\n        self.throttle_dir = 0\r\n        self.yaw_dir = 0\r\n        self.pitch_dir = 0\r\n        self.roll_dir = 0\r\n\r\n        # Logging / diagnostics\r\n        self.last_control_source = \"init\"\r\n        self._last_log_time = 0.0\r\n        # When enabled, logs control state at DEBUG level (so it is still quiet\r\n        # unless LOG_LEVEL=DEBUG).\r\n        self.log_controls = os.getenv(\"FLIGHT_LOG_CONTROLS\", \"true\").lower() in (\"1\", \"true\", \"yes\", \"on\")\r\n        \r\n    def start(self):\r\n        \"\"\"Start the control thread\"\"\"\r\n        self.control_thread = threading.Thread(target=self._control_loop)\r\n        self.control_thread.daemon = True\r\n        self.control_thread.start()\r\n        \r\n    def stop(self):\r\n        \"\"\"Stop the control thread\"\"\"\r\n        self.running = False\r\n        if hasattr(self, 'control_thread'):\r\n            self.control_thread.join(timeout=1.0)\r\n        \r\n        if hasattr(self.protocol, 'stop'):\r\n            self.protocol.stop()\r\n            \r\n    def set_control_direction(self, control, direction):\r\n        \"\"\"Set control direction (-1, 0, 1)\"\"\"\r\n        if control == 'throttle':\r\n            self.throttle_dir = direction\r\n        elif control == 'yaw':\r\n            self.yaw_dir = direction\r\n        elif control == 'pitch':\r\n            self.pitch_dir = direction  \r\n        elif control == 'roll':\r\n            self.roll_dir = direction\r\n       
     \r\n    def set_axes(self, throttle: float, yaw: float, pitch: float, roll: float) -> None:\r\n        \"\"\"\r\n        Atomically update all four stick directions.\r\n        Each value is expected in the range [-1.0 … +1.0].\r\n        \"\"\"\r\n        self.set_axes_from(\"unknown\", throttle, yaw, pitch, roll)\r\n\r\n    def set_axes_from(self, source: str, throttle: float, yaw: float, pitch: float, roll: float) -> None:\r\n        \"\"\"Same as set_axes, but records the control source and logs.\"\"\"\r\n        self.throttle_dir = max(-1.0, min(1.0, throttle))\r\n        self.yaw_dir      = max(-1.0, min(1.0, yaw))\r\n        self.pitch_dir    = max(-1.0, min(1.0, pitch))\r\n        self.roll_dir     = max(-1.0, min(1.0, roll))\r\n        self.last_control_source = source\r\n\r\n        # Optional immediate debug of inbound commands\r\n        try:\r\n            if getattr(self, \"debug_set_axes\", False) or self.log_controls:\r\n                state = {}\r\n                try:\r\n                    state = self.model.get_control_state()\r\n                except Exception:\r\n                    pass\r\n                logger.debug(\r\n                    \"[RC-In] src=%-8s norm T:%+.2f Y:%+.2f P:%+.2f R:%+.2f | raw T:%s Y:%s P:%s R:%s\",\r\n                    source,\r\n                    self.throttle_dir,\r\n                    self.yaw_dir,\r\n                    self.pitch_dir,\r\n                    self.roll_dir,\r\n                    state.get(\"throttle\"),\r\n                    state.get(\"yaw\"),\r\n                    state.get(\"pitch\"),\r\n                    state.get(\"roll\"),\r\n                )\r\n        except Exception:\r\n            pass\r\n            \r\n    def _control_loop(self):\r\n        \"\"\"Background thread for sending control updates\"\"\"\r\n        prev_time = time.time()\r\n        \r\n        while self.running:\r\n            now = time.time()\r\n            dt = now - prev_time\r\n            
prev_time = now\r\n            \r\n            # Update drone controls based on input directions\r\n            self.model.update(dt, {\r\n                \"throttle\": self.throttle_dir,   # still -1…+1\r\n                \"yaw\":      self.yaw_dir,\r\n                \"pitch\":    self.pitch_dir,\r\n                \"roll\":     self.roll_dir,\r\n            })\r\n            \r\n            # Build and send packet\r\n            try:\r\n                packet = self.protocol.build_control_packet(self.model)\r\n                self.protocol.send_control_packet(packet)\r\n            except Exception:\r\n                # The RC socket may be momentarily unavailable while the\r\n                # video layer is recreating its socket. Ignore and keep\r\n                # looping – a fresh socket will be injected shortly.\r\n                pass\r\n            \r\n            # Sleep to maintain update rate\r\n            time.sleep(self.update_interval)\r\n\r\n            # Periodic state log (shows current model raw sticks after update)\r\n            if self.log_controls:\r\n                if now - self._last_log_time >= 0.5:\r\n                    try:\r\n                        state = self.model.get_control_state()\r\n                        strategy = getattr(self.model, \"strategy\", None)\r\n                        strat_name = strategy.__class__.__name__ if strategy else \"(none)\"\r\n                        logger.debug(\r\n                            \"[RC-Loop] src=%-8s norm T:%+.2f Y:%+.2f P:%+.2f R:%+.2f | raw T:%s Y:%s P:%s R:%s | strat=%s\",\r\n                            self.last_control_source,\r\n                            self.throttle_dir,\r\n                            self.yaw_dir,\r\n                            self.pitch_dir,\r\n                            self.roll_dir,\r\n                            state.get(\"throttle\"),\r\n                            state.get(\"yaw\"),\r\n                            state.get(\"pitch\"),\r\n       
                     state.get(\"roll\"),\r\n                            strat_name,\r\n                        )\r\n                    except Exception:\r\n                        pass\r\n                    self._last_log_time = now"
  },
  {
    "path": "backend/services/video_receiver.py",
    "content": "import queue\r\nimport socket\r\nimport threading\r\nimport time\r\nimport os\r\nimport logging\r\n\r\nfrom utils.dropping_queue import DroppingQueue\r\nfrom models.video_frame import VideoFrame\r\n\r\nlogger = logging.getLogger(__name__)\r\n\r\n\r\nclass VideoReceiverService:\r\n    \"\"\"\r\n    Creates and manages a protocol adapter, destroying and recreating\r\n    it from scratch if the connection is lost, per the user's experiment.\r\n    \"\"\"\r\n\r\n    def __init__(\r\n        self,\r\n        protocol_adapter_class,\r\n        protocol_adapter_args,\r\n        frame_queue=None,\r\n        max_queue_size=100,\r\n        dump_frames=False,\r\n        dump_packets=False,\r\n        dump_dir=None,\r\n        rc_adapter=None,\r\n    ):\r\n        self.protocol_adapter_class = protocol_adapter_class\r\n        self.protocol_adapter_args = protocol_adapter_args\r\n        self.frame_queue = frame_queue or DroppingQueue(maxsize=max_queue_size)\r\n        self.dump_frames = dump_frames\r\n        self.dump_packets = dump_packets\r\n        self.rc_adapter = rc_adapter\r\n\r\n        self.protocol = None # Will be managed in the receiver loop\r\n\r\n        if dump_frames or dump_packets:\r\n            self.dump_dir = dump_dir or f\"dumps_{int(time.time())}\"\r\n            os.makedirs(self.dump_dir, exist_ok=True)\r\n        if self.dump_packets:\r\n            ts = int(time.time() * 1000)\r\n            self._pktlog = open(\r\n                os.path.join(self.dump_dir, f\"packets_{ts}.bin\"), \"wb\"\r\n            )\r\n\r\n        self._running = threading.Event()\r\n        self._receiver_thread = None\r\n\r\n    # ────────── lifecycle ────────── #\r\n    def start(self) -> None:\r\n        if self._receiver_thread and self._receiver_thread.is_alive():\r\n            return\r\n\r\n        self._running.set()\r\n        self._receiver_thread = threading.Thread(\r\n            target=self._receiver_loop, name=\"VideoReceiver\", daemon=True\r\n   
     )\r\n        self._receiver_thread.start()\r\n\r\n    def stop(self) -> None:\r\n        self._running.clear()\r\n        if self.protocol:\r\n            self.protocol.stop()\r\n\r\n        if self._receiver_thread and self._receiver_thread.is_alive():\r\n            self._receiver_thread.join(timeout=1.0)\r\n        \r\n        if self.dump_packets and self._pktlog:\r\n            self._pktlog.close()\r\n\r\n    # ────────── stream access ────────── #\r\n    def get_frame_queue(self) -> queue.Queue:\r\n        return self.frame_queue\r\n    \r\n    # ────────── receiver loop ────────── #\r\n    def _receiver_loop(self) -> None:\r\n        \"\"\"\r\n        Manages the lifecycle of the video protocol adapter. If the connection\r\n        is lost, it will be destroyed and a new one will be created.\r\n        \"\"\"\r\n        while self._running.is_set():\r\n            try:\r\n                # 1. Create a new protocol instance\r\n                self.protocol = self.protocol_adapter_class(\r\n                    **self.protocol_adapter_args\r\n                )\r\n                \r\n                if self.rc_adapter and hasattr(self.protocol, \"set_rc_adapter\"):\r\n                    self.protocol.set_rc_adapter(self.rc_adapter)\r\n\r\n                # 2. Start the protocol's receiver loop\r\n                self.protocol.start()\r\n\r\n                # 3. 
Frame processing loop\r\n                frame_idx = 0\r\n                while self._running.is_set() and self.protocol.is_running():\r\n                    try:\r\n                        frame = self.protocol.get_frame(timeout=1.0)\r\n                        if frame:\r\n                            self.frame_queue.put(frame)\r\n\r\n                            if self.dump_frames:\r\n                                frame_idx += 1\r\n                                self._dump_frame(frame, frame_idx)\r\n                        \r\n                        # Packets are dumped inside the protocol adapter\r\n                        if self.dump_packets:\r\n                            packets = self.protocol.get_packets()\r\n                            for p in packets:\r\n                                self._pktlog.write(p)\r\n                                self._pktlog.flush()\r\n\r\n                    except queue.Empty:\r\n                        continue # Normal, just means no frame was ready\r\n                    except Exception as e:\r\n                        logger.warning(\"[VideoReceiverService] Error processing frame: %s\", e)\r\n                        break\r\n\r\n            except socket.error as e:\r\n                logger.warning(\"[VideoReceiverService] Socket error: %s. 
Reconnecting...\", e)\r\n            except Exception as e:\r\n                logger.exception(\"[VideoReceiverService] Unexpected error: %s\", e)\r\n                # Optionally, decide if you want to stop the whole loop on certain errors\r\n                # self.stop()\r\n                # break\r\n\r\n            finally:\r\n                # Cleanup before the next iteration\r\n                if self.protocol:\r\n                    self.protocol.stop()\r\n                    self.protocol = None\r\n\r\n            # Wait before attempting to reconnect\r\n            if self._running.is_set():\r\n                logger.debug(\"[VideoReceiverService] Waiting 5 seconds before reconnecting...\")\r\n                time.sleep(5)\r\n\r\n        logger.debug(\"[VideoReceiverService] Receiver loop has stopped.\")\r\n\r\n    # ────────── frame dumping ────────── #\r\n    def _dump_frame(self, frame: \"VideoFrame | bytes | bytearray | memoryview\", frame_idx: int) -> None:\r\n        \"\"\"\r\n        Saves a frame to the file system in the dump directory.\r\n\r\n        Note: the receiver loop deals in `VideoFrame` objects; we persist the\r\n        underlying encoded bytes (`VideoFrame.data`), not the object itself.\r\n        \"\"\"\r\n        if isinstance(frame, VideoFrame):\r\n            frame_bytes = frame.data\r\n            ext = \"jpg\" if getattr(frame, \"format\", None) in (None, \"jpeg\", \"jpg\") else str(frame.format)\r\n        else:\r\n            frame_bytes = frame\r\n            ext = \"jpg\"\r\n\r\n        if not isinstance(frame_bytes, (bytes, bytearray, memoryview)):\r\n            raise TypeError(f\"Expected frame bytes, got {type(frame_bytes).__name__}\")\r\n\r\n        filename = os.path.join(self.dump_dir, f\"frame_{frame_idx:04d}.{ext}\")\r\n        try:\r\n            with open(filename, \"wb\") as f:\r\n                f.write(frame_bytes)\r\n        except Exception as e:\r\n            logger.warning(\"[VideoReceiverService] Error 
dumping frame: %s\", e)\r\n"
  },
  {
    "path": "backend/tests/test_cooingdv_jieli_ctp.py",
    "content": "import unittest\r\n\r\nfrom utils.cooingdv_jieli_ctp import build_ctp_packet, parse_ctp_packet\r\nfrom protocols.cooingdv_jieli_rc_protocol_adapter import CooingdvJieliRcProtocolAdapter\r\nfrom models.cooingdv_rc import CooingdvRcModel\r\n\r\n\r\nclass CooingdvJieliCtpTests(unittest.TestCase):\r\n    def test_builds_little_endian_ctp_packet(self):\r\n        packet = build_ctp_packet(\"CONTROL_MODE\", {\"state\": \"1\"})\r\n\r\n        self.assertTrue(packet.startswith(b\"CTP:\"))\r\n        topic_len = int.from_bytes(packet[4:6], \"little\")\r\n        self.assertEqual(topic_len, len(\"CONTROL_MODE\"))\r\n        self.assertEqual(packet[6 : 6 + topic_len], b\"CONTROL_MODE\")\r\n\r\n        topic, payload = parse_ctp_packet(packet)\r\n        self.assertEqual(topic, \"CONTROL_MODE\")\r\n        self.assertEqual(payload, {\"op\": \"PUT\", \"param\": {\"state\": \"1\"}})\r\n\r\n    def test_flying_control_payload_preserves_tc_bytes(self):\r\n        model = CooingdvRcModel()\r\n        model.roll = 128\r\n        model.pitch = 129\r\n        model.throttle = 130\r\n        model.yaw = 131\r\n        model.stop_flag = True\r\n\r\n        adapter = CooingdvJieliRcProtocolAdapter.__new__(CooingdvJieliRcProtocolAdapter)\r\n        payload = adapter._build_flying_payload(model)\r\n\r\n        expected_checksum = 128 ^ 129 ^ 130 ^ 131 ^ 0x04\r\n        self.assertEqual(payload, [102, 128, 129, 130, 131, 4, expected_checksum, 153])\r\n\r\n        packet = build_ctp_packet(\"FLYING_CTRL\", {f\"BYTE{i}\": str(v) for i, v in enumerate(payload)})\r\n        topic, body = parse_ctp_packet(packet)\r\n        self.assertEqual(topic, \"FLYING_CTRL\")\r\n        self.assertEqual(body[\"op\"], \"PUT\")\r\n        self.assertEqual(body[\"param\"][\"BYTE0\"], \"102\")\r\n        self.assertEqual(body[\"param\"][\"BYTE7\"], \"153\")\r\n\r\n\r\nif __name__ == \"__main__\":\r\n    unittest.main()\r\n"
  },
  {
    "path": "backend/tests/test_s2x_protocol.py",
    "content": "import unittest\n\nfrom models.s2x_rc import S2xDroneModel\nfrom protocols.s2x_rc_protocol_adapter import S2xRCProtocolAdapter\n\n\nclass S2xRcProtocolTests(unittest.TestCase):\n    def _adapter(self):\n        adapter = S2xRCProtocolAdapter.__new__(S2xRCProtocolAdapter)\n        adapter.swap_yaw_roll = False\n        adapter.speed_scale_by_index = {\n            0: 0.7,\n            1: 0.8,\n            2: 1.0,\n        }\n        return adapter\n\n    def test_default_packet_matches_macrochip_hy_layout(self):\n        model = S2xDroneModel()\n        model.roll = 128\n        model.pitch = 129\n        model.throttle = 130\n        model.yaw = 131\n        model.takeoff_flag = True\n        adapter = self._adapter()\n\n        packet = adapter.build_control_packet(model)\n\n        self.assertEqual(len(packet), 20)\n        self.assertEqual(packet[:8], bytes([0x66, 0x14, 128, 129, 131, 133, 0x01, 0x0A]))\n        self.assertEqual(packet[18], 128 ^ 129 ^ 131 ^ 133 ^ 0x01 ^ 0x0A)\n        self.assertEqual(packet[19], 0x99)\n        self.assertFalse(model.takeoff_flag)\n\n    def test_swap_yaw_roll_changes_transmitted_axes(self):\n        model = S2xDroneModel()\n        model.roll = 60\n        model.pitch = 128\n        model.throttle = 128\n        model.yaw = 200\n        adapter = self._adapter()\n        adapter.swap_yaw_roll = True\n\n        packet = adapter.build_control_packet(model)\n\n        self.assertEqual(packet[2], 255)\n        self.assertEqual(packet[5], 0)\n\n    def test_speed_index_scales_roll_and_pitch_only(self):\n        model = S2xDroneModel()\n        model.roll = 200\n        model.pitch = 60\n        model.throttle = 200\n        model.yaw = 60\n        model.set_speed_index(0)\n        adapter = self._adapter()\n\n        packet = adapter.build_control_packet(model)\n\n        self.assertEqual(packet[2], 216)\n        self.assertEqual(packet[3], 38)\n        self.assertEqual(packet[4], 255)\n        
self.assertEqual(packet[5], 0)\n\n\nif __name__ == \"__main__\":\n    unittest.main()\n"
  },
  {
    "path": "backend/tests/test_s2x_video_protocol.py",
    "content": "import unittest\n\nfrom models.s2x_video_model import S2xVideoModel\nfrom protocols.s2x_video_protocol import S2xVideoProtocolAdapter\n\n\nclass S2xVideoProtocolTests(unittest.TestCase):\n    def _adapter(self):\n        adapter = S2xVideoProtocolAdapter.__new__(S2xVideoProtocolAdapter)\n        adapter.model = S2xVideoModel()\n        return adapter\n\n    def _packet(self, frame_id: int, total_chunks: int, chunk_id: int, body: bytes) -> bytes:\n        payload = (\n            b\"\\x40\\x40\"\n            + frame_id.to_bytes(2, \"little\")\n            + bytes([total_chunks, chunk_id])\n        )\n        packet_len = 8 + len(body) + 2\n        return payload + packet_len.to_bytes(2, \"little\") + body + b\"##\"\n\n    def test_emits_frame_when_all_declared_chunks_arrive(self):\n        adapter = self._adapter()\n        first = self._packet(0x1234, 2, 0, b\"\\xff\\xd8hello\")\n        second = self._packet(0x1234, 2, 1, b\" world\\xff\\xd9\")\n\n        self.assertIsNone(adapter.handle_payload(first))\n        frame = adapter.handle_payload(second)\n\n        self.assertIsNotNone(frame)\n        self.assertEqual(frame.frame_id, 0x1234)\n        self.assertEqual(frame.data, b\"\\xff\\xd8hello world\\xff\\xd9\")\n        self.assertEqual(frame.format, \"jpeg\")\n\n    def test_accepts_out_of_order_chunks(self):\n        adapter = self._adapter()\n        second = self._packet(7, 2, 1, b\" tail\\xff\\xd9\")\n        first = self._packet(7, 2, 0, b\"\\xff\\xd8head\")\n\n        self.assertIsNone(adapter.handle_payload(second))\n        frame = adapter.handle_payload(first)\n\n        self.assertIsNotNone(frame)\n        self.assertEqual(frame.data, b\"\\xff\\xd8head tail\\xff\\xd9\")\n\n    def test_rejects_mismatched_declared_length(self):\n        adapter = self._adapter()\n        packet = bytearray(self._packet(1, 1, 0, b\"\\xff\\xd8x\\xff\\xd9\"))\n        packet[6:8] = (len(packet) + 1).to_bytes(2, \"little\")\n\n        
self.assertIsNone(adapter.handle_payload(bytes(packet)))\n\n\nif __name__ == \"__main__\":\n    unittest.main()\n"
  },
  {
    "path": "backend/tests/test_wifi_cam_protocol.py",
    "content": "import unittest\r\n\r\nfrom models.wifi_cam_rc import WifiCamRcModel\r\nfrom protocols.wifi_cam_rc_protocol_adapter import WifiCamRcProtocolAdapter\r\nfrom protocols.wifi_cam_video_protocol import WifiCamVideoProtocolAdapter\r\n\r\n\r\nclass WifiCamRcProtocolTests(unittest.TestCase):\r\n    def _adapter(self, mode=\"short\"):\r\n        adapter = WifiCamRcProtocolAdapter.__new__(WifiCamRcProtocolAdapter)\r\n        adapter.command_mode = mode\r\n        adapter.camera_type = 0\r\n        return adapter\r\n\r\n    def test_short_packet_matches_base_cmd_layout(self):\r\n        model = WifiCamRcModel()\r\n        model.roll = 128\r\n        model.pitch = 129\r\n        model.throttle = 130\r\n        model.yaw = 131\r\n        model.takeoff_flag = True\r\n        adapter = self._adapter(\"short\")\r\n\r\n        packet = adapter.build_control_packet(model)\r\n\r\n        expected_checksum = 128 ^ 129 ^ 130 ^ 131 ^ 0x01\r\n        self.assertEqual(packet, bytes([0x66, 128, 129, 130, 131, 0x01, expected_checksum, 0x99]))\r\n        self.assertFalse(model.takeoff_flag)\r\n\r\n    def test_short_packet_escapes_marker_checksum(self):\r\n        model = WifiCamRcModel()\r\n        model.roll = 128\r\n        model.pitch = 128\r\n        model.throttle = 128\r\n        model.yaw = 230\r\n        adapter = self._adapter(\"short\")\r\n\r\n        packet = adapter.build_control_packet(model)\r\n\r\n        self.assertEqual(packet[6], 0x67)\r\n\r\n    def test_extended_packet_uses_camera_type_two_in_auto_mode(self):\r\n        model = WifiCamRcModel()\r\n        model.roll = 128\r\n        model.pitch = 129\r\n        model.throttle = 130\r\n        model.yaw = 131\r\n        model.land_flag = True\r\n        model.stop_flag = True\r\n        model.headless_flag = True\r\n        adapter = self._adapter(\"auto\")\r\n        adapter.set_camera_type(2)\r\n\r\n        packet = adapter.build_control_packet(model)\r\n\r\n        self.assertEqual(len(packet), 20)\r\n  
      self.assertEqual(packet[:8], bytes([0x66, 0x14, 128, 129, 130, 131, 0x03, 0x01]))\r\n        self.assertEqual(packet[18], 128 ^ 129 ^ 130 ^ 131 ^ 0x03 ^ 0x01)\r\n        self.assertEqual(packet[19], 0x99)\r\n        self.assertFalse(model.land_flag)\r\n        self.assertFalse(model.stop_flag)\r\n\r\n\r\nclass FakeRcAdapter:\r\n    def __init__(self):\r\n        self.camera_type = None\r\n\r\n    def set_camera_type(self, camera_type):\r\n        self.camera_type = camera_type\r\n\r\n\r\nclass WifiCamVideoProtocolTests(unittest.TestCase):\r\n    def test_camera_type_probe_updates_rc_adapter(self):\r\n        adapter = WifiCamVideoProtocolAdapter()\r\n        rc_adapter = FakeRcAdapter()\r\n        adapter.set_rc_adapter(rc_adapter)\r\n\r\n        frame = adapter.handle_payload(b\"\\x55\\x00\\x02\\x00\\x00\\x00\\x02\\x99\")\r\n\r\n        self.assertIsNone(frame)\r\n        self.assertEqual(adapter.camera_type, 2)\r\n        self.assertEqual(rc_adapter.camera_type, 2)\r\n\r\n    def test_single_chunk_jpeg_frame(self):\r\n        adapter = WifiCamVideoProtocolAdapter()\r\n        jpeg = b\"\\xff\\xd8hello\\xff\\xd9\"\r\n        packet = bytes([1, 1, 1, 8, 0, 0, 0, 0]) + jpeg\r\n\r\n        frame = adapter.handle_payload(packet)\r\n\r\n        self.assertIsNotNone(frame)\r\n        self.assertEqual(frame.data, jpeg)\r\n        self.assertEqual(frame.format, \"jpeg\")\r\n        self.assertEqual(frame.resolution, 8)\r\n        self.assertEqual(frame.retain, 0)\r\n\r\n    def test_multi_chunk_jpeg_frame_uses_native_final_marker(self):\r\n        adapter = WifiCamVideoProtocolAdapter()\r\n        first_chunk = b\"\\xff\\xd8\" + (b\"a\" * (adapter.CHUNK_SIZE - 2))\r\n        second_chunk = b\"tail\\xff\\xd9\"\r\n        packet1 = bytes([7, 0, 2, 8, 0, 0, 0, 1]) + first_chunk\r\n        packet2 = bytes([7, 1, 2, 8, 0, 0, 0, 1]) + second_chunk\r\n\r\n        self.assertIsNone(adapter.handle_payload(packet1))\r\n        frame = adapter.handle_payload(packet2)\r\n\r\n   
     self.assertIsNotNone(frame)\r\n        self.assertEqual(frame.data, first_chunk + second_chunk)\r\n        self.assertEqual(frame.retain, 1)\r\n\r\n    def test_rejects_final_chunk_without_jpeg_tail(self):\r\n        adapter = WifiCamVideoProtocolAdapter()\r\n        packet = bytes([1, 1, 1, 8, 0, 0, 0, 0]) + b\"\\xff\\xd8missing-tail\"\r\n\r\n        frame = adapter.handle_payload(packet)\r\n\r\n        self.assertIsNone(frame)\r\n\r\n\r\nif __name__ == \"__main__\":\r\n    unittest.main()\r\n"
  },
  {
    "path": "backend/utils/cooingdv_jieli_ctp.py",
    "content": "from __future__ import annotations\r\n\r\nimport json\r\nfrom typing import Mapping\r\n\r\n\r\nCTP_SIGNATURE = b\"CTP:\"\r\nOP_PUT = \"PUT\"\r\n\r\n\r\ndef build_ctp_packet(\r\n    topic: str,\r\n    params: Mapping[str, str] | None = None,\r\n    *,\r\n    operation: str = OP_PUT,\r\n) -> bytes:\r\n    \"\"\"\r\n    Build the Jieli CTP envelope used by KY FPV's DeviceClient backend.\r\n\r\n    Format:\r\n      CTP: + le16(topic_length) + topic + le32(json_length) + json\r\n    \"\"\"\r\n    payload: dict[str, object] = {\"op\": operation}\r\n    if params:\r\n        payload[\"param\"] = dict(params)\r\n\r\n    topic_bytes = topic.encode(\"utf-8\")\r\n    json_bytes = json.dumps(payload, separators=(\",\", \":\")).encode(\"utf-8\")\r\n\r\n    return (\r\n        CTP_SIGNATURE\r\n        + len(topic_bytes).to_bytes(2, \"little\", signed=False)\r\n        + topic_bytes\r\n        + len(json_bytes).to_bytes(4, \"little\", signed=False)\r\n        + json_bytes\r\n    )\r\n\r\n\r\ndef parse_ctp_packet(packet: bytes) -> tuple[str, dict]:\r\n    \"\"\"Parse a CTP packet. Intended for tests and diagnostics.\"\"\"\r\n    if not packet.startswith(CTP_SIGNATURE):\r\n        raise ValueError(\"missing CTP signature\")\r\n    offset = len(CTP_SIGNATURE)\r\n    if len(packet) < offset + 2:\r\n        raise ValueError(\"missing topic length\")\r\n    topic_len = int.from_bytes(packet[offset : offset + 2], \"little\")\r\n    offset += 2\r\n    topic = packet[offset : offset + topic_len].decode(\"utf-8\")\r\n    offset += topic_len\r\n    if len(packet) < offset + 4:\r\n        raise ValueError(\"missing payload length\")\r\n    payload_len = int.from_bytes(packet[offset : offset + 4], \"little\")\r\n    offset += 4\r\n    payload = json.loads(packet[offset : offset + payload_len].decode(\"utf-8\"))\r\n    return topic, payload\r\n"
  },
  {
    "path": "backend/utils/dropping_queue.py",
    "content": "import queue\r\n\r\nclass DroppingQueue(queue.Queue):\r\n    \"\"\"\r\n    A queue that drops the oldest item when it is full.\r\n    \"\"\"\r\n    def put(self, item, block=True, timeout=None):\r\n        \"\"\"\r\n        Put an item into the queue.\r\n\r\n        If the queue is full, it drops the oldest item and adds the new one.\r\n\r\n        This operation is atomic with respect to other producers: under concurrent\r\n        access, `put_nowait()` will not raise `queue.Full` because this method\r\n        always makes space (by dropping the oldest element) before enqueuing.\r\n        \"\"\"\r\n        # Note: We intentionally do NOT call super().put(), because Queue.put()\r\n        # may raise queue.Full when called with block=False. Instead, we perform\r\n        # a \"drop oldest (if needed) + put\" as one critical section under the\r\n        # queue's internal mutex.\r\n        with self.mutex:\r\n            if self.maxsize > 0 and self._qsize() >= self.maxsize:\r\n                # Drop exactly one oldest item to make room.\r\n                self._get()\r\n\r\n                # If users rely on join()/task_done(), dropped work should not\r\n                # keep join() waiting forever.\r\n                if self.unfinished_tasks > 0:\r\n                    self.unfinished_tasks -= 1\r\n\r\n            self._put(item)\r\n            self.unfinished_tasks += 1\r\n            self.not_empty.notify()\r\n\r\n    def put_nowait(self, item):\r\n        \"\"\"\r\n        Put an item into the queue without blocking.\r\n\r\n        If the queue is full, it drops the oldest item and adds the new one.\r\n        \"\"\"\r\n        self.put(item, block=False)\r\n"
  },
  {
    "path": "backend/utils/logging_config.py",
    "content": "from __future__ import annotations\n\nimport logging\nimport os\n\nimport dotenv\n\n\n_BOOTSTRAPPED = False\n_CONFIGURED = False\n_LOG_FORMAT = \"%(asctime)s %(levelname)s %(name)s: %(message)s\"\n\n\ndef _normalise_level(raw: str | None) -> str:\n    value = (raw or \"INFO\").strip().upper()\n    if value == \"WARN\":\n        return \"WARNING\"\n    if value not in {\"CRITICAL\", \"ERROR\", \"WARNING\", \"INFO\", \"DEBUG\", \"NOTSET\"}:\n        return \"INFO\"\n    return value\n\n\ndef bootstrap_runtime() -> None:\n    \"\"\"\n    Load `.env` and set sane defaults for noisy native libraries before modules\n    that import `cv2` are loaded.\n    \"\"\"\n    global _BOOTSTRAPPED\n    if _BOOTSTRAPPED:\n        return\n\n    dotenv.load_dotenv()\n\n    level_name = _normalise_level(os.getenv(\"LOG_LEVEL\"))\n    debug_native = level_name == \"DEBUG\"\n\n    # Keep OpenCV / FFmpeg mostly quiet by default. These can still be\n    # overridden explicitly by environment variables if needed.\n    os.environ.setdefault(\"OPENCV_LOG_LEVEL\", \"INFO\" if debug_native else \"ERROR\")\n    os.environ.setdefault(\"OPENCV_FFMPEG_LOGLEVEL\", \"32\" if debug_native else \"16\")\n    os.environ.setdefault(\"OPENCV_VIDEOIO_DEBUG\", \"1\" if debug_native else \"0\")\n    os.environ.setdefault(\"OPENCV_FFMPEG_DEBUG\", \"1\" if debug_native else \"0\")\n\n    _BOOTSTRAPPED = True\n\n\ndef configure_logging(level: str | None = None) -> str:\n    \"\"\"\n    Configure stdlib logging for backend entrypoints.\n\n    Returns the resolved log level name.\n    \"\"\"\n    global _CONFIGURED\n\n    bootstrap_runtime()\n\n    level_name = _normalise_level(level or os.getenv(\"LOG_LEVEL\"))\n    level_value = getattr(logging, level_name, logging.INFO)\n\n    root = logging.getLogger()\n    if not root.handlers:\n        logging.basicConfig(level=level_value, format=_LOG_FORMAT)\n    else:\n        root.setLevel(level_value)\n        for handler in root.handlers:\n            
if handler.formatter is None:\n                handler.setFormatter(logging.Formatter(_LOG_FORMAT))\n\n    _CONFIGURED = True\n    return level_name\n"
  },
  {
    "path": "backend/utils/wifi_uav_ack_state.py",
    "content": "from __future__ import annotations\r\n\r\nfrom dataclasses import dataclass, field\r\nfrom collections import deque\r\nfrom typing import Optional\r\n\r\nfrom utils.wifi_uav_packets import build_ack_slot, build_fragment_ack_bitmap\r\n\r\n\r\nSLOT_EMPTY = 0\r\nSLOT_RECEIVING = 1\r\nSLOT_COMPLETE = 2\r\nSLOT_DELIVERED = 3\r\nSLOT_DROPPED = 4\r\n\r\n\r\n@dataclass\r\nclass WifiUavFrameSlot:\r\n    \"\"\"State for one native WiFi-UAV in-flight frame slot.\"\"\"\r\n\r\n    seq: int = 0\r\n    status: int = SLOT_EMPTY\r\n    fragment_total: int = 0\r\n    frame_body_len: int = 0\r\n    quality: int = 0\r\n    fragments: dict[int, bytes] = field(default_factory=dict)\r\n    received_fragments: set[int] = field(default_factory=set)\r\n\r\n    def reset(self, seq: int, fragment_total: int, frame_body_len: int, quality: int) -> None:\r\n        self.seq = seq\r\n        self.status = SLOT_RECEIVING\r\n        self.fragment_total = fragment_total\r\n        self.frame_body_len = frame_body_len\r\n        self.quality = quality\r\n        self.fragments.clear()\r\n        self.received_fragments.clear()\r\n\r\n    def ingest(\r\n        self,\r\n        fragment_id: int,\r\n        fragment_total: int,\r\n        payload: bytes,\r\n        *,\r\n        frame_body_len: int = 0,\r\n        quality: int = 0,\r\n    ) -> None:\r\n        if self.status in (SLOT_COMPLETE, SLOT_DELIVERED):\r\n            return\r\n\r\n        if self.status in (SLOT_EMPTY, SLOT_DELIVERED, SLOT_DROPPED):\r\n            self.fragment_total = fragment_total\r\n            self.frame_body_len = frame_body_len\r\n            self.quality = quality\r\n            self.status = SLOT_RECEIVING\r\n        elif fragment_total > 0:\r\n            self.fragment_total = fragment_total\r\n            self.frame_body_len = frame_body_len\r\n            self.quality = quality\r\n\r\n        self.fragments[fragment_id] = payload\r\n        self.received_fragments.add(fragment_id)\r\n\r\n        if 
self.is_complete():\r\n            self.status = SLOT_COMPLETE\r\n\r\n    def is_complete(self) -> bool:\r\n        return (\r\n            self.fragment_total > 0\r\n            and len(self.received_fragments) == self.fragment_total\r\n            and all(i in self.fragments for i in range(self.fragment_total))\r\n        )\r\n\r\n    def ordered_payload(self) -> bytes:\r\n        return b\"\".join(self.fragments[i] for i in range(self.fragment_total))\r\n\r\n    def mark_delivered(self) -> None:\r\n        self.status = SLOT_DELIVERED\r\n\r\n    def mark_dropped(self) -> None:\r\n        self.status = SLOT_DROPPED\r\n\r\n    def ack_status(self) -> int:\r\n        if self.status == SLOT_RECEIVING:\r\n            return 0\r\n        if self.status in (SLOT_COMPLETE, SLOT_DELIVERED):\r\n            return 1\r\n        if self.status == SLOT_DROPPED:\r\n            return 2\r\n        return 3\r\n\r\n    def ack_bitmap(self) -> bytes:\r\n        if self.status != SLOT_RECEIVING or self.fragment_total <= 0:\r\n            return b\"\"\r\n        return build_fragment_ack_bitmap(self.fragment_total, self.received_fragments)\r\n\r\n    def ack_slot(self) -> bytes:\r\n        return build_ack_slot(self.seq, self.ack_status(), self.ack_bitmap())\r\n\r\n\r\nclass WifiUavAckState:\r\n    \"\"\"\r\n    Four-slot ACK/frame tracker shaped after native build_send_ack().\r\n\r\n    This keeps Turbodrone's delivery behavior simple while giving ACK generation\r\n    a native-style home that can be evolved independently of socket code.\r\n    \"\"\"\r\n\r\n    SLOT_COUNT = 4\r\n\r\n    def __init__(self) -> None:\r\n        self.slots = [WifiUavFrameSlot() for _ in range(self.SLOT_COUNT)]\r\n        self.max_recv_seq = 0\r\n        self.last_completed_seq: Optional[int] = None\r\n        self._delivered_history: deque[int] = deque(maxlen=32)\r\n\r\n    def reset(self) -> None:\r\n        for slot in self.slots:\r\n            slot.seq = 0\r\n            slot.status = 
SLOT_EMPTY\r\n            slot.fragment_total = 0\r\n            slot.frame_body_len = 0\r\n            slot.quality = 0\r\n            slot.fragments.clear()\r\n            slot.received_fragments.clear()\r\n        self.max_recv_seq = 0\r\n        self.last_completed_seq = None\r\n        self._delivered_history.clear()\r\n\r\n    def ingest_fragment(\r\n        self,\r\n        seq: int,\r\n        fragment_id: int,\r\n        fragment_total: int,\r\n        payload: bytes,\r\n        *,\r\n        frame_body_len: int = 0,\r\n        quality: int = 0,\r\n    ) -> Optional[WifiUavFrameSlot]:\r\n        if seq in self._delivered_history:\r\n            return None\r\n\r\n        slot = self._slot_for_seq(seq)\r\n        if slot.seq != seq:\r\n            slot.reset(seq, fragment_total, frame_body_len, quality)\r\n\r\n        if fragment_total > 0 and fragment_id >= fragment_total:\r\n            return None\r\n\r\n        self.max_recv_seq = max(self.max_recv_seq, seq)\r\n        slot.ingest(fragment_id, fragment_total, payload, frame_body_len=frame_body_len, quality=quality)\r\n        if slot.is_complete():\r\n            self.last_completed_seq = seq\r\n            return slot\r\n        return None\r\n\r\n    def mark_delivered(self, seq: int) -> None:\r\n        slot = self._find_slot(seq)\r\n        if slot is not None:\r\n            slot.mark_delivered()\r\n        if seq not in self._delivered_history:\r\n            self._delivered_history.append(seq)\r\n\r\n    def mark_dropped(self, seq: int) -> None:\r\n        slot = self._find_slot(seq)\r\n        if slot is not None:\r\n            slot.mark_dropped()\r\n\r\n    def build_ack_slots(self, request_seq: int) -> list[bytes]:\r\n        slots = []\r\n        for slot in self.slots:\r\n            if slot.status == SLOT_EMPTY or slot.seq == 0:\r\n                continue\r\n            if self.max_recv_seq and self.max_recv_seq - slot.seq >= 5:\r\n                continue\r\n            
slots.append(slot.ack_slot())\r\n\r\n        if slots:\r\n            return slots\r\n\r\n        return [\r\n            build_ack_slot(request_seq, 1, b\"\\xff\\xff\\xff\\xff\"),\r\n            build_ack_slot(request_seq, 3),\r\n        ]\r\n\r\n    def _slot_for_seq(self, seq: int) -> WifiUavFrameSlot:\r\n        return self.slots[(seq + 3) % self.SLOT_COUNT]\r\n\r\n    def _find_slot(self, seq: int) -> Optional[WifiUavFrameSlot]:\r\n        for slot in self.slots:\r\n            if slot.seq == seq:\r\n                return slot\r\n        return None\r\n"
  },
  {
    "path": "backend/utils/wifi_uav_jpeg.py",
    "content": "from typing import List\r\n\r\n# Start Of Image\r\nSOI = bytearray(b\"\\xff\\xd8\")\r\n# End Of Image\r\nEOI = bytearray(b\"\\xff\\xd9\")\r\n\r\n# fmt: off\r\n# Standard luminance quantization table\r\nstd_luminance_qt = [\r\n     16, 11, 10, 16, 24,  40,  51,  61,\r\n     12, 12, 14, 19, 26,  58,  60,  55,\r\n     14, 13, 16, 24, 40,  57,  69,  56,\r\n     14, 17, 22, 29, 51,  87,  80,  62,\r\n     18, 22, 37, 56, 68, 109, 103,  77,\r\n     24, 35, 55, 64, 81, 104, 113,  92,\r\n     49, 64, 78, 87,103, 121, 120, 101,\r\n     72, 92, 95, 98,112, 100, 103,  99\r\n]\r\n\r\n# Standard chrominance quantization table\r\nstd_chrominance_qt = [\r\n    17, 18, 24, 47, 99,  99,  99,  99,\r\n    18, 21, 26, 66, 99,  99,  99,  99,\r\n    24, 26, 56, 99, 99,  99,  99,  99,\r\n    47, 66, 99, 99, 99,  99,  99,  99,\r\n    99, 99, 99, 99, 99,  99,  99,  99,\r\n    99, 99, 99, 99, 99,  99,  99,  99,\r\n    99, 99, 99, 99, 99,  99,  99,  99,\r\n    99, 99, 99, 99, 99,  99,  99,  99\r\n]\r\n# fmt: on\r\n\r\n\r\ndef generate_dqt_segment(id: int, table: List[int], precision: int = 0) -> bytes:\r\n    \"\"\"\r\n    Generates the DQT (Define Quantization Table) segment.\r\n\r\n    Args:\r\n        id (int): Table ID (0–3)\r\n        table (list of 64 ints): Quantization values in zigzag order\r\n        precision (int): 0 for 8-bit, 1 for 16-bit. 
Defaults to 0.\r\n\r\n    Returns:\r\n        bytes: The full DQT segment.\r\n    \"\"\"\r\n\r\n    if len(table) != 64:\r\n        raise ValueError(f\"Quantization table must have 64 values.\")\r\n    if precision not in (0, 1):\r\n        raise ValueError(\"Precision must be 0 (8-bit) or 1 (16-bit).\")\r\n\r\n    segment = bytearray(b\"\\xff\\xdb\")  # Marker\r\n    payload = bytearray()\r\n\r\n    # Info byte: (precision << 4) | table_id\r\n    payload.append((precision << 4) | id)\r\n\r\n    # Add table data\r\n    if precision == 0:\r\n        payload.extend(table)\r\n    else:\r\n        for val in table:\r\n            payload.extend(val.to_bytes(2, \"big\"))\r\n\r\n    # Length = 2 (for length field itself) + payload size\r\n    length = len(payload) + 2\r\n    segment.extend(length.to_bytes(2, \"big\"))\r\n    segment.extend(payload)\r\n\r\n    return bytes(segment)\r\n\r\n\r\ndef generate_sof0_segment(width: int, height: int, num_components: int = 3) -> bytes:\r\n    \"\"\"\r\n    Generates a SOF0 (Start of Frame, Baseline DCT) segment for a JPEG image.\r\n    Uses 4:4:4 subsampling.\r\n\r\n    Args:\r\n        width (int): Image width in pixels.\r\n        height (int): Image height in pixels.\r\n        num_components (int): Number of components (e.g., 1 for grayscale, 3 for YCbCr).\r\n            Defaults to 3.\r\n        component_info (list of dict, optional): List of dicts with keys:\r\n            - 'id' (int): Component ID\r\n            - 'sampling' (tuple): (H, V) sampling factors\r\n            - 'qt_id' (int): Quantization table ID\r\n            If not provided, defaults to common layout.\r\n\r\n    Returns:\r\n        bytes: Complete SOF0 segment (including marker).\r\n    \"\"\"\r\n    if not (1 <= width <= 65535 and 1 <= height <= 65535):\r\n        raise ValueError(\"Width and height must be between 1 and 65535.\")\r\n    if num_components not in (1, 3):\r\n        raise ValueError(\"Number of components must be 1 or 3.\")\r\n\r\n    
marker = b\"\\xff\\xc0\"  # SOF0 marker\r\n    precision = b\"\\x08\"  # 8-bit sample precision\r\n    height_bytes = height.to_bytes(2, \"big\")\r\n    width_bytes = width.to_bytes(2, \"big\")\r\n    num_components_byte = num_components.to_bytes(1, \"big\")\r\n\r\n    # Default component layout\r\n    if num_components == 1:\r\n        component_info = [{\"id\": 1, \"sampling\": (1, 1), \"qt_id\": 0}]\r\n    elif num_components == 3:\r\n        component_info = [\r\n            {\"id\": 1, \"sampling\": (1, 1), \"qt_id\": 0},  # Y\r\n            {\"id\": 2, \"sampling\": (1, 1), \"qt_id\": 1},  # Cb\r\n            {\"id\": 3, \"sampling\": (1, 1), \"qt_id\": 1},  # Cr\r\n        ]\r\n    else:\r\n        raise ValueError(\"Invalid num_components\")\r\n\r\n    # Build component specs\r\n    component_specs = b\"\"\r\n    for comp in component_info:\r\n        comp_id = comp[\"id\"].to_bytes(1, \"big\")\r\n        H, V = comp[\"sampling\"]\r\n        sampling_byte = ((H << 4) | V).to_bytes(1, \"big\")\r\n        qt_id = comp[\"qt_id\"].to_bytes(1, \"big\")\r\n        component_specs += comp_id + sampling_byte + qt_id\r\n\r\n    # Total length = 8 (header) + 3 bytes per component\r\n    length = (8 + 3 * num_components).to_bytes(2, \"big\")\r\n\r\n    return (\r\n        marker\r\n        + length\r\n        + precision\r\n        + height_bytes\r\n        + width_bytes\r\n        + num_components_byte\r\n        + component_specs\r\n    )\r\n\r\n\r\ndef generate_sos_segment(\r\n    num_components: int, Ss: int = 0, Se: int = 63, AhAl: int = 0\r\n) -> bytes:\r\n    \"\"\"\r\n    Generates the SOS (Start of Scan) segment for a JPEG file.\r\n\r\n    Args:\r\n        num_components (int): Number of components (1 for grayscale, 3 for YCbCr).\r\n        Ss (int): Start of spectral selection. Defaults to 0.\r\n        Se (int): End of spectral selection. Defaults to 63\r\n        AhAl (int): Approximation bit position. 
Defaults to 0.\r\n\r\n    Returns:\r\n        bytes: Complete SOS segment (including marker).\r\n    \"\"\"\r\n    if num_components == 1:\r\n        component_selectors = [{\"id\": 1, \"dc\": 0, \"ac\": 0}]\r\n    elif num_components == 3:\r\n        component_selectors = [\r\n            {\"id\": 1, \"dc\": 0, \"ac\": 0},  # Y\r\n            {\"id\": 2, \"dc\": 1, \"ac\": 1},  # Cb\r\n            {\"id\": 3, \"dc\": 1, \"ac\": 1},  # Cr\r\n        ]\r\n    else:\r\n        raise ValueError(\"Component count must be 1 or 3.\")\r\n\r\n    marker = b\"\\xff\\xda\"\r\n    length = 6 + 2 * num_components\r\n    segment = bytearray(marker)\r\n    segment += length.to_bytes(2, \"big\")\r\n    segment.append(num_components)\r\n\r\n    for comp in component_selectors:\r\n        table_selector = (comp[\"dc\"] << 4) | comp[\"ac\"]\r\n        segment.append(comp[\"id\"])\r\n        segment.append(table_selector)\r\n\r\n    segment.append(Ss)\r\n    segment.append(Se)\r\n    segment.append(AhAl)\r\n\r\n    return bytes(segment)\r\n\r\n\r\ndef generate_jpeg_headers(width: int, height: int, num_components: int = 3) -> bytes:\r\n    \"\"\"\r\n    Generates a minimal JPEG header without Huffman tables and default quantization tables.\r\n\r\n    Args:\r\n        width (int): Image width in pixels.\r\n        height (int): Image height in pixels.\r\n        num_components (int): Number of components (1 for grayscale, 3 for YCbCr).\r\n\r\n    Returns:\r\n        bytes: JPEG header bytes (SOI, DQT, SOF0, SOS).\r\n    \"\"\"\r\n    # Generate segments\r\n    luminance_dqt = generate_dqt_segment(id=0, table=std_luminance_qt)\r\n    chrominance_dqt = generate_dqt_segment(id=1, table=std_chrominance_qt)\r\n    sof = generate_sof0_segment(\r\n        width=width, height=height, num_components=num_components\r\n    )\r\n    sos = generate_sos_segment(num_components=num_components)\r\n\r\n    # Build the header\r\n    header = bytearray()\r\n    header += SOI\r\n    header += 
luminance_dqt\r\n    if num_components == 3:\r\n        header += chrominance_dqt\r\n    header += sof\r\n    header += sos\r\n    return bytes(header)\r\n"
  },
  {
    "path": "backend/utils/wifi_uav_packets.py",
    "content": "# Start Video Feed\r\nSTART_STREAM = b\"\\xef\\x00\\x04\\x00\"\r\n\r\n# Unknown\r\nUNK_FRAME = b\"\\xef\\x20\\x06\\x00\\x01\\x65\"\r\n\r\n# Drone Info I think\r\nSSID2 = (\r\n    b\"\\xef\\x20\\x19\\x00\\x01\\x67\"\r\n    b\"\\x3c\\x69\\x3d\\x32\\x5e\\x62\\x66\\x5f\\x73\\x73\\x69\\x64\\x3d\\x63\\x6d\\x64\"\r\n    b\"\\x3d\\x32\\x3e\"\r\n)\r\nSSID3 = (\r\n    b\"\\xef\\x20\\x19\\x00\\x01\\x67\"\r\n    b\"\\x3c\\x69\\x3d\\x32\\x5e\\x62\\x66\\x5f\\x73\\x73\\x69\\x64\\x3d\\x63\\x6d\\x64\"\r\n    b\"\\x3d\\x33\\x3e\"\r\n)\r\n\r\n# Both of these are sent for each frame. Native `build_send_ack()` builds this\r\n# same outer packet shape:\r\n#\r\n#   ef 02 <len:u16> 02 02 00 01 <ack-count> ...\r\n#\r\n# The packet also embeds the latest user command at offset 18. The current\r\n# constants carry a neutral extended command (`66 14 80 80 80 80 ... 99`) so\r\n# video can advance even when no RC input has changed.\r\nREQUEST_A = (\r\n    b\"\\xef\\x02\\x58\\x00\\x02\\x02\"\r\n    b\"\\x00\\x01\\x00\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x14\\x00\\x66\\x14\\x80\\x80\"\r\n    b\"\\x80\\x80\\x00\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x99\"\r\n    b\"\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\"\r\n    b\"\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\"\r\n    b\"\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x32\\x4b\\x14\\x2d\"\r\n    b\"\\x00\\x00\"\r\n)\r\n\r\n# See previous comment\r\nREQUEST_B = (\r\n    b\"\\xef\\x02\\x7c\\x00\\x02\\x02\"\r\n    b\"\\x00\\x01\\x02\\x00\\x00\\x00\\x09\\x00\\x00\\x00\\x14\\x00\\x66\\x14\\x80\\x80\"\r\n    b\"\\x80\\x80\\x00\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x02\\x99\"\r\n    b\"\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\"\r\n    b\"\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\"\r\n    
b\"\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x32\\x4b\\x14\\x2d\"\r\n    b\"\\x00\\x00\\x08\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x01\\x00\\x00\\x00\\x14\\x00\"\r\n    b\"\\x00\\x00\\xff\\xff\\xff\\xff\\x09\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x03\\x00\"\r\n    b\"\\x00\\x00\\x10\\x00\\x00\\x00\"\r\n)\r\n\r\nNEUTRAL_EXTENDED_COMMAND = (\r\n    b\"\\x66\\x14\\x80\\x80\\x80\\x80\\x00\\x02\"\r\n    b\"\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x00\"\r\n    b\"\\x00\\x00\\x02\\x99\"\r\n)\r\n\r\nDEFAULT_QUALITY_PARAMS = b\"\\x32\\x4b\\x14\\x2d\\x00\"\r\n\r\n\r\ndef build_fragment_ack_bitmap(fragment_total: int, received_fragments: set[int]) -> bytes:\r\n    \"\"\"Build the native little-endian fragment ACK bitmap.\"\"\"\r\n    if fragment_total <= 0:\r\n        return b\"\"\r\n\r\n    word_count = (fragment_total + 31) // 32\r\n    words = [0] * word_count\r\n    for fragment_id in received_fragments:\r\n        if 0 <= fragment_id < fragment_total:\r\n            words[fragment_id // 32] |= 1 << (fragment_id & 31)\r\n\r\n    return b\"\".join(word.to_bytes(4, \"little\") for word in words)\r\n\r\n\r\ndef build_ack_slot(seq: int, status: int, bitmap: bytes = b\"\") -> bytes:\r\n    \"\"\"Build one native ACK slot record.\"\"\"\r\n    record_len = 16 + len(bitmap)\r\n    return (\r\n        (seq & 0xFFFFFFFFFFFFFFFF).to_bytes(8, \"little\")\r\n        + (status & 0xFFFFFFFF).to_bytes(4, \"little\")\r\n        + record_len.to_bytes(4, \"little\")\r\n        + bitmap\r\n    )\r\n\r\n\r\ndef build_native_ack_packet(\r\n    command_seq: int,\r\n    ack_slots: list[bytes],\r\n    command: bytes = NEUTRAL_EXTENDED_COMMAND,\r\n    quality_params: bytes = DEFAULT_QUALITY_PARAMS,\r\n) -> bytes:\r\n    \"\"\"\r\n    Build a native-shaped WiFi-UAV ACK/request packet.\r\n\r\n    This mirrors the header layout emitted by `build_send_ack()` /\r\n    `build_send_ack_bl618()` in `libuav_lib.so`.\r\n    \"\"\"\r\n    if len(command) > 64:\r\n        raise ValueError(\"WiFi-UAV 
command payload must be <= 64 bytes\")\r\n    if len(quality_params) != 5:\r\n        raise ValueError(\"WiFi-UAV quality params must be exactly 5 bytes\")\r\n\r\n    packet = bytearray()\r\n    packet += b\"\\xef\\x02\\x00\\x00\"\r\n    packet += b\"\\x02\\x02\\x00\\x01\"\r\n    packet += bytes([len(ack_slots) & 0xFF])\r\n    packet += b\"\\x00\\x00\\x00\"\r\n    packet += (command_seq & 0xFFFFFFFF).to_bytes(4, \"little\")\r\n    packet += len(command).to_bytes(2, \"little\")\r\n    packet += command.ljust(64, b\"\\x00\")\r\n    packet += quality_params\r\n    packet += b\"\\x00\"\r\n    for slot in ack_slots:\r\n        packet += slot\r\n    packet[2:4] = len(packet).to_bytes(2, \"little\")\r\n    return bytes(packet)\r\n"
  },
  {
    "path": "backend/utils/wifi_uav_variants.py",
    "content": "from __future__ import annotations\r\n\r\nimport os\r\nimport subprocess\r\nimport sys\r\nfrom dataclasses import dataclass\r\nfrom typing import Final\r\n\r\n\r\nWIFI_UAV_DRONE_TYPES: Final[frozenset[str]] = frozenset({\r\n    \"wifi_uav\",\r\n    \"wifi_uav_fld\",\r\n    \"wifi_uav_uav\",\r\n})\r\n\r\n_UAV_PREFIXES: Final[tuple[str, ...]] = (\"flow_\", \"flow-\", \"flow\")\r\n_FLD_PREFIXES: Final[tuple[str, ...]] = (\"wifi_\", \"gd89pro_\", \"wtech-\", \"wtech_\", \"drone_\")\r\n\r\n\r\n@dataclass(frozen=True)\r\nclass WifiUavCapabilities:\r\n    variant: str\r\n    transport: str\r\n    supports_camera_tilt: bool\r\n    supports_camera_switch: bool\r\n    video_ports: tuple[int, ...]\r\n    rc_command_shape: str\r\n\r\n\r\n_VARIANT_CAPABILITIES: Final[dict[str, WifiUavCapabilities]] = {\r\n    \"fld\": WifiUavCapabilities(\r\n        variant=\"fld\",\r\n        transport=\"fld_compat\",\r\n        supports_camera_tilt=False,\r\n        supports_camera_switch=False,\r\n        video_ports=(8800,),\r\n        rc_command_shape=\"native_ack_embedded\",\r\n    ),\r\n    \"uav\": WifiUavCapabilities(\r\n        variant=\"uav\",\r\n        transport=\"uav_dual_port\",\r\n        supports_camera_tilt=True,\r\n        supports_camera_switch=True,\r\n        video_ports=(8800, 8801),\r\n        rc_command_shape=\"native_ack_embedded\",\r\n    ),\r\n}\r\n\r\n\r\ndef wifi_uav_variant_from_drone_type(drone_type: str) -> str:\r\n    \"\"\"Map the user-facing DRONE_TYPE onto the internal wifi_uav variant name.\"\"\"\r\n    normalised = (drone_type or \"\").strip().lower()\r\n    if normalised == \"wifi_uav_fld\":\r\n        return \"fld\"\r\n    if normalised == \"wifi_uav_uav\":\r\n        return \"uav\"\r\n    return \"auto\"\r\n\r\n\r\ndef resolve_wifi_uav_variant(drone_type: str) -> str:\r\n    \"\"\"\r\n    Resolve the effective wifi_uav transport variant.\r\n\r\n    Explicit DRONE_TYPE aliases win. 
For the umbrella `wifi_uav` type, prefer a\r\n    best-effort SSID-based choice and otherwise fall back to the legacy/default\r\n    path, which today is closest to the FLD-style transport.\r\n    \"\"\"\r\n    explicit = wifi_uav_variant_from_drone_type(drone_type)\r\n    if explicit != \"auto\":\r\n        return explicit\r\n\r\n    ssid = detect_active_wifi_ssid()\r\n    mapped = map_wifi_uav_variant_from_ssid(ssid)\r\n    if mapped:\r\n        return mapped\r\n\r\n    return \"fld\"\r\n\r\n\r\ndef get_wifi_uav_capabilities(variant: str) -> WifiUavCapabilities:\r\n    \"\"\"Return the internal capability profile for a resolved WiFi-UAV variant.\"\"\"\r\n    return _VARIANT_CAPABILITIES.get((variant or \"\").strip().lower(), _VARIANT_CAPABILITIES[\"fld\"])\r\n\r\n\r\ndef resolve_wifi_uav_capabilities(drone_type: str) -> WifiUavCapabilities:\r\n    \"\"\"Resolve a user-facing DRONE_TYPE to its internal capability profile.\"\"\"\r\n    return get_wifi_uav_capabilities(resolve_wifi_uav_variant(drone_type))\r\n\r\n\r\ndef map_wifi_uav_variant_from_ssid(ssid: str | None) -> str | None:\r\n    \"\"\"Map a Wi-Fi SSID prefix onto the app's Uav/Fld backend choice.\"\"\"\r\n    if not ssid:\r\n        return None\r\n\r\n    normalised = ssid.strip().lower()\r\n    if any(normalised.startswith(prefix) for prefix in _UAV_PREFIXES):\r\n        return \"uav\"\r\n    if any(normalised.startswith(prefix) for prefix in _FLD_PREFIXES):\r\n        return \"fld\"\r\n    return None\r\n\r\n\r\ndef detect_active_wifi_ssid() -> str | None:\r\n    \"\"\"\r\n    Best-effort detection of the currently connected Wi-Fi SSID.\r\n\r\n    This is intentionally conservative: if platform-specific commands are not\r\n    available or parsing fails, return None and let the caller fall back.\r\n    \"\"\"\r\n    for env_name in (\"WIFI_UAV_SSID\", \"DRONE_SSID\", \"WIFI_SSID\"):\r\n        value = os.getenv(env_name, \"\").strip()\r\n        if value:\r\n            return value\r\n\r\n    try:\r\n 
       if sys.platform.startswith(\"win\"):\r\n            return _detect_ssid_windows()\r\n        if sys.platform == \"darwin\":\r\n            return _detect_ssid_macos()\r\n        return _detect_ssid_linux()\r\n    except Exception:\r\n        return None\r\n\r\n\r\ndef _detect_ssid_windows() -> str | None:\r\n    result = subprocess.run(\r\n        [\"netsh\", \"wlan\", \"show\", \"interfaces\"],\r\n        capture_output=True,\r\n        text=True,\r\n        timeout=2.0,\r\n        check=False,\r\n    )\r\n    if result.returncode != 0:\r\n        return None\r\n\r\n    for line in result.stdout.splitlines():\r\n        stripped = line.strip()\r\n        if not stripped.lower().startswith(\"ssid\") or stripped.lower().startswith(\"bssid\"):\r\n            continue\r\n        _, _, value = stripped.partition(\":\")\r\n        ssid = value.strip()\r\n        if ssid:\r\n            return ssid\r\n    return None\r\n\r\n\r\ndef _detect_ssid_macos() -> str | None:\r\n    airport = \"/System/Library/PrivateFrameworks/Apple80211.framework/Versions/Current/Resources/airport\"\r\n    result = subprocess.run(\r\n        [airport, \"-I\"],\r\n        capture_output=True,\r\n        text=True,\r\n        timeout=2.0,\r\n        check=False,\r\n    )\r\n    if result.returncode != 0:\r\n        return None\r\n\r\n    for line in result.stdout.splitlines():\r\n        stripped = line.strip()\r\n        if not stripped.startswith(\"SSID:\"):\r\n            continue\r\n        _, _, value = stripped.partition(\":\")\r\n        ssid = value.strip()\r\n        if ssid:\r\n            return ssid\r\n    return None\r\n\r\n\r\ndef _detect_ssid_linux() -> str | None:\r\n    for command in ([\"iwgetid\", \"-r\"], [\"nmcli\", \"-t\", \"-f\", \"active,ssid\", \"dev\", \"wifi\"]):\r\n        result = subprocess.run(\r\n            command,\r\n            capture_output=True,\r\n            text=True,\r\n            timeout=2.0,\r\n            check=False,\r\n        )\r\n        
if result.returncode != 0:\r\n            continue\r\n\r\n        output = result.stdout.strip()\r\n        if not output:\r\n            continue\r\n\r\n        if command[0] == \"nmcli\":\r\n            for line in output.splitlines():\r\n                if not line.startswith(\"yes:\"):\r\n                    continue\r\n                ssid = line.split(\":\", 1)[1].strip()\r\n                if ssid:\r\n                    return ssid\r\n        else:\r\n            return output\r\n\r\n    return None\r\n"
  },
  {
    "path": "backend/video_client.py",
    "content": "#!/usr/bin/env python3\r\nimport argparse\r\nimport queue\r\nimport signal\r\nimport sys\r\nimport os\r\n\r\nfrom protocols.s2x_video_protocol import S2xVideoProtocolAdapter\r\nfrom protocols.wifi_uav_video_protocol import WifiUavVideoProtocolAdapter\r\nfrom services.video_receiver import VideoReceiverService\r\nfrom views.opencv_video_view import OpenCVVideoView\r\n\r\ndef main():\r\n    parser = argparse.ArgumentParser(\r\n        description=\"Modular drone video client\"\r\n    )\r\n    parser.add_argument(\"--drone-ip\", default=\"172.16.10.1\", help=\"Drone host address\")\r\n    parser.add_argument(\"--video-port\", type=int, default=8888)\r\n    parser.add_argument(\"--control-port\", type=int, default=8080)\r\n    parser.add_argument(\r\n        \"--keepalive\", type=float, default=1.0,\r\n        help=\"Re-send start-video every N seconds\"\r\n    )\r\n    parser.add_argument(\r\n        \"--dump-frames\", action=\"store_true\",\r\n        help=\"Dump every reassembled frame to disk\"\r\n    )\r\n    parser.add_argument(\r\n        \"--dump-packets\", action=\"store_true\",\r\n        help=\"Dump every raw packet to disk\"\r\n    )\r\n    parser.add_argument(\r\n        \"--dump-dir\", type=str, default=None,\r\n        help=\"Directory to store dumps (default: dumps_timestamp)\"\r\n    )\r\n    args = parser.parse_args()\r\n\r\n    # Define the blueprint for our protocol adapter.\r\n    # The VideoReceiverService will use this to create new instances.\r\n    protocol_class = WifiUavVideoProtocolAdapter\r\n    protocol_args = {\r\n        \"drone_ip\": \"192.168.169.1\",\r\n        \"control_port\": 8800,\r\n        \"video_port\": 8800,\r\n        \"debug\": True\r\n    }\r\n\r\n    # protocol_class = S2xVideoProtocolAdapter\r\n    # protocol_args = {\r\n    #     \"drone_ip\": \"172.16.10.1\",\r\n    #     \"control_port\": 8080,\r\n    #     \"video_port\": 8888,\r\n    #     \"debug\": True\r\n    # }\r\n    \r\n    # Create frame 
queue\r\n    frame_queue = queue.Queue(maxsize=100)\r\n    \r\n    # The service now takes the class and args to manage the protocol's lifecycle.\r\n    receiver = VideoReceiverService(\r\n        protocol_class,\r\n        protocol_args,\r\n        frame_queue,\r\n        dump_frames=args.dump_frames,\r\n        dump_packets=args.dump_packets,\r\n        dump_dir=args.dump_dir\r\n    )\r\n    \r\n    # Create view\r\n    view = OpenCVVideoView(frame_queue)\r\n    \r\n    # Set up signal handler for clean shutdown\r\n    def signal_handler(sig, frame):\r\n        print(\"\\n[main] Caught signal, shutting down...\")\r\n        \r\n        # First stop the receiver (which stops the keepalive)\r\n        receiver.stop()\r\n        \r\n        # Then stop the view\r\n        view.stop()\r\n        \r\n        # Exit more forcefully\r\n        os._exit(0)  # Use os._exit instead of sys.exit\r\n    \r\n    signal.signal(signal.SIGINT, signal_handler)\r\n    signal.signal(signal.SIGTERM, signal_handler)\r\n    \r\n    # The receiver service now manages the entire protocol lifecycle.\r\n    # We no longer send the initial start command from here.\r\n    receiver.start()\r\n    \r\n    try:\r\n        view.run()\r\n    finally:\r\n        print(\"[main] Shutting down...\")\r\n        view.stop()\r\n        receiver.stop()\r\n\r\nif __name__ == \"__main__\":\r\n    main() "
  },
  {
    "path": "backend/views/__init__.py",
    "content": "\r\nfrom .cli_rc import CLIView\r\n\r\n__all__ = ['CLIView']\r\n"
  },
  {
    "path": "backend/views/base_video_view.py",
    "content": "from abc import ABC, abstractmethod\r\n\r\nclass BaseVideoView(ABC):\r\n    \"\"\"Base abstract class for video display views\"\"\"\r\n    \r\n    def __init__(self, frame_queue):\r\n        self.frame_queue = frame_queue\r\n        self.running = True\r\n    \r\n    @abstractmethod\r\n    def run(self):\r\n        \"\"\"Start the view's main loop\"\"\"\r\n        pass\r\n    \r\n    @abstractmethod\r\n    def stop(self):\r\n        \"\"\"Stop the view\"\"\"\r\n        pass "
  },
  {
    "path": "backend/views/cli_rc.py",
    "content": "# Remote Control using curses cli interface\r\n\r\nimport curses\r\nimport time\r\nfrom services.flight_controller import FlightController\r\n\r\nclass CLIView:\r\n    \"\"\"Curses-based CLI view for drone remote control\"\"\"\r\n    \r\n    def __init__(self, flight_controller):\r\n        self.controller = flight_controller\r\n        self.sensitivity_mode = 0  # 0=normal, 1=precise, 2=aggressive\r\n        self.sensitivity_labels = [\"Normal\", \"Precise\", \"Aggressive\"]\r\n        self.PRESS_THRESHOLD = 0.4  # threshold for key being held\r\n\r\n    def run(self):\r\n        \"\"\"Start the CLI interface\"\"\"\r\n        curses.wrapper(self._ui_loop)\r\n        \r\n    def _ui_loop(self, stdscr):\r\n        \"\"\"Main curses UI loop\"\"\"\r\n        curses.curs_set(0)\r\n        stdscr.nodelay(True)\r\n        stdscr.keypad(True)\r\n        help_msg = \"W/S=throttle  A/D=yaw  Arrows=pitch/roll  T=takeoff  L=land  Q=quit\"\r\n        help_msg2 = \"F=debug packets  X=sensitivity mode\"\r\n\r\n        # direction states and last-press timestamps\r\n        throttle_dir = yaw_dir = pitch_dir = roll_dir = 0\r\n        throttle_ts = yaw_ts = pitch_ts = roll_ts = 0.0\r\n\r\n        prev_time = time.time()\r\n        debug_enabled = False\r\n\r\n        while self.controller.running:\r\n            now = time.time()\r\n            dt = now - prev_time\r\n            prev_time = now\r\n\r\n            c = stdscr.getch()\r\n            if c in (ord('q'), ord('Q')):\r\n                self.controller.stop()\r\n                break\r\n\r\n            elif c in (ord('t'), ord('T')):\r\n                self.controller.model.takeoff()\r\n            elif c in (ord('l'), ord('L')):\r\n                self.controller.model.land()\r\n            elif c in (ord('f'), ord('F')):\r\n                debug_enabled = self.controller.protocol.toggle_debug()\r\n            elif c in (ord('x'), ord('X')):\r\n                # Toggle sensitivity mode\r\n                
self.sensitivity_mode = (self.sensitivity_mode + 1) % 3\r\n                self.controller.model.set_sensitivity(self.sensitivity_mode)\r\n\r\n            # throttle\r\n            elif c in (ord('w'), ord('W')):\r\n                throttle_dir = +1; throttle_ts = now\r\n            elif c in (ord('s'), ord('S')):\r\n                throttle_dir = -1; throttle_ts = now\r\n\r\n            # yaw\r\n            elif c in (ord('a'), ord('A')):\r\n                yaw_dir = -1; yaw_ts = now\r\n            elif c in (ord('d'), ord('D')):\r\n                yaw_dir = +1; yaw_ts = now\r\n\r\n            # pitch\r\n            elif c == curses.KEY_UP:\r\n                pitch_dir = +1; pitch_ts = now\r\n            elif c == curses.KEY_DOWN:\r\n                pitch_dir = -1; pitch_ts = now\r\n\r\n            # roll\r\n            elif c == curses.KEY_LEFT:\r\n                roll_dir = -1; roll_ts = now\r\n            elif c == curses.KEY_RIGHT:\r\n                roll_dir = +1; roll_ts = now\r\n\r\n            # decide if each axis is \"still held\"\r\n            active_throttle = throttle_dir if (now - throttle_ts) < self.PRESS_THRESHOLD else 0\r\n            active_yaw = yaw_dir if (now - yaw_ts) < self.PRESS_THRESHOLD else 0\r\n            active_pitch = pitch_dir if (now - pitch_ts) < self.PRESS_THRESHOLD else 0\r\n            active_roll = roll_dir if (now - roll_ts) < self.PRESS_THRESHOLD else 0\r\n\r\n            # Update controller with current control directions\r\n            self.controller.set_control_direction('throttle', active_throttle)\r\n            self.controller.set_control_direction('yaw', active_yaw)\r\n            self.controller.set_control_direction('pitch', active_pitch)\r\n            self.controller.set_control_direction('roll', active_roll)\r\n\r\n            # Update the UI\r\n            state = self.controller.model.get_control_state()\r\n            stdscr.clear()\r\n            stdscr.addstr(0, 0,\r\n                f\"Throttle: 
{int(state['throttle']):3d}    \"\r\n                f\"Yaw:      {int(state['yaw']):3d}\")\r\n            stdscr.addstr(1, 0,\r\n                f\" Pitch:   {int(state['pitch']):3d}    \"\r\n                f\"Roll:     {int(state['roll']):3d}\")\r\n                \r\n            # Add status flags to UI\r\n            status_flags = [f\"Mode: {self.sensitivity_labels[self.sensitivity_mode]}\"]\r\n            if debug_enabled: status_flags.append(\"DEBUG\")\r\n            status_str = \" | \".join(status_flags)\r\n            stdscr.addstr(2, 0, f\"Status: {status_str}\")\r\n                \r\n            stdscr.addstr(4, 0, help_msg)\r\n            stdscr.addstr(5, 0, help_msg2)\r\n            stdscr.refresh()\r\n\r\n            # small sleep to cap UI frame-rate\r\n            time.sleep(0.01)\r\n"
  },
  {
    "path": "backend/views/opencv_video_view.py",
    "content": "import cv2\r\nimport numpy as np\r\nimport queue\r\nimport time\r\nimport sys\r\nimport ctypes\r\nfrom views.base_video_view import BaseVideoView\r\n\r\nclass OpenCVVideoView(BaseVideoView):\r\n    \"\"\"OpenCV-based video display view\"\"\"\r\n    \r\n    def __init__(self, frame_queue, window_name=\"Drone Video\"):\r\n        super().__init__(frame_queue)\r\n        self.window_name = window_name\r\n        \r\n    # ------------------------------------------------------------------ #\r\n    # private helper – poke HighGUI so waitKey() returns immediately\r\n    # ------------------------------------------------------------------ #\r\n    def _wakeup_highgui(self):\r\n        if sys.platform.startswith(\"win\"):\r\n            hwnd = ctypes.windll.user32.FindWindowW(None, self.window_name)\r\n            if hwnd:\r\n                ctypes.windll.user32.PostMessageW(hwnd, 0, 0, 0)\r\n        else:\r\n            # On X11 / Cocoa / Qt nothing special is needed – an extra waitKey\r\n            # call will do.\r\n            cv2.waitKey(1)\r\n\r\n    def run(self):\r\n        \"\"\"Start the OpenCV display loop\"\"\"\r\n        cv2.namedWindow(self.window_name, cv2.WINDOW_NORMAL)\r\n        \r\n        # Build a placeholder image (black + red warning text)\r\n        placeholder_h, placeholder_w = 480, 640\r\n        placeholder = np.zeros((placeholder_h, placeholder_w, 3), np.uint8)\r\n        txt = \"No video frames received yet\"\r\n        font, scale, th = cv2.FONT_HERSHEY_SIMPLEX, 0.6, 2\r\n        (tw, th_), _ = cv2.getTextSize(txt, font, scale, th)\r\n        x = (placeholder_w - tw) // 2\r\n        y = (placeholder_h + th_) // 2\r\n        cv2.putText(placeholder, txt, (x, y), font, scale, (0, 0, 255), th)\r\n        \r\n        fps_timer = time.time()\r\n        frame_count = 0\r\n        \r\n        while self.running:\r\n            frame = None\r\n            try:\r\n                frame = self.frame_queue.get(timeout=1.0)\r\n           
 except queue.Empty:\r\n                pass\r\n            \r\n            if frame is None:\r\n                img = placeholder\r\n                is_real = False\r\n            else:\r\n                # Handle different frame formats\r\n                if frame.format == \"jpeg\":\r\n                    arr = np.frombuffer(frame.data, dtype=np.uint8)\r\n                    decoded = cv2.imdecode(arr, cv2.IMREAD_COLOR)\r\n                    if decoded is None:\r\n                        print(f\"[display] ⚠ imdecode failed ({len(arr)} bytes)\")\r\n                        img, is_real = placeholder, False\r\n                    else:\r\n                        img, is_real = decoded, True\r\n                        # Store dimensions in frame for future reference\r\n                        frame.width, frame.height = img.shape[1], img.shape[0]\r\n                else:\r\n                    # For future formats like h264, h265, etc.\r\n                    print(f\"[display] Unsupported format: {frame.format}\")\r\n                    img, is_real = placeholder, False\r\n            \r\n            cv2.imshow(self.window_name, img)\r\n            key = cv2.waitKey(1) & 0xFF\r\n            if key == ord(\"q\"):\r\n                self.running = False\r\n                break\r\n            \r\n            # Only count toward FPS when we had a real frame\r\n            if is_real:\r\n                frame_count += 1\r\n                if frame_count % 60 == 0:\r\n                    now = time.time()\r\n                    print(f\"[display] ~{frame_count/(now-fps_timer):4.1f} fps\")\r\n                    fps_timer, frame_count = now, 0\r\n        \r\n        cv2.destroyAllWindows()\r\n\r\n    def stop(self):\r\n        \"\"\"Stop the display loop\"\"\"\r\n        self.running = False\r\n        self._wakeup_highgui()            # make waitKey return "
  },
  {
    "path": "backend/web_server.py",
    "content": "import asyncio\nimport threading\nimport queue\nimport time\nfrom contextlib import asynccontextmanager\nfrom typing import Any, Optional\nimport logging\nimport os\n\nfrom fastapi import FastAPI, WebSocket, WebSocketDisconnect, HTTPException\nfrom fastapi.middleware.cors import CORSMiddleware\nfrom starlette.websockets import WebSocketState\nfrom utils.logging_config import bootstrap_runtime, configure_logging\n\nbootstrap_runtime()\n\nfrom services.flight_controller import FlightController\nfrom control.strategies import DirectStrategy, IncrementalStrategy\nfrom services.video_receiver import VideoReceiverService\nfrom models.s2x_rc import S2xDroneModel as S2xRcModel\nfrom models.debug_rc import DebugRcModel\nfrom protocols.s2x_rc_protocol_adapter import S2xRCProtocolAdapter\nfrom protocols.debug_rc_protocol_adapter import DebugRcProtocolAdapter\nfrom protocols.s2x_video_protocol import S2xVideoProtocolAdapter\nfrom protocols.debug_video_protocol import DebugVideoProtocolAdapter\nfrom protocols.wifi_uav_rc_protocol_adapter import WifiUavRcProtocolAdapter\nfrom protocols.wifi_uav_video_protocol import WifiUavVideoProtocolAdapter\nfrom models.wifi_uav_rc import WifiUavRcModel\nfrom models.cooingdv_rc import CooingdvRcModel\nfrom protocols.cooingdv_rc_protocol_adapter import CooingdvRcProtocolAdapter\nfrom protocols.cooingdv_jieli_rc_protocol_adapter import CooingdvJieliRcProtocolAdapter\nfrom protocols.cooingdv_jieli_video_protocol import CooingdvJieliVideoProtocolAdapter\nfrom protocols.cooingdv_video_protocol import CooingdvVideoProtocolAdapter\nfrom models.wifi_cam_rc import WifiCamRcModel\nfrom protocols.wifi_cam_rc_protocol_adapter import WifiCamRcProtocolAdapter\nfrom protocols.wifi_cam_video_protocol import WifiCamVideoProtocolAdapter\nfrom plugins.manager import PluginManager\nfrom utils.dropping_queue import DroppingQueue\nfrom utils.wifi_uav_variants import (\n    WIFI_UAV_DRONE_TYPES,\n    resolve_wifi_uav_capabilities,\n    
resolve_wifi_uav_variant,\n)\n\nCOOINGDV_DRONE_TYPES = {\"cooingdv\", \"cooingdv_jieli\"}\n\n\nclass ConnectionManager:\n    \"\"\"\n    Manages active WebSocket connections for broadcasting messages.\n    \"\"\"\n    def __init__(self):\n        self.active_connections: list[WebSocket] = []\n\n    async def connect(self, websocket: WebSocket):\n        await websocket.accept()\n        self.active_connections.append(websocket)\n\n    def disconnect(self, websocket: WebSocket):\n        if websocket in self.active_connections:\n            self.active_connections.remove(websocket)\n\n    async def broadcast(self, message: str):\n        # Copy to avoid mutation during iteration\n        for connection in list(self.active_connections):\n            if connection.client_state == WebSocketState.CONNECTED:\n                try:\n                    await connection.send_text(message)\n                except Exception:\n                    self.disconnect(connection)\n\n    async def broadcast_bytes(self, message: bytes):\n        # Copy to avoid mutation during iteration\n        for connection in list(self.active_connections):\n            if connection.client_state == WebSocketState.CONNECTED:\n                try:\n                    await connection.send_bytes(message)\n                except Exception:\n                    self.disconnect(connection)\n\n    async def broadcast_json(self, obj: Any):\n        # Copy to avoid mutation during iteration\n        for connection in list(self.active_connections):\n            if connection.client_state == WebSocketState.CONNECTED:\n                try:\n                    await connection.send_json(obj)\n                except Exception:\n                    self.disconnect(connection)\n\n# Feature flags\nPLUGINS_ENABLED = os.getenv(\"PLUGINS_ENABLED\", \"false\").lower() in (\"1\", \"true\", \"yes\", \"on\")\n\nconfigure_logging()\nlogger = logging.getLogger(__name__)\n\n# Managers for WebSocket 
connections\noverlay_manager = ConnectionManager()\nvideo_manager = ConnectionManager()\n\n\ndef _control_capabilities_for_drone(drone_type: str) -> dict[str, bool]:\n    \"\"\"\n    Describe which one-shot commands the active implementation exposes.\n\n    These capabilities let the frontend keep its control cluster honest instead\n    of assuming every implementation supports the same actions.\n    \"\"\"\n    if drone_type in WIFI_UAV_DRONE_TYPES:\n        wifi_uav_capabilities = resolve_wifi_uav_capabilities(drone_type)\n        return {\n            \"takeoff\": True,\n            \"land\": True,\n            \"estop\": True,\n            \"camera_tilt\": wifi_uav_capabilities.supports_camera_tilt,\n            \"camera_switch\": wifi_uav_capabilities.supports_camera_switch,\n            \"speed_control\": True,\n        }\n\n    if drone_type in {\"s2x\", \"wifi_cam\"} or drone_type in COOINGDV_DRONE_TYPES:\n        return {\n            \"takeoff\": True,\n            \"land\": True,\n            \"estop\": True,\n            \"camera_tilt\": False,\n            \"camera_switch\": drone_type == \"wifi_cam\",\n            \"speed_control\": drone_type == \"s2x\",\n        }\n\n    if drone_type == \"debug\":\n        return {\n            \"takeoff\": True,\n            \"land\": True,\n            \"estop\": False,\n            \"camera_tilt\": False,\n            \"camera_switch\": False,\n            \"speed_control\": False,\n        }\n\n    return {\n        \"takeoff\": False,\n        \"land\": False,\n        \"estop\": False,\n        \"camera_tilt\": False,\n        \"camera_switch\": False,\n        \"speed_control\": False,\n    }\n\nclass FrameHub:\n    \"\"\"\n    Fan-out hub for MJPEG frames.\n\n    Each /mjpeg client gets its own asyncio.Queue, so multiple clients don't\n    steal frames from each other.\n    \"\"\"\n    def __init__(self, per_client_queue_size: int = 2):\n        self._per_client_queue_size = per_client_queue_size\n        
self._clients: set[asyncio.Queue] = set()\n        self._lock = asyncio.Lock()\n\n    async def register(self) -> asyncio.Queue:\n        q: asyncio.Queue = asyncio.Queue(self._per_client_queue_size)\n        async with self._lock:\n            self._clients.add(q)\n        return q\n\n    async def unregister(self, q: asyncio.Queue) -> None:\n        async with self._lock:\n            self._clients.discard(q)\n\n    async def publish(self, frame: Optional[bytes]) -> None:\n        # Snapshot under lock; we only do non-blocking puts.\n        async with self._lock:\n            clients = list(self._clients)\n\n        for q in clients:\n            try:\n                q.put_nowait(frame)\n            except asyncio.QueueFull:\n                # Drop oldest then try again.\n                try:\n                    _ = q.get_nowait()\n                except asyncio.QueueEmpty:\n                    pass\n                try:\n                    q.put_nowait(frame)\n                except Exception:\n                    # Give up on this client queue; it's likely stalled.\n                    await self.unregister(q)\n\nFRAME_HUB = FrameHub(per_client_queue_size=2)\n\n@asynccontextmanager\nasync def lifespan(app: FastAPI):\n    # Startup\n    global flight_controller, receiver, plugin_manager, active_drone_type, control_capabilities\n\n    drone_type = os.getenv(\"DRONE_TYPE\", \"s2x\").lower()\n    active_drone_type = drone_type\n    control_capabilities = _control_capabilities_for_drone(drone_type)\n    \n    logger.info(\"[main] Using drone type: %s\", drone_type)\n\n    if drone_type == \"s2x\":\n        logger.info(\"[main] Using S2X drone implementation.\")\n        # Allow overriding IP and ports via env to match prior behavior\n        default_ip = \"172.16.10.1\"\n        default_ctrl_port = 8080\n        default_video_port = 8888\n        default_control_rate = 80.0\n\n        drone_ip = os.getenv(\"DRONE_IP\", default_ip)\n        ctrl_port = 
int(os.getenv(\"CONTROL_PORT\", default_ctrl_port))\n        video_port = int(os.getenv(\"VIDEO_PORT\", default_video_port))\n\n        model = S2xRcModel()\n        rc_proto = S2xRCProtocolAdapter(drone_ip, ctrl_port)\n        # Optional remap via env: some S2x variants swap yaw and roll\n        try:\n            rc_proto.swap_yaw_roll = os.getenv(\"S2X_SWAP_YAW_ROLL\", \"false\").lower() in (\"1\", \"true\", \"yes\", \"on\")\n            if rc_proto.swap_yaw_roll:\n                logger.info(\"[main] S2X swap_yaw_roll enabled\")\n        except Exception:\n            pass\n        video_adapter_cls = S2xVideoProtocolAdapter\n        video_adapter_args = {\n            \"drone_ip\": drone_ip,\n            \"control_port\": ctrl_port,\n            \"video_port\": video_port,\n        }\n    elif drone_type in WIFI_UAV_DRONE_TYPES:\n        wifi_uav_variant = resolve_wifi_uav_variant(drone_type)\n        logger.info(\"[main] Using WiFi UAV drone implementation (variant=%s).\", wifi_uav_variant)\n        # Align with previous working setup: env-configurable IP and ports\n        default_ip = \"192.168.169.1\"\n        default_ctrl_port = 8800\n        default_video_port = 8800\n        default_control_rate = 80.0\n\n        drone_ip = os.getenv(\"DRONE_IP\", default_ip)\n        ctrl_port = int(os.getenv(\"CONTROL_PORT\", default_ctrl_port))\n        video_port = int(os.getenv(\"VIDEO_PORT\", default_video_port))\n\n        model = WifiUavRcModel()\n        rc_proto = WifiUavRcProtocolAdapter(drone_ip, ctrl_port, variant=wifi_uav_variant)\n        video_adapter_cls = WifiUavVideoProtocolAdapter\n        video_adapter_args = {\n            \"drone_ip\": drone_ip,\n            \"control_port\": ctrl_port,\n            \"video_port\": video_port,\n            \"variant\": wifi_uav_variant,\n            \"debug\": False,\n        }\n    elif drone_type in COOINGDV_DRONE_TYPES:\n        logger.info(\"[main] Using Cooingdv drone implementation (RC UFO, KY UFO, E88 
Pro).\")\n        if drone_type == \"cooingdv_jieli\":\n            default_ip = \"192.168.8.15\"\n            default_ctrl_port = 2228\n            default_video_port = 0\n            default_control_rate = 20.0\n        else:\n            default_ip = \"192.168.1.1\"\n            default_ctrl_port = 7099\n            default_video_port = 7070  # RTSP port\n            default_control_rate = 20.0\n\n        drone_ip = os.getenv(\"DRONE_IP\", default_ip)\n        ctrl_port = int(os.getenv(\"CONTROL_PORT\", default_ctrl_port))\n        video_port = int(os.getenv(\"VIDEO_PORT\", default_video_port))\n\n        model = CooingdvRcModel()\n        if drone_type == \"cooingdv_jieli\":\n            rc_proto = CooingdvJieliRcProtocolAdapter(drone_ip, ctrl_port)\n            video_adapter_cls = CooingdvJieliVideoProtocolAdapter\n            video_adapter_args = {\n                \"drone_ip\": drone_ip,\n                \"control_port\": ctrl_port,\n                \"video_port\": int(os.getenv(\"JIELI_RTP_VIDEO_PORT\", video_port or 6666)),\n                \"audio_port\": int(os.getenv(\"JIELI_RTP_AUDIO_PORT\", 1234)),\n                \"sdp_port\": int(os.getenv(\"JIELI_SDP_PORT\", 6789)),\n                \"width\": int(os.getenv(\"JIELI_VIDEO_WIDTH\", 640)),\n                \"height\": int(os.getenv(\"JIELI_VIDEO_HEIGHT\", 360)),\n                \"fps\": int(os.getenv(\"JIELI_VIDEO_FPS\", 30)),\n            }\n        else:\n            rc_proto = CooingdvRcProtocolAdapter(drone_ip, ctrl_port)\n            video_adapter_cls = CooingdvVideoProtocolAdapter\n            video_adapter_args = {\n                \"drone_ip\": drone_ip,\n                \"control_port\": ctrl_port,\n                \"video_port\": video_port,\n            }\n    elif drone_type == \"wifi_cam\":\n        logger.info(\"[main] Using WiFi_CAM native UDP implementation.\")\n        default_ip = \"192.168.4.153\"\n        default_ctrl_port = 8090\n        default_video_port = 8080\n        
default_control_rate = 25.0\n\n        drone_ip = os.getenv(\"DRONE_IP\", default_ip)\n        ctrl_port = int(os.getenv(\"CONTROL_PORT\", default_ctrl_port))\n        video_port = int(os.getenv(\"VIDEO_PORT\", default_video_port))\n        command_mode = os.getenv(\"WIFI_CAM_COMMAND_MODE\", \"auto\")\n\n        model = WifiCamRcModel()\n        rc_proto = WifiCamRcProtocolAdapter(drone_ip, ctrl_port, command_mode=command_mode)\n        video_adapter_cls = WifiCamVideoProtocolAdapter\n        video_adapter_args = {\n            \"drone_ip\": drone_ip,\n            \"control_port\": ctrl_port,\n            \"video_port\": video_port,\n            \"debug\": os.getenv(\"WIFI_CAM_VIDEO_DEBUG\", \"false\").lower() in (\"1\", \"true\", \"yes\", \"on\"),\n        }\n    elif drone_type == \"debug\":\n        logger.info(\"[main] Using debug drone implementation.\")\n        default_control_rate = 80.0\n        model = DebugRcModel()\n        rc_proto = DebugRcProtocolAdapter()\n        video_adapter_cls = DebugVideoProtocolAdapter\n        video_adapter_args = {\"camera_index\": 0, \"debug\": False}\n    else:\n        raise ValueError(f\"Unknown drone type: {drone_type}\")\n\n    # 1. Video – let the service create / recycle the adapter\n    video_service_args = {\n        \"protocol_adapter_class\": video_adapter_cls,\n        \"protocol_adapter_args\": video_adapter_args,\n        \"frame_queue\": RAW_Q,\n    }\n    if drone_type in WIFI_UAV_DRONE_TYPES or drone_type == \"wifi_cam\":\n        video_service_args[\"rc_adapter\"] = rc_proto\n    \n    receiver = VideoReceiverService(**video_service_args)\n    receiver.start()\n\n    # Wait a moment for video to stabilize\n    await asyncio.sleep(1)\n\n    # 2. 
RC / flight\n    # Optional: enable low-level RC packet debug via env\n    try:\n        if os.getenv(\"RC_DEBUG_PACKETS\", \"false\").lower() in (\"1\", \"true\", \"yes\", \"on\"):\n            try:\n                rc_proto.toggle_debug()\n                logger.info(\"[main] RC packet debug: ON\")\n            except Exception:\n                pass\n    except Exception:\n        pass\n\n    try:\n        control_rate = float(os.getenv(\"CONTROL_RATE\", default_control_rate))\n    except (TypeError, ValueError):\n        control_rate = default_control_rate\n\n    flight_controller = FlightController(model, rc_proto, control_rate)\n    flight_controller.start()\n\n    # 3. Plugins (optional)\n    plugin_frame_q: Optional[queue.Queue] = None\n    overlay_broadcaster: Optional[\"OverlayBroadcaster\"] = None\n    if PLUGINS_ENABLED:\n        PLUGIN_FRAME_Q = DroppingQueue(maxsize=100)\n        PLUGIN_OVERLAY_Q = DroppingQueue(maxsize=100)\n        plugin_manager = PluginManager(flight_controller, PLUGIN_FRAME_Q, PLUGIN_OVERLAY_Q)\n        plugin_frame_q = PLUGIN_FRAME_Q\n\n        # Start overlay broadcaster only when plugins are enabled\n        overlay_broadcaster = OverlayBroadcaster(PLUGIN_OVERLAY_Q, asyncio.get_running_loop())\n        overlay_broadcaster.start()\n        logger.info(\"[plugins] Plugins enabled\")\n    else:\n        plugin_manager = None\n        logger.info(\"[plugins] Plugins disabled (set PLUGINS_ENABLED=true to enable)\")\n\n    # 4. 
start bridge thread (daemon) for video pump (always, for MJPEG)\n    _pump_stop = threading.Event()\n    main_loop = asyncio.get_running_loop()\n    _pump_thread = threading.Thread(\n        target=_frame_pump_worker,\n        args=(RAW_Q, plugin_frame_q, FRAME_HUB, _pump_stop, main_loop),\n        name=\"FramePump\",\n        daemon=True,\n    )\n    _pump_thread.start()\n\n    yield\n\n    # Shutdown\n    if overlay_broadcaster:\n        overlay_broadcaster.stop()\n    if plugin_manager:\n        plugin_manager.stop_all()\n    if flight_controller:\n        flight_controller.stop()\n    if receiver:\n        receiver.stop()\n    if _pump_stop:\n        _pump_stop.set()\n    if _pump_thread:\n        _pump_thread.join(timeout=1.0)\n\n# ───────────────────────────────────────────────────────────────\n# FastAPI app + permissive CORS (tighten in production!)\n# ───────────────────────────────────────────────────────────────\napp = FastAPI(title=\"Drone web adapter\", lifespan=lifespan)\n\napp.add_middleware(\n    CORSMiddleware,\n    allow_origins=[\"*\"],  # TODO: restrict in prod\n    allow_methods=[\"*\"],\n    allow_headers=[\"*\"],\n)\n\n# ───────────────────────────────────────────────────────────────\n# Global objects (single-drone)\n# ───────────────────────────────────────────────────────────────\nRAW_Q: queue.Queue = DroppingQueue(maxsize=2)          # thread-safe → pump\n\nflight_controller: Optional[FlightController] = None\nreceiver: Optional[VideoReceiverService] = None\nplugin_manager: Optional[PluginManager] = None\nactive_drone_type = os.getenv(\"DRONE_TYPE\", \"s2x\").lower()\ncontrol_capabilities = _control_capabilities_for_drone(active_drone_type)\n\nvideo_keepalive = None  # legacy; no longer used\n\n# ───────────────────────────────────────────────────────────────\n# Plugin Management\n# ───────────────────────────────────────────────────────────────\n@app.get(\"/capabilities\")\nasync def get_capabilities():\n    return {\n        
\"drone_type\": active_drone_type,\n        \"commands\": control_capabilities,\n    }\n\n\n@app.get(\"/plugins\")\nasync def get_plugins():\n    if not PLUGINS_ENABLED:\n        raise HTTPException(status_code=404, detail=\"Plugins disabled\")\n    if not plugin_manager:\n        raise HTTPException(status_code=503, detail=\"PluginManager not available\")\n    return {\n        \"available\": plugin_manager.available(),\n        \"running\": plugin_manager.running(),\n    }\n\n@app.post(\"/plugins/{name}/start\")\nasync def start_plugin(name: str):\n    if not PLUGINS_ENABLED:\n        raise HTTPException(status_code=404, detail=\"Plugins disabled\")\n    if not plugin_manager:\n        raise HTTPException(status_code=503, detail=\"PluginManager not available\")\n    try:\n        # Current architecture can technically run multiple plugins, but they\n        # will compete for frames from the shared plugin frame queue. For now\n        # we enforce a single running plugin for predictable behavior.\n        running = plugin_manager.running()\n        if running and name not in running:\n            raise HTTPException(\n                status_code=409,\n                detail=f\"Another plugin is already running: {running}. 
Stop it first.\",\n            )\n        started = plugin_manager.start(name)\n        if not started:\n            raise HTTPException(status_code=409, detail=\"Plugin already running\")\n        return {\"status\": \"started\", \"name\": name}\n    except ValueError as e:\n        raise HTTPException(status_code=404, detail=str(e))\n    except Exception as e:\n        raise HTTPException(status_code=500, detail=str(e))\n\n@app.post(\"/plugins/{name}/stop\")\nasync def stop_plugin(name: str):\n    if not PLUGINS_ENABLED:\n        raise HTTPException(status_code=404, detail=\"Plugins disabled\")\n    if not plugin_manager:\n        raise HTTPException(status_code=503, detail=\"PluginManager not available\")\n    try:\n        stopped = plugin_manager.stop(name)\n        if not stopped:\n            raise HTTPException(status_code=409, detail=\"Plugin not running\")\n        return {\"status\": \"stopped\", \"name\": name}\n    except ValueError as e:\n        raise HTTPException(status_code=404, detail=str(e))\n    except Exception as e:\n        raise HTTPException(status_code=500, detail=str(e))\n\n# ───────────────────────────────────────────────────────────────\n# Websocket handlers\n# ───────────────────────────────────────────────────────────────\n@app.websocket(\"/ws/overlays\")\nasync def websocket_overlay_endpoint(websocket: WebSocket):\n    await overlay_manager.connect(websocket)\n    try:\n        while True:\n            await websocket.receive_text() # Keep connection open\n    except WebSocketDisconnect:\n        overlay_manager.disconnect(websocket)\n    except Exception:\n        overlay_manager.disconnect(websocket)\n\n@app.websocket(\"/ws\")\nasync def ws_endpoint(websocket: WebSocket) -> None:\n    await websocket.accept()\n    try:\n        while True:\n            data = await websocket.receive_json()\n            if not flight_controller:\n                continue\n\n            msg_type = data.get(\"type\")\n            if msg_type == 
\"axes\":\n                mode = data.get(\"mode\", \"abs\")\n\n                # If any plugin is running, completely ignore frontend control commands\n                # Frontend should already be suppressing these, but this is a safety check\n                plugin_running = bool(plugin_manager and plugin_manager.running())\n\n                if plugin_running:\n                    # Plugin has full control - don't process frontend axes at all\n                    pass\n                else:\n                    # Switch strategy based on mode (treat \"mouse\" as absolute)\n                    try:\n                        if mode in (\"abs\", \"mouse\"):\n                            if not isinstance(flight_controller.model.strategy, DirectStrategy):\n                                flight_controller.model.set_strategy(DirectStrategy())\n                        else:\n                            if not isinstance(flight_controller.model.strategy, IncrementalStrategy):\n                                flight_controller.model.set_strategy(IncrementalStrategy())\n                    except Exception:\n                        pass\n\n                    throttle = float(data.get(\"throttle\", 0))\n                    yaw      = float(data.get(\"yaw\", 0))\n                    pitch    = float(data.get(\"pitch\", 0))\n                    roll     = float(data.get(\"roll\", 0))\n                    \n                    flight_controller.set_axes_from(\"frontend\", throttle, yaw, pitch, roll)\n            elif msg_type == \"set_profile\":\n                try:\n                    flight_controller.model.set_profile(data.get(\"name\", \"normal\"))\n                except Exception:\n                    pass\n            elif msg_type in (\"set_speed_index\", \"speed_index\"):\n                try:\n                    flight_controller.model.set_speed_index(data.get(\"speed_index\", data.get(\"value\", 2)))\n                except Exception:\n                    
pass\n            elif msg_type == \"takeoff\":\n                try:\n                    flight_controller.model.takeoff()\n                except Exception:\n                    pass\n            elif msg_type == \"land\":\n                try:\n                    flight_controller.model.land()\n                except Exception:\n                    pass\n            elif msg_type in (\"estop\", \"emergency_stop\"):\n                try:\n                    flight_controller.model.emergency_stop()\n                except Exception:\n                    try:\n                        flight_controller.model.stop_flag = True\n                    except Exception:\n                        pass\n    except WebSocketDisconnect:\n        logger.info(\"[WebSocket] Client disconnected\")\n    except Exception as e:\n        logger.exception(\"[WebSocket] Error: %s\", e)\n\n# ───────────────────────────────────────────────────────────────\n# Video streaming\n# ───────────────────────────────────────────────────────────────\n\ndef _frame_pump_worker(\n    raw_q: queue.Queue,\n    plugin_q: Optional[queue.Queue],\n    frame_hub: FrameHub,\n    stop_event: threading.Event,\n    loop: asyncio.AbstractEventLoop,\n):\n    \"\"\"\n    This worker runs in a separate thread and pumps frames from the\n    thread-safe queue to the asyncio queues.\n    \"\"\"\n    # Wait for the very first frame before starting keepalive, to avoid\n    # prematurely closing the MJPEG stream during initial connection.\n    first_frame_seen = False\n    last_frame_time = time.monotonic()\n    timed_out = False\n    while not stop_event.is_set() and not first_frame_seen:\n        try:\n            frame = raw_q.get(timeout=5.0)\n            if frame:\n                first_frame_seen = True\n                last_frame_time = time.monotonic()\n                # Send to MJPEG/overlay pipelines immediately\n                asyncio.run_coroutine_threadsafe(frame_hub.publish(frame.data), loop)\n            
    if plugin_q is not None:\n                    try:\n                        plugin_q.put_nowait(frame)\n                    except queue.Full:\n                        pass\n                break\n        except queue.Empty:\n            # keep waiting for initial frame without killing the stream\n            continue\n\n    # After frames start flowing, if the stream stalls for too long we close\n    # existing MJPEG clients by publishing None. (Pump continues regardless.)\n    stream_timeout_s = 3.0\n    while not stop_event.is_set():\n        try:\n            frame = raw_q.get(timeout=1.0)\n            if frame:\n                last_frame_time = time.monotonic()\n                timed_out = False\n                try:\n                    asyncio.run_coroutine_threadsafe(frame_hub.publish(frame.data), loop)\n                except Exception:\n                    pass\n                if plugin_q is not None:\n                    try:\n                        plugin_q.put_nowait(frame)\n                    except queue.Full:\n                        pass\n        except queue.Empty:\n            if first_frame_seen and not timed_out and (time.monotonic() - last_frame_time) > stream_timeout_s:\n                timed_out = True\n                try:\n                    asyncio.run_coroutine_threadsafe(frame_hub.publish(None), loop)\n                except Exception:\n                    pass\n            continue\n\n@app.get(\"/mjpeg\")\nasync def mjpeg_stream():\n    \"\"\"\n    Streams JPEG frames over HTTP multipart/x-mixed-replace.\n    \"\"\"\n    from fastapi.responses import StreamingResponse\n    \n    async def frame_generator():\n        q = await FRAME_HUB.register()\n        try:\n            while True:\n                frame = await q.get()\n                if frame is None:\n                    break\n                yield (\n                    b\"--frame\\r\\n\"\n                    b\"Content-Type: image/jpeg\\r\\n\\r\\n\" + frame + 
b\"\\r\\n\"\n                )\n        finally:\n            await FRAME_HUB.unregister(q)\n\n    return StreamingResponse(\n        frame_generator(), media_type=\"multipart/x-mixed-replace; boundary=frame\"\n    )\n\nclass OverlayBroadcaster:\n    def __init__(self, q: queue.Queue, loop: asyncio.AbstractEventLoop):\n        self.q = q\n        self.loop = loop\n        self.thread: Optional[threading.Thread] = None\n        self.stop_event = threading.Event()\n\n    def start(self):\n        self.thread = threading.Thread(target=self._run, daemon=True)\n        self.thread.start()\n\n    def stop(self):\n        self.stop_event.set()\n        if self.thread:\n            self.thread.join(timeout=1.0)\n\n    def _run(self):\n        while not self.stop_event.is_set():\n            try:\n                data = self.q.get(timeout=0.1)\n                # IMPORTANT: empty overlays are sent as [] and must still be broadcast,\n                # otherwise the frontend will keep rendering the last overlay forever.\n                if data is None:\n                    continue\n\n                # Plugins typically put python objects (lists/dicts) into this queue.\n                # The frontend expects JSON. 
IMPORTANT: decide which coroutine to create\n                # BEFORE constructing it, otherwise an un-awaited coroutine may be\n                # garbage-collected (RuntimeWarning: coroutine was never awaited).\n                if isinstance(data, (str, bytes)):\n                    msg = data if isinstance(data, str) else data.decode(\"utf-8\", errors=\"ignore\")\n                    coro = overlay_manager.broadcast(msg)\n                else:\n                    coro = overlay_manager.broadcast_json(data)\n\n                future = asyncio.run_coroutine_threadsafe(coro, self.loop)\n                future.result(timeout=1.0)\n            except queue.Empty:\n                continue\n            except Exception as e:\n                logger.exception(\"[OverlayBroadcaster] Error: %s\", e)\n"
  },
  {
    "path": "docs/research/S2x.md",
    "content": "# Research for the S2x drones (S20, S29, PL FPV)\n\n## Chipset\n\nThe S20 and S29 boards seem to use the [XR872AT](https://jlcpcb.com/partdetail/MACHINEINTELLIGENCE-XR872AT/C879208)\nMCU, a Cortex-M4 ARM processor. The likely firmware SDK family is\nhttps://github.com/XradioTech/xradio-skylark-sdk.\n\n## App family\n\nThese drones belong to the `com.vison.macrochip` Android app family. Confirmed\napps so far:\n\n- HiTurbo FPV: `com.vison.macrochip.hiturbo.fpv`, decompiled at\n  `decompile-s2x-hiturbo-app`.\n- PL FPV: `com.vison.macrochip.pl.fpv`, version `1.1.5`, decompiled at\n  `decompiled-pl-fpv-1.1.5`.\n\nPL FPV is compatible with TurboDrone's existing `s2x` implementation. A\nPlegble PL-1515 that lists PL FPV in its guidebook was flown successfully with\n`DRONE_TYPE=s2x`: RC controls, video, takeoff, land, and e-stop all worked.\n\n## Network shape\n\n- Device target is the phone's Wi-Fi gateway. TurboDrone's default remains\n  `172.16.10.1`, but app code uses the DHCP gateway rather than a hard-coded\n  address.\n- RC/control is UDP to port `8080`.\n- Video is UDP on port `8888`.\n- The app also opens TCP `8888` for some Macrochip variants, but the working\n  S2x path is the UDP video path.\n- There is an auxiliary UDP receive socket on `8081` in newer PL FPV base\n  library code.\n\nVideo start/keepalive is a five-byte UDP command sent to port `8080`:\n\n```text\n08 <local-ipv4-byte0> <local-ipv4-byte1> <local-ipv4-byte2> <local-ipv4-byte3>\n```\n\nHiTurbo's `UdpRequestVideo` sends this every 1000 ms. PL FPV's\n`StreamUdpConnection` sends the same shape every 1000 ms. 
TurboDrone currently\nsends the same start payload every 2000 ms, which has worked on tested drones.\n\n## RC packet\n\nThe stock apps use the 20-byte \"HY\" control packet for this family:\n\n```text\n66 14 RR PP TT YY F1 F2 00 00 00 00 00 00 00 00 00 00 XX 99\n```\n\n- Byte `0`: start marker `0x66`.\n- Byte `1`: packet length/value `0x14`.\n- Byte `2`: roll.\n- Byte `3`: pitch.\n- Byte `4`: throttle.\n- Byte `5`: yaw.\n- Byte `6`: one-shot flags.\n- Byte `7`: mode/status flags.\n- Bytes `8..17`: zero.\n- Byte `18`: XOR of bytes `2..17`.\n- Byte `19`: end marker `0x99`.\n\nObserved flag bits:\n\n- Byte `6`, bit `0`: one-key fly/land. Both takeoff and land use this same bit\n  in the inspected HiTurbo and PL FPV code.\n- Byte `6`, bit `1`: emergency stop.\n- Byte `6`, bit `2`: calibration.\n- Byte `6`, bit `3`: roll/flip in the inline HiTurbo thread variant.\n- Byte `7`, bit `0`: headless.\n- Byte `7`, bit `1`: always set by both inspected apps.\n- Byte `7`, bit `2`: record state.\n- Byte `7`, bit `3`: \"rocker\" UI/control bit.\n\nTurboDrone has historically sent byte `7 = 0x0a` by default. Both inspected app\npaths build `0x02` plus optional bits, but `0x0a` has worked on real S2x and\nPL-1515 hardware. Treat byte `7` as a possible variant knob if a drone flies but\nhas odd mode behavior.\n\n## RC timing and feel\n\nThe inspected stock app paths send RC packets every 50 ms:\n\n- HiTurbo `SendHuiYuanThread` sleeps `50L` between packets.\n- PL FPV subscribes to `RxManager.getObservable(0L, 50L)` for\n  `HyControlConsumer`.\n\nTurboDrone's S2x backend sends RC packets at 80 Hz by default, so perceived lag\nis unlikely to be caused by a lower packet rate. 
More likely causes:\n\n- Frontend `inc` mode is intentionally ramped by `IncrementalStrategy`.\n- Gamepad/absolute mode should feel closer to the app because it uses\n  `DirectStrategy`.\n- Browser input is forwarded to the backend at 30 Hz, while the backend repeats\n  the latest state at the configured control rate.\n- Debug control logging can add small overhead when enabled.\n\n## Speed tiers\n\nThe stock apps do not use byte `1` as a speed selector; byte `1` stays `0x14`.\nInstead, the app scales roll and pitch around center before sending the packet.\n\nObserved HY speed scales:\n\n- HiTurbo: speed `0` = `0.6`, speed `1` = `0.8`, speed `2` = `1.0`.\n- PL FPV: speed `0` = `0.7`, speed `1` = `0.8`, speed `2` = `1.0`.\n\nTurboDrone now keeps S2x default behavior at full scale (`speed_index = 2`) and\nsupports lower S2x speed tiers via `set_speed_index`.\n\n## Native libraries\n\nPL FPV's `config.arm64_v8a.apk` split contains native libraries under\n`resources/config.arm64_v8a.apk/lib/arm64-v8a`. The important app-specific\nlibraries are:\n\n- `libvison_main.so`: implements `com.vison.sdk.VNDK` JNI methods such as\n  `addVideoStream`, `add872Stream`, `createVideoStream`,\n  `getVideoOneFrameArray`, `convertJPEGToI420`, `convertNV12ToI420`, and\n  FFmpeg/H.265 decode helpers. Printable symbols include `_872StreamBuf`,\n  `udp_pack`, `MJPGToI420`, and the Java `VNDK` exports. This looks like the\n  native video parser/decoder bridge, not the RC command transport.\n- `libdetector-lib.so`: implements `com.vison.macrochip.sdk.JNIManage` for\n  hand detection, follow/track, obstacle detection, image stitching, and\n  OpenCV/ncnn/ONNX helpers. 
This is vision/autonomy support and does not appear\n  to own the RC packet format.\n- `librxffmpeg-*`, `libav*`, `libsw*`, `libHW_H265dec_Andr.so`,\n  `libturbojpeg.so`, `libjpeg.so`: codec, FFmpeg, and JPEG support.\n\nString-level inspection did not find hard-coded S2x IPs or ports in\n`libvison_main.so`; the network target and control socket behavior still appear\nto be owned by the Java `BaseApplication` / connection classes. This supports\nkeeping TurboDrone's S2x RC implementation as a Java-level packet match while\nusing native findings mostly to understand video parsing.\n\n### Native S2x UDP video parser\n\nGhidra decompilation of `libvison_main.so` shows that PL FPV's Java\n`VNDK.add872Stream(byte[], int)` calls a native `analysis(int, char*, char*&)`\nfunction before writing a completed image into the internal BLB frame buffer.\nThat function is the best match for TurboDrone's S2x UDP packet parser.\n\nObserved native packet rules:\n\n- Bytes `0..1`: sync marker `0x40 0x40`.\n- Bytes `2..3`: little-endian 16-bit frame/image id.\n- Byte `4`: total chunks in the frame. The native parser rejects `0`.\n- Byte `5`: chunk index. The native parser rejects values above `100`.\n- Bytes `6..7`: little-endian datagram length, which must equal the received\n  packet length.\n- Bytes `8..packet_len-3`: JPEG payload data.\n- Bytes `packet_len-2..packet_len-1`: two-byte trailer, normally `##`.\n\nThe native parser keeps two frame slots, accepts out-of-order chunks, stores\neach chunk at `chunk_id * 0x56e`, tracks a per-frame chunk bitmap, and emits the\nframe as soon as all chunk ids `0..total_chunks-1` have arrived. This is better\nthan waiting for the next frame id to know the previous frame is complete.\n\nTurboDrone's S2x video parser now mirrors the important parts of this behavior:\nit uses the 16-bit frame id, validates total chunks and declared packet length,\nstrips the `##` trailer, and emits a frame immediately once all declared chunks\nare present. 
It still keeps a frame-id rollover fallback for older captures or\nunexpected variants.\n\n## TurboDrone implementation notes\n\nCurrent matching files:\n\n- `backend/models/s2x_rc.py`\n- `backend/protocols/s2x_rc_protocol_adapter.py`\n- `backend/protocols/s2x_video_protocol.py`\n- `backend/models/s2x_video_model.py`\n\nImplementation parity notes:\n\n- RC packet shape matches the Macrochip HY 20-byte packet.\n- Video start command matches the app's `0x08 + local IPv4` command.\n- Video receive port and native `0x40 0x40` chunk header match the S2x stream\n  behavior.\n- `S2X_SWAP_YAW_ROLL` is available as a variant knob in the web backend.\n- S2x speed tiers are supported as a model-level knob; the default remains full\n  scale to preserve existing flight feel.\n\n## Notes\n\n`nmap` on all TCP ports yielded only `8888` open. This is likely a backup or\nvariant path for the main video feed over UDP."
  },
  {
    "path": "docs/research/cooingdv.md",
    "content": "# CooingDV Protocol Research\r\n\r\nThis note documents the CooingDV-style drone protocol as implemented in\r\nTurboDrone and as observed in the decompiled KY UFO and RC UFO Android apps.\r\nThe publisher/app family appears to reuse the same core control and video stack\r\nacross cosmetically different drone apps.\r\n\r\nPrimary evidence:\r\n\r\n- TurboDrone implementation:\r\n  - `turbodrone/backend/models/cooingdv_rc.py`\r\n  - `turbodrone/backend/protocols/cooingdv_rc_protocol_adapter.py`\r\n  - `turbodrone/backend/models/cooingdv_video_model.py`\r\n  - `turbodrone/backend/protocols/cooingdv_video_protocol.py`\r\n  - `turbodrone/backend/main.py`\r\n  - `turbodrone/backend/web_server.py`\r\n  - `turbodrone/backend/services/flight_controller.py`\r\n  - `turbodrone/backend/services/video_receiver.py`\r\n- Decompiled KY UFO app:\r\n  - `decompiled-ky-ufo-1.5.8/sources/com/cooingdv/kyufo/socket/Config.java`\r\n  - `decompiled-ky-ufo-1.5.8/sources/com/cooingdv/kyufo/socket/SocketClient.java`\r\n  - `decompiled-ky-ufo-1.5.8/sources/com/cooingdv/kyufo/socket/UdpComm.java`\r\n  - `decompiled-ky-ufo-1.5.8/sources/com/cooingdv/kyufo/tools/FlyController.java`\r\n  - `decompiled-ky-ufo-1.5.8/sources/com/cooingdv/kyufo/fragment/DeviceBLFragment.java`\r\n  - `decompiled-ky-ufo-1.5.8/sources/com/cooingdv/kyufo/fragment/DeviceGLFragment.java`\r\n  - `decompiled-ky-ufo-1.5.8/sources/com/cooingdv/kyufo/thread/MjpegThread.java`\r\n  - `decompiled-ky-ufo-1.5.8/sources/com/cooingdv/kyufo/models/VideoModel.java`\r\n  - `decompiled-ky-ufo-1.5.8/sources/com/cooingdv/bl60xmjpeg/UAV.java`\r\n  - `decompiled-ky-ufo-1.5.8/sources/com/cooingdv/bl60xmjpeg/utils/GLJni.java`\r\n  - `decompiled-ky-ufo-1.5.8/sources/com/cooingdv/bl60xmjpeg/utils/TCJni.java`\r\n- Decompiled RC UFO app:\r\n  - `decompiled-rc-uf-19.3/sources/com/cooingdv/rcufo/socket/Config.java`\r\n  - `decompiled-rc-uf-19.3/sources/com/cooingdv/rcufo/socket/SocketClient.java`\r\n  - 
`decompiled-rc-uf-19.3/sources/com/cooingdv/rcufo/socket/UdpComm.java`\r\n  - `decompiled-rc-uf-19.3/sources/com/cooingdv/rcufo/tools/FlyController.java`\r\n  - `decompiled-rc-uf-19.3/sources/com/cooingdv/rcufo/fragment/DeviceBLFragment.java`\r\n  - `decompiled-rc-uf-19.3/sources/com/cooingdv/rcufo/thread/MjpegThread.java`\r\n  - `decompiled-rc-uf-19.3/sources/com/cooingdv/rcufo/models/VideoModel.java`\r\n  - `decompiled-rc-uf-19.3/sources/com/cooingdv/rcufo/dialog/EnterPasswordDialog.java`\r\n  - `decompiled-rc-uf-19.3/sources/com/cooingdv/rcufo/utils/WifiIdUtils.java`\r\n- Decompiled RC FPV app:\r\n  - `decompiled-rc-fpv-1.8.0/sources/com/cooingdv/rcfpv/socket/Config.java`\r\n  - `decompiled-rc-fpv-1.8.0/sources/com/cooingdv/rcfpv/socket/SocketClient.java`\r\n  - `decompiled-rc-fpv-1.8.0/sources/com/cooingdv/rcfpv/socket/UdpComm.java`\r\n  - `decompiled-rc-fpv-1.8.0/sources/com/cooingdv/rcfpv/tools/FlyController.java`\r\n  - `decompiled-rc-fpv-1.8.0/sources/com/cooingdv/bl60xmjpeg/UAV.java`\r\n  - `decompiled-rc-fpv-1.8.0/sources/com/cooingdv/rcfpv/utils/WifiIdUtils.java`\r\n- Decompiled KY FPV app:\r\n  - `decompiled-ky-fpv-2.0.0/sources/com/cooingdv/kyfpv/socket/Config.java`\r\n  - `decompiled-ky-fpv-2.0.0/sources/com/cooingdv/kyfpv/socket/UdpComm.java`\r\n  - `decompiled-ky-fpv-2.0.0/sources/com/cooingdv/kyfpv/fragment/DeviceTXFragment.java`\r\n  - `decompiled-ky-fpv-2.0.0/sources/com/cooingdv/kyfpv/tools/FlyController.java`\r\n  - `decompiled-ky-fpv-2.0.0/sources/com/cooingdv/kyfpv/utils/StreamClient.java`\r\n  - `decompiled-ky-fpv-2.0.0/sources/com/cooingdv/kyfpv/utils/FlyCommandUtils.java`\r\n  - `decompiled-ky-fpv-2.0.0/sources/com/cooingdv/kyfpv/tools/FlyCommand.java`\r\n  - `decompiled-ky-fpv-2.0.0/sources/com/cooingdv/kyfpv/utils/WifiIdUtils.java`\r\n- Decompiled 4DRC FPV app:\r\n  - `decompiled-4drc-fpv-1.6.0/sources/com/cooingdv/fpv4drc/socket/Config.java`\r\n  - `decompiled-4drc-fpv-1.6.0/sources/com/cooingdv/fpv4drc/socket/SocketClient.java`\r\n  - 
`decompiled-4drc-fpv-1.6.0/sources/com/cooingdv/fpv4drc/tools/FlyController.java`\r\n  - `decompiled-4drc-fpv-1.6.0/sources/com/cooingdv/fpv4drc/tools/DriveController.java`\r\n  - `decompiled-4drc-fpv-1.6.0/sources/com/cooingdv/fpv4drc/fragment/ExcavatorFragment.java`\r\n  - `decompiled-4drc-fpv-1.6.0/sources/com/cooingdv/fpv4drc/utils/WifiIdUtils.java`\r\n\r\n## Executive Summary\r\n\r\nCooingDV drones in these apps use a simple Wi-Fi control plane:\r\n\r\n- Drone IP: `192.168.1.1`\r\n- RC/command UDP port: `7099`\r\n- Preview video: `rtsp://192.168.1.1:7070/webcam`\r\n- HTTP media access: `http://192.168.1.1/PHOTO/...` and\r\n  `http://192.168.1.1/DCIM/...`\r\n- FTP media root: `/0/`, username `ftp`, password `ftp`\r\n\r\nThe RC loop is a 20 Hz loop in the Android apps: `FlyController` schedules a\r\n`TimerTask` every 50 ms. TurboDrone mirrors that by defaulting CooingDV control\r\nto 20 Hz.\r\n\r\nThere are two Wi-Fi control packet families:\r\n\r\n- TC / short packets: 9 bytes total over UDP.\r\n- GL / extended packets: 21 bytes total over UDP.\r\n\r\nBoth packet families are wrapped with a leading `0x03` byte before being sent\r\nover UDP. KY UFO also has a native/BLE-like path through `UAV`, `GLJni`, and\r\n`TCJni` where the same inner payloads are sent without the Wi-Fi wrapper.\r\n\r\nThe video path in the Wi-Fi app mode is RTSP. The apps use an IJK-based video\r\nview configured to expose original video frames as `byte[]`, then decode those\r\nbytes with `BitmapFactory.decodeByteArray`, which strongly indicates JPEG/MJPEG\r\nframe blobs at the app boundary. The apps can also re-encode displayed frames to\r\nlocal H.264 MP4 for recording. 
That local H.264 encoder is not evidence that the\r\ndrone's wire stream itself is H.264.\r\n\r\n## TurboDrone Compatibility Summary\r\n\r\nTurboDrone's current `cooingdv` implementation is a good match for drones that\r\nexpose the classic CooingDV Wi-Fi path:\r\n\r\n- RC/telemetry on UDP `192.168.1.1:7099`\r\n- Video on `rtsp://192.168.1.1:7070/webcam`\r\n- 20 Hz stick loop\r\n- TC short packet `03 66 ... 99`\r\n- GL extended packet `03 66 14 ... 99`\r\n- Heartbeat `01 01`\r\n- Exit-control command `08 01`\r\n\r\nCompatibility by reviewed app:\r\n\r\n| Reviewed app | Current TurboDrone fit | Notes / edge cases |\r\n| --- | --- | --- |\r\n| KY UFO `com.cooingdv.kyufo` | Strong for classic Wi-Fi TC/GL; partial for native BL60x path | The Java Wi-Fi path matches UDP `7099` and RTSP `7070`. KY UFO also has native `UAV` / `libuav_gl.so` / `libuav_tc.so` transport targeting `192.168.169.1:8800`; TurboDrone does not implement that native path yet. |\r\n| RC UFO `com.cooingdv.rcufo` | Strong for classic Wi-Fi TC/GL | This is closest to TurboDrone's current adapter: UDP `7099`, RTSP `7070`, TC/GL auto-detection, password-capable IDs, and GL status telemetry. Some decompiled methods are damaged, so runtime captures are still useful. |\r\n| RC FPV `com.cooingdv.rcfpv` | Good for classic TC/short Wi-Fi path | RC FPV uses the same RTSP/UDP constants and short `03 66 ... 99` packet. Its Java `FlyController` does not show the GL 21-byte branch; treat it as TC/short unless telemetry proves otherwise. It references `UAV` native bridges, but this decompile lacks bundled `.so` files. |\r\n| KY FPV `com.cooingdv.kyfpv` | Good only for its classic `DeviceTXFragment` path; not complete for all KY FPV backends | KY FPV includes a TurboDrone-compatible RTSP/UDP TC path, but also has native `StreamClient` / `mjpeg_jni` and Jieli `DeviceClient` JSON `FLYING_CTRL` backends. TurboDrone does not currently implement those non-classic backends. 
|\r\n| 4DRC FPV `com.cooingdv.fpv4drc` | Strong for classic Wi-Fi TC/GL drone path; separate for excavator/drive mode | Drone flight control uses the same TC/GL split as KY UFO, with UDP `7099`, RTSP `7070`, and optional BL60x native `UAV`. It also adds `EXCAVATOR_720 = 89` and a distinct drive/excavator packet `03 33 ... 88`, which is not covered by TurboDrone's flight controller. |\r\n\r\nControl expectations for compatible classic-path drones:\r\n\r\n- Video should generally work via RTSP `7070`.\r\n- Manual stick control should generally work via UDP `7099`.\r\n- Emergency stop should generally work through the TC/GL emergency flag mapping.\r\n- Takeoff and land are best understood as TurboDrone UI abstractions over the\r\n  apps' fast-up / fast-down one-shot bits. The reviewed apps do not expose clear\r\n  named Android `takeoff` / `land` wire opcodes for the classic UDP path.\r\n\r\nPractical rule: if a drone works in one of these apps while exposing\r\n`192.168.1.1:7070` RTSP and responding on UDP `7099`, TurboDrone's current\r\n`cooingdv` adapter is likely the correct first attempt. If the app is using KY\r\nFPV's `mjpeg_jni`, Jieli JSON, or KY UFO's native `192.168.169.1:8800` path\r\nexclusively, TurboDrone needs a separate backend before compatibility can be\r\nclaimed.\r\n\r\nLikely future TurboDrone split:\r\n\r\n- `cooingdv`: current classic RTSP/UDP implementation.\r\n- `cooingdv_bl`: BL60x native MJPEG/control transport used by KY UFO `UAV` and\r\n  KY FPV `StreamClient`, both centered on `192.168.169.1:8800`.\r\n- `cooingdv_jieli`: KY FPV Jieli/CTP backend using `192.168.8.15:2228`,\r\n  `DeviceClient`, `FLYING_CTRL`, and `CTP:` JSON packets.\r\n- `cooingdv_drive` or `cooingdv_4drc_drive`: optional future non-drone backend\r\n  for the 4DRC excavator/ground-vehicle path. 
This should stay separate from\r\n  flight because its packet shape and controls are not quadcopter RC controls.\r\n\r\nImplementation status in TurboDrone:\r\n\r\n- `cooingdv` is implemented for classic RTSP/UDP.\r\n- `cooingdv_jieli` has an initial RC implementation for `CTP:`/`FLYING_CTRL`\r\n  over UDP `2228` and a first-pass RTP/JPEG video adapter using SDP `6789` and\r\n  RTP video port `6666`.\r\n- `cooingdv_bl` remains research-only until runtime captures confirm the safe\r\n  ACK/session behavior needed for motor control.\r\n- `cooingdv_drive` is not implemented and is out of scope for flight support.\r\n\r\n## Network Constants\r\n\r\nThe two apps share the same network constants in `Config.java`.\r\n\r\n- `SERVER_IP = \"192.168.1.1\"`\r\n- `SERVER_PORT = 7070`\r\n- `PREVIEW_ADDRESS = \"rtsp://192.168.1.1:7070/webcam\"`\r\n- `TCP_SERVER_HOST = \"192.168.1.1\"`\r\n- `TCP_SERVER_PORT = 5000`\r\n- `FTP_HOST = \"192.168.1.1\"`\r\n- `FTP_USERNAME = \"ftp\"`\r\n- `FTP_PASSWORD = \"ftp\"`\r\n- `FTP_ROOT_DIR = \"/0/\"`\r\n- `VIDEO_PATH = \"DCIM\"`\r\n- `IMAGE_PATH = \"PHOTO\"`\r\n- `LOCAL_IMAGE_SUFFIX = \".jpg\"`\r\n- `LOCAL_VIDEO_SUFFIX = \".avi\"`\r\n- `REMOTE_IMAGE_SUFFIX = \".jpg\"`\r\n- `REMOTE_VIDEO_SUFFIX = \".avi\"`\r\n\r\nThe Java code inspected does not use `TCP_SERVER_PORT = 5000` for flight\r\ncontrol. Active RC, heartbeat, camera switch, gallery sync, and password\r\ncommands all go through `UdpComm` to UDP port `7099`.\r\n\r\nThe newer KY FPV app still carries these constants for one of its RTSP/UDP\r\nscreens, but it also supports other backends through `StreamClient` and\r\n`DeviceClient`; see \"Additional CooingDV Publisher Apps\" below.\r\n\r\n## UDP Session Lifecycle\r\n\r\nBoth apps create their UDP client like this:\r\n\r\n```text\r\nUdpComm.getInstance(\"192.168.1.1\", 7099)\r\n```\r\n\r\n`UdpComm` uses `new DatagramSocket()` with no explicit local bind, so Android\r\nuses an ephemeral local port. 
The same socket is used for transmit and receive.\r\n\r\nThe receive thread allocates a 20-byte buffer:\r\n\r\n```text\r\nbyte[] bArr = new byte[20]\r\nDatagramPacket(bArr, bArr.length)\r\nsocket.receive(datagramPacket)\r\ncallback.onReceiveData(copyOf(datagramPacket.getData(), datagramPacket.getLength()))\r\n```\r\n\r\nThat means telemetry observed by the Java callback is limited to 20 bytes in\r\nthese app builds. This is enough for the app's first-byte capability detection,\r\ncamera reset state, gallery counters, password metadata, and small GL status\r\nframes.\r\n\r\nThe apps send a heartbeat every 1000 ms:\r\n\r\n```text\r\n01 01\r\n```\r\n\r\nWhen leaving flight-control mode, the Wi-Fi path sends:\r\n\r\n```text\r\n08 01\r\n```\r\n\r\nImportant: this is not a startup init packet. In both apps it is sent when the\r\ncontrol timer is cancelled. KY sends native command `65` (`0x65`, decimal 101)\r\ninstead when its `UAV` native path is active.\r\n\r\n## Discrete UDP Commands\r\n\r\nObserved discrete commands on UDP port `7099`:\r\n\r\n- `01 01`: heartbeat, once per second while preview/control is active.\r\n- `08 01`: leave flight-control mode / stop controller timer.\r\n- `06 01`: select one camera, usually front/default.\r\n- `06 02`: select alternate camera, usually rear/secondary.\r\n- `09 01`: screen/gallery/photo-side synchronization. Used by KY `switchScreen`\r\n  and by both apps after photo telemetry (`M`, `0x4d`).\r\n- `09 02`: screen/gallery/video-side synchronization. 
Used by KY `switchScreen`\r\n  and by both apps after video telemetry (`X`, `0x58`).\r\n- `0a d0 d1 d2 d3 d4 d5 d6 d7`: RC UFO password set command, where each `dN`\r\n  is a numeric byte parsed from one character of an 8-digit UI password.\r\n\r\nKY native-only commands through `UAV.sendCommand`:\r\n\r\n- `64` (`0x64`, decimal 100): sent after first native `picData` frame to\r\n  acknowledge/activate native streaming.\r\n- `63` (`0x63`, decimal 99): sent after native resolution/capability messages.\r\n- `65` (`0x65`, decimal 101): sent when leaving control mode while `UAV` is\r\n  active.\r\n\r\n## Flight Control Axes\r\n\r\nThe Android apps name the four stick bytes as:\r\n\r\n- `controlByte1`\r\n- `controlByte2`\r\n- `controlAccelerator`\r\n- `controlTurn`\r\n\r\nThe values use byte-centered joystick semantics:\r\n\r\n- Default center: `128`\r\n- Minimum: `1`\r\n- Maximum: `255`\r\n- If `controlAccelerator == 1`, the app writes it as `0` before sending.\r\n\r\nTurboDrone maps these into the higher-level model fields:\r\n\r\n- `roll` -> `controlByte1`\r\n- `pitch` -> `controlByte2`\r\n- `throttle` -> `controlAccelerator`\r\n- `yaw` -> `controlTurn`\r\n\r\n`CooingdvRcModel` defines a safe control range of `50..200` centered at `128`.\r\nThe protocol adapter performs a final byte clamp to `1..255`, matching the app\r\nframe builder. 
This separation is intentional: the model limits normal control\r\nmotion, while the adapter preserves protocol validity.\r\n\r\n## TC / Short Flight Packet\r\n\r\nThe TC path is used when the app's device type is `10`.\r\n\r\nNative inner payload in KY `UAV` mode:\r\n\r\n```text\r\n66 B1 B2 ACC TURN FLAGS CHECKSUM 99\r\n```\r\n\r\nWi-Fi UDP payload:\r\n\r\n```text\r\n03 66 B1 B2 ACC TURN FLAGS CHECKSUM 99\r\n```\r\n\r\nTotal Wi-Fi length: 9 bytes.\r\n\r\nByte layout:\r\n\r\n- Byte 0: `0x03`, Wi-Fi wrapper/prefix.\r\n- Byte 1: `0x66`, start marker.\r\n- Byte 2: `controlByte1`.\r\n- Byte 3: `controlByte2`.\r\n- Byte 4: `controlAccelerator`, except app coerces `1` to `0`.\r\n- Byte 5: `controlTurn`.\r\n- Byte 6: flags.\r\n- Byte 7: XOR checksum.\r\n- Byte 8: `0x99`, end marker.\r\n\r\nChecksum:\r\n\r\n```text\r\nchecksum = B1 ^ B2 ^ ACC ^ TURN ^ FLAGS\r\n```\r\n\r\nThe Android app's TC flag byte is:\r\n\r\n- Bit `0x01`: `isFastFly`\r\n- Bit `0x02`: `isFastDrop`\r\n- Bit `0x04`: `isEmergencyStop`\r\n- Bit `0x08`: `isCircleTurnEnd`\r\n- Bit `0x10`: `isNoHeadMode`\r\n- Bit `0x20`: `isFastReturn || isUnLock`\r\n- Bit `0x40`: KY `isOpenLight`; RC source is damaged here and has no\r\n  `isOpenLight` field.\r\n- Bit `0x80`: `isGyroCorrection`\r\n\r\nTurboDrone's TC flag names are higher-level names:\r\n\r\n- `0x01`: `takeoff_flag`\r\n- `0x02`: `land_flag`\r\n- `0x04`: `stop_flag`\r\n- `0x08`: `flip_flag`\r\n- `0x10`: `headless_flag`\r\n- `0x80`: `calibration_flag`\r\n\r\nThe TurboDrone mapping matches the byte positions and the most plausible\r\nbutton-level effects, but the decompiled app does not name explicit\r\n`takeoff`/`land` opcodes. The app names the low two bits as one-shot\r\nfast-up/fast-down actions. 
Treat TurboDrone's `takeoff` and `land` semantics as\r\nan abstraction over those app buttons, not as literal Android symbol names.\r\n\r\n## GL / Extended Flight Packet\r\n\r\nThe GL path is used when the app's device type is not `10`, normally device type\r\n`2`.\r\n\r\nNative inner payload in KY `UAV` mode:\r\n\r\n```text\r\n66 14 B1 B2 ACC TURN FLAGS1 FLAGS2 00 00 00 00 00 00 00 00 00 00 CHECKSUM 99\r\n```\r\n\r\nWi-Fi UDP payload:\r\n\r\n```text\r\n03 66 14 B1 B2 ACC TURN FLAGS1 FLAGS2 00 00 00 00 00 00 00 00 00 00 CHECKSUM 99\r\n```\r\n\r\nTotal Wi-Fi length: 21 bytes.\r\n\r\nByte layout:\r\n\r\n- Byte 0: `0x03`, Wi-Fi wrapper/prefix.\r\n- Byte 1: `0x66`, start marker.\r\n- Byte 2: `0x14`, decimal 20, extended payload marker/inner length.\r\n- Byte 3: `controlByte1`.\r\n- Byte 4: `controlByte2`.\r\n- Byte 5: `controlAccelerator`, except app coerces `1` to `0`.\r\n- Byte 6: `controlTurn`.\r\n- Byte 7: `flags1`.\r\n- Byte 8: `flags2`.\r\n- Bytes 9..18: reserved zero bytes in the generated app frames, except byte 19\r\n  below is checksum when using zero-based Wi-Fi indexing.\r\n- Byte 19: XOR checksum.\r\n- Byte 20: `0x99`, end marker.\r\n\r\nChecksum:\r\n\r\n```text\r\nchecksum = B1 ^ B2 ^ ACC ^ TURN ^ FLAGS1 ^ FLAGS2\r\n```\r\n\r\nThe Android app's GL `flags1` byte is:\r\n\r\n- Bit `0x01`: `isFastFly || isFastDrop`\r\n- Bit `0x02`: `isEmergencyStop`\r\n- Bit `0x04`: `isGyroCorrection`\r\n- Bit `0x08`: `isCircleTurnEnd`\r\n- Bit `0x10`: KY `isOpenLight`; absent in RC `FlyController`.\r\n- Bit `0x40`: `isGestureMode`\r\n\r\nThe Android app's GL `flags2` byte is:\r\n\r\n- Bit `0x01`: `isNoHeadMode`\r\n- Bit `0x02`: `isFixedHeightMode`\r\n\r\nTurboDrone's GL mapping:\r\n\r\n- `takeoff_flag` or `land_flag` -> `flags1 0x01`\r\n- `stop_flag` -> `flags1 0x02`\r\n- `calibration_flag` -> `flags1 0x04`\r\n- `flip_flag` -> `flags1 0x08`\r\n- `headless_flag` -> `flags2 0x01`\r\n\r\nAgain, TurboDrone's names represent the product-level control surface. 
The\r\ndecompiled apps expose these bits as fast up/down, emergency, gyro, circle/flip,\r\nheadless, and fixed-height style features.\r\n\r\n## Variant Detection\r\n\r\nThe apps infer TC versus GL behavior from UDP telemetry/capability bytes.\r\n\r\nKY `WifiIdUtils.isGL(i)`:\r\n\r\n```text\r\n90..101, 103\r\n```\r\n\r\nKY `WifiIdUtils.isNoGL(i)`:\r\n\r\n```text\r\n5, 9, 12, 19, 20, 21, 23, 24, 31, 45, 51, 63, 64, 65, 66, 67, 72\r\n```\r\n\r\nRC `WifiIdUtils.isGL(i)`:\r\n\r\n```text\r\n90..101, 103, 82, 85\r\n```\r\n\r\nRC password-capable IDs:\r\n\r\n```text\r\n80, 81, 82, 85\r\n```\r\n\r\nRC adds many KY aliases plus RC-specific resolution IDs:\r\n\r\n```text\r\n26, 27, 29, 30, 31, 41, 43, 44, 45, 68, 69, 70, 71, 72,\r\n80, 81, 82, 83, 84, 85, 86, 87, 90..101, 103, 105\r\n```\r\n\r\nTurboDrone mirrors this detection through the first byte of received UDP\r\ntelemetry:\r\n\r\n- IDs in `GL_RESOLUTION_IDS` select GL.\r\n- IDs in `KNOWN_RESOLUTION_IDS` select TC unless they also match GL.\r\n- Until a known ID is received, TurboDrone falls back to TC.\r\n- `COOINGDV_VARIANT=tc` or `COOINGDV_VARIANT=gl` can force a variant.\r\n- Aliases accepted by TurboDrone:\r\n  - TC: `tc`, `e88`, `short`\r\n  - GL: `gl`, `flow`, `extended`\r\n  - Auto: empty, `auto`, `detect`, `autodetect`\r\n\r\n## Additional CooingDV Publisher Apps\r\n\r\nTwo later decompiles broaden the CooingDV publisher picture:\r\n\r\n- `RC FPV 1.8.0` package: `com.cooingdv.rcfpv`\r\n- `KY FPV 2.0.0` package: `com.cooingdv.kyfpv`\r\n\r\nBoth are recognizably from the same publisher family, but they do not map\r\none-to-one onto the same runtime stack.\r\n\r\n### RC FPV 1.8.0\r\n\r\nRC FPV is closest to the original KY UFO / RC UFO implementation.\r\n\r\nShared constants and paths:\r\n\r\n- `Config.PREVIEW_ADDRESS = \"rtsp://192.168.1.1:7070/webcam\"`\r\n- `SERVER_IP = \"192.168.1.1\"`\r\n- `SERVER_PORT = 7070`\r\n- `TCP_SERVER_PORT = 5000` defined but not used for RC in the inspected Java\r\n  paths.\r\n- 
`UdpComm.getInstance(\"192.168.1.1\", 7099)`\r\n- `UdpComm` uses the same `DatagramSocket()` send/receive model and 20-byte\r\n  receive buffer.\r\n- Heartbeat is `01 01` every 1000 ms.\r\n- Camera switch uses `06 01` / `06 02`.\r\n- Photo/video gallery sync uses `09 01` / `09 02`.\r\n- Exit control mode uses `08 01` when not in native `UAV` mode.\r\n\r\nRC FPV includes the same Java native bridge package:\r\n\r\n- `com.cooingdv.bl60xmjpeg.UAV`\r\n- `GLJni` -> `System.loadLibrary(\"uav_gl\")`\r\n- `TCJni` -> `System.loadLibrary(\"uav_tc\")`\r\n\r\nHowever, the inspected `decompiled-rc-fpv-1.8.0/resources` tree has no\r\n`resources/lib` directory. Like RC UFO, this decompile either lacks the native\r\nsplit/universal APK libs or represents a packaging variant where those binaries\r\nwere omitted from the extracted resource tree.\r\n\r\nRC FPV's `FlyController` builds only the TC/short inner packet:\r\n\r\n```text\r\n66 B1 B2 ACC TURN FLAGS CHECKSUM 99\r\n```\r\n\r\nWi-Fi mode prefixes that with `0x03`:\r\n\r\n```text\r\n03 66 B1 B2 ACC TURN FLAGS CHECKSUM 99\r\n```\r\n\r\nNative `UAV` mode sends the inner 8-byte payload directly.\r\n\r\nRC FPV does not show the GL 20-byte packet branch in its `FlyController`. 
It is\r\ntherefore best modeled as a TC/short CooingDV variant, with optional native\r\n`UAV` support if its missing native libraries are recovered.\r\n\r\nRC FPV flag bits match the TC-style family:\r\n\r\n- `0x01`: fast fly / up\r\n- `0x02`: fast drop / down\r\n- `0x04`: emergency stop\r\n- `0x08`: circle turn end / flip-like one-shot\r\n- `0x10`: no-head / headless\r\n- `0x20`: fast return or unlock\r\n- `0x80`: gyro correction\r\n\r\nTwo details differ from the older RC UFO source:\r\n\r\n- `controlTurn` is dead-zoned to 128 when it falls between 104 and 152.\r\n- The decompiled fast-return/unlock branch is cleaner than RC UFO's damaged\r\n  output: `0x20` is set when `isFastReturn` or `isUnLock` is true.\r\n\r\nRC FPV `WifiIdUtils` is narrower than RC UFO:\r\n\r\n- Adds `COVERT_8K = 33`\r\n- Adds `RESOLUTION_SETTING = 46`\r\n- Double-camera IDs: `41`, `43`, `44`\r\n- Password-capable IDs still include `80`, `81`, `82`, `85` constants, but no\r\n  `EnterPasswordDialog` equivalent was observed in the same way as RC UFO.\r\n- No GL/flow ID table was observed in this `WifiIdUtils`; it is not doing the\r\n  same GL-vs-TC selection as RC UFO.\r\n\r\n### KY FPV 2.0.0\r\n\r\nKY FPV 2.0.0 is broader than the UFO apps and carries at least three control\r\nfamilies behind one publisher UI.\r\n\r\n1. 
Native BL/MJPEG `StreamClient`\r\n\r\n`StreamClient` loads:\r\n\r\n```text\r\nSystem.loadLibrary(\"mjpeg_jni\")\r\n```\r\n\r\nNative methods:\r\n\r\n- `streamStartServer()`\r\n- `streamStopServer()`\r\n- `streamSendCommand(byte[] command, int channel)`\r\n- `streamSwitchCamera(int index)`\r\n- `streamSetModify(String ssid, int resolution, int channel, int camera)`\r\n\r\nCallbacks:\r\n\r\n- `functionPicture(byte[], long, byte)` marks the stream active, sends native\r\n  command `64` (`0x64`, decimal 100) once, and forwards frame bytes to\r\n  `OnStreamListener.onVideo`.\r\n- `functionMessage(int)` handles resolution/status values, broadcasts fake\r\n  resolution changes, applies model-specific UI customization, and forwards the\r\n  integer to `OnStreamListener.onReceiver`.\r\n\r\n`FlyController` type `1` sends the same short TC inner packet through\r\n`StreamClient.sendCommand(...)`:\r\n\r\n```text\r\n66 B1 B2 ACC TURN FLAGS CHECKSUM 99\r\n```\r\n\r\nIt also sends native command `65` (`0x65`, decimal 101) when leaving control\r\nmode, matching the older `UAV` native exit behavior.\r\n\r\nThe native split APK for KY FPV contains `libmjpeg_jni.so` under\r\n`resources/config.armeabi_v7a.apk/lib/armeabi-v7a`. 
Ghidra and ELF inspection\r\nshow it is the same protocol family as the KY UFO `libuav_tc.so` engine, but\r\nwith a different Java wrapper:\r\n\r\n- Java class: `com.cooingdv.kyfpv.utils.StreamClient`\r\n- JNI library: `libmjpeg_jni.so`\r\n- Native target: `192.168.169.1:8800`\r\n- Local socket: UDP bound to `0.0.0.0` with an ephemeral local port\r\n- Start packet: `ef 00 04 00`\r\n- Incoming envelope marker: `0x93`\r\n- Native frame assembly: 1024-byte fragments, embedded `640x360` JPEG headers,\r\n  quality tables for 5/10/25/50/75/100, and final JPEG EOI `ff d9`\r\n- Native callbacks:\r\n  - `functionPicture(byte[], long, byte)`\r\n  - `functionMessage(int)`\r\n\r\nImportant Ghidra-confirmed functions:\r\n\r\n- `Java_com_cooingdv_kyfpv_utils_StreamClient_streamStartServer`\r\n- `Java_com_cooingdv_kyfpv_utils_StreamClient_streamStopServer`\r\n- `Java_com_cooingdv_kyfpv_utils_StreamClient_streamSendCommand`\r\n- `Java_com_cooingdv_kyfpv_utils_StreamClient_streamSwitchCamera`\r\n- `Java_com_cooingdv_kyfpv_utils_StreamClient_streamSetModify`\r\n- `mjpeg_ndk_command_send`\r\n- `mjpeg_ndk_settings_send`\r\n- `mjpeg_ndk_queryinfo_cmd_send`\r\n- `handle_mcu_msg_frag`\r\n- `handle_mcu_msg_ack`\r\n- `build_send_ack`\r\n\r\n`streamSendCommand(byte[], int)` copies the Java command bytes and calls\r\n`mjpeg_ndk_command_send`. That native command path accepts payloads shorter than\r\n`0x81`, stores command length plus a `length + 0x0c` field, and piggybacks the\r\ncommand into the native ACK/control stream. This is not the Java UDP `7099`\r\ntransport; it should be treated as a separate BL60x native backend even though\r\nthe 8-byte stick payload is familiar.\r\n\r\n`streamSetModify(...)` builds the same `oGMcOfyZdIurm2kS` settings payload\r\nfamily seen in the KY UFO native code, with a mode value of `2` for this\r\nStreamClient path.\r\n\r\n2. 
Jieli / JSON `DeviceClient`\r\n\r\nKY FPV adds `com.jieli.lib.dv.control.DeviceClient` through `ClientManager`.\r\nThis is a materially different backend from the raw UDP packet path.\r\n\r\nKY FPV constructs `new DeviceClient(context, 1)`, selecting the UDP\r\nimplementation. The app connects to:\r\n\r\n```text\r\n192.168.8.15:2228\r\n```\r\n\r\nfrom `IConstants.DEFAULT_DEV_IP` and `AP_MODE_UDP_PORT`.\r\n\r\nThe Jieli command envelope is not the CooingDV `0x03 0x66` packet. It is a\r\nlittle-endian CTP envelope:\r\n\r\n```text\r\noffset  size  meaning\r\n0x00    4     ASCII \"CTP:\"\r\n0x04    2     little-endian topic length\r\n0x06    N     topic string\r\n...     4     little-endian JSON payload length\r\n...     M     JSON payload\r\n```\r\n\r\nThe JSON payload looks like:\r\n\r\n```json\r\n{\"op\":\"PUT\",\"param\":{\"state\":\"1\"}}\r\n```\r\n\r\nor, for flight controls:\r\n\r\n```json\r\n{\r\n  \"op\": \"PUT\",\r\n  \"param\": {\r\n    \"BYTE0\": \"102\",\r\n    \"BYTE1\": \"...\",\r\n    \"BYTE2\": \"...\",\r\n    \"BYTE3\": \"...\",\r\n    \"BYTE4\": \"...\",\r\n    \"BYTE5\": \"...\",\r\n    \"BYTE6\": \"...\",\r\n    \"BYTE7\": \"153\"\r\n  }\r\n}\r\n```\r\n\r\n`FlyCommandUtils.tryToSendFlyCommand(int[])` sends a `SettingCmd` topic\r\n`FLYING_CTRL` with parameters:\r\n\r\n```text\r\nBYTE0=102\r\nBYTE1=B1\r\nBYTE2=B2\r\nBYTE3=ACC\r\nBYTE4=TURN\r\nBYTE5=FLAGS\r\nBYTE6=CHECKSUM\r\nBYTE7=153\r\n```\r\n\r\n`FlyController` type `2` uses this JSON backend. 
The same 8-byte TC control\r\npayload is preserved, but it is transported as named JSON settings rather than\r\nas a raw UDP datagram.\r\n\r\nOther JSON command topics in `FlyCommand` include:\r\n\r\n- `CONTROL_MODE`\r\n- `FLYING_CTRL`\r\n- `SWITCH_CAMERA`\r\n- `RT_PIC_POSITION_CTL`\r\n- `REQUEST_FAKE_NUMBER`\r\n- numeric legacy topics such as `0136`, `0138`, `0141`, `0142`, `0143`,\r\n  `0144`, `0145`, `0146`, `0147`\r\n\r\nThis strongly suggests that newer CooingDV apps support at least one chipset/app\r\nbackend that is not covered by TurboDrone's current raw UDP/RTSP CooingDV\r\nadapter.\r\n\r\n3. Classic RTSP/UDP `DeviceTXFragment`\r\n\r\nKY FPV still includes a classic RTSP/UDP path:\r\n\r\n- RTSP preview: `rtsp://192.168.1.1:7070/webcam`\r\n- UDP telemetry/control: `192.168.1.1:7099`\r\n- Heartbeat: `01 01`\r\n- Exit control: `08 01`\r\n- Photo/video notifications: `M` / `X`, with `09 01` / `09 02` responses\r\n- `OnReceivedOriginalDataListener` -> `MjpegThread.drawBitmap(byte[])`\r\n\r\n`FlyController` type `3` delegates raw 8-byte TC inner frames back to\r\n`DeviceTXFragment.sendFlyControllerData`, which prefixes `0x03` and sends the\r\nresult by `UdpComm`.\r\n\r\nKY FPV therefore preserves the TurboDrone-compatible TC/short path, but it also\r\ncontains non-TurboDrone backends.\r\n\r\nKY FPV `WifiIdUtils` has the broadest observed model table so far. Notable\r\nadditional IDs and model families:\r\n\r\n- FPV IDs: `25`, `26`, `27`, `29`, `30`\r\n- DF FPV: `14`, `50`\r\n- E19 Eachine: `10`\r\n- Hasakee Q8: `62`\r\n- Qixin Toy: `34`\r\n- XKY 4K: `60`, `61`\r\n- hide/custom UI variants: `13`, `28`, `32`\r\n- PRO26 family: `105`, `109`, `112`, `119`, `121`, `123`, `124`\r\n- F-resolution no music/gesture IDs: `212`, `213`\r\n\r\nThese model IDs should be considered publisher-level capability IDs, not just\r\nresolution values. 
KY FPV uses them to hide controls, change language, switch\r\nbackgrounds, set double-camera state, and select backend behavior.\r\n\r\n### Impact On TurboDrone\r\n\r\nCurrent TurboDrone `cooingdv` support maps well to:\r\n\r\n- RC UFO Wi-Fi TC/GL path.\r\n- KY UFO Wi-Fi TC/GL path.\r\n- RC FPV classic TC Wi-Fi path.\r\n- KY FPV `DeviceTXFragment` classic TC Wi-Fi path.\r\n- 4DRC FPV drone-flight path.\r\n\r\nCurrent TurboDrone does not yet cover:\r\n\r\n- KY/RC native `UAV` / `StreamClient` BL60x-style native transport unless the\r\n  drone also exposes the classic Java Wi-Fi path.\r\n- KY FPV `DeviceClient` / Jieli JSON settings backend.\r\n- The expanded KY FPV model-ID capability table and UI-hide/customization\r\n  behavior.\r\n- 4DRC FPV excavator/ground-vehicle controls.\r\n\r\nThe safest immediate expansion is to broaden CooingDV telemetry recognition with\r\nthe RC FPV and KY FPV IDs while keeping the packet encoder conservative:\r\n\r\n- Treat RC FPV as TC/short unless runtime telemetry proves otherwise.\r\n- Treat KY FPV classic `DeviceTXFragment` as TC/short.\r\n- Do not assume the Jieli JSON backend is reachable through UDP `7099`.\r\n- Do not assume KY FPV native `mjpeg_jni` behaves exactly like KY UFO\r\n  `libuav_gl.so` / `libuav_tc.so` for every model, even though Ghidra shows the\r\n  same BL60x-style `192.168.169.1:8800` native protocol family.\r\n- Treat 4DRC FPV `EXCAVATOR_720 = 89` as a non-flight device class. It may share\r\n  video and native BL transport, but its RC packet is a drive/excavator packet,\r\n  not a quadcopter packet.\r\n\r\nImplementation recommendation:\r\n\r\n- Add `cooingdv_bl` only if hardware capture shows target drones require the\r\n  native `192.168.169.1:8800` path and do not expose classic RTSP/UDP.\r\n- Add `cooingdv_jieli` as a distinct backend if we want KY FPV\r\n  `DeviceClient`/Jieli devices. It needs CTP packet construction, UDP `2228`,\r\n  Jieli realtime stream handling, and JSON command topics. 
TurboDrone now has\r\n  initial CTP RC and RTP/JPEG video support, but it still needs hardware\r\n  validation and H.264 handling if a device chooses RTS format `1`.\r\n- Add a drive/excavator backend only if TurboDrone intentionally grows beyond\r\n  flying drones. The 4DRC excavator path is clearly CooingDV-family, but it does\r\n  not belong in a flight-control adapter.\r\n\r\n### 4DRC FPV 1.6.0\r\n\r\n4DRC FPV package:\r\n\r\n```text\r\ncom.cooingdv.fpv4drc\r\n```\r\n\r\nThe app is a strong overlap with KY UFO / RC FPV:\r\n\r\n- Classic RTSP preview: `rtsp://192.168.1.1:7070/webcam`\r\n- UDP command/telemetry: `192.168.1.1:7099`\r\n- Heartbeat: `01 01`\r\n- Camera switch: `06 01` / `06 02`\r\n- Photo/video gallery sync: `09 01` / `09 02`\r\n- Native bridge: `com.cooingdv.bl60xmjpeg.UAV`\r\n- Native libraries in split APK:\r\n  - `libuav_gl.so`\r\n  - `libuav_tc.so`\r\n  - `libnative-lib.so`\r\n  - IJK/OpenCV/GPUImage/pocketsphinx support libraries\r\n\r\nThe 4DRC native `libuav_gl.so` and `libuav_tc.so` contain the same important\r\nBL60x-family strings and symbols:\r\n\r\n- `192.168.169.1`\r\n- `8800`\r\n- `mjpeg_ndk_start`\r\n- `mjpeg_ndk_command_send`\r\n- `mjpeg_ndk_set_active_camera_index`\r\n- `mjpeg_ndk_set_QPara`\r\n- `build_send_ack`\r\n- `handle_mcu_msg_frag`\r\n- embedded `jpeg_header_640x360_Q*` tables\r\n\r\nThis means 4DRC FPV overlaps with `cooingdv_bl`, not Jieli.\r\n\r\n4DRC FPV `FlyController` is the familiar KY UFO-style TC/GL split:\r\n\r\n- If `UAV.getInstance().getDeviceType() != 10`, it builds the GL extended\r\n  20-byte inner payload `66 14 ... 99`, then prefixes `03` for Wi-Fi.\r\n- If device type is `10`, it builds the TC 8-byte inner payload `66 ... 
99`,\r\n  then prefixes `03` for Wi-Fi.\r\n- Native `UAV` mode sends the inner payload directly.\r\n- Stop/exit sends native `65` (`0x65`, decimal 101) when native is active, or\r\n  UDP `08 01` otherwise.\r\n\r\nThe biggest unique piece is not a drone: 4DRC FPV includes an excavator/drive\r\nmode.\r\n\r\n`WifiIdUtils` defines:\r\n\r\n```text\r\nEXCAVATOR_720 = 89\r\n```\r\n\r\nIf this ID is selected, the menu opens `ExcavatorFragment`, which uses\r\n`DriveController`, not `FlyController`.\r\n\r\n`DriveController` packet:\r\n\r\n```text\r\n33 TURN ACCEL BUCKET_FLAGS MODE_FLAGS 88\r\n```\r\n\r\nWi-Fi wrapper:\r\n\r\n```text\r\n03 33 TURN ACCEL BUCKET_FLAGS MODE_FLAGS 88\r\n```\r\n\r\nDrive/excavator bytes:\r\n\r\n- Byte 0: `0x33` (`51`)\r\n- Byte 1: turn\r\n- Byte 2: accelerator\r\n- Byte 3: bucket flags\r\n  - `0x01`: excavator rise\r\n  - `0x02`: excavator drop\r\n  - `0x04`: excavator left\r\n  - `0x08`: excavator right\r\n- Byte 4: mode flags\r\n  - `0x04`: auto\r\n  - `0x08`: music\r\n  - `0x10`: light\r\n- Byte 5: `0x88` (`136`)\r\n\r\nThis packet has no checksum byte and is not compatible with TurboDrone's\r\nquadcopter control model. 
If we ever support it, it should be a separate\r\nground-vehicle/excavator backend, not a `cooingdv` flight variant.\r\n\r\n## Telemetry And App Messages\r\n\r\nBoth apps pass received UDP packets to the flight fragment.\r\n\r\nCommon photo/video notifications:\r\n\r\n- If `bArr[2] == 77` (`0x4d`, ASCII `M`), the app treats it as a photo event.\r\n  The photo counter is read from `bArr[3]`.\r\n- If `bArr[2] == 88` (`0x58`, ASCII `X`), the app treats it as a video event.\r\n  The video counter is read from `bArr[4]`.\r\n- On new photo count, both apps send `09 01`.\r\n- On new video count, both apps send `09 02`.\r\n- Shorter packets with only `bArr[2] == M/X` trigger direct UI tab switching.\r\n\r\nRC UFO has additional GL Wi-Fi status handling when `SocketClient.getDeviceType()\r\n== 2` and `bArr[0] == 0x66`:\r\n\r\n- If `bArr[1] == 0`, it reads state from `bArr[2]` and `bArr[9]`.\r\n- If packet length is 10 or 15, it reads a state byte from `bArr[4]`.\r\n- State values toggle `isTakingControl` and simulate top-list UI clicks for\r\n  photo/video tabs.\r\n\r\nKY's Wi-Fi `SocketClient` handles first-byte resolution, GL/TC detection, camera\r\nreset state in `bArr[1]`, and screen-switch state in `bArr[2]`. KY's richer GL\r\nstatus parsing appears in the native `PicDataCallback` path used by\r\n`DeviceGLFragment` and `DeviceBLFragment`.\r\n\r\n## KY UFO Native Path\r\n\r\nKY UFO includes `com.cooingdv.bl60xmjpeg.UAV` and native wrappers:\r\n\r\n- `GLJni` loads `libuav_gl`\r\n- `TCJni` loads `libuav_tc`\r\n\r\n`MainActivity` initializes the native stack:\r\n\r\n```text\r\nUAV.getInstance().init(this)\r\nUAV.getInstance().startServer()\r\nSocketClient.getInstance().initVideoView(...)\r\nSocketClient.getInstance().start()\r\n```\r\n\r\n`UAV` starts in unknown device type `0`. 
Native `deviceStatus` sets:\r\n\r\n- `10` for TC.\r\n- `2` for GL.\r\n\r\n`UAV.sendCommand(byte[])` sends to the native implementation selected by\r\n`mDeviceType`.\r\n\r\nImportant distinction:\r\n\r\n- Wi-Fi control packets include the leading `0x03` wrapper.\r\n- Native `UAV` commands use the inner 8-byte TC or 20-byte GL payload directly.\r\n\r\nNative video callbacks:\r\n\r\n- `picData(byte[] bArr, long seq, byte quality)` receives JPEG-like frame bytes.\r\n- On first frame, `UAV` marks itself active and sends native command `0x64`.\r\n- If not stopped, frames are passed to `PicDataCallback.onData`.\r\n- `picMessage(byte[] bArr)` is used for resolution/status messages and can send\r\n  native command `0x63` after resolution discovery.\r\n\r\nRC UFO does not include this `bl60xmjpeg.UAV` path in the inspected package. It\r\nis Wi-Fi/RTSP oriented and adds password handling and advertising/consent code.\r\n\r\n## Native Library Inventory\r\n\r\nKY UFO ships native libraries under `resources/lib` for three ABIs:\r\n\r\n- `arm64-v8a`\r\n- `armeabi`\r\n- `armeabi-v7a`\r\n\r\nEach ABI contains:\r\n\r\n- `libgesture-lib.so`\r\n- `libgpuimage-library.so`\r\n- `libijkffmpeg.so`\r\n- `libijkplayer.so`\r\n- `libijksdl.so`\r\n- `libopencv_java3.so`\r\n- `libuav_gl.so`\r\n- `libuav_tc.so`\r\n\r\nThe `arm64-v8a` libraries are the most useful static-analysis target:\r\n\r\n| Library | Size | SHA-256 prefix | Role |\r\n| --- | ---: | --- | --- |\r\n| `libuav_gl.so` | 30,472 | `b62090ca898f41d4` | GL native MJPEG/control engine |\r\n| `libuav_tc.so` | 26,376 | `d43457e0f04b6025` | TC native MJPEG/control engine |\r\n| `libgesture-lib.so` | 501,976 | `b209601f7489a586` | OpenCV-backed gesture recognition |\r\n| `libijkplayer.so` | 418,984 | `c8ed8af43bf12090` | IJK player core |\r\n| `libijksdl.so` | 485,448 | `d53a2b3c63a6ce35` | IJK SDL/media glue |\r\n| `libijkffmpeg.so` | 3,780,216 | `3787aeac5935379a` | FFmpeg media stack |\r\n| `libopencv_java3.so` | 18,696,224 | 
`34b23b9914cfb6bb` | OpenCV runtime |\r\n| `libgpuimage-library.so` | 5,448 | `a7d4b44990bb5ef0` | GPUImage JNI/helper |\r\n\r\nRC UFO's inspected `resources` tree has no `resources/lib` directory and no\r\nbundled `.so` files. Its Java still calls `System.loadLibrary(\"lib_gesture\")`,\r\nso this particular decompile appears to be incomplete for that library, built\r\nfrom a split APK without the native split, or decompiled from an APK variant that\r\nomitted native libs.\r\n\r\n## Native JNI Surface\r\n\r\nKY `GLJni` exports these JNI entrypoints from `libuav_gl.so`:\r\n\r\n- `Java_com_cooingdv_bl60xmjpeg_utils_GLJni_nativeInit`\r\n- `Java_com_cooingdv_bl60xmjpeg_utils_GLJni_nativeStart`\r\n- `Java_com_cooingdv_bl60xmjpeg_utils_GLJni_nativeStop`\r\n- `Java_com_cooingdv_bl60xmjpeg_utils_GLJni_nativeUninit`\r\n- `Java_com_cooingdv_bl60xmjpeg_utils_GLJni_UnregisterDeviceStatus`\r\n- `Java_com_cooingdv_bl60xmjpeg_utils_GLJni_nativeSendCommand`\r\n- `Java_com_cooingdv_bl60xmjpeg_utils_GLJni_nativeSetCameraIndex`\r\n- `Java_com_cooingdv_bl60xmjpeg_utils_GLJni_nativeSetQPara`\r\n- `Java_com_cooingdv_bl60xmjpeg_utils_GLJni_nativeSetModify`\r\n\r\nKY `TCJni` exports these JNI entrypoints from `libuav_tc.so`:\r\n\r\n- `Java_com_cooingdv_bl60xmjpeg_utils_TCJni_stringFromJNI`\r\n- `Java_com_cooingdv_bl60xmjpeg_utils_TCJni_unimplementedStringFromJNI`\r\n- `Java_com_cooingdv_bl60xmjpeg_utils_TCJni_nativeSendCommand`\r\n- `Java_com_cooingdv_bl60xmjpeg_utils_TCJni_nativeSetModify`\r\n- `Java_com_cooingdv_bl60xmjpeg_utils_TCJni_setActiveCameraIndex`\r\n- `Java_com_cooingdv_bl60xmjpeg_utils_TCJni_setQPara`\r\n\r\n`libgesture-lib.so` exports:\r\n\r\n- `Java_com_cooingdv_kyufo_utils_JniUtils_nativeGestureRecognition`\r\n\r\nThe KY gesture library links against `libopencv_java3.so`, references\r\n`gesture.jpg`, `fist.xml`, and `rpalm.xml` under the app documents directory,\r\nand appears UI/vision-only. 
It does not appear to carry flight-control or video\r\ntransport protocol logic.\r\n\r\nThe native callback map is:\r\n\r\n```text\r\nlibuav_gl.so\r\n  -> GLJni.cbJpegFromNative(byte[], long, byte)\r\n      -> ReceiveDataCallback.picData(...)\r\n      -> UAV.AnonymousClass1.picData(...)\r\n      -> PicDataCallback.onData(...)\r\n      -> MjpegThread.drawBitmap(...)\r\n\r\n  -> GLJni.cbCtlMsgFromNative(byte[], long)\r\n      -> ReceiveDataCallback.picMessage(...)\r\n      -> UAV.AnonymousClass1.picMessage(...)\r\n      -> PicDataCallback.onReceiver(...)\r\n\r\n  -> GLJni.cbDeviceStatusFromNative(byte[], long)\r\n      -> ReceiveDataCallback.deviceStatus(...)\r\n      -> UAV.AnonymousClass1.deviceStatus(...)\r\n\r\nlibuav_tc.so\r\n  -> TCJni.function_for_pic(byte[], long, byte)\r\n      -> ReceiveDataCallback.picData(...)\r\n\r\n  -> TCJni.java_function_for_mcuctl(byte[], int, int)\r\n      -> ReceiveDataCallback.picMessage(...)\r\n```\r\n\r\n## Native MJPEG Engine Findings\r\n\r\n`libuav_gl.so` and `libuav_tc.so` are not generic wrappers only. 
They contain a\r\nsmall native MJPEG/control engine with symbols such as:\r\n\r\n- `mjpeg_ndk_start`\r\n- `mjpeg_ndk_startup`\r\n- `mjpeg_ndk_stop`\r\n- `mjpeg_ndk_command_send`\r\n- `mjpeg_ndk_settings_send`\r\n- `mjpeg_ndk_queryinfo_cmd_send` (TC)\r\n- `mjpeg_ndk_custom_cmd_send` (GL)\r\n- `mjpeg_ndk_frame_callback_register`\r\n- `mjpeg_ndk_ctlmsg_cb_register`\r\n- `mjpeg_ndk_device_status_cb_register` (GL)\r\n- `handle_mcu_msg_ctlmsg`\r\n- `handle_mcu_msg_frag`\r\n- `build_send_ack`\r\n\r\nThe native libraries also contain built-in JPEG header tables:\r\n\r\n- `jpeg_header_640x360_Q5`\r\n- `jpeg_header_640x360_Q10`\r\n- `jpeg_header_640x360_Q25`\r\n- `jpeg_header_640x360_Q50`\r\n- `jpeg_header_640x360_Q75`\r\n- `jpeg_header_640x360_Q100`\r\n\r\nThe decompiled native fragment handlers show the native path is assembling JPEG\r\nframes from MCU fragments:\r\n\r\n- The fragment payload size is `0x400` bytes for non-final fragments.\r\n- The final fragment copies `body_len & 0x3ff` bytes.\r\n- Fragment slots are keyed by sequence and fragment ID.\r\n- Four image assembly slots are used.\r\n- A small output queue is used; when full, the oldest queued frame is dropped.\r\n- Fragment receipt is tracked with a bitset.\r\n- The first fragment causes the library to copy one of the built-in JPEG\r\n  headers into the output buffer.\r\n- Width/height fields are patched into that JPEG header.\r\n- The final fragment appends JPEG EOI bytes `ff d9`.\r\n- Once all fragments are present, a complete JPEG is pushed to the Java callback\r\n  queue.\r\n\r\nThis is stronger than the Java-only inference: KY's native `UAV` path delivers\r\ncomplete JPEG byte arrays to Java after native fragment reassembly. 
The Java\r\n`MjpegThread` then decodes those complete JPEGs with `BitmapFactory`.\r\n\r\nThe quality byte from native fragment metadata selects the JPEG header table:\r\n\r\n- `5` -> Q5\r\n- `10` -> Q10\r\n- `25` -> Q25\r\n- `50` -> Q50\r\n- `75` -> Q75\r\n- `100` -> Q100\r\n\r\nBoth `libuav_gl.so` and `libuav_tc.so` contain `192.168.169.1` as a native\r\ntarget string, and the decompiled native start paths pass port string `8800`.\r\nThat does not match the Wi-Fi RTSP/UDP Java constants `192.168.1.1:7070` and\r\n`192.168.1.1:7099`; the KY native `UAV` path is therefore a separate BL60x-style\r\nnative transport, not the same Java `SocketClient` Wi-Fi path.\r\n\r\n### Native Socket Lifecycle\r\n\r\nBoth native engines allocate a large session object and open a UDP socket:\r\n\r\n- Local bind: `getaddrinfo(\"0.0.0.0\", NULL, ...)`, `socket(AF_INET, SOCK_DGRAM)`,\r\n  then `bind(...)`. Because no service/port is provided, this binds an ephemeral\r\n  local UDP port.\r\n- Remote target: `getaddrinfo(\"192.168.169.1\", \"8800\", ...)`.\r\n- GL marks the socket non-blocking with `fcntl(..., O_NONBLOCK)`.\r\n- Both engines store the local socket and remote sockaddr in the session.\r\n\r\nGL creates three detached threads from `create_instance`:\r\n\r\n- Timer / ACK / start thread.\r\n- Frame delivery thread.\r\n- Receive/parser thread.\r\n\r\nTC creates analogous threads, but its thread functions attach to the JVM because\r\nTC uses static Java callback functions.\r\n\r\nThe timer/start thread sends a 4-byte start packet until the first frame/fragment\r\nactivity is established:\r\n\r\n```text\r\nef 00 04 00\r\n```\r\n\r\nThis is `0x000400ef` in the little-endian decompiler output. The thread sends it\r\nabout every 100 ms while the engine is not yet active. 
Once active, it watches\r\nfor fragment silence; after roughly 3000 ms without fragments, it resets the\r\nnative assembly state and starts again.\r\n\r\nThe native engines also send ACKs about every 25 ms while active. If the last\r\nfragment was very recent, the ACK builder uses the latest fragment slot; if not,\r\nit uses a special `0xfffffffe` path that can request recovery or report pending\r\nflight-control-only state.\r\n\r\n### Native Incoming Message Envelope\r\n\r\nThe native receive thread reads up to `0x438` bytes from the UDP socket and only\r\naccepts packets whose first byte is `0x93` and whose 16-bit length field at\r\noffset 2 equals the number of bytes read.\r\n\r\nThe observed incoming envelope is:\r\n\r\n```text\r\noffset  size  meaning\r\n0x00    1     0x93 packet marker\r\n0x01    1     message type\r\n0x02    2     total packet length\r\n0x04    4     sequence / command id / status id\r\n0x08    2     payload length for control/status callbacks\r\n0x0c    ...   payload bytes\r\n```\r\n\r\nMessage types observed in native dispatch:\r\n\r\n- `0x01`: image fragment, handled by `handle_mcu_msg_frag`.\r\n- `0x02`: ACK, TC-only dispatch to `handle_mcu_msg_ack`.\r\n- `0x04`: MCU control/status message, handled by `handle_mcu_msg_ctlmsg`.\r\n- `0x08`: query-info response, TC-only dispatch to\r\n  `handle_mcu_msg_queryinfo_resp`.\r\n\r\nGL's receive thread handles fragment (`0x01`) and control/status (`0x04`).\r\nTC's receive thread handles fragment (`0x01`), ACK (`0x02`), control/status\r\n(`0x04`), and query-info response (`0x08`).\r\n\r\n### Native Control Message Handling\r\n\r\nNative `nativeSendCommand(byte[])` does not reinterpret the Java control bytes.\r\nBoth GL and TC wrappers copy the Java byte array from JNI into a local buffer and\r\npass it to `mjpeg_ndk_command_send`.\r\n\r\nImportant differences:\r\n\r\n- GL native command send accepts payloads shorter than `0x81` bytes and stores\r\n  command length and `length + 0x0c`.\r\n- TC 
native command send accepts payloads shorter than `9` bytes and stores a\r\n  duplicate command envelope with magic `0x04ef`.\r\n- The Java `FlyController` sends inner TC/GL payloads into native mode, not the\r\n  Wi-Fi `0x03` wrapper.\r\n\r\nNative command payloads are not always sent immediately as standalone UDP\r\npackets. The regular `mjpeg_ndk_command_send` path stores the command in the\r\nsession so the ACK/timer thread can include it in the next outgoing ACK/control\r\npacket. This explains the native log string:\r\n\r\n```text\r\n[ACK] 0 frame ack, flyctl msg only\r\n```\r\n\r\nSettings/custom helpers do send immediate UDP packets:\r\n\r\n- GL/TC settings packets use magic `0x04ef`, a `length + 0x0c` field, and copy\r\n  the settings payload after a 12-byte native envelope.\r\n- GL custom command packets use magic `0x20ef`, a `length + 4` field, and copy a\r\n  payload of 1..64 bytes.\r\n- TC query-info packets use magic `0x10ef` and message type/status `0x08` in the\r\n  decompiled `0x000810ef` word.\r\n\r\nNative `handle_mcu_msg_ctlmsg` receives MCU control/status payloads and forwards\r\nthe bytes after a 12-byte native envelope to Java callbacks:\r\n\r\n- GL forwards `param_2 + 0x0c`, length at `param_2 + 8`, and sequence/status at\r\n  `param_2 + 4`.\r\n- TC checks a header byte `0x93`, validates the packet length, forwards\r\n  `param_2 + 0x0c`, and sends an 8-byte ACK/control response with magic\r\n  `0x0808ef`.\r\n\r\nNative `build_send_ack` builds ACK packets for image fragments:\r\n\r\n- GL uses magic `0x02ef` with `0x01000202` in the decompiled local header.\r\n- TC uses analogous ACK behavior and logs urgent whole-loss, normal, special,\r\n  and fly-control-only ACK cases.\r\n- GL logs `[ACK] 0 frame ack, flyctl msg only` when there are no image-frame\r\n  ACKs but a pending flight-control message exists.\r\n\r\nThese ACK/envelope formats are internal to the KY native MJPEG transport. 
They\r\nare not the same as the Java Wi-Fi RC packets documented earlier.\r\n\r\n### Native Frame Delivery Thread\r\n\r\nAfter `handle_mcu_msg_frag` has assembled a complete JPEG, it pushes the image\r\ninto a small native queue. A separate delivery thread drains that queue:\r\n\r\n- Sleeps around 38 ms between normal delivery attempts.\r\n- If the queue is empty, waits an additional ~19 ms and checks again.\r\n- Logs delayed delivery when frame interval exceeds about 76 ms.\r\n- Calls the registered Java frame callback with:\r\n  - JPEG bytes\r\n  - total JPEG length\r\n  - sequence metadata\r\n  - quality byte\r\n  - GL-only extra camera/status flag\r\n\r\nThis means the native `UAV` frame callback is already latency-throttled before\r\nJava sees it. It is not just a raw packet callback.\r\n\r\n## PTZ / Servo / Camera Tilt Findings\r\n\r\nNo confirmed PTZ or camera-tilt servo command was found in the inspected KY UFO\r\nor RC UFO paths.\r\n\r\nSeveral UI elements look suspicious at first:\r\n\r\n- `verticalBar`\r\n- `horizontalBar`\r\n- `horizontalCenterBar`\r\n- `view_control_device_vertical_add`\r\n- `view_control_device_vertical_del`\r\n- `VerticalSeekBar`\r\n\r\nHowever, these are flight trim controls, not camera servos. In both KY UFO and\r\nRC UFO, `verticalCurrentValue` is added into `controlByte2`, the same pitch axis\r\nthat is sent in the regular RC control packet. `horizontalCurrentValue` adjusts\r\nyaw/turn, and `horizontalCenterCurrentValue` adjusts `controlByte1`.\r\n\r\nObserved mapping:\r\n\r\n- `horizontalBar` -> yaw trim / `controlTurn`\r\n- `horizontalCenterBar` -> roll trim / `controlByte1`\r\n- `verticalBar` -> pitch trim / `controlByte2`\r\n\r\nKY native camera-related functions are also not PTZ:\r\n\r\n- `UAV.switchActiveCamera(...)` -> `setActiveCameraIndex(...)` /\r\n  `nativeSetCameraIndex(...)`. 
Native code stores a camera index byte that is\r\n  later included in ACK/control state; this appears to select front/rear or\r\n  active camera stream.\r\n- `UAV.setQPara(...)` -> native JPEG quality parameter ranges. Native code\r\n  validates quality values such as 5, 10, 25, 50, 75, and 100 and stores\r\n  min/max quality thresholds.\r\n- `UAV.setModify(...)` is firmware/configuration UI for SSID, resolution,\r\n  channel, protocol, rotate, and flow orientation. It is not exposed as runtime\r\n  tilt control.\r\n\r\nThe apps also implement digital pan/zoom on decoded frames through\r\n`MjpegThread.setFocusScale(...)` and `setFocusMove(...)`, but that is display\r\ncrop/zoom, not mechanical camera motion.\r\n\r\nConclusion: if some CooingDV-published drones have tilt servos, this pair of\r\ndecompiled apps does not expose an obvious runtime PTZ command for them. It may\r\nexist in another white-label app, another firmware telemetry/status packet, or a\r\nmodel-specific command not reachable from the visible KY/RC UI.\r\n\r\n## Video Feed\r\n\r\n### Wi-Fi RTSP Path\r\n\r\nBoth apps use:\r\n\r\n```text\r\nrtsp://192.168.1.1:7070/webcam\r\n```\r\n\r\nPlayback is handled by `IjkVideoView`. 
The apps configure:\r\n\r\n- `mediacodec = 0`\r\n- `readtimeout = 5000000`\r\n- `preferred-image-type = 0`\r\n- `image-quality-min = 2`\r\n- `image-quality-max = 20`\r\n- `preferred-video-type = 2`\r\n- `video-need-transcoding = 1`\r\n- `mjpeg-pix-fmt = 1`\r\n- `video-quality-min = 2`\r\n- `video-quality-max = 20`\r\n- `x264-option-preset = 0`\r\n- `x264-option-tune = 5`\r\n- `x264-option-profile = 1`\r\n- `x264-params = \"crf=23\"`\r\n- `auto-drop-record-frame = 3`\r\n- codec option `err_detect = \"explode\"`\r\n\r\nThe app enables original frame callbacks:\r\n\r\n```text\r\nmVideoView.setOutputOriginalVideo(true)\r\nmVideoView.setOnReceivedOriginalDataListener(...)\r\n```\r\n\r\nThe callback passes each `byte[]` to the fragment's `onVideo(...)`, and the\r\nfragment calls:\r\n\r\n```text\r\nmjpegThread.drawBitmap(bArr)\r\n```\r\n\r\n`MjpegThread` decodes the whole byte array:\r\n\r\n```text\r\nBitmapFactory.decodeByteArray(remove, 0, remove.length)\r\n```\r\n\r\nThat makes the app boundary frame format effectively \"complete JPEG blob per\r\ncallback\". There is no Java RTP packet assembler in these app paths.\r\n\r\n### Display Transform Variations\r\n\r\nThe apps apply display-side transformations after decoding JPEG bytes:\r\n\r\n- Cropping `800x600` frames to `800x540` by removing 30 pixels top and bottom.\r\n- Rotating portrait-like frame sizes such as `240x320`, `120x160`, and\r\n  `160x272` by 90 degrees unless the resolution ID is a no-rotate ID.\r\n- Optional 180-degree rotation through `isTurnBitmap`.\r\n- Portrait crop to a 9:16 center region.\r\n- Focus/zoom cropping controlled by `focusScale`, `focusMoveX`, and\r\n  `focusMoveY`.\r\n- Gesture recognition uses the same video byte stream.\r\n\r\nNo equivalent display transforms are currently implemented in\r\n`CooingdvVideoProtocolAdapter`. 
TurboDrone receives decoded RTSP frames from\r\nOpenCV and re-encodes them to JPEG without app-specific rotation/cropping.\r\n\r\n### Local Recording\r\n\r\nThe apps' `VideoModel` records by encoding processed display frames to H.264:\r\n\r\n- Encoder: `video/avc` or `OMX.google.h264.encoder`.\r\n- Output: `REC_<unix>_0.mp4`.\r\n- Bitrate: `2000000`.\r\n- I-frame interval: `5`.\r\n- Presentation time: `(frameIndex * 1000000 / fps) + 132`.\r\n- SPS/PPS handling: once codec config is captured, if output byte 4 is `101`\r\n  (`0x65`, H.264 IDR NAL), the app prepends SPS/PPS before returning the encoded\r\n  buffer.\r\n\r\nThis is the app's local recording pipeline. It should not be confused with the\r\ndrone-to-phone RTSP/JPEG frame boundary.\r\n\r\n## TurboDrone Implementation\r\n\r\n### CLI and Web Defaults\r\n\r\n`main.py` supports:\r\n\r\n```text\r\n--drone-type cooingdv\r\n```\r\n\r\nDefaults:\r\n\r\n- `drone_ip = 192.168.1.1`\r\n- `control_port = 7099`\r\n- `video_port = 7070`\r\n- `control_rate = 20.0`\r\n\r\n`web_server.py` follows the same class wiring for `DRONE_TYPE=cooingdv`.\r\n\r\n### RC Model\r\n\r\n`CooingdvRcModel` extends `BaseRCModel` with:\r\n\r\n- Stick range: `50..200`, center `128`.\r\n- `IncrementalStrategy`.\r\n- One-shot flags:\r\n  - `takeoff_flag`\r\n  - `land_flag`\r\n  - `stop_flag`\r\n  - `flip_flag`\r\n  - `calibration_flag`\r\n- Toggle state:\r\n  - `headless_flag`\r\n\r\nThe model exposes:\r\n\r\n- `takeoff()`\r\n- `land()`\r\n- `emergency_stop()`\r\n- `flip()`\r\n- `toggle_headless()`\r\n- `calibrate_gyro()`\r\n- `get_control_state()`\r\n\r\n### RC Protocol Adapter\r\n\r\n`CooingdvRcProtocolAdapter`:\r\n\r\n- Opens one UDP socket.\r\n- Binds to an ephemeral local port to match Android `DatagramSocket()`.\r\n- Sends heartbeat `01 01` every second.\r\n- Starts a receive thread for telemetry-driven variant detection.\r\n- Builds TC or GL packets from the active variant.\r\n- Sends packets to `drone_ip:control_port`.\r\n- On `stop()`, 
stops heartbeat, sends `08 01`, stops receiver, and closes the\r\n  socket.\r\n\r\nImportant constants:\r\n\r\n- `PREFIX = 0x03`\r\n- `START_MARKER = 0x66`\r\n- `EXTENDED_MARKER = 0x14`\r\n- `END_MARKER = 0x99`\r\n- `HEARTBEAT_COMMAND = bytes([0x01, 0x01])`\r\n- `STOP_COMMAND = bytes([0x08, 0x01])`\r\n\r\nThe adapter clears one-shot command flags immediately after building each\r\npacket. It does not clear `headless_flag`, because that is a toggle state.\r\n\r\n### Flight Controller Scheduling\r\n\r\n`FlightController._control_loop`:\r\n\r\n- Computes `dt`.\r\n- Calls `model.update(dt, axes)`.\r\n- Calls `protocol.build_control_packet(model)`.\r\n- Calls `protocol.send_control_packet(packet)`.\r\n- Sleeps `1 / update_rate`.\r\n\r\nFor CooingDV the default update interval is 50 ms, matching the Android app's\r\n`SEND_COMMAND_INTERVAL = 50` in `FlyController`.\r\n\r\n### Video Protocol Adapter\r\n\r\n`CooingdvVideoProtocolAdapter`:\r\n\r\n- Builds `rtsp://{drone_ip}:{video_port}/webcam`.\r\n- Opens it using `cv2.VideoCapture(rtsp_url, cv2.CAP_FFMPEG)`.\r\n- Sets `CAP_PROP_BUFFERSIZE = 1` for lower latency.\r\n- Reads decoded BGR frames.\r\n- Re-encodes each frame as JPEG with quality 85.\r\n- Wraps the bytes with `CooingdvVideoModel.ingest_chunk`.\r\n- Publishes `VideoFrame` objects through a size-2 queue.\r\n- Drops the oldest queued frame when the queue is full.\r\n- Reconnects after a 5 s frame timeout or OpenCV errors.\r\n\r\nThis is different from TurboDrone's S2x and WiFi-UAV adapters. There is no raw\r\nUDP video packet assembler for CooingDV in TurboDrone; `get_packets()` returns\r\nan empty list.\r\n\r\n## Implementation Gaps And Risks\r\n\r\n- TurboDrone currently uses product-level names (`takeoff`, `land`, `flip`) for\r\n  bits whose app symbols are lower-level (`isFastFly`, `isFastDrop`,\r\n  `isCircleTurnEnd`). 
This may be correct UX-wise, but the doc should not imply\r\n  the Android apps have named takeoff/land opcodes.\r\n- `0x08 0x01` is only an exit-control command in the apps. It should not be used\r\n  as a startup/init packet.\r\n- GL bit `flags2 0x02` (`isFixedHeightMode`) is observed in both apps, but\r\n  TurboDrone does not currently expose or set a CooingDV fixed-height flag.\r\n- KY GL `flags1 0x10` (`isOpenLight`) is not exposed by TurboDrone.\r\n- GL `flags1 0x40` (`isGestureMode`) is app-side UI/vision behavior and should\r\n  not be treated as a drone flight command unless separately verified.\r\n- RC UFO's decompiled `FlyControlTask.run()` has damaged control-flow output\r\n  around `isFastReturn` / `isUnLock`; KY's source is cleaner and should be\r\n  preferred for TC flag interpretation.\r\n- RC UFO's decompiled UDP receive method is also damaged around password and\r\n  telemetry parsing. The visible fragments still confirm password IDs and the\r\n  8-digit password command.\r\n- RC UFO's Java references `System.loadLibrary(\"lib_gesture\")`, but this\r\n  workspace's RC UFO decompile has no `resources/lib` tree. If RC native\r\n  behavior matters later, reacquire the full universal APK or the relevant split\r\n  APK containing native libraries.\r\n- The exact RTSP wire codec is not fully proven from Java source alone. At the\r\n  app boundary, the original-data callback is handled as JPEG bytes.\r\n- KY native `UAV` video is now stronger than inference: `libuav_gl.so` and\r\n  `libuav_tc.so` reassemble 1024-byte MCU fragments into complete JPEG images\r\n  using embedded 640x360 JPEG headers and append `ff d9` on the final fragment.\r\n- TurboDrone does not yet implement the Android app's display transforms:\r\n  no-rotate IDs, portrait rotations, `800x600 -> 800x540` crop, or alternate\r\n  camera/screen restart behavior.\r\n- TurboDrone auto-detection falls back to TC until it receives a recognized\r\n  telemetry byte. 
GL hardware may need `COOINGDV_VARIANT=gl` if telemetry is\r\n  delayed, filtered, or not sent to the ephemeral local port.\r\n- KY native `libuav_gl.so` and `libuav_tc.so` use a separate\r\n  `192.168.169.1:8800` native transport, while the Java CooingDV Wi-Fi path uses\r\n  `192.168.1.1:7099` for RC and `192.168.1.1:7070` for RTSP. Do not change\r\n  TurboDrone's CooingDV defaults based on the native path without hardware\r\n  evidence that the target drone exposes that BL60x interface.\r\n\r\n## Deep-Dive Follow-Ups\r\n\r\n- Decompile the remaining unnamed helper functions around the GL/TC native\r\n  threads to improve field names and confirm every byte in the ACK packet\r\n  envelope. The main native socket target and incoming envelope are now mapped.\r\n- Recover or reacquire RC UFO's missing native split if the RC gesture/native\r\n  behavior needs parity with KY. The Java package references `lib_gesture`, but\r\n  the inspected RC resources do not include it.\r\n- Capture KY native traffic when `UAV.isActive()` is true. Confirm whether\r\n  `192.168.169.1:8800` is reachable on real KY hardware, whether packets are\r\n  sent over Wi-Fi or another interface, and how it relates to the Java RTSP path.\r\n- Capture GL and TC Wi-Fi traffic from hardware while toggling fixed-height,\r\n  light, camera, screen switch, gyro correction, and emergency stop. 
Use the\r\n  captures to decide which currently unimplemented flags are safe to add to\r\n  TurboDrone.\r\n- Add optional TurboDrone diagnostics to log first-byte telemetry IDs, GL `0x66`\r\n  status packets, and camera/gallery notifications before adding more control\r\n  surface.\r\n\r\n## Useful Test Packets\r\n\r\nNeutral TC hover/control packet:\r\n\r\n```text\r\n03 66 80 80 80 80 00 00 99\r\n```\r\n\r\nNeutral GL hover/control packet:\r\n\r\n```text\r\n03 66 14 80 80 80 80 00 00 00 00 00 00 00 00 00 00 00 00 00 99\r\n```\r\n\r\nTC emergency-stop style packet:\r\n\r\n```text\r\n03 66 80 80 80 80 04 04 99\r\n```\r\n\r\nGL emergency-stop style packet:\r\n\r\n```text\r\n03 66 14 80 80 80 80 02 00 00 00 00 00 00 00 00 00 00 00 02 99\r\n```\r\n\r\nHeartbeat:\r\n\r\n```text\r\n01 01\r\n```\r\n\r\nLeave control mode:\r\n\r\n```text\r\n08 01\r\n```\r\n\r\nCamera switch:\r\n\r\n```text\r\n06 01\r\n06 02\r\n```\r\n\r\nGallery/screen sync:\r\n\r\n```text\r\n09 01\r\n09 02\r\n```\r\n\r\nRC UFO password command for password `12345678`:\r\n\r\n```text\r\n0a 01 02 03 04 05 06 07 08\r\n```\r\n"
  },
  {
    "path": "docs/research/wifi_cam.md",
    "content": "# WiFi_CAM Protocol Research\r\n\r\nThis note documents the decompiled `WiFi_CAM` Android app from\r\n`decompiled-wifi-cam-6.1.8`, including the native `libCamera.so` transport in\r\n`resources/lib/arm64-v8a`.\r\n\r\n## Executive Summary\r\n\r\n`WiFi_CAM` should be treated as a separate TurboDrone backend candidate named\r\n`wifi_cam`.\r\n\r\nThe first Java-only pass looked similar to `cooingdv_jieli` because\r\n`IConstants.java` contains Jieli-style constants such as `192.168.8.15`, UDP\r\n`2228`, RTP `6666`, and SDP `6789`. Native analysis of `libCamera.so` shows the\r\nactual live camera/control path is different:\r\n\r\n- Device IP: `192.168.4.153`\r\n- Video/data UDP socket: `8080`\r\n- RC/command UDP socket: `8090`\r\n- Stream start command: `42 76`\r\n- Stream stop command: `42 77`\r\n- Rotate command: `42 78`\r\n- Camera switch command: `42 79`\r\n- RC payload: raw Java-built `66 ... 99` 8-byte or 20-byte command sent to UDP\r\n  `8090`\r\n- Video payload: fixed `0x5c0` UDP datagrams with an 8-byte native header and a\r\n  `0x5b8` JPEG slice body, reassembled into complete JPEG frames by native code\r\n\r\nThis does not match TurboDrone's current `cooingdv`, `cooingdv_jieli`, `s2x`, or\r\n`wifi_uav` implementations closely enough to reuse one unchanged.\r\n\r\n## Native Evidence\r\n\r\nNative libraries are present under:\r\n\r\n```text\r\ndecompiled-wifi-cam-6.1.8/resources/lib/arm64-v8a/\r\n```\r\n\r\nImportant library:\r\n\r\n```text\r\nlibCamera.so\r\n```\r\n\r\nGhidra headless outputs used for this pass:\r\n\r\n- `tools/ghidra_out/wifi_cam_libCamera_functions_all.txt`\r\n- `tools/ghidra_out/wifi_cam_libCamera_jni_table.txt`\r\n- `tools/ghidra_out/wifi_cam_libCamera_decompiled/`\r\n\r\nHelper scripts added for repeatable native analysis:\r\n\r\n- `tools/ghidra_scripts/ExportFunctionsByAddress.java`\r\n- `tools/ghidra_scripts/DumpJniNativeTable.java`\r\n- `tools/ghidra_scripts/DumpMatchingSymbols.java`\r\n\r\n## JNI 
Table\r\n\r\n`JNI_OnLoad` registers 22 native methods for `com.tzh.wifi.utils.Camera`.\r\nImportant mappings:\r\n\r\n```text\r\niCameraInit      -> FUN_00133718\r\niCameraDeinit    -> FUN_001337f4\r\niCameraStart     -> FUN_001338f8\r\niCameraStop      -> FUN_0013392c\r\niCmdStart        -> FUN_00133b20\r\niCameraRoate     -> FUN_00133b28\r\niCameraSwitch    -> FUN_00133b50\r\niCmdSend         -> FUN_00133b78\r\niCmdResume       -> FUN_00133c2c\r\niCmdStop         -> FUN_00133c34\r\niYuanInit        -> FUN_00133d2c\r\niYuanProc        -> FUN_00133e6c\r\niYuanRelease     -> video_uninit\r\n```\r\n\r\n`iCameraInit` allocates the native `Socket`, `MjpegToAvi`, `AviReader`, and\r\n`MjpegToMp4` objects. `iCameraStart` calls `Socket::connect()`.\r\n\r\n## Socket Lifecycle\r\n\r\n`Socket::connect()` starts two threads:\r\n\r\n- `udpSocketEnterance`\r\n- `cmdSocketEnterance`\r\n\r\n`udpSocketEnterance` creates two UDP sockets:\r\n\r\n```text\r\nSocket::create(udpSocket, \"192.168.4.153\", 0x1f90)  // 8080\r\nSocket::create(cmdSocket, \"192.168.4.153\", 0x1f9a)  // 8090\r\n```\r\n\r\nThe sockets are unconnected UDP sockets using `sendto(...)` and `recvfrom(...)`.\r\nThere is no CTP envelope, no `CTP:` string, no `FLYING_CTRL` topic, and no Jieli\r\nJSON transport in the native library.\r\n\r\n## IP Address Families\r\n\r\nThe app carries several IP constants, but they do not all represent active live\r\ntransport paths in the reviewed build:\r\n\r\n- `192.168.4.153` is the active native camera/control target. `libCamera.so`\r\n  hardcodes this address for both UDP `8080` video/data and UDP `8090` command\r\n  sockets.\r\n- `192.168.4.151` appears only in `WiFiApp.java` as `TCP_SERVER_IP`. The app\r\n  compares the Wi-Fi DHCP gateway against `192.168.4.151` and `192.168.4.153`,\r\n  returning `1` or `2`, but no caller was found under `com/tzh/wifi`. 
Treat this\r\n  as a dormant/legacy mode hint until packet captures prove otherwise.\r\n- `192.168.8.15` appears in `IConstants.DEFAULT_DEV_IP` with Jieli-like\r\n  `2228`/RTP/SDP constants. No Java caller or native `libCamera.so` evidence was\r\n  found for this path in the reviewed WiFi_CAM live preview/control flow.\r\n- `192.168.1.1` appears as an FTP/media constant, likely for legacy file access\r\n  or shared firmware support, not live flight/video control.\r\n\r\nThis suggests WiFi_CAM is a broad whitelabel codebase carrying constants for\r\nmultiple device families. For TurboDrone, implement the observed\r\n`192.168.4.153` native UDP backend first, and only split additional\r\n`wifi_cam_*` variants if later apps or captures show one of the dormant IP\r\nfamilies is actually used.\r\n\r\n## Stream Commands\r\n\r\nGhidra data-symbol dump shows the 2-byte commands:\r\n\r\n```text\r\nstartCmd   = 42 76\r\nstopCmd    = 42 77\r\nrotateCmd  = 42 78\r\nswitchCmd  = 42 79\r\n```\r\n\r\n`udpSocketEnterance` sends `startCmd` to `192.168.4.153:8080` after socket\r\ncreation and sends `stopCmd` when disconnecting. 
`iCameraRoate()` sends\r\n`rotateCmd`; `iCameraSwitch()` sends `switchCmd`.\r\n\r\n## Camera Type Probe\r\n\r\nAfter sending `startCmd`, `udpSocketEnterance` waits for an 8-byte response from\r\nthe `8080` socket.\r\n\r\nKnown camera-type responses:\r\n\r\n```text\r\n55 00 01 00 00 00 01 99  -> camera type 1\r\n55 00 02 00 00 00 02 99  -> camera type 2\r\n```\r\n\r\nThe native code calls `C_Method::onNotifyCameraType(type)`, which reaches\r\n`WiFiModelImpl.ICameraType(int)` and selects the Java command payload mode via\r\n`BaseCmd.setCameraType(int)`.\r\n\r\n## RC Command Path\r\n\r\nJava builds flight packets in:\r\n\r\n```text\r\ndecompiled-wifi-cam-6.1.8/sources/com/tzh/wifi/wificam/model/base/BaseCmd.java\r\n```\r\n\r\n`iCmdSend([B, int)` copies the Java byte array and calls:\r\n\r\n```text\r\nSocket::sendCmd(payload, length)\r\n```\r\n\r\n`Socket::sendCmd` sends raw bytes to `192.168.4.153:8090`.\r\n\r\nShort 8-byte payload:\r\n\r\n```text\r\nindex  value\r\n0      0x66\r\n1      roll\r\n2      pitch\r\n3      throttle / power\r\n4      yaw\r\n5      flags\r\n6      xor checksum over bytes 1..5\r\n7      0x99\r\n```\r\n\r\nAlternate 20-byte payload for camera type `2`:\r\n\r\n```text\r\nindex  value\r\n0      0x66\r\n1      0x14\r\n2      roll\r\n3      pitch\r\n4      throttle / power\r\n5      yaw\r\n6      flags group 1\r\n7      flags group 2\r\n8..17  reserved / zero in Java initializer\r\n18     xor checksum over bytes 2..17\r\n19     0x99\r\n```\r\n\r\nFor the 8-byte payload, byte `5` flags are:\r\n\r\n- `0x01`: one-key fly / fast fly\r\n- `0x02`: one-key land / fast drop\r\n- `0x04`: emergency\r\n- `0x08`: rotate / circle turn\r\n- `0x10`: headless mode\r\n- `0x80`: checkout / calibration-like flag\r\n\r\nFor the 20-byte payload, bytes `6` and `7` are:\r\n\r\n- byte `6`, `0x01`: fly and land both map here in the decompiled Java\r\n- byte `6`, `0x02`: emergency\r\n- byte `6`, `0x04`: checkout\r\n- byte `6`, `0x08`: rotate / circle turn\r\n- byte `7`, 
`0x01`: headless mode\r\n- byte `7`, `0x02`: stay-high / altitude hold\r\n\r\n## Command Response Path\r\n\r\n`cmdSocketEnterance` listens on the `8090` command socket for 8-byte responses.\r\nIt copies each response into `recvCmd` and uses the same logic as\r\n`parseCmdRecv(...)`:\r\n\r\n- Read exactly 8 bytes.\r\n- Update `prevIdx` from response byte `1`.\r\n- Notify Java snap state with response byte `2`.\r\n- Ignore duplicate response indexes.\r\n\r\nThe checksum expression in the decompile is awkward, but the shape is clearly an\r\n8-byte command acknowledgement/status channel rather than CTP or JSON.\r\n\r\n## Video Packet Format\r\n\r\n`udpSocketEnterance` receives up to `0x5c0` bytes from the `8080` socket and\r\npasses the packet to `Image::put(...)`.\r\n\r\nNative reassembly behavior:\r\n\r\n- Full UDP datagram size: `0x5c0` bytes (`1472`)\r\n- Native packet header: 8 bytes\r\n- JPEG slice body: `0x5b8` bytes (`1464`)\r\n- Packet byte `0`: frame id / sequence byte\r\n- Packet byte `1`: final-frame marker used by native code; completion requires\r\n  this byte to equal `1`\r\n- Packet byte `2`: total fragment count for the frame\r\n- Packet byte `3`: resolution id forwarded to Java\r\n- Packet byte `7`: retain/orientation value forwarded to Java\r\n- Payload starts at byte `8`\r\n\r\n`Image::putImgtoBuf(...)` appends each `0x5b8` payload into an internal frame\r\nbuffer. When the final chunk arrives, it scans for JPEG EOI `ff d9`, validates\r\nthat the assembled frame starts with JPEG SOI `ff d8` and ends with `ff d9`, and\r\ncalls:\r\n\r\n```text\r\nC_Method::onNotifyRecvState(resolution, jpeg_bytes, jpeg_len, retain)\r\n```\r\n\r\nJava receives this in `Camera.OnImageRecv(...)`, then forwards frame bytes to\r\n`WiFiPresenter.onData(...)`. 
The app log tag says `MJPEG`, but the app boundary\r\nis complete JPEG frame blobs.\r\n\r\n## Comparison With Existing TurboDrone Backends\r\n\r\n`cooingdv` is not a match:\r\n\r\n- Uses `192.168.1.1`\r\n- Uses UDP `7099`\r\n- Uses RTSP `rtsp://192.168.1.1:7070/webcam`\r\n- Wraps flight payloads with a leading `0x03`\r\n\r\n`cooingdv_jieli` is not a match:\r\n\r\n- Uses `192.168.8.15`\r\n- Uses UDP `2228`\r\n- Uses CTP envelope beginning with `CTP:`\r\n- Uses JSON topics such as `CONTROL_MODE`, `FLYING_CTRL`, and `OPEN_RT_STREAM`\r\n- Uses RTP/JPEG on UDP `6666` plus SDP TCP `6789`\r\n\r\n`wifi_uav` is not a match:\r\n\r\n- Uses `192.168.169.1`\r\n- Uses UDP `8800` / `8801`\r\n- Uses different native packet headers and ACK/request behavior\r\n\r\n`s2x` is the closest structural comparison but still not a match:\r\n\r\n- Both use a port `8080`, but S2x targets `172.16.10.1:8080`\r\n- S2x video uses a separate UDP `8888` receiver and a `0x40 0x40` packet header\r\n- S2x start command is `0x08 + local IPv4`, not `42 76`\r\n- S2x RC packets are 20-byte packets sent to `8080`, not a separate `8090`\r\n  command socket\r\n\r\n## Recommended TurboDrone Implementation\r\n\r\nAdd a new backend family:\r\n\r\n```text\r\nDRONE_TYPE=wifi_cam\r\n```\r\n\r\nImplementation status:\r\n\r\n- `wifi_cam` is wired as a first-pass backend in TurboDrone.\r\n- RC supports short and extended raw command packets.\r\n- Video supports `42 76` stream start, camera-type probing, and native JPEG\r\n  chunk reassembly.\r\n- Hardware validation is still needed against a WiFi_CAM-compatible device.\r\n\r\nSuggested defaults:\r\n\r\n```text\r\nWIFI_CAM_DRONE_IP=192.168.4.153\r\nWIFI_CAM_VIDEO_PORT=8080\r\nWIFI_CAM_COMMAND_PORT=8090\r\n```\r\n\r\nImplementation pieces:\r\n\r\n- `models/wifi_cam_rc.py`\r\n  - Can reuse the CooingDV-style stick center/range initially.\r\n  - Needs explicit support for both 8-byte and 20-byte payload modes.\r\n- `protocols/wifi_cam_rc_protocol_adapter.py`\r\n  - Sends 
raw RC packets to UDP `8090`.\r\n  - Starts with 8-byte mode unless camera-type probe says type `2`.\r\n  - Builds checksums exactly like `BaseCmd.java`.\r\n- `protocols/wifi_cam_video_protocol.py`\r\n  - Opens a UDP socket to the device.\r\n  - Sends `42 76` to `192.168.4.153:8080` on start and `42 77` on stop.\r\n  - Receives `0x5c0` datagrams.\r\n  - Handles camera-type responses before frame assembly.\r\n  - Reassembles `0x5b8` JPEG chunks using the 8-byte native header.\r\n  - Emits complete JPEG `VideoFrame` objects.\r\n- `main.py` / `web_server.py`\r\n  - Add `wifi_cam` selection and env defaults.\r\n  - Wire the video adapter and RC adapter separately because ports differ.\r\n\r\nOpen implementation question:\r\n\r\n- Where to route the native camera type notification. The cleanest Python design\r\n  is for the video adapter to expose the detected camera type and for\r\n  `web_server.py` / `VideoReceiverService` to pass that into the RC adapter, or\r\n  for the RC adapter to default to 8-byte mode with an env override such as\r\n  `WIFI_CAM_COMMAND_MODE=short|extended`.\r\n\r\n## Validation Plan\r\n\r\nPacket captures should confirm:\r\n\r\n- App sends `42 76` to `192.168.4.153:8080` at live preview start.\r\n- Device returns one of the `55 00 xx 00 00 00 xx 99` camera-type responses.\r\n- App sends raw `66 ... 99` RC payloads to `192.168.4.153:8090`.\r\n- Video datagrams are `0x5c0` bytes and payload chunks assemble into JPEG frames.\r\n- Stop/disconnect sends `42 77` to `8080`.\r\n\r\nIf captures confirm the native decompile, this should become a new `wifi_cam`\r\nbackend rather than a variant of `cooingdv_jieli`.\r\n"
  },
  {
    "path": "docs/research/wifi_uav.md",
    "content": "# WiFi-UAV Protocol Research\n\nThis note captures findings from the decompiled WiFi-UAV Android app in\n`wifi-uav-app-decompiled` and from native analysis of\n`wifi-uav-app-decompiled/resources/lib/arm64-v8a/libuav_lib.so`.\n\nThe main takeaway is that \"WiFi-UAV\" is a drone family, not one protocol.\nThe app routes different SSID families to different backend SDKs.\n\n## App Backend Variants\n\nThe app uses `defpackage.d00` as a dispatcher. It maps SSID prefixes onto two\nbackend families:\n\n```java\nput(\"FlOW_\", f.Uav);\nput(\"FLOW_\", f.Uav);\nput(\"WIFI_\", f.Fld);\nput(\"GD89Pro_\", f.Fld);\nput(\"WTECH-\", f.Fld);\nput(\"WTECH_\", f.Fld);\n```\n\nThe dispatcher then binds those backend enum values to concrete implementations:\n\n```java\nput(f.Fld, wz.X());\nput(f.Uav, e00.i0());\n```\n\nObserved / inferred mapping:\n\n| SSID prefix | App backend | Java class | Native dependency | Notes |\n| --- | --- | --- | --- | --- |\n| `WIFI_` | `Fld` | `defpackage.wz` | `com.lxProLib.lxSigPro` / `lxPro` | Classic WiFi-UAV path. |\n| `GD89Pro_` | `Fld` | `defpackage.wz` | `com.lxProLib.lxSigPro` / `lxPro` | Same backend as `WIFI_`. |\n| `WTECH-`, `WTECH_` | `Fld` | `defpackage.wz` | `com.lxProLib.lxSigPro` / `lxPro` | Same backend as `WIFI_`. |\n| `FLOW_`, `FlOW_` | `Uav` | `defpackage.e00` | `com.example.sdk.UAVSDK` / `libuav_lib.so` | Native UAVSDK backend. |\n| `DRONE_` | Not in app map | Turbodrone maps to `fld` | Appears K417-compatible with `fld` wire behavior | Added from K417 testing. |\n\n## App Call Graph\n\nThe main flight screen is `com.lcfld.fldpublic.ActCtrl`. 
It owns:\n\n- `d00 W = d00.Y()` as the selected drone backend.\n- `xx X = new xx()` as the RC packet builder.\n\nControl flow:\n\n```text\nActCtrl\n  -> d00 dispatcher\n      -> wz / lxSigPro for Fld devices\n      -> e00 / UAVSDK / libuav_lib.so for FLOW/Uav devices\n  -> xx packet builder\n      -> short 8-byte command for Fld-style control\n      -> extended 20-byte command for UAV/FLOW when supported\n```\n\n`d00.g()` starts every registered backend, not just the currently selected one.\nThe selected backend is changed by successful callbacks:\n\n- `wz` / `lxSigPro` connection state `5` calls `e0(f.Fld)`.\n- `e00` posts a connected callback after MCU version data arrives and calls\n  `e0(f.Uav)`.\n\n`d00.h(byte[])` is the main command forwarding point. It sends the current RC\npacket only to the active backend.\n\n`ActCtrl` starts its own worker thread in `onCreate()`. The thread calls\n`P1()` about every `50 ms`, updates PTZ visibility about once per second, and\nalso drives UI effects such as roll animation and water-ripple rendering.\n\nThe Android app version in the analyzed manifest is:\n\n```text\nversionName=2024.11.08\nversionCode=135\npackage=com.lcfld.fldpublic\n```\n\n## Fld Backend\n\n`defpackage.wz` wraps `com.lxProLib.lxSigPro`:\n\n```java\npublic class wz extends tz {\n    public lxSigPro d = lxSigPro.getInstance();\n}\n```\n\nImportant methods:\n\n```java\npublic int g() { return this.d.Connect(0); }\npublic int h(byte[] bArr) { return this.d.DataForward(bArr, 0); }\npublic int i() { return this.d.DisConnect(); }\npublic int p() { return this.d.StPlay(0); }\npublic int q() { return this.d.StStop(); }\n```\n\nImplications:\n\n- `Fld` has explicit connect / data-forward / disconnect / start-play /\n  stop-play lifecycle in the app.\n- Turbodrone does not call `lxSigPro`; it reconstructs the wire behavior in\n  Python.\n- K417 (`DRONE_*`) appears to use a wire path compatible with this family.\n\nAdditional `Fld` callbacks:\n\n- `onStreamCbk()` 
forwards decoded video frames from `lxSigPro`.\n- `onSrlDataCbk()` forwards serial/control data from the drone.\n- `onCfgCbk()` forwards config responses.\n- `onUpdateFwCbk()` forwards firmware update progress.\n\n`wz.k()` initializes `lxSigPro` with `SetMid(1)`, `UidsInit(new int[]{0})`,\nand `AStRecType = 0`.\n\n`lxPro` loads `lxffmpeg` and `lxPro`. Ghidra analysis of\n`resources/lib/arm64-v8a/liblxPro.so` shows this is a separate native stack\nfrom `libuav_lib.so`.\n\nImportant `lxPro` native wrappers used by the app:\n\n| Java call | Native wrapper | App use |\n| --- | --- | --- |\n| `Connect(0)` | `ntvConnectInt()` | Start FLD connection. |\n| `DataForward(bytes, 0)` | `ntvDataForward()` | Forward RC bytes. |\n| `DisConnect()` | `ntvDisConnect()` | Stop FLD connection. |\n| `StPlay(0)` | `ntvStPlay()` | Start video stream. |\n| `StStop()` | `ntvStStop()` | Stop video stream. |\n| `SdCapture(1)` | `ntvSdCapture()` | Ask drone-side SD/photo capture. |\n| `SdRecord(boolean)` | `ntvSdRecord()` | Ask drone-side SD/video record. |\n| `SetCfg(json)` | `ntvSetCfg()` | Configure WiFi SSID and other SDK JSON commands. |\n\n### Native FLD / lxPro Findings\n\n`liblxPro.so` is an ARM64 ELF with DWARF/debug info. The relevant compile units\ninclude:\n\n- `lgProIntface/lxPro.c`\n- `lgProIntface/lxProJni.c`\n- `lgProSrc/lxProtocol.c`\n- `lgProSrc/lgProDev.c`\n- `lgPublicSrc/lxFFmpeg.c`\n\n`lgCreate()` allocates a `0xa30` byte device context, creates command locks and\nstats queues, initializes check/command/stream sockets to empty state, creates\nan audio player, and starts `lgThreadTimming()`. The native version log in this\nlibrary is:\n\n```text\nv1.0.0(20201022)\n```\n\n`Connect(0)` flows through:\n\n```text\nJava_com_lxProLib_lxPro_ntvConnectInt()\n  -> lgConnect()\n  -> lgConnectPort(dev, ip, 0x49c1)\n```\n\n`0x49c1 == 18881`. 
If the app passes IP `0`, `lgConnectPort()` defaults to:\n\n```text\n192.168.100.1:18881\n```\n\nThat is different from the `libuav_lib.so` / BL608/BL618 transport, which uses\n`192.168.169.1:8800` and `:8801`.\n\n`lgConnectPort()` starts the FLD control/check path:\n\n- creates `0x400000` byte buffers for command and check sockets\n- configures the check socket as TCP\n- starts `lgThreadCmdParse`\n- starts `lgThreadCekClient`\n\n`lgThreadCekClient()` calls `lgOnSocketResv()` on the check socket. The socket\nlogic uses Java-configured connection cycle/timeout values, with minimums of\n`500 ms` cycle and `100 ms` connect timeout.\n\nThe FLD command parser accepts native packets with:\n\n```text\nstart marker: f5 e2 e3 cb\nend marker:   e3 a5 cb cc\n```\n\n`lgPackCmdSend()` builds this envelope:\n\n| Offset | Size | Meaning |\n| --- | ---: | --- |\n| `0` | 4 | start marker `f5 e2 e3 cb` |\n| `4` | 4 | `DgCmdId_e` command id |\n| `8` | 4 | payload size |\n| `12` | var | payload |\n| `12 + payload_size` | 4 | end marker `e3 a5 cb cc` |\n\n`DataForward(bytes, 0)` is not a length bug. 
JNI ignores the Java-side `0`\nfor length, obtains the actual byte-array length, and calls:\n\n```c\nlgDataForward(dev, data, array_length, isTcp);\n```\n\nWith `isTcp == 0`, `lgDataForward()` selects UDP and sends:\n\n```text\nlgPackCmdSend(dev, eDgSrlOspf, rc_bytes, rc_len, eSckTypeUdp)\n```\n\nSo the short/extended `xx` RC bytes are nested inside the FLD native command\nenvelope as an `eDgSrlOspf` payload.\n\n`lgSendCJsonData()` creates JSON and sends it as:\n\n```text\nlgPackCmdSend(dev, eDgCmdJson, json_with_trailing_nul, len + 1, eSckTypeTcp)\n```\n\nExamples from native decompilation:\n\n- `lgStPlay(Rel)` sends JSON with `cmd`, `StRelIdx=Rel`, and `vopen=1`, then\n  starts stream threads.\n- `lgStStop()` sends JSON with `cmd` and `vopen=0`, then starts a delayed\n  stream-thread destroy path.\n- `lgSdRecord(start)` sends JSON with `cmd` and `record=(start != 0)`.\n- `lgGetCfg(Kid)` sends JSON with only `cmd=Kid`.\n- `lgSetCfg(json)` forwards the Java JSON string directly through `lgSetCfg()`\n  and logs the result.\n\n`lgStartStThread()` creates a `5,000,000` byte stream buffer, initializes video\nand audio packet lists, then starts:\n\n- `lgThreadStClient`\n- `lgStDecodeThread`\n- `lgVideoDecodeThread`\n- `lgAudioDecodeThread`\n\n`lgThreadStClient()` receives stream socket data through `lgOnSocketResv()` and\nregisters `onStRecvCbk()` for stream receive stats. `lgVideoDecodeThread()`\ncreates an H.264 FFmpeg decoder and dispatches packet payloads based on a byte\nat packet offset `0x0f`; decoded output eventually reaches the Java\n`lgStreamCbk()` / `onStreamCbk()` path as `lgFrameInFo`.\n\nFLD stream packets use a separate stream frame marker:\n\n```text\na5 a5 ef cc\n```\n\n`lgGetCompleteOneFrame()` searches for that marker, requires a `0x14` byte\nstream header, then expects one of these end markers after `Size + 0x14`:\n\n```text\ne3 a5 cb cc\na5 a5 ef cc\n```\n\nBad/misaligned frames increment `BadFrameCount` and advance the ring-buffer\nread pointer. 
This is separate from the UAVSDK `0x93 0x01` JPEG-fragment\ntransport.\n\nFLD decode path:\n\n- stream bytes are buffered by `lgOnSocketResv()`\n- complete stream frames are parsed by `lgGetCompleteOneFrame()`\n- video packets are queued into `VStList`\n- `lgVideoDecodeThread()` pops video packets\n- packet byte `0x0f` selects a decoder entry in `lxAVDcMap`\n- decoded H.264 output is copied to Java preview buffers by\n  `lxGeneratePreview()`\n\n`lxGeneratePreview()` lays out preview YUV as planar Y, U, V:\n\n```text\nY plane: offset 0, size w*h\nU plane: offset w*h, size (w/2)*(h/2)\nV plane: offset w*h*5/4, size (w/2)*(h/2)\n```\n\nSo `FrameType == 2` on the Java side is consistent with planar YUV420 output\nfrom the FLD FFmpeg decode path.\n\nImplication: the FLD backend is not just a raw `xx` packet UDP sender. It is a\nfull P2P-ish native protocol with TCP JSON/check traffic, UDP serial forwarding,\nand a separate H.264-oriented stream decode pipeline.\n\n`lxPro.lgFrameInFo` is the common Java frame envelope for FLD and UAV video:\n\n| Field | Meaning |\n| --- | --- |\n| `FrameBuf` | Frame bytes. JPEG for `FrameType == 1`; YUV420p-like bytes for `FrameType == 2`. |\n| `FrameType` | App dispatches `1` as JPEG bitmap and `2` as YUV data. |\n| `W`, `H` | Frame dimensions. |\n| `FrameNum`, `FrameRate`, `Pts`, `DisplayMs`, `StType`, `UserId`, `RecTimes` | Metadata forwarded by native SDK. 
|\n\n## Uav / FLOW Backend\n\n`defpackage.e00` wraps `com.example.sdk.UAVSDK`:\n\n```java\npublic class e00 extends tz implements UAVSDK.DataListener {\n    public UAVSDK f = UAVSDK.getInstance();\n}\n```\n\nImportant methods:\n\n```java\npublic int g() {\n    if (this.n) return 0;\n    this.n = true;\n    this.f.nativeStart();\n    return 0;\n}\n\npublic int h(byte[] bArr) {\n    this.f.nativeSendCtlMsg(bArr, bArr.length);\n    return 0;\n}\n\npublic int i() {\n    this.q = 0L;\n    this.h.f();\n    this.o = false;\n    this.p = true;\n    if (this.n) {\n        this.n = false;\n        this.f.nativeStop();\n    }\n    return 0;\n}\n```\n\n`UAVSDK` loads:\n\n```java\nSystem.loadLibrary(\"uav_lib\");\nSystem.loadLibrary(\"upcnn-cpu\");\nif (Build.VERSION.SDK_INT >= 24) {\n    System.loadLibrary(\"upcnn-gpu\");\n}\n```\n\n`UAVSDK` exposes JNI methods including:\n\n```java\nnativeCreate();\nnativeDestroy();\nnativeGetVersion();\nnativeInit();\nnativeSendCtlMsg(byte[] data, int len);\nnativeSendCustomMsg(byte[] data, int len);\nnativeSetCameraIndex(int index);\nnativeSetCameraRotate180(int value);\nnativeSetQPara(int q1, int q2, int t1, int t2);\nnativeStart();\nnativeStop();\n```\n\n### Java UAV Lifecycle\n\n`e00.k()` initializes the native layer:\n\n```java\nthis.g.h();\nthis.f.nativeInit();\nthis.f.setDataListener(this);\ng0();\n```\n\n`nativeInit()` caches JNI callback method IDs for:\n\n- `cbJpegFromNative([BJBI)V`\n- `cbCtlMsgFromNative([BJ)V`\n- `cbUpdateFromNative(II)V`\n- `cbTrackFromNative(I[F)V`\n\nThe native SDK version logged by this build is:\n\n```text\nV2.2.1 20240428\n```\n\n`e00.g()` calls `nativeStart()` once. `e00.i()` clears connection state and\ncalls `nativeStop()` if the native backend is running.\n\nThe app's UAV liveness check is stricter than \"socket exists\":\n\n```java\npublic boolean u() {\n    return this.o && System.currentTimeMillis() - this.q < 1500;\n}\n```\n\n`o` becomes true after a recognized MCU version/control message. 
`q` is updated\nby `handleJpeg()`, so the Java app considers the UAV backend connected only\nwhile JPEG frames are arriving.\n\n`e00.handleJpeg()` wraps native JPEG bytes into `lxPro.lgFrameInFo` through\n`d00.e()`:\n\n```java\nlgframeinfo.FrameType = 1;\nlgframeinfo.FrameBuf = bArr;\n```\n\nThe native callback's `quality` and `is_normal_pic` parameters are not copied\ninto `lgFrameInFo` by the Java bridge, so `ActCtrl` sees the same frame shape\nfor UAV JPEGs that it sees for FLD JPEGs.\n\n### Java UAV Control Messages\n\n`e00.handleCtlMsg()` interprets native control callbacks:\n\n| Native payload | Java effect |\n| --- | --- |\n| `[1, 101, ...]` | MCU version string. Parses board/capabilities, marks UAV connected, and probes extra commands when supported. |\n| `[1, 103, ...]` | SSID/config response. Parses scene/camera information. |\n| `[1, 105]` | MCU busy. |\n| `[1, 106]` | MCU disconnected. Clears version/state and posts a disconnect. |\n\n`e00.l` parses version strings into:\n\n- board family, resolution token, firmware version, build date, and extra flags\n- flow-device markers: `FLOW...` / `(FLOW)...`\n- no-flow marker: `(NOFLOW)...`\n- extended command support when the parsed build date is `>= 20211218`\n- optional `I=<hex>` image/capability bitfield\n\nThe Java custom-command helpers are string based:\n\n| Java method | Custom payload string |\n| --- | --- |\n| `Q()` | `cmd=2` |\n| `P()` | `cmd=3` |\n| `W()` | `cmd=4` |\n| `T(scene)` | `cmd=<scene << 8 | 1>` |\n| `k0(imgfunc)` | `imgfunc=<hex>` |\n| `o(ssid)` | `<i=2^bf_ssid=<ssid>>` |\n\nThese are wrapped as custom native messages, not as joystick bytes.\n\n### UAV Capability Bitfield\n\n`e00.j` parses the optional `I=<hex>` capability field from the MCU version\nstring. It stores nibbles and flags as:\n\n| Bits | Java field | Observed app use |\n| --- | --- | --- |\n| `0..3` | `b` | Mirror/camera command value used by `E(true)` / main camera. 
|\n| `4..7` | `c` | Mirror/camera command value used by `E(false)` / flow camera. |\n| `8..11` | `d` | Scene/current camera-ish capability. |\n| `12..15` | `e` | Resolution selector for flow/secondary camera. |\n| `16..19` | `f` | Resolution selector for main/primary camera. |\n| `20..23` | `g` | Extra image capability nibble. |\n| `24..27` | `h` | Extra image capability nibble. |\n| `28` | `i` | Camera/mirror behavior flag. |\n| `29` | `j` | Camera/mirror behavior flag. |\n| `30` | `k` | Front/back camera support flag; `x()` returns true when `k == 1`. |\n\n`e00.z()` means the `I=<hex>` field exists. `e00.y()` means custom image\ncommands are supported, scene/camera state is `1`, and front/back camera\nsupport is not already active.\n\n## Native UAVSDK Findings\n\n`libuav_lib.so` is an ARM64 ELF shared object. It is not stripped and contains\ndebug info, so Ghidra headless analysis is useful.\n\nExports of interest:\n\n- `Java_com_example_sdk_UAVSDK_nativeStart`\n- `Java_com_example_sdk_UAVSDK_nativeStop`\n- `Java_com_example_sdk_UAVSDK_nativeSendCtlMsg`\n- `Java_com_example_sdk_UAVSDK_nativeSendCustomMsg`\n- `mjpeg_ndk_start`\n- `mjpeg_ndk_stop`\n- `mjpeg_ndk_command_send`\n- `mjpeg_ndk_custom_cmd_send`\n- `mjpeg_ndk_start_bl618`\n- `mjpeg_ndk_stop_bl618`\n- `mjpeg_ndk_command_send_bl618`\n- `mjpeg_ndk_custom_cmd_send_bl618`\n- `handle_mcu_msg_frag`\n- `handle_mcu_msg_frag_bl618`\n- `build_send_ack`\n- `build_send_ack_bl618`\n\n### Native Start Behavior\n\n`nativeStart()` starts two internal native backends:\n\n```c\ncontext = mjpeg_ndk_start(\"192.168.169.1\", \"8800\", NULL);\nmjpeg_ndk_frame_callback_register(context, callback_jpeg, context);\nmjpeg_ndk_ctlmsg_cb_register(context, callback_ctlmsg, context);\nmjpeg_ndk_track_set_sdk(context, sdk);\nmjpeg_ndk_track_callback_register(context, callback_track, context);\n\ncontext_bl618 = mjpeg_ndk_start_bl618(NULL, \"192.168.169.1\", \"8801\");\nmjpeg_ndk_frame_callback_register_bl618(context_bl618, callback_jpeg, 
context_bl618);\nmjpeg_ndk_ctlmsg_cb_register_bl618(context_bl618, callback_ctlmsg, context_bl618);\nmjpeg_ndk_track_set_sdk_bl618(context_bl618, sdk);\nmjpeg_ndk_track_callback_register_bl618(context_bl618, callback_track, context_bl618);\n```\n\nImplications:\n\n- Native UAVSDK probes both the normal path and the BL618 path.\n- Normal backend targets `192.168.169.1:8800`.\n- BL618 backend targets `192.168.169.1:8801`.\n- Turbodrone's `wifi_uav_uav` mode now mirrors this by sending RC and video\n  startup/request traffic to both ports.\n\n### Native Socket Setup\n\nNormal `create_instance()`:\n\n- creates an ACK/socket bound to `0.0.0.0` on an ephemeral local port\n- sets MCU target to `192.168.169.1:8800`\n- creates a side UDP socket using `NetworkSocket_Create(Network_UDP, 0x271a)`\n- `0x271a == 10010`\n\nBL618 `create_instance_bl618()`:\n\n- creates an ACK/socket bound to `0.0.0.0` on an ephemeral local port\n- sets MCU target to `192.168.169.1:8801`\n- creates a side UDP socket using `NetworkSocket_Create(Network_UDP, 0x271b)`\n- `0x271b == 10011`\n- logs the ephemeral ACK socket with `getsockname()`\n\nK417 captures showed video fragments arrive at the ephemeral ACK socket, not at\n`10010` or `10011`.\n\nThe native build strings in the analyzed library are:\n\n- normal path: `BL608_V20240426`\n- BL618 path: `BL618_V20240426`\n\n### Native Startup Packet\n\nThe startup packet is:\n\n```text\nef 00 04 00\n```\n\nBoth native paths send this repeatedly during startup until receive state is\nestablished.\n\nNormal timer behavior:\n\n- before first frame/control receive, sends the startup packet about every\n  `> 100 ms`\n- after receiving data, sends ACK/request packets about every `> 24 ms`\n- if no activity for about `3001 ms`, emits a two-byte callback and resets\n  native receive state\n\nBL618 timer behavior:\n\n- before first receive, sends the startup packet about every `> 200 ms`\n- after receiving data, sends ACK/request packets about every `> 50 ms`\n- 
only sends an ACK packet when at least one ACK slot is present\n- has an extra device-config update path that packages SSID/channel/user bytes\n  with the marker string `oGMcOfyZdIurm2kS`\n\n### Native Command Queueing\n\n`nativeSendCtlMsg(byte[], len)` does not immediately `sendto()` the joystick\npacket. It copies commands shorter than `65` bytes into each backend's\n`user_msg_ctx`:\n\n```c\nmjpeg_ndk_command_send(context, data, len, &seq);\nmjpeg_ndk_command_send_bl618(context_bl618, data, len, &seq);\n```\n\nNormal `mjpeg_ndk_command_send()` accepts commands only when:\n\n- context is non-null and alive\n- command length is `<= 64`\n- backend is enabled\n- `has_received != 0`\n\nBL618 is similar but allows `<= 128` bytes at the lower native layer. The JNI\nwrapper still limits `nativeSendCtlMsg()` to `< 65` bytes.\n\nThe queued command is embedded into the next ACK/request packet:\n\n- `user_msg_ctx.seq`\n- `user_msg_ctx.length`\n- up to `64` bytes of user command data\n\n`nativeSendCustomMsg()` is different. 
It wraps the payload in an immediate\ncustom packet:\n\n```text\nef 20 <len:u16 little-endian> <custom-payload...>\n```\n\nbut it still requires the backend to be enabled and to have received data.\n\nThere are native JNI declarations for camera index, rotation, and quality\nparameters:\n\n```java\nnativeSetCameraIndex(int index);\nnativeSetCameraRotate180(int value);\nnativeSetQPara(int q1, int q2, int t1, int t2);\n```\n\nGhidra export of these functions confirms:\n\n- `nativeSetCameraIndex(index)` fans out to both BL608 and BL618:\n  `mjpeg_ndk_set_active_camera_index(context, index)` and\n  `mjpeg_ndk_set_active_camera_index_bl618(context_bl618, index)`.\n- `nativeSetCameraRotate180(value)` fans out to both tracking pipelines:\n  `mjpeg_ndk_track_set_rgb_rotate_180*()`.\n- `nativeSetQPara(q1, q2, t1, t2)` updates only the normal/BL608 context in\n  this exported function: `mjpeg_ndk_set_QPara(context, ...)`.\n\nTheir effects are visible in exported state:\n\n- `build_send_ack*()` places quality bytes and `active_cam_idx` into every\n  ACK/request packet.\n- `handle_mcu_msg_frag()` uses `active_cam_idx` to choose whether main-camera\n  or flow-camera status is passed to the JPEG callback.\n- Java calls `nativeSetCameraIndex(0)` on stream start for UAV and\n  `nativeSetCameraIndex(z ? 1 : 0)` for camera switching.\n\n`handle_mcu_msg_ack_bl618()` is present, but decompiles to a no-op apart from\nstack checking. BL618 type `0x02` ACK messages are recognized by the receive\nthread but ignored in this build.\n\n`callback_jpeg()` has one important backend-selection behavior: once either the\nnormal context or BL618 context produces JPEG frames, it stops the other\ncontext. This is how the dual-probe `nativeStart()` collapses to the working\nbackend at runtime.\n\n`callback_ctlmsg()` attaches to the JVM, copies native bytes into a Java\n`byte[]`, and calls `cbCtlMsgFromNative`. 
The native callback carries the MCU\nsequence; Java's decompiled `UAVSDK` then forwards the byte array to\n`e00.handleCtlMsg()`.\n\n## Video Packet Format\n\nK417 captures and native `handle_mcu_msg_frag*()` agree on this layout:\n\n| Offset | Size | Meaning |\n| --- | ---: | --- |\n| `0` | 1 | `0x93` |\n| `1` | 1 | message type; `0x01` means JPEG fragment |\n| `2` | 2 | total packet length, little-endian |\n| `4` | 4 | message sequence / id |\n| `8` | 8 | image sequence |\n| `16` | 8 | observed duplicate/secondary sequence; K417 captures match image sequence, exported native frag handler does not read this field |\n| `24` | 8 | last-finished / acked-ish sequence field |\n| `32` | 4 | fragment index |\n| `36` | 4 | fragment count |\n| `40` | 4 | frame body length |\n| `44` | 2 | width |\n| `46` | 2 | height |\n| `48` | 1 | quality |\n| `52` | 1 | main camera status |\n| `53` | 1 | flow camera status |\n| `56+` | var | JPEG payload fragment |\n\nObserved K417 traffic:\n\n- drone sends from `192.168.169.1:1234`\n- PC receives on the ephemeral socket used to send ACK/request packets\n- typical packet length: `1088` bytes\n- quality: `50`\n- about `10-19` fragments per frame\n- about `15 fps` in a working capture\n\nShort capture validation from `captures/k417_wifi_uav_fld.pcapng` and\n`captures/k417_wifi_uav_live_after_parser.pcapng`:\n\n- outbound flow: `192.168.169.2:<ephemeral> -> 192.168.169.1:8800`\n- inbound flow: `192.168.169.1:1234 -> 192.168.169.2:<ephemeral>`\n- startup packets: `ef 00 04 00`\n- request packets alternate between `88` and `124` byte `ef 02` packets\n- `ef 20` custom packets were not present in these short captures\n- inbound fragments were `0x93 0x01`, usually `1080` bytes each\n- observed frame header values: `640x360`, quality `50`, fragment count `20`\n- the 124-byte request tail decodes as two ACK slots, for example:\n\n```text\nseq=9  status=1  len=20  bitmap=ff ff ff ff\nseq=9  status=3  len=16\n```\n\nThose slots match the current 
Turbodrone fallback/request behavior and the\nnative ACK slot layout.\n\n## ACK / Request Packet Format\n\nNative `build_send_ack()` and `build_send_ack_bl618()` emit this broad shape:\n\n| Offset | Size | Meaning |\n| --- | ---: | --- |\n| `0` | 1 | `0xef` |\n| `1` | 1 | `0x02` |\n| `2` | 2 | packet length, little-endian |\n| `4` | 4 | constant `02 02 00 01` |\n| `8` | 1 | ACK slot count |\n| `9` | 3 | padding |\n| `12` | 4 | queued user-command sequence |\n| `16` | 2 | queued user-command length |\n| `18` | 64 | queued user-command data |\n| `82` | 1 | quality1 |\n| `83` | 1 | quality2 |\n| `84` | 1 | q_threshold1 |\n| `85` | 1 | q_threshold2 |\n| `86` | 1 | active camera index |\n| `87` | 1 | padding |\n| `88+` | var | ACK slot records |\n\nEach ACK slot:\n\n| Offset | Size | Meaning |\n| --- | ---: | --- |\n| `0` | 8 | image sequence |\n| `8` | 4 | status |\n| `12` | 4 | slot record length |\n| `16+` | var | optional fragment ACK bitmap |\n\nStatus values inferred from native:\n\n- `0`: receiving / partial\n- `1`: complete / delivered\n- `2`: dropped\n- `3`: future/request slot\n\nTurbodrone now generates native-shaped ACK packets and tracks in-flight frame\nstate using `WifiUavAckState`.\n\n### Native Frame State Machine\n\nThe native receiver uses four frame slots:\n\n```c\nslot_idx = (image_sequence + 3) & 3;\n```\n\nInternal native slot statuses are distinct from the ACK status values:\n\n| Internal status | Meaning | ACK status emitted |\n| --- | --- | --- |\n| `0` | empty / unused | none or future slot |\n| `1` | receiving fragments | `0` |\n| `2` | all fragments received, ready to deliver | `1` |\n| `3` | delivered | `1` |\n| `4` | dropped / skipped | `2` |\n\nFor a new frame, `handle_mcu_msg_frag()`:\n\n- updates `last_finished` from the header's offset `24` field\n- updates `max_recv_seq`\n- initializes the slot's fragment count, body length, width, height, and\n  quality\n- records main/flow camera status from offsets `52` and `53`\n- initializes a 
fragment ACK bitmap and sets the received fragment bit\n- copies payload from offset `56` into the frame buffer\n\nWhen all fragments are present, the frame moves to the native image queue. The\nqueue has three entries but keeps at most two queued frames; if it is full, the\noldest queued frame is dropped before adding the next one.\n\nThe native `thread_ctl` then drains queued frames into Java/native callbacks:\n\n- normal path sleeps roughly `38 ms` per loop, with an extra `19 ms` wait when\n  no frame is queued\n- BL618 path sleeps roughly `16 ms` per loop\n- both paths also call `UPUAVSDK_UpdateJPEGData()` for the tracking pipeline\n\nBL618 adds a frame-drop/recovery path that the normal BL608 path does not show\nin the exported decompilation. If no frame was delivered and the image queue is\nempty, BL618 can mark an in-progress frame at `seq_to_deliver` as dropped\n(`status = 4`) so later complete frames can advance.\n\n### Native Control Callback Format\n\nNative `thread_recv()` accepts only packets with:\n\n- byte `0`: `0x93`\n- bytes `2..3`: packet length matching the datagram size\n\nMessage type `0x01` is JPEG fragment data. Message type `0x04` is MCU/control\ndata.\n\nFor MCU/control packets, native code forwards:\n\n- payload pointer: `data + 0x0c`\n- payload length: little-endian `uint16` at offset `8`\n- sequence: little-endian `uint32` at offset `4`\n\nThere are special cases for photo/record callbacks with 9-byte payloads shaped\nlike `<p....>` or `<r....>` plus XOR checksum. These are collapsed into\ntwo-byte Java callbacks so the Java layer sees `UAV_Flight_Ctl_Picture` or\n`UAV_Flight_Ctl_Record`.\n\n## Control Packet Semantics\n\nThe app has two control builders:\n\n- `xx.f()` short 8-byte layout\n- `xx.g()` extended 20-byte layout\n\nTurbodrone embeds the extended layout (`66 14 ...`) in the longer `ef 02 ...`\npacket wrapper.\n\nIn `xx.g()`, takeoff and land share the same one-key bit:\n\n```java\nbArr[6] = (takeoff ? 
1 : 0)\n        | (land    ? 1 : 0)\n        | (stop    ? 2 : 0)\n        | (gyro    ? 4 : 0)\n        | (roll    ? 8 : 0)\n        | ((ptz & 3) << 6);\n```\n\nMeaning for Turbodrone's current WiFi-UAV extended command layout:\n\n| Action | Byte 6 bit |\n| --- | ---: |\n| takeoff / land one-key action | `0x01` |\n| emergency stop | `0x02` |\n| gyro/calibration | `0x04` |\n| flip/roll | `0x08` |\n\nK417 testing confirmed:\n\n- app land button descends gracefully\n- Turbodrone previously mapped land to `0x02`, causing motor cutoff\n- Turbodrone now maps both takeoff and land to `0x01`, and e-stop to `0x02`\n\n### App Control Loop Details\n\n`ActCtrl.P1()` is the app's recurring RC send loop. It runs only when:\n\n```java\nthis.W.u() && this.W.f() == 1\n```\n\nFor the UAV backend, `f()` always returns `1`. For the FLD backend, `f()` is\n`lxSigPro.AppCtrlSt()`.\n\nWhen the on/off control is disabled, the app sends a neutral FLD-style packet a\nfew times:\n\n```java\n{ 0x66, 0x80, 0x80, 0x80, 0x80, 0x40, 0x40, 0x99 }\n```\n\nFor the UAV backend, after having sent active controls, it also sends an\neight-byte all-zero packet once when turning controls off.\n\nDuring active control, `ActCtrl` fills `xx` from the UI:\n\n| `xx` field | Source |\n| --- | --- |\n| `a` | `getTrSliderYpercent()` or hand/track override |\n| `b` | `getTrSliderXpercent()` or hand/track override |\n| `c` | `getEaSliderXpercent() + getPathPercentX()` or trim override |\n| `d` | `getEaSliderYpercent() + getPathPercentY()` or trim override |\n| `e/f/g/h` | trim/tuning sliders |\n| `i` | up / one-key takeoff flag |\n| `j` | down / one-key land flag |\n| `k` | stop flag |\n| `l` | roll/flip flag |\n| `m` | back flag |\n| `n` | headless flag |\n| `o` | gyro/check flag |\n| `p` | high/low mode flag |\n| `q` | hand-flow active |\n| `r` | tracking active |\n| `s` | speed index |\n| `v` | PTZ up/down state; likely camera tilt servo in practice |\n\nThe app chooses the packet shape with:\n\n```java\nint 
extended = (this.W.d0() && this.W.L()) ? 1 : 0;\nbyte[] packet = extended != 0 ? this.X.g() : this.X.f();\n```\n\nSo `xx.g()` is used only for the UAV backend when the version string says this\nis a FLOW device. Otherwise the app uses the short `xx.f()` packet.\n\nBoth packet builders use an XOR checksum:\n\n- short packet: XOR bytes `1..5` into byte `6`, terminator `0x99`\n- extended packet: XOR bytes `2..17` into byte `18`, terminator `0x99`\n\nThe app also parses a serial/status packet in `xx.j()`:\n\n```text\n88 00 <fly-state> <battery> <flags> ... <xor> 55\n```\n\nThe status flags byte exposes speed, RC-control support, PTZ support,\nheadless/photo/video/light flags, and is forwarded through `xx.a`.\n\n### RC Packet Layouts\n\n`xx.f()` short packet:\n\n| Offset | Meaning |\n| --- | --- |\n| `0` | Constant `0x66`. |\n| `1` | Rudder / yaw axis: `h(c, g, s, true)`. |\n| `2` | Elevator / pitch axis: `h(d, h, s, true)`. |\n| `3` | Throttle axis: `h(a, e, 2, p)`. |\n| `4` | Aileron / roll axis: `h(b, f, s, true)`. |\n| `5` | Button bitfield. |\n| `6` | XOR checksum over bytes `1..5`. |\n| `7` | Terminator `0x99`. |\n\nShort packet byte `5`:\n\n| Bit | Java source | Meaning |\n| ---: | --- | --- |\n| `0x01` | `i` / `getUpBtnState()` | Takeoff/up. |\n| `0x02` | `j` / `getDownBtnState()` | Land/down. |\n| `0x04` | `k` / `getStopBtnState()` | Stop. |\n| `0x08` | `l` / `getRollBtnState()` | Flip/roll. |\n| `0x10` | `n` / `getHeadlessBtnState()` | Headless. |\n| `0x20` | `m` / `getBackBtnState()` | Back/return. |\n| `0x40` | `p` / `getHlBtnState()` | High/low or throttle-center mode. |\n| `0x80` | `o` / `getGyoCheckBtnState()` | Gyro/calibration/check. |\n\n`xx.g()` extended packet:\n\n| Offset | Meaning |\n| --- | --- |\n| `0` | Constant `0x66`. |\n| `1` | Constant length `0x14`. |\n| `2` | Rudder / yaw axis: `h(c, g, s, true)`. |\n| `3` | Elevator / pitch axis: `h(d, h, s, true)`. |\n| `4` | Throttle axis: `h(a, e, 2, p)`. 
|\n| `5` | Aileron / roll axis: `h(b, f, s, true)`. |\n| `6` | Primary button/PTZ bitfield. |\n| `7` | Secondary mode bitfield. |\n| `8..17` | Zero in this app build. |\n| `18` | XOR checksum over bytes `2..17`. |\n| `19` | Terminator `0x99`. |\n\nExtended packet byte `6`:\n\n| Bits | Java source | Meaning |\n| ---: | --- | --- |\n| `0x01` | `i` or `j` | Shared one-key takeoff/land action. |\n| `0x02` | `k` | Emergency stop. |\n| `0x04` | `o` | Gyro/calibration/check. |\n| `0x08` | `l` | Flip/roll. |\n| `0xc0` | `v << 6` | PTZ up/down state; likely tilt up/down. |\n\nExtended packet byte `7`:\n\n| Bit | Java source | Meaning |\n| ---: | --- | --- |\n| `0x01` | `n` | Headless. |\n| `0x02` | `p` | High/low or throttle-center mode. |\n| `0x04` | `r` | Person tracking active. |\n| `0x80` | `q` | Hand-flow active. |\n\nAxis scaling uses `xx.h(float f, float trim, int speed, boolean centered)`.\nSpeed indexes map to:\n\n| Speed index | Scale |\n| ---: | ---: |\n| `0` | `0.30` |\n| `1` | `0.60` |\n| `2` | `1.00` |\n| `3` | `0.25` |\n\nFor centered axes, neutral is approximately `127.5`, stick movement is\n`(f - 0.5) * scale * 255`, and trim adds `(trim - 0.5) * 63`.\n\nFor non-centered throttle mode, the app uses `f * scale * 255` plus the same\ntrim term. In practice `xx.f()` and `xx.g()` pass `p` as the throttle\n`centered` argument, so high/low mode also changes throttle centering behavior.\n\n### UI Button To Command Map\n\n`lxCtrlView` switches between `lxFldCtrlView` and `lxUavCtrlView`. Both expose\nthe same `lxBasicView` state getters, but their visible controls differ.\n\n`lxFldCtrlView` mappings:\n\n| UI control | Getter / field | RC effect |\n| --- | --- | --- |\n| `takeoff_nor` button | `getUpBtnState()` / `m` | Short packet bit `0x01`. |\n| `landing_nor` button | `getDownBtnState()` / `n` | Short packet bit `0x02`. |\n| `Stop` button | `getStopBtnState()` / `o` | Short packet bit `0x04`. 
|\n| `roll360` button | `getRollBtnState()` / `l` | Short packet bit `0x08`; XY roll direction can override axis byte. |\n| `nohead` button | `getHeadlessBtnState()` / `F` | Short packet bit `0x10`. |\n| `hl` button | `getHlBtnState()` / `w` | Short packet bit `0x40`; also hides takeoff/land/stop/roll when off. |\n| `checkout` button | `getGyoCheckBtnState()` / `G` | Short packet bit `0x80`. |\n| `record` button | `getRecordBtnState()` / `s` | Local recording state; also `ActCtrl.f()` starts/stops local record. |\n| `photo` button | calls `ActCtrl.q()` | Triggers local snapshot path. |\n| `speed` button | `getSpeedBtnState()` / `M` | Cycles speed index `0..2`. |\n| `path` / gyro / mirror / VR | view state | Affects axes, rendering, or mode bits where applicable. |\n\n`lxUavCtrlView` mappings:\n\n| UI control | Getter / field | RC/custom effect |\n| --- | --- | --- |\n| `btn_flight_mode` | `getOnoffBtnState()` / `r` | Enables periodic RC sending. |\n| takeoff/land shared button | `getUpBtnState()` and `getDownBtnState()` / `S` | Extended packet shared one-key bit `0x01`. |\n| `btn_stop` | `getStopBtnState()` / `s` | Extended packet e-stop bit `0x02`. |\n| `roll360` | `getRollBtnState()` / `t` | Extended packet roll bit `0x08`; XY roll direction can override axis bytes. |\n| `btn_gyocheck` | `getGyoCheckBtnState()` / `I` | Extended packet gyro bit `0x04`. |\n| `btn_headless_mode` | `getHeadlessBtnState()` / `J` | Extended packet byte `7` bit `0x01`. |\n| `btn_body` / hand-flow | `getHandFlowState()` / `D` | Extended packet byte `7` bit `0x80`; starts/stops palm tracking through `nativeStartPalmTrack()` / `nativeStopPalmTrack()`. |\n| person-flow button | `r` flag | Extended packet byte `7` bit `0x04`; starts person tracking through `nativeStartPersonTrack(x,y)`. |\n| PTZ slider | `getPtzUpDnState()` / `B` | Extended packet byte `6` bits `6..7`: `1` down/one tilt direction, `2` up/other tilt direction, `0` neutral. 
|\n| `btn_hvcam` | `getHvCamBtnState()` / `v` | Triggers custom command sequence through `W()`, `P()`, and `Q()` depending on capability state. |\n| `btn_scene` | scene state / `w.j` | Calls `T(scene)` when scene capability is known, otherwise probes with `Q()`. |\n| front/back camera button | `setFbCamBtnState()` | Calls `ActCtrl.R()`, which uses `W.W()` when supported. |\n| camera switch | `getSwCamBtnState()` / `k` | Calls `e00.t(z)`, which sets `nativeSetCameraIndex(z ? 1 : 0)`. |\n| record/photo/speed/mirror/VR/scale | view state or `ActCtrl` callbacks | Local record/snapshot/render controls plus possible SDK custom commands. |\n\nPTZ / Camera Tilt Note:\n\n```java\ngetPtzUpDnState() == 0 -> neutral\ngetPtzUpDnState() == 1 -> extended byte 6 bits 6..7 = 01b\ngetPtzUpDnState() == 2 -> extended byte 6 bits 6..7 = 10b\n```\n\nThe app labels this as PTZ, but the decompiled UI only exposes an up/down\nstate. Based on observed toy drone hardware, this should be treated first as a\ncamera tilt servo command, not full pan/tilt/zoom:\n\n- no pan axis has been identified in the Java UI or native command path\n- no optical zoom command has been identified\n- any \"zoom\" seen in the app is likely software/render zoom through\n  `lxEglView` scaling, not a drone-side camera command\n- the likely hardware action is camera pitch/tilt up or down\n\nThe app only samples this tilt/PTZ value into `xx.v`; it does not send a\nseparate custom PTZ message. Therefore Turbodrone tilt support should be\nimplemented as a persistent or momentary field in the extended `66 14 ...` RC\ncommand for UAV/FLOW-style devices. FLD views report no PTZ state in the\ndecompiled app, even though some hardware may expose camera tilt through other\nSDK-specific paths.\n\nThe click handlers call `lxBtn.e()`, which toggles the selected state and\nrecords the selection timestamp. 
The main RC loop then samples the selected\nstates on its next `P1()` tick.\n\n### Photo And Record Paths\n\nThere are two separate concepts:\n\n- local phone snapshot/recording through `lxEglView`\n- drone-side SD capture/record commands through the backend SDK\n\nLocal snapshot:\n\n```text\nActCtrl.g2()\n  -> choose output file under lxConfig.ePhotoFolder\n  -> choose resolution through d00.Z(...)\n  -> lxEglView.snapshot(width, height, 1, jpg_path)\n  -> W.l(1)\n```\n\nFor `Fld`, `W.l(1)` maps to `wz.l(1)` and then `lxSigPro.SdCapture(1)`.\nFor `Uav`, `e00.l()` returns `-1`, so the local snapshot is the effective\nbehavior unless a native photo callback arrives.\n\nLocal recording:\n\n```text\nActCtrl.c2()\n  -> W.m(true)\n  -> lxEglView.startARecord(width, height, mime_or_profile, mp4_path, false, cb)\n  -> lxEglView.snapshot(..., mp4_path + \".pic\")\n```\n\n`lxEglView.startARecord()` creates a Java hardware encoder (`zz`) and passes\nthe encoder surface to native `lxEglView.ntvStartRec()`. If the preferred\nresolution fails with `-2`, the app retries `4096x2160`, `3840x2160`,\n`2560x1440`, `1920x1080`, then `1280x720`.\n\nFor `Fld`, `W.m(true/false)` maps to `lxSigPro.SdRecord(boolean)`. 
For `Uav`,\n`e00.m()` returns `-1`, so local MP4 recording is the effective behavior.\n\nBackend callbacks can also toggle UI photo/record state:\n\n- FLD JSON config callback keys `lgDevCbkPic` and `lgDevCbkRec`.\n- UAV serial/control callbacks `[0, 113]` for picture and `[0, 114]` for\n  record.\n- UAV nested command `[0, 102, ..., status]` with status `2` for picture and\n  `4` for record.\n\n### Video Feed Rendering Path\n\nAll backend video frames converge at `ActCtrl.V(tz, lxPro.lgFrameInFo)`.\n\nFor `FrameType == 1`:\n\n```text\nFrameBuf JPEG bytes\n  -> BitmapFactory.decodeByteArray(...)\n  -> lxEglView.set(Bitmap)\n  -> lxEglView.requestRender()\n```\n\nThe app may rotate JPEG frames before rendering:\n\n- small frames (`min(width,height) < 320`) from a backend reporting\n  `tz.v() == true` are rotated `-90` unless BRoll rendering is active\n- custom UAV camera/mirror state can add another `+90`\n- mirror state is synchronized through `a2()`\n\nFor `FrameType == 2`:\n\n```text\nFrameBuf YUV bytes\n  -> lxEglView.set(byte[], width, height)\n  -> native ntvSetYuv420p(...)\n  -> requestRender()\n```\n\n`lxEglView` is a native OpenGL surface wrapper:\n\n- creates native state with `ntvCreate(LXLIB_SIGKEY)`\n- sends surface lifecycle into `ntvSurfaceCreated/Changed/Destroyed`\n- auto-renders from an internal thread by calling `ntvRequestRender()`\n- draws bitmap frames through `ntvSetBitmap()`\n- draws YUV frames through `ntvSetYuv420p()` or `ntvSetYuv()`\n- handles mirror, rotate, roll, scale, VR/split-screen, filters, snapshots, and\n  recording in native `lxEglView`\n\nThis means Turbodrone should treat decoded frame assembly separately from app\nrendering: the app's transport delivers JPEG/YUV frame payloads, and\n`lxEglView` handles presentation, local capture, and MP4 encoding.\n\n## K417 Notes\n\nObserved K417 SSID:\n\n```text\nDRONE_4C8172\n```\n\nThis prefix is not present in the decompiled app's dispatcher, but testing shows\nit is compatible with the 
`fld`-style wire path:\n\n```env\nDRONE_TYPE=wifi_uav_fld\n```\n\nWorking capture summary:\n\n- inbound: `192.168.169.1:1234 -> 192.168.169.2:<ephemeral>`\n- outbound: `<ephemeral> -> 192.168.169.1:8800`\n- `4527` video packets in about `20s`\n- `295` frame sequences\n- `294` complete frames\n- approximately `15 fps`\n\nWindows Firewall can block inbound video because the drone replies from UDP\nsource port `1234`, not from `8800`. Packet capture may show traffic even if\nPython does not receive it. During testing, disabling firewall allowed Python\nto receive and assemble frames.\n\n## Turbodrone Implementation State\n\nCurrent related files:\n\n- `backend/protocols/wifi_uav_rc_protocol_adapter.py`\n- `backend/protocols/wifi_uav_video_protocol.py`\n- `backend/utils/wifi_uav_packets.py`\n- `backend/utils/wifi_uav_ack_state.py`\n- `backend/utils/wifi_uav_variants.py`\n\nImplemented:\n\n- `DRONE_TYPE=wifi_uav`, `wifi_uav_fld`, `wifi_uav_uav`\n- best-effort SSID mapping\n- `DRONE_` maps to `fld`\n- `wifi_uav_uav` probes `8800` and `8801`\n- internal WiFi-UAV capability profiles for `fld` and `uav`\n- corrected WiFi-UAV extended land/e-stop semantics\n- WiFi-UAV speed tier field (`speed_index`) using the app's speed scale table\n- native-shaped ACK/request packet builder\n- native fragment parser\n- ACK state tracking\n- duplicate delivered-frame guard\n- startup/request burst moved after RX thread startup\n\nRecommended implementation cleanup:\n\n- Split WiFi-UAV RC packet builders by variant instead of using the extended\n  UAV/FLOW packet shape for every WiFi-UAV device.\n- Keep `wifi_uav_uav` / FLOW on the native-shaped `ef 02` ACK/request packet\n  with embedded extended `66 14 ...` command.\n- Treat `wifi_uav_fld` / K417 as the empirically working compatibility path,\n  but document that the stock app's native FLD stack actually wraps RC bytes in\n  the `f5 e2 e3 cb ... 
e3 a5 cb cc` `eDgSrlOspf` envelope and targets\n  `192.168.100.1:18881`.\n- Add explicit model fields for:\n  - `flip_flag`\n  - `camera_tilt_state` / `ptz_state` (`0`, `1`, `2`) with the UI wording\n    biased toward \"tilt\" unless hardware proves full PTZ\n  - `camera_index` or camera-switch command\n- Add frontend controls for `speed_index` and camera tilt once the hardware\n  behavior is tested.\n- Build the extended command bytes from the documented `xx.g()` structure\n  rather than from fixed packet constants, then pass that command into\n  `build_native_ack_packet()`.\n- Consider a capability object per WiFi-UAV variant so the frontend can show:\n  - one-key takeoff/land vs independent takeoff/land\n  - camera tilt available or not\n  - camera-switch available or not\n  - FLOW/UAV dual-port probing vs FLD/K417 single-port behavior\n\nRemaining possible work:\n\n- Full native four-slot state machine parity, if needed.\n- More `wifi_uav_uav` / FLOW hardware testing, especially PTZ and camera\n  switch behavior.\n- Proper Windows firewall documentation or setup helper.\n- Frontend capability refinement: WiFi-UAV takeoff/land is really one-key\n  takeoff/land, not independent commands.\n\n## Completeness Assessment\n\nThe app-level map is now strong for:\n\n- Java UI-to-RC mapping.\n- `xx.f()` and `xx.g()` command byte layouts.\n- `d00` backend dispatch and lifecycle.\n- UAVSDK / `libuav_lib.so` startup, command queueing, ACK/request, fragment\n  parsing, and Java callback flow.\n- App rendering, local snapshot, and local recording behavior.\n- K417 `DRONE_*` capture behavior for the FLD-compatible wire path.\n\nThe remaining unknowns require artifacts not currently present in this\nworkspace, or another hardware/capture pass:\n\n| Gap | Why it matters | What would close it |\n| --- | --- | --- |\n| Deeper `lxPro` packet decode functions | `lgPackCmdSend()` and top-level parser are mapped, but individual `DFuntc` decoders behind `lxCmdMaps` still need 
function-pointer/xref work. | Export/decompile the `lxCmdMaps` targets or inspect them in the saved Ghidra project. |\n| Native rodata constants referenced as `DAT_*` in Ghidra output | Startup/timeout bytes are known from captures for `ef 00 04 00`, but not every timeout/status string/constant is byte-exact from the export snippets. | Ghidra data export or direct binary string/rodata inspection. |\n| Real FLOW/UAV hardware capture | Current hardware captures are K417/`DRONE_*` FLD-compatible. | PCAP from `FLOW_` or `FlOW_` hardware, especially ports `8800` and `8801`. |\n| Takeoff vs land UI distinction in `lxUavCtrlView` | Decompiled Java shows both `getUpBtnState()` and `getDownBtnState()` returning the same field; behavior matches one-key action, but original intent is ambiguous. | Runtime test or cleaner decompilation/original unobfuscated source. |\n\nSo the current map is complete enough to implement and debug the observed K417\npath, model the UAVSDK transport shape, and understand the major FLD/lxPro\nnative architecture. The remaining gaps are narrower: exact FLD decoder\ncallbacks behind `lxCmdMaps`, byte-exact rodata constants, and FLOW hardware\ncaptures.\n"
  },
  {
    "path": "experimental/README.md",
    "content": "# Experimental\nThis directory contains early-stage support for drones that are not yet integrated into the main Turbodrone architecture.\n\nEach subdirectory corresponds to a mobile app and contains control and video protocols.\n\n## Drones and Apps\n\n| App Name              | Supported Drones | Notes |\n|-----------------------|------------------|-------|\n| RC_UFO | E88 pro | PyQt5 app for flying it with a computer |\n"
  },
  {
    "path": "experimental/test_e88pro.py",
    "content": "'''\nAuthor: jithinbp@gmail.com\nFor the E88pro drone which is under 10$\nFeatures: One touch takeoff, land, flip(all ways), fly around, view video feed.\nFinicky features: Switching camera feed , one touch land(falls out of the sky sometimes while landing. might be a battery thing)\n\nControls\nZ : one touch takeoff\nX : land (careful)\nC : Calibrate gyro\nW/S : throttle\nA/D : YAW\nUP/DOWN: PITCH\nLEFT/RIGHT: roll\nF: FLIP (combine with up/down/left/right to specify direction of flip \nH: toggle headless mode\n1,2 : camera selection. barely works.\n\n'''\nimport sys\nimport socket\nimport cv2\nimport time # Import time for delays\nfrom PyQt5.QtWidgets import QApplication, QMainWindow, QLabel, QPushButton, QVBoxLayout, QWidget, QMessageBox\nfrom PyQt5.QtGui import QImage, QPixmap\nfrom PyQt5.QtCore import QThread, pyqtSignal, pyqtSlot, Qt, QByteArray, QTimer\n\n# --- Constants ---\nRTSP_URL = \"rtsp://192.168.1.1:7070/webcam\"\nUDP_IP = \"192.168.1.1\" # This is the target IP for sending commands\nUDP_SEND_PORT = 7099 # Port for sending commands\nUDP_LISTEN_PORT = 7099 # Port for listening to responses (assuming same port for simplicity, adjust if different)\nUDP_UP_COMMAND = b'\\x06\\x01'  # Byte sequence for UP command\nUDP_DOWN_COMMAND = b'\\x06\\x02' # Byte sequence for DOWN command\nUDP_BUFFER_SIZE = 1024 # Buffer size for UDP receive\nSTREAM_REINITIALIZE_DELAY_SEC = 2 # Delay before attempting to re-open stream after disruption\n\n# --- Video Stream Thread ---\nclass VideoStreamThread(QThread):\n    \"\"\"\n    A QThread subclass to handle video capture from an RTSP stream.\n    It emits a QPixmap signal for each new frame.\n    \"\"\"\n    change_pixmap_signal = pyqtSignal(QPixmap)\n    error_signal = pyqtSignal(str)\n\n    def __init__(self, rtsp_url):\n        super().__init__()\n        self._rtsp_url = rtsp_url\n        self._run_flag = True\n        self._reinitialize_flag = False # New flag to trigger stream re-initialization\n        
self._cap = None\n        self._paused = False\n\n    def run(self):\n        \"\"\"\n        Main loop for video capture. Reads frames and emits them as QPixmap.\n        Handles stream opening, reading, and re-initialization.\n        \"\"\"\n        while self._run_flag:\n            if self._paused:\n                continue\n            if self._reinitialize_flag:\n                # If re-initialization is requested, release current capture and prepare to re-open\n                print(\"Re-initializing video stream...\")\n                if self._cap:\n                    self._cap.release()\n                    self._cap = None\n                self.msleep(int(STREAM_REINITIALIZE_DELAY_SEC * 1000)) # Wait before re-opening\n                self._reinitialize_flag = False # Clear the flag\n\n            if not self._cap or not self._cap.isOpened():\n                print(f\"Attempting to open RTSP stream: {self._rtsp_url}\")\n                self._cap = cv2.VideoCapture(self._rtsp_url)\n                if not self._cap.isOpened():\n                    self.error_signal.emit(f\"Error: Could not open RTSP stream at {self._rtsp_url}. 
\"\n                                           \"Please check the URL and network connection.\")\n                    self.msleep(1000) # Wait before retrying to open\n                    continue # Try again in the next loop iteration\n\n                print(\"RTSP stream opened successfully.\")\n\n            # Attempt to read a frame\n            ret, cv_img = self._cap.read()\n            if ret:\n                # Convert OpenCV image to QPixmap\n                qt_format = QImage.Format_RGB888\n                if len(cv_img.shape) == 3 and cv_img.shape[2] == 3: # Check if it's a color image\n                    rgb_image = cv2.cvtColor(cv_img, cv2.COLOR_BGR2RGB)\n                    # Rotate the image 90 degrees clockwise\n                    rgb_image = cv2.rotate(rgb_image, cv2.ROTATE_90_CLOCKWISE)\n                    h, w, ch = rgb_image.shape\n                    bytes_per_line = ch * w\n                    convert_to_qt_format = QImage(rgb_image.data, w, h, bytes_per_line, qt_format)\n                    p = convert_to_qt_format.scaled(640, 480, Qt.KeepAspectRatio) # Scale for display\n                    self.change_pixmap_signal.emit(QPixmap.fromImage(p))\n                else:\n                    print(\"Warning: Received non-RGB image, skipping display.\")\n            else:\n                # If reading fails, assume stream disruption and trigger re-initialization\n                print(\"Warning: Failed to read frame from RTSP stream. Triggering re-initialization.\")\n                self.error_signal.emit(f\"Stream disrupted. 
Attempting to re-establish connection to {self._rtsp_url}.\")\n                self._reinitialize_flag = True # Set flag to re-initialize in next loop iteration\n                if self._cap: # Release immediately to avoid blocking\n                    self._cap.release()\n                    self._cap = None\n            self.msleep(30) # Small delay to prevent busy-waiting and reduce CPU usage\n\n        print(\"Video stream thread stopping.\")\n        if self._cap:\n            self._cap.release()\n            print(\"RTSP stream released.\")\n\n    def stop(self):\n        \"\"\"Stops the video stream thread gracefully.\"\"\"\n        self._run_flag = False\n        self.wait() # Wait for the thread to finish its execution\n\n    @pyqtSlot()\n    def reinitialize_stream(self):\n        \"\"\"\n        Slot to trigger a full re-initialization of the video stream.\n        This is called when a known disruption (like a camera switch) occurs.\n        \"\"\"\n        self._reinitialize_flag = True\n        print(\"Re-initialization requested for video stream.\")\n\n\n# --- UDP Listener Thread ---\nclass UDPListenerThread(QThread):\n    \"\"\"\n    A QThread subclass to listen for incoming UDP data.\n    It emits a signal with the received data.\n    \"\"\"\n    data_received_signal = pyqtSignal(bytes, tuple) # Signal to emit data and sender address\n    error_signal = pyqtSignal(str)\n\n    def __init__(self, port, buffer_size):\n        super().__init__()\n        self._listen_ip = \"\" # We bind to \"\" (0.0.0.0) to listen on all available local interfaces\n        self._port = port\n        self._buffer_size = buffer_size\n        self._run_flag = True\n        self._sock = None\n\n    def run(self):\n        \"\"\"\n        Main loop for UDP listening. 
Receives data and emits it.\n        \"\"\"\n        try:\n            self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # UDP socket\n            self._sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) # Allow reusing the address\n            # Bind to all available network interfaces on the specified port\n            self._sock.bind((self._listen_ip, self._port))\n            self._sock.settimeout(1.0) # Set a timeout to allow graceful shutdown\n            print(f\"UDP listener started on {self._listen_ip}:{self._port} (listening on all interfaces)\")\n        except socket.error as e:\n            self.error_signal.emit(f\"Error binding UDP socket to {self._listen_ip}:{self._port}: {e}\")\n            self._run_flag = False\n            return\n\n        while self._run_flag:\n            try:\n                data, addr = self._sock.recvfrom(self._buffer_size)\n                self.data_received_signal.emit(data, addr)\n                print(f\"Received UDP data from {addr}: {data.hex()}\")\n            except socket.timeout:\n                # Timeout occurred, continue loop to check _run_flag\n                pass\n            except socket.error as e:\n                if self._run_flag: # Only report error if not intentionally stopping\n                    self.error_signal.emit(f\"UDP receive error: {e}\")\n                break # Exit loop on persistent error\n\n        print(\"UDP listener thread stopping.\")\n        if self._sock:\n            self._sock.close()\n            print(\"UDP socket closed.\")\n\n    def stop(self):\n        \"\"\"Stops the UDP listener thread gracefully.\"\"\"\n        self._run_flag = False\n        if self._sock:\n            # A small trick to unblock recvfrom if it's blocking\n            try:\n                self._sock.shutdown(socket.SHUT_RDWR)\n            except OSError:\n                pass # Socket might already be closed or in a bad state\n        self.wait() # Wait for the thread to 
finish\n\n# --- Main Application Window ---\nclass RTSPViewerApp(QMainWindow):\n    \"\"\"\n    Main application window for RTSP viewing and UDP control.\n    \"\"\"\n    SOMERSAULT = 8\n    HEADLESS = 16\n    def __init__(self):\n        super().__init__()\n        self.setWindowTitle(\"RTSP Stream Viewer & Device Control\")\n        self.setGeometry(100, 100, 800, 700) # Adjusted height to accommodate new label\n\n        self.central_widget = QWidget()\n        self.setCentralWidget(self.central_widget)\n        self.layout = QVBoxLayout(self.central_widget)\n\n        # Video display label\n        self.image_label = QLabel(self)\n        self.image_label.setFixedSize(640, 480) # Fixed size for video display\n        self.image_label.setAlignment(Qt.AlignCenter)\n        self.image_label.setStyleSheet(\"background-color: black; border: 1px solid gray;\")\n        self.layout.addWidget(self.image_label, alignment=Qt.AlignCenter)\n        self.flip=False\n        self.headless=False\n        self.cam=0\n\n        # Control buttons\n        self.up_button = QPushButton(\"UP\")\n        self.up_button.setFixedSize(100, 40)\n        self.up_button.clicked.connect(self.on_up_button_clicked)\n        self.layout.addWidget(self.up_button, alignment=Qt.AlignCenter)\n\n        self.down_button = QPushButton(\"DOWN\")\n        self.down_button.setFixedSize(100, 40)\n        self.down_button.clicked.connect(self.on_down_button_clicked)\n        self.layout.addWidget(self.down_button, alignment=Qt.AlignCenter)\n\n        # UDP response display label\n        self.udp_response_label = QLabel(\"UDP Response: None\", self)\n        self.udp_response_label.setAlignment(Qt.AlignCenter)\n        self.udp_response_label.setStyleSheet(\"font-weight: bold; color: blue;\")\n        self.layout.addWidget(self.udp_response_label, alignment=Qt.AlignCenter)\n\n\n        # Initialize video thread\n        video = True\n        if video:\n            self.video_thread = 
VideoStreamThread(RTSP_URL)\n            self.video_thread.change_pixmap_signal.connect(self.update_image)\n            self.video_thread.error_signal.connect(self.show_error_message)\n            self.video_thread.start()\n\n        # Initialize UDP listener thread\n        self.udp_listener_thread = UDPListenerThread(UDP_LISTEN_PORT, UDP_BUFFER_SIZE)\n        self.udp_listener_thread.data_received_signal.connect(self.update_udp_response)\n        self.udp_listener_thread.error_signal.connect(self.show_error_message)\n        self.udp_listener_thread.start()\n\n        # Add a placeholder for when video is not loaded\n        self.image_label.setText(\"Loading RTSP stream...\")\n        self.image_label.setStyleSheet(\"background-color: black; color: white; border: 1px solid gray;\")\n\n        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # UDP socket\n\n        # Ensure the widget itself can receive key press events\n        # We also want it to be able to re-gain focus after buttons are clicked\n        self.setFocusPolicy(Qt.StrongFocus)\n        # Create a QTimer that drives the periodic control-packet send loop\n        self.accel = 50\n        self.decel = 5\n        self.send_udp_command(b'\\x08\\x01')\n        self.timer = QTimer(self)\n        self.last_heartbeat = time.time()\n        self.timer.timeout.connect(self.send)  # Connect the timeout signal to the send method\n        self.timer.start(30)  # Start the timer with a 30 ms interval\n        #byte0:102,byte1:128,byte2:128,byte3:128,byte4:128,byte5:0,byte6:0,byte7:153\n        self.basebytes = bytearray(b'\\x66\\x80\\x80\\x80\\x80\\x00\\x00\\x99')\n\n    def send(self):\n        if self.flip:\n            self.basebytes[5]+=self.SOMERSAULT\n        if self.headless:\n            self.basebytes[5]+=self.HEADLESS\n        self.send_udp_command(bytes(bytearray(b'\\x03') + self.basebytes))\n        if time.time() - self.last_heartbeat > 1:\n            self.send_udp_command(b'\\x01\\x01')\n            
self.last_heartbeat = time.time()\n        if self.cam>0:\n            self.video_thread._paused = True\n            time.sleep(1)\n            l = bytearray(b'\\x06')\n            l.append(self.cam)\n            self.send_udp_command(bytes(l))\n            self.cam=0\n            time.sleep(1)\n            self.video_thread._paused = False\n            self.video_thread.reinitialize_stream()\n\n\n        self.basebytes[5] = 0 \n        for a in range(1,5):\n            if (self.basebytes[a]>128):\n                self.basebytes[a]-=self.decel\n            elif (self.basebytes[a]<128):\n                self.basebytes[a]+=self.decel\n\n\n    def xor(self,bs):\n        s=0\n        for byte_value in bs[1:6]:\n            s ^= byte_value\n        bs[6]=s\n        return bs\n\n    def keyPressEvent(self, event):\n        if event.key() == Qt.Key_Up: #Forward (pitch down)\n            if self.basebytes[2]<200:\n                self.basebytes[2]+=self.accel\n            event.accept()  # Crucial: tell PyQt we handled this event\n        elif event.key() == Qt.Key_Down: #Back (pitch up)\n            if self.basebytes[2]>50:\n                self.basebytes[2]-=self.accel\n            event.accept()  # Crucial: tell PyQt we handled this event\n        elif event.key() == Qt.Key_Left: #Roll left\n            if self.basebytes[1]>50:\n                self.basebytes[1]-=self.accel\n            event.accept()  # Crucial: tell PyQt we handled this event\n        elif event.key() == Qt.Key_Right: #Roll right\n            if self.basebytes[1]<200:\n                self.basebytes[1]+=self.accel\n            event.accept()  # Crucial: tell PyQt we handled this event\n        elif event.key() == Qt.Key_W:\n            if self.basebytes[3]<200:\n                self.basebytes[3]+=self.accel\n            event.accept()  # Crucial: tell PyQt we handled this event\n        elif event.key() == Qt.Key_S:\n            if self.basebytes[3]>50:\n                self.basebytes[3]-=self.accel\n 
           event.accept()  # Crucial: tell PyQt we handled this event\n        elif event.key() == Qt.Key_D:\n            if self.basebytes[4]<200:\n                self.basebytes[4]+=self.accel\n            event.accept()  # Crucial: tell PyQt we handled this event\n        elif event.key() == Qt.Key_A:\n            if self.basebytes[4]>50:\n                self.basebytes[4]-=self.accel\n            event.accept()  # Crucial: tell PyQt we handled this event\n        elif event.key() == Qt.Key_Z: #Takeoff\n            self.basebytes[5] = 1\n            event.accept()  # Crucial: tell PyQt we handled this event\n        elif event.key() == Qt.Key_X: #Land\n            self.basebytes[5] = 2\n            event.accept()  # Crucial: tell PyQt we handled this event\n        elif event.key() == Qt.Key_C: #Calibrate\n            self.basebytes[5] = 128 \n            event.accept()  # Crucial: tell PyQt we handled this event\n        elif event.key() == Qt.Key_F: #Flip 360\n            self.flip = True\n            event.accept()  # Crucial: tell PyQt we handled this event\n        elif event.key() == Qt.Key_H: #Headless\n            self.headless = not self.headless\n            event.accept()  # Crucial: tell PyQt we handled this event\n        elif event.key() == Qt.Key_1: #cam 1\n            self.cam=1\n            event.accept()  # Crucial: tell PyQt we handled this event\n        elif event.key() == Qt.Key_2: #cam 2\n            self.cam=2\n            event.accept()  # Crucial: tell PyQt we handled this event\n        else:\n            # For other keys, let the default behavior happen\n            print(event.key())\n            super().keyPressEvent(event)\n\n\n    @pyqtSlot(QPixmap)\n    def update_image(self, pixmap):\n        \"\"\"Slot to update the QLabel with new video frames.\"\"\"\n        self.image_label.setPixmap(pixmap)\n        self.image_label.setText(\"\") # Clear loading text once video starts\n\n    @pyqtSlot(bytes, tuple)\n    def 
update_udp_response(self, data, addr):\n        \"\"\"Slot to update the QLabel with received UDP data.\"\"\"\n        self.udp_response_label.setText(f\"UDP Response from {addr[0]}:{addr[1]}: {data.hex()}\")\n\n    @pyqtSlot(str)\n    def show_error_message(self, message):\n        \"\"\"Slot to display error messages in a QMessageBox.\"\"\"\n        #QMessageBox.critical(self, \"Error\", message)\n        # For stream errors, keep the error message on the image label\n        if \"Stream disrupted\" in message or \"Could not open RTSP stream\" in message:\n            self.image_label.setText(\"Stream Error: \" + message)\n            self.image_label.setStyleSheet(\"background-color: red; color: white; border: 1px solid gray;\")\n            # Do not disable buttons for temporary stream disruptions, as they are part of control flow\n        else: # For other errors (e.g., UDP binding)\n            self.udp_response_label.setText(\"UDP Error: \" + message)\n            self.udp_response_label.setStyleSheet(\"font-weight: bold; color: red;\")\n\n\n    def send_udp_command(self, command):\n        \"\"\"Sends a UDP command to the specified IP and port.\"\"\"\n        try:\n            self.sock.sendto(command, (UDP_IP, UDP_SEND_PORT))\n            #print(f\"Sent UDP command: {command.hex()} to {UDP_IP}:{UDP_SEND_PORT}\")\n        except socket.error as e:\n            #QMessageBox.warning(self, \"Network Error\", f\"Failed to send UDP command: {e}\")\n            print(f\"UDP send error: {e}\")\n\n    def on_up_button_clicked(self):\n        \"\"\"Handler for the UP button click.\"\"\"\n        self.send_udp_command(UDP_UP_COMMAND)\n        # Give the device a moment to switch cameras and stabilize the stream\n        time.sleep(0.5) # Small delay after sending command\n        self.video_thread.reinitialize_stream() # Trigger full stream re-initialization\n\n    def on_down_button_clicked(self):\n        \"\"\"Handler for the DOWN button click.\"\"\"\n        
self.send_udp_command(UDP_DOWN_COMMAND)\n        # Give the device a moment to switch cameras and stabilize the stream\n        time.sleep(0.5) # Small delay after sending command\n        self.video_thread.reinitialize_stream() # Trigger full stream re-initialization\n\n    def closeEvent(self, event):\n        \"\"\"Ensures all threads are stopped when the application closes.\"\"\"\n        print(\"Closing application. Stopping video and UDP threads...\")\n        self.video_thread.stop()\n        self.udp_listener_thread.stop()\n        event.accept()\n\n# --- Main execution ---\nif __name__ == \"__main__\":\n    app = QApplication(sys.argv)\n    window = RTSPViewerApp()\n    window.show()\n    sys.exit(app.exec_())\n\n"
  },
  {
    "path": "frontend/.gitignore",
    "content": "# Logs\nlogs\n*.log\nnpm-debug.log*\nyarn-debug.log*\nyarn-error.log*\npnpm-debug.log*\nlerna-debug.log*\n\nnode_modules\ndist\ndist-ssr\n*.local\n\n# Editor directories and files\n.vscode/*\n!.vscode/extensions.json\n.idea\n.DS_Store\n*.suo\n*.ntvs*\n*.njsproj\n*.sln\n*.sw?\n"
  },
  {
    "path": "frontend/eslint.config.js",
    "content": "import js from '@eslint/js'\nimport globals from 'globals'\nimport reactHooks from 'eslint-plugin-react-hooks'\nimport reactRefresh from 'eslint-plugin-react-refresh'\nimport tseslint from 'typescript-eslint'\n\nexport default tseslint.config(\n  { ignores: ['dist'] },\n  {\n    extends: [js.configs.recommended, ...tseslint.configs.recommended],\n    files: ['**/*.{ts,tsx}'],\n    languageOptions: {\n      ecmaVersion: 2020,\n      globals: globals.browser,\n    },\n    plugins: {\n      'react-hooks': reactHooks,\n      'react-refresh': reactRefresh,\n    },\n    rules: {\n      ...reactHooks.configs.recommended.rules,\n      'react-refresh/only-export-components': [\n        'warn',\n        { allowConstantExport: true },\n      ],\n    },\n  },\n)\n"
  },
  {
    "path": "frontend/index.html",
    "content": "<!doctype html>\n<html lang=\"en\">\n  <head>\n    <meta charset=\"UTF-8\" />\n    <link rel=\"icon\" type=\"image/svg+xml\" href=\"/vite.svg\" />\n    <meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\" />\n    <title>Turbodrone Web</title>\n  </head>\n  <body>\n    <div id=\"root\"></div>\n    <script type=\"module\" src=\"/src/main.tsx\"></script>\n  </body>\n</html>\n"
  },
  {
    "path": "frontend/package.json",
    "content": "{\n  \"name\": \"frontend\",\n  \"private\": true,\n  \"version\": \"0.0.0\",\n  \"type\": \"module\",\n  \"scripts\": {\n    \"dev\": \"vite\",\n    \"build\": \"tsc -b && vite build\",\n    \"lint\": \"eslint .\",\n    \"preview\": \"vite preview\"\n  },\n  \"dependencies\": {\n    \"react\": \"^19.1.0\",\n    \"react-dom\": \"^19.1.0\",\n    \"react-router-dom\": \"^7.6.2\"\n  },\n  \"devDependencies\": {\n    \"@eslint/js\": \"^9.25.0\",\n    \"@tailwindcss/cli\": \"^4.1.7\",\n    \"@tailwindcss/postcss\": \"^4.1.7\",\n    \"@types/react\": \"^19.1.2\",\n    \"@types/react-dom\": \"^19.1.2\",\n    \"@vitejs/plugin-react\": \"^4.4.1\",\n    \"autoprefixer\": \"^10.4.21\",\n    \"eslint\": \"^9.25.0\",\n    \"eslint-plugin-react-hooks\": \"^5.2.0\",\n    \"eslint-plugin-react-refresh\": \"^0.4.19\",\n    \"globals\": \"^16.0.0\",\n    \"postcss\": \"^8.5.3\",\n    \"tailwindcss\": \"^4.1.7\",\n    \"typescript\": \"~5.8.3\",\n    \"typescript-eslint\": \"^8.30.1\",\n    \"vite\": \"^6.3.5\"\n  }\n}\n"
  },
  {
    "path": "frontend/postcss.config.cjs",
    "content": " // postcss.config.cjs  (new)\r\nmodule.exports = {\r\n  plugins: {\r\n    '@tailwindcss/postcss': {},   // ← new v4 plugin\r\n    autoprefixer: {},             // optional but still fine\r\n  },\r\n};"
  },
  {
    "path": "frontend/src/App.css",
    "content": "#root {\n  max-width: 1280px;\n  margin: 0 auto;\n  padding: 2rem;\n  text-align: center;\n}\n\n.logo {\n  height: 6em;\n  padding: 1.5em;\n  will-change: filter;\n  transition: filter 300ms;\n}\n.logo:hover {\n  filter: drop-shadow(0 0 2em #646cffaa);\n}\n.logo.react:hover {\n  filter: drop-shadow(0 0 2em #61dafbaa);\n}\n\n@keyframes logo-spin {\n  from {\n    transform: rotate(0deg);\n  }\n  to {\n    transform: rotate(360deg);\n  }\n}\n\n@media (prefers-reduced-motion: no-preference) {\n  a:nth-of-type(2) .logo {\n    animation: logo-spin infinite 20s linear;\n  }\n}\n\n.card {\n  padding: 2em;\n}\n\n.read-the-docs {\n  color: #888;\n}\n"
  },
  {
    "path": "frontend/src/App.tsx",
    "content": "import { useControls } from './hooks/useControls';\nimport { ControlSchemeToggle } from './components/ControlSchemeToggle';\nimport { VideoFeed } from './components/VideoFeed';\nimport ControlsOverlay from './components/ControlsOverlay';\nimport { PluginControls } from './components/PluginControls';\nimport { DrawingOverlay } from './components/DrawingOverlay';\n\nfunction App() {\n  const {\n    axes,\n    mode,\n    setMode,\n    gamepadConnected,\n    droneType,\n    commandCapabilities,\n    takeOff,\n    land,\n    emergencyStop,\n    speedTier,\n    setSpeedTier,\n  } = useControls();\n\n  return (\n    <div className=\"relative w-screen h-screen bg-black\">\n      <VideoFeed src=\"http://localhost:8000/mjpeg\" />\n      <DrawingOverlay />\n      <ControlsOverlay\n        axes={axes}\n        droneType={droneType}\n        commandCapabilities={commandCapabilities}\n        speedTier={speedTier}\n        onTakeoff={takeOff}\n        onLand={land}\n        onEstop={emergencyStop}\n        onSpeedChange={setSpeedTier}\n      />\n      <ControlSchemeToggle mode={mode} setMode={setMode} gamepadConnected={gamepadConnected} />\n      <PluginControls />\n    </div>\n  );\n}\n\nexport default App;\n"
  },
  {
    "path": "frontend/src/components/AxisIndicator.tsx",
    "content": "interface Props {\r\n  x: number;           // -1 … +1 (left … right)\r\n  y: number;           // -1 … +1 (down … up)   – positive y = up\r\n  size?: number;\r\n  label?: string;\r\n}\r\n\r\nexport default function AxisIndicator({ x, y, size = 120, label }: Props) {\r\n  // clamp outside callers just in case\r\n  x = Math.max(-1, Math.min(1, x));\r\n  y = Math.max(-1, Math.min(1, y));\r\n\r\n  const radius = size * 0.4;            // max travel inside the square\r\n\r\n  return (\r\n    <div className=\"flex flex-col items-center gap-1\">\r\n      <div\r\n        className=\"relative border border-slate-400/60 rounded-sm\"\r\n        style={{ width: size, height: size }}\r\n      >\r\n        {/* cross-hair */}\r\n        <div className=\"absolute inset-0 m-auto h-px w-full bg-slate-500/40\" />\r\n        <div className=\"absolute inset-0 m-auto w-px h-full bg-slate-500/40\" />\r\n        {/* moving dot */}\r\n        <div\r\n          className=\"absolute w-4 h-4 rounded-full bg-emerald-400 shadow-lg\"\r\n          style={{ \r\n            left: `calc(50% + ${x * radius}px)`,\r\n            top: `calc(50% + ${-y * radius}px)`,\r\n            transform: 'translate(-50%, -50%)'\r\n          }}\r\n        />\r\n      </div>\r\n      {label && <span className=\"text-xs tracking-wide opacity-70\">{label}</span>}\r\n    </div>\r\n  );\r\n} "
  },
  {
    "path": "frontend/src/components/CommandButtons.tsx",
    "content": "import type { CommandCapabilities } from \"../hooks/useControls\";\r\n\r\ninterface CommandButtonsProps {\r\n  droneType: string;\r\n  capabilities: CommandCapabilities;\r\n  onTakeoff: () => void;\r\n  onLand: () => void;\r\n  onEstop: () => void;\r\n}\r\n\r\nconst disabledButtonClasses =\r\n  \"bg-gray-700/80 border border-gray-500/40 text-gray-400 cursor-not-allowed opacity-70\";\r\n\r\nconst baseButtonClasses =\r\n  \"min-w-[112px] px-5 py-2.5 font-semibold rounded-lg shadow-md transition-all duration-150 ease-in-out border focus:outline-none\";\r\n\r\nexport default function CommandButtons({\r\n  droneType,\r\n  capabilities,\r\n  onTakeoff,\r\n  onLand,\r\n  onEstop,\r\n}: CommandButtonsProps) {\r\n  return (\r\n    <div className=\"flex flex-col items-center gap-2 bg-gray-900/70 backdrop-blur-md border border-gray-700/80 rounded-lg shadow-xl p-3\">\r\n      <div className=\"text-[11px] tracking-[0.18em] uppercase text-gray-300 select-none\">\r\n        {droneType} Controls\r\n      </div>\r\n      <div className=\"flex gap-4\">\r\n        <button\r\n          onClick={onTakeoff}\r\n          disabled={!capabilities.takeoff}\r\n          className={`${baseButtonClasses} ${\r\n            capabilities.takeoff\r\n              ? \"bg-green-600 hover:bg-green-700 active:bg-green-800 active:scale-95 text-white border-green-500/60\"\r\n              : disabledButtonClasses\r\n          }`}\r\n          title={capabilities.takeoff ? \"Trigger takeoff\" : \"Takeoff is unavailable for this implementation\"}\r\n        >\r\n          Takeoff\r\n        </button>\r\n        <button\r\n          onClick={onLand}\r\n          disabled={!capabilities.land}\r\n          className={`${baseButtonClasses} ${\r\n            capabilities.land\r\n              ? \"bg-orange-600 hover:bg-orange-700 active:bg-orange-800 active:scale-95 text-white border-orange-400/70\"\r\n              : disabledButtonClasses\r\n          }`}\r\n          title={capabilities.land ? 
\"Trigger land / descend\" : \"Land is unavailable for this implementation\"}\r\n        >\r\n          Land\r\n        </button>\r\n        <button\r\n          onClick={onEstop}\r\n          disabled={!capabilities.estop}\r\n          className={`${baseButtonClasses} ${\r\n            capabilities.estop\r\n              ? \"bg-red-700 hover:bg-red-800 active:bg-red-900 active:scale-95 text-yellow-200 border-yellow-300/60\"\r\n              : disabledButtonClasses\r\n          }`}\r\n          title={capabilities.estop ? \"Emergency stop\" : \"Emergency stop is unavailable for this implementation\"}\r\n        >\r\n          E-STOP\r\n        </button>\r\n      </div>\r\n    </div>\r\n  );\r\n}"
  },
  {
    "path": "frontend/src/components/ControlSchemeToggle.tsx",
    "content": "import React from \"react\";\r\nimport type { ControlMode } from \"../hooks/useControls\";\r\n\r\ninterface Props {\r\n  mode: ControlMode;\r\n  setMode: (m: ControlMode) => void;\r\n  gamepadConnected: boolean;\r\n}\r\n\r\nexport const ControlSchemeToggle: React.FC<Props> = ({ mode, setMode, gamepadConnected }) => {\r\n  /* helpers ensure Pointer-Lock is requested inside the user gesture */\r\n  const toKeyboard = () => setMode(\"inc\");\r\n\r\n  const toGamepad = () => {\r\n    if (gamepadConnected) setMode(\"abs\");\r\n  };\r\n\r\n  const toTrackPoint = () => {\r\n    /* must be called from a user-generated event */\r\n    document.body.requestPointerLock();\r\n    setMode(\"mouse\");\r\n  };\r\n\r\n  return (\r\n    <div className=\"absolute bottom-4 left-4 z-30 bg-gray-900/70 backdrop-blur-md border border-gray-700/80 rounded-lg shadow-xl p-4\">\r\n      <div className=\"flex flex-col gap-4\">\r\n        <div className=\"flex items-center gap-4\">\r\n          <button\r\n            onClick={toKeyboard}\r\n            className={`px-3 py-1.5 rounded text-sm font-medium ${\r\n              mode === \"inc\" ? \"bg-sky-600\" : \"bg-gray-600 hover:bg-gray-500\"\r\n            }`}\r\n          >\r\n            Keyboard\r\n          </button>\r\n\r\n          <button\r\n            onClick={toGamepad}\r\n            disabled={!gamepadConnected}\r\n            className={`px-3 py-1.5 rounded text-sm font-medium ${\r\n              mode === \"abs\"\r\n                ? \"bg-green-600\"\r\n                : gamepadConnected\r\n                  ? \"bg-gray-600 hover:bg-gray-500\"\r\n                  : \"bg-gray-700 cursor-not-allowed opacity-60\"\r\n            }`}\r\n          >\r\n            Gamepad\r\n          </button>\r\n\r\n          <button\r\n            onClick={toTrackPoint}\r\n            className={`px-3 py-1.5 rounded text-sm font-medium ${\r\n              mode === \"mouse\"\r\n                ? 
\"bg-red-600\"\r\n                : \"bg-gray-600 hover:bg-gray-500\"\r\n            }`}\r\n          >\r\n            TrackPoint\r\n          </button>\r\n        </div>\r\n\r\n        {/* status / hints */}\r\n        <div className=\"text-xs text-gray-400 text-center\">\r\n          Current&nbsp;\r\n          <span className=\"font-semibold text-gray-200\">\r\n            {mode === \"inc\" ? \"Keyboard\"\r\n              : mode === \"abs\" ? \"Gamepad\"\r\n              : \"TrackPoint\"}\r\n          </span>\r\n          {mode === \"mouse\" && <span>&nbsp;(Esc to release)</span>}\r\n        </div>\r\n      </div>\r\n    </div>\r\n  );\r\n}; "
  },
  {
    "path": "frontend/src/components/ControlsOverlay.tsx",
    "content": "import AxisIndicator from \"./AxisIndicator\";\r\nimport type { Axes, CommandCapabilities, SpeedTier } from \"../hooks/useControls\";\r\nimport CommandButtons from \"./CommandButtons\";\r\nimport SpeedControl from \"./SpeedControl\";\r\n\r\ninterface ControlsOverlayProps {\r\n  axes: Axes;\r\n  droneType: string;\r\n  commandCapabilities: CommandCapabilities;\r\n  speedTier: SpeedTier;\r\n  onTakeoff: () => void;\r\n  onLand: () => void;\r\n  onEstop: () => void;\r\n  onSpeedChange: (tier: SpeedTier) => void;\r\n}\r\n\r\nexport default function ControlsOverlay({\r\n  axes,\r\n  droneType,\r\n  commandCapabilities,\r\n  speedTier,\r\n  onTakeoff,\r\n  onLand,\r\n  onEstop,\r\n  onSpeedChange,\r\n}: ControlsOverlayProps) {\r\n  const left   = { x: axes.roll,     y: axes.pitch };\r\n  const right  = { x: axes.yaw,      y: axes.throttle };\r\n  const droneLabel = droneType === \"unknown\" ? \"Unknown\" : droneType.replace(/_/g, \" \").toUpperCase();\r\n\r\n  return (\r\n    <div className=\"absolute inset-0 pointer-events-none z-10\">\r\n      {/* title bar */}\r\n      <div className=\"absolute top-0 left-0 right-0 flex justify-center py-6 bg-black/60 backdrop-blur-sm border-b border-white/10\">\r\n        <h1 \r\n          className=\"font-heading font-bold text-white drop-shadow-lg select-none\"\r\n          style={{ \r\n            fontSize: '2rem', \r\n            letterSpacing: '0.1em',\r\n            lineHeight: '1.2',\r\n            margin: 0,\r\n            padding: 0\r\n          }}\r\n        >\r\n          TURBODRONE WEB\r\n        </h1>\r\n      </div>\r\n\r\n      {/* Controls Area: Buttons and Sticks */}\r\n      <div className=\"absolute bottom-8 left-1/2 -translate-x-1/2 flex flex-col items-center gap-5 z-20 pointer-events-auto\">\r\n        {/* Command Buttons Cluster */}\r\n        <CommandButtons\r\n          droneType={droneLabel}\r\n          capabilities={commandCapabilities}\r\n          onTakeoff={onTakeoff}\r\n          
onLand={onLand}\r\n          onEstop={onEstop}\r\n        />\r\n\r\n        {commandCapabilities.speed_control && (\r\n          <SpeedControl\r\n            enabled={commandCapabilities.speed_control}\r\n            value={speedTier}\r\n            onChange={onSpeedChange}\r\n          />\r\n        )}\r\n\r\n        {/* Sticks */}\r\n        <div className=\"flex gap-10\">\r\n          <AxisIndicator {...left}  label=\"PITCH / ROLL\" />\r\n          <AxisIndicator {...right} label=\"YAW / THROTTLE\" />\r\n        </div>\r\n      </div>\r\n    </div>\r\n  );\r\n}\r\n"
  },
  {
    "path": "frontend/src/components/DrawingOverlay.tsx",
    "content": "import { useOverlays } from \"../hooks/useOverlays\";\r\nimport { useVideoSizing } from \"../hooks/useVideoSizing\";\r\n\r\nfunction DrawingOverlay() {\r\n  const overlays = useOverlays();\r\n  const { videoRect } = useVideoSizing();\r\n\r\n  const style: React.CSSProperties = {\r\n    position: \"absolute\",\r\n    left: videoRect.x,\r\n    top: videoRect.y,\r\n    width: videoRect.width,\r\n    height: videoRect.height,\r\n    pointerEvents: \"none\",\r\n  };\r\n\r\n  return (\r\n    <div style={style} className=\"z-10\">\r\n      <svg viewBox=\"0 0 1 1\" width=\"100%\" height=\"100%\" preserveAspectRatio=\"none\">\r\n        {overlays.map((o, i) =>\r\n          o.type === \"rect\" ? (\r\n            <rect\r\n              key={i}\r\n              x={o.coords[0]}\r\n              y={o.coords[1]}\r\n              width={o.coords[2] - o.coords[0]}\r\n              height={o.coords[3] - o.coords[1]}\r\n              fill=\"none\"\r\n              stroke={o.color || \"lime\"}\r\n              strokeWidth={2}\r\n              vectorEffect=\"non-scaling-stroke\"\r\n            />\r\n          ) : null\r\n        )}\r\n      </svg>\r\n    </div>\r\n  );\r\n}\r\n\r\n// Making the component available as both a default and named export\r\n// to resolve the import issue in App.tsx.\r\nexport { DrawingOverlay };\r\nexport default DrawingOverlay;\r\n"
  },
  {
    "path": "frontend/src/components/PluginControls.tsx",
    "content": "import { usePlugins } from '../hooks/usePlugins';\r\n\r\nexport function PluginControls() {\r\n  const { pluginsEnabled, availablePlugins, runningPlugins, isLoading, error, togglePlugin } = usePlugins();\r\n\r\n  // Feature-flagged off by backend (PLUGINS_ENABLED=false)\r\n  if (!pluginsEnabled && !isLoading) {\r\n    return null;\r\n  }\r\n\r\n  if (isLoading) {\r\n    return <div className=\"text-white\">Loading plugins...</div>;\r\n  }\r\n\r\n  if (error) {\r\n    return <div className=\"text-red-500\">Error: {error}</div>;\r\n  }\r\n\r\n  return (\r\n    <div className=\"absolute bottom-4 right-4 bg-gray-800 bg-opacity-70 p-4 rounded-lg shadow-lg text-white\">\r\n      <h3 className=\"text-lg font-bold mb-2\">Plugins</h3>\r\n      {availablePlugins.length === 0 ? (\r\n        <p className=\"text-sm\">No plugins available.</p>\r\n      ) : (\r\n        <ul className=\"space-y-2\">\r\n          {availablePlugins.map((name) => {\r\n            const isRunning = runningPlugins.has(name);\r\n            return (\r\n              <li key={name} className=\"flex items-center justify-between\">\r\n                <span className=\"mr-4\">{name}</span>\r\n                <button\r\n                  onClick={async () => {\r\n                    await togglePlugin(name);\r\n                    // Dispatch events so other hooks (e.g. controls) know without polling\r\n                    const running = runningPlugins.has(name);\r\n                    window.dispatchEvent(new CustomEvent(running ? 'plugin:stopped' : 'plugin:running'));\r\n                  }}\r\n                  className={`px-4 py-1 rounded-full text-sm font-semibold transition-colors\r\n                    ${isRunning\r\n                      ? 'bg-green-500 hover:bg-green-600'\r\n                      : 'bg-red-500 hover:bg-red-600'\r\n                    }`}\r\n                >\r\n                  {isRunning ? 
'ON' : 'OFF'}\r\n                </button>\r\n              </li>\r\n            );\r\n          })}\r\n        </ul>\r\n      )}\r\n    </div>\r\n  );\r\n}\r\n"
  },
  {
    "path": "frontend/src/components/SpeedControl.tsx",
    "content": "import type { SpeedTier } from \"../hooks/useControls\";\r\n\r\ninterface SpeedControlProps {\r\n  enabled: boolean;\r\n  value: SpeedTier;\r\n  onChange: (tier: SpeedTier) => void;\r\n}\r\n\r\nconst TIERS: { value: SpeedTier; label: string; help: string }[] = [\r\n  { value: 0, label: \"Low\",  help: \"Slow / beginner stick scaling\" },\r\n  { value: 1, label: \"Med\",  help: \"Medium stick scaling\" },\r\n  { value: 2, label: \"High\", help: \"Full stick range\" },\r\n];\r\n\r\nexport default function SpeedControl({ enabled, value, onChange }: SpeedControlProps) {\r\n  const baseSegment =\r\n    \"px-3 py-1.5 text-xs font-semibold uppercase tracking-wider transition-colors duration-150 ease-in-out focus:outline-none\";\r\n  const disabledTitle = \"Speed control is unavailable for this implementation\";\r\n\r\n  return (\r\n    <div\r\n      className={`flex items-center gap-2 bg-gray-900/70 backdrop-blur-md border border-gray-700/80 rounded-lg shadow-md px-3 py-2 ${\r\n        enabled ? \"\" : \"opacity-60\"\r\n      }`}\r\n      title={enabled ? \"Adjust drone control sensitivity\" : disabledTitle}\r\n    >\r\n      <span className=\"text-[10px] tracking-[0.18em] uppercase text-gray-300 select-none\">\r\n        Speed\r\n      </span>\r\n      <div className=\"inline-flex rounded-md border border-gray-600/70 overflow-hidden\">\r\n        {TIERS.map((tier, idx) => {\r\n          const active = enabled && value === tier.value;\r\n          const segmentClasses = active\r\n            ? \"bg-blue-600 text-white\"\r\n            : enabled\r\n              ? \"bg-gray-800/80 text-gray-200 hover:bg-gray-700/80\"\r\n              : \"bg-gray-800/60 text-gray-400 cursor-not-allowed\";\r\n          const borderClasses = idx > 0 ? 
\"border-l border-gray-600/70\" : \"\";\r\n          return (\r\n            <button\r\n              key={tier.value}\r\n              type=\"button\"\r\n              disabled={!enabled}\r\n              onClick={() => enabled && onChange(tier.value)}\r\n              className={`${baseSegment} ${segmentClasses} ${borderClasses}`}\r\n              title={enabled ? tier.help : disabledTitle}\r\n              aria-pressed={active}\r\n            >\r\n              {tier.label}\r\n            </button>\r\n          );\r\n        })}\r\n      </div>\r\n    </div>\r\n  );\r\n}\r\n"
  },
  {
    "path": "frontend/src/components/StaticDrawingOverlay.tsx",
    "content": "import { useOverlays } from \"../hooks/useOverlays\";\r\n\r\n/**\r\n * A simple overlay that assumes its parent container is correctly sized\r\n * with the same aspect ratio as the video feed. It uses a normalized\r\n * (0-1) coordinate system.\r\n */\r\nexport default function StaticDrawingOverlay() {\r\n  const overlays = useOverlays();\r\n\r\n  return (\r\n    <svg\r\n      className=\"absolute inset-0 w-full h-full pointer-events-none z-10\"\r\n      viewBox=\"0 0 1 1\"\r\n      preserveAspectRatio=\"none\"\r\n    >\r\n      {overlays.map((o, i) =>\r\n        o.type === \"rect\" ? (\r\n          <rect\r\n            key={i}\r\n            x={o.coords[0]}\r\n            y={o.coords[1]}\r\n            width={o.coords[2] - o.coords[0]}\r\n            height={o.coords[3] - o.coords[1]}\r\n            fill=\"none\"\r\n            stroke={o.color || \"lime\"}\r\n            strokeWidth=\"0.005\"\r\n          />\r\n        ) : null\r\n      )}\r\n    </svg>\r\n  );\r\n}\r\n"
  },
  {
    "path": "frontend/src/components/VideoFeed.tsx",
    "content": "export function VideoFeed({ src }: { src: string }) {\r\n  return (\r\n    <img\r\n      src={src}\r\n      alt=\"Drone video feed\"\r\n      className=\"absolute inset-0 w-full h-full object-contain\"\r\n    />\r\n  );\r\n}\r\n"
  },
  {
    "path": "frontend/src/hooks/useControls.ts",
    "content": "import { useCallback, useEffect, useRef, useState } from \"react\";\n\n/* ─────────────────────────────────────────────────────────── */\n/*  Shared types                                               */\nexport type ControlMode = \"inc\" | \"abs\" | \"mouse\";\n\nexport interface Axes {\n  throttle: number;  // -1 … +1  (down / up)\n  yaw:      number;  // -1 … +1  (left / right)\n  pitch:    number;  // -1 … +1  (back / fwd)\n  roll:     number;  // -1 … +1  (left / right)\n}\n\nexport interface CommandCapabilities {\n  takeoff: boolean;\n  land: boolean;\n  estop: boolean;\n  camera_tilt: boolean;\n  camera_switch: boolean;\n  speed_control: boolean;\n}\n\nexport type SpeedTier = 0 | 1 | 2;\n/* ─────────────────────────────────────────────────────────── */\n\nexport function useControls() {\n  const API_BASE_URL = \"http://localhost:8000\";\n\n  /* ------- state refs (mutable) ------- */\n  const axesRef = useRef<Axes>({ throttle: 0, yaw: 0, pitch: 0, roll: 0 });\n  const modeRef = useRef<ControlMode>(\"inc\");\n\n  /* ------- NEW: websocket ref & lifecycle ------- */\n  const ws = useRef<WebSocket | null>(null);\n  \n  /* ------- Plugin state (event-driven) ------- */\n  const pluginRunningRef = useRef<boolean>(false);\n  const stoppedPluginOnceRef = useRef<boolean>(false);    // rate-limit stop calls per burst\n\n  // Open WS once on mount, close on unmount\n  useEffect(() => {\n    ws.current = new WebSocket(\"ws://localhost:8000/ws\");\n    return () => {\n      ws.current?.close();\n      ws.current = null;\n    };\n  }, []);\n\n  // Listen for plugin start/stop events (dispatched from usePlugins and auto-stop)\n  useEffect(() => {\n    const onStart = () => { pluginRunningRef.current = true; };\n    const onStop  = () => { pluginRunningRef.current = false; stoppedPluginOnceRef.current = false; };\n    window.addEventListener('plugin:running', onStart as EventListener);\n    window.addEventListener('plugin:stopped', onStop as 
EventListener);\n    return () => {\n      window.removeEventListener('plugin:running', onStart as EventListener);\n      window.removeEventListener('plugin:stopped', onStop as EventListener);\n    };\n  }, []);\n\n  /* ------- state that triggers re-renders ------- */\n  const [axes,  setAxes]  = useState<Axes>(axesRef.current);\n  const [mode,  setModeSt] = useState<ControlMode>(\"inc\");\n  const [gamepadConnected, setGamepadConnected] = useState<boolean>(false);\n  const [droneType, setDroneType] = useState<string>(\"unknown\");\n  const [commandCapabilities, setCommandCapabilities] = useState<CommandCapabilities>({\n    takeoff: true,\n    land: true,\n    estop: true,\n    camera_tilt: false,\n    camera_switch: false,\n    speed_control: false,\n  });\n  const [speedTier, setSpeedTierSt] = useState<SpeedTier>(2);\n\n  // Track previous gamepad status to avoid spam\n  const prevGamepadStatus = useRef<boolean>(false);\n\n  /* make setMode update both the ref (for hooks) and the state (for UI) */\n  const setMode = useCallback((m: ControlMode) => {\n    modeRef.current = m;\n    setModeSt(m);\n  }, []);\n\n  useEffect(() => {\n    let cancelled = false;\n\n    const fetchCapabilities = async () => {\n      try {\n        const response = await fetch(`${API_BASE_URL}/capabilities`);\n        if (!response.ok) return;\n        const data = await response.json();\n        if (cancelled) return;\n\n        setDroneType(data?.drone_type ?? 
\"unknown\");\n        setCommandCapabilities({\n          takeoff: Boolean(data?.commands?.takeoff),\n          land: Boolean(data?.commands?.land),\n          estop: Boolean(data?.commands?.estop),\n          camera_tilt: Boolean(data?.commands?.camera_tilt),\n          camera_switch: Boolean(data?.commands?.camera_switch),\n          speed_control: Boolean(data?.commands?.speed_control),\n        });\n      } catch {\n        // Keep optimistic defaults when the backend is temporarily unavailable.\n      }\n    };\n\n    fetchCapabilities();\n    return () => {\n      cancelled = true;\n    };\n  }, []);\n\n  /* --------------- gamepad detection --------------- */\n  useEffect(() => {\n    const checkGamepad = () => {\n      const gamepads = navigator.getGamepads();\n      const hasGamepad = Array.from(gamepads).some(gp => gp !== null && gp.connected);\n      \n      // Only log when status changes\n      if (hasGamepad !== prevGamepadStatus.current) {\n        console.log(`Gamepad ${hasGamepad ? 
'connected' : 'disconnected'}`);\n        if (hasGamepad) {\n          const connectedGamepads = Array.from(gamepads).filter(gp => gp !== null);\n          console.log('Connected gamepads:', connectedGamepads.map(gp => gp?.id));\n        }\n        prevGamepadStatus.current = hasGamepad;\n      }\n      \n      setGamepadConnected(hasGamepad);\n    };\n\n    // Check initially\n    checkGamepad();\n\n    // Listen for gamepad connect/disconnect events\n    const handleGamepadConnected = () => checkGamepad();\n    const handleGamepadDisconnected = () => checkGamepad();\n\n    window.addEventListener('gamepadconnected', handleGamepadConnected);\n    window.addEventListener('gamepaddisconnected', handleGamepadDisconnected);\n\n    // Also poll periodically since some browsers don't fire events reliably\n    const pollInterval = setInterval(checkGamepad, 1000);\n\n    return () => {\n      window.removeEventListener('gamepadconnected', handleGamepadConnected);\n      window.removeEventListener('gamepaddisconnected', handleGamepadDisconnected);\n      clearInterval(pollInterval);\n    };\n  }, []);\n\n  /* --------------- keyboard (incremental) --------------- */\n  useEffect(() => {\n    if (modeRef.current !== \"inc\") return;           // ignore when in abs mode\n\n    const map: Record<string, { axis: keyof Axes; dir: -1 | 1 }> = {\n      w:          { axis: \"pitch\",    dir: +1 },\n      s:          { axis: \"pitch\",    dir: -1 },\n      a:          { axis: \"roll\",     dir: -1 },\n      d:          { axis: \"roll\",     dir: +1 },\n      ArrowUp:    { axis: \"throttle\", dir: +1 },\n      ArrowDown:  { axis: \"throttle\", dir: -1 },\n      ArrowLeft:  { axis: \"yaw\",      dir: -1 },\n      ArrowRight: { axis: \"yaw\",      dir: +1 },\n    };\n\n    const down = (e: KeyboardEvent) => {\n      const m = map[e.key];\n      if (!m) return;\n      axesRef.current[m.axis] = m.dir;\n      setAxes({ ...axesRef.current });\n\n      // If any plugin is running and user 
provides input → stop plugin once\n      maybeStopPluginOnUserInput();\n    };\n\n    const up = (e: KeyboardEvent) => {\n      const m = map[e.key];\n      if (!m) return;\n      if (axesRef.current[m.axis] === m.dir) {\n        axesRef.current[m.axis] = 0;\n        setAxes({ ...axesRef.current });\n      }\n    };\n\n    window.addEventListener(\"keydown\", down);\n    window.addEventListener(\"keyup\",   up);\n    return () => {\n      window.removeEventListener(\"keydown\", down);\n      window.removeEventListener(\"keyup\",   up);\n    };\n  }, [mode]);      // re-run effect when mode flips\n\n  /* --------------- Xbox-360 game-pad (absolute) --------- */\n  useEffect(() => {\n    if (modeRef.current !== \"abs\") return;          // ignore when in inc mode\n\n    const DEADZONE = 0.15; // Adjust this value as needed\n    let raf = 0;\n\n    const applyDeadzone = (value: number): number => {\n      return Math.abs(value) < DEADZONE ? 0 : value;\n    };\n\n    const poll = () => {\n      const gp = navigator.getGamepads()[0] as Gamepad | null;\n      if (gp) {\n        /* Xbox-360 / Chrome mapping -------------------------------------\n           Left  stick: axes[0] (X)  axes[1] (Y)\n           Right stick: axes[2] (X)  axes[3] (Y)\n           Positive Y is *down*  → invert for throttle / pitch\n        ------------------------------------------------------------------*/\n        axesRef.current.roll     = applyDeadzone(gp.axes[0]);     // left X\n        axesRef.current.pitch    = applyDeadzone(-gp.axes[1]);    // left Y  (forward == -1)\n        axesRef.current.yaw      = applyDeadzone(gp.axes[2]);     // right X\n        axesRef.current.throttle = applyDeadzone(-gp.axes[3]);    // right Y (up == +1)\n        setAxes({ ...axesRef.current });\n\n        // Stop plugin when the first non-zero user sticks are detected\n        if (Math.abs(axesRef.current.roll) > 0 || Math.abs(axesRef.current.pitch) > 0 ||\n            Math.abs(axesRef.current.yaw)  > 0 || 
Math.abs(axesRef.current.throttle) > 0) {\n          maybeStopPluginOnUserInput();\n        }\n      }\n      raf = requestAnimationFrame(poll);\n    };\n    raf = requestAnimationFrame(poll);\n    return () => cancelAnimationFrame(raf);\n  }, [mode]);\n\n  /* ---------- TrackPoint / Mouse (relative) ------------------ */\n  useEffect(() => {\n    if (modeRef.current !== \"mouse\") return;\n\n    const sensitivity = 0.015;      // tune to taste for TrackPoint\n    const decay       = 0.90;       // spring-back to centre when idle\n    const axesState = axesRef.current;\n    let rafId = 0;\n\n    /* convert mouse deltas → roll / pitch  (-y = pitch forward) */\n    const onMove = (e: MouseEvent) => {\n      axesRef.current.roll  = Math.max(-1, Math.min(1, axesRef.current.roll  +  e.movementX * sensitivity));\n      axesRef.current.pitch = Math.max(-1, Math.min(1, axesRef.current.pitch - e.movementY * sensitivity));\n      setAxes({ ...axesRef.current });\n      \n      // Stop plugin when user moves mouse/trackpoint\n      if (Math.abs(e.movementX) > 0 || Math.abs(e.movementY) > 0) {\n        maybeStopPluginOnUserInput();\n      }\n    };\n\n    /* gentle recentre so sticks don't stay deflected forever */\n    const tick = () => {\n      axesRef.current.roll  *= decay;\n      axesRef.current.pitch *= decay;\n      if (Math.abs(axesRef.current.roll)  < 0.001) axesRef.current.roll  = 0;\n      if (Math.abs(axesRef.current.pitch) < 0.001) axesRef.current.pitch = 0;\n      setAxes({ ...axesRef.current });\n      rafId = requestAnimationFrame(tick);\n    };\n\n    /* when we lose pointer-lock (Esc, window blur, etc.) 
fall back to keyboard */\n    const onLockChange = () => {\n      if (document.pointerLockElement === null) {\n        setMode(\"inc\");\n      }\n    };\n\n    window.addEventListener(\"mousemove\",        onMove);\n    document.addEventListener(\"pointerlockchange\", onLockChange);\n    rafId = requestAnimationFrame(tick);\n\n    return () => {\n      window.removeEventListener(\"mousemove\", onMove);\n      document.removeEventListener(\"pointerlockchange\", onLockChange);\n      cancelAnimationFrame(rafId);\n      axesState.roll = 0;\n      axesState.pitch = 0;\n      setAxes((prev) => ({ ...prev, roll: 0, pitch: 0 }));\n    };\n  }, [mode, setMode]);\n\n  /* ----------- network TX 30 Hz (treat mouse as \"abs\") ------- */\n  useEffect(() => {\n    const interval = setInterval(() => {\n      if (ws.current?.readyState !== WebSocket.OPEN) return;\n\n      // COMPLETELY suppress all transmissions when any plugin is running\n      // This prevents frontend from overwriting plugin commands\n      if (pluginRunningRef.current) return;\n\n      ws.current.send(JSON.stringify({\n        type: \"axes\",\n        mode: modeRef.current,\n        ...axesRef.current,\n      }));\n    }, 1000 / 30);\n    return () => clearInterval(interval);\n  }, []);\n\n  /* ------------- helpers / commands ------------------------- */\n  const sendCommand = (type: string, payload = {}) => {\n    if (ws.current?.readyState === WebSocket.OPEN) {\n      ws.current.send(JSON.stringify({ type, ...payload }));\n    } else {\n      console.warn(\"Cannot send command, WebSocket not open.\");\n    }\n  };\n\n  const maybeStopPluginOnUserInput = async () => {\n    if (!pluginRunningRef.current || stoppedPluginOnceRef.current) return;\n    try {\n      // Stop all running plugins for simplicity\n      const res = await fetch(`${API_BASE_URL}/plugins`);\n      // If plugins are disabled on backend, nothing to stop.\n      if (res.status === 404) return;\n      if (!res.ok) return;\n      const data 
= await res.json();\n      const running: string[] = data?.running ?? [];\n      await Promise.all(running.map((name) => fetch(`${API_BASE_URL}/plugins/${name}/stop`, { method: 'POST' })));\n      stoppedPluginOnceRef.current = true;\n      pluginRunningRef.current = false;\n      // notify UI to flip OFF without polling\n      window.dispatchEvent(new CustomEvent('plugin:stopped'));\n    } catch {\n      // Ignore errors when stopping plugins (fire-and-forget)\n    }\n  };\n\n  const takeOff = () => sendCommand(\"takeoff\");\n  const land    = () => sendCommand(\"land\");\n  const emergencyStop = () => sendCommand(\"estop\");\n  const setSpeedIndex = useCallback((speedIndex: number) => {\n    sendCommand(\"set_speed_index\", { speed_index: speedIndex });\n  }, []);\n  const setSpeedTier = useCallback((tier: SpeedTier) => {\n    setSpeedTierSt(tier);\n    setSpeedIndex(tier);\n  }, [setSpeedIndex]);\n\n  /* ------------- hook return ------------------------------- */\n  return {\n    axes,\n    mode,\n    setMode,\n    gamepadConnected,\n    droneType,\n    commandCapabilities,\n    takeOff,\n    land,\n    emergencyStop,\n    setSpeedIndex,\n    speedTier,\n    setSpeedTier,\n  };\n}\n"
  },
  {
    "path": "frontend/src/hooks/useOverlays.ts",
    "content": "import { useState, useEffect, useRef } from 'react';\r\n\r\nconst OVERLAY_WS_URL = 'ws://localhost:8000/ws/overlays';\r\n\r\nexport interface OverlayObject {\r\n  type: 'rect';\r\n  coords: [number, number, number, number]; // [x1, y1, x2, y2]\r\n  color: string;\r\n}\r\n\r\nexport function useOverlays() {\r\n  const [overlays, setOverlays] = useState<OverlayObject[]>([]);\r\n  const ws = useRef<WebSocket | null>(null);\r\n\r\n  useEffect(() => {\r\n    const connect = () => {\r\n      ws.current = new WebSocket(OVERLAY_WS_URL);\r\n      \r\n      ws.current.onopen = () => console.log('%c[Overlays] WebSocket Connected', 'color: green');\r\n      ws.current.onmessage = (event) => {\r\n        try {\r\n          const data = JSON.parse(event.data);\r\n          if (Array.isArray(data)) {\r\n            setOverlays(data);\r\n          }\r\n        } catch (e) {\r\n          console.error('[Overlays] Failed to parse data:', e);\r\n        }\r\n      };\r\n\r\n      ws.current.onclose = () => {\r\n        console.log('%c[Overlays] WebSocket Disconnected. Reconnecting...', 'color: orange');\r\n        setTimeout(connect, 3000); \r\n      };\r\n\r\n      ws.current.onerror = (err) => {\r\n        console.error('%c[Overlays] WebSocket Error', 'color: red', err);\r\n        ws.current?.close();\r\n      };\r\n    };\r\n\r\n    connect();\r\n\r\n    return () => {\r\n      if (ws.current) {\r\n        ws.current.onclose = null; \r\n        ws.current.close();\r\n      }\r\n    };\r\n  }, []);\r\n\r\n  return overlays;\r\n}\r\n"
  },
  {
    "path": "frontend/src/hooks/usePlugins.ts",
    "content": "import { useState, useEffect, useCallback } from 'react';\r\n\r\n// Assuming your backend runs on port 8000\r\nconst API_BASE_URL = 'http://localhost:8000';\r\n\r\ninterface PluginState {\r\n  available: string[];\r\n  running: string[];\r\n}\r\n\r\nexport function usePlugins() {\r\n  const [pluginsEnabled, setPluginsEnabled] = useState<boolean>(false);\r\n  const [availablePlugins, setAvailablePlugins] = useState<string[]>([]);\r\n  const [runningPlugins, setRunningPlugins] = useState<Set<string>>(new Set());\r\n  const [isLoading, setIsLoading] = useState<boolean>(true);\r\n  const [error, setError] = useState<string | null>(null);\r\n\r\n  // Function to fetch the current state of plugins from the backend\r\n  const fetchPluginState = useCallback(async () => {\r\n    try {\r\n      const response = await fetch(`${API_BASE_URL}/plugins`);\r\n      if (!response.ok) {\r\n        // Backend returns 404 when plugins are disabled.\r\n        if (response.status === 404) {\r\n          setPluginsEnabled(false);\r\n          setAvailablePlugins([]);\r\n          setRunningPlugins(new Set());\r\n          setError(null);\r\n          return;\r\n        }\r\n        throw new Error('Failed to fetch plugin status');\r\n      }\r\n      setPluginsEnabled(true);\r\n      const data: PluginState = await response.json();\r\n      setAvailablePlugins(data.available);\r\n      setRunningPlugins(new Set(data.running));\r\n    } catch (e) {\r\n      setError(e instanceof Error ? e.message : 'An unknown error occurred');\r\n    } finally {\r\n      setIsLoading(false);\r\n    }\r\n  }, []);\r\n\r\n  // Function to toggle a plugin on or off\r\n  const togglePlugin = useCallback(async (name: string) => {\r\n    if (!pluginsEnabled) return;\r\n    const isRunning = runningPlugins.has(name);\r\n    const endpoint = isRunning ? 
'stop' : 'start';\r\n\r\n    try {\r\n      const response = await fetch(`${API_BASE_URL}/plugins/${name}/${endpoint}`, {\r\n        method: 'POST',\r\n      });\r\n      if (!response.ok) {\r\n        throw new Error(`Failed to ${endpoint} plugin ${name}`);\r\n      }\r\n      // After toggling, refresh the state to ensure UI is in sync\r\n      await fetchPluginState();\r\n    } catch (e) {\r\n      setError(e instanceof Error ? e.message : 'An unknown error occurred');\r\n    }\r\n  }, [pluginsEnabled, runningPlugins, fetchPluginState]);\r\n\r\n  // Fetch the initial state when the hook is first used\r\n  useEffect(() => {\r\n    fetchPluginState();\r\n    return () => {};\r\n  }, [fetchPluginState]);\r\n\r\n  // Keep UI in sync when other parts of the app start/stop plugins\r\n  useEffect(() => {\r\n    const handler = () => { fetchPluginState(); };\r\n    window.addEventListener('plugin:running', handler as EventListener);\r\n    window.addEventListener('plugin:stopped', handler as EventListener);\r\n    return () => {\r\n      window.removeEventListener('plugin:running', handler as EventListener);\r\n      window.removeEventListener('plugin:stopped', handler as EventListener);\r\n    };\r\n  }, [fetchPluginState]);\r\n\r\n  return {\r\n    pluginsEnabled,\r\n    availablePlugins,\r\n    runningPlugins,\r\n    isLoading,\r\n    error,\r\n    togglePlugin,\r\n  };\r\n}\r\n"
  },
  {
    "path": "frontend/src/hooks/useVideoSizing.ts",
    "content": "import { useState, useLayoutEffect, useCallback } from \"react\";\r\n\r\nconst ZERO_RECT = { x: 0, y: 0, width: 0, height: 0, top: 0, left: 0, right: 0, bottom: 0 };\r\n\r\nexport function useVideoSizing() {\r\n  const [videoRect, setVideoRect] = useState(ZERO_RECT);\r\n  const [containerRect, setContainerRect] = useState(ZERO_RECT);\r\n\r\n  const recalculate = useCallback(() => {\r\n    const img = document.querySelector('img[alt=\"Drone video feed\"]') as HTMLImageElement;\r\n    if (!img || !img.parentElement) return;\r\n    if (img.naturalWidth === 0 || img.naturalHeight === 0) return;\r\n\r\n    const container = img.parentElement;\r\n    if (container.clientWidth === 0 || container.clientHeight === 0) return;\r\n    const containerAR = container.clientWidth / container.clientHeight;\r\n    const videoAR = img.naturalWidth / img.naturalHeight;\r\n\r\n    let newWidth, newHeight, newX, newY;\r\n\r\n    if (containerAR > videoAR) {\r\n      // Container is wider than video (letterboxed)\r\n      newHeight = container.clientHeight;\r\n      newWidth = newHeight * videoAR;\r\n      newX = (container.clientWidth - newWidth) / 2;\r\n      newY = 0;\r\n    } else {\r\n      // Container is taller than video (pillarboxed)\r\n      newWidth = container.clientWidth;\r\n      newHeight = newWidth / videoAR;\r\n      newY = (container.clientHeight - newHeight) / 2;\r\n      newX = 0;\r\n    }\r\n    \r\n    setVideoRect({\r\n      x: newX,\r\n      y: newY,\r\n      width: newWidth,\r\n      height: newHeight,\r\n      top: newY,\r\n      left: newX,\r\n      right: newX + newWidth,\r\n      bottom: newY + newHeight,\r\n    });\r\n    setContainerRect(container.getBoundingClientRect());\r\n  }, []);\r\n\r\n  useLayoutEffect(() => {\r\n    const img = document.querySelector('img[alt=\"Drone video feed\"]') as HTMLImageElement;\r\n    if (!img) return;\r\n\r\n    const ro = new ResizeObserver(recalculate);\r\n    ro.observe(img.parentElement!);\r\n\r\n    
img.addEventListener('load', recalculate);\r\n    \r\n    // Initial calculation in case image is already loaded\r\n    if (img.complete && img.naturalWidth > 0) {\r\n      recalculate();\r\n    }\r\n\r\n    return () => {\r\n      ro.disconnect();\r\n      img.removeEventListener('load', recalculate);\r\n    };\r\n  }, [recalculate]);\r\n\r\n  return { videoRect, containerRect };\r\n}\r\n"
  },
  {
    "path": "frontend/src/index.css",
    "content": "@import \"tailwindcss\";\n\n/* tech-y heading font */\n@import url('https://fonts.googleapis.com/css2?family=Rajdhani:wght@500;700&display=swap');\n\n/* global dark theme */\nhtml, body {\n  margin: 0;\n  height: 100%;\n  background-color: #1e1e1e;   /* VS Code / Cursor dark */\n  color: #d4d4d4;\n  font-family: system-ui, sans-serif;\n}\n\n/* make the Heading font available as `font-heading` in Tailwind */\n@layer utilities {\n  .font-heading { font-family: 'Rajdhani', ui-sans-serif, sans-serif; }\n}\n\n:root {\n  font-family: system-ui, Avenir, Helvetica, Arial, sans-serif;\n  line-height: 1.5;\n  font-weight: 400;\n\n  color-scheme: light dark;\n  color: rgba(255, 255, 255, 0.87);\n  background-color: #242424;\n\n  font-synthesis: none;\n  text-rendering: optimizeLegibility;\n  -webkit-font-smoothing: antialiased;\n  -moz-osx-font-smoothing: grayscale;\n}\n\na {\n  font-weight: 500;\n  color: #646cff;\n  text-decoration: inherit;\n}\na:hover {\n  color: #535bf2;\n}\n\nbody {\n  margin: 0;\n  /* remove flex-box centring so children can use the full width */\n  min-width: 320px;\n  min-height: 100vh;\n}\n\n/* make sure the React root really spans the viewport */\n#root {\n  width: 100%;\n  min-height: 100vh;\n}\n\nh1 {\n  font-size: 3.2em;\n  line-height: 1.1;\n}\n\nbutton {\n  border-radius: 8px;\n  border: 1px solid transparent;\n  padding: 0.6em 1.2em;\n  font-size: 1em;\n  font-weight: 500;\n  font-family: inherit;\n  /* background-color: #1a1a1a; */\n  cursor: pointer;\n  transition: border-color 0.25s;\n}\nbutton:hover {\n  border-color: #646cff;\n}\nbutton:focus,\nbutton:focus-visible {\n  outline: 4px auto -webkit-focus-ring-color;\n}\n\n@media (prefers-color-scheme: light) {\n  :root {\n    color: #213547;\n    background-color: #ffffff;\n  }\n  a:hover {\n    color: #747bff;\n  }\n  /* background-color: #f9f9f9; */\n}\n"
  },
  {
    "path": "frontend/src/main.tsx",
    "content": "import { StrictMode } from 'react'\nimport { createRoot } from 'react-dom/client'\nimport './index.css'\nimport App from './App.tsx'\nimport { BrowserRouter, Routes, Route } from \"react-router-dom\";\nimport TestPage from \"./pages/TestPage\";\n\ncreateRoot(document.getElementById('root')!).render(\n  <StrictMode>\n    <BrowserRouter>\n      <Routes>\n        <Route path=\"/\" element={<App />} />\n        <Route path=\"/test\" element={<TestPage />} />\n      </Routes>\n    </BrowserRouter>\n  </StrictMode>,\n)\n"
  },
  {
    "path": "frontend/src/pages/TestPage.tsx",
    "content": "import StaticDrawingOverlay from \"../components/StaticDrawingOverlay\";\r\nimport { VideoFeed } from \"../components/VideoFeed\";\r\n\r\nexport default function TestPage() {\r\n  return (\r\n    <div className=\"min-h-screen w-full flex flex-col items-center justify-center bg-gray-900\">\r\n      <h1 className=\"text-2xl font-bold text-white mb-4\">\r\n        Static Video Feed (640×480)\r\n      </h1>\r\n      <div\r\n        className=\"relative border-2 border-dashed border-gray-600\"\r\n        style={{ width: 640, height: 480 }}\r\n      >\r\n        <VideoFeed src=\"http://localhost:8000/mjpeg\" />\r\n        <StaticDrawingOverlay />\r\n      </div>\r\n    </div>\r\n  );\r\n}\r\n"
  },
  {
    "path": "frontend/src/vite-env.d.ts",
    "content": "/// <reference types=\"vite/client\" />\n"
  },
  {
    "path": "frontend/tailwind.config.ts",
    "content": "import type { Config } from 'tailwindcss';\r\n\r\nexport default {\r\n  content: [\r\n    './index.html',\r\n    './src/**/*.{js,ts,jsx,tsx}',\r\n  ],\r\n  theme: {\r\n    extend: {\r\n      fontFamily: {\r\n        heading: ['Rajdhani', 'ui-sans-serif', 'sans-serif'],\r\n      },\r\n    },\r\n  },\r\n  plugins: [],\r\n} satisfies Config;\r\n"
  },
  {
    "path": "frontend/tsconfig.app.json",
    "content": "{\n  \"compilerOptions\": {\n    \"tsBuildInfoFile\": \"./node_modules/.tmp/tsconfig.app.tsbuildinfo\",\n    \"target\": \"ES2020\",\n    \"useDefineForClassFields\": true,\n    \"lib\": [\"ES2020\", \"DOM\", \"DOM.Iterable\"],\n    \"module\": \"ESNext\",\n    \"skipLibCheck\": true,\n\n    /* Bundler mode */\n    \"moduleResolution\": \"bundler\",\n    \"allowImportingTsExtensions\": true,\n    \"verbatimModuleSyntax\": true,\n    \"moduleDetection\": \"force\",\n    \"noEmit\": true,\n    \"jsx\": \"react-jsx\",\n\n    /* Linting */\n    \"strict\": true,\n    \"noUnusedLocals\": true,\n    \"noUnusedParameters\": true,\n    \"erasableSyntaxOnly\": true,\n    \"noFallthroughCasesInSwitch\": true,\n    \"noUncheckedSideEffectImports\": true\n  },\n  \"include\": [\"src\"]\n}\n"
  },
  {
    "path": "frontend/tsconfig.json",
    "content": "{\n  \"files\": [],\n  \"references\": [\n    { \"path\": \"./tsconfig.app.json\" },\n    { \"path\": \"./tsconfig.node.json\" }\n  ]\n}\n"
  },
  {
    "path": "frontend/tsconfig.node.json",
    "content": "{\n  \"compilerOptions\": {\n    \"tsBuildInfoFile\": \"./node_modules/.tmp/tsconfig.node.tsbuildinfo\",\n    \"target\": \"ES2022\",\n    \"lib\": [\"ES2023\"],\n    \"module\": \"ESNext\",\n    \"skipLibCheck\": true,\n\n    /* Bundler mode */\n    \"moduleResolution\": \"bundler\",\n    \"allowImportingTsExtensions\": true,\n    \"verbatimModuleSyntax\": true,\n    \"moduleDetection\": \"force\",\n    \"noEmit\": true,\n\n    /* Linting */\n    \"strict\": true,\n    \"noUnusedLocals\": true,\n    \"noUnusedParameters\": true,\n    \"erasableSyntaxOnly\": true,\n    \"noFallthroughCasesInSwitch\": true,\n    \"noUncheckedSideEffectImports\": true\n  },\n  \"include\": [\"vite.config.ts\"]\n}\n"
  },
  {
    "path": "frontend/vite.config.ts",
    "content": "import { defineConfig } from 'vite'\nimport react from '@vitejs/plugin-react'\n\n// https://vite.dev/config/\nexport default defineConfig({\n  plugins: [react()],\n})\n"
  }
]