Repository: marshallrichards/turbodrone Branch: master Commit: f1247b582345 Files: 102 Total size: 484.0 KB Directory structure: gitextract_1kcujrmi/ ├── .gitignore ├── LICENSE ├── NOTICE ├── README.md ├── backend/ │ ├── control/ │ │ └── strategies.py │ ├── main.py │ ├── models/ │ │ ├── __init__.py │ │ ├── base_rc.py │ │ ├── base_video_model.py │ │ ├── control_profile.py │ │ ├── cooingdv_rc.py │ │ ├── cooingdv_video_model.py │ │ ├── debug_rc.py │ │ ├── s2x_rc.py │ │ ├── s2x_video_model.py │ │ ├── stick_range.py │ │ ├── video_frame.py │ │ ├── wifi_cam_rc.py │ │ ├── wifi_uav_rc.py │ │ └── wifi_uav_video_model.py │ ├── plugins/ │ │ ├── __init__.py │ │ ├── base.py │ │ ├── follow/ │ │ │ ├── __init__.py │ │ │ ├── follow_controller.py │ │ │ ├── follow_plugin.py │ │ │ └── test_yolo.py │ │ ├── manager.py │ │ └── test_yaw_plugin.py │ ├── protocols/ │ │ ├── __init__.py │ │ ├── base_protocol_adapter.py │ │ ├── base_video_protocol.py │ │ ├── cooingdv_jieli_rc_protocol_adapter.py │ │ ├── cooingdv_jieli_video_protocol.py │ │ ├── cooingdv_rc_protocol_adapter.py │ │ ├── cooingdv_video_protocol.py │ │ ├── debug_rc_protocol_adapter.py │ │ ├── debug_video_protocol.py │ │ ├── no_video_protocol.py │ │ ├── s2x_rc_protocol_adapter.py │ │ ├── s2x_video_protocol.py │ │ ├── wifi_cam_rc_protocol_adapter.py │ │ ├── wifi_cam_video_protocol.py │ │ ├── wifi_uav_rc_protocol_adapter.py │ │ └── wifi_uav_video_protocol.py │ ├── receive_video.py │ ├── remote_control.py │ ├── requirements.txt │ ├── services/ │ │ ├── __init__.py │ │ ├── flight_controller.py │ │ └── video_receiver.py │ ├── tests/ │ │ ├── test_cooingdv_jieli_ctp.py │ │ ├── test_s2x_protocol.py │ │ ├── test_s2x_video_protocol.py │ │ └── test_wifi_cam_protocol.py │ ├── utils/ │ │ ├── cooingdv_jieli_ctp.py │ │ ├── dropping_queue.py │ │ ├── logging_config.py │ │ ├── wifi_uav_ack_state.py │ │ ├── wifi_uav_jpeg.py │ │ ├── wifi_uav_packets.py │ │ └── wifi_uav_variants.py │ ├── video_client.py │ ├── views/ │ │ ├── __init__.py │ │ ├── base_video_view.py │ │ ├── cli_rc.py │ │ └── opencv_video_view.py │ └── web_server.py ├── docs/ │ └── research/ │ ├── S2x.md │ ├── cooingdv.md │ ├── wifi_cam.md │ └── wifi_uav.md ├── experimental/ │ ├── README.md │ └── test_e88pro.py └── frontend/ ├── .gitignore ├── eslint.config.js ├── index.html ├── package.json ├── postcss.config.cjs ├── src/ │ ├── App.css │ ├── App.tsx │ ├── components/ │ │ ├── AxisIndicator.tsx │ │ ├── CommandButtons.tsx │ │ ├── ControlSchemeToggle.tsx │ │ ├── ControlsOverlay.tsx │ │ ├── DrawingOverlay.tsx │ │ ├── PluginControls.tsx │ │ ├── SpeedControl.tsx │ │ ├── StaticDrawingOverlay.tsx │ │ └── VideoFeed.tsx │ ├── hooks/ │ │ ├── useControls.ts │ │ ├── useOverlays.ts │ │ ├── usePlugins.ts │ │ └── useVideoSizing.ts │ ├── index.css │ ├── main.tsx │ ├── pages/ │ │ └── TestPage.tsx │ └── vite-env.d.ts ├── tailwind.config.ts ├── tsconfig.app.json ├── tsconfig.json ├── tsconfig.node.json └── vite.config.ts ================================================ FILE CONTENTS ================================================ ================================================ FILE: .gitignore ================================================ # ------------------------ # Python # ------------------------ # Byte‐compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # pytorch models *.pt # Distribution / packaging .Python build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ wheels/ *.egg-info/ .installed.cfg *.egg MANIFEST # Installer logs pip-log.txt 
pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover *.py,cover .hypothesis/ # Translations *.mo *.pot # Django (if ever) local_settings.py db.sqlite3 # Flask instance/ .webassets-cache # Sphinx docs docs/_build/ # PyBuilder target/ # Jupyter .ipynb_checkpoints # IPython profile_default/ ipython_config.py # pyenv .python-version # pipenv / poetry Pipfile.lock poetry.lock # PEP 582 __pypackages__/ # Virtual environments .env .venv/ env/ venv/ ENV/ env.bak/ venv.bak/ # Spyder .spyderproject .spyproject # Rope .ropeproject # mkdocs /site # mypy .mypy_cache/ # pytest .pytest_cache/ # IDEs / editors .vscode/ .idea/ *.iml # OS files .DS_Store Thumbs.db Desktop.ini ehthumbs.db # ------------------------ # Node / Frontend (React/TS/Tailwind) # ------------------------ # dependencies node_modules/ # logs npm-debug.log* yarn-debug.log* yarn-error.log* pnpm-debug.log* # lockfiles (if you prefer not to commit them) # Uncomment if you don't commit lockfiles # package-lock.json # yarn.lock # pnpm-lock.yaml # build outputs build/ dist/ .next/ out/ public/ # caches .cache/ .parcel-cache/ # tailwind .tailwind-cache/ # TypeScript *.tsbuildinfo # environment .env.local .env.development.local .env.test.local .env.production.local # framework-specific (if you ever add) .serverless/ .vuepress/dist/ # assets backend/plugins/follow/assets/* ================================================ FILE: LICENSE ================================================ Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. 
For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright 2025 Marshall Richards Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ================================================ FILE: NOTICE ================================================ turbodrone — reverse-engineered control stack for cheap toy drones Copyright © 2025 Marshall Richards This product includes software developed by Marshall Richards (turbodrone, https://github.com/marshallrichards/turbodrone). Portions of this software are based on, or otherwise incorporate, the turbodrone project. Licensed under the Apache License, Version 2.0. You may obtain a copy of the license in the LICENSE file or at . -------------------------------------------------------------------- You may add your own attribution notices below; this block must remain. -------------------------------------------------------------------- ================================================ FILE: README.md ================================================ # Turbodrone Reverse-engineered API and client for controlling some of the best-selling ~$50 "toy" drones on Amazon from a computer replacing the closed-source mobile apps they come with. 
![S20 Drone Short Clip](docs/images/s20-drone-short-clip-small.gif) ## Introduction Nowadays, there are incredibly cheap "toy" drones available on Amazon that are basically pared-down clones of the early DJI mavic. Only ~$50 to have a 1080p camera for FPV and recording, tiny downward-facing optical flow sensor for position and altitude hold, and a well-tuned flight profile out-of-the-box. The only problem with drones like this is that they run closed-source firmware and are locked to only being controlled by a custom mobile app. I thought it would be cool to free some of these drones from their "jail" and write an API and client for accessing the video feed and sending control commands down to the drone by reverse-engineering how the mobile apps work. That way you can turn a highly capable $50 "toy" drone into something that can be programmatically controlled and used for all sorts of applications and experiments. ## Hardware * WiFi Camera Drones (ranked in order of recommendation): | Brand | Model Number(s) | Compatibility | Implementation | Motor | Purchase Link | Notes | |------------|-----------------|---------------|----------------|------------|-------------------------------------------------------------|-------| | Generic | M10 | Tested | `cooingdv` | Brushless | [Aliexpress](https://www.aliexpress.us/item/3256809708895605.html) | My current favorite. RTSP video stream is very reliable. | | Plegble | PL-515 | Tested | `s2x` | Brushless | [Amazon](https://www.amazon.com/dp/B0D2B25YQQ) | Same airframe as the S2x models and takes the same batteries, but upgraded to brushless motors. | | Karuisrc | K417 | Tested | `wifi_uav` | Brushless | [Amazon](https://www.amazon.com/Electric-Adjustable-AIdrones-Quadcopter-Beginners/dp/B0CYPSJ34H/) | First model to be supported with brushless motors. Build quality is great. | | Loiley | S29 | Tested | `s2x` | Brushed | Can't find link anymore | Great build quality, has servo for tilting camera (_not implemented in API yet_) | | Hiturbo | S20 | Tested | `s2x` | Brushed | [Amazon](https://www.amazon.com/dp/B0BBVZ849G), [Alternate Amazon Listing](https://www.amazon.com/Beginners-Foldable-Quadcopter-Gestures-Batteries/dp/B0D8LK1KJ3) | Original test platform, great build quality | | FlyVista | V88 | Tested | `wifi_uav` | Brushed | [Amazon](https://www.amazon.com/dp/B0D5CXY6X8) | | | ? | D16/GT3/V66 | Tested | `wifi_uav` | Brushed | cheapest on [Aliexpress](https://www.aliexpress.us/item/3256808590663347.html), [Amazon](https://www.amazon.com/AUHIFVAX-Intelligent-Avoidance-Christmas-Thanksgiving/dp/B0FJRVH76T) | 20% smaller DJI Neo clone. Only really good for indoor flight. 
| | Several Brands | E58 | Tested | Unknown | Brushed | [Amazon](https://www.amazon.com/Foldable-Quadcopter-Beginners-Batteries-Waypoints/dp/B09KV8L7WN/) | | | Several Brands | E88 | Suspected | `cooingdv` | Brushed | [Amazon](https://www.amazon.com/Beginners-Foldable-Quadcopter-Real-Time-Rechargable/dp/B0FKNH6Q4T) | | | Several Brands | E88 Pro | Suspected | Unknown | Brushed | [Amazon](https://www.amazon.com/Beginners-Foldable-Quadcopter-Real-Time-Rechargable/dp/B0FKNH6Q4T) | | | Several Brands | E99 | Suspected | `cooingdv` | Brushed | [Amazon](https://www.amazon.com/LJN53-Foldable-Drone-Dual-Cameras/dp/B0DRH9C6RF) | | | Several Brands | E99 Pro | Suspected | Unknown | Brushed | [Amazon](https://www.amazon.com/LJN53-Foldable-Drone-Dual-Cameras/dp/B0DRH9C6RF) | | | Swifsen | A35 | Suspected | Unknown | Brushed | [Amazon](https://a.co/d/bqKvloz) | Very small "toy" drone | | Unknown | LSRC-S1S | Tested | `wifi_uav` | Brushed | | mentioned in another reverse-engineering effort for the WiFi UAV app | | Velcase | S101 | TODO | Unknown | Brushed | [Amazon](https://www.amazon.com/Foldable-Beginners-Quadcopter-Carrying-Positioning/dp/B0CH341G5F/) | lower quality build, smaller battery and props than S29 & S20 | | Redrie | X29 | Tested | `s2x` | Brushed | [Amazon](https://www.amazon.com/Adults-1080P-Foldable-Altitude-Auto-Follow-Batteries/dp/B0CZQKNYL5) | Average build quality | _**Tested** means the drone has been physically run with turbodrone to ensure its compatibility._ _**Suspected** means the APK for the drone appears to use the exact same packages and libraries as one of the tested drones._ _**TODO** means the APK operates with different byte packets and protocols and will have to be added as a new implementation in the API._ * WiFi Dongle ([recommend ALFA Network AWUS036ACM](https://www.amazon.com/Network-AWUS036ACM-Long-Range-Wide-Coverage-High-Sensitivity/dp/B08BJS8FXD) or similar) * Drone broadcasts its own WiFi network so your computer will have to connect to it. * Not strictly necessary because you can use your computer's built-in WiFi radio to connect to the drone's network, but nice to have that way you can stay connected to the internet while flying the drone. ## Setup Move to the `backend` directory ``` cd backend ``` Add venv ``` python -m venv venv source venv/bin/activate ``` Install the dependencies ``` pip install -r requirements.txt ``` _If_ you are on Windows, you will need to manually install the `curses` library. ``` pip install windows-curses ``` Open a new terminal window and install the dependencies for the frontend. _Make sure you have Node.js 20+ installed._ ``` cd frontend npm install ``` Make sure WiFi Dongle is plugged in, drone is turned on, connect to the "BRAND-MODEL-XXXXXX" network before proceeding. Create a `.env` file in the `backend` directory. Add a DRONE_TYPE based on which drone you have: ``` # For "com.vison.macrochip" (s2x) based drones like S20 and S29: DRONE_TYPE=s2x # For WiFi UAV-based drones like V88 and D16: # DRONE_TYPE=wifi_uav # For cooingdv publisher drone apps like RC UFO and KY UFO: # DRONE_TYPE=cooingdv # For KY FPV / Jieli CTP control with first-pass RTP/JPEG video: # DRONE_TYPE=cooingdv_jieli # For WiFi_CAM native UDP apps: # DRONE_TYPE=wifi_cam ``` Launch the backend: ``` uvicorn web_server:app ``` In a separate terminal, launch the frontend web client: ``` npm run dev ``` Open the web client which will be at `http://localhost:5173` and you should see the drone video feed and be able to control it. 
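You can also drive the backend classes directly from Python instead of going through the web client. The snippet below is a minimal, untested sketch (it is not a script in this repo) based on how `main.py` wires up an `s2x` drone; the IP, port, and packet rate are the s2x defaults from `main.py`, and it assumes `FlightController.start()` keeps transmitting the model's state in the background, as the CLI flow in `main.py` suggests.
```
# Hypothetical example: run from the backend/ directory with the venv active.
import time

from models.s2x_rc import S2xDroneModel
from protocols.s2x_rc_protocol_adapter import S2xRCProtocolAdapter
from services.flight_controller import FlightController

model = S2xDroneModel()                              # stick state + one-shot flags
adapter = S2xRCProtocolAdapter("172.16.10.1", 8080)  # default s2x IP / control port
controller = FlightController(model, adapter, 80.0)  # ~80 control packets per second

controller.start()   # begin sending control packets
model.takeoff()      # set the one-shot takeoff flag
time.sleep(5)        # hover for a few seconds
model.land()         # request the land action
time.sleep(2)
controller.stop()
```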
To control via a gaming controller, plug it in and move the sticks around for it to be detected, then press the toggle button to switch between keyboard and controller control. Make sure to fly in a safe area, preferably outdoors with little wind. Note that the "Land" button is _currently_ more of an E-stop button that will stop the drone motors immediately.
## Status Reconnection logic was solved recently. Video feed: solid. Controls: improved greatly via the web client. The implementation for WiFi UAV-based drones could use some fine-tuning. Web client: supports various inputs like keyboard, gamepad controllers, and the ThinkPad TrackPoint mouse (lol). Working on adding support for more drones from [Amazon's best-selling drone list](https://www.amazon.com/best-selling-drones/s?k=best+selling+drones).
## Contribute To contribute support for a new "toy" drone, download the APK the drone uses from a mirror site and start reverse engineering it by decompiling it to Java files with [jadx](https://github.com/skylot/jadx). From there, look at the `AndroidManifest.xml` and see if you can find the classes that are entry points for the app. Look for explicit mentions of ports or protocols such as TCP or UDP. Most of these apps do the actual communication and video feed processing in native C++ libraries embedded inside the APK. You can use a tool like Ghidra to decompile the native libraries and see if you can discover anything useful. For video feed processing, you want to figure out what format it uses (e.g. JPEG, YUV, etc.), whether it uses compression, and what the byte structure looks like when it's reassembling an image frame from packets. Additionally, Wireshark is your friend for understanding the raw data packets being sent and received by the app. Watch this [video](https://x.com/marshallrichrds/status/1923165437698670818) for an overview of the reverse-engineering process used to add support for the Hiturbo S20 drone. Once you have the RC and video protocols and processing figured out, make a small test program and add it to the `experimental` directory if you'd like others to be able to try it out. After that, you can work on an implementation that is compatible with the existing back-end architecture; the `s2x` and `wifi_uav` implementations are examples of this.
## Experimental Support For drones and apps with limited support that are not yet fully integrated into Turbodrone, see the `experimental` directory.
================================================ FILE: backend/control/strategies.py ================================================ from abc import ABC, abstractmethod from typing import Dict class ControlStrategy(ABC): """Maps user intent → raw stick values inside the RC model.""" @abstractmethod def update(self, model, dt: float, axes: Dict[str, float]) -> None: """`axes` always ranges -1 … +1 for throttle, yaw, pitch, roll""" ... # 1. Same behaviour you already have for the CLI keyboard class IncrementalStrategy(ControlStrategy): def update(self, model, dt, axes): # axes entries are -1, 0, +1 (discrete keys) model._update_axes_incremental(dt, axes) # 2. Direct mapping for joysticks / Gamepad API class DirectStrategy(ControlStrategy): """ Absolute mode: normalised stick position is mapped directly to the drone's raw range (optionally with expo curve).
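For example, with expo_factor = 0.5 a half-deflected stick (v = 0.5) is curved to 0.5 ** 1.5 ≈ 0.35 before _scale_normalised maps it into the drone's StickRange.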
""" def update(self, model, dt, axes): expo = getattr(model, "expo_factor", 0.0) for axis, v in axes.items(): # optional expo curve if expo: sign = 1 if v >= 0 else -1 v = sign * (abs(v) ** (1 + expo)) setattr(model, axis, model._scale_normalised(v)) ================================================ FILE: backend/main.py ================================================ #!/usr/bin/env python3 import logging import os from utils.logging_config import bootstrap_runtime, configure_logging bootstrap_runtime() import argparse import threading import queue import signal import sys from models.s2x_rc import S2xDroneModel from protocols.s2x_rc_protocol_adapter import S2xRCProtocolAdapter from protocols.s2x_video_protocol import S2xVideoProtocolAdapter from models.wifi_uav_rc import WifiUavRcModel from protocols.wifi_uav_rc_protocol_adapter import WifiUavRcProtocolAdapter from protocols.wifi_uav_video_protocol import WifiUavVideoProtocolAdapter from models.cooingdv_rc import CooingdvRcModel from protocols.cooingdv_rc_protocol_adapter import CooingdvRcProtocolAdapter from protocols.cooingdv_jieli_rc_protocol_adapter import CooingdvJieliRcProtocolAdapter from protocols.cooingdv_jieli_video_protocol import CooingdvJieliVideoProtocolAdapter from protocols.cooingdv_video_protocol import CooingdvVideoProtocolAdapter from models.wifi_cam_rc import WifiCamRcModel from protocols.wifi_cam_rc_protocol_adapter import WifiCamRcProtocolAdapter from protocols.wifi_cam_video_protocol import WifiCamVideoProtocolAdapter from services.flight_controller import FlightController from services.video_receiver import VideoReceiverService from utils.wifi_uav_variants import WIFI_UAV_DRONE_TYPES, resolve_wifi_uav_variant from views.cli_rc import CLIView from views.opencv_video_view import OpenCVVideoView COOINGDV_DRONE_TYPES = {"cooingdv", "cooingdv_jieli"} configure_logging() logger = logging.getLogger(__name__) def main(): parser = argparse.ArgumentParser(description="Drone teleoperation interface") parser.add_argument("--drone-type", type=str, default="s2x", choices=["s2x", "wifi_uav", "wifi_uav_fld", "wifi_uav_uav", "cooingdv", "cooingdv_jieli", "wifi_cam"], help="Type of drone to control (s2x, wifi_uav, wifi_uav_fld, wifi_uav_uav, cooingdv, cooingdv_jieli, or wifi_cam, default: s2x)") parser.add_argument("--drone-ip", type=str, help="Drone UDP IP address (default: s2x=172.16.10.1, wifi_uav=192.168.169.1, cooingdv=192.168.1.1, wifi_cam=192.168.4.153)") parser.add_argument("--control-port", type=int, help="Drone control port (default: s2x=8080, wifi_uav=8800)") parser.add_argument("--video-port", type=int, help="Drone video port (default: s2x=8888, wifi_uav=8800)") parser.add_argument("--rate", type=float, default=None, help="Control packets per second (default depends on drone type)") parser.add_argument("--with-video", action="store_true", help="Enable video feed") parser.add_argument("--dump-frames", action="store_true", help="Dump video frames to files") parser.add_argument("--dump-packets", action="store_true", help="Dump raw video packets to files") args = parser.parse_args() # Create model, protocol adapter, and controller if args.drone_type == "s2x": logger.info("[main] Using S2X drone implementation.") default_ip = "172.16.10.1" default_control_port = 8080 default_video_port = 8888 default_rate = 80.0 drone_ip = args.drone_ip if args.drone_ip else default_ip control_port = args.control_port if args.control_port else default_control_port video_port = args.video_port if args.video_port else default_video_port 
drone_model = S2xDroneModel() protocol_adapter = S2xRCProtocolAdapter(drone_ip, control_port) video_protocol_adapter_class = S2xVideoProtocolAdapter elif args.drone_type in WIFI_UAV_DRONE_TYPES: wifi_uav_variant = resolve_wifi_uav_variant(args.drone_type) logger.info("[main] Using WiFi UAV drone implementation (variant=%s).", wifi_uav_variant) default_ip = "192.168.169.1" default_control_port = 8800 default_video_port = 8800 # For WifiUAV, control and video often use the same port default_rate = 80.0 drone_ip = args.drone_ip if args.drone_ip else default_ip control_port = args.control_port if args.control_port else default_control_port video_port = args.video_port if args.video_port else default_video_port drone_model = WifiUavRcModel() protocol_adapter = WifiUavRcProtocolAdapter(drone_ip, control_port, variant=wifi_uav_variant) video_protocol_adapter_class = WifiUavVideoProtocolAdapter elif args.drone_type in COOINGDV_DRONE_TYPES: logger.info("[main] Using Cooingdv drone implementation (%s).", args.drone_type) if args.drone_type == "cooingdv_jieli": default_ip = "192.168.8.15" default_control_port = 2228 default_video_port = 0 else: default_ip = "192.168.1.1" default_control_port = 7099 default_video_port = 7070 # RTSP port for video streaming default_rate = 20.0 drone_ip = args.drone_ip if args.drone_ip else default_ip control_port = args.control_port if args.control_port else default_control_port video_port = args.video_port if args.video_port else default_video_port drone_model = CooingdvRcModel() if args.drone_type == "cooingdv_jieli": protocol_adapter = CooingdvJieliRcProtocolAdapter(drone_ip, control_port) video_protocol_adapter_class = CooingdvJieliVideoProtocolAdapter else: protocol_adapter = CooingdvRcProtocolAdapter(drone_ip, control_port) video_protocol_adapter_class = CooingdvVideoProtocolAdapter elif args.drone_type == "wifi_cam": logger.info("[main] Using WiFi_CAM native UDP implementation.") default_ip = "192.168.4.153" default_control_port = 8090 default_video_port = 8080 default_rate = 25.0 drone_ip = args.drone_ip if args.drone_ip else default_ip control_port = args.control_port if args.control_port else default_control_port video_port = args.video_port if args.video_port else default_video_port drone_model = WifiCamRcModel() protocol_adapter = WifiCamRcProtocolAdapter( drone_ip, control_port, command_mode=os.getenv("WIFI_CAM_COMMAND_MODE", "auto"), ) video_protocol_adapter_class = WifiCamVideoProtocolAdapter else: # Should not happen due to choices in argparse logger.error("[main] Unknown drone type: %s", args.drone_type) sys.exit(1) control_rate = args.rate if args.rate is not None else default_rate controller = FlightController(drone_model, protocol_adapter, control_rate) # Start video if requested video_view = None video_receiver = None video_thread = None if args.with_video: # Define the blueprint for the video protocol adapter. # The VideoReceiverService will create and manage the instance. 
if args.drone_type == "s2x": video_protocol_args = { "drone_ip": drone_ip, "control_port": control_port, "video_port": video_port } elif args.drone_type in WIFI_UAV_DRONE_TYPES: video_protocol_args = { "drone_ip": drone_ip, "control_port": control_port, "video_port": video_port, "variant": wifi_uav_variant, } elif args.drone_type in COOINGDV_DRONE_TYPES: video_protocol_args = { "drone_ip": drone_ip, "control_port": control_port, "video_port": video_port } if args.drone_type == "cooingdv_jieli": video_protocol_args["video_port"] = video_port or 6666 elif args.drone_type == "wifi_cam": video_protocol_args = { "drone_ip": drone_ip, "control_port": control_port, "video_port": video_port, } frame_queue = queue.Queue(maxsize=100) video_receiver = VideoReceiverService( video_protocol_adapter_class, # The class to instantiate video_protocol_args, # The arguments for it frame_queue, dump_frames=args.dump_frames, dump_packets=args.dump_packets, rc_adapter=protocol_adapter if args.drone_type in WIFI_UAV_DRONE_TYPES or args.drone_type == "wifi_cam" else None, ) video_view = OpenCVVideoView(frame_queue) # Start video receiver service. It now handles the protocol's lifecycle. video_receiver.start() # Run HighGUI in its own, non-daemon thread video_thread = threading.Thread( target=video_view.run, name="OpenCVVideoThread" ) video_thread.start() # Start controller controller.start() # Set up signal handler for clean shutdown def signal_handler(sig, frame): logger.info("[main] Caught signal, shutting down...") # First stop video components if video_receiver: video_receiver.stop() if video_view: video_view.stop() if video_thread: video_thread.join(timeout=1.0) # Then stop controller controller.stop() # Exit more forcefully, but only if threads haven't cleaned up if video_thread and video_thread.is_alive(): logger.warning("[main] Forcing exit due to lingering threads") os._exit(0) else: # Normal exit sys.exit(0) signal.signal(signal.SIGINT, signal_handler) signal.signal(signal.SIGTERM, signal_handler) # Start CLI view try: view = CLIView(controller) view.run() except KeyboardInterrupt: pass finally: # Clean up in reverse order of creation controller.stop() # Clean up video components if video_view: video_view.stop() if video_receiver: video_receiver.stop() if video_thread: video_thread.join() # wait until the window thread exits if __name__ == "__main__": main() ================================================ FILE: backend/models/__init__.py ================================================ from .s2x_rc import S2xDroneModel __all__ = ['S2xDroneModel'] ================================================ FILE: backend/models/base_rc.py ================================================ from __future__ import annotations from abc import ABC, abstractmethod from typing import ClassVar, Dict, List, Union, Optional from models.stick_range import StickRange from models.control_profile import ControlProfile class BaseRCModel(ABC): """Common logic for every RC model / protocol implementation.""" # Sub-classes **must** override or pass a StickRange explicitly. 
STICK_RANGE: ClassVar[Optional[StickRange]] = None # Generic fall-back presets – drones override with their own set PRESETS: ClassVar[Dict[str, ControlProfile]] = { "normal": ControlProfile("normal", 1.5, 2.5, 0.5, 0.02), "precise": ControlProfile("precise", 1.0, 3.0, 0.3, 0.01), "aggressive": ControlProfile("aggressive", 3.0, 2.0, 1.5, 0.10), } SENSITIVITY_SEQ: ClassVar[List[str]] = ["normal", "precise", "aggressive"] # ----------------------------------------------------------------- def __init__( self, stick_range: Optional[StickRange] = None, profile: Union[str, ControlProfile] = "normal", ) -> None: # ----- enforce STICK_RANGE ----------------------------------- if stick_range is None: stick_range = self.__class__.STICK_RANGE if stick_range is None: raise TypeError( f"{self.__class__.__name__} must define STICK_RANGE " "or pass stick_range to BaseRCModel.__init__()" ) # ------------------------------------------------------------- if isinstance(profile, str): if profile not in self.PRESETS: raise ValueError(f"Unknown profile '{profile}'") profile = self.PRESETS[profile] self.range = stick_range self.min_control_value = float(stick_range.min_val) self.center_value = float(stick_range.mid_val) self.max_control_value = float(stick_range.max_val) self._apply_profile(profile) # axes start centred self.throttle = self.yaw = self.pitch = self.roll = self.center_value # ----- API that concrete models MUST still implement -------------- @abstractmethod def update(self, dt, axes): ... @abstractmethod def takeoff(self): ... @abstractmethod def land(self): ... @abstractmethod def get_control_state(self): ... # ----- shared helpers --------------------------------------------- def set_profile(self, name: str) -> None: if name not in self.PRESETS: raise ValueError(f"Unknown profile '{name}'") self._apply_profile(self.PRESETS[name]) def set_sensitivity(self, preset: int) -> None: idx = preset % len(self.SENSITIVITY_SEQ) self.set_profile(self.SENSITIVITY_SEQ[idx]) def set_strategy(self, strategy) -> None: self.strategy = strategy # ----------------------------------------------------------------- def _apply_profile(self, profile: ControlProfile) -> None: half_range = self.max_control_value - self.center_value full_range = self.max_control_value - self.min_control_value self.profile = profile self.accel_rate = profile.accel_ratio * half_range self.decel_rate = profile.decel_ratio * half_range self.expo_factor = profile.expo_factor self.immediate_response = profile.immediate_ratio * full_range # existing helper (unchanged) def _scale_normalised(self, value: float) -> float: """ Map a normalised [-1 … +1] input to raw protocol units using the model's StickRange. """ if value >= 0: return self.center_value + value * (self.max_control_value - self.center_value) return self.center_value + value * (self.center_value - self.min_control_value) def _update_axes_incremental(self, dt, axes): self.update_axes( dt, axes.get("throttle", 0), axes.get("yaw", 0), axes.get("pitch", 0), axes.get("roll", 0), ) def update_axes(self, dt, throttle_dir, yaw_dir, pitch_dir, roll_dir): """ Apply the shared incremental stick logic used by keyboard-style controls. Pitch and roll get a small immediate boost when the pilot reverses direction so the craft feels less sluggish during lateral movement. 
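For example, with S2xDroneModel's "normal" profile (accel_ratio 2.08) and its 60-128-200 stick range, the resulting accel_rate is roughly 150 raw units per second before the expo term is applied.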
""" for attr, direction, boost_enabled in ( ("throttle", throttle_dir, False), ("yaw", yaw_dir, False), ("pitch", pitch_dir, True), ("roll", roll_dir, True), ): cur = getattr(self, attr) last_dir_attr = f"last_{attr}_dir" last_dir = getattr(self, last_dir_attr, 0) if direction > 0: if boost_enabled and last_dir <= 0: cur += min( self.max_control_value - cur, self.immediate_response, ) dist = self.max_control_value - cur accel = self.accel_rate * dt * ( 1 + self.expo_factor * dist / (self.max_control_value - self.center_value) ) new = min(self.max_control_value, cur + accel) elif direction < 0: if boost_enabled and last_dir >= 0: cur -= min( cur - self.min_control_value, self.immediate_response, ) dist = cur - self.min_control_value accel = self.accel_rate * dt * ( 1 + self.expo_factor * dist / (self.center_value - self.min_control_value) ) new = max(self.min_control_value, cur - accel) else: if cur > self.center_value: dist = cur - self.center_value decel = self.decel_rate * dt * ( 1 + 0.5 * dist / (self.max_control_value - self.center_value) ) new = max(self.center_value, cur - decel) elif cur < self.center_value: dist = self.center_value - cur decel = self.decel_rate * dt * ( 1 + 0.5 * dist / (self.center_value - self.min_control_value) ) new = min(self.center_value, cur + decel) else: new = cur setattr(self, attr, new) setattr(self, last_dir_attr, direction) def _update_axes_direct(self, axes): expo = getattr(self, "expo_factor", 0.0) for attr, value in axes.items(): if expo: # optional expo curve sign = 1 if value >= 0 else -1 value = sign * (abs(value) ** (1 + expo)) setattr(self, attr, self._scale_normalised(value)) ================================================ FILE: backend/models/base_video_model.py ================================================ from abc import ABC, abstractmethod from typing import Optional from models.video_frame import VideoFrame class BaseVideoModel(ABC): """ Stateless interface that turns *chunks* (whatever the protocol thinks a chunk is: a JPEG slice, a whole JPEG, a H.264 NALU …) into complete VideoFrame objects. """ @abstractmethod def ingest_chunk( self, *, stream_id: int | None = None, chunk_id: int | None = None, payload: bytes, ) -> Optional[VideoFrame]: """ Feed one chunk into the model. Parameters ---------- stream_id : int | None Identifier of the logical stream / frame (e.g. frame number). chunk_id : int | None Sequential id of this chunk inside the stream (e.g. slice index). payload : bytes Raw codec payload (JPEG slice, NALU, etc.). Returns ------- Optional[VideoFrame] • VideoFrame when a frame is complete • None if more data is required """ raise NotImplementedError ================================================ FILE: backend/models/control_profile.py ================================================ from dataclasses import dataclass @dataclass(frozen=True) class ControlProfile: """ Profile parameters expressed as *ratios* of the stick range. accel_ratio – fraction of half-range per second decel_ratio – fraction of half-range per second expo_factor – dimension-less (unchanged) immediate_ratio – fraction of full range for one-shot boost """ name: str accel_ratio: float decel_ratio: float expo_factor: float = 0.0 immediate_ratio: float = 0.0 ================================================ FILE: backend/models/cooingdv_rc.py ================================================ """ RC Model for Cooingdv drones (RC UFO, KY UFO, E88 Pro, etc.) 
These drones use the cooingdv publisher's mobile apps and communicate via UDP on port 7099 with RTSP video on port 7070. Key features: - Soft landing (distinct from emergency stop) - Headless mode - Flip/somersault capability - Gyro calibration """ from __future__ import annotations from models.base_rc import BaseRCModel from models.control_profile import ControlProfile from models.stick_range import StickRange from control.strategies import IncrementalStrategy class CooingdvRcModel(BaseRCModel): """ RC model for drones using cooingdv publisher apps (RC UFO, KY UFO, E88 Pro). Protocol details from decompiled apps: - Stick center: 128 (0x80) - Safe operating range: 50-200 (apps use these bounds) - The stock apps send control updates every 50 ms (about 20 Hz) - The CooingDV family has at least two packet variants: short "TC" and extended "GL". This model exposes the shared high-level commands while the protocol adapter maps them onto the correct flag layout. """ # Stick range from decompiled FlyController.java # Center at 128, safe bounds 50-200 (apps clamp to these) STICK_RANGE = StickRange(50, 128, 200) PRESETS = { # name accel decel expo immediate-boost "normal": ControlProfile("normal", 2.0, 4.0, 0.5, 0.02), "precise": ControlProfile("precise", 1.2, 5.0, 0.3, 0.01), "aggressive": ControlProfile("aggressive", 4.0, 3.0, 1.2, 0.10), } def __init__(self, profile: str | ControlProfile = "normal") -> None: super().__init__(stick_range=self.STICK_RANGE, profile=profile) self.strategy = IncrementalStrategy() # One-shot command flags self.takeoff_flag = False self.land_flag = False # Soft landing (0x02) self.stop_flag = False # Emergency stop (0x04) self.flip_flag = False # Flip/somersault (0x08) self.headless_flag = False # Headless mode (0x10) - toggle state self.calibration_flag = False # Gyro calibration (0x80) # Track last motion direction for each axis self.last_throttle_dir = 0 self.last_yaw_dir = 0 self.last_pitch_dir = 0 self.last_roll_dir = 0 # ------------------------------------------------------------------ # # BaseRCModel API # ------------------------------------------------------------------ # def update(self, dt, axes): self.strategy.update(self, dt, axes) def takeoff(self): """Initiate takeoff sequence.""" self.takeoff_flag = True def land(self): """ Initiate soft landing - gradual descent. This is distinct from emergency_stop() which cuts motors immediately. The drone will descend gracefully and land. """ self.land_flag = True def emergency_stop(self): """ Emergency motor cutoff - immediate stop. WARNING: This will cause the drone to fall from the sky! Use land() for normal landing operations. """ self.stop_flag = True def flip(self): """Execute a 360-degree flip/somersault.""" self.flip_flag = True def toggle_headless(self): """Toggle headless mode on/off.""" self.headless_flag = not self.headless_flag def calibrate_gyro(self): """Initiate gyroscope calibration. Drone should be on flat surface.""" self.calibration_flag = True def get_control_state(self): return { "throttle": self.throttle, "yaw": self.yaw, "pitch": self.pitch, "roll": self.roll, "headless": self.headless_flag, } def set_strategy(self, strategy) -> None: self.strategy = strategy ================================================ FILE: backend/models/cooingdv_video_model.py ================================================ """ Video Model for Cooingdv drones. This model is intentionally simple because cooingdv drones use RTSP streaming, which delivers complete frames directly. 
Unlike the S2x and WiFi UAV protocols that require assembling frames from UDP chunks, RTSP gives us ready-to-use frames via OpenCV. The model exists primarily for API consistency with other implementations. Most of the heavy lifting is done in the video protocol adapter. """ from typing import Optional from models.base_video_model import BaseVideoModel from models.video_frame import VideoFrame class CooingdvVideoModel(BaseVideoModel): """ Video model for cooingdv drones using RTSP streaming. Since RTSP provides complete frames, this model simply wraps the incoming data into VideoFrame objects. No assembly or decoding is required - OpenCV handles all of that. """ def __init__(self): self._frame_counter = 0 def ingest_chunk( self, *, stream_id: int | None = None, chunk_id: int | None = None, payload: bytes, ) -> Optional[VideoFrame]: """ Process incoming video data. For RTSP streams, each 'chunk' is actually a complete frame, so we simply wrap it in a VideoFrame and return it immediately. Parameters ---------- stream_id : int | None Frame identifier (from RTSP stream) chunk_id : int | None Not used for RTSP (frames are not chunked) payload : bytes Complete JPEG frame data Returns ------- VideoFrame Always returns a VideoFrame since RTSP delivers complete frames """ self._frame_counter += 1 # Use provided stream_id or auto-increment frame_id = stream_id if stream_id is not None else self._frame_counter return VideoFrame( frame_id=frame_id, data=payload, ) def reset(self) -> None: """Reset the model state.""" self._frame_counter = 0 ================================================ FILE: backend/models/debug_rc.py ================================================ from __future__ import annotations import logging from models.base_rc import BaseRCModel from models.stick_range import StickRange log = logging.getLogger(__name__) class DebugRcModel(BaseRCModel): """Dummy RC model for debugging purposes.""" def __init__(self): # Using a default stick range, can be customized if needed super().__init__(stick_range=StickRange(min_val=0, mid_val=128, max_val=255)) log.info("Debug RC model initialized.") self.throttle = self.yaw = self.pitch = self.roll = 128 def update(self, dt, axes): # This model doesn't need to update state over time, but it must # be implemented to satisfy the abstract base class. pass def get_control_state(self): # Return a dictionary of the current control values. # This is what gets sent to the protocol adapter. 
return { "throttle": self.throttle, "yaw": self.yaw, "pitch": self.pitch, "roll": self.roll, } def set_throttle(self, value: int): log.debug(f"Debug: set_throttle({value})") self.throttle = value def set_yaw(self, value: int): log.debug(f"Debug: set_yaw({value})") self.yaw = value def set_pitch(self, value: int): log.debug(f"Debug: set_pitch({value})") self.pitch = value def set_roll(self, value: int): log.debug(f"Debug: set_roll({value})") self.roll = value def takeoff(self): log.info("Debug: takeoff()") def land(self): log.info("Debug: land()") ================================================ FILE: backend/models/s2x_rc.py ================================================ # S20 & S29 Controller model from models.base_rc import BaseRCModel from models.control_profile import ControlProfile from control.strategies import IncrementalStrategy from models.stick_range import StickRange class S2xDroneModel(BaseRCModel): """Model for S2x protocol drones (S20, S29)""" STICK_RANGE = StickRange(60, 128, 200) # ← tailorable per drone PRESETS = { "normal": ControlProfile("normal", 2.08, 4.86, 0.5, 0.02), "precise": ControlProfile("precise", 1.39, 5.56, 0.3, 0.01), "aggressive": ControlProfile("aggressive", 4.17, 3.89, 1.5, 0.11), } def __init__(self, profile: str | ControlProfile = "normal"): # BaseRCModel handles STICK_RANGE, presets and profile application super().__init__(stick_range=self.STICK_RANGE, profile=profile) self.strategy = IncrementalStrategy() # default # one-shot flags self.takeoff_flag = False self.land_flag = False self.stop_flag = False self.headless_flag = False self.calibration_flag = False # misc self.speed = 20 # matches 0x14 from dumps self.speed_index = 2 # Track last direction for each axis self.last_throttle_dir = 0 self.last_yaw_dir = 0 self.last_pitch_dir = 0 self.last_roll_dir = 0 def update(self, dt, axes): self.strategy.update(self, dt, axes) def takeoff(self): """Set takeoff flag""" self.takeoff_flag = True def land(self): """ Request the stock app's land action. In the HiTurbo app this shares the same one-shot flight bit as takeoff, so the protocol adapter maps the semantic intent rather than a distinct dedicated land bit. """ self.land_flag = True def emergency_stop(self): """Set the emergency stop flag.""" self.stop_flag = True def set_speed_index(self, speed_index: int) -> None: """Set the Macrochip app speed tier: 0=low, 1=medium, 2=full.""" self.speed_index = max(0, min(2, int(speed_index))) def get_control_state(self): """Get current control state as a dict""" return { "throttle": self.throttle, "yaw": self.yaw, "pitch": self.pitch, "roll": self.roll, "speed_index": self.speed_index, } def set_strategy(self, strategy) -> None: self.strategy = strategy ================================================ FILE: backend/models/s2x_video_model.py ================================================ from typing import Dict, Optional from models.video_frame import VideoFrame from models.base_video_model import BaseVideoModel class S2xVideoModel(BaseVideoModel): """ Reassembles sliced JPEG frames used by S2x drones. • Ignores the unreliable "is-last-slice" flag. • Finishes a frame when the frame-id rolls over. 
""" SOI_MARKER = b"\xFF\xD8" EOI_MARKER = b"\xFF\xD9" def __init__(self) -> None: self._cur_fid: Optional[int] = None self._frags: Dict[int, bytes] = {} # ────────────────────────────────────────────────────────── # BaseVideoModel interface # ────────────────────────────────────────────────────────── def ingest_chunk( self, *, stream_id: int | None = None, chunk_id: int | None = None, total_chunks: int | None = None, payload: bytes, ) -> Optional[VideoFrame]: if stream_id is None or chunk_id is None: return None # S2x packets always carry both ids # frame-id changed? -> finish previous frame. Native VNDK emits as soon # as all declared chunks arrive, but this fallback preserves older # rollover-based behavior for captures without a usable chunk count. completed: Optional[VideoFrame] = None if self._cur_fid is None: self._cur_fid = stream_id elif stream_id != self._cur_fid: completed = self._assemble_current() # may be None self._reset(stream_id) # stash slice (ignore duplicates) self._frags.setdefault(chunk_id, payload) if completed is not None: return completed if total_chunks is not None and 0 < total_chunks <= 100: expected = set(range(total_chunks)) if expected.issubset(self._frags): return self._assemble_current(expected) return completed # ────────────────────────────────────────────────────────── # helpers # ────────────────────────────────────────────────────────── def _reset(self, new_fid: Optional[int]) -> None: self._cur_fid = new_fid self._frags.clear() def _assemble_current(self, keys_to_use: set[int] | None = None) -> Optional[VideoFrame]: if not self._frags: return None if keys_to_use is None: keys = sorted(self._frags) complete = len(keys) == keys[-1] - keys[0] + 1 if not complete: missing = (keys[-1] - keys[0] + 1) - len(keys) # print(f"[s2x-model] Dropping frame {self._cur_fid}: {missing} slices missing") return None else: keys = sorted(keys_to_use) data = b"".join(self._frags[k] for k in keys) start = data.find(self.SOI_MARKER) end = data.rfind(self.EOI_MARKER) if start < 0 or end < 0 or end <= start: #print(f"[s2x-model] JPEG markers not found on frame {self._cur_fid}") return None jpeg = data[start : end + len(self.EOI_MARKER)] #print(f"[s2x-model] Frame {self._cur_fid} OK " # f"({len(jpeg)} bytes, {len(keys)} slices)") frame = VideoFrame(self._cur_fid, jpeg, "jpeg") self._reset(None) # prepare for next frame return frame ================================================ FILE: backend/models/stick_range.py ================================================ from dataclasses import dataclass @dataclass(frozen=True) class StickRange: """Drone-specific raw protocol limits.""" min_val: float # e.g. 60 mid_val: float # e.g. 128 max_val: float # e.g. 200 ================================================ FILE: backend/models/video_frame.py ================================================ import time class VideoFrame: """Model representing a video frame from the drone""" def __init__(self, frame_id, data, format_type="jpeg", timestamp=None): self.frame_id = frame_id self.data = data self.format = format_type # jpeg, h264, h265, yuv, etc. self.timestamp = timestamp or time.time() self.width = None self.height = None self.size = len(data) if data else 0 def __repr__(self): return f"VideoFrame(id={self.frame_id}, format={self.format}, size={self.size})" ================================================ FILE: backend/models/wifi_cam_rc.py ================================================ """ RC model for WiFi_CAM native UDP drones. 
The stock app uses the same centered 0x80 stick semantics as the CooingDV family, but sends raw command bytes to a separate UDP command port. """ from __future__ import annotations from control.strategies import IncrementalStrategy from models.base_rc import BaseRCModel from models.control_profile import ControlProfile from models.stick_range import StickRange class WifiCamRcModel(BaseRCModel): """High-level control state for WiFi_CAM drones.""" STICK_RANGE = StickRange(50, 128, 200) PRESETS = { "normal": ControlProfile("normal", 2.0, 4.0, 0.5, 0.02), "precise": ControlProfile("precise", 1.2, 5.0, 0.3, 0.01), "aggressive": ControlProfile("aggressive", 4.0, 3.0, 1.2, 0.10), } def __init__(self, profile: str | ControlProfile = "normal") -> None: super().__init__(stick_range=self.STICK_RANGE, profile=profile) self.strategy = IncrementalStrategy() self.takeoff_flag = False self.land_flag = False self.stop_flag = False self.flip_flag = False self.headless_flag = False self.altitude_hold_flag = False self.calibration_flag = False self.last_throttle_dir = 0 self.last_yaw_dir = 0 self.last_pitch_dir = 0 self.last_roll_dir = 0 def update(self, dt, axes): self.strategy.update(self, dt, axes) def takeoff(self): self.takeoff_flag = True def land(self): self.land_flag = True def emergency_stop(self): self.stop_flag = True def flip(self): self.flip_flag = True def toggle_headless(self): self.headless_flag = not self.headless_flag def toggle_altitude_hold(self): self.altitude_hold_flag = not self.altitude_hold_flag def calibrate_gyro(self): self.calibration_flag = True def get_control_state(self): return { "throttle": self.throttle, "yaw": self.yaw, "pitch": self.pitch, "roll": self.roll, "headless": self.headless_flag, "altitude_hold": self.altitude_hold_flag, } def set_strategy(self, strategy) -> None: self.strategy = strategy ================================================ FILE: backend/models/wifi_uav_rc.py ================================================ from __future__ import annotations from models.base_rc import BaseRCModel from models.control_profile import ControlProfile from models.stick_range import StickRange from control.strategies import IncrementalStrategy class WifiUavRcModel(BaseRCModel): """ RC model for toy drones that use the "WiFi UAV" mobile app (E58, LH-X20, …). RC rate needs to be 50 - 80 Hz to work well. Observations from packet captures: • All 4 stick axes sit at 0x7F (127) when centred. • Min / max values hover around 0x3F (63) and 0xBF (191). That is the default range we expose to the user code, but it can be tuned per drone via STICK_RANGE. 
""" # min, mid, max # STICK_RANGE = StickRange(0, 128, 255) STICK_RANGE = StickRange(40, 128, 220) PRESETS = { # name accel decel expo immediate-boost "normal": ControlProfile("normal", 2.0, 4.0, 0.5, 0.02), "precise": ControlProfile("precise", 1.2, 5.0, 0.3, 0.01), "aggressive": ControlProfile("aggressive", 4.0, 3.0, 1.2, 0.10), } def __init__(self, profile: str | ControlProfile = "normal") -> None: super().__init__(stick_range=self.STICK_RANGE, profile=profile) self.strategy = IncrementalStrategy() # one-shot flags self.takeoff_flag = False self.land_flag = False self.stop_flag = False self.calibration_flag = False self.headless_flag = False self.flip_flag = False self.speed_index = 2 self.camera_tilt_state = 0 # track last motion direction for each axis self.last_throttle_dir = 0 self.last_yaw_dir = 0 self.last_pitch_dir = 0 self.last_roll_dir = 0 # ------------------------------------------------------------------ # # BaseRCModel API # ------------------------------------------------------------------ # def update(self, dt, axes): # type: ignore[override] self.strategy.update(self, dt, axes) def takeoff(self): self.takeoff_flag = True def land(self): """Request the app's normal land / descend action.""" self.land_flag = True def emergency_stop(self): """Immediate motor stop, distinct from the normal land action.""" self.stop_flag = True def flip(self): """Request a flip / roll action when supported by the active variant.""" self.flip_flag = True def set_speed_index(self, speed_index: int) -> None: """Set WiFi-UAV app speed tier: 0=30%, 1=60%, 2=100%, 3=25%.""" self.speed_index = max(0, min(3, int(speed_index))) def set_camera_tilt_state(self, tilt_state: int) -> None: """Set camera tilt command state: 0=neutral, 1/2=opposite tilt directions.""" self.camera_tilt_state = max(0, min(2, int(tilt_state))) # unsupported – always returns 0 def toggle_record(self): # type: ignore[override] return 0 def get_control_state(self): return { "throttle": self.throttle, "yaw": self.yaw, "pitch": self.pitch, "roll": self.roll, "headless": self.headless_flag, "speed_index": self.speed_index, "camera_tilt": self.camera_tilt_state, } def set_strategy(self, strategy) -> None: self.strategy = strategy ================================================ FILE: backend/models/wifi_uav_video_model.py ================================================ from collections import defaultdict from typing import Dict, Optional from models.video_frame import VideoFrame from utils.wifi_uav_jpeg import generate_jpeg_headers, EOI class WifiUavVideoModel: """ Re-assembles one JPEG frame from the WiFi-UAV stream. The drone sends many UDP packets per frame. 
Every packet contains a 56-byte proprietary header and a JPEG fragment: • payload[1] == 0x01 … «JPEG» packet marker • payload[2] == 0x38 … *not* last fragment != 0x38 … last fragment • payload[16:18] … little-endian frame counter • payload[32:34] … little-endian fragment index • payload[56:] … actual JPEG bytes """ HEADER_LEN = 56 def __init__(self, width: int = 640, height: int = 360, num_components: int = 3): self._jpeg_header = generate_jpeg_headers(width, height, num_components) self._current_frame_id: Optional[int] = None self._expected_fragments: Optional[int] = None self._fragments: Dict[int, bytes] = defaultdict(bytes) # --------------------------------------------------------------------- # # public API # --------------------------------------------------------------------- # def ingest_chunk(self, payload: bytes) -> Optional[VideoFrame]: """ Returns a complete `VideoFrame` when all fragments of one JPEG have been received; otherwise returns None. """ # 1. Validate basic markers if len(payload) <= self.HEADER_LEN or payload[1] != 0x01: return None # not a JPEG packet – ignore last_fragment = payload[2] != 0x38 frame_id = int.from_bytes(payload[16:18], "little") fragment_id = int.from_bytes(payload[32:34], "little") jpeg_slice = payload[self.HEADER_LEN :] # 2. Start a new frame if necessary if self._current_frame_id != frame_id: self._reset_state(frame_id) # 3. Store slice self._fragments[fragment_id] = jpeg_slice if last_fragment: self._expected_fragments = fragment_id + 1 # 4. Assemble when complete if ( self._expected_fragments is not None and len(self._fragments) == self._expected_fragments ): ordered = (self._fragments[i] for i in range(self._expected_fragments)) full_jpeg = ( self._jpeg_header + b"".join(ordered) + bytes(EOI) ) # ensure immutable bytes # Prepare next frame self._reset_state(None) return VideoFrame(frame_id, full_jpeg) return None # ------------------------------------------------------------------ # # private helpers # ------------------------------------------------------------------ # def _reset_state(self, new_frame_id: Optional[int]): self._current_frame_id = new_frame_id self._expected_fragments = None self._fragments.clear() ================================================ FILE: backend/plugins/__init__.py ================================================ ================================================ FILE: backend/plugins/base.py ================================================ from abc import ABC, abstractmethod from typing import Iterator from services.flight_controller import FlightController from models.video_frame import VideoFrame class Plugin(ABC): """ Base class all runtime plug-ins must inherit from. `frame_source` is ANY iterator that yields either: • backend.models.video_frame.VideoFrame (format == "jpeg"), • or an np.ndarray BGR/RGB image. """ def __init__(self, name: str, flight_controller: FlightController, frame_source: Iterator, overlay_queue = None, **kwargs): self.name = name self.fc = flight_controller self.frames = frame_source self.overlays = overlay_queue self.running = False # Lifecycle guards: make stop() idempotent and ensure cleanup runs # even if subclasses flip `running` directly. self._started = False self._stopped = False self.loop_thread = None def start(self): if self.running: return self.running = True self._started = True self._stopped = False self._on_start() def stop(self): # Idempotent stop: allow cleanup to run once even if `running` was # already set False by a subclass or background thread. 
if self._stopped: return self.running = False self._stopped = True if self._started: self._on_stop() @abstractmethod def _on_start(self): ... def _on_stop(self): pass def send_overlay(self, data: list): if self.overlays: try: self.overlays.put_nowait(data) except: pass ================================================ FILE: backend/plugins/follow/__init__.py ================================================ ================================================ FILE: backend/plugins/follow/follow_controller.py ================================================ class FollowController: """ Calculates yaw/pitch commands to keep a target centered and at a stable distance. Uses constant-rate (bang-bang) control: outputs fixed command values when the target is outside the deadzone, zero when inside. """ def __init__( self, yaw_deadzone: float = 0.15, pitch_deadzone: float = 0.02, min_box_width: float = 0.30, max_box_width: float = 0.80, invert_yaw: bool = False, invert_pitch: bool = False, yaw_speed: float = 20.0, pitch_speed: float = 20.0, ): self.yaw_deadzone = yaw_deadzone self.pitch_deadzone = pitch_deadzone self.min_box_width = min_box_width self.max_box_width = max_box_width self.invert_yaw = invert_yaw self.invert_pitch = invert_pitch self.yaw_speed = min(100.0, max(0.0, yaw_speed)) self.pitch_speed = min(100.0, max(0.0, pitch_speed)) def compute(self, box_center_x: float, box_width: float) -> tuple[float, float]: """ Compute yaw and pitch commands. Args: box_center_x: Normalized x position of box center (0.0 = left, 1.0 = right) box_width: Normalized width of box (0.0 to 1.0) Returns: (yaw, pitch) commands in range -100 to 100 """ # Yaw: rotate to center the target horizontally yaw = 0.0 error_x = box_center_x - 0.5 if abs(error_x) > self.yaw_deadzone: yaw = self.yaw_speed if error_x > 0 else -self.yaw_speed if self.invert_yaw: yaw = -yaw # Pitch: move forward/backward to keep target at desired size pitch = 0.0 if box_width < (self.min_box_width - self.pitch_deadzone): pitch = self.pitch_speed # too far, move forward elif box_width > (self.max_box_width + self.pitch_deadzone): pitch = -self.pitch_speed # too close, move backward if self.invert_pitch: pitch = -pitch return yaw, pitch ================================================ FILE: backend/plugins/follow/follow_plugin.py ================================================ import threading import cv2 import numpy as np from ultralytics import YOLO import time import os import logging from ..base import Plugin from .follow_controller import FollowController from control.strategies import DirectStrategy logger = logging.getLogger(__name__) class FollowPlugin(Plugin): """ Person-following plugin using YOLOv10 for detection. Detects people in video frames and sends yaw/pitch commands to keep them centered. 
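    Behaviour can be tuned through environment variables read in _on_start()
    (defaults in parentheses): FOLLOW_FPS (20), YOLO_IMG_SIZE (320),
    YOLO_CONFIDENCE (0.65), YOLO_WEIGHTS (path to custom weights, otherwise
    yolov10n.pt), FOLLOW_CENTER_DEADZONE (0.15), FOLLOW_PITCH_DEADZONE (0.02),
    FOLLOW_MIN_BOX_WIDTH (0.30), FOLLOW_MAX_BOX_WIDTH (0.80),
    FOLLOW_INVERT_YAW / FOLLOW_INVERT_PITCH (false), FOLLOW_YAW_SPEED /
    FOLLOW_PITCH_SPEED (20.0) and FOLLOW_LOG_INTERVAL (2.0).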
""" def _on_start(self): # ---- Thread caps for better CPU behavior ---- try: import torch torch.set_num_threads(int(os.getenv("TORCH_NUM_THREADS", "2"))) torch.set_num_interop_threads(1) except Exception: pass try: cv2.setNumThreads(1) except Exception: pass os.environ.setdefault("OMP_NUM_THREADS", "2") os.environ.setdefault("MKL_NUM_THREADS", "2") # ---- Configuration ---- self.frame_rate = int(os.getenv("FOLLOW_FPS", "20")) self.img_size = int(os.getenv("YOLO_IMG_SIZE", "320")) self.confidence = float(os.getenv("YOLO_CONFIDENCE", "0.65")) self.log_interval = float(os.getenv("FOLLOW_LOG_INTERVAL", "2.0")) # ---- Load YOLO model ---- weights_env = os.getenv("YOLO_WEIGHTS") if weights_env and os.path.exists(weights_env): weights_path = weights_env else: repo_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..")) default_weights = os.path.join(repo_root, "yolov10n.pt") weights_path = default_weights if os.path.exists(default_weights) else "yolov10n.pt" self.model = YOLO(weights_path) # ---- Follow controller ---- self.ctrl = FollowController( yaw_deadzone=float(os.getenv("FOLLOW_CENTER_DEADZONE", "0.15")), pitch_deadzone=float(os.getenv("FOLLOW_PITCH_DEADZONE", "0.02")), min_box_width=float(os.getenv("FOLLOW_MIN_BOX_WIDTH", "0.30")), max_box_width=float(os.getenv("FOLLOW_MAX_BOX_WIDTH", "0.80")), invert_yaw=os.getenv("FOLLOW_INVERT_YAW", "false").lower() in ("1", "true", "yes"), invert_pitch=os.getenv("FOLLOW_INVERT_PITCH", "false").lower() in ("1", "true", "yes"), yaw_speed=float(os.getenv("FOLLOW_YAW_SPEED", "20.0")), pitch_speed=float(os.getenv("FOLLOW_PITCH_SPEED", "20.0")), ) # ---- Set DirectStrategy for follow control ---- self._prev_strategy = getattr(self.fc.model, "strategy", None) self._prev_expo = getattr(self.fc.model, "expo_factor", None) try: self.fc.model.set_strategy(DirectStrategy()) self.fc.model.expo_factor = 0.0 logger.info("[FollowPlugin] Started with DirectStrategy, expo=0") except Exception as e: logger.warning("[FollowPlugin] Warning: %s", e) self.loop_thread = threading.Thread(target=self._loop, daemon=True) self.loop_thread.start() def _on_stop(self): # Restore previous strategy try: if self._prev_strategy is not None: self.fc.model.set_strategy(self._prev_strategy) if self._prev_expo is not None: self.fc.model.expo_factor = self._prev_expo except Exception: pass if self.loop_thread: self.loop_thread.join(timeout=1.0) def _loop(self): logger.info("[FollowPlugin] Loop started. 
Waiting for frames...") frame_interval = 1.0 / self.frame_rate last_frame_time = 0 last_log_time = 0 for frame in self.frames: # Rate limit now = time.time() if now - last_frame_time < frame_interval: continue last_frame_time = now if not self.running: break # Decode frame if hasattr(frame, "format") and frame.format == "jpeg": img = cv2.imdecode(np.frombuffer(frame.data, np.uint8), cv2.IMREAD_COLOR) if img is None: continue elif isinstance(frame, np.ndarray): img = frame else: continue # Run YOLO detection persons = [] try: results = self.model( img, stream=True, verbose=False, classes=[0], # person class only imgsz=self.img_size, conf=self.confidence, ) for r in results: for box in r.boxes or []: xyxy = box.xyxy[0].tolist() persons.append(xyxy) except Exception: continue if not persons: # No detection - stop moving and clear overlay self.fc.set_axes(throttle=0, yaw=0, pitch=0, roll=0) self.send_overlay([]) continue # Pick largest person (by area) x1, y1, x2, y2 = max(persons, key=lambda b: (b[2] - b[0]) * (b[3] - b[1])) box_w, box_h = x2 - x1, y2 - y1 frame_h, frame_w = img.shape[:2] # Send overlay overlay = [ { "type": "rect", "coords": [x1 / frame_w, y1 / frame_h, x2 / frame_w, y2 / frame_h], "color": "lime", } ] self.send_overlay(overlay) # Calculate and send commands yaw, pitch = self.ctrl.compute( box_center_x=(x1 + box_w / 2) / frame_w, box_width=box_w / frame_w, ) self.fc.set_axes_from("follow", throttle=0, yaw=yaw / 100.0, pitch=pitch / 100.0, roll=0) # Periodic logging if now - last_log_time >= self.log_interval: center_offset = ((x1 + box_w / 2) / frame_w - 0.5) * 100 logger.debug( "[FollowPlugin] offset=%+.1f%% box=%.1f%% yaw=%.0f pitch=%.0f", center_offset, box_w / frame_w * 100, yaw, pitch, ) last_log_time = now ================================================ FILE: backend/plugins/follow/test_yolo.py ================================================ import cv2 from ultralytics import YOLO def main(): # Load the YOLOv10 model model = YOLO("yolov10n.pt") # Open a connection to the default webcam (0) cap = cv2.VideoCapture(0) if not cap.isOpened(): print("Error: Could not open webcam.") return while True: # Capture frame-by-frame ret, frame = cap.read() if not ret: print("Error: Can't receive frame (stream end?). 
Exiting ...") break # Run YOLOv10 inference on the frame results = model(frame, verbose=False) # Draw bounding boxes and labels on the frame for r in results: for box in r.boxes: if box.cls == 0: # Class for 'person' x1, y1, x2, y2 = map(int, box.xyxy[0]) confidence = box.conf[0] # Draw rectangle cv2.rectangle(frame, (x1, y1), (x2, y2), (0, 255, 0), 2) # Prepare label text label = f"Person: {confidence:.2f}" # Put label on the frame cv2.putText(frame, label, (x1, y1 - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 2) # Display the resulting frame cv2.imshow('YOLOv10 Webcam Test', frame) # Break the loop on 'q' key press if cv2.waitKey(1) == ord('q'): break # When everything done, release the capture and destroy windows cap.release() cv2.destroyAllWindows() if __name__ == '__main__': main() ================================================ FILE: backend/plugins/manager.py ================================================ import importlib import inspect import logging import pkgutil import queue import threading from typing import Dict, Iterator, Type from services.flight_controller import FlightController from .base import Plugin logger = logging.getLogger(__name__) class PluginManager: def __init__(self, flight_controller: FlightController, frame_queue: queue.Queue, overlay_queue: queue.Queue): self._fc = flight_controller self._frames_q = frame_queue self._overlay_q = overlay_queue self._registry: Dict[str, Type[Plugin]] = {} self._pool: Dict[str, Plugin] = {} self._frame_stop_events: Dict[str, threading.Event] = {} self._discover_plugins() def available(self) -> list[str]: return list(self._registry.keys()) def running(self) -> list[str]: return list(self._pool.keys()) def clear_overlays(self) -> None: """ Clears any currently displayed overlays (frontend will render none). """ if not self._overlay_q: return try: self._overlay_q.put_nowait([]) except Exception: pass def start(self, name: str) -> bool: """ Starts a plugin by name. Returns True if started, False if already running. Raises ValueError if the plugin is unknown. """ if name not in self._registry: raise ValueError(f"Unknown plugin: {name}") if name in self._pool: return False logger.info("[PluginManager] Starting plugin: %s", name) cls = self._registry[name] stop_event = threading.Event() self._frame_stop_events[name] = stop_event def frame_iterator() -> Iterator: """ Yield frames from the shared queue, but remain stoppable. Important: do NOT block indefinitely on Queue.get() or plugin threads consuming this iterator can hang forever during shutdown. """ while not stop_event.is_set(): try: # Keep the block bounded so we can observe stop_event. yield self._frames_q.get(timeout=0.2) except queue.Empty: continue try: # Pass a new, unique generator instance and the overlay queue to the plugin inst = cls(name=name, flight_controller=self._fc, frame_source=frame_iterator(), overlay_queue=self._overlay_q) inst.start() self._pool[name] = inst return True except Exception as e: logger.exception("[PluginManager] Error starting plugin %s: %s", name, e) # Ensure we don't leak stop events on failed startup. self._frame_stop_events.pop(name, None) raise def stop(self, name: str) -> bool: """ Stops a plugin by name. Returns True if stopped, False if it wasn't running. Raises ValueError if the plugin is unknown. 
""" if name not in self._registry: raise ValueError(f"Unknown plugin: {name}") inst = self._pool.pop(name, None) if not inst: return False logger.info("[PluginManager] Stopping plugin: %s", name) # Unblock any plugin thread currently waiting on the frame iterator. stop_evt = self._frame_stop_events.pop(name, None) if stop_evt: stop_evt.set() inst.stop() # If we just stopped the last plugin, clear overlays so stale UI doesn't linger. if not self._pool: self.clear_overlays() return True def stop_all(self): for name in list(self._pool.keys()): self.stop(name) def _discover_plugins(self): """Finds all Plugin subclasses in the 'plugins' package.""" import plugins plugin_pkg_path = plugins.__path__ plugin_pkg_name = plugins.__name__ for _, mod_name, _ in pkgutil.walk_packages(path=plugin_pkg_path, prefix=f"{plugin_pkg_name}."): module = importlib.import_module(mod_name) for _, obj in inspect.getmembers(module, inspect.isclass): # Ensure it's a direct subclass of Plugin and not Plugin itself if issubclass(obj, Plugin) and obj is not Plugin: self._registry[obj.__name__] = obj logger.debug("[PluginManager] Discovered plugin: %s", obj.__name__) ================================================ FILE: backend/plugins/test_yaw_plugin.py ================================================ import threading import time import os from .base import Plugin from control.strategies import DirectStrategy class TestYawPlugin(Plugin): """ Simple plugin to apply a small absolute yaw deflection using DirectStrategy for a short, fixed duration. Intended for isolating yaw behaviour on S2x. Behaviour: - Switch model to DirectStrategy (do NOT change expo) - Apply yaw = TEST_YAW_PCT (default +0.30), others 0 - Maintain for TEST_YAW_DURATION_S seconds (default 5s) or until stopped - Then stop sending and restore previous strategy """ def _on_start(self): # Read tunables from environment try: self._yaw_pct = float(os.getenv("TEST_YAW_PCT", "0.30")) except Exception: self._yaw_pct = 0.30 try: self._duration_s = float(os.getenv("TEST_YAW_DURATION_S", "5.0")) except Exception: self._duration_s = 5.0 # Remember previous strategy to restore on stop self._prev_strategy = getattr(self.fc.model, "strategy", None) # Force DirectStrategy for absolute mapping; keep expo as-is try: self.fc.model.set_strategy(DirectStrategy()) print(f"[TestYawPlugin] Using DirectStrategy, yaw={self._yaw_pct:+.2f} for {self._duration_s:.1f}s") except Exception as e: print(f"[TestYawPlugin] Failed to set DirectStrategy: {e}") # Start worker thread self._thread = threading.Thread(target=self._run, daemon=True) self._thread.start() def _on_stop(self): # Stop worker if hasattr(self, "_thread") and self._thread: # Let the run loop exit naturally by checking self.running. # If stop() is called from inside the worker thread (auto-stop), # do NOT join ourselves. try: if threading.current_thread() is not self._thread: self._thread.join(timeout=1.0) except RuntimeError: # Defensive: joining current thread raises at runtime. 
pass # Send zeros once try: self.fc.set_axes(throttle=0, yaw=0, pitch=0, roll=0) except Exception: pass # Restore previous strategy try: if hasattr(self, "_prev_strategy") and self._prev_strategy is not None: self.fc.model.set_strategy(self._prev_strategy) except Exception: pass def _run(self): start = time.time() # Send commands at ~30 Hz; FC loop will sample the latest values interval = 1.0 / 30.0 while self.running: now = time.time() elapsed = now - start if elapsed >= self._duration_s: # Auto-stop after duration try: print("[TestYawPlugin] Duration elapsed; stopping") finally: # Route through stop() so _on_stop() runs (zeros + restore strategy) self.stop() return # Apply yaw deflection only try: self.fc.set_axes_from("test_yaw", throttle=0.0, yaw=self._yaw_pct, pitch=0.0, roll=0.0) except Exception: pass time.sleep(interval) ================================================ FILE: backend/protocols/__init__.py ================================================ from .s2x_rc_protocol_adapter import S2xRCProtocolAdapter __all__ = ['S2xRCProtocolAdapter'] ================================================ FILE: backend/protocols/base_protocol_adapter.py ================================================ from abc import ABC, abstractmethod class BaseProtocolAdapter(ABC): """Base abstract class for drone protocol adapters""" @abstractmethod def build_control_packet(self, drone_model): """Build a control packet for the specific drone protocol""" pass @abstractmethod def send_control_packet(self, packet): """Send the control packet to the drone""" pass @abstractmethod def toggle_debug(self): """Toggle debug packet logging""" pass ================================================ FILE: backend/protocols/base_video_protocol.py ================================================ from abc import ABC, abstractmethod import socket import threading from typing import Optional from models.video_frame import VideoFrame class BaseVideoProtocolAdapter(ABC): """ Owns transport (UDP or TCP socket, keep-alives) and converts raw payloads into VideoFrame objects via an inner VideoModel. """ def __init__(self, drone_ip: str, control_port: int, video_port: int): self.drone_ip = drone_ip self.control_port = control_port self.video_port = video_port self._keepalive_thread: Optional[threading.Thread] = None # ────────── keep-alive helpers ────────── # def start_keepalive(self, interval: float = 1.0) -> None: if self._keepalive_thread and self._keepalive_thread.is_alive(): return self._stop_evt = threading.Event() self._keepalive_thread = threading.Thread( target=self._keepalive_loop, args=(interval,), daemon=True, ) self._keepalive_thread.start() def stop_keepalive(self) -> None: if hasattr(self, "_stop_evt"): self._stop_evt.set() if self._keepalive_thread: self._keepalive_thread.join() def _keepalive_loop(self, interval: float) -> None: while not self._stop_evt.is_set(): self.send_start_command() self._stop_evt.wait(interval) # ────────── transport helpers ────────── # def recv_from_socket(self, sock) -> Optional[bytes]: """ Read one payload chunk from `sock`. The default implementation assumes UDP; override for TCP. 
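        A minimal TCP override might look like the following sketch (an
        assumption; no adapter in this repository currently requires it):

            def recv_from_socket(self, sock):
                try:
                    data = sock.recv(4096)
                    return data or None
                except socket.timeout:
                    return None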
""" try: pkt, _ = sock.recvfrom(4096) return pkt except socket.timeout: return None # ────────── abstract API ────────── # @abstractmethod def send_start_command(self) -> None: """Tell the drone to start/continue sending video.""" raise NotImplementedError @abstractmethod def create_receiver_socket(self) -> socket.socket: """Return a configured socket ready for recv().""" raise NotImplementedError @abstractmethod def handle_payload(self, payload: bytes) -> Optional[VideoFrame]: """ Convert one transport payload into a VideoFrame or return None if the frame is not yet complete. """ raise NotImplementedError ================================================ FILE: backend/protocols/cooingdv_jieli_rc_protocol_adapter.py ================================================ """ RC protocol adapter for the CooingDV/Jieli CTP backend. This is the backend used by KY FPV's Jieli DeviceClient path. It keeps the familiar CooingDV 8-byte TC stick payload, but sends it as decimal BYTE0..BYTE7 fields inside a CTP JSON packet over UDP instead of sending the raw 0x03/0x66 datagram used by the classic CooingDV apps. """ from __future__ import annotations import logging import socket import threading from typing import Final, Optional from models.cooingdv_rc import CooingdvRcModel from protocols.base_protocol_adapter import BaseProtocolAdapter from utils.cooingdv_jieli_ctp import build_ctp_packet logger = logging.getLogger(__name__) class CooingdvJieliRcProtocolAdapter(BaseProtocolAdapter): """CooingDV Jieli/CTP UDP adapter used by KY FPV Jieli devices.""" DEFAULT_DRONE_IP: Final = "192.168.8.15" DEFAULT_PORT: Final = 2228 HEARTBEAT_INTERVAL: Final = 1.0 START_MARKER: Final = 0x66 END_MARKER: Final = 0x99 TOPIC_CONTROL_MODE: Final = "CONTROL_MODE" TOPIC_FLYING_CTRL: Final = "FLYING_CTRL" TOPIC_KEEP_ALIVE: Final = "CTP_KEEP_ALIVE" OP_PUT: Final = "PUT" FLAG_FAST_FLY: Final = 0x01 FLAG_FAST_DROP: Final = 0x02 FLAG_EMERGENCY_STOP: Final = 0x04 FLAG_CIRCLE_TURN_END: Final = 0x08 FLAG_HEADLESS: Final = 0x10 FLAG_UNLOCK_OR_RETURN: Final = 0x20 FLAG_LIGHT: Final = 0x40 FLAG_GYRO: Final = 0x80 def __init__( self, drone_ip: str = DEFAULT_DRONE_IP, control_port: int = DEFAULT_PORT, *, bind_port: Optional[int] = DEFAULT_PORT, ) -> None: self.drone_ip = drone_ip self.control_port = control_port self.debug_packets = False self._pkt_counter = 0 self._heartbeat_running = False self._heartbeat_stop = threading.Event() self._heartbeat_thread: Optional[threading.Thread] = None self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # The Android UDPClientImpl binds DatagramSocket(2228). If that port is # busy on a development machine, fall back to an ephemeral local port so # packet sending still works. 
if bind_port is not None: try: self.sock.bind(("", bind_port)) except OSError as exc: logger.warning( "[cooingdv-jieli] Could not bind local UDP port %s (%s); using ephemeral port.", bind_port, exc, ) self.sock.bind(("", 0)) else: self.sock.bind(("", 0)) self.start_heartbeat() self._send_control_mode(True) logger.info( "[cooingdv-jieli] RC adapter initialized for %s:%s (local %s)", drone_ip, control_port, self.sock.getsockname()[1], ) def build_control_packet(self, drone_model: CooingdvRcModel) -> bytes: """Build a CTP FLYING_CTRL packet from the shared CooingDV model.""" payload = self._build_flying_payload(drone_model) self._clear_one_shot_flags(drone_model) params = {f"BYTE{i}": str(value) for i, value in enumerate(payload)} return build_ctp_packet(self.TOPIC_FLYING_CTRL, params) def send_control_packet(self, packet: bytes) -> None: try: self.sock.sendto(packet, (self.drone_ip, self.control_port)) except OSError: return if self.debug_packets: self._pkt_counter += 1 logger.debug( "[cooingdv-jieli] #%05d %s", self._pkt_counter, packet.hex(" "), ) def toggle_debug(self) -> bool: self.debug_packets = not self.debug_packets logger.info("[cooingdv-jieli] debug %s", "ON" if self.debug_packets else "OFF") return self.debug_packets def stop(self) -> None: self.stop_heartbeat() self._send_control_mode(False) try: self.sock.close() except OSError: pass def start_heartbeat(self) -> None: if self._heartbeat_thread and self._heartbeat_thread.is_alive(): return self._heartbeat_stop.clear() self._heartbeat_running = True self._heartbeat_thread = threading.Thread( target=self._heartbeat_loop, daemon=True, name="CooingdvJieliHeartbeat", ) self._heartbeat_thread.start() def stop_heartbeat(self) -> None: self._heartbeat_running = False self._heartbeat_stop.set() if self._heartbeat_thread: self._heartbeat_thread.join(timeout=2.0) self._heartbeat_thread = None def _heartbeat_loop(self) -> None: while self._heartbeat_running and not self._heartbeat_stop.is_set(): self._send_ctp(self.TOPIC_KEEP_ALIVE, {}) self._heartbeat_stop.wait(self.HEARTBEAT_INTERVAL) def _send_control_mode(self, enabled: bool) -> None: self._send_ctp(self.TOPIC_CONTROL_MODE, {"state": "1" if enabled else "0"}) def _send_ctp(self, topic: str, params: dict[str, str]) -> None: packet = build_ctp_packet(topic, params) try: self.sock.sendto(packet, (self.drone_ip, self.control_port)) except OSError: pass def _build_flying_payload(self, model: CooingdvRcModel) -> list[int]: b1 = self._clamp_axis(model.roll) b2 = self._clamp_axis(model.pitch) acc = self._clamp_axis(model.throttle) if acc == 1: acc = 0 turn = self._clamp_axis(model.yaw) flags = self._build_flags(model) checksum = (b1 ^ b2 ^ acc ^ turn ^ flags) & 0xFF return [self.START_MARKER, b1, b2, acc, turn, flags, checksum, self.END_MARKER] def _build_flags(self, model: CooingdvRcModel) -> int: flags = 0 if model.takeoff_flag: flags |= self.FLAG_FAST_FLY if model.land_flag: flags |= self.FLAG_FAST_DROP if model.stop_flag: flags |= self.FLAG_EMERGENCY_STOP if model.flip_flag: flags |= self.FLAG_CIRCLE_TURN_END if model.headless_flag: flags |= self.FLAG_HEADLESS if model.calibration_flag: flags |= self.FLAG_GYRO return flags & 0xFF def _clear_one_shot_flags(self, model: CooingdvRcModel) -> None: model.takeoff_flag = False model.land_flag = False model.stop_flag = False model.flip_flag = False model.calibration_flag = False def _clamp_axis(self, value: float) -> int: return max(1, min(255, int(value))) & 0xFF ================================================ FILE: 
backend/protocols/cooingdv_jieli_video_protocol.py ================================================ """ First-pass video adapter for the CooingDV/Jieli backend. KY FPV's Jieli path requests video with CTP JSON commands and receives an RTP stream on local UDP ports. This adapter implements the JPEG RTP path, which is the safest initial target because the frontend expects JPEG frames. """ from __future__ import annotations import logging import queue import socket import threading import time from typing import Final, Optional from models.video_frame import VideoFrame from protocols.base_video_protocol import BaseVideoProtocolAdapter from utils.cooingdv_jieli_ctp import build_ctp_packet from utils.wifi_uav_jpeg import EOI, generate_jpeg_headers logger = logging.getLogger(__name__) class CooingdvJieliVideoProtocolAdapter(BaseVideoProtocolAdapter): DEFAULT_DRONE_IP: Final = "192.168.8.15" DEFAULT_CONTROL_PORT: Final = 2228 DEFAULT_VIDEO_PORT: Final = 6666 DEFAULT_AUDIO_PORT: Final = 1234 DEFAULT_SDP_PORT: Final = 6789 TOPIC_OPEN_FRONT_RTS: Final = "OPEN_RT_STREAM" TOPIC_CLOSE_FRONT_RTS: Final = "CLOSE_RT_STREAM" FORMAT_JPEG: Final = 0 FORMAT_H264: Final = 1 def __init__( self, drone_ip: str = DEFAULT_DRONE_IP, control_port: int = DEFAULT_CONTROL_PORT, video_port: int = DEFAULT_VIDEO_PORT, *, audio_port: int = DEFAULT_AUDIO_PORT, sdp_port: int = DEFAULT_SDP_PORT, width: int = 640, height: int = 360, fps: int = 30, debug: bool = False, ) -> None: super().__init__(drone_ip, control_port, video_port) self.audio_port = audio_port self.sdp_port = sdp_port self.width = width self.height = height self.fps = fps self.debug = debug self._running = threading.Event() self._rx_thread: Optional[threading.Thread] = None self._sdp_thread: Optional[threading.Thread] = None self._rx_sock: Optional[socket.socket] = None self._sdp_sock: Optional[socket.socket] = None self._control_sock: Optional[socket.socket] = None self._frame_q: "queue.Queue[VideoFrame]" = queue.Queue(maxsize=2) self._frame_id = 0 self._rtp_jpeg_buffers: dict[int, bytearray] = {} def start(self) -> None: if self.is_running(): return self._running.set() self._control_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) self._control_sock.bind(("", 0)) self._start_sdp_server() self._send_open_stream() self._rx_thread = threading.Thread( target=self._rx_loop, daemon=True, name="CooingdvJieliVideoRx", ) self._rx_thread.start() logger.info( "[cooingdv-jieli-video] listening for RTP/JPEG on UDP %s; SDP on TCP %s", self.video_port, self.sdp_port, ) def stop(self) -> None: self._running.clear() self._send_close_stream() for sock in (self._rx_sock, self._sdp_sock, self._control_sock): if sock: try: sock.close() except OSError: pass if self._rx_thread: self._rx_thread.join(timeout=1.0) if self._sdp_thread: self._sdp_thread.join(timeout=1.0) def is_running(self) -> bool: return self._running.is_set() and self._rx_thread is not None and self._rx_thread.is_alive() def get_frame(self, timeout: float = 1.0) -> Optional[VideoFrame]: try: return self._frame_q.get(timeout=timeout) except queue.Empty: return None def get_packets(self) -> list[bytes]: return [] def send_start_command(self) -> None: self._send_open_stream() def create_receiver_socket(self): return None def handle_payload(self, payload: bytes) -> Optional[VideoFrame]: self._frame_id = (self._frame_id + 1) & 0xFFFF return VideoFrame(self._frame_id, payload, format_type="jpeg") def _send_open_stream(self) -> None: self._send_ctp( self.TOPIC_OPEN_FRONT_RTS, { "format": str(self.FORMAT_JPEG), "w": 
str(self.width), "h": str(self.height), "fps": str(self.fps), }, ) def _send_close_stream(self) -> None: self._send_ctp(self.TOPIC_CLOSE_FRONT_RTS, {"status": "1"}) def _send_ctp(self, topic: str, params: dict[str, str]) -> None: sock = self._control_sock if sock is None: return packet = build_ctp_packet(topic, params) try: sock.sendto(packet, (self.drone_ip, self.control_port)) except OSError: pass def _start_sdp_server(self) -> None: self._sdp_thread = threading.Thread( target=self._sdp_loop, daemon=True, name="CooingdvJieliSdpServer", ) self._sdp_thread.start() def _sdp_loop(self) -> None: sdp = ( "c=IN IP4 127.0.0.1\n" f"m=audio {self.audio_port} RTP/AVP 97\n" "a=rtpmap:97 L16/8000/1\n" "a=ptime:20\n" f"m=video {self.video_port} RTP/AVP 26\n" "a=rtpmap:26 JPEG/90000\n" f"a=framerate:{self.fps}" ).encode("utf-8") try: sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sock.bind(("", self.sdp_port)) sock.listen(1) sock.settimeout(0.5) self._sdp_sock = sock except OSError as exc: logger.warning("[cooingdv-jieli-video] SDP server unavailable: %s", exc) return while self._running.is_set(): try: conn, _ = sock.accept() except socket.timeout: continue except OSError: break with conn: try: conn.sendall(sdp) except OSError: pass def _rx_loop(self) -> None: try: sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sock.bind(("", self.video_port)) sock.settimeout(0.5) self._rx_sock = sock except OSError as exc: logger.warning("[cooingdv-jieli-video] RTP socket unavailable: %s", exc) self._running.clear() return while self._running.is_set(): try: packet, _ = sock.recvfrom(65535) except socket.timeout: continue except OSError: break frame = self._handle_rtp_packet(packet) if frame is None: continue try: self._frame_q.put(frame, timeout=0.1) except queue.Full: try: self._frame_q.get_nowait() self._frame_q.put(frame, timeout=0.1) except (queue.Empty, queue.Full): pass def _handle_rtp_packet(self, packet: bytes) -> Optional[VideoFrame]: if len(packet) < 12 or (packet[0] >> 6) != 2: return self._handle_possible_raw_jpeg(packet) csrc_count = packet[0] & 0x0F marker = bool(packet[1] & 0x80) payload_type = packet[1] & 0x7F timestamp = int.from_bytes(packet[4:8], "big") offset = 12 + (csrc_count * 4) if packet[0] & 0x10: if len(packet) < offset + 4: return None ext_len = int.from_bytes(packet[offset + 2 : offset + 4], "big") * 4 offset += 4 + ext_len payload = packet[offset:] if payload_type == 26: return self._handle_rtp_jpeg(timestamp, payload, marker) return self._handle_possible_raw_jpeg(payload) def _handle_rtp_jpeg(self, timestamp: int, payload: bytes, marker: bool) -> Optional[VideoFrame]: if len(payload) < 8: return None fragment_offset = int.from_bytes(payload[1:4], "big") width = payload[6] * 8 or self.width height = payload[7] * 8 or self.height scan = payload[8:] if fragment_offset == 0: self._rtp_jpeg_buffers[timestamp] = bytearray(generate_jpeg_headers(width, height)) buf = self._rtp_jpeg_buffers.setdefault(timestamp, bytearray()) buf.extend(scan) if not marker: return None data = bytes(buf) self._rtp_jpeg_buffers.pop(timestamp, None) if not data.endswith(EOI): data += bytes(EOI) return self.handle_payload(data) def _handle_possible_raw_jpeg(self, payload: bytes) -> Optional[VideoFrame]: start = payload.find(b"\xff\xd8") end = payload.rfind(b"\xff\xd9") if start >= 0 and end > start: return self.handle_payload(payload[start : end + 2]) if self.debug and payload: 
logger.debug("[cooingdv-jieli-video] ignored payload %sB", len(payload)) return None ================================================ FILE: backend/protocols/cooingdv_rc_protocol_adapter.py ================================================ """ RC Protocol Adapter for CooingDV drones. The CooingDV apps share a UDP control plane on port 7099, but they do not all use the same control packet layout. The apps listen for UDP telemetry, classify the drone as either a short "TC" packet variant or an extended "GL" packet variant, and then switch the encoder accordingly. Shared behavior observed in the apps: - Heartbeat {0x01, 0x01} every second - RTSP preview at rtsp://192.168.1.1:7070/webcam (handled elsewhere) - UDP telemetry on the same socket used for control Important: {0x08, 0x01} is used by the apps when leaving control mode. It is not a startup "init" packet. """ import os import socket import threading import logging from typing import Final, Optional from models.cooingdv_rc import CooingdvRcModel from protocols.base_protocol_adapter import BaseProtocolAdapter logger = logging.getLogger(__name__) class CooingdvRcProtocolAdapter(BaseProtocolAdapter): """Protocol adapter for the CooingDV drone family.""" DEFAULT_DRONE_IP: Final = "192.168.1.1" DEFAULT_PORT: Final = 7099 HEARTBEAT_INTERVAL: Final = 1.0 RECV_TIMEOUT: Final = 0.25 RECV_BUFFER_SIZE: Final = 256 # Packet markers PREFIX: Final = 0x03 START_MARKER: Final = 0x66 EXTENDED_MARKER: Final = 0x14 END_MARKER: Final = 0x99 # Discrete UDP commands HEARTBEAT_COMMAND: Final = bytes([0x01, 0x01]) STOP_COMMAND: Final = bytes([0x08, 0x01]) # Variant names DEVICE_UNKNOWN: Final = "unknown" DEVICE_TC: Final = "tc" DEVICE_GL: Final = "gl" # TC packet flag bits FLAG_TAKEOFF: Final = 0x01 FLAG_LAND: Final = 0x02 FLAG_STOP: Final = 0x04 FLAG_FLIP: Final = 0x08 FLAG_HEADLESS: Final = 0x10 FLAG_CALIBRATE: Final = 0x80 # GL packet flag groups GL_FLAG_ONE_KEY_ACTION: Final = 0x01 GL_FLAG_STOP: Final = 0x02 GL_FLAG_CALIBRATE: Final = 0x04 GL_FLAG_FLIP: Final = 0x08 GL_FLAG_HEADLESS: Final = 0x01 # Resolution / capability IDs seen in the CooingDV apps. The apps use the # first byte of received UDP telemetry to decide whether the drone belongs # to the GL or TC family. GL_RESOLUTION_IDS: Final = frozenset(set(range(90, 102)) | {82, 85, 103, 105}) KNOWN_RESOLUTION_IDS: Final = frozenset({ 3, 5, 9, 11, 12, 19, 20, 21, 23, 24, 26, 27, 29, 30, 31, 41, 43, 44, 45, 51, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 80, 81, 82, 83, 84, 85, 86, 87, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 103, 105, }) def __init__( self, drone_ip: str = DEFAULT_DRONE_IP, control_port: int = DEFAULT_PORT, variant: Optional[str] = None, ) -> None: self.drone_ip = drone_ip self.control_port = control_port self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # Match Java DatagramSocket() semantics by binding to an ephemeral port # so replies return to the same socket we use for telemetry detection. 
self.sock.bind(("", 0)) self.sock.settimeout(self.RECV_TIMEOUT) self.debug_packets = False self._pkt_counter = 0 env_variant = os.getenv("COOINGDV_VARIANT") self._variant_override = self._normalise_variant(variant or env_variant) self._detected_variant: Optional[str] = None self.detected_resolution_id: Optional[int] = None self.last_rx_packet: bytes = b"" self.last_rx_addr: Optional[tuple[str, int]] = None self._override_mismatch_reported = False # Heartbeat thread self._heartbeat_running = False self._heartbeat_thread: Optional[threading.Thread] = None self._heartbeat_stop = threading.Event() # Receive thread self._receiver_running = False self._receiver_thread: Optional[threading.Thread] = None self._receiver_stop = threading.Event() self.start_receiver() self.start_heartbeat() active_variant = self._active_variant() active_label = active_variant.upper() if self._variant_override: logger.info( "[cooingdv] RC adapter initialized for %s:%s (forced %s, local %s)", drone_ip, control_port, active_label, self.sock.getsockname()[1], ) else: logger.info( "[cooingdv] RC adapter initialized for %s:%s (auto-detect, TC fallback, local %s)", drone_ip, control_port, self.sock.getsockname()[1], ) def start_heartbeat(self) -> None: """Start the heartbeat thread to keep the UDP session alive.""" if self._heartbeat_thread and self._heartbeat_thread.is_alive(): return self._heartbeat_stop.clear() self._heartbeat_running = True self._heartbeat_thread = threading.Thread( target=self._heartbeat_loop, daemon=True, name="CooingdvHeartbeat", ) self._heartbeat_thread.start() def stop_heartbeat(self) -> None: """Stop the heartbeat thread.""" self._heartbeat_running = False self._heartbeat_stop.set() if self._heartbeat_thread: self._heartbeat_thread.join(timeout=2.0) self._heartbeat_thread = None def _heartbeat_loop(self) -> None: """Send heartbeat packets every HEARTBEAT_INTERVAL seconds.""" while self._heartbeat_running and not self._heartbeat_stop.is_set(): try: self.sock.sendto(self.HEARTBEAT_COMMAND, (self.drone_ip, self.control_port)) if self.debug_packets: logger.debug("[cooingdv] heartbeat sent: %s", self.HEARTBEAT_COMMAND.hex()) except OSError: pass self._heartbeat_stop.wait(self.HEARTBEAT_INTERVAL) def start_receiver(self) -> None: """Start the telemetry receiver used for TC/GL auto-detection.""" if self._receiver_thread and self._receiver_thread.is_alive(): return self._receiver_stop.clear() self._receiver_running = True self._receiver_thread = threading.Thread( target=self._receiver_loop, daemon=True, name="CooingdvUdpRx", ) self._receiver_thread.start() def stop_receiver(self) -> None: """Stop the telemetry receiver thread.""" self._receiver_running = False self._receiver_stop.set() if self._receiver_thread: self._receiver_thread.join(timeout=2.0) self._receiver_thread = None def _receiver_loop(self) -> None: while self._receiver_running and not self._receiver_stop.is_set(): try: packet, addr = self.sock.recvfrom(self.RECV_BUFFER_SIZE) except socket.timeout: continue except OSError: if self._receiver_stop.is_set(): break continue if not packet: continue self.last_rx_packet = packet self.last_rx_addr = addr self._process_received_packet(packet, addr) def _process_received_packet(self, packet: bytes, addr: tuple[str, int]) -> None: """Track UDP telemetry and auto-detect the control variant.""" if self.debug_packets: logger.debug( "[cooingdv] udp recv %sB from %s:%s: %s", len(packet), addr[0], addr[1], packet.hex(), ) variant = self._detect_variant(packet) if variant is None: return resolution_id = 
packet[0] if resolution_id in self.KNOWN_RESOLUTION_IDS: self.detected_resolution_id = resolution_id if self._variant_override: if variant != self._variant_override and not self._override_mismatch_reported: logger.warning( "[cooingdv] Ignoring auto-detected %s telemetry; using override %s.", variant.upper(), self._variant_override.upper(), ) self._override_mismatch_reported = True return if variant == self._detected_variant: return self._detected_variant = variant resolution_text = ( f" resolution-id={self.detected_resolution_id}" if self.detected_resolution_id is not None else "" ) logger.info("[cooingdv] Auto-detected %s control variant.%s", variant.upper(), resolution_text) def stop(self) -> None: """Clean shutdown of the adapter.""" self.stop_heartbeat() try: self.sock.sendto(self.STOP_COMMAND, (self.drone_ip, self.control_port)) except OSError: pass self.stop_receiver() try: self.sock.close() except Exception: pass # ------------------------------------------------------------------ # # BaseProtocolAdapter interface # ------------------------------------------------------------------ # def build_control_packet(self, drone_model: CooingdvRcModel) -> bytes: """Build a control packet for the detected CooingDV variant.""" variant = self._active_variant() if variant == self.DEVICE_GL: packet = self._build_gl_control_packet(drone_model) else: packet = self._build_tc_control_packet(drone_model) self._clear_one_shot_flags(drone_model) return packet def send_control_packet(self, packet: bytes) -> None: """Send a control packet to the drone via UDP.""" try: self.sock.sendto(packet, (self.drone_ip, self.control_port)) except OSError: return if self.debug_packets: self._pkt_counter += 1 variant = self._active_variant().upper() hex_dump = " ".join(f"{b:02x}" for b in packet) logger.debug("[cooingdv] #%05d [%s] %s", self._pkt_counter, variant, hex_dump) if len(packet) == 9: roll, pitch, throttle, yaw = packet[2], packet[3], packet[4], packet[5] flags = packet[6] logger.debug(" Controls: R:%s P:%s T:%s Y:%s", roll, pitch, throttle, yaw) flag_names = self._decode_tc_flags(flags) if flag_names: logger.debug(" Flags: %s", ", ".join(flag_names)) elif len(packet) == 21: roll, pitch, throttle, yaw = packet[3], packet[4], packet[5], packet[6] flags1, flags2 = packet[7], packet[8] logger.debug(" Controls: R:%s P:%s T:%s Y:%s", roll, pitch, throttle, yaw) flag_names = self._decode_gl_flags(flags1, flags2) if flag_names: logger.debug(" Flags: %s", ", ".join(flag_names)) def toggle_debug(self) -> bool: """Toggle debug packet logging on/off.""" self.debug_packets = not self.debug_packets state = "ON" if self.debug_packets else "OFF" logger.info("[cooingdv] debug %s", state) return self.debug_packets # ------------------------------------------------------------------ # # Helper methods # ------------------------------------------------------------------ # def _active_variant(self) -> str: """Return the currently active control variant.""" if self._variant_override: return self._variant_override if self._detected_variant: return self._detected_variant return self.DEVICE_TC def _normalise_variant(self, value: Optional[str]) -> Optional[str]: if value is None: return None raw = value.strip().lower() if raw in ("", "auto", "detect", "autodetect"): return None if raw in ("tc", "e88", "short"): return self.DEVICE_TC if raw in ("gl", "flow", "extended"): return self.DEVICE_GL logger.warning("[cooingdv] Unknown variant override '%s', using auto-detect.", value) return None def _detect_variant(self, packet: bytes) -> 
Optional[str]: """Infer the control family from the first telemetry byte.""" first_byte = packet[0] if first_byte in self.GL_RESOLUTION_IDS: return self.DEVICE_GL if first_byte in self.KNOWN_RESOLUTION_IDS: return self.DEVICE_TC return None def _build_tc_control_packet(self, model: CooingdvRcModel) -> bytes: pkt = bytearray(9) pkt[0] = self.PREFIX pkt[1] = self.START_MARKER pkt[2] = self._clamp_axis(model.roll) pkt[3] = self._clamp_axis(model.pitch) pkt[4] = self._clamp_axis(model.throttle) pkt[5] = self._clamp_axis(model.yaw) pkt[6] = self._build_tc_flags(model) pkt[7] = self._calculate_checksum(pkt[2:7]) pkt[8] = self.END_MARKER return bytes(pkt) def _build_gl_control_packet(self, model: CooingdvRcModel) -> bytes: roll = self._clamp_axis(model.roll) pitch = self._clamp_axis(model.pitch) throttle = self._clamp_axis(model.throttle) yaw = self._clamp_axis(model.yaw) flags1, flags2 = self._build_gl_flags(model) inner = bytearray(20) inner[0] = self.START_MARKER inner[1] = self.EXTENDED_MARKER inner[2] = roll inner[3] = pitch inner[4] = throttle inner[5] = yaw inner[6] = flags1 inner[7] = flags2 inner[18] = self._calculate_checksum( bytes([roll, pitch, throttle, yaw, flags1, flags2]) ) inner[19] = self.END_MARKER return bytes([self.PREFIX]) + bytes(inner) def _clamp_axis(self, value: float) -> int: """Clamp axis values into the byte range used by the apps.""" clamped = max(1, min(255, int(value))) return clamped & 0xFF def _build_tc_flags(self, model: CooingdvRcModel) -> int: flags = 0 if model.takeoff_flag: flags |= self.FLAG_TAKEOFF if model.land_flag: flags |= self.FLAG_LAND if model.stop_flag: flags |= self.FLAG_STOP if model.flip_flag: flags |= self.FLAG_FLIP if model.headless_flag: flags |= self.FLAG_HEADLESS if model.calibration_flag: flags |= self.FLAG_CALIBRATE return flags & 0xFF def _build_gl_flags(self, model: CooingdvRcModel) -> tuple[int, int]: """ Build the extended GL flag groups. In the decompiled apps, the GL family uses two flag bytes and does not expose separate takeoff/land bits. The one-key action bit is reused for either button, so we mirror that behavior here. 
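        Worked example: with takeoff_flag (or land_flag) and headless_flag
        set, this returns flags1 = 0x01 (ONE_KEY) and flags2 = 0x01
        (HEADLESS); additionally setting stop_flag ORs in GL_FLAG_STOP,
        giving flags1 = 0x03.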
""" flags1 = 0 flags2 = 0 if model.takeoff_flag or model.land_flag: flags1 |= self.GL_FLAG_ONE_KEY_ACTION if model.stop_flag: flags1 |= self.GL_FLAG_STOP if model.calibration_flag: flags1 |= self.GL_FLAG_CALIBRATE if model.flip_flag: flags1 |= self.GL_FLAG_FLIP if model.headless_flag: flags2 |= self.GL_FLAG_HEADLESS return flags1 & 0xFF, flags2 & 0xFF def _clear_one_shot_flags(self, model: CooingdvRcModel) -> None: model.takeoff_flag = False model.land_flag = False model.stop_flag = False model.flip_flag = False model.calibration_flag = False def _decode_tc_flags(self, flags: int) -> list[str]: names: list[str] = [] if flags & self.FLAG_TAKEOFF: names.append("TAKEOFF") if flags & self.FLAG_LAND: names.append("LAND") if flags & self.FLAG_STOP: names.append("STOP") if flags & self.FLAG_FLIP: names.append("FLIP") if flags & self.FLAG_HEADLESS: names.append("HEADLESS") if flags & self.FLAG_CALIBRATE: names.append("CALIBRATE") return names def _decode_gl_flags(self, flags1: int, flags2: int) -> list[str]: names: list[str] = [] if flags1 & self.GL_FLAG_ONE_KEY_ACTION: names.append("ONE_KEY") if flags1 & self.GL_FLAG_STOP: names.append("STOP") if flags1 & self.GL_FLAG_CALIBRATE: names.append("CALIBRATE") if flags1 & self.GL_FLAG_FLIP: names.append("FLIP") if flags2 & self.GL_FLAG_HEADLESS: names.append("HEADLESS") return names def _calculate_checksum(self, data: bytes) -> int: checksum = 0 for b in data: checksum ^= b return checksum & 0xFF ================================================ FILE: backend/protocols/cooingdv_video_protocol.py ================================================ """ Video Protocol Adapter for Cooingdv drones. Uses RTSP streaming instead of custom UDP protocol - much simpler than the S2x and WiFi UAV implementations. The drone broadcasts video at: rtsp://192.168.1.1:7070/webcam This adapter uses OpenCV's VideoCapture to connect to the RTSP stream and provides frames through the standard turbodrone interface. """ import logging import cv2 import queue import threading import time from typing import Final, Optional, List from models.cooingdv_video_model import CooingdvVideoModel from models.video_frame import VideoFrame from protocols.base_video_protocol import BaseVideoProtocolAdapter logger = logging.getLogger(__name__) class CooingdvVideoProtocolAdapter(BaseVideoProtocolAdapter): """ Video protocol adapter for cooingdv drones using RTSP streaming. Unlike the UDP-based adapters for S2x and WiFi UAV drones, this adapter connects to a standard RTSP stream, making it much simpler and more reliable. 
Features: - Automatic reconnection on stream failure - Frame rate limiting to prevent buffer overflow - Thread-safe frame queue """ DEFAULT_DRONE_IP: Final = "192.168.1.1" DEFAULT_RTSP_PORT: Final = 7070 DEFAULT_CONTROL_PORT: Final = 7099 RTSP_PATH: Final = "/webcam" # Reconnection settings RECONNECT_DELAY: Final = 2.0 # seconds MAX_RECONNECT_ATTEMPTS: Final = 10 # Frame capture settings FRAME_TIMEOUT: Final = 5.0 # seconds without frame triggers reconnect READ_FAILURE_BACKOFF: Final = 0.05 # seconds between failed reads def __init__( self, drone_ip: str = DEFAULT_DRONE_IP, control_port: int = DEFAULT_CONTROL_PORT, video_port: int = DEFAULT_RTSP_PORT, *, debug: bool = False, ): super().__init__(drone_ip, control_port, video_port) self.model = CooingdvVideoModel() self.debug = debug or logger.isEnabledFor(logging.DEBUG) self._dbg = logger.debug if self.debug else (lambda *a, **k: None) # Build RTSP URL self.rtsp_url = f"rtsp://{drone_ip}:{video_port}{self.RTSP_PATH}" self._dbg(f"[cooingdv-video] RTSP URL: {self.rtsp_url}") # OpenCV capture object self._cap: Optional[cv2.VideoCapture] = None self._cap_lock = threading.Lock() # Threading self._running = False self._rx_thread: Optional[threading.Thread] = None self._frame_q: "queue.Queue[VideoFrame]" = queue.Queue(maxsize=2) # Packet buffer for compatibility with existing interface self._pkt_lock = threading.Lock() self._pkt_buffer: List[bytes] = [] # Statistics self.frames_ok = 0 self.frames_dropped = 0 self.reconnect_count = 0 self._last_frame_time = time.time() # ------------------------------------------------------------------ # # RTSP Connection Management # ------------------------------------------------------------------ # def _open_stream(self) -> bool: """ Open the RTSP stream. Returns True on success. Uses specific OpenCV settings optimized for RTSP: - CAP_FFMPEG backend for better RTSP support - Low buffer size to reduce latency """ with self._cap_lock: if self._cap is not None: self._cap.release() self._dbg(f"[cooingdv-video] Opening RTSP stream: {self.rtsp_url}") # Create capture with FFMPEG backend for better RTSP handling self._cap = cv2.VideoCapture(self.rtsp_url, cv2.CAP_FFMPEG) # Configure for low latency self._cap.set(cv2.CAP_PROP_BUFFERSIZE, 1) if self._cap.isOpened(): self._dbg("[cooingdv-video] Stream opened successfully") self._last_frame_time = time.time() return True else: self._dbg("[cooingdv-video] Failed to open stream") self._cap = None return False def _close_stream(self) -> None: """Close the RTSP stream.""" with self._cap_lock: if self._cap is not None: self._cap.release() self._cap = None self._dbg("[cooingdv-video] Stream closed") def _reconnect(self) -> bool: """Attempt to reconnect to the stream.""" self.reconnect_count += 1 self._dbg(f"[cooingdv-video] Reconnection attempt #{self.reconnect_count}") self._close_stream() time.sleep(self.RECONNECT_DELAY) for attempt in range(self.MAX_RECONNECT_ATTEMPTS): if not self._running: return False if self._open_stream(): return True self._dbg(f"[cooingdv-video] Reconnect attempt {attempt + 1}/{self.MAX_RECONNECT_ATTEMPTS} failed") time.sleep(self.RECONNECT_DELAY) return False # ------------------------------------------------------------------ # # BaseVideoProtocolAdapter interface # ------------------------------------------------------------------ # def send_start_command(self) -> None: """ For RTSP, we don't need to send a start command. The stream starts when we connect. 
""" pass def create_receiver_socket(self): """ Not used for RTSP - OpenCV handles the connection. Returns None for compatibility. """ return None def handle_payload(self, payload: bytes) -> Optional[VideoFrame]: """ Wrap one encoded RTSP frame using the shared video-model interface. """ return self.model.ingest_chunk(payload=payload) # ------------------------------------------------------------------ # # Receiver Thread API (matches existing interface) # ------------------------------------------------------------------ # def start(self) -> None: """Start the video receiver thread.""" if self._rx_thread and self._rx_thread.is_alive(): return self._running = True self._frame_q = queue.Queue(maxsize=2) # Open initial connection if not self._open_stream(): self._dbg("[cooingdv-video] Warning: Could not open stream on start") # Start receiver thread self._rx_thread = threading.Thread( target=self._rx_loop, daemon=True, name="CooingdvVideoRx", ) self._rx_thread.start() self._dbg("[cooingdv-video] Receiver thread started") def _rx_loop(self) -> None: """ Main receiver loop - reads frames from RTSP stream. """ while self._running: # Check if we have a valid capture with self._cap_lock: cap = self._cap if cap is None or not cap.isOpened(): if not self._reconnect(): self._dbg("[cooingdv-video] Failed to reconnect, retrying...") time.sleep(self.RECONNECT_DELAY) continue # Try to read a frame try: ret, frame = cap.read() if not ret or frame is None: # Check for timeout if time.time() - self._last_frame_time > self.FRAME_TIMEOUT: self._dbg("[cooingdv-video] Frame timeout, reconnecting...") self._reconnect() else: time.sleep(self.READ_FAILURE_BACKOFF) continue self._last_frame_time = time.time() # Encode as JPEG (OpenCV handles BGR→YCbCr conversion internally) encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 85] _, jpeg_data = cv2.imencode('.jpg', frame, encode_param) video_frame = self.handle_payload(jpeg_data.tobytes()) if video_frame is None: continue self.frames_ok += 1 # Add to queue (drop if full) try: self._frame_q.put(video_frame, timeout=0.1) except queue.Full: self.frames_dropped += 1 # Drop oldest frame and add new one try: self._frame_q.get_nowait() self._frame_q.put(video_frame, timeout=0.1) except (queue.Empty, queue.Full): pass except cv2.error as e: self._dbg(f"[cooingdv-video] OpenCV error: {e}") self._reconnect() except Exception as e: self._dbg(f"[cooingdv-video] Unexpected error: {e}") time.sleep(0.1) def stop(self) -> None: """Stop the video receiver and clean up.""" self._dbg("[cooingdv-video] Stopping...") self._running = False # Wait for thread to finish if self._rx_thread and self._rx_thread.is_alive(): self._rx_thread.join(timeout=2.0) # Close stream self._close_stream() self._dbg( f"[cooingdv-video] Stopped. Stats: " f"frames_ok={self.frames_ok}, " f"frames_dropped={self.frames_dropped}, " f"reconnects={self.reconnect_count}" ) def is_running(self) -> bool: """Check if the receiver is running.""" return self._running and self._rx_thread is not None and self._rx_thread.is_alive() def get_frame(self, timeout: float = 1.0) -> Optional[VideoFrame]: """Get the next available frame from the queue.""" try: return self._frame_q.get(timeout=timeout) except queue.Empty: return None def get_packets(self) -> List[bytes]: """ Get raw packets - not really applicable for RTSP. Returns empty list for compatibility. 
""" return [] # ------------------------------------------------------------------ # # Keep-alive (not needed for RTSP, but required by base class) # ------------------------------------------------------------------ # def start_keepalive(self, interval: float = 1.0) -> None: """RTSP doesn't need keep-alive packets.""" pass def stop_keepalive(self) -> None: """RTSP doesn't need keep-alive packets.""" pass ================================================ FILE: backend/protocols/debug_rc_protocol_adapter.py ================================================ import logging log = logging.getLogger(__name__) class DebugRcProtocolAdapter: """Dummy RC protocol adapter for debugging.""" def __init__(self): log.info("Debug RC protocol adapter initialized.") def send_control_data(self, data: bytes): log.debug(f"Debug: send_control_data({data.hex() if isinstance(data, bytes) else data})") ================================================ FILE: backend/protocols/debug_video_protocol.py ================================================ import cv2 import logging import time import threading import queue from typing import Optional, List from models.video_frame import VideoFrame from protocols.base_video_protocol import BaseVideoProtocolAdapter from utils.dropping_queue import DroppingQueue log = logging.getLogger(__name__) class DebugVideoProtocolAdapter(BaseVideoProtocolAdapter): """ Drop-in video protocol adapter that fetches frames from the local webcam instead of a network socket. """ def __init__(self, camera_index: int = 0, debug: bool = False, max_queue_size=100): super().__init__(drone_ip="localhost", control_port=0, video_port=0) self.camera_index = camera_index self.debug = debug self._cap = None self._frame_id = 0 self._running = threading.Event() self._thread: Optional[threading.Thread] = None self.frame_queue = DroppingQueue(maxsize=max_queue_size) def start(self): self._cap = cv2.VideoCapture(self.camera_index) if not self._cap.isOpened(): raise RuntimeError(f"Cannot open local camera #{self.camera_index}") self._running.set() self._thread = threading.Thread(target=self._capture_loop, daemon=True) self._thread.start() log.info(f"[debug-video] webcam #{self.camera_index} opened") def stop(self): if not self._running.is_set(): return self._running.clear() if self._thread: self._thread.join(timeout=1.0) if self._cap and self._cap.isOpened(): self._cap.release() log.info("[debug-video] webcam released") def is_running(self) -> bool: return self._running.is_set() def get_frame(self, timeout: float = 1.0) -> Optional[VideoFrame]: try: return self.frame_queue.get(timeout=timeout) except queue.Empty: return None def get_packets(self) -> List[bytes]: # Not applicable for webcam, but required by the service return [] def _capture_loop(self): while self._running.is_set(): if not self._cap or not self._cap.isOpened(): log.error("[debug-video] camera is not open.") self._running.clear() break ret, frame_bgr = self._cap.read() if not ret: log.warning("[debug-video] failed to grab frame") time.sleep(0.1) continue ok, jpg = cv2.imencode(".jpg", frame_bgr, [int(cv2.IMWRITE_JPEG_QUALITY), 85]) if not ok: log.warning("[debug-video] JPEG encode failed") continue self._frame_id = (self._frame_id + 1) & 0xFFFF video_frame = VideoFrame( frame_id=self._frame_id, data=jpg.tobytes(), format_type="jpeg", ) try: self.frame_queue.put(video_frame) except queue.Full: # This is expected if the queue is full pass log.info("[debug-video] capture loop stopped.") # --- Stubs for methods that are not used by this adapter --- def 
create_receiver_socket(self): return None def send_start_command(self): pass def handle_payload(self, payload: bytes) -> Optional[VideoFrame]: return None ================================================ FILE: backend/protocols/no_video_protocol.py ================================================ import logging import queue import threading import time from typing import Optional from models.video_frame import VideoFrame from protocols.base_video_protocol import BaseVideoProtocolAdapter logger = logging.getLogger(__name__) class NoVideoProtocolAdapter(BaseVideoProtocolAdapter): """ Placeholder video adapter for RC backends whose video transport is not yet implemented. It keeps the web service alive without pretending to decode a stream. The logs tell users that the selected drone type currently has RC-only support. """ def __init__( self, drone_ip: str = "0.0.0.0", control_port: int = 0, video_port: int = 0, *, reason: str = "video transport not implemented", ) -> None: super().__init__(drone_ip, control_port, video_port) self.reason = reason self._running = threading.Event() def start(self) -> None: logger.warning("[no-video] %s", self.reason) self._running.set() def stop(self) -> None: self._running.clear() def is_running(self) -> bool: return self._running.is_set() def get_frame(self, timeout: float = 1.0) -> Optional[VideoFrame]: if self._running.is_set(): time.sleep(timeout) raise queue.Empty def get_packets(self) -> list[bytes]: return [] def send_start_command(self) -> None: pass def create_receiver_socket(self): return None def handle_payload(self, payload: bytes) -> Optional[VideoFrame]: return None ================================================ FILE: backend/protocols/s2x_rc_protocol_adapter.py ================================================ from protocols.base_protocol_adapter import BaseProtocolAdapter import socket class S2xRCProtocolAdapter(BaseProtocolAdapter): """Protocol adapter for S2x drones (S20, S29)""" def __init__(self, drone_ip, control_port=8080): self.drone_ip = drone_ip self.control_port = control_port self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) self.debug_packets = False self.packet_counter = 0 self.swap_yaw_roll = False # Stock Macrochip HY packets scale roll/pitch for app speed tiers. # Keep full-scale as the default so existing S2X behavior is unchanged. self.speed_scale_by_index = { 0: 0.7, # PL FPV uses 0.7; older HiTurbo builds use 0.6. 1: 0.8, 2: 1.0, } def build_control_packet(self, drone_model): """Build a control packet for the S2x protocol""" pkt = bytearray(20) pkt[0] = 0x66 pkt[1] = drone_model.speed & 0xFF roll = drone_model.roll yaw = drone_model.yaw if self.swap_yaw_roll: roll, yaw = yaw, roll speed_scale = self.speed_scale_by_index.get(getattr(drone_model, "speed_index", 2), 1.0) # Remap from constrained range to full 0-255 range. # Macrochip HY control only speed-scales roll/pitch, not throttle/yaw. pkt[2] = int(self._remap_to_full_range(self._scale_axis(roll, drone_model, speed_scale), drone_model)) & 0xFF pkt[3] = int(self._remap_to_full_range(self._scale_axis(drone_model.pitch, drone_model, speed_scale), drone_model)) & 0xFF pkt[4] = int(self._remap_to_full_range(drone_model.throttle, drone_model)) & 0xFF pkt[5] = int(self._remap_to_full_range(yaw, drone_model)) & 0xFF # Byte 6 for command flags pkt[6] = 0x00 # The stock HiTurbo app uses the same one-shot bit for both takeoff and # land, while emergency stop is a separate bit. 
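        # --- Illustrative sketch (not part of the adapter): the full 20-byte S2x frame
        # --- for a neutral hover with all sticks centered. The speed byte (20) and the
        # --- 0x0a mode byte are example values mirroring the builder code around this
        # --- comment; the helper name is hypothetical.
        def _example_s2x_hover_frame() -> bytes:
            pkt = bytearray(20)
            pkt[0] = 0x66                               # start marker
            pkt[1] = 20                                 # speed byte (example value)
            pkt[2] = pkt[3] = pkt[4] = pkt[5] = 128     # roll / pitch / throttle / yaw centered
            pkt[6] = 0x00                               # one-shot command flags (none set)
            pkt[7] = 0x0a                               # base mode bits
            chk = 0
            for i in range(2, 18):                      # XOR checksum over bytes 2-17
                chk ^= pkt[i]
            pkt[18] = chk                               # evaluates to 0x0a for this neutral frame
            pkt[19] = 0x99                              # end marker
            return bytes(pkt)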
if drone_model.takeoff_flag: pkt[6] |= 0x01 if drone_model.land_flag: pkt[6] |= 0x01 if drone_model.stop_flag: pkt[6] |= 0x02 if drone_model.calibration_flag: pkt[6] |= 0x04 # Byte 7 - historical TurboDrone base mode bits plus optional headless # flag. Stock apps commonly send 0x02, but 0x0a is proven on hardware. pkt[7] = 0x0a if drone_model.headless_flag: pkt[7] |= 0x01 # bytes 8-17 are zero-filled # Calculate checksum (bytes 2-17) chk = 0 for i in range(2, 18): chk ^= pkt[i] pkt[18] = chk & 0xFF pkt[19] = 0x99 # Clear one-shot flags after building packet drone_model.takeoff_flag = False drone_model.land_flag = False drone_model.stop_flag = False drone_model.calibration_flag = False return bytes(pkt) def send_control_packet(self, packet): """Send the control packet to the drone""" self.sock.sendto(packet, (self.drone_ip, self.control_port)) # Log packet details if debug is enabled if self.debug_packets: self.packet_counter += 1 # Print full packet hex dump hex_dump = ' '.join(f'{b:02x}' for b in packet) print(f"Packet #{self.packet_counter}: {hex_dump}") # Print decoded controls print(f" Controls: R:{packet[2]} P:{packet[3]} T:{packet[4]} Y:{packet[5]}") # Print flags flags6 = packet[6] flags7 = packet[7] flags_desc = [] if flags6 & 0x01: flags_desc.append("FLY_OR_LAND") if flags6 & 0x02: flags_desc.append("STOP") if flags6 & 0x04: flags_desc.append("CALIBRATE") if flags7 & 0x01: flags_desc.append("HEADLESS") print(f" Flags: {flags_desc}") print(f" Checksum: 0x{packet[18]:02x}") print() def toggle_debug(self): """Toggle debug packet logging""" self.debug_packets = not self.debug_packets return self.debug_packets def _remap_to_full_range(self, value, model): """Remap value from constrained range to full 0-255 range for sending to drone""" if value >= model.center_value: # Map center...max_control to 128...255 return 128.0 + (value - model.center_value) * (255.0 - 128.0) / (model.max_control_value - model.center_value) else: # Map min_control...center to 0...128 return (value - model.min_control_value) * 128.0 / (model.center_value - model.min_control_value) def _scale_axis(self, value, model, scale): """Scale a raw axis around center using the stock app speed tier.""" if scale >= 1.0: return value return model.center_value + ((value - model.center_value) * scale) ================================================ FILE: backend/protocols/s2x_video_protocol.py ================================================ import ipaddress import socket import threading import queue from typing import Optional, List from models.s2x_video_model import S2xVideoModel from models.video_frame import VideoFrame from protocols.base_video_protocol import BaseVideoProtocolAdapter class S2xVideoProtocolAdapter(BaseVideoProtocolAdapter): """Transport + header parser for S2x JPEG stream""" SYNC_BYTES = b"\x40\x40" EOS_MARKER = b"\x23\x23" HEADER_LEN = 8 # S2x packets always use an 8-byte header LINK_DEAD_TIMEOUT = 8.0 # camera can stay silent for ~5 s on boot def __init__( self, drone_ip: str = "172.16.10.1", control_port: int = 8080, video_port: int = 8888, debug: bool = False, ): super().__init__(drone_ip, control_port, video_port) self.model = S2xVideoModel() self.local_ip = self._discover_local_ip() self._sock_lock = threading.Lock() self._sock = self.create_receiver_socket() self._keepalive_thread: Optional[threading.Thread] = None self._keepalive_stop: Optional[threading.Event] = None if debug: print(f"[s2x] Video socket on *:{self._sock.getsockname()[1]}") self._running = threading.Event() self._rx_thread: 
Optional[threading.Thread] = None self._frame_q: "queue.Queue[VideoFrame]" = queue.Queue(maxsize=2) self._pkt_lock = threading.Lock() self._pkt_buffer: List[bytes] = [] # ────────── BaseVideoProtocolAdapter ────────── # def send_start_command(self) -> None: payload = b"\x08" + ipaddress.IPv4Address(self.local_ip).packed with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock: sock.sendto(payload, (self.drone_ip, self.control_port)) print(f"[video] Start command sent ({payload.hex(' ')})") def start_keepalive(self, interval: float = 2.0) -> None: """Starts a thread to periodically send the start command.""" if self._keepalive_thread is None: self._keepalive_stop = threading.Event() self._keepalive_thread = threading.Thread( target=self._keepalive_loop, args=(interval, self._keepalive_stop), daemon=True, name="S2xVideoKeepAlive", ) self._keepalive_thread.start() def stop_keepalive(self) -> None: """Stops the keepalive thread.""" if self._keepalive_stop: self._keepalive_stop.set() if self._keepalive_thread: self._keepalive_thread.join(timeout=1.0) self._keepalive_thread = None def create_receiver_socket(self) -> socket.socket: """UDP socket bound to the drone's video port.""" sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sock.bind(("0.0.0.0", self.video_port)) sock.settimeout(1.0) return sock def get_receiver_socket(self) -> socket.socket: """Returns the main data socket, required by the new VideoReceiverService.""" with self._sock_lock: return self._sock def recv_from_socket(self, sock: socket.socket) -> Optional[bytes]: """Receives from the socket and handles timeouts.""" try: return sock.recv(4096) # Use a reasonable buffer size except socket.timeout: return None def handle_payload(self, payload: bytes) -> Optional[VideoFrame]: """ 1. Validate & strip the native 8-byte S2x header 2. Forward the slice payload to the model PL FPV's native `libvison_main.so` parser treats the header as: 0-1 sync `0x40 0x40`, 2-3 little-endian frame id, 4 total chunks, 5 chunk id, 6-7 little-endian datagram length. """ if len(payload) <= self.HEADER_LEN or payload[:2] != self.SYNC_BYTES: return None frame_id = int.from_bytes(payload[2:4], "little") total_chunks = payload[4] slice_id_raw = payload[5] declared_len = int.from_bytes(payload[6:8], "little") if total_chunks == 0 or total_chunks > 100: return None if slice_id_raw >= total_chunks: return None if declared_len != len(payload): return None # Native 872 parsing copies declared_len - 10 bytes from offset 8, # effectively dropping the 2-byte datagram trailer. 
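        # --- Minimal parsing sketch of the 8-byte slice header documented above, run
        # --- against a hypothetical 12-byte datagram (frame 7, 3 chunks, chunk 0,
        # --- declared length 12, two payload bytes, 0x23 0x23 trailer). Values are
        # --- illustrative only, not captured traffic.
        import struct
        _example_dgram = b"\x40\x40" + struct.pack("<HBBH", 7, 3, 0, 12) + b"\xaa\xbb" + b"\x23\x23"
        _fid, _total, _chunk, _dlen = struct.unpack_from("<HBBH", _example_dgram, 2)
        assert _example_dgram[:2] == b"\x40\x40" and _dlen == len(_example_dgram)
        _body = _example_dgram[8:_dlen - 2]             # strip header and EOS trailer
        assert (_fid, _total, _chunk, _body) == (7, 3, 0, b"\xaa\xbb")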
body_end = declared_len if declared_len >= self.HEADER_LEN + len(self.EOS_MARKER) and payload[ declared_len - len(self.EOS_MARKER) : declared_len ] == self.EOS_MARKER: body_end -= len(self.EOS_MARKER) body = payload[self.HEADER_LEN:body_end] return self.model.ingest_chunk( stream_id=frame_id, chunk_id=slice_id_raw, total_chunks=total_chunks, payload=body, ) def stop(self) -> None: """Shuts down the adapter, required by the new VideoReceiverService.""" print("[s2x] Stopping protocol adapter.") self.stop_keepalive() self._running.clear() if self._rx_thread and self._rx_thread.is_alive(): self._rx_thread.join(timeout=1.0) try: self._sock.close() except Exception: pass # ────────── helpers ────────── # def _keepalive_loop(self, interval: float, stop_event: threading.Event) -> None: while not stop_event.is_set(): self.send_start_command() stop_event.wait(interval) # ────────── Receiver thread API (service expects) ────────── # def start(self) -> None: if self._rx_thread and self._rx_thread.is_alive(): return self._running.set() self.start_keepalive(2.0) def _rx_loop() -> None: sock = self.get_receiver_socket() while self._running.is_set(): try: payload = self.recv_from_socket(sock) if not payload: continue with self._pkt_lock: self._pkt_buffer.append(payload) frame = self.handle_payload(payload) if frame is not None: try: self._frame_q.put(frame, timeout=0.2) except queue.Full: pass except OSError: break except Exception: continue self._rx_thread = threading.Thread(target=_rx_loop, daemon=True, name="S2xVideoRx") self._rx_thread.start() def is_running(self) -> bool: return self._running.is_set() and self._rx_thread is not None and self._rx_thread.is_alive() def get_frame(self, timeout: float = 1.0) -> Optional[VideoFrame]: try: return self._frame_q.get(timeout=timeout) except queue.Empty: return None def get_packets(self) -> List[bytes]: with self._pkt_lock: packets = self._pkt_buffer self._pkt_buffer = [] return packets def _discover_local_ip(self) -> str: s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) try: s.connect((self.drone_ip, 1)) return s.getsockname()[0] finally: s.close() ================================================ FILE: backend/protocols/wifi_cam_rc_protocol_adapter.py ================================================ """RC protocol adapter for WiFi_CAM native UDP drones.""" from __future__ import annotations import logging import socket from typing import Final, Literal from models.wifi_cam_rc import WifiCamRcModel from protocols.base_protocol_adapter import BaseProtocolAdapter logger = logging.getLogger(__name__) CommandMode = Literal["auto", "short", "extended"] class WifiCamRcProtocolAdapter(BaseProtocolAdapter): """Send raw WiFi_CAM stick packets to the native command socket.""" DEFAULT_DRONE_IP: Final = "192.168.4.153" DEFAULT_COMMAND_PORT: Final = 8090 START_MARKER: Final = 0x66 END_MARKER: Final = 0x99 EXTENDED_LENGTH: Final = 0x14 FLAG_TAKEOFF: Final = 0x01 FLAG_LAND: Final = 0x02 FLAG_EMERGENCY: Final = 0x04 FLAG_ROTATE: Final = 0x08 FLAG_HEADLESS: Final = 0x10 FLAG_CALIBRATE: Final = 0x80 EXT_FLAG_TAKEOFF_OR_LAND: Final = 0x01 EXT_FLAG_EMERGENCY: Final = 0x02 EXT_FLAG_CALIBRATE: Final = 0x04 EXT_FLAG_ROTATE: Final = 0x08 EXT_FLAG_HEADLESS: Final = 0x01 EXT_FLAG_ALTITUDE_HOLD: Final = 0x02 def __init__( self, drone_ip: str = DEFAULT_DRONE_IP, command_port: int = DEFAULT_COMMAND_PORT, *, command_mode: CommandMode = "auto", ) -> None: self.drone_ip = drone_ip self.command_port = command_port self.command_mode = self._normalize_mode(command_mode) self.camera_type = 0 
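        # --- Illustrative sketch (not part of this adapter): the short 8-byte frame for
        # --- centered sticks, showing the marker-escape rule and the XOR checksum over
        # --- bytes 1-5 that the packet builders below implement. The helper name and
        # --- values are examples only.
        def _example_short_frame() -> bytes:
            def escape(v: int) -> int:                  # 0x66 / 0x99 collide with the frame markers
                return (v + 1) & 0xFF if v in (0x66, 0x99) else v & 0xFF
            axes = [128, 128, 128, 128]                 # roll / pitch / throttle / yaw centered
            flags = 0x00                                # no one-shot flags set
            body = [escape(v) for v in axes] + [flags]
            chk = 0
            for v in body:
                chk ^= v
            return bytes([0x66] + body + [escape(chk), 0x99])
        assert _example_short_frame() == bytes([0x66, 128, 128, 128, 128, 0x00, 0x00, 0x99])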
self.debug_packets = False self.packet_counter = 0 self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) self.sock.bind(("", 0)) def build_control_packet(self, drone_model: WifiCamRcModel) -> bytes: if self._uses_extended_mode(): packet = self._build_extended_packet(drone_model) else: packet = self._build_short_packet(drone_model) self._clear_one_shot_flags(drone_model) return packet def send_control_packet(self, packet: bytes) -> None: try: self.sock.sendto(packet, (self.drone_ip, self.command_port)) except OSError: return if self.debug_packets: self.packet_counter += 1 logger.debug("[wifi-cam-rc] #%05d %s", self.packet_counter, packet.hex(" ")) def set_camera_type(self, camera_type: int) -> None: self.camera_type = int(camera_type) if self.command_mode == "auto": logger.info( "[wifi-cam-rc] camera type %s selected %s command mode", camera_type, "extended" if self._uses_extended_mode() else "short", ) def stop(self) -> None: try: self.sock.close() except OSError: pass def toggle_debug(self) -> bool: self.debug_packets = not self.debug_packets logger.info("[wifi-cam-rc] debug %s", "ON" if self.debug_packets else "OFF") return self.debug_packets def _build_short_packet(self, model: WifiCamRcModel) -> bytes: packet = bytearray(8) packet[0] = self.START_MARKER packet[1] = self._right_data(self._axis(model.roll)) packet[2] = self._right_data(self._axis(model.pitch)) packet[3] = self._right_data(self._axis(model.throttle)) packet[4] = self._right_data(self._axis(model.yaw)) packet[5] = self._build_short_flags(model) packet[6] = self._right_data(self._xor(packet[1:6])) packet[7] = self.END_MARKER return bytes(packet) def _build_extended_packet(self, model: WifiCamRcModel) -> bytes: packet = bytearray(20) packet[0] = self.START_MARKER packet[1] = self.EXTENDED_LENGTH packet[2] = self._right_data(self._axis(model.roll)) packet[3] = self._right_data(self._axis(model.pitch)) packet[4] = self._right_data(self._axis(model.throttle)) packet[5] = self._right_data(self._axis(model.yaw)) flags1, flags2 = self._build_extended_flags(model) packet[6] = flags1 packet[7] = flags2 packet[18] = self._right_data(self._xor(packet[2:18])) packet[19] = self.END_MARKER return bytes(packet) def _build_short_flags(self, model: WifiCamRcModel) -> int: flags = 0 if model.takeoff_flag: flags |= self.FLAG_TAKEOFF if model.land_flag: flags |= self.FLAG_LAND if model.stop_flag: flags |= self.FLAG_EMERGENCY if model.flip_flag: flags |= self.FLAG_ROTATE if model.headless_flag: flags |= self.FLAG_HEADLESS if model.calibration_flag: flags |= self.FLAG_CALIBRATE return flags & 0xFF def _build_extended_flags(self, model: WifiCamRcModel) -> tuple[int, int]: flags1 = 0 flags2 = 0 # The decompiled app maps both one-key fly and land to the same bit. 
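        # --- Worked example (hypothetical state): a one-key takeoff issued while
        # --- headless mode and altitude hold are active maps onto the two extended
        # --- flag bytes as 0x01 and 0x03 under the bit assignments above.
        _example_flags1 = 0x01                          # EXT_FLAG_TAKEOFF_OR_LAND
        _example_flags2 = 0x01 | 0x02                   # EXT_FLAG_HEADLESS | EXT_FLAG_ALTITUDE_HOLD
        assert (_example_flags1, _example_flags2) == (0x01, 0x03)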
if model.takeoff_flag or model.land_flag: flags1 |= self.EXT_FLAG_TAKEOFF_OR_LAND if model.stop_flag: flags1 |= self.EXT_FLAG_EMERGENCY if model.calibration_flag: flags1 |= self.EXT_FLAG_CALIBRATE if model.flip_flag: flags1 |= self.EXT_FLAG_ROTATE if model.headless_flag: flags2 |= self.EXT_FLAG_HEADLESS if model.altitude_hold_flag: flags2 |= self.EXT_FLAG_ALTITUDE_HOLD return flags1 & 0xFF, flags2 & 0xFF def _clear_one_shot_flags(self, model: WifiCamRcModel) -> None: model.takeoff_flag = False model.land_flag = False model.stop_flag = False model.flip_flag = False model.calibration_flag = False def _uses_extended_mode(self) -> bool: if self.command_mode == "extended": return True if self.command_mode == "short": return False return self.camera_type == 2 def _normalize_mode(self, command_mode: str) -> CommandMode: mode = (command_mode or "auto").strip().lower() if mode not in {"auto", "short", "extended"}: logger.warning("[wifi-cam-rc] Unknown command mode %r; using auto.", command_mode) return "auto" return mode # type: ignore[return-value] def _axis(self, value: float) -> int: return max(0, min(255, int(value))) & 0xFF def _right_data(self, value: int) -> int: value &= 0xFF if value in (self.START_MARKER, self.END_MARKER): value = (value + 1) & 0xFF return value def _xor(self, values: bytes | bytearray) -> int: checksum = 0 for value in values: checksum ^= value return checksum & 0xFF ================================================ FILE: backend/protocols/wifi_cam_video_protocol.py ================================================ """Video protocol adapter for WiFi_CAM native UDP drones.""" from __future__ import annotations import logging import queue import socket import threading from typing import Final, Optional from models.video_frame import VideoFrame from protocols.base_video_protocol import BaseVideoProtocolAdapter logger = logging.getLogger(__name__) class WifiCamVideoProtocolAdapter(BaseVideoProtocolAdapter): """Start the WiFi_CAM MJPEG stream and reassemble native JPEG chunks.""" DEFAULT_DRONE_IP: Final = "192.168.4.153" DEFAULT_COMMAND_PORT: Final = 8090 DEFAULT_VIDEO_PORT: Final = 8080 START_STREAM: Final = b"\x42\x76" STOP_STREAM: Final = b"\x42\x77" ROTATE: Final = b"\x42\x78" CAMERA_SWITCH: Final = b"\x42\x79" NATIVE_PACKET_SIZE: Final = 0x5C0 HEADER_SIZE: Final = 8 CHUNK_SIZE: Final = 0x5B8 SOI: Final = b"\xff\xd8" EOI: Final = b"\xff\xd9" def __init__( self, drone_ip: str = DEFAULT_DRONE_IP, control_port: int = DEFAULT_COMMAND_PORT, video_port: int = DEFAULT_VIDEO_PORT, *, debug: bool = False, ) -> None: super().__init__(drone_ip, control_port, video_port) self.debug = debug self._running = threading.Event() self._rx_thread: Optional[threading.Thread] = None self._sock: Optional[socket.socket] = None self._frame_q: "queue.Queue[VideoFrame]" = queue.Queue(maxsize=2) self._pkt_lock = threading.Lock() self._pkt_buffer: list[bytes] = [] self._rc_adapter = None self._current_frame_id: Optional[int] = None self._current_chunk_index = 0 self._frame_buffer = bytearray() self._frame_counter = 0 self.camera_type = 0 def set_rc_adapter(self, rc_adapter) -> None: self._rc_adapter = rc_adapter if self.camera_type and hasattr(rc_adapter, "set_camera_type"): rc_adapter.set_camera_type(self.camera_type) def start(self) -> None: if self.is_running(): return self._sock = self.create_receiver_socket() self._running.set() self.send_start_command() self._rx_thread = threading.Thread( target=self._rx_loop, daemon=True, name="WifiCamVideoRx", ) self._rx_thread.start() 
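        # --- Worked example (hypothetical numbers) for the chunk arithmetic used by
        # --- _finish_frame further below: with CHUNK_SIZE = 0x5B8 (1464 bytes), an EOI
        # --- marker found at offset 100 inside the chunk with index 3 yields a JPEG of
        # --- 3 * 0x5B8 + 100 + 2 = 4494 bytes copied out of the reassembly buffer.
        assert 3 * 0x5B8 + 100 + 2 == 4494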
logger.info("[wifi-cam-video] listening on local UDP %s", self._sock.getsockname()[1]) def stop(self) -> None: self._running.clear() self._send_command(self.STOP_STREAM) sock = self._sock if sock: try: sock.close() except OSError: pass if self._rx_thread: self._rx_thread.join(timeout=1.0) self._rx_thread = None self._sock = None def is_running(self) -> bool: return self._running.is_set() and self._rx_thread is not None and self._rx_thread.is_alive() def get_frame(self, timeout: float = 1.0) -> Optional[VideoFrame]: try: return self._frame_q.get(timeout=timeout) except queue.Empty: return None def get_packets(self) -> list[bytes]: with self._pkt_lock: packets = self._pkt_buffer self._pkt_buffer = [] return packets def send_start_command(self) -> None: self._send_command(self.START_STREAM) def create_receiver_socket(self) -> socket.socket: sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) # The Android app does not bind to 8080; it receives replies on the # same ephemeral local UDP port used to send 42 76 to the drone. sock.bind(("", 0)) sock.settimeout(0.5) return sock def handle_payload(self, payload: bytes) -> Optional[VideoFrame]: camera_type = self._parse_camera_type(payload) if camera_type is not None: self._set_camera_type(camera_type) return None if len(payload) < self.HEADER_SIZE: return None frame_id = payload[0] final_marker = payload[1] total_chunks = payload[2] resolution = payload[3] retain = payload[7] chunk = payload[self.HEADER_SIZE :] if self._current_frame_id != frame_id: self._current_frame_id = frame_id self._current_chunk_index = 0 self._frame_buffer = bytearray() chunk_index = self._current_chunk_index self._frame_buffer.extend(chunk) frame = None is_final = final_marker == 1 and total_chunks > 0 and chunk_index + 1 == total_chunks if is_final: frame = self._finish_frame(chunk, chunk_index, resolution, retain) self._current_chunk_index = 0 self._current_frame_id = None self._frame_buffer = bytearray() else: self._current_chunk_index += 1 return frame def switch_camera(self) -> None: self._send_command(self.CAMERA_SWITCH) def rotate(self) -> None: self._send_command(self.ROTATE) def _rx_loop(self) -> None: sock = self._sock if sock is None: return while self._running.is_set(): try: packet, _ = sock.recvfrom(self.NATIVE_PACKET_SIZE) except socket.timeout: continue except OSError: break with self._pkt_lock: self._pkt_buffer.append(packet) frame = self.handle_payload(packet) if frame is None: continue try: self._frame_q.put(frame, timeout=0.1) except queue.Full: try: self._frame_q.get_nowait() self._frame_q.put(frame, timeout=0.1) except (queue.Empty, queue.Full): pass def _send_command(self, command: bytes) -> None: sock = self._sock if sock is None: return try: sock.sendto(command, (self.drone_ip, self.video_port)) except OSError: pass def _parse_camera_type(self, payload: bytes) -> Optional[int]: if len(payload) != 8 or payload[0] != 0x55 or payload[7] != 0x99: return None if payload == b"\x55\x00\x01\x00\x00\x00\x01\x99": return 1 if payload == b"\x55\x00\x02\x00\x00\x00\x02\x99": return 2 return 0 def _set_camera_type(self, camera_type: int) -> None: if camera_type == self.camera_type: return self.camera_type = camera_type logger.info("[wifi-cam-video] detected camera type %s", camera_type) if self._rc_adapter is not None and hasattr(self._rc_adapter, "set_camera_type"): self._rc_adapter.set_camera_type(camera_type) def _finish_frame( self, final_chunk: bytes, final_chunk_index: int, resolution: int, 
retain: int, ) -> Optional[VideoFrame]: eoi = final_chunk.find(self.EOI) if eoi < 0: return None frame_len = final_chunk_index * self.CHUNK_SIZE + eoi + len(self.EOI) data = bytes(self._frame_buffer[:frame_len]) if not data.startswith(self.SOI) or not data.endswith(self.EOI): if self.debug: logger.debug("[wifi-cam-video] rejected malformed JPEG frame") return None self._frame_counter = (self._frame_counter + 1) & 0xFFFF frame = VideoFrame(self._frame_counter, data, format_type="jpeg") frame.resolution = resolution # type: ignore[attr-defined] frame.retain = retain # type: ignore[attr-defined] return frame ================================================ FILE: backend/protocols/wifi_uav_rc_protocol_adapter.py ================================================ import socket from typing import Final, List, Optional from protocols.base_protocol_adapter import BaseProtocolAdapter from models.wifi_uav_rc import WifiUavRcModel from utils.wifi_uav_variants import get_wifi_uav_capabilities class WifiUavRcProtocolAdapter(BaseProtocolAdapter): """ Builds and transmits control packets for the WiFi-UAV family. Packet layout derived from reverse-engineered Android app traces. Turbodrone uses the extended WiFi-UAV command layout (`66 14 ...`). In that layout the app maps takeoff and land onto the same one-key action bit, while emergency stop is a separate bit. """ DEFAULT_DRONE_IP: Final = "192.168.169.1" DEFAULT_PORT: Final = 8800 # ────────────────────────────────────────────────────────── # Static parts (taken 1:1 from packet dumps) # ────────────────────────────────────────────────────────── _HEADER = bytes([0xef, 0x02, 0x7c, 0x00, 0x02, 0x02, 0x00, 0x01, 0x02, 0x00, 0x00, 0x00]) _COUNTER1_SUFFIX = bytes([0x00, 0x00, 0x14, 0x00, 0x66, 0x14]) _CONTROL_SUFFIX = bytes(10) # 10 × 0x00 _CHECKSUM_SUFFIX = bytes([0x99]) + bytes(44) + bytes([0x32, 0x4b, 0x14, 0x2d, 0x00, 0x00]) _COUNTER2_SUFFIX = bytes([ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff ]) _COUNTER3_SUFFIX = bytes([ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00 ]) FLAG_TAKEOFF_OR_LAND = 0x01 FLAG_STOP = 0x02 FLAG_CALIBRATION = 0x04 FLAG_FLIP = 0x08 _SPEED_SCALES: Final[tuple[float, ...]] = (0.30, 0.60, 1.00, 0.25) # ------------------------------------------------------------------ # def __init__(self, drone_ip: str = DEFAULT_DRONE_IP, control_port: int = DEFAULT_PORT, shared_sock: Optional[socket.socket] = None, variant: str = "auto") -> None: self.drone_ip = drone_ip self.control_port = control_port self.variant = (variant or "auto").strip().lower() self.capabilities = get_wifi_uav_capabilities(self.variant) self._target_ports = self._resolve_target_ports(control_port) self.sock = shared_sock or socket.socket(socket.AF_INET, socket.SOCK_DGRAM) self._is_shared_sock = shared_sock is not None self.debug_packets = False self._pkt_counter = 0 # rolling 16-bit counters found in the original protocol self._ctr1 = 0x0000 self._ctr2 = 0x0001 self._ctr3 = 0x0002 def set_socket(self, sock: socket.socket) -> None: """Use an externally managed socket instead of the internal one.""" # Don't close the old socket if it was created here if self.sock and not self._is_shared_sock: self.sock.close() self.sock = sock self._is_shared_sock = True def stop(self) -> None: """Close the socket if it's not shared.""" if self.sock and not self._is_shared_sock: try: self.sock.close() except Exception: pass # Ignore errors on shutdown # 
------------------------------------------------------------------ # # BaseProtocolAdapter # ------------------------------------------------------------------ # def build_control_packet(self, drone_model: WifiUavRcModel) -> bytes: # type: ignore[override] # ----- counters ------------------------------------------------- c1 = self._ctr1.to_bytes(2, "little") c2 = self._ctr2.to_bytes(2, "little") c3 = self._ctr3.to_bytes(2, "little") # advance for next call self._ctr1 = (self._ctr1 + 1) & 0xFFFF self._ctr2 = (self._ctr2 + 1) & 0xFFFF self._ctr3 = (self._ctr3 + 1) & 0xFFFF # ----- command / headless -------------------------------------- command = 0x00 if drone_model.takeoff_flag or drone_model.land_flag: command |= self.FLAG_TAKEOFF_OR_LAND if drone_model.land_flag: self._last_command_intent = "LAND" elif drone_model.takeoff_flag: self._last_command_intent = "TAKEOFF" elif drone_model.stop_flag: self._last_command_intent = "STOP" elif drone_model.calibration_flag: self._last_command_intent = "CALIBRATE" else: self._last_command_intent = None if drone_model.stop_flag: command |= self.FLAG_STOP if drone_model.calibration_flag: command |= self.FLAG_CALIBRATION if getattr(drone_model, "flip_flag", False): command |= self.FLAG_FLIP camera_tilt_state = max(0, min(2, int(getattr(drone_model, "camera_tilt_state", 0)))) command |= (camera_tilt_state & 0x03) << 6 headless = 0x03 if drone_model.headless_flag else 0x02 # ----- controls ------------------------------------------------- speed_index = max(0, min(3, int(getattr(drone_model, "speed_index", 2)))) controls: List[int] = [ self._apply_speed_scale(drone_model.roll, speed_index), self._apply_speed_scale(drone_model.pitch, speed_index), self._apply_speed_scale(drone_model.throttle, speed_index), self._apply_speed_scale(drone_model.yaw, speed_index), command & 0xFF, headless & 0xFF, ] # Stash for debug printing at send time (roll, pitch, throttle, yaw) try: self._last_controls = tuple(controls[:4]) self._last_command = command & 0xFF self._last_headless = headless & 0xFF except Exception: pass checksum = 0 for b in controls: checksum ^= b # ----- assemble ------------------------------------------------- pkt = bytearray() pkt += self._HEADER pkt += c1 + self._COUNTER1_SUFFIX pkt += bytes(controls) pkt += self._CONTROL_SUFFIX pkt.append(checksum) pkt += self._CHECKSUM_SUFFIX pkt += c2 + self._COUNTER2_SUFFIX pkt += c3 + self._COUNTER3_SUFFIX # one-shot flags → clear drone_model.takeoff_flag = False drone_model.land_flag = False drone_model.stop_flag = False drone_model.calibration_flag = False drone_model.flip_flag = False drone_model.camera_tilt_state = 0 return bytes(pkt) def send_control_packet(self, packet: bytes): # type: ignore[override] """ Transmit one RC packet. If the video layer has just torn the shared socket down, the send will raise OSError(EBADF). Swallow it and wait until the receiver hands us the fresh socket. """ try: for port in self._target_ports: self.sock.sendto(packet, (self.drone_ip, port)) except OSError: # Socket was closed during video-reconnect window. # Wait for VideoReceiverService to call set_socket(…) with the # new descriptor. Until then we just skip transmitting. 
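            # --- Worked example (illustrative inputs) for the per-axis speed scaling that
            # --- build_control_packet applies above: each axis is scaled around the 128
            # --- midpoint, so a roll input of 200 becomes round(128 + (200 - 128) * 0.30) = 150
            # --- at speed index 0 and stays 200 at full scale (index 2).
            assert round(128 + (200 - 128) * 0.30) == 150
            assert round(128 + (200 - 128) * 1.00) == 200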
return if self.debug_packets: self._pkt_counter += 1 print(f"[wifi-uav] #{self._pkt_counter:05d} " f"{' '.join(f'{b:02x}' for b in packet[:40])} …") try: r, p, t, y = getattr(self, "_last_controls", (None, None, None, None)) if None not in (r, p, t, y): print(f"[wifi-uav] controls R:{r} P:{p} T:{t} Y:{y}") command = getattr(self, "_last_command", None) if command is not None: flags = [] if command & self.FLAG_TAKEOFF_OR_LAND: flags.append(getattr(self, "_last_command_intent", None) or "TAKEOFF_OR_LAND") if command & self.FLAG_STOP: flags.append("STOP") if command & self.FLAG_CALIBRATION: flags.append("CALIBRATE") if command & self.FLAG_FLIP: flags.append("FLIP") tilt = (command >> 6) & 0x03 if tilt: flags.append(f"TILT:{tilt}") if flags: print(f"[wifi-uav] command flags: {', '.join(flags)}") except Exception: pass def toggle_debug(self) -> bool: # type: ignore[override] self.debug_packets = not self.debug_packets state = "ON" if self.debug_packets else "OFF" print(f"[wifi-uav] debug {state}") return self.debug_packets def _resolve_target_ports(self, control_port: int) -> tuple[int, ...]: if self.capabilities.transport == "uav_dual_port": return (control_port, control_port + 1) return (control_port,) def _apply_speed_scale(self, value: float, speed_index: int) -> int: """Scale a raw model axis around its center using the app's speed table.""" scale = self._SPEED_SCALES[speed_index] centered = float(value) - 128.0 return max(0, min(255, round(128.0 + centered * scale))) ================================================ FILE: backend/protocols/wifi_uav_video_protocol.py ================================================ import logging import socket import queue import threading import time from typing import Optional from models.video_frame import VideoFrame from protocols.base_video_protocol import BaseVideoProtocolAdapter from utils.wifi_uav_ack_state import WifiUavAckState from utils.wifi_uav_packets import ( START_STREAM, build_native_ack_packet, ) from utils.wifi_uav_jpeg import generate_jpeg_headers, EOI from utils.wifi_uav_variants import get_wifi_uav_capabilities logger = logging.getLogger(__name__) class WifiUavVideoProtocolAdapter(BaseVideoProtocolAdapter): """ Protocol adapter for the inexpensive "WiFi UAV" drones. Differences to the S2x family: • A single duplex UDP socket is used for tx/rx. • The drone stops streaming unless it receives two custom *frame-request* packets (REQUEST_A / REQUEST_B) for every JPEG. • Each UDP datagram has a 56-byte proprietary header that must be stripped; the JPEG SOI/APPx headers are completely absent and are generated on the client. 
""" DEFAULT_DRONE_IP = "192.168.169.1" REQUEST_A_OFFSETS = (12, 13) # two-byte LE frame counter REQUEST_B_OFFSETS = (12, 13, 88, 89, 107, 108) FRAME_TIMEOUT = 0.08 # 80 ms without a full frame → retry sooner MAX_RETRIES = 3 # allow one more retry for first-frame reliability WATCHDOG_SLEEP = 0.05 # 50 ms between watchdog checks INITIAL_STREAM_TIMEOUT = 8.0 # give warmup some time before forcing restart LINK_STALL_TIMEOUT = 4.0 # no packets for this long -> recreate adapter # ------------------------------------------------------------------ # # life-cycle helpers # ------------------------------------------------------------------ # def __init__( self, drone_ip: str = DEFAULT_DRONE_IP, control_port: int = 8800, video_port: int = 8800, jpeg_width: int = 640, jpeg_height: int = 360, components: int = 3, *, variant: str = "auto", debug: bool = False, ): super().__init__(drone_ip, control_port, video_port) self.variant = (variant or "auto").strip().lower() self.capabilities = get_wifi_uav_capabilities(self.variant) self.debug = debug or logger.isEnabledFor(logging.DEBUG) self._dbg = logger.debug if self.debug else (lambda *a, **k: None) self._sock_lock = threading.Lock() self._pkt_lock = threading.Lock() self._pkt_buffer: list[bytes] = [] self._warmup_stop = threading.Event() self._warmup_thread: Optional[threading.Thread] = None self._rx_thread: Optional[threading.Thread] = None self._target_ports = self._resolve_target_ports(control_port) self._sock = self._create_duplex_socket() # Pre-built JPEG header (SOI + quant tables + SOF0 + …) self._jpeg_header = generate_jpeg_headers(jpeg_width, jpeg_height, components) # State for the current frame being assembled # If I send 0 it sends 1, starting with 1 is more reliable. self._current_fid: int = 1 self._ack_state = WifiUavAckState() self._last_req_ts = time.time() self._last_rx_ts = time.time() self._stream_started = False self._started_once = False # Stats self.frames_ok = 0 self.frames_dropped = 0 self._dbg(f"[init] adapter ready (control:{control_port} video:{video_port})") if self.capabilities.transport == "uav_dual_port": logger.info("[wifi-uav] UAV/FLOW variant selected; probing UDP ports %s", self._target_ports) # Watchdog for per-frame timeouts self._running = True self._watchdog = threading.Thread( target=self._watchdog_loop, daemon=True, name="FrameWatchdog" ) self._watchdog.start() self._retry_cnt = 0 # retries for *current* frame self._had_retry = False # did we already retry this frame? 
self.retry_attempts = 0 # global counter self.retry_successes = 0 # global counter self._dbg(f"Main UDP socket created, listening on *:{self._sock.getsockname()[1]}") # ------------------------------------------------------------------ # # disable keep-alive – one start command is enough for this drone # ------------------------------------------------------------------ # def start_keepalive(self, interval: float = 1.0) -> None: # type: ignore[override] return def stop_keepalive(self) -> None: # type: ignore[override] return # ------------------------------------------------------------------ # # Base-class hooks # ------------------------------------------------------------------ # def create_receiver_socket(self) -> socket.socket: return self._sock def send_start_command(self) -> None: for port in self._target_ports: self._sock.sendto(START_STREAM, (self.drone_ip, port)) self._dbg("[wifi-uav] START_STREAM sent to %s", self._target_ports) def handle_payload(self, payload: bytes) -> Optional[VideoFrame]: """ Collect slices belonging to the requested frame. Native packet layout: byte 0 : 0x93 byte 1 : message type; 0x01 means JPEG fragment bytes 2..3 : total packet length bytes 8..15 : image sequence bytes 32..35 : fragment index bytes 36..39 : fragment count bytes 56+ : JPEG payload """ parsed = self._parse_fragment_header(payload) if parsed is None: return None frame_id, frag_id, fragment_total, frame_body_len, quality, jpeg_payload = parsed self._stream_started = True self._last_rx_ts = time.time() self._retry_cnt = 0 # resynchronise if the drone skipped ahead if frame_id != self._current_fid: self.frames_dropped += 1 self._dbg(f"⚠ skip expected {self._current_fid:04x} " f"got {frame_id:04x}") self._current_fid = frame_id slot = self._ack_state.ingest_fragment( frame_id, frag_id, fragment_total, jpeg_payload, frame_body_len=frame_body_len, quality=quality, ) self._dbg(f"← FID:{frame_id:04x} FRAG:{frag_id:04x}") if slot is None: received = len(self._ack_state._slot_for_seq(frame_id).received_fragments) if fragment_total > 0: missing = fragment_total - received self._dbg( f"⚠ incomplete FID:{frame_id:04x} " f"missing {missing} fragment(s); waiting for retry" ) else: self._dbg( f"⚠ incomplete FID:{frame_id:04x} " f"received {received} fragment(s); waiting for tail" ) return None # Only emit a frame once every fragment up to the announced tail exists. jpeg = self._jpeg_header + slot.ordered_payload() + EOI frame = VideoFrame(frame_id=frame_id, data=jpeg) self.frames_ok += 1 # ── was this frame finished thanks to a retry? ─────────── if self._had_retry: self.retry_successes += 1 self._dbg(f"✓ recovery! 
{frame_id:04x} " f"SUC:{self.retry_successes} " f"ATT:{self.retry_attempts}") self._had_retry = False # ────────────────────────────────────────────────────────────── self._dbg(f"✓ {frame_id:04x} ({slot.fragment_total} frags) " f"OK:{self.frames_ok} DROP:{self.frames_dropped}") # prepare next iteration self._ack_state.mark_delivered(frame_id) self._send_frame_request(frame_id) # ask for next self._current_fid = frame_id + 1 self._last_rx_ts = self._last_req_ts = time.time() return frame # ------------------------------------------------------------------ # # helpers # ------------------------------------------------------------------ # def _warmup_loop(self) -> None: """During warmup, periodically resend START_STREAM + frame request until the first frame is observed, then exit.""" while getattr(self, "_first_frame", False) and not self._warmup_stop.is_set(): try: self.send_start_command() # Ask for the previous frame id; the drone will respond with next self._send_frame_request(max(0, self._current_fid - 1)) except Exception: pass self._warmup_stop.wait(0.2) def _send_frame_request(self, frame_id: int) -> None: command_seq = frame_id & 0xFFFFFFFF rqst_a = build_native_ack_packet(command_seq, []) rqst_b = build_native_ack_packet(command_seq, self._build_ack_slots(frame_id)) for port in self._target_ports: self._sock.sendto(rqst_a, (self.drone_ip, port)) self._sock.sendto(rqst_b, (self.drone_ip, port)) self._last_req_ts = time.time() self._dbg("→ REQ %04x to %s", frame_id, self._target_ports) def _build_ack_slots(self, seq: int) -> list[bytes]: return self._ack_state.build_ack_slots(seq) def _parse_fragment_header(self, payload: bytes) -> Optional[tuple[int, int, int, int, int, bytes]]: if len(payload) < 56 or payload[0] != 0x93 or payload[1] != 0x01: return None declared_len = int.from_bytes(payload[2:4], "little") native_layout = declared_len == len(payload) if native_layout: frame_id = int.from_bytes(payload[8:16], "little") frag_id = int.from_bytes(payload[32:36], "little") fragment_total = int.from_bytes(payload[36:40], "little") frame_body_len = int.from_bytes(payload[40:44], "little") quality = payload[48] if fragment_total > 0 and frag_id < fragment_total: return frame_id, frag_id, fragment_total, frame_body_len, quality, payload[56:] # Compatibility fallback for older captures/comments that only used # 16-bit counters and inferred the last fragment from packet length. frame_id = int.from_bytes(payload[16:18], "little") frag_id = int.from_bytes(payload[32:34], "little") # Legacy packets only reveal the total when the tail fragment arrives. fragment_total = frag_id + 1 if payload[2] != 0x38 else 0 return frame_id, frag_id, fragment_total, 0, 0, payload[56:] def _resolve_target_ports(self, control_port: int) -> tuple[int, ...]: """ The native UAVSDK starts the legacy backend on 8800 and the BL618 backend on 8801. For the explicit UAV/FLOW variant, probe both just as nativeStart() does. The FLD/legacy path keeps the single-port behavior. 
""" if self.capabilities.transport == "uav_dual_port": return (control_port, control_port + 1) return (control_port,) def _create_duplex_socket(self) -> socket.socket: sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sock.bind(("", 0)) # let OS choose a free local port sock.settimeout(1.0) self._dbg(f"Main UDP socket created, listening on *:{sock.getsockname()[1]}") return sock def get_receiver_socket(self) -> socket.socket: """Returns the main socket for the receiver thread to use.""" with self._sock_lock: return self._sock def set_rc_adapter(self, rc_adapter) -> None: """Provide the RC adapter with our shared UDP socket.""" try: rc_adapter.set_socket(self._sock) self._dbg("[wifi-uav] RC adapter socket shared") except Exception: # If the RC adapter is not ready or doesn't support socket injection, # ignore and continue – the receiver loop will still function. pass # ------------------------------------------------------------------ # # Receiver thread API expected by VideoReceiverService # ------------------------------------------------------------------ # def start(self) -> None: if hasattr(self, "_rx_thread") and self._rx_thread and self._rx_thread.is_alive(): return # Small frame buffer; upstream will drop if slow self._frame_q: "queue.Queue[VideoFrame]" = queue.Queue(maxsize=2) self._last_rx_ts = time.time() with self._pkt_lock: self._pkt_buffer = [] def _rx_loop() -> None: sock = self.get_receiver_socket() while self._running: try: payload = self.recv_from_socket(sock) if not payload: continue # Collect raw packet bytes for optional dumping with self._pkt_lock: self._pkt_buffer.append(payload) # Try to assemble a frame frame = self.handle_payload(payload) if frame is not None: try: self._frame_q.put(frame, timeout=0.2) except queue.Full: # Drop frame if consumer is slow pass except OSError: # Socket likely closed during stop(); exit loop break except Exception as e: self._dbg(f"[wifi-uav] rx error: {e}") continue self._rx_thread = threading.Thread(target=_rx_loop, daemon=True, name="WifiUavVideoRx") self._rx_thread.start() if not self._started_once: self._started_once = True # Kick off the stream only after the receiver is already draining # the UDP socket. K417 responds very quickly and can otherwise dump # the first frame burst before the RX loop is active. self.send_start_command() self._send_frame_request(0) self._first_frame = True self._warmup_thread = threading.Thread( target=self._warmup_loop, daemon=True, name="Warmup", ) self._warmup_thread.start() def is_running(self) -> bool: rx_thread = getattr(self, "_rx_thread", None) if not (self._running and rx_thread and rx_thread.is_alive()): return False now = time.time() if self._stream_started: return (now - self._last_rx_ts) < self.LINK_STALL_TIMEOUT return (now - self._last_rx_ts) < self.INITIAL_STREAM_TIMEOUT def get_frame(self, timeout: float = 1.0) -> Optional[VideoFrame]: try: frame = self._frame_q.get(timeout=timeout) # mark warmup complete on first delivered frame if getattr(self, "_first_frame", False): self._first_frame = False return frame except queue.Empty: return None def get_packets(self) -> list[bytes]: with self._pkt_lock: packets = self._pkt_buffer self._pkt_buffer = [] return packets # ------------------------------------------------------------------ # # watchdog # ------------------------------------------------------------------ # def _watchdog_loop(self) -> None: """ Runs in a daemon thread. 
If the current frame doesn't finish within FRAME_TIMEOUT seconds, resend the request for that frame. Link-level reconnection is handled by the VideoReceiverService. """ self._dbg("Watchdog started for per-frame timeouts.") while self._running: time.sleep(self.WATCHDOG_SLEEP) now = time.time() if self._stream_started and now - self._last_rx_ts >= self.LINK_STALL_TIMEOUT: self._dbg("[wifi-uav] link stalled; forcing adapter restart") self._running = False self._first_frame = False self._warmup_stop.set() try: self._sock.close() except OSError: pass break if now - self._last_req_ts < self.FRAME_TIMEOUT: continue # still waiting → nothing to do # ---------------------------------------------------------- # retry or drop? # ---------------------------------------------------------- if self._retry_cnt < self.MAX_RETRIES: self._dbg(f"⚠ timeout FID {self._current_fid:04x} – retry " f"({self._retry_cnt +1}/{self.MAX_RETRIES})") self._send_frame_request(max(0, self._current_fid - 1)) self._retry_cnt += 1 self.retry_attempts += 1 self._had_retry = True else: self.frames_dropped += 1 self._dbg(f"✗ drop FID {self._current_fid:04x} " f"(after {self._retry_cnt} retries) " f"OK:{self.frames_ok} DROP:{self.frames_dropped}") self._ack_state.mark_dropped(self._current_fid) self._retry_cnt = 0 self._current_fid += 1 self._send_frame_request(max(0, self._current_fid - 1)) self._had_retry = False def stop(self) -> None: """Gracefully shut down the adapter and its threads.""" self._dbg(f"Stopping protocol adapter instance...") self._running = False self._first_frame = False self._warmup_stop.set() for thread in (self._warmup_thread, self._watchdog, self._rx_thread): try: if thread and thread.is_alive(): thread.join(timeout=0.5) except Exception as e: self._dbg(f"Ignoring thread shutdown error: {e}") try: self._sock.close() except Exception as e: self._dbg(f"Ignoring socket shutdown error: {e}") self._dbg( f"[stats] ok:{self.frames_ok} dropped:{self.frames_dropped} " f"retry_att:{self.retry_attempts} retry_suc:{self.retry_successes}" ) ================================================ FILE: backend/receive_video.py ================================================ import argparse import ipaddress import queue import socket import threading import time import os from datetime import datetime import cv2 import numpy as np ############################################################################### # Constants ############################################################################### DRONE_IP = "172.16.10.1" CONTROL_PORT = 8080 # where we send the 5-byte "start" cmd VIDEO_PORT = 8888 # where the drone sends JPEG slices SOI_MARKER = b"\xFF\xD8" EOI_MARKER = b"\xFF\xD9" SYNC_BYTES = b"\x40\x40" # --------------------------------------------------------------------------- # # The drone's header (derived from packet dumps) # # 0 1 2 3 4 5 6 7 ← byte index # +---+---+---+---+---+---+---+---+ # |40|40|FID| 2 |22|SID| 78 | 05 <- 0x40 0x40 | frame id | slice id | 78 05 (usually present) | # +---+---+---+---+---+---+---+---+ # # • FID … increments once per JPEG frame (0-255, wraps) # • SID bits 0-3 … slice number inside that frame (1,2,3…) # SID bit 4 … LAST-SLICE flag (1 == this is the final fragment) # SID bits 5-7 … always 0 # • bytes 6 & 7 … static header piece (usually starts with 0x78 0x05 ) # • bytes 8+ … payload # --------------------------------------------------------------------------- # HEADER_LEN = 8 ############################################################################### # Small helper to discover 
the IP address of the interface that can reach # 172.16.10.1 (works on Windows, macOS, Linux) ############################################################################### def discover_local_ip(remote_ip=DRONE_IP): s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) try: # We never actually send anything – connect() is enough for the OS to # choose the right outgoing interface. s.connect((remote_ip, 1)) return s.getsockname()[0] finally: s.close() ############################################################################### # 1. Control channel – send the 5-byte "start video" command ############################################################################### def send_start_command(drone_ip: str, my_ip: str): """ Build and send the 5-byte payload: 0x08 """ payload = b"\x08" + ipaddress.IPv4Address(my_ip).packed with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock: sock.sendto(payload, (drone_ip, CONTROL_PORT)) print(f"[control] start-cmd sent ({payload.hex(' ')})") class ControlKeepAlive(threading.Thread): """ Periodically re-send the start-video command so the drone keeps streaming. """ def __init__(self, drone_ip, my_ip, interval=1.0): super().__init__(daemon=True) self.drone_ip = drone_ip self.my_ip = my_ip self.interval = interval self._stop = threading.Event() def run(self): while not self._stop.is_set(): send_start_command(self.drone_ip, self.my_ip) time.sleep(self.interval) def stop(self): self._stop.set() ############################################################################### # 2. Video receiver thread – re-assemble slices into full JPEG frames ############################################################################### class VideoReceiver(threading.Thread): def __init__( self, frame_queue: queue.Queue, port: int = VIDEO_PORT, dump_frames: bool = False, dump_packets: bool = False, ): super().__init__(daemon=True) self.frame_q = frame_queue self.port = port self.dump_frames = dump_frames self.dump_packets = dump_packets # control flag for run() self.running = threading.Event() self.running.set() # assembly state self._cur_fid = None self._fragments = {} # sid_raw:int -> payload:bytes if self.dump_packets: ts = int(time.time()*1000) self._pktlog = open(f"logged_packets_{ts}.bin", "wb") def stop(self): self.running.clear() def _reset_frame(self, new_fid): """Forget the old frame and start a fresh one.""" self._cur_fid = new_fid self._fragments.clear() def _finalise_frame(self, fid, fragments): # 1) stitch slices together in ascending order data = b"".join(fragments[i] for i in sorted(fragments)) # 2) find the real JPEG in the bytes start = data.find(SOI_MARKER) end = data.rfind(EOI_MARKER) if start < 0 or end < 0 or end <= start: print(f"[receiver] JPEG markers missing on frame {fid}") return jpeg = data[start : end + 2] # 3) dump & push if self.dump_frames: ts = int(time.time() * 1000) with open(f"frame_{fid:02x}_{ts}.jpg", "wb") as f: f.write(jpeg) print(f"[receiver] frame {fid} complete – {len(jpeg)} bytes") self.frame_q.put(jpeg) def run(self): sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.bind(("0.0.0.0", self.port)) sock.settimeout(1.0) print(f"[receiver] listening on UDP/*:{self.port}") try: while self.running.is_set(): try: pkt, addr = sock.recvfrom(2048) except socket.timeout: continue if self.dump_packets: self._pktlog.write(pkt) # sanity‐check if len(pkt) <= HEADER_LEN or pkt[:2] != SYNC_BYTES: continue fid = pkt[2] sid_raw = pkt[5] # if packet byte 7 and 8 are 0x78 and 0x05 respectively, then strip the 8 bytes if pkt[6] == 0x78 
and pkt[7] == 0x05: payload = pkt[8:] else: payload = pkt[6:] # strip trailing 0x23 0x23 if present if payload.endswith(b"\x23\x23"): payload = payload[:-2] if sid_raw % 20 == 0: # throttle the spam head = payload[:8].hex() ascii_payload = payload[:8].decode('ascii', errors='replace') print(f"[slice] FID=0x{fid:02x} SID={sid_raw:3d} " f"head={head} ascii={ascii_payload!r}") # new frame detected? if self._cur_fid is None: self._reset_frame(fid) elif fid != self._cur_fid: if self._fragments: keys = sorted(self._fragments) # simple completeness check if len(keys) == (keys[-1] - keys[0] + 1): self._finalise_frame(self._cur_fid, self._fragments) else: print(f"[receiver] dropping frame {self._cur_fid}, " f"slices {keys[0]}..{keys[-1]} missing " f"{(keys[-1]-keys[0]+1) - len(keys)}") self._reset_frame(fid) # stash this slice (ignore dupes) if sid_raw not in self._fragments: self._fragments[sid_raw] = payload finally: sock.close() if self.dump_packets: self._pktlog.close() print("[receiver] stopped") ############################################################################### # 3. Display loop (main thread) – show frames with OpenCV ############################################################################### def display_frames(frame_q: queue.Queue): cv2.namedWindow("Drone", cv2.WINDOW_NORMAL) # build a single placeholder image (black + red warning text) placeholder_h, placeholder_w = 480, 640 placeholder = np.zeros((placeholder_h, placeholder_w, 3), np.uint8) txt = "No JPEG frames reconstructed yet" font, scale, th = cv2.FONT_HERSHEY_SIMPLEX, 0.6, 2 (tw, th_), _ = cv2.getTextSize(txt, font, scale, th) x = (placeholder_w - tw) // 2 y = (placeholder_h + th_) // 2 cv2.putText(placeholder, txt, (x, y), font, scale, (0, 0, 255), th) fps_timer = time.time() frame_count = 0 while True: jpeg = None try: jpeg = frame_q.get(timeout=1.0) except queue.Empty: pass if jpeg is None: img = placeholder is_real = False else: arr = np.frombuffer(jpeg, dtype=np.uint8) frame = cv2.imdecode(arr, cv2.IMREAD_COLOR) if frame is None: print(f"[display] ⚠ imdecode failed ({len(arr)} bytes)") img, is_real = placeholder, False else: print(f"[display] decoded frame {frame.shape}") img, is_real = frame, True cv2.imshow("Drone", img) key = cv2.waitKey(1) & 0xFF if key == ord("q"): break # only count toward FPS when we had a real frame if is_real: frame_count += 1 if frame_count % 60 == 0: now = time.time() print(f"[display] ~{frame_count/(now-fps_timer):4.1f} fps") fps_timer, frame_count = now, 0 cv2.destroyAllWindows() ############################################################################### # Entry-point ############################################################################### def main(): parser = argparse.ArgumentParser( description="Simple UDP/MJPEG client for FH-style drone" ) parser.add_argument("--drone-ip", default=DRONE_IP, help="Drone host address") parser.add_argument("--video-port", type=int, default=VIDEO_PORT) parser.add_argument("--control-port", type=int, default=CONTROL_PORT) parser.add_argument( "--keepalive", type=float, default=1.0, help="Re-send start-video every N seconds" ) parser.add_argument( "--dump-frames", action="store_true", help="Dump every reassembled JPEG to ./dumped_frames/" ) parser.add_argument( "--dump-packets", action="store_true", help="Dump every raw UDP packet to ./dumped_packets/" ) args = parser.parse_args() my_ip = discover_local_ip(args.drone_ip) print(f"[info] local IP that reaches the drone: {my_ip}") # 1. 
Tell the drone to start sending send_start_command(args.drone_ip, my_ip) # 2. Start keep-alive sender keepalive = ControlKeepAlive(args.drone_ip, my_ip, interval=args.keepalive) keepalive.start() # 3. Start receiver thread (pass dump flags) frame_q = queue.Queue(maxsize=100) receiver = VideoReceiver( frame_q, port=args.video_port, dump_frames=args.dump_frames, dump_packets=args.dump_packets ) receiver.start() # 4. UI loop try: display_frames(frame_q) finally: print("[main] shutting down …") keepalive.stop() receiver.stop() receiver.join() if __name__ == "__main__": main() ================================================ FILE: backend/remote_control.py ================================================ #!/usr/bin/env python3 import socket import threading import time import argparse import curses class DroneController: def __init__(self, drone_ip, control_port): self.drone_ip = drone_ip self.control_port = control_port self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # stick midpoints = 128.0 self.yaw = 128.0 self.throttle = 128.0 self.pitch = 128.0 self.roll = 128.0 # one-shot flags self.takeoff = False self.land = False self.stop = False self.headless = False self.calibration = False # misc self.speed = 20 # matches 0x14 from dumps self.record = 0 # bit 2 in byte 7 self.rocker = 0 # bit 3 in byte 7 self.running = True # how fast to move stick inputs (units/sec) self.accel_rate = 200.0 self.decel_rate = 300.0 # Control range limits (instead of 0-255) self.min_control_value = 60.0 self.max_control_value = 200.0 self.center_value = 128.0 # Immediate response boost factor self.immediate_response = 5.0 # Exponential control factor (higher values = more aggressive response) self.expo_factor = 0.8 def update_axes(self, dt, throttle_dir, yaw_dir, pitch_dir, roll_dir): """Apply acceleration or deceleration for each axis.""" for attr, direction, boost_enabled in ( ('throttle', throttle_dir, False), ('yaw', yaw_dir, False), ('pitch', pitch_dir, True), ('roll', roll_dir, True), # Enable boost for roll and pitch ): cur = getattr(self, attr) # Handle exponential control mapping if direction > 0: # Apply immediate boost on direction change if boost_enabled and getattr(self, f"last_{attr}_dir", 0) <= 0: jump = min(self.max_control_value - cur, self.immediate_response) cur += jump # Calculate acceleration with exponential factor distance_to_max = self.max_control_value - cur accel = self.accel_rate * dt * (1 + self.expo_factor * distance_to_max / (self.max_control_value - self.center_value)) new = min(self.max_control_value, cur + accel) elif direction < 0: # Apply immediate boost on direction change if boost_enabled and getattr(self, f"last_{attr}_dir", 0) >= 0: jump = min(cur - self.min_control_value, self.immediate_response) cur -= jump # Calculate acceleration with exponential factor distance_to_min = cur - self.min_control_value accel = self.accel_rate * dt * (1 + self.expo_factor * distance_to_min / (self.center_value - self.min_control_value)) new = max(self.min_control_value, cur - accel) else: # Return to center faster from extremes if cur > self.center_value: # Exponential return to center distance_from_center = cur - self.center_value decel = self.decel_rate * dt * (1 + 0.5 * distance_from_center / (self.max_control_value - self.center_value)) new = max(self.center_value, cur - decel) elif cur < self.center_value: # Exponential return to center distance_from_center = self.center_value - cur decel = self.decel_rate * dt * (1 + 0.5 * distance_from_center / (self.center_value - 
self.min_control_value)) new = min(self.center_value, cur + decel) else: new = cur # Store last direction for detecting direction changes setattr(self, f"last_{attr}_dir", direction) setattr(self, attr, new) def remap_to_full_range(self, value): """Remap value from constrained range to full 0-255 range for sending to drone""" if value >= self.center_value: # Map center...max_control to 128...255 return 128.0 + (value - self.center_value) * (255.0 - 128.0) / (self.max_control_value - self.center_value) else: # Map min_control...center to 0...128 return (value - self.min_control_value) * 128.0 / (self.center_value - self.min_control_value) def build_packet_hy(self): pkt = bytearray(20) pkt[0] = 0x66 pkt[1] = self.speed & 0xFF # Cast floats back to ints with CORRECTED ORDER # Remap from our constrained range to full 0-255 range pkt[2] = int(self.remap_to_full_range(self.roll)) & 0xFF pkt[3] = int(self.remap_to_full_range(self.pitch)) & 0xFF pkt[4] = int(self.remap_to_full_range(self.throttle)) & 0xFF pkt[5] = int(self.remap_to_full_range(self.yaw)) & 0xFF # FIXED: flags in byte 6 and 7 were reversed compared to mobile app # Byte 6 should be 0x00 pkt[6] = 0x00 # Handle one-shot flags if self.takeoff: pkt[6] |= 0x01 if self.land: pkt[6] |= 0x02 if self.stop: pkt[6] |= 0x04 # Byte 7 should be 0x0a pkt[7] = 0x0a # Base value is 0x0a # record flag if self.record: pkt[7] |= (self.record << 2) # bytes 8-17 = 0 (zero-filled) # checksum over bytes 2-17 chk = 0 for i in range(2, 18): chk ^= pkt[i] pkt[18] = chk & 0xFF pkt[19] = 0x99 # clear one-shots self.takeoff = self.land = self.stop = False return pkt def send_loop(self, interval=0.03): # debug flag self.debug_packets = False packet_counter = 0 while self.running: buf = self.build_packet_hy() self.sock.sendto(buf, (self.drone_ip, self.control_port)) # Log packet details if debug is enabled if self.debug_packets: packet_counter += 1 # Print full packet hex dump hex_dump = ' '.join(f'{b:02x}' for b in buf) print(f"Packet #{packet_counter}: {hex_dump}") # Print decoded controls print(f" Controls: R:{buf[2]} P:{buf[3]} T:{buf[4]} Y:{buf[5]}") # Print flags flags6 = buf[6] flags7 = buf[7] flags_desc = [] if flags6 & 0x01: flags_desc.append("TAKEOFF") if flags6 & 0x02: flags_desc.append("LAND") if flags6 & 0x04: flags_desc.append("STOP") if flags7 & 0x01: flags_desc.append("HEADLESS") if flags7 & 0x04: flags_desc.append("RECORD") print(f" Flags: {flags_desc}") print(f" Checksum: 0x{buf[18]:02x}") print() time.sleep(interval) def stop_loop(self): self.running = False def toggle_debug(self): """Toggle debug packet logging""" self.debug_packets = not self.debug_packets return self.debug_packets def ui_loop(stdscr, controller): curses.curs_set(0) stdscr.nodelay(True) stdscr.keypad(True) help_msg = "W/S=throttle A/D=yaw Arrows=pitch/roll T=takeoff L=land Q=quit" help_msg2 = "R=record F=debug packets X=sensitivity mode" # direction states and last-press timestamps throttle_dir = yaw_dir = pitch_dir = roll_dir = 0 throttle_ts = yaw_ts = pitch_ts = roll_ts = 0.0 PRESS_THRESHOLD = 0.4 # threshold for key being held (increased from 0.2) prev_time = time.time() debug_enabled = False sensitivity_mode = 0 # 0=normal, 1=precise, 2=aggressive sensitivity_labels = ["Normal", "Precise", "Aggressive"] while controller.running: now = time.time() dt = now - prev_time prev_time = now c = stdscr.getch() if c in (ord('q'), ord('Q')): controller.stop_loop() break elif c in (ord('t'), ord('T')): controller.takeoff = True elif c in (ord('l'), ord('L')): controller.land = True 
elif c in (ord('r'), ord('R')): controller.record = 1 if controller.record == 0 else 0 elif c in (ord('f'), ord('F')): debug_enabled = controller.toggle_debug() elif c in (ord('x'), ord('X')): # Toggle sensitivity mode sensitivity_mode = (sensitivity_mode + 1) % 3 # Apply sensitivity settings if sensitivity_mode == 0: # Normal (using previous Precise settings) controller.accel_rate = 150.0 controller.decel_rate = 350.0 controller.expo_factor = 0.5 controller.immediate_response = 3.0 elif sensitivity_mode == 1: # Very Precise (even slower/more precise) controller.accel_rate = 100.0 controller.decel_rate = 400.0 controller.expo_factor = 0.3 controller.immediate_response = 1.5 else: # Aggressive (unchanged) controller.accel_rate = 300.0 controller.decel_rate = 280.0 controller.expo_factor = 1.5 controller.immediate_response = 15.0 # throttle elif c in (ord('w'), ord('W')): throttle_dir = +1; throttle_ts = now elif c in (ord('s'), ord('S')): throttle_dir = -1; throttle_ts = now # yaw elif c in (ord('a'), ord('A')): yaw_dir = -1; yaw_ts = now elif c in (ord('d'), ord('D')): yaw_dir = +1; yaw_ts = now # pitch elif c == curses.KEY_UP: pitch_dir = +1; pitch_ts = now elif c == curses.KEY_DOWN: pitch_dir = -1; pitch_ts = now # roll elif c == curses.KEY_LEFT: roll_dir = -1; roll_ts = now elif c == curses.KEY_RIGHT: roll_dir = +1; roll_ts = now # decide if each axis is "still held" active_throttle = throttle_dir if (now - throttle_ts) < PRESS_THRESHOLD else 0 active_yaw = yaw_dir if (now - yaw_ts) < PRESS_THRESHOLD else 0 active_pitch = pitch_dir if (now - pitch_ts) < PRESS_THRESHOLD else 0 active_roll = roll_dir if (now - roll_ts) < PRESS_THRESHOLD else 0 # apply acceleration / deceleration controller.update_axes( dt, active_throttle, active_yaw, active_pitch, active_roll ) # Update the UI stdscr.clear() stdscr.addstr(0, 0, f"Throttle: {int(controller.throttle):3d} " f"Yaw: {int(controller.yaw):3d}") stdscr.addstr(1, 0, f" Pitch: {int(controller.pitch):3d} " f"Roll: {int(controller.roll):3d}") # Add status flags to UI status_flags = [f"Mode: {sensitivity_labels[sensitivity_mode]}"] if controller.record: status_flags.append("RECORD") if debug_enabled: status_flags.append("DEBUG") status_str = " | ".join(status_flags) stdscr.addstr(2, 0, f"Status: {status_str}") stdscr.addstr(4, 0, help_msg) stdscr.addstr(5, 0, help_msg2) stdscr.refresh() # small sleep to cap UI frame-rate time.sleep(0.02) def main(): parser = argparse.ArgumentParser(description="FH‐drone teleop interface") parser.add_argument("--drone-ip", type=str, default="172.16.10.1", help="Drone UDP IP address") parser.add_argument("--control-port", type=int, default=8080, help="Drone control port") parser.add_argument("--rate", type=float, default=20.0, help="Control packets per second") args = parser.parse_args() controller = DroneController(args.drone_ip, args.control_port) sender = threading.Thread( target=controller.send_loop, args=(1.0 / args.rate,), daemon=True ) sender.start() try: curses.wrapper(ui_loop, controller) except KeyboardInterrupt: pass controller.stop_loop() sender.join() if __name__ == "__main__": main() ================================================ FILE: backend/requirements.txt ================================================ # Numerical array handling numpy>=1.19.2 # OpenCV bindings (for cv2) opencv-python>=4.5.1.48 # FastAPI fastapi # Uvicorn uvicorn[standard] # Python-multipart python-multipart # Starlette starlette # python-dotenv python-dotenv # YOLOv10 ultralytics ================================================ 
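For orientation before the services layer: a minimal standalone sketch of the 20-byte control packet that DroneController.build_packet_hy() in backend/remote_control.py above assembles. The helper name and the neutral-stick example values are illustrative and not part of the repository.

    # Illustrative sketch (assumes the layout used by build_packet_hy above):
    # header 0x66, speed byte, roll/pitch/throttle/yaw, two flag bytes,
    # ten zero bytes, XOR checksum over bytes 2-17, trailer 0x99.
    def sketch_hy_packet(roll=128, pitch=128, throttle=128, yaw=128,
                         speed=0x14, flags6=0x00, flags7=0x0A) -> bytes:
        pkt = bytearray(20)
        pkt[0] = 0x66
        pkt[1] = speed & 0xFF
        pkt[2], pkt[3], pkt[4], pkt[5] = roll & 0xFF, pitch & 0xFF, throttle & 0xFF, yaw & 0xFF
        pkt[6], pkt[7] = flags6 & 0xFF, flags7 & 0xFF   # e.g. 0x01 in byte 6 = takeoff
        chk = 0
        for i in range(2, 18):                          # bytes 8-17 stay zero
            chk ^= pkt[i]
        pkt[18] = chk & 0xFF                            # XOR checksum
        pkt[19] = 0x99                                  # trailer
        return bytes(pkt)

    # With all sticks centred this yields 66 14 80 80 80 80 00 0a 00*10 0a 99,
    # since 0x80 ^ 0x80 ^ 0x80 ^ 0x80 ^ 0x00 ^ 0x0a = 0x0a.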
FILE: backend/services/__init__.py ================================================ from .flight_controller import FlightController __all__ = ['FlightController'] ================================================ FILE: backend/services/flight_controller.py ================================================ import threading import time import os import logging logger = logging.getLogger(__name__) class FlightController: """Core service that manages drone flight operations""" def __init__(self, drone_model, protocol_adapter, update_rate=80.0): self.model = drone_model self.protocol = protocol_adapter self.update_interval = 1.0 / update_rate self.running = True # Input state self.throttle_dir = 0 self.yaw_dir = 0 self.pitch_dir = 0 self.roll_dir = 0 # Logging / diagnostics self.last_control_source = "init" self._last_log_time = 0.0 # When enabled, logs control state at DEBUG level (so it is still quiet # unless LOG_LEVEL=DEBUG). self.log_controls = os.getenv("FLIGHT_LOG_CONTROLS", "true").lower() in ("1", "true", "yes", "on") def start(self): """Start the control thread""" self.control_thread = threading.Thread(target=self._control_loop) self.control_thread.daemon = True self.control_thread.start() def stop(self): """Stop the control thread""" self.running = False if hasattr(self, 'control_thread'): self.control_thread.join(timeout=1.0) if hasattr(self.protocol, 'stop'): self.protocol.stop() def set_control_direction(self, control, direction): """Set control direction (-1, 0, 1)""" if control == 'throttle': self.throttle_dir = direction elif control == 'yaw': self.yaw_dir = direction elif control == 'pitch': self.pitch_dir = direction elif control == 'roll': self.roll_dir = direction def set_axes(self, throttle: float, yaw: float, pitch: float, roll: float) -> None: """ Atomically update all four stick directions. Each value is expected in the range [-1.0 … +1.0]. """ self.set_axes_from("unknown", throttle, yaw, pitch, roll) def set_axes_from(self, source: str, throttle: float, yaw: float, pitch: float, roll: float) -> None: """Same as set_axes, but records the control source and logs.""" self.throttle_dir = max(-1.0, min(1.0, throttle)) self.yaw_dir = max(-1.0, min(1.0, yaw)) self.pitch_dir = max(-1.0, min(1.0, pitch)) self.roll_dir = max(-1.0, min(1.0, roll)) self.last_control_source = source # Optional immediate debug of inbound commands try: if getattr(self, "debug_set_axes", False) or self.log_controls: state = {} try: state = self.model.get_control_state() except Exception: pass logger.debug( "[RC-In] src=%-8s norm T:%+.2f Y:%+.2f P:%+.2f R:%+.2f | raw T:%s Y:%s P:%s R:%s", source, self.throttle_dir, self.yaw_dir, self.pitch_dir, self.roll_dir, state.get("throttle"), state.get("yaw"), state.get("pitch"), state.get("roll"), ) except Exception: pass def _control_loop(self): """Background thread for sending control updates""" prev_time = time.time() while self.running: now = time.time() dt = now - prev_time prev_time = now # Update drone controls based on input directions self.model.update(dt, { "throttle": self.throttle_dir, # still -1…+1 "yaw": self.yaw_dir, "pitch": self.pitch_dir, "roll": self.roll_dir, }) # Build and send packet try: packet = self.protocol.build_control_packet(self.model) self.protocol.send_control_packet(packet) except Exception: # The RC socket may be momentarily unavailable while the # video layer is recreating its socket. Ignore and keep # looping – a fresh socket will be injected shortly. 
pass # Sleep to maintain update rate time.sleep(self.update_interval) # Periodic state log (shows current model raw sticks after update) if self.log_controls: if now - self._last_log_time >= 0.5: try: state = self.model.get_control_state() strategy = getattr(self.model, "strategy", None) strat_name = strategy.__class__.__name__ if strategy else "(none)" logger.debug( "[RC-Loop] src=%-8s norm T:%+.2f Y:%+.2f P:%+.2f R:%+.2f | raw T:%s Y:%s P:%s R:%s | strat=%s", self.last_control_source, self.throttle_dir, self.yaw_dir, self.pitch_dir, self.roll_dir, state.get("throttle"), state.get("yaw"), state.get("pitch"), state.get("roll"), strat_name, ) except Exception: pass self._last_log_time = now ================================================ FILE: backend/services/video_receiver.py ================================================ import queue import socket import threading import time import os import logging from utils.dropping_queue import DroppingQueue from models.video_frame import VideoFrame logger = logging.getLogger(__name__) class VideoReceiverService: """ Creates and manages a protocol adapter, destroying and recreating it from scratch if the connection is lost, per the user's experiment. """ def __init__( self, protocol_adapter_class, protocol_adapter_args, frame_queue=None, max_queue_size=100, dump_frames=False, dump_packets=False, dump_dir=None, rc_adapter=None, ): self.protocol_adapter_class = protocol_adapter_class self.protocol_adapter_args = protocol_adapter_args self.frame_queue = frame_queue or DroppingQueue(maxsize=max_queue_size) self.dump_frames = dump_frames self.dump_packets = dump_packets self.rc_adapter = rc_adapter self.protocol = None # Will be managed in the receiver loop if dump_frames or dump_packets: self.dump_dir = dump_dir or f"dumps_{int(time.time())}" os.makedirs(self.dump_dir, exist_ok=True) if self.dump_packets: ts = int(time.time() * 1000) self._pktlog = open( os.path.join(self.dump_dir, f"packets_{ts}.bin"), "wb" ) self._running = threading.Event() self._receiver_thread = None # ────────── lifecycle ────────── # def start(self) -> None: if self._receiver_thread and self._receiver_thread.is_alive(): return self._running.set() self._receiver_thread = threading.Thread( target=self._receiver_loop, name="VideoReceiver", daemon=True ) self._receiver_thread.start() def stop(self) -> None: self._running.clear() if self.protocol: self.protocol.stop() if self._receiver_thread and self._receiver_thread.is_alive(): self._receiver_thread.join(timeout=1.0) if self.dump_packets and self._pktlog: self._pktlog.close() # ────────── stream access ────────── # def get_frame_queue(self) -> queue.Queue: return self.frame_queue # ────────── receiver loop ────────── # def _receiver_loop(self) -> None: """ Manages the lifecycle of the video protocol adapter. If the connection is lost, it will be destroyed and a new one will be created. """ while self._running.is_set(): try: # 1. Create a new protocol instance self.protocol = self.protocol_adapter_class( **self.protocol_adapter_args ) if self.rc_adapter and hasattr(self.protocol, "set_rc_adapter"): self.protocol.set_rc_adapter(self.rc_adapter) # 2. Start the protocol's receiver loop self.protocol.start() # 3. 
Frame processing loop frame_idx = 0 while self._running.is_set() and self.protocol.is_running(): try: frame = self.protocol.get_frame(timeout=1.0) if frame: self.frame_queue.put(frame) if self.dump_frames: frame_idx += 1 self._dump_frame(frame, frame_idx) # Packets are dumped inside the protocol adapter if self.dump_packets: packets = self.protocol.get_packets() for p in packets: self._pktlog.write(p) self._pktlog.flush() except queue.Empty: continue # Normal, just means no frame was ready except Exception as e: logger.warning("[VideoReceiverService] Error processing frame: %s", e) break except socket.error as e: logger.warning("[VideoReceiverService] Socket error: %s. Reconnecting...", e) except Exception as e: logger.exception("[VideoReceiverService] Unexpected error: %s", e) # Optionally, decide if you want to stop the whole loop on certain errors # self.stop() # break finally: # Cleanup before the next iteration if self.protocol: self.protocol.stop() self.protocol = None # Wait before attempting to reconnect if self._running.is_set(): logger.debug("[VideoReceiverService] Waiting 5 seconds before reconnecting...") time.sleep(5) logger.debug("[VideoReceiverService] Receiver loop has stopped.") # ────────── frame dumping ────────── # def _dump_frame(self, frame: "VideoFrame | bytes | bytearray | memoryview", frame_idx: int) -> None: """ Saves a frame to the file system in the dump directory. Note: the receiver loop deals in `VideoFrame` objects; we persist the underlying encoded bytes (`VideoFrame.data`), not the object itself. """ if isinstance(frame, VideoFrame): frame_bytes = frame.data ext = "jpg" if getattr(frame, "format", None) in (None, "jpeg", "jpg") else str(frame.format) else: frame_bytes = frame ext = "jpg" if not isinstance(frame_bytes, (bytes, bytearray, memoryview)): raise TypeError(f"Expected frame bytes, got {type(frame_bytes).__name__}") filename = os.path.join(self.dump_dir, f"frame_{frame_idx:04d}.{ext}") try: with open(filename, "wb") as f: f.write(frame_bytes) except Exception as e: logger.warning("[VideoReceiverService] Error dumping frame: %s", e) ================================================ FILE: backend/tests/test_cooingdv_jieli_ctp.py ================================================ import unittest from utils.cooingdv_jieli_ctp import build_ctp_packet, parse_ctp_packet from protocols.cooingdv_jieli_rc_protocol_adapter import CooingdvJieliRcProtocolAdapter from models.cooingdv_rc import CooingdvRcModel class CooingdvJieliCtpTests(unittest.TestCase): def test_builds_little_endian_ctp_packet(self): packet = build_ctp_packet("CONTROL_MODE", {"state": "1"}) self.assertTrue(packet.startswith(b"CTP:")) topic_len = int.from_bytes(packet[4:6], "little") self.assertEqual(topic_len, len("CONTROL_MODE")) self.assertEqual(packet[6 : 6 + topic_len], b"CONTROL_MODE") topic, payload = parse_ctp_packet(packet) self.assertEqual(topic, "CONTROL_MODE") self.assertEqual(payload, {"op": "PUT", "param": {"state": "1"}}) def test_flying_control_payload_preserves_tc_bytes(self): model = CooingdvRcModel() model.roll = 128 model.pitch = 129 model.throttle = 130 model.yaw = 131 model.stop_flag = True adapter = CooingdvJieliRcProtocolAdapter.__new__(CooingdvJieliRcProtocolAdapter) payload = adapter._build_flying_payload(model) expected_checksum = 128 ^ 129 ^ 130 ^ 131 ^ 0x04 self.assertEqual(payload, [102, 128, 129, 130, 131, 4, expected_checksum, 153]) packet = build_ctp_packet("FLYING_CTRL", {f"BYTE{i}": str(v) for i, v in enumerate(payload)}) topic, body = parse_ctp_packet(packet) 
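        # For reference (illustrative, not asserted by this test): the envelope
        # built above is b"CTP:" + le16(len("FLYING_CTRL")) + b"FLYING_CTRL"
        # + le32(json_length) + the compact JSON body
        # {"op":"PUT","param":{"BYTE0":"102", ..., "BYTE7":"153"}}.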
self.assertEqual(topic, "FLYING_CTRL") self.assertEqual(body["op"], "PUT") self.assertEqual(body["param"]["BYTE0"], "102") self.assertEqual(body["param"]["BYTE7"], "153") if __name__ == "__main__": unittest.main() ================================================ FILE: backend/tests/test_s2x_protocol.py ================================================ import unittest from models.s2x_rc import S2xDroneModel from protocols.s2x_rc_protocol_adapter import S2xRCProtocolAdapter class S2xRcProtocolTests(unittest.TestCase): def _adapter(self): adapter = S2xRCProtocolAdapter.__new__(S2xRCProtocolAdapter) adapter.swap_yaw_roll = False adapter.speed_scale_by_index = { 0: 0.7, 1: 0.8, 2: 1.0, } return adapter def test_default_packet_matches_macrochip_hy_layout(self): model = S2xDroneModel() model.roll = 128 model.pitch = 129 model.throttle = 130 model.yaw = 131 model.takeoff_flag = True adapter = self._adapter() packet = adapter.build_control_packet(model) self.assertEqual(len(packet), 20) self.assertEqual(packet[:8], bytes([0x66, 0x14, 128, 129, 131, 133, 0x01, 0x0A])) self.assertEqual(packet[18], 128 ^ 129 ^ 131 ^ 133 ^ 0x01 ^ 0x0A) self.assertEqual(packet[19], 0x99) self.assertFalse(model.takeoff_flag) def test_swap_yaw_roll_changes_transmitted_axes(self): model = S2xDroneModel() model.roll = 60 model.pitch = 128 model.throttle = 128 model.yaw = 200 adapter = self._adapter() adapter.swap_yaw_roll = True packet = adapter.build_control_packet(model) self.assertEqual(packet[2], 255) self.assertEqual(packet[5], 0) def test_speed_index_scales_roll_and_pitch_only(self): model = S2xDroneModel() model.roll = 200 model.pitch = 60 model.throttle = 200 model.yaw = 60 model.set_speed_index(0) adapter = self._adapter() packet = adapter.build_control_packet(model) self.assertEqual(packet[2], 216) self.assertEqual(packet[3], 38) self.assertEqual(packet[4], 255) self.assertEqual(packet[5], 0) if __name__ == "__main__": unittest.main() ================================================ FILE: backend/tests/test_s2x_video_protocol.py ================================================ import unittest from models.s2x_video_model import S2xVideoModel from protocols.s2x_video_protocol import S2xVideoProtocolAdapter class S2xVideoProtocolTests(unittest.TestCase): def _adapter(self): adapter = S2xVideoProtocolAdapter.__new__(S2xVideoProtocolAdapter) adapter.model = S2xVideoModel() return adapter def _packet(self, frame_id: int, total_chunks: int, chunk_id: int, body: bytes) -> bytes: payload = ( b"\x40\x40" + frame_id.to_bytes(2, "little") + bytes([total_chunks, chunk_id]) ) packet_len = 8 + len(body) + 2 return payload + packet_len.to_bytes(2, "little") + body + b"##" def test_emits_frame_when_all_declared_chunks_arrive(self): adapter = self._adapter() first = self._packet(0x1234, 2, 0, b"\xff\xd8hello") second = self._packet(0x1234, 2, 1, b" world\xff\xd9") self.assertIsNone(adapter.handle_payload(first)) frame = adapter.handle_payload(second) self.assertIsNotNone(frame) self.assertEqual(frame.frame_id, 0x1234) self.assertEqual(frame.data, b"\xff\xd8hello world\xff\xd9") self.assertEqual(frame.format, "jpeg") def test_accepts_out_of_order_chunks(self): adapter = self._adapter() second = self._packet(7, 2, 1, b" tail\xff\xd9") first = self._packet(7, 2, 0, b"\xff\xd8head") self.assertIsNone(adapter.handle_payload(second)) frame = adapter.handle_payload(first) self.assertIsNotNone(frame) self.assertEqual(frame.data, b"\xff\xd8head tail\xff\xd9") def test_rejects_mismatched_declared_length(self): adapter = self._adapter() packet 
= bytearray(self._packet(1, 1, 0, b"\xff\xd8x\xff\xd9")) packet[6:8] = (len(packet) + 1).to_bytes(2, "little") self.assertIsNone(adapter.handle_payload(bytes(packet))) if __name__ == "__main__": unittest.main() ================================================ FILE: backend/tests/test_wifi_cam_protocol.py ================================================ import unittest from models.wifi_cam_rc import WifiCamRcModel from protocols.wifi_cam_rc_protocol_adapter import WifiCamRcProtocolAdapter from protocols.wifi_cam_video_protocol import WifiCamVideoProtocolAdapter class WifiCamRcProtocolTests(unittest.TestCase): def _adapter(self, mode="short"): adapter = WifiCamRcProtocolAdapter.__new__(WifiCamRcProtocolAdapter) adapter.command_mode = mode adapter.camera_type = 0 return adapter def test_short_packet_matches_base_cmd_layout(self): model = WifiCamRcModel() model.roll = 128 model.pitch = 129 model.throttle = 130 model.yaw = 131 model.takeoff_flag = True adapter = self._adapter("short") packet = adapter.build_control_packet(model) expected_checksum = 128 ^ 129 ^ 130 ^ 131 ^ 0x01 self.assertEqual(packet, bytes([0x66, 128, 129, 130, 131, 0x01, expected_checksum, 0x99])) self.assertFalse(model.takeoff_flag) def test_short_packet_escapes_marker_checksum(self): model = WifiCamRcModel() model.roll = 128 model.pitch = 128 model.throttle = 128 model.yaw = 230 adapter = self._adapter("short") packet = adapter.build_control_packet(model) self.assertEqual(packet[6], 0x67) def test_extended_packet_uses_camera_type_two_in_auto_mode(self): model = WifiCamRcModel() model.roll = 128 model.pitch = 129 model.throttle = 130 model.yaw = 131 model.land_flag = True model.stop_flag = True model.headless_flag = True adapter = self._adapter("auto") adapter.set_camera_type(2) packet = adapter.build_control_packet(model) self.assertEqual(len(packet), 20) self.assertEqual(packet[:8], bytes([0x66, 0x14, 128, 129, 130, 131, 0x03, 0x01])) self.assertEqual(packet[18], 128 ^ 129 ^ 130 ^ 131 ^ 0x03 ^ 0x01) self.assertEqual(packet[19], 0x99) self.assertFalse(model.land_flag) self.assertFalse(model.stop_flag) class FakeRcAdapter: def __init__(self): self.camera_type = None def set_camera_type(self, camera_type): self.camera_type = camera_type class WifiCamVideoProtocolTests(unittest.TestCase): def test_camera_type_probe_updates_rc_adapter(self): adapter = WifiCamVideoProtocolAdapter() rc_adapter = FakeRcAdapter() adapter.set_rc_adapter(rc_adapter) frame = adapter.handle_payload(b"\x55\x00\x02\x00\x00\x00\x02\x99") self.assertIsNone(frame) self.assertEqual(adapter.camera_type, 2) self.assertEqual(rc_adapter.camera_type, 2) def test_single_chunk_jpeg_frame(self): adapter = WifiCamVideoProtocolAdapter() jpeg = b"\xff\xd8hello\xff\xd9" packet = bytes([1, 1, 1, 8, 0, 0, 0, 0]) + jpeg frame = adapter.handle_payload(packet) self.assertIsNotNone(frame) self.assertEqual(frame.data, jpeg) self.assertEqual(frame.format, "jpeg") self.assertEqual(frame.resolution, 8) self.assertEqual(frame.retain, 0) def test_multi_chunk_jpeg_frame_uses_native_final_marker(self): adapter = WifiCamVideoProtocolAdapter() first_chunk = b"\xff\xd8" + (b"a" * (adapter.CHUNK_SIZE - 2)) second_chunk = b"tail\xff\xd9" packet1 = bytes([7, 0, 2, 8, 0, 0, 0, 1]) + first_chunk packet2 = bytes([7, 1, 2, 8, 0, 0, 0, 1]) + second_chunk self.assertIsNone(adapter.handle_payload(packet1)) frame = adapter.handle_payload(packet2) self.assertIsNotNone(frame) self.assertEqual(frame.data, first_chunk + second_chunk) self.assertEqual(frame.retain, 1) def 
test_rejects_final_chunk_without_jpeg_tail(self): adapter = WifiCamVideoProtocolAdapter() packet = bytes([1, 1, 1, 8, 0, 0, 0, 0]) + b"\xff\xd8missing-tail" frame = adapter.handle_payload(packet) self.assertIsNone(frame) if __name__ == "__main__": unittest.main() ================================================ FILE: backend/utils/cooingdv_jieli_ctp.py ================================================ from __future__ import annotations import json from typing import Mapping CTP_SIGNATURE = b"CTP:" OP_PUT = "PUT" def build_ctp_packet( topic: str, params: Mapping[str, str] | None = None, *, operation: str = OP_PUT, ) -> bytes: """ Build the Jieli CTP envelope used by KY FPV's DeviceClient backend. Format: CTP: + le16(topic_length) + topic + le32(json_length) + json """ payload: dict[str, object] = {"op": operation} if params: payload["param"] = dict(params) topic_bytes = topic.encode("utf-8") json_bytes = json.dumps(payload, separators=(",", ":")).encode("utf-8") return ( CTP_SIGNATURE + len(topic_bytes).to_bytes(2, "little", signed=False) + topic_bytes + len(json_bytes).to_bytes(4, "little", signed=False) + json_bytes ) def parse_ctp_packet(packet: bytes) -> tuple[str, dict]: """Parse a CTP packet. Intended for tests and diagnostics.""" if not packet.startswith(CTP_SIGNATURE): raise ValueError("missing CTP signature") offset = len(CTP_SIGNATURE) if len(packet) < offset + 2: raise ValueError("missing topic length") topic_len = int.from_bytes(packet[offset : offset + 2], "little") offset += 2 topic = packet[offset : offset + topic_len].decode("utf-8") offset += topic_len if len(packet) < offset + 4: raise ValueError("missing payload length") payload_len = int.from_bytes(packet[offset : offset + 4], "little") offset += 4 payload = json.loads(packet[offset : offset + payload_len].decode("utf-8")) return topic, payload ================================================ FILE: backend/utils/dropping_queue.py ================================================ import queue class DroppingQueue(queue.Queue): """ A queue that drops the oldest item when it is full. """ def put(self, item, block=True, timeout=None): """ Put an item into the queue. If the queue is full, it drops the oldest item and adds the new one. This operation is atomic with respect to other producers: under concurrent access, `put_nowait()` will not raise `queue.Full` because this method always makes space (by dropping the oldest element) before enqueuing. """ # Note: We intentionally do NOT call super().put(), because Queue.put() # may raise queue.Full when called with block=False. Instead, we perform # a "drop oldest (if needed) + put" as one critical section under the # queue's internal mutex. with self.mutex: if self.maxsize > 0 and self._qsize() >= self.maxsize: # Drop exactly one oldest item to make room. self._get() # If users rely on join()/task_done(), dropped work should not # keep join() waiting forever. if self.unfinished_tasks > 0: self.unfinished_tasks -= 1 self._put(item) self.unfinished_tasks += 1 self.not_empty.notify() def put_nowait(self, item): """ Put an item into the queue without blocking. If the queue is full, it drops the oldest item and adds the new one. 
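        Illustrative example (not taken from the repository):

            q = DroppingQueue(maxsize=2)
            q.put_nowait("a"); q.put_nowait("b"); q.put_nowait("c")
            # the queue now holds ["b", "c"]: "a" was silently dropped and
            # no queue.Full was raised.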
""" self.put(item, block=False) ================================================ FILE: backend/utils/logging_config.py ================================================ from __future__ import annotations import logging import os import dotenv _BOOTSTRAPPED = False _CONFIGURED = False _LOG_FORMAT = "%(asctime)s %(levelname)s %(name)s: %(message)s" def _normalise_level(raw: str | None) -> str: value = (raw or "INFO").strip().upper() if value == "WARN": return "WARNING" if value not in {"CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG", "NOTSET"}: return "INFO" return value def bootstrap_runtime() -> None: """ Load `.env` and set sane defaults for noisy native libraries before modules that import `cv2` are loaded. """ global _BOOTSTRAPPED if _BOOTSTRAPPED: return dotenv.load_dotenv() level_name = _normalise_level(os.getenv("LOG_LEVEL")) debug_native = level_name == "DEBUG" # Keep OpenCV / FFmpeg mostly quiet by default. These can still be # overridden explicitly by environment variables if needed. os.environ.setdefault("OPENCV_LOG_LEVEL", "INFO" if debug_native else "ERROR") os.environ.setdefault("OPENCV_FFMPEG_LOGLEVEL", "32" if debug_native else "16") os.environ.setdefault("OPENCV_VIDEOIO_DEBUG", "1" if debug_native else "0") os.environ.setdefault("OPENCV_FFMPEG_DEBUG", "1" if debug_native else "0") _BOOTSTRAPPED = True def configure_logging(level: str | None = None) -> str: """ Configure stdlib logging for backend entrypoints. Returns the resolved log level name. """ global _CONFIGURED bootstrap_runtime() level_name = _normalise_level(level or os.getenv("LOG_LEVEL")) level_value = getattr(logging, level_name, logging.INFO) root = logging.getLogger() if not root.handlers: logging.basicConfig(level=level_value, format=_LOG_FORMAT) else: root.setLevel(level_value) for handler in root.handlers: if handler.formatter is None: handler.setFormatter(logging.Formatter(_LOG_FORMAT)) _CONFIGURED = True return level_name ================================================ FILE: backend/utils/wifi_uav_ack_state.py ================================================ from __future__ import annotations from dataclasses import dataclass, field from collections import deque from typing import Optional from utils.wifi_uav_packets import build_ack_slot, build_fragment_ack_bitmap SLOT_EMPTY = 0 SLOT_RECEIVING = 1 SLOT_COMPLETE = 2 SLOT_DELIVERED = 3 SLOT_DROPPED = 4 @dataclass class WifiUavFrameSlot: """State for one native WiFi-UAV in-flight frame slot.""" seq: int = 0 status: int = SLOT_EMPTY fragment_total: int = 0 frame_body_len: int = 0 quality: int = 0 fragments: dict[int, bytes] = field(default_factory=dict) received_fragments: set[int] = field(default_factory=set) def reset(self, seq: int, fragment_total: int, frame_body_len: int, quality: int) -> None: self.seq = seq self.status = SLOT_RECEIVING self.fragment_total = fragment_total self.frame_body_len = frame_body_len self.quality = quality self.fragments.clear() self.received_fragments.clear() def ingest( self, fragment_id: int, fragment_total: int, payload: bytes, *, frame_body_len: int = 0, quality: int = 0, ) -> None: if self.status in (SLOT_COMPLETE, SLOT_DELIVERED): return if self.status in (SLOT_EMPTY, SLOT_DELIVERED, SLOT_DROPPED): self.fragment_total = fragment_total self.frame_body_len = frame_body_len self.quality = quality self.status = SLOT_RECEIVING elif fragment_total > 0: self.fragment_total = fragment_total self.frame_body_len = frame_body_len self.quality = quality self.fragments[fragment_id] = payload self.received_fragments.add(fragment_id) 
if self.is_complete(): self.status = SLOT_COMPLETE def is_complete(self) -> bool: return ( self.fragment_total > 0 and len(self.received_fragments) == self.fragment_total and all(i in self.fragments for i in range(self.fragment_total)) ) def ordered_payload(self) -> bytes: return b"".join(self.fragments[i] for i in range(self.fragment_total)) def mark_delivered(self) -> None: self.status = SLOT_DELIVERED def mark_dropped(self) -> None: self.status = SLOT_DROPPED def ack_status(self) -> int: if self.status == SLOT_RECEIVING: return 0 if self.status in (SLOT_COMPLETE, SLOT_DELIVERED): return 1 if self.status == SLOT_DROPPED: return 2 return 3 def ack_bitmap(self) -> bytes: if self.status != SLOT_RECEIVING or self.fragment_total <= 0: return b"" return build_fragment_ack_bitmap(self.fragment_total, self.received_fragments) def ack_slot(self) -> bytes: return build_ack_slot(self.seq, self.ack_status(), self.ack_bitmap()) class WifiUavAckState: """ Four-slot ACK/frame tracker shaped after native build_send_ack(). This keeps Turbodrone's delivery behavior simple while giving ACK generation a native-style home that can be evolved independently of socket code. """ SLOT_COUNT = 4 def __init__(self) -> None: self.slots = [WifiUavFrameSlot() for _ in range(self.SLOT_COUNT)] self.max_recv_seq = 0 self.last_completed_seq: Optional[int] = None self._delivered_history: deque[int] = deque(maxlen=32) def reset(self) -> None: for slot in self.slots: slot.seq = 0 slot.status = SLOT_EMPTY slot.fragment_total = 0 slot.frame_body_len = 0 slot.quality = 0 slot.fragments.clear() slot.received_fragments.clear() self.max_recv_seq = 0 self.last_completed_seq = None self._delivered_history.clear() def ingest_fragment( self, seq: int, fragment_id: int, fragment_total: int, payload: bytes, *, frame_body_len: int = 0, quality: int = 0, ) -> Optional[WifiUavFrameSlot]: if seq in self._delivered_history: return None slot = self._slot_for_seq(seq) if slot.seq != seq: slot.reset(seq, fragment_total, frame_body_len, quality) if fragment_total > 0 and fragment_id >= fragment_total: return None self.max_recv_seq = max(self.max_recv_seq, seq) slot.ingest(fragment_id, fragment_total, payload, frame_body_len=frame_body_len, quality=quality) if slot.is_complete(): self.last_completed_seq = seq return slot return None def mark_delivered(self, seq: int) -> None: slot = self._find_slot(seq) if slot is not None: slot.mark_delivered() if seq not in self._delivered_history: self._delivered_history.append(seq) def mark_dropped(self, seq: int) -> None: slot = self._find_slot(seq) if slot is not None: slot.mark_dropped() def build_ack_slots(self, request_seq: int) -> list[bytes]: slots = [] for slot in self.slots: if slot.status == SLOT_EMPTY or slot.seq == 0: continue if self.max_recv_seq and self.max_recv_seq - slot.seq >= 5: continue slots.append(slot.ack_slot()) if slots: return slots return [ build_ack_slot(request_seq, 1, b"\xff\xff\xff\xff"), build_ack_slot(request_seq, 3), ] def _slot_for_seq(self, seq: int) -> WifiUavFrameSlot: return self.slots[(seq + 3) % self.SLOT_COUNT] def _find_slot(self, seq: int) -> Optional[WifiUavFrameSlot]: for slot in self.slots: if slot.seq == seq: return slot return None ================================================ FILE: backend/utils/wifi_uav_jpeg.py ================================================ from typing import List # Start Of Image SOI = bytearray(b"\xff\xd8") # End Of Image EOI = bytearray(b"\xff\xd9") # fmt: off # Standard luminance quantization table std_luminance_qt = [ 16, 11, 10, 
16, 24, 40, 51, 61, 12, 12, 14, 19, 26, 58, 60, 55, 14, 13, 16, 24, 40, 57, 69, 56, 14, 17, 22, 29, 51, 87, 80, 62, 18, 22, 37, 56, 68, 109, 103, 77, 24, 35, 55, 64, 81, 104, 113, 92, 49, 64, 78, 87,103, 121, 120, 101, 72, 92, 95, 98,112, 100, 103, 99 ] # Standard chrominance quantization table std_chrominance_qt = [ 17, 18, 24, 47, 99, 99, 99, 99, 18, 21, 26, 66, 99, 99, 99, 99, 24, 26, 56, 99, 99, 99, 99, 99, 47, 66, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99 ] # fmt: on def generate_dqt_segment(id: int, table: List[int], precision: int = 0) -> bytes: """ Generates the DQT (Define Quantization Table) segment. Args: id (int): Table ID (0–3) table (list of 64 ints): Quantization values in zigzag order precision (int): 0 for 8-bit, 1 for 16-bit. Defaults to 0. Returns: bytes: The full DQT segment. """ if len(table) != 64: raise ValueError(f"Quantization table must have 64 values.") if precision not in (0, 1): raise ValueError("Precision must be 0 (8-bit) or 1 (16-bit).") segment = bytearray(b"\xff\xdb") # Marker payload = bytearray() # Info byte: (precision << 4) | table_id payload.append((precision << 4) | id) # Add table data if precision == 0: payload.extend(table) else: for val in table: payload.extend(val.to_bytes(2, "big")) # Length = 2 (for length field itself) + payload size length = len(payload) + 2 segment.extend(length.to_bytes(2, "big")) segment.extend(payload) return bytes(segment) def generate_sof0_segment(width: int, height: int, num_components: int = 3) -> bytes: """ Generates a SOF0 (Start of Frame, Baseline DCT) segment for a JPEG image. Uses 4:4:4 subsampling. Args: width (int): Image width in pixels. height (int): Image height in pixels. num_components (int): Number of components (e.g., 1 for grayscale, 3 for YCbCr). Defaults to 3. component_info (list of dict, optional): List of dicts with keys: - 'id' (int): Component ID - 'sampling' (tuple): (H, V) sampling factors - 'qt_id' (int): Quantization table ID If not provided, defaults to common layout. Returns: bytes: Complete SOF0 segment (including marker). 
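        Illustrative example (hand-computed, not asserted in the repository):
        generate_sof0_segment(640, 480) yields

            ff c0 00 11 08 01 e0 02 80 03 01 11 00 02 11 01 03 11 01

        i.e. marker, length 0x0011, 8-bit precision, height 480, width 640,
        3 components with 1x1 sampling, Y on table 0 and Cb/Cr on table 1.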
""" if not (1 <= width <= 65535 and 1 <= height <= 65535): raise ValueError("Width and height must be between 1 and 65535.") if num_components not in (1, 3): raise ValueError("Number of components must be 1 or 3.") marker = b"\xff\xc0" # SOF0 marker precision = b"\x08" # 8-bit sample precision height_bytes = height.to_bytes(2, "big") width_bytes = width.to_bytes(2, "big") num_components_byte = num_components.to_bytes(1, "big") # Default component layout if num_components == 1: component_info = [{"id": 1, "sampling": (1, 1), "qt_id": 0}] elif num_components == 3: component_info = [ {"id": 1, "sampling": (1, 1), "qt_id": 0}, # Y {"id": 2, "sampling": (1, 1), "qt_id": 1}, # Cb {"id": 3, "sampling": (1, 1), "qt_id": 1}, # Cr ] else: raise ValueError("Invalid num_components") # Build component specs component_specs = b"" for comp in component_info: comp_id = comp["id"].to_bytes(1, "big") H, V = comp["sampling"] sampling_byte = ((H << 4) | V).to_bytes(1, "big") qt_id = comp["qt_id"].to_bytes(1, "big") component_specs += comp_id + sampling_byte + qt_id # Total length = 8 (header) + 3 bytes per component length = (8 + 3 * num_components).to_bytes(2, "big") return ( marker + length + precision + height_bytes + width_bytes + num_components_byte + component_specs ) def generate_sos_segment( num_components: int, Ss: int = 0, Se: int = 63, AhAl: int = 0 ) -> bytes: """ Generates the SOS (Start of Scan) segment for a JPEG file. Args: num_components (int): Number of components (1 for grayscale, 3 for YCbCr). Ss (int): Start of spectral selection. Defaults to 0. Se (int): End of spectral selection. Defaults to 63 AhAl (int): Approximation bit position. Defaults to 0. Returns: bytes: Complete SOS segment (including marker). """ if num_components == 1: component_selectors = [{"id": 1, "dc": 0, "ac": 0}] elif num_components == 3: component_selectors = [ {"id": 1, "dc": 0, "ac": 0}, # Y {"id": 2, "dc": 1, "ac": 1}, # Cb {"id": 3, "dc": 1, "ac": 1}, # Cr ] else: raise ValueError("Component count must be 1 or 3.") marker = b"\xff\xda" length = 6 + 2 * num_components segment = bytearray(marker) segment += length.to_bytes(2, "big") segment.append(num_components) for comp in component_selectors: table_selector = (comp["dc"] << 4) | comp["ac"] segment.append(comp["id"]) segment.append(table_selector) segment.append(Ss) segment.append(Se) segment.append(AhAl) return bytes(segment) def generate_jpeg_headers(width: int, height: int, num_components: int = 3) -> bytes: """ Generates a minimal JPEG header without Huffman tables and default quantization tables. Args: width (int): Image width in pixels. height (int): Image height in pixels. num_components (int): Number of components (1 for grayscale, 3 for YCbCr). Returns: bytes: JPEG header bytes (SOI, DQT, SOF0, SOS). 
""" # Generate segments luminance_dqt = generate_dqt_segment(id=0, table=std_luminance_qt) chrominance_dqt = generate_dqt_segment(id=1, table=std_chrominance_qt) sof = generate_sof0_segment( width=width, height=height, num_components=num_components ) sos = generate_sos_segment(num_components=num_components) # Build the header header = bytearray() header += SOI header += luminance_dqt if num_components == 3: header += chrominance_dqt header += sof header += sos return bytes(header) ================================================ FILE: backend/utils/wifi_uav_packets.py ================================================ # Start Video Feed START_STREAM = b"\xef\x00\x04\x00" # Unknown UNK_FRAME = b"\xef\x20\x06\x00\x01\x65" # Drone Info I think SSID2 = ( b"\xef\x20\x19\x00\x01\x67" b"\x3c\x69\x3d\x32\x5e\x62\x66\x5f\x73\x73\x69\x64\x3d\x63\x6d\x64" b"\x3d\x32\x3e" ) SSID3 = ( b"\xef\x20\x19\x00\x01\x67" b"\x3c\x69\x3d\x32\x5e\x62\x66\x5f\x73\x73\x69\x64\x3d\x63\x6d\x64" b"\x3d\x33\x3e" ) # Both of these are sent for each frame. Native `build_send_ack()` builds this # same outer packet shape: # # ef 02 02 02 00 01 ... # # The packet also embeds the latest user command at offset 18. The current # constants carry a neutral extended command (`66 14 80 80 80 80 ... 99`) so # video can advance even when no RC input has changed. REQUEST_A = ( b"\xef\x02\x58\x00\x02\x02" b"\x00\x01\x00\x00\x00\x00\x05\x00\x00\x00\x14\x00\x66\x14\x80\x80" b"\x80\x80\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x99" b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x32\x4b\x14\x2d" b"\x00\x00" ) # See previous comment REQUEST_B = ( b"\xef\x02\x7c\x00\x02\x02" b"\x00\x01\x02\x00\x00\x00\x09\x00\x00\x00\x14\x00\x66\x14\x80\x80" b"\x80\x80\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x99" b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x32\x4b\x14\x2d" b"\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x14\x00" b"\x00\x00\xff\xff\xff\xff\x09\x00\x00\x00\x00\x00\x00\x00\x03\x00" b"\x00\x00\x10\x00\x00\x00" ) NEUTRAL_EXTENDED_COMMAND = ( b"\x66\x14\x80\x80\x80\x80\x00\x02" b"\x00\x00\x00\x00\x00\x00\x00\x00" b"\x00\x00\x02\x99" ) DEFAULT_QUALITY_PARAMS = b"\x32\x4b\x14\x2d\x00" def build_fragment_ack_bitmap(fragment_total: int, received_fragments: set[int]) -> bytes: """Build the native little-endian fragment ACK bitmap.""" if fragment_total <= 0: return b"" word_count = (fragment_total + 31) // 32 words = [0] * word_count for fragment_id in received_fragments: if 0 <= fragment_id < fragment_total: words[fragment_id // 32] |= 1 << (fragment_id & 31) return b"".join(word.to_bytes(4, "little") for word in words) def build_ack_slot(seq: int, status: int, bitmap: bytes = b"") -> bytes: """Build one native ACK slot record.""" record_len = 16 + len(bitmap) return ( (seq & 0xFFFFFFFFFFFFFFFF).to_bytes(8, "little") + (status & 0xFFFFFFFF).to_bytes(4, "little") + record_len.to_bytes(4, "little") + bitmap ) def build_native_ack_packet( command_seq: int, ack_slots: list[bytes], command: bytes = NEUTRAL_EXTENDED_COMMAND, quality_params: bytes = DEFAULT_QUALITY_PARAMS, ) -> bytes: """ Build a native-shaped WiFi-UAV ACK/request packet. This mirrors the header layout emitted by `build_send_ack()` / `build_send_ack_bl618()` in `libuav_lib.so`. 
""" if len(command) > 64: raise ValueError("WiFi-UAV command payload must be <= 64 bytes") if len(quality_params) != 5: raise ValueError("WiFi-UAV quality params must be exactly 5 bytes") packet = bytearray() packet += b"\xef\x02\x00\x00" packet += b"\x02\x02\x00\x01" packet += bytes([len(ack_slots) & 0xFF]) packet += b"\x00\x00\x00" packet += (command_seq & 0xFFFFFFFF).to_bytes(4, "little") packet += len(command).to_bytes(2, "little") packet += command.ljust(64, b"\x00") packet += quality_params packet += b"\x00" for slot in ack_slots: packet += slot packet[2:4] = len(packet).to_bytes(2, "little") return bytes(packet) ================================================ FILE: backend/utils/wifi_uav_variants.py ================================================ from __future__ import annotations import os import subprocess import sys from dataclasses import dataclass from typing import Final WIFI_UAV_DRONE_TYPES: Final[frozenset[str]] = frozenset({ "wifi_uav", "wifi_uav_fld", "wifi_uav_uav", }) _UAV_PREFIXES: Final[tuple[str, ...]] = ("flow_", "flow-", "flow") _FLD_PREFIXES: Final[tuple[str, ...]] = ("wifi_", "gd89pro_", "wtech-", "wtech_", "drone_") @dataclass(frozen=True) class WifiUavCapabilities: variant: str transport: str supports_camera_tilt: bool supports_camera_switch: bool video_ports: tuple[int, ...] rc_command_shape: str _VARIANT_CAPABILITIES: Final[dict[str, WifiUavCapabilities]] = { "fld": WifiUavCapabilities( variant="fld", transport="fld_compat", supports_camera_tilt=False, supports_camera_switch=False, video_ports=(8800,), rc_command_shape="native_ack_embedded", ), "uav": WifiUavCapabilities( variant="uav", transport="uav_dual_port", supports_camera_tilt=True, supports_camera_switch=True, video_ports=(8800, 8801), rc_command_shape="native_ack_embedded", ), } def wifi_uav_variant_from_drone_type(drone_type: str) -> str: """Map the user-facing DRONE_TYPE onto the internal wifi_uav variant name.""" normalised = (drone_type or "").strip().lower() if normalised == "wifi_uav_fld": return "fld" if normalised == "wifi_uav_uav": return "uav" return "auto" def resolve_wifi_uav_variant(drone_type: str) -> str: """ Resolve the effective wifi_uav transport variant. Explicit DRONE_TYPE aliases win. For the umbrella `wifi_uav` type, prefer a best-effort SSID-based choice and otherwise fall back to the legacy/default path, which today is closest to the FLD-style transport. 
""" explicit = wifi_uav_variant_from_drone_type(drone_type) if explicit != "auto": return explicit ssid = detect_active_wifi_ssid() mapped = map_wifi_uav_variant_from_ssid(ssid) if mapped: return mapped return "fld" def get_wifi_uav_capabilities(variant: str) -> WifiUavCapabilities: """Return the internal capability profile for a resolved WiFi-UAV variant.""" return _VARIANT_CAPABILITIES.get((variant or "").strip().lower(), _VARIANT_CAPABILITIES["fld"]) def resolve_wifi_uav_capabilities(drone_type: str) -> WifiUavCapabilities: """Resolve a user-facing DRONE_TYPE to its internal capability profile.""" return get_wifi_uav_capabilities(resolve_wifi_uav_variant(drone_type)) def map_wifi_uav_variant_from_ssid(ssid: str | None) -> str | None: """Map a Wi-Fi SSID prefix onto the app's Uav/Fld backend choice.""" if not ssid: return None normalised = ssid.strip().lower() if any(normalised.startswith(prefix) for prefix in _UAV_PREFIXES): return "uav" if any(normalised.startswith(prefix) for prefix in _FLD_PREFIXES): return "fld" return None def detect_active_wifi_ssid() -> str | None: """ Best-effort detection of the currently connected Wi-Fi SSID. This is intentionally conservative: if platform-specific commands are not available or parsing fails, return None and let the caller fall back. """ for env_name in ("WIFI_UAV_SSID", "DRONE_SSID", "WIFI_SSID"): value = os.getenv(env_name, "").strip() if value: return value try: if sys.platform.startswith("win"): return _detect_ssid_windows() if sys.platform == "darwin": return _detect_ssid_macos() return _detect_ssid_linux() except Exception: return None def _detect_ssid_windows() -> str | None: result = subprocess.run( ["netsh", "wlan", "show", "interfaces"], capture_output=True, text=True, timeout=2.0, check=False, ) if result.returncode != 0: return None for line in result.stdout.splitlines(): stripped = line.strip() if not stripped.lower().startswith("ssid") or stripped.lower().startswith("bssid"): continue _, _, value = stripped.partition(":") ssid = value.strip() if ssid: return ssid return None def _detect_ssid_macos() -> str | None: airport = "/System/Library/PrivateFrameworks/Apple80211.framework/Versions/Current/Resources/airport" result = subprocess.run( [airport, "-I"], capture_output=True, text=True, timeout=2.0, check=False, ) if result.returncode != 0: return None for line in result.stdout.splitlines(): stripped = line.strip() if not stripped.startswith("SSID:"): continue _, _, value = stripped.partition(":") ssid = value.strip() if ssid: return ssid return None def _detect_ssid_linux() -> str | None: for command in (["iwgetid", "-r"], ["nmcli", "-t", "-f", "active,ssid", "dev", "wifi"]): result = subprocess.run( command, capture_output=True, text=True, timeout=2.0, check=False, ) if result.returncode != 0: continue output = result.stdout.strip() if not output: continue if command[0] == "nmcli": for line in output.splitlines(): if not line.startswith("yes:"): continue ssid = line.split(":", 1)[1].strip() if ssid: return ssid else: return output return None ================================================ FILE: backend/video_client.py ================================================ #!/usr/bin/env python3 import argparse import queue import signal import sys import os from protocols.s2x_video_protocol import S2xVideoProtocolAdapter from protocols.wifi_uav_video_protocol import WifiUavVideoProtocolAdapter from services.video_receiver import VideoReceiverService from views.opencv_video_view import OpenCVVideoView def main(): parser = 
argparse.ArgumentParser( description="Modular drone video client" ) parser.add_argument("--drone-ip", default="172.16.10.1", help="Drone host address") parser.add_argument("--video-port", type=int, default=8888) parser.add_argument("--control-port", type=int, default=8080) parser.add_argument( "--keepalive", type=float, default=1.0, help="Re-send start-video every N seconds" ) parser.add_argument( "--dump-frames", action="store_true", help="Dump every reassembled frame to disk" ) parser.add_argument( "--dump-packets", action="store_true", help="Dump every raw packet to disk" ) parser.add_argument( "--dump-dir", type=str, default=None, help="Directory to store dumps (default: dumps_timestamp)" ) args = parser.parse_args() # Define the blueprint for our protocol adapter. # The VideoReceiverService will use this to create new instances. protocol_class = WifiUavVideoProtocolAdapter protocol_args = { "drone_ip": "192.168.169.1", "control_port": 8800, "video_port": 8800, "debug": True } # protocol_class = S2xVideoProtocolAdapter # protocol_args = { # "drone_ip": "172.16.10.1", # "control_port": 8080, # "video_port": 8888, # "debug": True # } # Create frame queue frame_queue = queue.Queue(maxsize=100) # The service now takes the class and args to manage the protocol's lifecycle. receiver = VideoReceiverService( protocol_class, protocol_args, frame_queue, dump_frames=args.dump_frames, dump_packets=args.dump_packets, dump_dir=args.dump_dir ) # Create view view = OpenCVVideoView(frame_queue) # Set up signal handler for clean shutdown def signal_handler(sig, frame): print("\n[main] Caught signal, shutting down...") # First stop the receiver (which stops the keepalive) receiver.stop() # Then stop the view view.stop() # Exit more forcefully os._exit(0) # Use os._exit instead of sys.exit signal.signal(signal.SIGINT, signal_handler) signal.signal(signal.SIGTERM, signal_handler) # The receiver service now manages the entire protocol lifecycle. # We no longer send the initial start command from here. 
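    # If the stream drops, VideoReceiverService._receiver_loop() tears the
    # adapter down, waits ~5 seconds, and builds a fresh one, so no manual
    # restart or re-sent start command is needed here.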
receiver.start() try: view.run() finally: print("[main] Shutting down...") view.stop() receiver.stop() if __name__ == "__main__": main() ================================================ FILE: backend/views/__init__.py ================================================ from .cli_rc import CLIView __all__ = ['CLIView'] ================================================ FILE: backend/views/base_video_view.py ================================================ from abc import ABC, abstractmethod class BaseVideoView(ABC): """Base abstract class for video display views""" def __init__(self, frame_queue): self.frame_queue = frame_queue self.running = True @abstractmethod def run(self): """Start the view's main loop""" pass @abstractmethod def stop(self): """Stop the view""" pass ================================================ FILE: backend/views/cli_rc.py ================================================ # Remote Control using curses cli interface import curses import time from services.flight_controller import FlightController class CLIView: """Curses-based CLI view for drone remote control""" def __init__(self, flight_controller): self.controller = flight_controller self.sensitivity_mode = 0 # 0=normal, 1=precise, 2=aggressive self.sensitivity_labels = ["Normal", "Precise", "Aggressive"] self.PRESS_THRESHOLD = 0.4 # threshold for key being held def run(self): """Start the CLI interface""" curses.wrapper(self._ui_loop) def _ui_loop(self, stdscr): """Main curses UI loop""" curses.curs_set(0) stdscr.nodelay(True) stdscr.keypad(True) help_msg = "W/S=throttle A/D=yaw Arrows=pitch/roll T=takeoff L=land Q=quit" help_msg2 = "F=debug packets X=sensitivity mode" # direction states and last-press timestamps throttle_dir = yaw_dir = pitch_dir = roll_dir = 0 throttle_ts = yaw_ts = pitch_ts = roll_ts = 0.0 prev_time = time.time() debug_enabled = False while self.controller.running: now = time.time() dt = now - prev_time prev_time = now c = stdscr.getch() if c in (ord('q'), ord('Q')): self.controller.stop() break elif c in (ord('t'), ord('T')): self.controller.model.takeoff() elif c in (ord('l'), ord('L')): self.controller.model.land() elif c in (ord('f'), ord('F')): debug_enabled = self.controller.protocol.toggle_debug() elif c in (ord('x'), ord('X')): # Toggle sensitivity mode self.sensitivity_mode = (self.sensitivity_mode + 1) % 3 self.controller.model.set_sensitivity(self.sensitivity_mode) # throttle elif c in (ord('w'), ord('W')): throttle_dir = +1; throttle_ts = now elif c in (ord('s'), ord('S')): throttle_dir = -1; throttle_ts = now # yaw elif c in (ord('a'), ord('A')): yaw_dir = -1; yaw_ts = now elif c in (ord('d'), ord('D')): yaw_dir = +1; yaw_ts = now # pitch elif c == curses.KEY_UP: pitch_dir = +1; pitch_ts = now elif c == curses.KEY_DOWN: pitch_dir = -1; pitch_ts = now # roll elif c == curses.KEY_LEFT: roll_dir = -1; roll_ts = now elif c == curses.KEY_RIGHT: roll_dir = +1; roll_ts = now # decide if each axis is "still held" active_throttle = throttle_dir if (now - throttle_ts) < self.PRESS_THRESHOLD else 0 active_yaw = yaw_dir if (now - yaw_ts) < self.PRESS_THRESHOLD else 0 active_pitch = pitch_dir if (now - pitch_ts) < self.PRESS_THRESHOLD else 0 active_roll = roll_dir if (now - roll_ts) < self.PRESS_THRESHOLD else 0 # Update controller with current control directions self.controller.set_control_direction('throttle', active_throttle) self.controller.set_control_direction('yaw', active_yaw) self.controller.set_control_direction('pitch', active_pitch) self.controller.set_control_direction('roll', 
active_roll) # Update the UI state = self.controller.model.get_control_state() stdscr.clear() stdscr.addstr(0, 0, f"Throttle: {int(state['throttle']):3d} " f"Yaw: {int(state['yaw']):3d}") stdscr.addstr(1, 0, f" Pitch: {int(state['pitch']):3d} " f"Roll: {int(state['roll']):3d}") # Add status flags to UI status_flags = [f"Mode: {self.sensitivity_labels[self.sensitivity_mode]}"] if debug_enabled: status_flags.append("DEBUG") status_str = " | ".join(status_flags) stdscr.addstr(2, 0, f"Status: {status_str}") stdscr.addstr(4, 0, help_msg) stdscr.addstr(5, 0, help_msg2) stdscr.refresh() # small sleep to cap UI frame-rate time.sleep(0.01) ================================================ FILE: backend/views/opencv_video_view.py ================================================ import cv2 import numpy as np import queue import time import sys import ctypes from views.base_video_view import BaseVideoView class OpenCVVideoView(BaseVideoView): """OpenCV-based video display view""" def __init__(self, frame_queue, window_name="Drone Video"): super().__init__(frame_queue) self.window_name = window_name # ------------------------------------------------------------------ # # private helper – poke HighGUI so waitKey() returns immediately # ------------------------------------------------------------------ # def _wakeup_highgui(self): if sys.platform.startswith("win"): hwnd = ctypes.windll.user32.FindWindowW(None, self.window_name) if hwnd: ctypes.windll.user32.PostMessageW(hwnd, 0, 0, 0) else: # On X11 / Cocoa / Qt nothing special is needed – an extra waitKey # call will do. cv2.waitKey(1) def run(self): """Start the OpenCV display loop""" cv2.namedWindow(self.window_name, cv2.WINDOW_NORMAL) # Build a placeholder image (black + red warning text) placeholder_h, placeholder_w = 480, 640 placeholder = np.zeros((placeholder_h, placeholder_w, 3), np.uint8) txt = "No video frames received yet" font, scale, th = cv2.FONT_HERSHEY_SIMPLEX, 0.6, 2 (tw, th_), _ = cv2.getTextSize(txt, font, scale, th) x = (placeholder_w - tw) // 2 y = (placeholder_h + th_) // 2 cv2.putText(placeholder, txt, (x, y), font, scale, (0, 0, 255), th) fps_timer = time.time() frame_count = 0 while self.running: frame = None try: frame = self.frame_queue.get(timeout=1.0) except queue.Empty: pass if frame is None: img = placeholder is_real = False else: # Handle different frame formats if frame.format == "jpeg": arr = np.frombuffer(frame.data, dtype=np.uint8) decoded = cv2.imdecode(arr, cv2.IMREAD_COLOR) if decoded is None: print(f"[display] ⚠ imdecode failed ({len(arr)} bytes)") img, is_real = placeholder, False else: img, is_real = decoded, True # Store dimensions in frame for future reference frame.width, frame.height = img.shape[1], img.shape[0] else: # For future formats like h264, h265, etc. 
print(f"[display] Unsupported format: {frame.format}") img, is_real = placeholder, False cv2.imshow(self.window_name, img) key = cv2.waitKey(1) & 0xFF if key == ord("q"): self.running = False break # Only count toward FPS when we had a real frame if is_real: frame_count += 1 if frame_count % 60 == 0: now = time.time() print(f"[display] ~{frame_count/(now-fps_timer):4.1f} fps") fps_timer, frame_count = now, 0 cv2.destroyAllWindows() def stop(self): """Stop the display loop""" self.running = False self._wakeup_highgui() # make waitKey return ================================================ FILE: backend/web_server.py ================================================ import asyncio import threading import queue import time from contextlib import asynccontextmanager from typing import Any, Optional import logging import os from fastapi import FastAPI, WebSocket, WebSocketDisconnect, HTTPException from fastapi.middleware.cors import CORSMiddleware from starlette.websockets import WebSocketState from utils.logging_config import bootstrap_runtime, configure_logging bootstrap_runtime() from services.flight_controller import FlightController from control.strategies import DirectStrategy, IncrementalStrategy from services.video_receiver import VideoReceiverService from models.s2x_rc import S2xDroneModel as S2xRcModel from models.debug_rc import DebugRcModel from protocols.s2x_rc_protocol_adapter import S2xRCProtocolAdapter from protocols.debug_rc_protocol_adapter import DebugRcProtocolAdapter from protocols.s2x_video_protocol import S2xVideoProtocolAdapter from protocols.debug_video_protocol import DebugVideoProtocolAdapter from protocols.wifi_uav_rc_protocol_adapter import WifiUavRcProtocolAdapter from protocols.wifi_uav_video_protocol import WifiUavVideoProtocolAdapter from models.wifi_uav_rc import WifiUavRcModel from models.cooingdv_rc import CooingdvRcModel from protocols.cooingdv_rc_protocol_adapter import CooingdvRcProtocolAdapter from protocols.cooingdv_jieli_rc_protocol_adapter import CooingdvJieliRcProtocolAdapter from protocols.cooingdv_jieli_video_protocol import CooingdvJieliVideoProtocolAdapter from protocols.cooingdv_video_protocol import CooingdvVideoProtocolAdapter from models.wifi_cam_rc import WifiCamRcModel from protocols.wifi_cam_rc_protocol_adapter import WifiCamRcProtocolAdapter from protocols.wifi_cam_video_protocol import WifiCamVideoProtocolAdapter from plugins.manager import PluginManager from utils.dropping_queue import DroppingQueue from utils.wifi_uav_variants import ( WIFI_UAV_DRONE_TYPES, resolve_wifi_uav_capabilities, resolve_wifi_uav_variant, ) COOINGDV_DRONE_TYPES = {"cooingdv", "cooingdv_jieli"} class ConnectionManager: """ Manages active WebSocket connections for broadcasting messages. 
""" def __init__(self): self.active_connections: list[WebSocket] = [] async def connect(self, websocket: WebSocket): await websocket.accept() self.active_connections.append(websocket) def disconnect(self, websocket: WebSocket): if websocket in self.active_connections: self.active_connections.remove(websocket) async def broadcast(self, message: str): # Copy to avoid mutation during iteration for connection in list(self.active_connections): if connection.client_state == WebSocketState.CONNECTED: try: await connection.send_text(message) except Exception: self.disconnect(connection) async def broadcast_bytes(self, message: bytes): # Copy to avoid mutation during iteration for connection in list(self.active_connections): if connection.client_state == WebSocketState.CONNECTED: try: await connection.send_bytes(message) except Exception: self.disconnect(connection) async def broadcast_json(self, obj: Any): # Copy to avoid mutation during iteration for connection in list(self.active_connections): if connection.client_state == WebSocketState.CONNECTED: try: await connection.send_json(obj) except Exception: self.disconnect(connection) # Feature flags PLUGINS_ENABLED = os.getenv("PLUGINS_ENABLED", "false").lower() in ("1", "true", "yes", "on") configure_logging() logger = logging.getLogger(__name__) # Managers for WebSocket connections overlay_manager = ConnectionManager() video_manager = ConnectionManager() def _control_capabilities_for_drone(drone_type: str) -> dict[str, bool]: """ Describe which one-shot commands the active implementation exposes. These capabilities let the frontend keep its control cluster honest instead of assuming every implementation supports the same actions. """ if drone_type in WIFI_UAV_DRONE_TYPES: wifi_uav_capabilities = resolve_wifi_uav_capabilities(drone_type) return { "takeoff": True, "land": True, "estop": True, "camera_tilt": wifi_uav_capabilities.supports_camera_tilt, "camera_switch": wifi_uav_capabilities.supports_camera_switch, "speed_control": True, } if drone_type in {"s2x", "wifi_cam"} or drone_type in COOINGDV_DRONE_TYPES: return { "takeoff": True, "land": True, "estop": True, "camera_tilt": False, "camera_switch": drone_type == "wifi_cam", "speed_control": drone_type == "s2x", } if drone_type == "debug": return { "takeoff": True, "land": True, "estop": False, "camera_tilt": False, "camera_switch": False, "speed_control": False, } return { "takeoff": False, "land": False, "estop": False, "camera_tilt": False, "camera_switch": False, "speed_control": False, } class FrameHub: """ Fan-out hub for MJPEG frames. Each /mjpeg client gets its own asyncio.Queue, so multiple clients don't steal frames from each other. """ def __init__(self, per_client_queue_size: int = 2): self._per_client_queue_size = per_client_queue_size self._clients: set[asyncio.Queue] = set() self._lock = asyncio.Lock() async def register(self) -> asyncio.Queue: q: asyncio.Queue = asyncio.Queue(self._per_client_queue_size) async with self._lock: self._clients.add(q) return q async def unregister(self, q: asyncio.Queue) -> None: async with self._lock: self._clients.discard(q) async def publish(self, frame: Optional[bytes]) -> None: # Snapshot under lock; we only do non-blocking puts. async with self._lock: clients = list(self._clients) for q in clients: try: q.put_nowait(frame) except asyncio.QueueFull: # Drop oldest then try again. try: _ = q.get_nowait() except asyncio.QueueEmpty: pass try: q.put_nowait(frame) except Exception: # Give up on this client queue; it's likely stalled. 
await self.unregister(q) FRAME_HUB = FrameHub(per_client_queue_size=2) @asynccontextmanager async def lifespan(app: FastAPI): # Startup global flight_controller, receiver, plugin_manager, active_drone_type, control_capabilities drone_type = os.getenv("DRONE_TYPE", "s2x").lower() active_drone_type = drone_type control_capabilities = _control_capabilities_for_drone(drone_type) logger.info("[main] Using drone type: %s", drone_type) if drone_type == "s2x": logger.info("[main] Using S2X drone implementation.") # Allow overriding IP and ports via env to match prior behavior default_ip = "172.16.10.1" default_ctrl_port = 8080 default_video_port = 8888 default_control_rate = 80.0 drone_ip = os.getenv("DRONE_IP", default_ip) ctrl_port = int(os.getenv("CONTROL_PORT", default_ctrl_port)) video_port = int(os.getenv("VIDEO_PORT", default_video_port)) model = S2xRcModel() rc_proto = S2xRCProtocolAdapter(drone_ip, ctrl_port) # Optional remap via env: some S2x variants swap yaw and roll try: rc_proto.swap_yaw_roll = os.getenv("S2X_SWAP_YAW_ROLL", "false").lower() in ("1", "true", "yes", "on") if rc_proto.swap_yaw_roll: logger.info("[main] S2X swap_yaw_roll enabled") except Exception: pass video_adapter_cls = S2xVideoProtocolAdapter video_adapter_args = { "drone_ip": drone_ip, "control_port": ctrl_port, "video_port": video_port, } elif drone_type in WIFI_UAV_DRONE_TYPES: wifi_uav_variant = resolve_wifi_uav_variant(drone_type) logger.info("[main] Using WiFi UAV drone implementation (variant=%s).", wifi_uav_variant) # Align with previous working setup: env-configurable IP and ports default_ip = "192.168.169.1" default_ctrl_port = 8800 default_video_port = 8800 default_control_rate = 80.0 drone_ip = os.getenv("DRONE_IP", default_ip) ctrl_port = int(os.getenv("CONTROL_PORT", default_ctrl_port)) video_port = int(os.getenv("VIDEO_PORT", default_video_port)) model = WifiUavRcModel() rc_proto = WifiUavRcProtocolAdapter(drone_ip, ctrl_port, variant=wifi_uav_variant) video_adapter_cls = WifiUavVideoProtocolAdapter video_adapter_args = { "drone_ip": drone_ip, "control_port": ctrl_port, "video_port": video_port, "variant": wifi_uav_variant, "debug": False, } elif drone_type in COOINGDV_DRONE_TYPES: logger.info("[main] Using Cooingdv drone implementation (RC UFO, KY UFO, E88 Pro).") if drone_type == "cooingdv_jieli": default_ip = "192.168.8.15" default_ctrl_port = 2228 default_video_port = 0 default_control_rate = 20.0 else: default_ip = "192.168.1.1" default_ctrl_port = 7099 default_video_port = 7070 # RTSP port default_control_rate = 20.0 drone_ip = os.getenv("DRONE_IP", default_ip) ctrl_port = int(os.getenv("CONTROL_PORT", default_ctrl_port)) video_port = int(os.getenv("VIDEO_PORT", default_video_port)) model = CooingdvRcModel() if drone_type == "cooingdv_jieli": rc_proto = CooingdvJieliRcProtocolAdapter(drone_ip, ctrl_port) video_adapter_cls = CooingdvJieliVideoProtocolAdapter video_adapter_args = { "drone_ip": drone_ip, "control_port": ctrl_port, "video_port": int(os.getenv("JIELI_RTP_VIDEO_PORT", video_port or 6666)), "audio_port": int(os.getenv("JIELI_RTP_AUDIO_PORT", 1234)), "sdp_port": int(os.getenv("JIELI_SDP_PORT", 6789)), "width": int(os.getenv("JIELI_VIDEO_WIDTH", 640)), "height": int(os.getenv("JIELI_VIDEO_HEIGHT", 360)), "fps": int(os.getenv("JIELI_VIDEO_FPS", 30)), } else: rc_proto = CooingdvRcProtocolAdapter(drone_ip, ctrl_port) video_adapter_cls = CooingdvVideoProtocolAdapter video_adapter_args = { "drone_ip": drone_ip, "control_port": ctrl_port, "video_port": video_port, } elif drone_type == 
"wifi_cam": logger.info("[main] Using WiFi_CAM native UDP implementation.") default_ip = "192.168.4.153" default_ctrl_port = 8090 default_video_port = 8080 default_control_rate = 25.0 drone_ip = os.getenv("DRONE_IP", default_ip) ctrl_port = int(os.getenv("CONTROL_PORT", default_ctrl_port)) video_port = int(os.getenv("VIDEO_PORT", default_video_port)) command_mode = os.getenv("WIFI_CAM_COMMAND_MODE", "auto") model = WifiCamRcModel() rc_proto = WifiCamRcProtocolAdapter(drone_ip, ctrl_port, command_mode=command_mode) video_adapter_cls = WifiCamVideoProtocolAdapter video_adapter_args = { "drone_ip": drone_ip, "control_port": ctrl_port, "video_port": video_port, "debug": os.getenv("WIFI_CAM_VIDEO_DEBUG", "false").lower() in ("1", "true", "yes", "on"), } elif drone_type == "debug": logger.info("[main] Using debug drone implementation.") default_control_rate = 80.0 model = DebugRcModel() rc_proto = DebugRcProtocolAdapter() video_adapter_cls = DebugVideoProtocolAdapter video_adapter_args = {"camera_index": 0, "debug": False} else: raise ValueError(f"Unknown drone type: {drone_type}") # 1. Video – let the service create / recycle the adapter video_service_args = { "protocol_adapter_class": video_adapter_cls, "protocol_adapter_args": video_adapter_args, "frame_queue": RAW_Q, } if drone_type in WIFI_UAV_DRONE_TYPES or drone_type == "wifi_cam": video_service_args["rc_adapter"] = rc_proto receiver = VideoReceiverService(**video_service_args) receiver.start() # Wait a moment for video to stabilize await asyncio.sleep(1) # 2. RC / flight # Optional: enable low-level RC packet debug via env try: if os.getenv("RC_DEBUG_PACKETS", "false").lower() in ("1", "true", "yes", "on"): try: rc_proto.toggle_debug() logger.info("[main] RC packet debug: ON") except Exception: pass except Exception: pass try: control_rate = float(os.getenv("CONTROL_RATE", default_control_rate)) except (TypeError, ValueError): control_rate = default_control_rate flight_controller = FlightController(model, rc_proto, control_rate) flight_controller.start() # 3. Plugins (optional) plugin_frame_q: Optional[queue.Queue] = None overlay_broadcaster: Optional["OverlayBroadcaster"] = None if PLUGINS_ENABLED: PLUGIN_FRAME_Q = DroppingQueue(maxsize=100) PLUGIN_OVERLAY_Q = DroppingQueue(maxsize=100) plugin_manager = PluginManager(flight_controller, PLUGIN_FRAME_Q, PLUGIN_OVERLAY_Q) plugin_frame_q = PLUGIN_FRAME_Q # Start overlay broadcaster only when plugins are enabled overlay_broadcaster = OverlayBroadcaster(PLUGIN_OVERLAY_Q, asyncio.get_running_loop()) overlay_broadcaster.start() logger.info("[plugins] Plugins enabled") else: plugin_manager = None logger.info("[plugins] Plugins disabled (set PLUGINS_ENABLED=true to enable)") # 4. start bridge thread (daemon) for video pump (always, for MJPEG) _pump_stop = threading.Event() main_loop = asyncio.get_running_loop() _pump_thread = threading.Thread( target=_frame_pump_worker, args=(RAW_Q, plugin_frame_q, FRAME_HUB, _pump_stop, main_loop), name="FramePump", daemon=True, ) _pump_thread.start() yield # Shutdown if overlay_broadcaster: overlay_broadcaster.stop() if plugin_manager: plugin_manager.stop_all() if flight_controller: flight_controller.stop() if receiver: receiver.stop() if _pump_stop: _pump_stop.set() if _pump_thread: _pump_thread.join(timeout=1.0) # ─────────────────────────────────────────────────────────────── # FastAPI app + permissive CORS (tighten in production!) 
# ─────────────────────────────────────────────────────────────── app = FastAPI(title="Drone web adapter", lifespan=lifespan) app.add_middleware( CORSMiddleware, allow_origins=["*"], # TODO: restrict in prod allow_methods=["*"], allow_headers=["*"], ) # ─────────────────────────────────────────────────────────────── # Global objects (single-drone) # ─────────────────────────────────────────────────────────────── RAW_Q: queue.Queue = DroppingQueue(maxsize=2) # thread-safe → pump flight_controller: Optional[FlightController] = None receiver: Optional[VideoReceiverService] = None plugin_manager: Optional[PluginManager] = None active_drone_type = os.getenv("DRONE_TYPE", "s2x").lower() control_capabilities = _control_capabilities_for_drone(active_drone_type) video_keepalive = None # legacy; no longer used # ─────────────────────────────────────────────────────────────── # Plugin Management # ─────────────────────────────────────────────────────────────── @app.get("/capabilities") async def get_capabilities(): return { "drone_type": active_drone_type, "commands": control_capabilities, } @app.get("/plugins") async def get_plugins(): if not PLUGINS_ENABLED: raise HTTPException(status_code=404, detail="Plugins disabled") if not plugin_manager: raise HTTPException(status_code=503, detail="PluginManager not available") return { "available": plugin_manager.available(), "running": plugin_manager.running(), } @app.post("/plugins/{name}/start") async def start_plugin(name: str): if not PLUGINS_ENABLED: raise HTTPException(status_code=404, detail="Plugins disabled") if not plugin_manager: raise HTTPException(status_code=503, detail="PluginManager not available") try: # Current architecture can technically run multiple plugins, but they # will compete for frames from the shared plugin frame queue. For now # we enforce a single running plugin for predictable behavior. running = plugin_manager.running() if running and name not in running: raise HTTPException( status_code=409, detail=f"Another plugin is already running: {running}. 
Stop it first.", ) started = plugin_manager.start(name) if not started: raise HTTPException(status_code=409, detail="Plugin already running") return {"status": "started", "name": name} except ValueError as e: raise HTTPException(status_code=404, detail=str(e)) except Exception as e: raise HTTPException(status_code=500, detail=str(e)) @app.post("/plugins/{name}/stop") async def stop_plugin(name: str): if not PLUGINS_ENABLED: raise HTTPException(status_code=404, detail="Plugins disabled") if not plugin_manager: raise HTTPException(status_code=503, detail="PluginManager not available") try: stopped = plugin_manager.stop(name) if not stopped: raise HTTPException(status_code=409, detail="Plugin not running") return {"status": "stopped", "name": name} except ValueError as e: raise HTTPException(status_code=404, detail=str(e)) except Exception as e: raise HTTPException(status_code=500, detail=str(e)) # ─────────────────────────────────────────────────────────────── # Websocket handlers # ─────────────────────────────────────────────────────────────── @app.websocket("/ws/overlays") async def websocket_overlay_endpoint(websocket: WebSocket): await overlay_manager.connect(websocket) try: while True: await websocket.receive_text() # Keep connection open except WebSocketDisconnect: overlay_manager.disconnect(websocket) except Exception: overlay_manager.disconnect(websocket) @app.websocket("/ws") async def ws_endpoint(websocket: WebSocket) -> None: await websocket.accept() try: while True: data = await websocket.receive_json() if not flight_controller: continue msg_type = data.get("type") if msg_type == "axes": mode = data.get("mode", "abs") # If any plugin is running, completely ignore frontend control commands # Frontend should already be suppressing these, but this is a safety check plugin_running = bool(plugin_manager and plugin_manager.running()) if plugin_running: # Plugin has full control - don't process frontend axes at all pass else: # Switch strategy based on mode (treat "mouse" as absolute) try: if mode in ("abs", "mouse"): if not isinstance(flight_controller.model.strategy, DirectStrategy): flight_controller.model.set_strategy(DirectStrategy()) else: if not isinstance(flight_controller.model.strategy, IncrementalStrategy): flight_controller.model.set_strategy(IncrementalStrategy()) except Exception: pass throttle = float(data.get("throttle", 0)) yaw = float(data.get("yaw", 0)) pitch = float(data.get("pitch", 0)) roll = float(data.get("roll", 0)) flight_controller.set_axes_from("frontend", throttle, yaw, pitch, roll) elif msg_type == "set_profile": try: flight_controller.model.set_profile(data.get("name", "normal")) except Exception: pass elif msg_type in ("set_speed_index", "speed_index"): try: flight_controller.model.set_speed_index(data.get("speed_index", data.get("value", 2))) except Exception: pass elif msg_type == "takeoff": try: flight_controller.model.takeoff() except Exception: pass elif msg_type == "land": try: flight_controller.model.land() except Exception: pass elif msg_type in ("estop", "emergency_stop"): try: flight_controller.model.emergency_stop() except Exception: try: flight_controller.model.stop_flag = True except Exception: pass except WebSocketDisconnect: logger.info("[WebSocket] Client disconnected") except Exception as e: logger.exception("[WebSocket] Error: %s", e) # ─────────────────────────────────────────────────────────────── # Video streaming # ─────────────────────────────────────────────────────────────── def _frame_pump_worker( raw_q: queue.Queue, plugin_q: 
Optional[queue.Queue], frame_hub: FrameHub, stop_event: threading.Event, loop: asyncio.AbstractEventLoop, ): """ This worker runs in a separate thread and pumps frames from the thread-safe queue to the asyncio queues. """ # Wait for the very first frame before starting keepalive, to avoid # prematurely closing the MJPEG stream during initial connection. first_frame_seen = False last_frame_time = time.monotonic() timed_out = False while not stop_event.is_set() and not first_frame_seen: try: frame = raw_q.get(timeout=5.0) if frame: first_frame_seen = True last_frame_time = time.monotonic() # Send to MJPEG/overlay pipelines immediately asyncio.run_coroutine_threadsafe(frame_hub.publish(frame.data), loop) if plugin_q is not None: try: plugin_q.put_nowait(frame) except queue.Full: pass break except queue.Empty: # keep waiting for initial frame without killing the stream continue # After frames start flowing, if the stream stalls for too long we close # existing MJPEG clients by publishing None. (Pump continues regardless.) stream_timeout_s = 3.0 while not stop_event.is_set(): try: frame = raw_q.get(timeout=1.0) if frame: last_frame_time = time.monotonic() timed_out = False try: asyncio.run_coroutine_threadsafe(frame_hub.publish(frame.data), loop) except Exception: pass if plugin_q is not None: try: plugin_q.put_nowait(frame) except queue.Full: pass except queue.Empty: if first_frame_seen and not timed_out and (time.monotonic() - last_frame_time) > stream_timeout_s: timed_out = True try: asyncio.run_coroutine_threadsafe(frame_hub.publish(None), loop) except Exception: pass continue @app.get("/mjpeg") async def mjpeg_stream(): """ Streams JPEG frames over HTTP multipart/x-mixed-replace. """ from fastapi.responses import StreamingResponse async def frame_generator(): q = await FRAME_HUB.register() try: while True: frame = await q.get() if frame is None: break yield ( b"--frame\r\n" b"Content-Type: image/jpeg\r\n\r\n" + frame + b"\r\n" ) finally: await FRAME_HUB.unregister(q) return StreamingResponse( frame_generator(), media_type="multipart/x-mixed-replace; boundary=frame" ) class OverlayBroadcaster: def __init__(self, q: queue.Queue, loop: asyncio.AbstractEventLoop): self.q = q self.loop = loop self.thread: Optional[threading.Thread] = None self.stop_event = threading.Event() def start(self): self.thread = threading.Thread(target=self._run, daemon=True) self.thread.start() def stop(self): self.stop_event.set() if self.thread: self.thread.join(timeout=1.0) def _run(self): while not self.stop_event.is_set(): try: data = self.q.get(timeout=0.1) # IMPORTANT: empty overlays are sent as [] and must still be broadcast, # otherwise the frontend will keep rendering the last overlay forever. if data is None: continue # Plugins typically put python objects (lists/dicts) into this queue. # The frontend expects JSON. IMPORTANT: decide which coroutine to create # BEFORE constructing it, otherwise an un-awaited coroutine may be # garbage-collected (RuntimeWarning: coroutine was never awaited). 
if isinstance(data, (str, bytes)): msg = data if isinstance(data, str) else data.decode("utf-8", errors="ignore") coro = overlay_manager.broadcast(msg) else: coro = overlay_manager.broadcast_json(data) future = asyncio.run_coroutine_threadsafe(coro, self.loop) future.result(timeout=1.0) except queue.Empty: continue except Exception as e: logger.exception("[OverlayBroadcaster] Error: %s", e) ================================================ FILE: docs/research/S2x.md ================================================ # Research for the S2x drones (S20, S29, PL FPV) ## Chipset The S20 and S29 boards seem to use the [XR872AT](https://jlcpcb.com/partdetail/MACHINEINTELLIGENCE-XR872AT/C879208) MCU, a Cortex-M4 ARM processor. The likely firmware SDK family is https://github.com/XradioTech/xradio-skylark-sdk. ## App family These drones belong to the `com.vison.macrochip` Android app family. Confirmed apps so far: - HiTurbo FPV: `com.vison.macrochip.hiturbo.fpv`, decompiled at `decompile-s2x-hiturbo-app`. - PL FPV: `com.vison.macrochip.pl.fpv`, version `1.1.5`, decompiled at `decompiled-pl-fpv-1.1.5`. PL FPV is compatible with TurboDrone's existing `s2x` implementation. A Plegble PL-1515 that lists PL FPV in its guidebook was flown successfully with `DRONE_TYPE=s2x`: RC controls, video, takeoff, land, and e-stop all worked. ## Network shape - Device target is the phone's Wi-Fi gateway. TurboDrone's default remains `172.16.10.1`, but app code uses the DHCP gateway rather than a hard-coded address. - RC/control is UDP to port `8080`. - Video is UDP on port `8888`. - The app also opens TCP `8888` for some Macrochip variants, but the working S2x path is the UDP video path. - There is an auxiliary UDP receive socket on `8081` in newer PL FPV base library code. Video start/keepalive is a five-byte UDP command sent to port `8080`: ```text 08 ``` HiTurbo's `UdpRequestVideo` sends this every 1000 ms. PL FPV's `StreamUdpConnection` sends the same shape every 1000 ms. TurboDrone currently sends the same start payload every 2000 ms, which has worked on tested drones. ## RC packet The stock apps use the 20-byte "HY" control packet for this family: ```text 66 14 RR PP TT YY F1 F2 00 00 00 00 00 00 00 00 00 00 XX 99 ``` - Byte `0`: start marker `0x66`. - Byte `1`: packet length/value `0x14`. - Byte `2`: roll. - Byte `3`: pitch. - Byte `4`: throttle. - Byte `5`: yaw. - Byte `6`: one-shot flags. - Byte `7`: mode/status flags. - Bytes `8..17`: zero. - Byte `18`: XOR of bytes `2..17`. - Byte `19`: end marker `0x99`. Observed flag bits: - Byte `6`, bit `0`: one-key fly/land. Both takeoff and land use this same bit in the inspected HiTurbo and PL FPV code. - Byte `6`, bit `1`: emergency stop. - Byte `6`, bit `2`: calibration. - Byte `6`, bit `3`: roll/flip in the inline HiTurbo thread variant. - Byte `7`, bit `0`: headless. - Byte `7`, bit `1`: always set by both inspected apps. - Byte `7`, bit `2`: record state. - Byte `7`, bit `3`: "rocker" UI/control bit. TurboDrone has historically sent byte `7 = 0x0a` by default. Both inspected app paths build `0x02` plus optional bits, but `0x0a` has worked on real S2x and PL-1515 hardware. Treat byte `7` as a possible variant knob if a drone flies but has odd mode behavior. ## RC timing and feel The inspected stock app paths send RC packets every 50 ms: - HiTurbo `SendHuiYuanThread` sleeps `50L` between packets. - PL FPV subscribes to `RxManager.getObservable(0L, 50L)` for `HyControlConsumer`. 
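
To make the HY packet layout and the start/keepalive shape above concrete, here is a minimal Python sketch. It assumes the byte layout documented in this section; the start payload is `0x08` followed by the phone's IPv4 octets (see the implementation notes later in this file). The helper names, the example local address, and the 50 ms loop are illustrative, not TurboDrone's actual API.

```python
import socket
import time

def build_hy_packet(roll: int, pitch: int, throttle: int, yaw: int,
                    flags: int = 0x00, mode: int = 0x0a) -> bytes:
    """20-byte Macrochip HY packet: 66 14 | sticks | flags | 10 zeros | XOR | 99."""
    body = bytes([roll, pitch, throttle, yaw, flags, mode]) + bytes(10)
    checksum = 0
    for b in body:          # byte 18 is the XOR of bytes 2..17
        checksum ^= b
    return bytes([0x66, 0x14]) + body + bytes([checksum, 0x99])

def build_start_video(local_ip: str) -> bytes:
    """Five-byte start/keepalive: 0x08 followed by the local IPv4 octets."""
    return bytes([0x08]) + socket.inet_aton(local_ip)

if __name__ == "__main__":
    ctrl = ("172.16.10.1", 8080)
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.sendto(build_start_video("172.16.10.2"), ctrl)   # placeholder local IP
    for _ in range(20):     # the stock apps repeat the RC packet every 50 ms
        sock.sendto(build_hy_packet(0x80, 0x80, 0x80, 0x80), ctrl)
        time.sleep(0.05)
```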
TurboDrone's S2x backend sends RC packets at 80 Hz by default, so perceived lag is unlikely to be caused by a lower packet rate. More likely causes: - Frontend `inc` mode is intentionally ramped by `IncrementalStrategy`. - Gamepad/absolute mode should feel closer to the app because it uses `DirectStrategy`. - Browser input is forwarded to the backend at 30 Hz, while the backend repeats the latest state at the configured control rate. - Debug control logging can add small overhead when enabled. ## Speed tiers The stock apps do not use byte `1` as a speed selector; byte `1` stays `0x14`. Instead, the app scales roll and pitch around center before sending the packet. Observed HY speed scales: - HiTurbo: speed `0` = `0.6`, speed `1` = `0.8`, speed `2` = `1.0`. - PL FPV: speed `0` = `0.7`, speed `1` = `0.8`, speed `2` = `1.0`. TurboDrone now keeps S2x default behavior at full scale (`speed_index = 2`) and supports lower S2x speed tiers via `set_speed_index`. ## Native libraries PL FPV's `config.arm64_v8a.apk` split contains native libraries under `resources/config.arm64_v8a.apk/lib/arm64-v8a`. The important app-specific libraries are: - `libvison_main.so`: implements `com.vison.sdk.VNDK` JNI methods such as `addVideoStream`, `add872Stream`, `createVideoStream`, `getVideoOneFrameArray`, `convertJPEGToI420`, `convertNV12ToI420`, and FFmpeg/H.265 decode helpers. Printable symbols include `_872StreamBuf`, `udp_pack`, `MJPGToI420`, and the Java `VNDK` exports. This looks like the native video parser/decoder bridge, not the RC command transport. - `libdetector-lib.so`: implements `com.vison.macrochip.sdk.JNIManage` for hand detection, follow/track, obstacle detection, image stitching, and OpenCV/ncnn/ONNX helpers. This is vision/autonomy support and does not appear to own the RC packet format. - `librxffmpeg-*`, `libav*`, `libsw*`, `libHW_H265dec_Andr.so`, `libturbojpeg.so`, `libjpeg.so`: codec, FFmpeg, and JPEG support. String-level inspection did not find hard-coded S2x IPs or ports in `libvison_main.so`; the network target and control socket behavior still appear to be owned by the Java `BaseApplication` / connection classes. This supports keeping TurboDrone's S2x RC implementation as a Java-level packet match while using native findings mostly to understand video parsing. ### Native S2x UDP video parser Ghidra decompilation of `libvison_main.so` shows that PL FPV's Java `VNDK.add872Stream(byte[], int)` calls a native `analysis(int, char*, char*&)` function before writing a completed image into the internal BLB frame buffer. That function is the best match for TurboDrone's S2x UDP packet parser. Observed native packet rules: - Bytes `0..1`: sync marker `0x40 0x40`. - Bytes `2..3`: little-endian 16-bit frame/image id. - Byte `4`: total chunks in the frame. The native parser rejects `0`. - Byte `5`: chunk index. The native parser rejects values above `100`. - Bytes `6..7`: little-endian datagram length, which must equal the received packet length. - Bytes `8..packet_len-3`: JPEG payload data. - Bytes `packet_len-2..packet_len-1`: two-byte trailer, normally `##`. The native parser keeps two frame slots, accepts out-of-order chunks, stores each chunk at `chunk_id * 0x56e`, tracks a per-frame chunk bitmap, and emits the frame as soon as all chunk ids `0..total_chunks-1` have arrived. This is better than waiting for the next frame id to know the previous frame is complete. 
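
A minimal sketch of those native packet rules follows, assuming the layout just described (sync `0x40 0x40`, little-endian frame id and length, `##` trailer, emit on full chunk bitmap); the parser and assembler names are illustrative rather than TurboDrone's actual `s2x_video_protocol` implementation.

```python
import struct

def parse_s2x_chunk(pkt: bytes):
    """Split one S2x video datagram into (frame_id, total_chunks, chunk_index, payload).

    Returns None when the packet violates the native parser's sanity checks.
    """
    if len(pkt) < 10 or pkt[0] != 0x40 or pkt[1] != 0x40:
        return None
    frame_id = struct.unpack_from("<H", pkt, 2)[0]
    total_chunks = pkt[4]
    chunk_index = pkt[5]
    declared_len = struct.unpack_from("<H", pkt, 6)[0]
    if total_chunks == 0 or chunk_index > 100 or declared_len != len(pkt):
        return None
    return frame_id, total_chunks, chunk_index, pkt[8:-2]  # strip "##" trailer

class FrameAssembler:
    """Emit a JPEG as soon as all declared chunk ids 0..total-1 have arrived."""

    def __init__(self):
        self.frames = {}  # frame_id -> {chunk_index: payload}

    def feed(self, pkt: bytes):
        parsed = parse_s2x_chunk(pkt)
        if parsed is None:
            return None
        frame_id, total, index, payload = parsed
        chunks = self.frames.setdefault(frame_id, {})
        chunks[index] = payload
        if len(chunks) == total and set(chunks) == set(range(total)):
            del self.frames[frame_id]
            return b"".join(chunks[i] for i in range(total))
        return None
```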
TurboDrone's S2x video parser now mirrors the important parts of this behavior: it uses the 16-bit frame id, validates total chunks and declared packet length, strips the `##` trailer, and emits a frame immediately once all declared chunks are present. It still keeps a frame-id rollover fallback for older captures or unexpected variants. ## TurboDrone implementation notes Current matching files: - `backend/models/s2x_rc.py` - `backend/protocols/s2x_rc_protocol_adapter.py` - `backend/protocols/s2x_video_protocol.py` - `backend/models/s2x_video_model.py` Implementation parity notes: - RC packet shape matches the Macrochip HY 20-byte packet. - Video start command matches the app's `0x08 + local IPv4` command. - Video receive port and native `0x40 0x40` chunk header match the S2x stream behavior. - `S2X_SWAP_YAW_ROLL` is available as a variant knob in the web backend. - S2x speed tiers are supported as a model-level knob; the default remains full scale to preserve existing flight feel. ## Notes `nmap` on all TCP ports yielded only `8888` open. This is likely a backup or variant path for the main video feed over UDP. ================================================ FILE: docs/research/cooingdv.md ================================================ # CooingDV Protocol Research This note documents the CooingDV-style drone protocol as implemented in TurboDrone and as observed in the decompiled KY UFO and RC UFO Android apps. The publisher/app family appears to reuse the same core control and video stack across cosmetically different drone apps. Primary evidence: - TurboDrone implementation: - `turbodrone/backend/models/cooingdv_rc.py` - `turbodrone/backend/protocols/cooingdv_rc_protocol_adapter.py` - `turbodrone/backend/models/cooingdv_video_model.py` - `turbodrone/backend/protocols/cooingdv_video_protocol.py` - `turbodrone/backend/main.py` - `turbodrone/backend/web_server.py` - `turbodrone/backend/services/flight_controller.py` - `turbodrone/backend/services/video_receiver.py` - Decompiled KY UFO app: - `decompiled-ky-ufo-1.5.8/sources/com/cooingdv/kyufo/socket/Config.java` - `decompiled-ky-ufo-1.5.8/sources/com/cooingdv/kyufo/socket/SocketClient.java` - `decompiled-ky-ufo-1.5.8/sources/com/cooingdv/kyufo/socket/UdpComm.java` - `decompiled-ky-ufo-1.5.8/sources/com/cooingdv/kyufo/tools/FlyController.java` - `decompiled-ky-ufo-1.5.8/sources/com/cooingdv/kyufo/fragment/DeviceBLFragment.java` - `decompiled-ky-ufo-1.5.8/sources/com/cooingdv/kyufo/fragment/DeviceGLFragment.java` - `decompiled-ky-ufo-1.5.8/sources/com/cooingdv/kyufo/thread/MjpegThread.java` - `decompiled-ky-ufo-1.5.8/sources/com/cooingdv/kyufo/models/VideoModel.java` - `decompiled-ky-ufo-1.5.8/sources/com/cooingdv/bl60xmjpeg/UAV.java` - `decompiled-ky-ufo-1.5.8/sources/com/cooingdv/bl60xmjpeg/utils/GLJni.java` - `decompiled-ky-ufo-1.5.8/sources/com/cooingdv/bl60xmjpeg/utils/TCJni.java` - Decompiled RC UFO app: - `decompiled-rc-uf-19.3/sources/com/cooingdv/rcufo/socket/Config.java` - `decompiled-rc-uf-19.3/sources/com/cooingdv/rcufo/socket/SocketClient.java` - `decompiled-rc-uf-19.3/sources/com/cooingdv/rcufo/socket/UdpComm.java` - `decompiled-rc-uf-19.3/sources/com/cooingdv/rcufo/tools/FlyController.java` - `decompiled-rc-uf-19.3/sources/com/cooingdv/rcufo/fragment/DeviceBLFragment.java` - `decompiled-rc-uf-19.3/sources/com/cooingdv/rcufo/thread/MjpegThread.java` - `decompiled-rc-uf-19.3/sources/com/cooingdv/rcufo/models/VideoModel.java` - `decompiled-rc-uf-19.3/sources/com/cooingdv/rcufo/dialog/EnterPasswordDialog.java` - 
`decompiled-rc-uf-19.3/sources/com/cooingdv/rcufo/utils/WifiIdUtils.java` - Decompiled RC FPV app: - `decompiled-rc-fpv-1.8.0/sources/com/cooingdv/rcfpv/socket/Config.java` - `decompiled-rc-fpv-1.8.0/sources/com/cooingdv/rcfpv/socket/SocketClient.java` - `decompiled-rc-fpv-1.8.0/sources/com/cooingdv/rcfpv/socket/UdpComm.java` - `decompiled-rc-fpv-1.8.0/sources/com/cooingdv/rcfpv/tools/FlyController.java` - `decompiled-rc-fpv-1.8.0/sources/com/cooingdv/bl60xmjpeg/UAV.java` - `decompiled-rc-fpv-1.8.0/sources/com/cooingdv/rcfpv/utils/WifiIdUtils.java` - Decompiled KY FPV app: - `decompiled-ky-fpv-2.0.0/sources/com/cooingdv/kyfpv/socket/Config.java` - `decompiled-ky-fpv-2.0.0/sources/com/cooingdv/kyfpv/socket/UdpComm.java` - `decompiled-ky-fpv-2.0.0/sources/com/cooingdv/kyfpv/fragment/DeviceTXFragment.java` - `decompiled-ky-fpv-2.0.0/sources/com/cooingdv/kyfpv/tools/FlyController.java` - `decompiled-ky-fpv-2.0.0/sources/com/cooingdv/kyfpv/utils/StreamClient.java` - `decompiled-ky-fpv-2.0.0/sources/com/cooingdv/kyfpv/utils/FlyCommandUtils.java` - `decompiled-ky-fpv-2.0.0/sources/com/cooingdv/kyfpv/tools/FlyCommand.java` - `decompiled-ky-fpv-2.0.0/sources/com/cooingdv/kyfpv/utils/WifiIdUtils.java` - Decompiled 4DRC FPV app: - `decompiled-4drc-fpv-1.6.0/sources/com/cooingdv/fpv4drc/socket/Config.java` - `decompiled-4drc-fpv-1.6.0/sources/com/cooingdv/fpv4drc/socket/SocketClient.java` - `decompiled-4drc-fpv-1.6.0/sources/com/cooingdv/fpv4drc/tools/FlyController.java` - `decompiled-4drc-fpv-1.6.0/sources/com/cooingdv/fpv4drc/tools/DriveController.java` - `decompiled-4drc-fpv-1.6.0/sources/com/cooingdv/fpv4drc/fragment/ExcavatorFragment.java` - `decompiled-4drc-fpv-1.6.0/sources/com/cooingdv/fpv4drc/utils/WifiIdUtils.java` ## Executive Summary CooingDV drones in these apps use a simple Wi-Fi control plane: - Drone IP: `192.168.1.1` - RC/command UDP port: `7099` - Preview video: `rtsp://192.168.1.1:7070/webcam` - HTTP media access: `http://192.168.1.1/PHOTO/...` and `http://192.168.1.1/DCIM/...` - FTP media root: `/0/`, username `ftp`, password `ftp` The RC loop is a 20 Hz loop in the Android apps: `FlyController` schedules a `TimerTask` every 50 ms. TurboDrone mirrors that by defaulting CooingDV control to 20 Hz. There are two Wi-Fi control packet families: - TC / short packets: 9 bytes total over UDP. - GL / extended packets: 21 bytes total over UDP. Both packet families are wrapped with a leading `0x03` byte before being sent over UDP. KY UFO also has a native/BLE-like path through `UAV`, `GLJni`, and `TCJni` where the same inner payloads are sent without the Wi-Fi wrapper. The video path in the Wi-Fi app mode is RTSP. The apps use an IJK-based video view configured to expose original video frames as `byte[]`, then decode those bytes with `BitmapFactory.decodeByteArray`, which strongly indicates JPEG/MJPEG frame blobs at the app boundary. The apps can also re-encode displayed frames to local H.264 MP4 for recording. That local H.264 encoder is not evidence that the drone's wire stream itself is H.264. ## TurboDrone Compatibility Summary TurboDrone's current `cooingdv` implementation is a good match for drones that expose the classic CooingDV Wi-Fi path: - RC/telemetry on UDP `192.168.1.1:7099` - Video on `rtsp://192.168.1.1:7070/webcam` - 20 Hz stick loop - TC short packet `03 66 ... 99` - GL extended packet `03 66 14 ... 
99` - Heartbeat `01 01` - Exit-control command `08 01` Compatibility by reviewed app: | Reviewed app | Current TurboDrone fit | Notes / edge cases | | --- | --- | --- | | KY UFO `com.cooingdv.kyufo` | Strong for classic Wi-Fi TC/GL; partial for native BL60x path | The Java Wi-Fi path matches UDP `7099` and RTSP `7070`. KY UFO also has native `UAV` / `libuav_gl.so` / `libuav_tc.so` transport targeting `192.168.169.1:8800`; TurboDrone does not implement that native path yet. | | RC UFO `com.cooingdv.rcufo` | Strong for classic Wi-Fi TC/GL | This is closest to TurboDrone's current adapter: UDP `7099`, RTSP `7070`, TC/GL auto-detection, password-capable IDs, and GL status telemetry. Some decompiled methods are damaged, so runtime captures are still useful. | | RC FPV `com.cooingdv.rcfpv` | Good for classic TC/short Wi-Fi path | RC FPV uses the same RTSP/UDP constants and short `03 66 ... 99` packet. Its Java `FlyController` does not show the GL 21-byte branch; treat it as TC/short unless telemetry proves otherwise. It references `UAV` native bridges, but this decompile lacks bundled `.so` files. | | KY FPV `com.cooingdv.kyfpv` | Good only for its classic `DeviceTXFragment` path; not complete for all KY FPV backends | KY FPV includes a TurboDrone-compatible RTSP/UDP TC path, but also has native `StreamClient` / `mjpeg_jni` and Jieli `DeviceClient` JSON `FLYING_CTRL` backends. TurboDrone does not currently implement those non-classic backends. | | 4DRC FPV `com.cooingdv.fpv4drc` | Strong for classic Wi-Fi TC/GL drone path; separate for excavator/drive mode | Drone flight control uses the same TC/GL split as KY UFO, with UDP `7099`, RTSP `7070`, and optional BL60x native `UAV`. It also adds `EXCAVATOR_720 = 89` and a distinct drive/excavator packet `03 33 ... 88`, which is not covered by TurboDrone's flight controller. | Control expectations for compatible classic-path drones: - Video should generally work via RTSP `7070`. - Manual stick control should generally work via UDP `7099`. - Emergency stop should generally work through the TC/GL emergency flag mapping. - Takeoff and land are best understood as TurboDrone UI abstractions over the apps' fast-up / fast-down one-shot bits. The reviewed apps do not expose clear named Android `takeoff` / `land` wire opcodes for the classic UDP path. Practical rule: if a drone works in one of these apps while exposing `192.168.1.1:7070` RTSP and responding on UDP `7099`, TurboDrone's current `cooingdv` adapter is likely the correct first attempt. If the app is using KY FPV's `mjpeg_jni`, Jieli JSON, or KY UFO's native `192.168.169.1:8800` path exclusively, TurboDrone needs a separate backend before compatibility can be claimed. Likely future TurboDrone split: - `cooingdv`: current classic RTSP/UDP implementation. - `cooingdv_bl`: BL60x native MJPEG/control transport used by KY UFO `UAV` and KY FPV `StreamClient`, both centered on `192.168.169.1:8800`. - `cooingdv_jieli`: KY FPV Jieli/CTP backend using `192.168.8.15:2228`, `DeviceClient`, `FLYING_CTRL`, and `CTP:` JSON packets. - `cooingdv_drive` or `cooingdv_4drc_drive`: optional future non-drone backend for the 4DRC excavator/ground-vehicle path. This should stay separate from flight because its packet shape and controls are not quadcopter RC controls. Implementation status in TurboDrone: - `cooingdv` is implemented for classic RTSP/UDP. 
- `cooingdv_jieli` has an initial RC implementation for `CTP:`/`FLYING_CTRL` over UDP `2228` and a first-pass RTP/JPEG video adapter using SDP `6789` and RTP video port `6666`. - `cooingdv_bl` remains research-only until runtime captures confirm the safe ACK/session behavior needed for motor control. - `cooingdv_drive` is not implemented and is out of scope for flight support. ## Network Constants The two apps share the same network constants in `Config.java`. - `SERVER_IP = "192.168.1.1"` - `SERVER_PORT = 7070` - `PREVIEW_ADDRESS = "rtsp://192.168.1.1:7070/webcam"` - `TCP_SERVER_HOST = "192.168.1.1"` - `TCP_SERVER_PORT = 5000` - `FTP_HOST = "192.168.1.1"` - `FTP_USERNAME = "ftp"` - `FTP_PASSWORD = "ftp"` - `FTP_ROOT_DIR = "/0/"` - `VIDEO_PATH = "DCIM"` - `IMAGE_PATH = "PHOTO"` - `LOCAL_IMAGE_SUFFIX = ".jpg"` - `LOCAL_VIDEO_SUFFIX = ".avi"` - `REMOTE_IMAGE_SUFFIX = ".jpg"` - `REMOTE_VIDEO_SUFFIX = ".avi"` The Java code inspected does not use `TCP_SERVER_PORT = 5000` for flight control. Active RC, heartbeat, camera switch, gallery sync, and password commands all go through `UdpComm` to UDP port `7099`. The newer KY FPV app still carries these constants for one of its RTSP/UDP screens, but it also supports other backends through `StreamClient` and `DeviceClient`; see "Additional CooingDV Publisher Apps" below. ## UDP Session Lifecycle Both apps create their UDP client like this: ```text UdpComm.getInstance("192.168.1.1", 7099) ``` `UdpComm` uses `new DatagramSocket()` with no explicit local bind, so Android uses an ephemeral local port. The same socket is used for transmit and receive. The receive thread allocates a 20-byte buffer: ```text byte[] bArr = new byte[20] DatagramPacket(bArr, bArr.length) socket.receive(datagramPacket) callback.onReceiveData(copyOf(datagramPacket.getData(), datagramPacket.getLength())) ``` That means telemetry observed by the Java callback is limited to 20 bytes in these app builds. This is enough for the app's first-byte capability detection, camera reset state, gallery counters, password metadata, and small GL status frames. The apps send a heartbeat every 1000 ms: ```text 01 01 ``` When leaving flight-control mode, the Wi-Fi path sends: ```text 08 01 ``` Important: this is not a startup init packet. In both apps it is sent when the control timer is cancelled. KY sends native command `65` (`0x65`, decimal 101) instead when its `UAV` native path is active. ## Discrete UDP Commands Observed discrete commands on UDP port `7099`: - `01 01`: heartbeat, once per second while preview/control is active. - `08 01`: leave flight-control mode / stop controller timer. - `06 01`: select one camera, usually front/default. - `06 02`: select alternate camera, usually rear/secondary. - `09 01`: screen/gallery/photo-side synchronization. Used by KY `switchScreen` and by both apps after photo telemetry (`M`, `0x4d`). - `09 02`: screen/gallery/video-side synchronization. Used by KY `switchScreen` and by both apps after video telemetry (`X`, `0x58`). - `0a d0 d1 d2 d3 d4 d5 d6 d7`: RC UFO password set command, where each `dN` is a numeric byte parsed from one character of an 8-digit UI password. KY native-only commands through `UAV.sendCommand`: - `64` (`0x64`, decimal 100): sent after first native `picData` frame to acknowledge/activate native streaming. - `63` (`0x63`, decimal 99): sent after native resolution/capability messages. - `65` (`0x65`, decimal 101): sent when leaving control mode while `UAV` is active. 
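
As a concrete illustration of the UDP session lifecycle and discrete commands above, here is a minimal Python sketch assuming the `Config.java` constants; the class name and the telemetry callback shape are illustrative and not part of TurboDrone.

```python
import socket
import threading
import time

class CooingdvUdpSession:
    """Sketch of the apps' UdpComm lifecycle against 192.168.1.1:7099."""

    HEARTBEAT = bytes([0x01, 0x01])     # once per second while preview/control is active
    EXIT_CONTROL = bytes([0x08, 0x01])  # sent when the control timer is cancelled
    CAMERA_FRONT = bytes([0x06, 0x01])
    CAMERA_ALT = bytes([0x06, 0x02])

    def __init__(self, host: str = "192.168.1.1", port: int = 7099):
        self.addr = (host, port)
        # No explicit local bind: the OS picks an ephemeral port, as the apps do.
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.running = True

    def start(self, on_telemetry):
        threading.Thread(target=self._rx_loop, args=(on_telemetry,), daemon=True).start()
        threading.Thread(target=self._heartbeat_loop, daemon=True).start()

    def _rx_loop(self, on_telemetry):
        self.sock.settimeout(1.0)
        while self.running:
            try:
                data, _ = self.sock.recvfrom(20)  # app builds read into a 20-byte buffer
                on_telemetry(data)
            except socket.timeout:
                continue

    def _heartbeat_loop(self):
        while self.running:
            self.sock.sendto(self.HEARTBEAT, self.addr)
            time.sleep(1.0)

    def close(self):
        self.running = False
        self.sock.sendto(self.EXIT_CONTROL, self.addr)
        self.sock.close()
```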
## Flight Control Axes The Android apps name the four stick bytes as: - `controlByte1` - `controlByte2` - `controlAccelerator` - `controlTurn` The values use byte-centered joystick semantics: - Default center: `128` - Minimum: `1` - Maximum: `255` - If `controlAccelerator == 1`, the app writes it as `0` before sending. TurboDrone maps these into the higher-level model fields: - `roll` -> `controlByte1` - `pitch` -> `controlByte2` - `throttle` -> `controlAccelerator` - `yaw` -> `controlTurn` `CooingdvRcModel` defines a safe control range of `50..200` centered at `128`. The protocol adapter performs a final byte clamp to `1..255`, matching the app frame builder. This separation is intentional: the model limits normal control motion, while the adapter preserves protocol validity. ## TC / Short Flight Packet The TC path is used when the app's device type is `10`. Native inner payload in KY `UAV` mode: ```text 66 B1 B2 ACC TURN FLAGS CHECKSUM 99 ``` Wi-Fi UDP payload: ```text 03 66 B1 B2 ACC TURN FLAGS CHECKSUM 99 ``` Total Wi-Fi length: 9 bytes. Byte layout: - Byte 0: `0x03`, Wi-Fi wrapper/prefix. - Byte 1: `0x66`, start marker. - Byte 2: `controlByte1`. - Byte 3: `controlByte2`. - Byte 4: `controlAccelerator`, except app coerces `1` to `0`. - Byte 5: `controlTurn`. - Byte 6: flags. - Byte 7: XOR checksum. - Byte 8: `0x99`, end marker. Checksum: ```text checksum = B1 ^ B2 ^ ACC ^ TURN ^ FLAGS ``` The Android app's TC flag byte is: - Bit `0x01`: `isFastFly` - Bit `0x02`: `isFastDrop` - Bit `0x04`: `isEmergencyStop` - Bit `0x08`: `isCircleTurnEnd` - Bit `0x10`: `isNoHeadMode` - Bit `0x20`: `isFastReturn || isUnLock` - Bit `0x40`: KY `isOpenLight`; RC source is damaged here and has no `isOpenLight` field. - Bit `0x80`: `isGyroCorrection` TurboDrone's TC flag names are higher-level names: - `0x01`: `takeoff_flag` - `0x02`: `land_flag` - `0x04`: `stop_flag` - `0x08`: `flip_flag` - `0x10`: `headless_flag` - `0x80`: `calibration_flag` The TurboDrone mapping matches the byte positions and the most plausible button-level effects, but the decompiled app does not name explicit `takeoff`/`land` opcodes. The app names the low two bits as one-shot fast-up/fast-down actions. Treat TurboDrone's `takeoff` and `land` semantics as an abstraction over those app buttons, not as literal Android symbol names. ## GL / Extended Flight Packet The GL path is used when the app's device type is not `10`, normally device type `2`. Native inner payload in KY `UAV` mode: ```text 66 14 B1 B2 ACC TURN FLAGS1 FLAGS2 00 00 00 00 00 00 00 00 00 00 CHECKSUM 99 ``` Wi-Fi UDP payload: ```text 03 66 14 B1 B2 ACC TURN FLAGS1 FLAGS2 00 00 00 00 00 00 00 00 00 00 CHECKSUM 99 ``` Total Wi-Fi length: 21 bytes. Byte layout: - Byte 0: `0x03`, Wi-Fi wrapper/prefix. - Byte 1: `0x66`, start marker. - Byte 2: `0x14`, decimal 20, extended payload marker/inner length. - Byte 3: `controlByte1`. - Byte 4: `controlByte2`. - Byte 5: `controlAccelerator`, except app coerces `1` to `0`. - Byte 6: `controlTurn`. - Byte 7: `flags1`. - Byte 8: `flags2`. - Bytes 9..18: reserved zero bytes in the generated app frames, except byte 19 below is checksum when using zero-based Wi-Fi indexing. - Byte 19: XOR checksum. - Byte 20: `0x99`, end marker. Checksum: ```text checksum = B1 ^ B2 ^ ACC ^ TURN ^ FLAGS1 ^ FLAGS2 ``` The Android app's GL `flags1` byte is: - Bit `0x01`: `isFastFly || isFastDrop` - Bit `0x02`: `isEmergencyStop` - Bit `0x04`: `isGyroCorrection` - Bit `0x08`: `isCircleTurnEnd` - Bit `0x10`: KY `isOpenLight`; absent in RC `FlyController`. 
- Bit `0x40`: `isGestureMode` The Android app's GL `flags2` byte is: - Bit `0x01`: `isNoHeadMode` - Bit `0x02`: `isFixedHeightMode` TurboDrone's GL mapping: - `takeoff_flag` or `land_flag` -> `flags1 0x01` - `stop_flag` -> `flags1 0x02` - `calibration_flag` -> `flags1 0x04` - `flip_flag` -> `flags1 0x08` - `headless_flag` -> `flags2 0x01` Again, TurboDrone's names represent the product-level control surface. The decompiled apps expose these bits as fast up/down, emergency, gyro, circle/flip, headless, and fixed-height style features. ## Variant Detection The apps infer TC versus GL behavior from UDP telemetry/capability bytes. KY `WifiIdUtils.isGL(i)`: ```text 90..101, 103 ``` KY `WifiIdUtils.isNoGL(i)`: ```text 5, 9, 12, 19, 20, 21, 23, 24, 31, 45, 51, 63, 64, 65, 66, 67, 72 ``` RC `WifiIdUtils.isGL(i)`: ```text 90..101, 103, 82, 85 ``` RC password-capable IDs: ```text 80, 81, 82, 85 ``` RC adds many KY aliases plus RC-specific resolution IDs: ```text 26, 27, 29, 30, 31, 41, 43, 44, 45, 68, 69, 70, 71, 72, 80, 81, 82, 83, 84, 85, 86, 87, 90..101, 103, 105 ``` TurboDrone mirrors this detection through the first byte of received UDP telemetry: - IDs in `GL_RESOLUTION_IDS` select GL. - IDs in `KNOWN_RESOLUTION_IDS` select TC unless they also match GL. - Until a known ID is received, TurboDrone falls back to TC. - `COOINGDV_VARIANT=tc` or `COOINGDV_VARIANT=gl` can force a variant. - Aliases accepted by TurboDrone: - TC: `tc`, `e88`, `short` - GL: `gl`, `flow`, `extended` - Auto: empty, `auto`, `detect`, `autodetect` ## Additional CooingDV Publisher Apps Two later decompiles broaden the CooingDV publisher picture: - `RC FPV 1.8.0` package: `com.cooingdv.rcfpv` - `KY FPV 2.0.0` package: `com.cooingdv.kyfpv` Both are recognizably from the same publisher family, but they do not map one-to-one onto the same runtime stack. ### RC FPV 1.8.0 RC FPV is closest to the original KY UFO / RC UFO implementation. Shared constants and paths: - `Config.PREVIEW_ADDRESS = "rtsp://192.168.1.1:7070/webcam"` - `SERVER_IP = "192.168.1.1"` - `SERVER_PORT = 7070` - `TCP_SERVER_PORT = 5000` defined but not used for RC in the inspected Java paths. - `UdpComm.getInstance("192.168.1.1", 7099)` - `UdpComm` uses the same `DatagramSocket()` send/receive model and 20-byte receive buffer. - Heartbeat is `01 01` every 1000 ms. - Camera switch uses `06 01` / `06 02`. - Photo/video gallery sync uses `09 01` / `09 02`. - Exit control mode uses `08 01` when not in native `UAV` mode. RC FPV includes the same Java native bridge package: - `com.cooingdv.bl60xmjpeg.UAV` - `GLJni` -> `System.loadLibrary("uav_gl")` - `TCJni` -> `System.loadLibrary("uav_tc")` However, the inspected `decompiled-rc-fpv-1.8.0/resources` tree has no `resources/lib` directory. Like RC UFO, this decompile either lacks the native split/universal APK libs or represents a packaging variant where those binaries were omitted from the extracted resource tree. RC FPV's `FlyController` builds only the TC/short inner packet: ```text 66 B1 B2 ACC TURN FLAGS CHECKSUM 99 ``` Wi-Fi mode prefixes that with `0x03`: ```text 03 66 B1 B2 ACC TURN FLAGS CHECKSUM 99 ``` Native `UAV` mode sends the inner 8-byte payload directly. RC FPV does not show the GL 20-byte packet branch in its `FlyController`. It is therefore best modeled as a TC/short CooingDV variant, with optional native `UAV` support if its missing native libraries are recovered. 
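
Pulling the two Wi-Fi frame shapes together, here is a minimal sketch of the TC/short and GL/extended builders under the byte layouts documented above, including the leading `0x03` Wi-Fi wrapper and the XOR checksums; function names and flag defaults are illustrative.

```python
def _xor(*values: int) -> int:
    acc = 0
    for v in values:
        acc ^= v
    return acc

def build_tc_wifi(b1: int, b2: int, acc: int, turn: int, flags: int = 0) -> bytes:
    """9-byte Wi-Fi TC frame: 03 66 B1 B2 ACC TURN FLAGS CHECKSUM 99."""
    if acc == 1:            # the apps coerce an accelerator value of 1 to 0
        acc = 0
    checksum = _xor(b1, b2, acc, turn, flags)
    return bytes([0x03, 0x66, b1, b2, acc, turn, flags, checksum, 0x99])

def build_gl_wifi(b1: int, b2: int, acc: int, turn: int,
                  flags1: int = 0, flags2: int = 0) -> bytes:
    """21-byte Wi-Fi GL frame: 03 66 14 | sticks | flags1 flags2 | 10 zeros | CHECKSUM 99."""
    if acc == 1:
        acc = 0
    checksum = _xor(b1, b2, acc, turn, flags1, flags2)
    return (bytes([0x03, 0x66, 0x14, b1, b2, acc, turn, flags1, flags2])
            + bytes(10) + bytes([checksum, 0x99]))

# Centered sticks (128) with no one-shot flags:
assert len(build_tc_wifi(128, 128, 128, 128)) == 9
assert len(build_gl_wifi(128, 128, 128, 128)) == 21
```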
RC FPV flag bits match the TC-style family: - `0x01`: fast fly / up - `0x02`: fast drop / down - `0x04`: emergency stop - `0x08`: circle turn end / flip-like one-shot - `0x10`: no-head / headless - `0x20`: fast return or unlock - `0x80`: gyro correction Two details differ from the older RC UFO source: - `controlTurn` is dead-zoned to 128 when it falls between 104 and 152. - The decompiled fast-return/unlock branch is cleaner than RC UFO's damaged output: `0x20` is set when `isFastReturn` or `isUnLock` is true. RC FPV `WifiIdUtils` is narrower than RC UFO: - Adds `COVERT_8K = 33` - Adds `RESOLUTION_SETTING = 46` - Double-camera IDs: `41`, `43`, `44` - Password-capable IDs still include `80`, `81`, `82`, `85` constants, but no `EnterPasswordDialog` equivalent was observed in the same way as RC UFO. - No GL/flow ID table was observed in this `WifiIdUtils`; it is not doing the same GL-vs-TC selection as RC UFO. ### KY FPV 2.0.0 KY FPV 2.0.0 is broader than the UFO apps and carries at least three control families behind one publisher UI. 1. Native BL/MJPEG `StreamClient` `StreamClient` loads: ```text System.loadLibrary("mjpeg_jni") ``` Native methods: - `streamStartServer()` - `streamStopServer()` - `streamSendCommand(byte[] command, int channel)` - `streamSwitchCamera(int index)` - `streamSetModify(String ssid, int resolution, int channel, int camera)` Callbacks: - `functionPicture(byte[], long, byte)` marks the stream active, sends native command `64` (`0x64`, decimal 100) once, and forwards frame bytes to `OnStreamListener.onVideo`. - `functionMessage(int)` handles resolution/status values, broadcasts fake resolution changes, applies model-specific UI customization, and forwards the integer to `OnStreamListener.onReceiver`. `FlyController` type `1` sends the same short TC inner packet through `StreamClient.sendCommand(...)`: ```text 66 B1 B2 ACC TURN FLAGS CHECKSUM 99 ``` It also sends native command `65` (`0x65`, decimal 101) when leaving control mode, matching the older `UAV` native exit behavior. The native split APK for KY FPV contains `libmjpeg_jni.so` under `resources/config.armeabi_v7a.apk/lib/armeabi-v7a`. Ghidra and ELF inspection show it is the same protocol family as the KY UFO `libuav_tc.so` engine, but with a different Java wrapper: - Java class: `com.cooingdv.kyfpv.utils.StreamClient` - JNI library: `libmjpeg_jni.so` - Native target: `192.168.169.1:8800` - Local socket: UDP bound to `0.0.0.0` with an ephemeral local port - Start packet: `ef 00 04 00` - Incoming envelope marker: `0x93` - Native frame assembly: 1024-byte fragments, embedded `640x360` JPEG headers, quality tables for 5/10/25/50/75/100, and final JPEG EOI `ff d9` - Native callbacks: - `functionPicture(byte[], long, byte)` - `functionMessage(int)` Important Ghidra-confirmed functions: - `Java_com_cooingdv_kyfpv_utils_StreamClient_streamStartServer` - `Java_com_cooingdv_kyfpv_utils_StreamClient_streamStopServer` - `Java_com_cooingdv_kyfpv_utils_StreamClient_streamSendCommand` - `Java_com_cooingdv_kyfpv_utils_StreamClient_streamSwitchCamera` - `Java_com_cooingdv_kyfpv_utils_StreamClient_streamSetModify` - `mjpeg_ndk_command_send` - `mjpeg_ndk_settings_send` - `mjpeg_ndk_queryinfo_cmd_send` - `handle_mcu_msg_frag` - `handle_mcu_msg_ack` - `build_send_ack` `streamSendCommand(byte[], int)` copies the Java command bytes and calls `mjpeg_ndk_command_send`. 
That native command path accepts payloads shorter than `0x81`, stores command length plus a `length + 0x0c` field, and piggybacks the command into the native ACK/control stream. This is not the Java UDP `7099` transport; it should be treated as a separate BL60x native backend even though the 8-byte stick payload is familiar. `streamSetModify(...)` builds the same `oGMcOfyZdIurm2kS` settings payload family seen in the KY UFO native code, with a mode value of `2` for this StreamClient path. 2. Jieli / JSON `DeviceClient` KY FPV adds `com.jieli.lib.dv.control.DeviceClient` through `ClientManager`. This is a materially different backend from the raw UDP packet path. KY FPV constructs `new DeviceClient(context, 1)`, selecting the UDP implementation. The app connects to: ```text 192.168.8.15:2228 ``` from `IConstants.DEFAULT_DEV_IP` and `AP_MODE_UDP_PORT`. The Jieli command envelope is not the CooingDV `0x03 0x66` packet. It is a little-endian CTP envelope: ```text offset size meaning 0x00 4 ASCII "CTP:" 0x04 2 little-endian topic length 0x06 N topic string ... 4 little-endian JSON payload length ... M JSON payload ``` The JSON payload looks like: ```json {"op":"PUT","param":{"state":"1"}} ``` or, for flight controls: ```json { "op": "PUT", "param": { "BYTE0": "102", "BYTE1": "...", "BYTE2": "...", "BYTE3": "...", "BYTE4": "...", "BYTE5": "...", "BYTE6": "...", "BYTE7": "153" } } ``` `FlyCommandUtils.tryToSendFlyCommand(int[])` sends a `SettingCmd` topic `FLYING_CTRL` with parameters: ```text BYTE0=102 BYTE1=B1 BYTE2=B2 BYTE3=ACC BYTE4=TURN BYTE5=FLAGS BYTE6=CHECKSUM BYTE7=153 ``` `FlyController` type `2` uses this JSON backend. The same 8-byte TC control payload is preserved, but it is transported as named JSON settings rather than as a raw UDP datagram. Other JSON command topics in `FlyCommand` include: - `CONTROL_MODE` - `FLYING_CTRL` - `SWITCH_CAMERA` - `RT_PIC_POSITION_CTL` - `REQUEST_FAKE_NUMBER` - numeric legacy topics such as `0136`, `0138`, `0141`, `0142`, `0143`, `0144`, `0145`, `0146`, `0147` This strongly suggests that newer CooingDV apps support at least one chipset/app backend that is not covered by TurboDrone's current raw UDP/RTSP CooingDV adapter. 3. Classic RTSP/UDP `DeviceTXFragment` KY FPV still includes a classic RTSP/UDP path: - RTSP preview: `rtsp://192.168.1.1:7070/webcam` - UDP telemetry/control: `192.168.1.1:7099` - Heartbeat: `01 01` - Exit control: `08 01` - Photo/video notifications: `M` / `X`, with `09 01` / `09 02` responses - `OnReceivedOriginalDataListener` -> `MjpegThread.drawBitmap(byte[])` `FlyController` type `3` delegates raw 8-byte TC inner frames back to `DeviceTXFragment.sendFlyControllerData`, which prefixes `0x03` and sends the result by `UdpComm`. KY FPV therefore preserves the TurboDrone-compatible TC/short path, but it also contains non-TurboDrone backends. KY FPV `WifiIdUtils` has the broadest observed model table so far. Notable additional IDs and model families: - FPV IDs: `25`, `26`, `27`, `29`, `30` - DF FPV: `14`, `50` - E19 Eachine: `10` - Hasakee Q8: `62` - Qixin Toy: `34` - XKY 4K: `60`, `61` - hide/custom UI variants: `13`, `28`, `32` - PRO26 family: `105`, `109`, `112`, `119`, `121`, `123`, `124` - F-resolution no music/gesture IDs: `212`, `213` These model IDs should be considered publisher-level capability IDs, not just resolution values. KY FPV uses them to hide controls, change language, switch backgrounds, set double-camera state, and select backend behavior. 
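
Because the "Impact On TurboDrone" notes below call out CTP packet construction as a requirement for a `cooingdv_jieli` backend, here is a minimal sketch of the CTP envelope and `FLYING_CTRL` payload described above. The 2-byte topic length and 4-byte JSON length follow the offsets listed; the function names are illustrative.

```python
import json
import struct

def build_ctp(topic: str, payload: dict) -> bytes:
    """Little-endian CTP envelope: 'CTP:' + topic length + topic + JSON length + JSON."""
    topic_bytes = topic.encode("ascii")
    json_bytes = json.dumps(payload, separators=(",", ":")).encode("utf-8")
    return (b"CTP:"
            + struct.pack("<H", len(topic_bytes)) + topic_bytes
            + struct.pack("<I", len(json_bytes)) + json_bytes)

def build_flying_ctrl(b1: int, b2: int, acc: int, turn: int, flags: int = 0) -> bytes:
    """FLYING_CTRL carries the familiar 8-byte TC payload as named JSON settings."""
    checksum = b1 ^ b2 ^ acc ^ turn ^ flags
    params = {
        "BYTE0": "102",            # 0x66 start marker
        "BYTE1": str(b1),
        "BYTE2": str(b2),
        "BYTE3": str(acc),
        "BYTE4": str(turn),
        "BYTE5": str(flags),
        "BYTE6": str(checksum),
        "BYTE7": "153",            # 0x99 end marker
    }
    return build_ctp("FLYING_CTRL", {"op": "PUT", "param": params})

# Sent over UDP to 192.168.8.15:2228 in the Jieli/DeviceClient backend.
```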
### Impact On TurboDrone Current TurboDrone `cooingdv` support maps well to: - RC UFO Wi-Fi TC/GL path. - KY UFO Wi-Fi TC/GL path. - RC FPV classic TC Wi-Fi path. - KY FPV `DeviceTXFragment` classic TC Wi-Fi path. - 4DRC FPV drone-flight path. Current TurboDrone does not yet cover: - KY/RC native `UAV` / `StreamClient` BL60x-style native transport unless the drone also exposes the classic Java Wi-Fi path. - KY FPV `DeviceClient` / Jieli JSON settings backend. - The expanded KY FPV model-ID capability table and UI-hide/customization behavior. - 4DRC FPV excavator/ground-vehicle controls. The safest immediate expansion is to broaden CooingDV telemetry recognition with the RC FPV and KY FPV IDs while keeping the packet encoder conservative: - Treat RC FPV as TC/short unless runtime telemetry proves otherwise. - Treat KY FPV classic `DeviceTXFragment` as TC/short. - Do not assume the Jieli JSON backend is reachable through UDP `7099`. - Do not assume KY FPV native `mjpeg_jni` behaves exactly like KY UFO `libuav_gl.so` / `libuav_tc.so` for every model, even though Ghidra shows the same BL60x-style `192.168.169.1:8800` native protocol family. - Treat 4DRC FPV `EXCAVATOR_720 = 89` as a non-flight device class. It may share video and native BL transport, but its RC packet is a drive/excavator packet, not a quadcopter packet. Implementation recommendation: - Add `cooingdv_bl` only if hardware capture shows target drones require the native `192.168.169.1:8800` path and do not expose classic RTSP/UDP. - Add `cooingdv_jieli` as a distinct backend if we want KY FPV `DeviceClient`/Jieli devices. It needs CTP packet construction, UDP `2228`, Jieli realtime stream handling, and JSON command topics. TurboDrone now has initial CTP RC and RTP/JPEG video support, but it still needs hardware validation and H.264 handling if a device chooses RTS format `1`. - Add a drive/excavator backend only if TurboDrone intentionally grows beyond flying drones. The 4DRC excavator path is clearly CooingDV-family, but it does not belong in a flight-control adapter. ### 4DRC FPV 1.6.0 4DRC FPV package: ```text com.cooingdv.fpv4drc ``` The app is a strong overlap with KY UFO / RC FPV: - Classic RTSP preview: `rtsp://192.168.1.1:7070/webcam` - UDP command/telemetry: `192.168.1.1:7099` - Heartbeat: `01 01` - Camera switch: `06 01` / `06 02` - Photo/video gallery sync: `09 01` / `09 02` - Native bridge: `com.cooingdv.bl60xmjpeg.UAV` - Native libraries in split APK: - `libuav_gl.so` - `libuav_tc.so` - `libnative-lib.so` - IJK/OpenCV/GPUImage/pocketsphinx support libraries The 4DRC native `libuav_gl.so` and `libuav_tc.so` contain the same important BL60x-family strings and symbols: - `192.168.169.1` - `8800` - `mjpeg_ndk_start` - `mjpeg_ndk_command_send` - `mjpeg_ndk_set_active_camera_index` - `mjpeg_ndk_set_QPara` - `build_send_ack` - `handle_mcu_msg_frag` - embedded `jpeg_header_640x360_Q*` tables This means 4DRC FPV overlaps with `cooingdv_bl`, not Jieli. 4DRC FPV `FlyController` is the familiar KY UFO-style TC/GL split: - If `UAV.getInstance().getDeviceType() != 10`, it builds the GL extended 20-byte inner payload `66 14 ... 99`, then prefixes `03` for Wi-Fi. - If device type is `10`, it builds the TC 8-byte inner payload `66 ... 99`, then prefixes `03` for Wi-Fi. - Native `UAV` mode sends the inner payload directly. - Stop/exit sends native `65` (`0x65`, decimal 101) when native is active, or UDP `08 01` otherwise. The biggest unique piece is not a drone: 4DRC FPV includes an excavator/drive mode. 
`WifiIdUtils` defines: ```text EXCAVATOR_720 = 89 ``` If this ID is selected, the menu opens `ExcavatorFragment`, which uses `DriveController`, not `FlyController`. `DriveController` packet: ```text 33 TURN ACCEL BUCKET_FLAGS MODE_FLAGS 88 ``` Wi-Fi wrapper: ```text 03 33 TURN ACCEL BUCKET_FLAGS MODE_FLAGS 88 ``` Drive/excavator bytes: - Byte 0: `0x33` (`51`) - Byte 1: turn - Byte 2: accelerator - Byte 3: bucket flags - `0x01`: excavator rise - `0x02`: excavator drop - `0x04`: excavator left - `0x08`: excavator right - Byte 4: mode flags - `0x04`: auto - `0x08`: music - `0x10`: light - Byte 5: `0x88` (`136`) This packet has no checksum byte and is not compatible with TurboDrone's quadcopter control model. If we ever support it, it should be a separate ground-vehicle/excavator backend, not a `cooingdv` flight variant. ## Telemetry And App Messages Both apps pass received UDP packets to the flight fragment. Common photo/video notifications: - If `bArr[2] == 77` (`0x4d`, ASCII `M`), the app treats it as a photo event. The photo counter is read from `bArr[3]`. - If `bArr[2] == 88` (`0x58`, ASCII `X`), the app treats it as a video event. The video counter is read from `bArr[4]`. - On new photo count, both apps send `09 01`. - On new video count, both apps send `09 02`. - Shorter packets with only `bArr[2] == M/X` trigger direct UI tab switching. RC UFO has additional GL Wi-Fi status handling when `SocketClient.getDeviceType() == 2` and `bArr[0] == 0x66`: - If `bArr[1] == 0`, it reads state from `bArr[2]` and `bArr[9]`. - If packet length is 10 or 15, it reads a state byte from `bArr[4]`. - State values toggle `isTakingControl` and simulate top-list UI clicks for photo/video tabs. KY's Wi-Fi `SocketClient` handles first-byte resolution, GL/TC detection, camera reset state in `bArr[1]`, and screen-switch state in `bArr[2]`. KY's richer GL status parsing appears in the native `PicDataCallback` path used by `DeviceGLFragment` and `DeviceBLFragment`. ## KY UFO Native Path KY UFO includes `com.cooingdv.bl60xmjpeg.UAV` and native wrappers: - `GLJni` loads `libuav_gl` - `TCJni` loads `libuav_tc` `MainActivity` initializes the native stack: ```text UAV.getInstance().init(this) UAV.getInstance().startServer() SocketClient.getInstance().initVideoView(...) SocketClient.getInstance().start() ``` `UAV` starts in unknown device type `0`. Native `deviceStatus` sets: - `10` for TC. - `2` for GL. `UAV.sendCommand(byte[])` sends to the native implementation selected by `mDeviceType`. Important distinction: - Wi-Fi control packets include the leading `0x03` wrapper. - Native `UAV` commands use the inner 8-byte TC or 20-byte GL payload directly. Native video callbacks: - `picData(byte[] bArr, long seq, byte quality)` receives JPEG-like frame bytes. - On first frame, `UAV` marks itself active and sends native command `0x64`. - If not stopped, frames are passed to `PicDataCallback.onData`. - `picMessage(byte[] bArr)` is used for resolution/status messages and can send native command `0x63` after resolution discovery. RC UFO does not include this `bl60xmjpeg.UAV` path in the inspected package. It is Wi-Fi/RTSP oriented and adds password handling and advertising/consent code. 
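Because the Wi-Fi and native `UAV` paths carry the same inner payload with and without the leading `0x03` wrapper, a two-line illustration helps keep them apart. This is a sketch only: the helper name is invented, and the XOR checksum is assumed from the neutral test packet listed later in this note.

```python
def tc_inner(b1=0x80, b2=0x80, acc=0x80, turn=0x80, flags=0x00) -> bytes:
    """Inner 8-byte TC payload: 66 B1 B2 ACC TURN FLAGS CHECKSUM 99 (checksum assumed XOR of B1..FLAGS)."""
    checksum = b1 ^ b2 ^ acc ^ turn ^ flags
    return bytes([0x66, b1, b2, acc, turn, flags, checksum, 0x99])

native_payload = tc_inner()                  # what UAV.sendCommand(...) takes in native mode
wifi_packet = bytes([0x03]) + tc_inner()     # what the Java Wi-Fi path sends to 192.168.1.1:7099
assert wifi_packet.hex(" ") == "03 66 80 80 80 80 00 00 99"
```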
## Native Library Inventory KY UFO ships native libraries under `resources/lib` for three ABIs: - `arm64-v8a` - `armeabi` - `armeabi-v7a` Each ABI contains: - `libgesture-lib.so` - `libgpuimage-library.so` - `libijkffmpeg.so` - `libijkplayer.so` - `libijksdl.so` - `libopencv_java3.so` - `libuav_gl.so` - `libuav_tc.so` The `arm64-v8a` libraries are the most useful static-analysis target: | Library | Size | SHA-256 prefix | Role | | --- | ---: | --- | --- | | `libuav_gl.so` | 30,472 | `b62090ca898f41d4` | GL native MJPEG/control engine | | `libuav_tc.so` | 26,376 | `d43457e0f04b6025` | TC native MJPEG/control engine | | `libgesture-lib.so` | 501,976 | `b209601f7489a586` | OpenCV-backed gesture recognition | | `libijkplayer.so` | 418,984 | `c8ed8af43bf12090` | IJK player core | | `libijksdl.so` | 485,448 | `d53a2b3c63a6ce35` | IJK SDL/media glue | | `libijkffmpeg.so` | 3,780,216 | `3787aeac5935379a` | FFmpeg media stack | | `libopencv_java3.so` | 18,696,224 | `34b23b9914cfb6bb` | OpenCV runtime | | `libgpuimage-library.so` | 5,448 | `a7d4b44990bb5ef0` | GPUImage JNI/helper | RC UFO's inspected `resources` tree has no `resources/lib` directory and no bundled `.so` files. Its Java still calls `System.loadLibrary("lib_gesture")`, so this particular decompile appears to be incomplete for that library, built from a split APK without the native split, or decompiled from an APK variant that omitted native libs. ## Native JNI Surface KY `GLJni` exports these JNI entrypoints from `libuav_gl.so`: - `Java_com_cooingdv_bl60xmjpeg_utils_GLJni_nativeInit` - `Java_com_cooingdv_bl60xmjpeg_utils_GLJni_nativeStart` - `Java_com_cooingdv_bl60xmjpeg_utils_GLJni_nativeStop` - `Java_com_cooingdv_bl60xmjpeg_utils_GLJni_nativeUninit` - `Java_com_cooingdv_bl60xmjpeg_utils_GLJni_UnregisterDeviceStatus` - `Java_com_cooingdv_bl60xmjpeg_utils_GLJni_nativeSendCommand` - `Java_com_cooingdv_bl60xmjpeg_utils_GLJni_nativeSetCameraIndex` - `Java_com_cooingdv_bl60xmjpeg_utils_GLJni_nativeSetQPara` - `Java_com_cooingdv_bl60xmjpeg_utils_GLJni_nativeSetModify` KY `TCJni` exports these JNI entrypoints from `libuav_tc.so`: - `Java_com_cooingdv_bl60xmjpeg_utils_TCJni_stringFromJNI` - `Java_com_cooingdv_bl60xmjpeg_utils_TCJni_unimplementedStringFromJNI` - `Java_com_cooingdv_bl60xmjpeg_utils_TCJni_nativeSendCommand` - `Java_com_cooingdv_bl60xmjpeg_utils_TCJni_nativeSetModify` - `Java_com_cooingdv_bl60xmjpeg_utils_TCJni_setActiveCameraIndex` - `Java_com_cooingdv_bl60xmjpeg_utils_TCJni_setQPara` `libgesture-lib.so` exports: - `Java_com_cooingdv_kyufo_utils_JniUtils_nativeGestureRecognition` The KY gesture library links against `libopencv_java3.so`, references `gesture.jpg`, `fist.xml`, and `rpalm.xml` under the app documents directory, and appears UI/vision-only. It does not appear to carry flight-control or video transport protocol logic. The native callback map is: ```text libuav_gl.so -> GLJni.cbJpegFromNative(byte[], long, byte) -> ReceiveDataCallback.picData(...) -> UAV.AnonymousClass1.picData(...) -> PicDataCallback.onData(...) -> MjpegThread.drawBitmap(...) -> GLJni.cbCtlMsgFromNative(byte[], long) -> ReceiveDataCallback.picMessage(...) -> UAV.AnonymousClass1.picMessage(...) -> PicDataCallback.onReceiver(...) -> GLJni.cbDeviceStatusFromNative(byte[], long) -> ReceiveDataCallback.deviceStatus(...) -> UAV.AnonymousClass1.deviceStatus(...) libuav_tc.so -> TCJni.function_for_pic(byte[], long, byte) -> ReceiveDataCallback.picData(...) -> TCJni.java_function_for_mcuctl(byte[], int, int) -> ReceiveDataCallback.picMessage(...) 
``` ## Native MJPEG Engine Findings `libuav_gl.so` and `libuav_tc.so` are not generic wrappers only. They contain a small native MJPEG/control engine with symbols such as: - `mjpeg_ndk_start` - `mjpeg_ndk_startup` - `mjpeg_ndk_stop` - `mjpeg_ndk_command_send` - `mjpeg_ndk_settings_send` - `mjpeg_ndk_queryinfo_cmd_send` (TC) - `mjpeg_ndk_custom_cmd_send` (GL) - `mjpeg_ndk_frame_callback_register` - `mjpeg_ndk_ctlmsg_cb_register` - `mjpeg_ndk_device_status_cb_register` (GL) - `handle_mcu_msg_ctlmsg` - `handle_mcu_msg_frag` - `build_send_ack` The native libraries also contain built-in JPEG header tables: - `jpeg_header_640x360_Q5` - `jpeg_header_640x360_Q10` - `jpeg_header_640x360_Q25` - `jpeg_header_640x360_Q50` - `jpeg_header_640x360_Q75` - `jpeg_header_640x360_Q100` The decompiled native fragment handlers show the native path is assembling JPEG frames from MCU fragments: - The fragment payload size is `0x400` bytes for non-final fragments. - The final fragment copies `body_len & 0x3ff` bytes. - Fragment slots are keyed by sequence and fragment ID. - Four image assembly slots are used. - A small output queue is used; when full, the oldest queued frame is dropped. - Fragment receipt is tracked with a bitset. - The first fragment causes the library to copy one of the built-in JPEG headers into the output buffer. - Width/height fields are patched into that JPEG header. - The final fragment appends JPEG EOI bytes `ff d9`. - Once all fragments are present, a complete JPEG is pushed to the Java callback queue. This is stronger than the Java-only inference: KY's native `UAV` path delivers complete JPEG byte arrays to Java after native fragment reassembly. The Java `MjpegThread` then decodes those complete JPEGs with `BitmapFactory`. The quality byte from native fragment metadata selects the JPEG header table: - `5` -> Q5 - `10` -> Q10 - `25` -> Q25 - `50` -> Q50 - `75` -> Q75 - `100` -> Q100 Both `libuav_gl.so` and `libuav_tc.so` contain `192.168.169.1` as a native target string, and the decompiled native start paths pass port string `8800`. That does not match the Wi-Fi RTSP/UDP Java constants `192.168.1.1:7070` and `192.168.1.1:7099`; the KY native `UAV` path is therefore a separate BL60x-style native transport, not the same Java `SocketClient` Wi-Fi path. ### Native Socket Lifecycle Both native engines allocate a large session object and open a UDP socket: - Local bind: `getaddrinfo("0.0.0.0", NULL, ...)`, `socket(AF_INET, SOCK_DGRAM)`, then `bind(...)`. Because no service/port is provided, this binds an ephemeral local UDP port. - Remote target: `getaddrinfo("192.168.169.1", "8800", ...)`. - GL marks the socket non-blocking with `fcntl(..., O_NONBLOCK)`. - Both engines store the local socket and remote sockaddr in the session. GL creates three detached threads from `create_instance`: - Timer / ACK / start thread. - Frame delivery thread. - Receive/parser thread. TC creates analogous threads, but its thread functions attach to the JVM because TC uses static Java callback functions. The timer/start thread sends a 4-byte start packet until the first frame/fragment activity is established: ```text ef 00 04 00 ``` This is `0x000400ef` in the little-endian decompiler output. The thread sends it about every 100 ms while the engine is not yet active. Once active, it watches for fragment silence; after roughly 3000 ms without fragments, it resets the native assembly state and starts again. The native engines also send ACKs about every 25 ms while active. 
If the last fragment was very recent, the ACK builder uses the latest fragment slot; if not, it uses a special `0xfffffffe` path that can request recovery or report pending flight-control-only state. ### Native Incoming Message Envelope The native receive thread reads up to `0x438` bytes from the UDP socket and only accepts packets whose first byte is `0x93` and whose 16-bit length field at offset 2 equals the number of bytes read. The observed incoming envelope is: ```text offset size meaning 0x00 1 0x93 packet marker 0x01 1 message type 0x02 2 total packet length 0x04 4 sequence / command id / status id 0x08 2 payload length for control/status callbacks 0x0c ... payload bytes ``` Message types observed in native dispatch: - `0x01`: image fragment, handled by `handle_mcu_msg_frag`. - `0x02`: ACK, TC-only dispatch to `handle_mcu_msg_ack`. - `0x04`: MCU control/status message, handled by `handle_mcu_msg_ctlmsg`. - `0x08`: query-info response, TC-only dispatch to `handle_mcu_msg_queryinfo_resp`. GL's receive thread handles fragment (`0x01`) and control/status (`0x04`). TC's receive thread handles fragment (`0x01`), ACK (`0x02`), control/status (`0x04`), and query-info response (`0x08`). ### Native Control Message Handling Native `nativeSendCommand(byte[])` does not reinterpret the Java control bytes. Both GL and TC wrappers copy the Java byte array from JNI into a local buffer and pass it to `mjpeg_ndk_command_send`. Important differences: - GL native command send accepts payloads shorter than `0x81` bytes and stores command length and `length + 0x0c`. - TC native command send accepts payloads shorter than `9` bytes and stores a duplicate command envelope with magic `0x04ef`. - The Java `FlyController` sends inner TC/GL payloads into native mode, not the Wi-Fi `0x03` wrapper. Native command payloads are not always sent immediately as standalone UDP packets. The regular `mjpeg_ndk_command_send` path stores the command in the session so the ACK/timer thread can include it in the next outgoing ACK/control packet. This explains the native log string: ```text [ACK] 0 frame ack, flyctl msg only ``` Settings/custom helpers do send immediate UDP packets: - GL/TC settings packets use magic `0x04ef`, a `length + 0x0c` field, and copy the settings payload after a 12-byte native envelope. - GL custom command packets use magic `0x20ef`, a `length + 4` field, and copy a payload of 1..64 bytes. - TC query-info packets use magic `0x10ef` and message type/status `0x08` in the decompiled `0x000810ef` word. Native `handle_mcu_msg_ctlmsg` receives MCU control/status payloads and forwards the bytes after a 12-byte native envelope to Java callbacks: - GL forwards `param_2 + 0x0c`, length at `param_2 + 8`, and sequence/status at `param_2 + 4`. - TC checks a header byte `0x93`, validates the packet length, forwards `param_2 + 0x0c`, and sends an 8-byte ACK/control response with magic `0x0808ef`. Native `build_send_ack` builds ACK packets for image fragments: - GL uses magic `0x02ef` with `0x01000202` in the decompiled local header. - TC uses analogous ACK behavior and logs urgent whole-loss, normal, special, and fly-control-only ACK cases. - GL logs `[ACK] 0 frame ack, flyctl msg only` when there are no image-frame ACKs but a pending flight-control message exists. These ACK/envelope formats are internal to the KY native MJPEG transport. They are not the same as the Java Wi-Fi RC packets documented earlier. 
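As a reading aid, here is a minimal Python sketch that validates and splits an incoming native datagram along the offsets above. It parses only the documented layout; the field names and the little-endian assumption for the multi-byte fields are mine, not lifted from the native code.

```python
import struct
from typing import Optional

def parse_native_envelope(datagram: bytes) -> Optional[dict]:
    """Split one 0x93-marked native datagram per the envelope table above."""
    if len(datagram) < 0x0C or datagram[0] != 0x93:
        return None
    total_len = struct.unpack_from("<H", datagram, 2)[0]    # assumed little-endian
    if total_len != len(datagram):                          # native code rejects length mismatches
        return None
    msg_type = datagram[1]        # 0x01 fragment, 0x02 ACK (TC), 0x04 ctl/status, 0x08 query-info (TC)
    seq = struct.unpack_from("<I", datagram, 4)[0]
    payload_len = struct.unpack_from("<H", datagram, 8)[0]  # meaningful for control/status callbacks
    return {"type": msg_type, "seq": seq, "payload": bytes(datagram[0x0C:0x0C + payload_len])}
```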
### Native Frame Delivery Thread After `handle_mcu_msg_frag` has assembled a complete JPEG, it pushes the image into a small native queue. A separate delivery thread drains that queue: - Sleeps around 38 ms between normal delivery attempts. - If the queue is empty, waits an additional ~19 ms and checks again. - Logs delayed delivery when frame interval exceeds about 76 ms. - Calls the registered Java frame callback with: - JPEG bytes - total JPEG length - sequence metadata - quality byte - GL-only extra camera/status flag This means the native `UAV` frame callback is already latency-throttled before Java sees it. It is not just a raw packet callback. ## PTZ / Servo / Camera Tilt Findings No confirmed PTZ or camera-tilt servo command was found in the inspected KY UFO or RC UFO paths. Several UI elements look suspicious at first: - `verticalBar` - `horizontalBar` - `horizontalCenterBar` - `view_control_device_vertical_add` - `view_control_device_vertical_del` - `VerticalSeekBar` However, these are flight trim controls, not camera servos. In both KY UFO and RC UFO, `verticalCurrentValue` is added into `controlByte2`, the same pitch axis that is sent in the regular RC control packet. `horizontalCurrentValue` adjusts yaw/turn, and `horizontalCenterCurrentValue` adjusts `controlByte1`. Observed mapping: - `horizontalBar` -> yaw trim / `controlTurn` - `horizontalCenterBar` -> roll trim / `controlByte1` - `verticalBar` -> pitch trim / `controlByte2` KY native camera-related functions are also not PTZ: - `UAV.switchActiveCamera(...)` -> `setActiveCameraIndex(...)` / `nativeSetCameraIndex(...)`. Native code stores a camera index byte that is later included in ACK/control state; this appears to select front/rear or active camera stream. - `UAV.setQPara(...)` -> native JPEG quality parameter ranges. Native code validates quality values such as 5, 10, 25, 50, 75, and 100 and stores min/max quality thresholds. - `UAV.setModify(...)` is firmware/configuration UI for SSID, resolution, channel, protocol, rotate, and flow orientation. It is not exposed as runtime tilt control. The apps also implement digital pan/zoom on decoded frames through `MjpegThread.setFocusScale(...)` and `setFocusMove(...)`, but that is display crop/zoom, not mechanical camera motion. Conclusion: if some CooingDV-published drones have tilt servos, this pair of decompiled apps does not expose an obvious runtime PTZ command for them. It may exist in another white-label app, another firmware telemetry/status packet, or a model-specific command not reachable from the visible KY/RC UI. ## Video Feed ### Wi-Fi RTSP Path Both apps use: ```text rtsp://192.168.1.1:7070/webcam ``` Playback is handled by `IjkVideoView`. The apps configure: - `mediacodec = 0` - `readtimeout = 5000000` - `preferred-image-type = 0` - `image-quality-min = 2` - `image-quality-max = 20` - `preferred-video-type = 2` - `video-need-transcoding = 1` - `mjpeg-pix-fmt = 1` - `video-quality-min = 2` - `video-quality-max = 20` - `x264-option-preset = 0` - `x264-option-tune = 5` - `x264-option-profile = 1` - `x264-params = "crf=23"` - `auto-drop-record-frame = 3` - codec option `err_detect = "explode"` The app enables original frame callbacks: ```text mVideoView.setOutputOriginalVideo(true) mVideoView.setOnReceivedOriginalDataListener(...) 
``` The callback passes each `byte[]` to the fragment's `onVideo(...)`, and the fragment calls: ```text mjpegThread.drawBitmap(bArr) ``` `MjpegThread` decodes the whole byte array: ```text BitmapFactory.decodeByteArray(remove, 0, remove.length) ``` That makes the app boundary frame format effectively "complete JPEG blob per callback". There is no Java RTP packet assembler in these app paths. ### Display Transform Variations The apps apply display-side transformations after decoding JPEG bytes: - Cropping `800x600` frames to `800x540` by removing 30 pixels top and bottom. - Rotating portrait-like frame sizes such as `240x320`, `120x160`, and `160x272` by 90 degrees unless the resolution ID is a no-rotate ID. - Optional 180-degree rotation through `isTurnBitmap`. - Portrait crop to a 9:16 center region. - Focus/zoom cropping controlled by `focusScale`, `focusMoveX`, and `focusMoveY`. - Gesture recognition uses the same video byte stream. No equivalent display transforms are currently implemented in `CooingdvVideoProtocolAdapter`. TurboDrone receives decoded RTSP frames from OpenCV and re-encodes them to JPEG without app-specific rotation/cropping. ### Local Recording The apps' `VideoModel` records by encoding processed display frames to H.264: - Encoder: `video/avc` or `OMX.google.h264.encoder`. - Output: `REC__0.mp4`. - Bitrate: `2000000`. - I-frame interval: `5`. - Presentation time: `(frameIndex * 1000000 / fps) + 132`. - SPS/PPS handling: once codec config is captured, if output byte 4 is `101` (`0x65`, H.264 IDR NAL), the app prepends SPS/PPS before returning the encoded buffer. This is the app's local recording pipeline. It should not be confused with the drone-to-phone RTSP/JPEG frame boundary. ## TurboDrone Implementation ### CLI and Web Defaults `main.py` supports: ```text --drone-type cooingdv ``` Defaults: - `drone_ip = 192.168.1.1` - `control_port = 7099` - `video_port = 7070` - `control_rate = 20.0` `web_server.py` follows the same class wiring for `DRONE_TYPE=cooingdv`. ### RC Model `CooingdvRcModel` extends `BaseRCModel` with: - Stick range: `50..200`, center `128`. - `IncrementalStrategy`. - One-shot flags: - `takeoff_flag` - `land_flag` - `stop_flag` - `flip_flag` - `calibration_flag` - Toggle state: - `headless_flag` The model exposes: - `takeoff()` - `land()` - `emergency_stop()` - `flip()` - `toggle_headless()` - `calibrate_gyro()` - `get_control_state()` ### RC Protocol Adapter `CooingdvRcProtocolAdapter`: - Opens one UDP socket. - Binds to an ephemeral local port to match Android `DatagramSocket()`. - Sends heartbeat `01 01` every second. - Starts a receive thread for telemetry-driven variant detection. - Builds TC or GL packets from the active variant. - Sends packets to `drone_ip:control_port`. - On `stop()`, stops heartbeat, sends `08 01`, stops receiver, and closes the socket. Important constants: - `PREFIX = 0x03` - `START_MARKER = 0x66` - `EXTENDED_MARKER = 0x14` - `END_MARKER = 0x99` - `HEARTBEAT_COMMAND = bytes([0x01, 0x01])` - `STOP_COMMAND = bytes([0x08, 0x01])` The adapter clears one-shot command flags immediately after building each packet. It does not clear `headless_flag`, because that is a toggle state. ### Flight Controller Scheduling `FlightController._control_loop`: - Computes `dt`. - Calls `model.update(dt, axes)`. - Calls `protocol.build_control_packet(model)`. - Calls `protocol.send_control_packet(packet)`. - Sleeps `1 / update_rate`. 
For CooingDV the default update interval is 50 ms, matching the Android app's `SEND_COMMAND_INTERVAL = 50` in `FlyController`. ### Video Protocol Adapter `CooingdvVideoProtocolAdapter`: - Builds `rtsp://{drone_ip}:{video_port}/webcam`. - Opens it using `cv2.VideoCapture(rtsp_url, cv2.CAP_FFMPEG)`. - Sets `CAP_PROP_BUFFERSIZE = 1` for lower latency. - Reads decoded BGR frames. - Re-encodes each frame as JPEG with quality 85. - Wraps the bytes with `CooingdvVideoModel.ingest_chunk`. - Publishes `VideoFrame` objects through a size-2 queue. - Drops the oldest queued frame when the queue is full. - Reconnects after a 5 s frame timeout or OpenCV errors. This is different from TurboDrone's S2x and WiFi-UAV adapters. There is no raw UDP video packet assembler for CooingDV in TurboDrone; `get_packets()` returns an empty list. ## Implementation Gaps And Risks - TurboDrone currently uses product-level names (`takeoff`, `land`, `flip`) for bits whose app symbols are lower-level (`isFastFly`, `isFastDrop`, `isCircleTurnEnd`). This may be correct UX-wise, but the doc should not imply the Android apps have named takeoff/land opcodes. - `0x08 0x01` is only an exit-control command in the apps. It should not be used as a startup/init packet. - GL bit `flags2 0x02` (`isFixedHeightMode`) is observed in both apps, but TurboDrone does not currently expose or set a CooingDV fixed-height flag. - KY GL `flags1 0x10` (`isOpenLight`) is not exposed by TurboDrone. - GL `flags1 0x40` (`isGestureMode`) is app-side UI/vision behavior and should not be treated as a drone flight command unless separately verified. - RC UFO's decompiled `FlyControlTask.run()` has damaged control-flow output around `isFastReturn` / `isUnLock`; KY's source is cleaner and should be preferred for TC flag interpretation. - RC UFO's decompiled UDP receive method is also damaged around password and telemetry parsing. The visible fragments still confirm password IDs and the 8-digit password command. - RC UFO's Java references `System.loadLibrary("lib_gesture")`, but this workspace's RC UFO decompile has no `resources/lib` tree. If RC native behavior matters later, reacquire the full universal APK or the relevant split APK containing native libraries. - The exact RTSP wire codec is not fully proven from Java source alone. At the app boundary, the original-data callback is handled as JPEG bytes. - KY native `UAV` video is now stronger than inference: `libuav_gl.so` and `libuav_tc.so` reassemble 1024-byte MCU fragments into complete JPEG images using embedded 640x360 JPEG headers and append `ff d9` on the final fragment. - TurboDrone does not yet implement the Android app's display transforms: no-rotate IDs, portrait rotations, `800x600 -> 800x540` crop, or alternate camera/screen restart behavior. - TurboDrone auto-detection falls back to TC until it receives a recognized telemetry byte. GL hardware may need `COOINGDV_VARIANT=gl` if telemetry is delayed, filtered, or not sent to the ephemeral local port. - KY native `libuav_gl.so` and `libuav_tc.so` use a separate `192.168.169.1:8800` native transport, while the Java CooingDV Wi-Fi path uses `192.168.1.1:7099` for RC and `192.168.1.1:7070` for RTSP. Do not change TurboDrone's CooingDV defaults based on the native path without hardware evidence that the target drone exposes that BL60x interface. ## Deep-Dive Follow-Ups - Decompile the remaining unnamed helper functions around the GL/TC native threads to improve field names and confirm every byte in the ACK packet envelope. 
The main native socket target and incoming envelope are now mapped. - Recover or reacquire RC UFO's missing native split if the RC gesture/native behavior needs parity with KY. The Java package references `lib_gesture`, but the inspected RC resources do not include it. - Capture KY native traffic when `UAV.isActive()` is true. Confirm whether `192.168.169.1:8800` is reachable on real KY hardware, whether packets are sent over Wi-Fi or another interface, and how it relates to the Java RTSP path. - Capture GL and TC Wi-Fi traffic from hardware while toggling fixed-height, light, camera, screen switch, gyro correction, and emergency stop. Use the captures to decide which currently unimplemented flags are safe to add to TurboDrone. - Add optional TurboDrone diagnostics to log first-byte telemetry IDs, GL `0x66` status packets, and camera/gallery notifications before adding more control surface. ## Useful Test Packets Neutral TC hover/control packet: ```text 03 66 80 80 80 80 00 00 99 ``` Neutral GL hover/control packet: ```text 03 66 14 80 80 80 80 00 00 00 00 00 00 00 00 00 00 00 00 00 99 ``` TC emergency-stop style packet: ```text 03 66 80 80 80 80 04 04 99 ``` GL emergency-stop style packet: ```text 03 66 14 80 80 80 80 02 00 00 00 00 00 00 00 00 00 00 02 99 ``` Heartbeat: ```text 01 01 ``` Leave control mode: ```text 08 01 ``` Camera switch: ```text 06 01 06 02 ``` Gallery/screen sync: ```text 09 01 09 02 ``` RC UFO password command for password `12345678`: ```text 0a 01 02 03 04 05 06 07 08 ``` ================================================ FILE: docs/research/wifi_cam.md ================================================ # WiFi_CAM Protocol Research This note documents the decompiled `WiFi_CAM` Android app from `decompiled-wifi-cam-6.1.8`, including the native `libCamera.so` transport in `resources/lib/arm64-v8a`. ## Executive Summary `WiFi_CAM` should be treated as a separate TurboDrone backend candidate named `wifi_cam`. The first Java-only pass looked similar to `cooingdv_jieli` because `IConstants.java` contains Jieli-style constants such as `192.168.8.15`, UDP `2228`, RTP `6666`, and SDP `6789`. Native analysis of `libCamera.so` shows the actual live camera/control path is different: - Device IP: `192.168.4.153` - Video/data UDP socket: `8080` - RC/command UDP socket: `8090` - Stream start command: `42 76` - Stream stop command: `42 77` - Rotate command: `42 78` - Camera switch command: `42 79` - RC payload: raw Java-built `66 ... 99` 8-byte or 20-byte command sent to UDP `8090` - Video payload: fixed `0x5c0` UDP datagrams with an 8-byte native header and a `0x5b8` JPEG slice body, reassembled into complete JPEG frames by native code This does not match TurboDrone's current `cooingdv`, `cooingdv_jieli`, `s2x`, or `wifi_uav` implementations closely enough to reuse one unchanged. ## Native Evidence Native libraries are present under: ```text decompiled-wifi-cam-6.1.8/resources/lib/arm64-v8a/ ``` Important library: ```text libCamera.so ``` Ghidra headless outputs used for this pass: - `tools/ghidra_out/wifi_cam_libCamera_functions_all.txt` - `tools/ghidra_out/wifi_cam_libCamera_jni_table.txt` - `tools/ghidra_out/wifi_cam_libCamera_decompiled/` Helper scripts added for repeatable native analysis: - `tools/ghidra_scripts/ExportFunctionsByAddress.java` - `tools/ghidra_scripts/DumpJniNativeTable.java` - `tools/ghidra_scripts/DumpMatchingSymbols.java` ## JNI Table `JNI_OnLoad` registers 22 native methods for `com.tzh.wifi.utils.Camera`. 
Important mappings: ```text iCameraInit -> FUN_00133718 iCameraDeinit -> FUN_001337f4 iCameraStart -> FUN_001338f8 iCameraStop -> FUN_0013392c iCmdStart -> FUN_00133b20 iCameraRoate -> FUN_00133b28 iCameraSwitch -> FUN_00133b50 iCmdSend -> FUN_00133b78 iCmdResume -> FUN_00133c2c iCmdStop -> FUN_00133c34 iYuanInit -> FUN_00133d2c iYuanProc -> FUN_00133e6c iYuanRelease -> video_uninit ``` `iCameraInit` allocates the native `Socket`, `MjpegToAvi`, `AviReader`, and `MjpegToMp4` objects. `iCameraStart` calls `Socket::connect()`. ## Socket Lifecycle `Socket::connect()` starts two threads: - `udpSocketEnterance` - `cmdSocketEnterance` `udpSocketEnterance` creates two UDP sockets: ```text Socket::create(udpSocket, "192.168.4.153", 0x1f90) // 8080 Socket::create(cmdSocket, "192.168.4.153", 0x1f9a) // 8090 ``` The sockets are unconnected UDP sockets using `sendto(...)` and `recvfrom(...)`. There is no CTP envelope, no `CTP:` string, no `FLYING_CTRL` topic, and no Jieli JSON transport in the native library. ## IP Address Families The app carries several IP constants, but they do not all represent active live transport paths in the reviewed build: - `192.168.4.153` is the active native camera/control target. `libCamera.so` hardcodes this address for both UDP `8080` video/data and UDP `8090` command sockets. - `192.168.4.151` appears only in `WiFiApp.java` as `TCP_SERVER_IP`. The app compares the Wi-Fi DHCP gateway against `192.168.4.151` and `192.168.4.153`, returning `1` or `2`, but no caller was found under `com/tzh/wifi`. Treat this as a dormant/legacy mode hint until packet captures prove otherwise. - `192.168.8.15` appears in `IConstants.DEFAULT_DEV_IP` with Jieli-like `2228`/RTP/SDP constants. No Java caller or native `libCamera.so` evidence was found for this path in the reviewed WiFi_CAM live preview/control flow. - `192.168.1.1` appears as an FTP/media constant, likely for legacy file access or shared firmware support, not live flight/video control. This suggests WiFi_CAM is a broad whitelabel codebase carrying constants for multiple device families. For TurboDrone, implement the observed `192.168.4.153` native UDP backend first, and only split additional `wifi_cam_*` variants if later apps or captures show one of the dormant IP families is actually used. ## Stream Commands Ghidra data-symbol dump shows the 2-byte commands: ```text startCmd = 42 76 stopCmd = 42 77 rotateCmd = 42 78 switchCmd = 42 79 ``` `udpSocketEnterance` sends `startCmd` to `192.168.4.153:8080` after socket creation and sends `stopCmd` when disconnecting. `iCameraRoate()` sends `rotateCmd`; `iCameraSwitch()` sends `switchCmd`. ## Camera Type Probe After sending `startCmd`, `udpSocketEnterance` waits for an 8-byte response from the `8080` socket. Known camera-type responses: ```text 55 00 01 00 00 00 01 99 -> camera type 1 55 00 02 00 00 00 02 99 -> camera type 2 ``` The native code calls `C_Method::onNotifyCameraType(type)`, which reaches `WiFiModelImpl.ICameraType(int)` and selects the Java command payload mode via `BaseCmd.setCameraType(int)`. ## RC Command Path Java builds flight packets in: ```text decompiled-wifi-cam-6.1.8/sources/com/tzh/wifi/wificam/model/base/BaseCmd.java ``` `iCmdSend([B, int)` copies the Java byte array and calls: ```text Socket::sendCmd(payload, length) ``` `Socket::sendCmd` sends raw bytes to `192.168.4.153:8090`. 
Short 8-byte payload: ```text index value 0 0x66 1 roll 2 pitch 3 throttle / power 4 yaw 5 flags 6 xor checksum over bytes 1..5 7 0x99 ``` Alternate 20-byte payload for camera type `2`: ```text index value 0 0x66 1 0x14 2 roll 3 pitch 4 throttle / power 5 yaw 6 flags group 1 7 flags group 2 8..17 reserved / zero in Java initializer 18 xor checksum over bytes 2..17 19 0x99 ``` For the 8-byte payload, byte `5` flags are: - `0x01`: one-key fly / fast fly - `0x02`: one-key land / fast drop - `0x04`: emergency - `0x08`: rotate / circle turn - `0x10`: headless mode - `0x80`: checkout / calibration-like flag For the 20-byte payload, bytes `6` and `7` are: - byte `6`, `0x01`: fly and land both map here in the decompiled Java - byte `6`, `0x02`: emergency - byte `6`, `0x04`: checkout - byte `6`, `0x08`: rotate / circle turn - byte `7`, `0x01`: headless mode - byte `7`, `0x02`: stay-high / altitude hold ## Command Response Path `cmdSocketEnterance` listens on the `8090` command socket for 8-byte responses. It copies each response into `recvCmd` and uses the same logic as `parseCmdRecv(...)`: - Read exactly 8 bytes. - Update `prevIdx` from response byte `1`. - Notify Java snap state with response byte `2`. - Ignore duplicate response indexes. The checksum expression in the decompile is awkward, but the shape is clearly an 8-byte command acknowledgement/status channel rather than CTP or JSON. ## Video Packet Format `udpSocketEnterance` receives up to `0x5c0` bytes from the `8080` socket and passes the packet to `Image::put(...)`. Native reassembly behavior: - Full UDP datagram size: `0x5c0` bytes (`1472`) - Native packet header: 8 bytes - JPEG slice body: `0x5b8` bytes (`1464`) - Packet byte `0`: frame id / sequence byte - Packet byte `1`: final-frame marker used by native code; completion requires this byte to equal `1` - Packet byte `2`: total fragment count for the frame - Packet byte `3`: resolution id forwarded to Java - Packet byte `7`: retain/orientation value forwarded to Java - Payload starts at byte `8` `Image::putImgtoBuf(...)` appends each `0x5b8` payload into an internal frame buffer. When the final chunk arrives, it scans for JPEG EOI `ff d9`, validates that the assembled frame starts with JPEG SOI `ff d8` and ends with `ff d9`, and calls: ```text C_Method::onNotifyRecvState(resolution, jpeg_bytes, jpeg_len, retain) ``` Java receives this in `Camera.OnImageRecv(...)`, then forwards frame bytes to `WiFiPresenter.onData(...)`. The app log tag says `MJPEG`, but the app boundary is complete JPEG frame blobs. 
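Before the backend comparison, here is a minimal Python sketch of the reassembly just described: accept `0x5c0`-byte datagrams, strip the 8-byte header, append the `0x5b8` slice body, and emit a JPEG when the final-frame marker arrives. Buffer handling and names are illustrative, not a translation of `Image::put(...)`.

```python
class WifiCamFrameAssembler:
    """Illustrative reassembler for the WiFi_CAM video datagrams described above."""
    HEADER = 8        # native packet header
    SLICE = 0x5B8     # JPEG slice body (1464 bytes)

    def __init__(self):
        self._frame_id = None
        self._buf = bytearray()

    def put(self, datagram: bytes):
        """Feed one UDP datagram; returns a complete JPEG blob or None."""
        if len(datagram) <= self.HEADER:
            return None
        frame_id, final = datagram[0], datagram[1]          # bytes 0 and 1 per the header layout
        if frame_id != self._frame_id:                      # new frame id: restart the buffer
            self._frame_id, self._buf = frame_id, bytearray()
        self._buf += datagram[self.HEADER:self.HEADER + self.SLICE]
        if final != 1:                                      # completion requires byte 1 == 1
            return None
        jpeg, self._buf = bytes(self._buf), bytearray()
        eoi = jpeg.rfind(b"\xff\xd9")                       # scan for EOI like the native code
        if jpeg.startswith(b"\xff\xd8") and eoi != -1:
            return jpeg[:eoi + 2]
        return None
```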
## Comparison With Existing TurboDrone Backends `cooingdv` is not a match: - Uses `192.168.1.1` - Uses UDP `7099` - Uses RTSP `rtsp://192.168.1.1:7070/webcam` - Wraps flight payloads with a leading `0x03` `cooingdv_jieli` is not a match: - Uses `192.168.8.15` - Uses UDP `2228` - Uses CTP envelope beginning with `CTP:` - Uses JSON topics such as `CONTROL_MODE`, `FLYING_CTRL`, and `OPEN_RT_STREAM` - Uses RTP/JPEG on UDP `6666` plus SDP TCP `6789` `wifi_uav` is not a match: - Uses `192.168.169.1` - Uses UDP `8800` / `8801` - Uses different native packet headers and ACK/request behavior `s2x` is the closest structural comparison but still not a match: - Both use a port `8080`, but S2x targets `172.16.10.1:8080` - S2x video uses a separate UDP `8888` receiver and a `0x40 0x40` packet header - S2x start command is `0x08 + local IPv4`, not `42 76` - S2x RC packets are 20-byte packets sent to `8080`, not a separate `8090` command socket ## Recommended TurboDrone Implementation Add a new backend family: ```text DRONE_TYPE=wifi_cam ``` Implementation status: - `wifi_cam` is wired as a first-pass backend in TurboDrone. - RC supports short and extended raw command packets. - Video supports `42 76` stream start, camera-type probing, and native JPEG chunk reassembly. - Hardware validation is still needed against a WiFi_CAM-compatible device. Suggested defaults: ```text WIFI_CAM_DRONE_IP=192.168.4.153 WIFI_CAM_VIDEO_PORT=8080 WIFI_CAM_COMMAND_PORT=8090 ``` Implementation pieces: - `models/wifi_cam_rc.py` - Can reuse the CooingDV-style stick center/range initially. - Needs explicit support for both 8-byte and 20-byte payload modes. - `protocols/wifi_cam_rc_protocol_adapter.py` - Sends raw RC packets to UDP `8090`. - Starts with 8-byte mode unless camera-type probe says type `2`. - Builds checksums exactly like `BaseCmd.java`. - `protocols/wifi_cam_video_protocol.py` - Opens a UDP socket to the device. - Sends `42 76` to `192.168.4.153:8080` on start and `42 77` on stop. - Receives `0x5c0` datagrams. - Handles camera-type responses before frame assembly. - Reassembles `0x5b8` JPEG chunks using the 8-byte native header. - Emits complete JPEG `VideoFrame` objects. - `main.py` / `web_server.py` - Add `wifi_cam` selection and env defaults. - Wire the video adapter and RC adapter separately because ports differ. Open implementation question: - Where to route the native camera type notification. The cleanest Python design is for the video adapter to expose the detected camera type and for `web_server.py` / `VideoReceiverService` to pass that into the RC adapter, or for the RC adapter to default to 8-byte mode with an env override such as `WIFI_CAM_COMMAND_MODE=short|extended`. ## Validation Plan Packet captures should confirm: - App sends `42 76` to `192.168.4.153:8080` at live preview start. - Device returns one of the `55 00 xx 00 00 00 xx 99` camera-type responses. - App sends raw `66 ... 99` RC payloads to `192.168.4.153:8090`. - Video datagrams are `0x5c0` bytes and payload chunks assemble into JPEG frames. - Stop/disconnect sends `42 77` to `8080`. If captures confirm the native decompile, this should become a new `wifi_cam` backend rather than a variant of `cooingdv_jieli`. 
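To support that validation plan, here is a short Python sketch of the probe-then-drive flow: send `42 76` to the video port, read the `55 00 xx ... 99` camera-type reply, then send one 8-byte RC packet to the command port. Socket handling, the timeout, and the neutral stick value `0x80` are assumptions for illustration; only the byte layouts come from the decompile above.

```python
import socket
from typing import Optional

DRONE_IP = "192.168.4.153"
VIDEO_PORT, COMMAND_PORT = 8080, 8090

def probe_camera_type(timeout: float = 2.0) -> Optional[int]:
    """Send the 42 76 stream-start command and parse the camera-type reply, if any."""
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.settimeout(timeout)
    sock.sendto(bytes([0x42, 0x76]), (DRONE_IP, VIDEO_PORT))
    try:
        reply, _ = sock.recvfrom(64)
    except socket.timeout:
        return None
    if len(reply) == 8 and reply[0] == 0x55 and reply[7] == 0x99:
        return reply[2]              # 1 -> short 8-byte RC mode, 2 -> extended 20-byte mode
    return None

def short_rc_packet(roll=0x80, pitch=0x80, throttle=0x80, yaw=0x80, flags=0x00) -> bytes:
    """8-byte payload per BaseCmd.java: 66, roll, pitch, throttle, yaw, flags, xor(1..5), 99."""
    checksum = roll ^ pitch ^ throttle ^ yaw ^ flags
    return bytes([0x66, roll, pitch, throttle, yaw, flags, checksum, 0x99])

if __name__ == "__main__":
    cam_type = probe_camera_type()                     # None means no reply within the timeout
    rc_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    rc_sock.sendto(short_rc_packet(), (DRONE_IP, COMMAND_PORT))
```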
================================================ FILE: docs/research/wifi_uav.md ================================================ # WiFi-UAV Protocol Research This note captures findings from the decompiled WiFi-UAV Android app in `wifi-uav-app-decompiled` and from native analysis of `wifi-uav-app-decompiled/resources/lib/arm64-v8a/libuav_lib.so`. The main takeaway is that "WiFi-UAV" is a drone family, not one protocol. The app routes different SSID families to different backend SDKs. ## App Backend Variants The app uses `defpackage.d00` as a dispatcher. It maps SSID prefixes onto two backend families: ```java put("FlOW_", f.Uav); put("FLOW_", f.Uav); put("WIFI_", f.Fld); put("GD89Pro_", f.Fld); put("WTECH-", f.Fld); put("WTECH_", f.Fld); ``` The dispatcher then binds those backend enum values to concrete implementations: ```java put(f.Fld, wz.X()); put(f.Uav, e00.i0()); ``` Observed / inferred mapping: | SSID prefix | App backend | Java class | Native dependency | Notes | | --- | --- | --- | --- | --- | | `WIFI_` | `Fld` | `defpackage.wz` | `com.lxProLib.lxSigPro` / `lxPro` | Classic WiFi-UAV path. | | `GD89Pro_` | `Fld` | `defpackage.wz` | `com.lxProLib.lxSigPro` / `lxPro` | Same backend as `WIFI_`. | | `WTECH-`, `WTECH_` | `Fld` | `defpackage.wz` | `com.lxProLib.lxSigPro` / `lxPro` | Same backend as `WIFI_`. | | `FLOW_`, `FlOW_` | `Uav` | `defpackage.e00` | `com.example.sdk.UAVSDK` / `libuav_lib.so` | Native UAVSDK backend. | | `DRONE_` | Not in app map | Turbodrone maps to `fld` | Appears K417-compatible with `fld` wire behavior | Added from K417 testing. | ## App Call Graph The main flight screen is `com.lcfld.fldpublic.ActCtrl`. It owns: - `d00 W = d00.Y()` as the selected drone backend. - `xx X = new xx()` as the RC packet builder. Control flow: ```text ActCtrl -> d00 dispatcher -> wz / lxSigPro for Fld devices -> e00 / UAVSDK / libuav_lib.so for FLOW/Uav devices -> xx packet builder -> short 8-byte command for Fld-style control -> extended 20-byte command for UAV/FLOW when supported ``` `d00.g()` starts every registered backend, not just the currently selected one. The selected backend is changed by successful callbacks: - `wz` / `lxSigPro` connection state `5` calls `e0(f.Fld)`. - `e00` posts a connected callback after MCU version data arrives and calls `e0(f.Uav)`. `d00.h(byte[])` is the main command forwarding point. It sends the current RC packet only to the active backend. `ActCtrl` starts its own worker thread in `onCreate()`. The thread calls `P1()` about every `50 ms`, updates PTZ visibility about once per second, and also drives UI effects such as roll animation and water-ripple rendering. The Android app version in the analyzed manifest is: ```text versionName=2024.11.08 versionCode=135 package=com.lcfld.fldpublic ``` ## Fld Backend `defpackage.wz` wraps `com.lxProLib.lxSigPro`: ```java public class wz extends tz { public lxSigPro d = lxSigPro.getInstance(); } ``` Important methods: ```java public int g() { return this.d.Connect(0); } public int h(byte[] bArr) { return this.d.DataForward(bArr, 0); } public int i() { return this.d.DisConnect(); } public int p() { return this.d.StPlay(0); } public int q() { return this.d.StStop(); } ``` Implications: - `Fld` has explicit connect / data-forward / disconnect / start-play / stop-play lifecycle in the app. - Turbodrone does not call `lxSigPro`; it reconstructs the wire behavior in Python. - K417 (`DRONE_*`) appears to use a wire path compatible with this family. 
Additional `Fld` callbacks: - `onStreamCbk()` forwards decoded video frames from `lxSigPro`. - `onSrlDataCbk()` forwards serial/control data from the drone. - `onCfgCbk()` forwards config responses. - `onUpdateFwCbk()` forwards firmware update progress. `wz.k()` initializes `lxSigPro` with `SetMid(1)`, `UidsInit(new int[]{0})`, and `AStRecType = 0`. `lxPro` loads `lxffmpeg` and `lxPro`. Ghidra analysis of `resources/lib/arm64-v8a/liblxPro.so` shows this is a separate native stack from `libuav_lib.so`. Important `lxPro` native wrappers used by the app: | Java call | Native wrapper | App use | | --- | --- | --- | | `Connect(0)` | `ntvConnectInt()` | Start FLD connection. | | `DataForward(bytes, 0)` | `ntvDataForward()` | Forward RC bytes. | | `DisConnect()` | `ntvDisConnect()` | Stop FLD connection. | | `StPlay(0)` | `ntvStPlay()` | Start video stream. | | `StStop()` | `ntvStStop()` | Stop video stream. | | `SdCapture(1)` | `ntvSdCapture()` | Ask drone-side SD/photo capture. | | `SdRecord(boolean)` | `ntvSdRecord()` | Ask drone-side SD/video record. | | `SetCfg(json)` | `ntvSetCfg()` | Configure WiFi SSID and other SDK JSON commands. | ### Native FLD / lxPro Findings `liblxPro.so` is an ARM64 ELF with DWARF/debug info. The relevant compile units include: - `lgProIntface/lxPro.c` - `lgProIntface/lxProJni.c` - `lgProSrc/lxProtocol.c` - `lgProSrc/lgProDev.c` - `lgPublicSrc/lxFFmpeg.c` `lgCreate()` allocates a `0xa30` byte device context, creates command locks and stats queues, initializes check/command/stream sockets to empty state, creates an audio player, and starts `lgThreadTimming()`. The native version log in this library is: ```text v1.0.0(20201022) ``` `Connect(0)` flows through: ```text Java_com_lxProLib_lxPro_ntvConnectInt() -> lgConnect() -> lgConnectPort(dev, ip, 0x49c1) ``` `0x49c1 == 18881`. If the app passes IP `0`, `lgConnectPort()` defaults to: ```text 192.168.100.1:18881 ``` That is different from the `libuav_lib.so` / BL608/BL618 transport, which uses `192.168.169.1:8800` and `:8801`. `lgConnectPort()` starts the FLD control/check path: - creates `0x400000` byte buffers for command and check sockets - configures the check socket as TCP - starts `lgThreadCmdParse` - starts `lgThreadCekClient` `lgThreadCekClient()` calls `lgOnSocketResv()` on the check socket. The socket logic uses Java-configured connection cycle/timeout values, with minimums of `500 ms` cycle and `100 ms` connect timeout. The FLD command parser accepts native packets with: ```text start marker: f5 e2 e3 cb end marker: e3 a5 cb cc ``` `lgPackCmdSend()` builds this envelope: | Offset | Size | Meaning | | --- | ---: | --- | | `0` | 4 | start marker `f5 e2 e3 cb` | | `4` | 4 | `DgCmdId_e` command id | | `8` | 4 | payload size | | `12` | var | payload | | `12 + payload_size` | 4 | end marker `e3 a5 cb cc` | `DataForward(bytes, 0)` is not a length bug. JNI ignores the Java-side `0` for length, obtains the actual byte-array length, and calls: ```c lgDataForward(dev, data, array_length, isTcp); ``` With `isTcp == 0`, `lgDataForward()` selects UDP and sends: ```text lgPackCmdSend(dev, eDgSrlOspf, rc_bytes, rc_len, eSckTypeUdp) ``` So the short/extended `xx` RC bytes are nested inside the FLD native command envelope as an `eDgSrlOspf` payload. `lgSendCJsonData()` creates JSON and sends it as: ```text lgPackCmdSend(dev, eDgCmdJson, json_with_trailing_nul, len + 1, eSckTypeTcp) ``` Examples from native decompilation: - `lgStPlay(Rel)` sends JSON with `cmd`, `StRelIdx=Rel`, and `vopen=1`, then starts stream threads. 
- `lgStStop()` sends JSON with `cmd` and `vopen=0`, then starts a delayed stream-thread destroy path. - `lgSdRecord(start)` sends JSON with `cmd` and `record=(start != 0)`. - `lgGetCfg(Kid)` sends JSON with only `cmd=Kid`. - `lgSetCfg(json)` forwards the Java JSON string directly through `lgSetCfg()` and logs the result. `lgStartStThread()` creates a `5,000,000` byte stream buffer, initializes video and audio packet lists, then starts: - `lgThreadStClient` - `lgStDecodeThread` - `lgVideoDecodeThread` - `lgAudioDecodeThread` `lgThreadStClient()` receives stream socket data through `lgOnSocketResv()` and registers `onStRecvCbk()` for stream receive stats. `lgVideoDecodeThread()` creates an H.264 FFmpeg decoder and dispatches packet payloads based on a byte at packet offset `0x0f`; decoded output eventually reaches the Java `lgStreamCbk()` / `onStreamCbk()` path as `lgFrameInFo`. FLD stream packets use a separate stream frame marker: ```text a5 a5 ef cc ``` `lgGetCompleteOneFrame()` searches for that marker, requires a `0x14` byte stream header, then expects one of these end markers after `Size + 0x14`: ```text e3 a5 cb cc a5 a5 ef cc ``` Bad/misaligned frames increment `BadFrameCount` and advance the ring-buffer read pointer. This is separate from the UAVSDK `0x93 0x01` JPEG-fragment transport. FLD decode path: - stream bytes are buffered by `lgOnSocketResv()` - complete stream frames are parsed by `lgGetCompleteOneFrame()` - video packets are queued into `VStList` - `lgVideoDecodeThread()` pops video packets - packet byte `0x0f` selects a decoder entry in `lxAVDcMap` - decoded H.264 output is copied to Java preview buffers by `lxGeneratePreview()` `lxGeneratePreview()` lays out preview YUV as planar Y, U, V: ```text Y plane: offset 0, size w*h U plane: offset w*h, size (w/2)*(h/2) V plane: offset w*h*5/4, size (w/2)*(h/2) ``` So `FrameType == 2` on the Java side is consistent with planar YUV420 output from the FLD FFmpeg decode path. Implication: the FLD backend is not just a raw `xx` packet UDP sender. It is a full P2P-ish native protocol with TCP JSON/check traffic, UDP serial forwarding, and a separate H.264-oriented stream decode pipeline. `lxPro.lgFrameInFo` is the common Java frame envelope for FLD and UAV video: | Field | Meaning | | --- | --- | | `FrameBuf` | Frame bytes. JPEG for `FrameType == 1`; YUV420p-like bytes for `FrameType == 2`. | | `FrameType` | App dispatches `1` as JPEG bitmap and `2` as YUV data. | | `W`, `H` | Frame dimensions. | | `FrameNum`, `FrameRate`, `Pts`, `DisplayMs`, `StType`, `UserId`, `RecTimes` | Metadata forwarded by native SDK. 
| ## Uav / FLOW Backend `defpackage.e00` wraps `com.example.sdk.UAVSDK`: ```java public class e00 extends tz implements UAVSDK.DataListener { public UAVSDK f = UAVSDK.getInstance(); } ``` Important methods: ```java public int g() { if (this.n) return 0; this.n = true; this.f.nativeStart(); return 0; } public int h(byte[] bArr) { this.f.nativeSendCtlMsg(bArr, bArr.length); return 0; } public int i() { this.q = 0L; this.h.f(); this.o = false; this.p = true; if (this.n) { this.n = false; this.f.nativeStop(); } return 0; } ``` `UAVSDK` loads: ```java System.loadLibrary("uav_lib"); System.loadLibrary("upcnn-cpu"); if (Build.VERSION.SDK_INT >= 24) { System.loadLibrary("upcnn-gpu"); } ``` `UAVSDK` exposes JNI methods including: ```java nativeCreate(); nativeDestroy(); nativeGetVersion(); nativeInit(); nativeSendCtlMsg(byte[] data, int len); nativeSendCustomMsg(byte[] data, int len); nativeSetCameraIndex(int index); nativeSetCameraRotate180(int value); nativeSetQPara(int q1, int q2, int t1, int t2); nativeStart(); nativeStop(); ``` ### Java UAV Lifecycle `e00.k()` initializes the native layer: ```java this.g.h(); this.f.nativeInit(); this.f.setDataListener(this); g0(); ``` `nativeInit()` caches JNI callback method IDs for: - `cbJpegFromNative([BJBI)V` - `cbCtlMsgFromNative([BJ)V` - `cbUpdateFromNative(II)V` - `cbTrackFromNative(I[F)V` The native SDK version logged by this build is: ```text V2.2.1 20240428 ``` `e00.g()` calls `nativeStart()` once. `e00.i()` clears connection state and calls `nativeStop()` if the native backend is running. The app's UAV liveness check is stricter than "socket exists": ```java public boolean u() { return this.o && System.currentTimeMillis() - this.q < 1500; } ``` `o` becomes true after a recognized MCU version/control message. `q` is updated by `handleJpeg()`, so the Java app considers the UAV backend connected only while JPEG frames are arriving. `e00.handleJpeg()` wraps native JPEG bytes into `lxPro.lgFrameInFo` through `d00.e()`: ```java lgframeinfo.FrameType = 1; lgframeinfo.FrameBuf = bArr; ``` The native callback's `quality` and `is_normal_pic` parameters are not copied into `lgFrameInFo` by the Java bridge, so `ActCtrl` sees the same frame shape for UAV JPEGs that it sees for FLD JPEGs. ### Java UAV Control Messages `e00.handleCtlMsg()` interprets native control callbacks: | Native payload | Java effect | | --- | --- | | `[1, 101, ...]` | MCU version string. Parses board/capabilities, marks UAV connected, and probes extra commands when supported. | | `[1, 103, ...]` | SSID/config response. Parses scene/camera information. | | `[1, 105]` | MCU busy. | | `[1, 106]` | MCU disconnected. Clears version/state and posts a disconnect. | `e00.l` parses version strings into: - board family, resolution token, firmware version, build date, and extra flags - flow-device markers: `FLOW...` / `(FLOW)...` - no-flow marker: `(NOFLOW)...` - extended command support when the parsed build date is `>= 20211218` - optional `I=` image/capability bitfield The Java custom-command helpers are string based: | Java method | Custom payload string | | --- | --- | | `Q()` | `cmd=2` | | `P()` | `cmd=3` | | `W()` | `cmd=4` | | `T(scene)` | `cmd=` | | `k0(imgfunc)` | `imgfunc=` | | `o(ssid)` | `>` | These are wrapped as custom native messages, not as joystick bytes. ### UAV Capability Bitfield `e00.j` parses the optional `I=` capability field from the MCU version string. 
It stores nibbles and flags as: | Bits | Java field | Observed app use | | --- | --- | --- | | `0..3` | `b` | Mirror/camera command value used by `E(true)` / main camera. | | `4..7` | `c` | Mirror/camera command value used by `E(false)` / flow camera. | | `8..11` | `d` | Scene/current camera-ish capability. | | `12..15` | `e` | Resolution selector for flow/secondary camera. | | `16..19` | `f` | Resolution selector for main/primary camera. | | `20..23` | `g` | Extra image capability nibble. | | `24..27` | `h` | Extra image capability nibble. | | `28` | `i` | Camera/mirror behavior flag. | | `29` | `j` | Camera/mirror behavior flag. | | `30` | `k` | Front/back camera support flag; `x()` returns true when `k == 1`. | `e00.z()` means the `I=` field exists. `e00.y()` means custom image commands are supported, scene/camera state is `1`, and front/back camera support is not already active. ## Native UAVSDK Findings `libuav_lib.so` is an ARM64 ELF shared object. It is not stripped and contains debug info, so Ghidra headless analysis is useful. Exports of interest: - `Java_com_example_sdk_UAVSDK_nativeStart` - `Java_com_example_sdk_UAVSDK_nativeStop` - `Java_com_example_sdk_UAVSDK_nativeSendCtlMsg` - `Java_com_example_sdk_UAVSDK_nativeSendCustomMsg` - `mjpeg_ndk_start` - `mjpeg_ndk_stop` - `mjpeg_ndk_command_send` - `mjpeg_ndk_custom_cmd_send` - `mjpeg_ndk_start_bl618` - `mjpeg_ndk_stop_bl618` - `mjpeg_ndk_command_send_bl618` - `mjpeg_ndk_custom_cmd_send_bl618` - `handle_mcu_msg_frag` - `handle_mcu_msg_frag_bl618` - `build_send_ack` - `build_send_ack_bl618` ### Native Start Behavior `nativeStart()` starts two internal native backends: ```c context = mjpeg_ndk_start("192.168.169.1", "8800", NULL); mjpeg_ndk_frame_callback_register(context, callback_jpeg, context); mjpeg_ndk_ctlmsg_cb_register(context, callback_ctlmsg, context); mjpeg_ndk_track_set_sdk(context, sdk); mjpeg_ndk_track_callback_register(context, callback_track, context); context_bl618 = mjpeg_ndk_start_bl618(NULL, "192.168.169.1", "8801"); mjpeg_ndk_frame_callback_register_bl618(context_bl618, callback_jpeg, context_bl618); mjpeg_ndk_ctlmsg_cb_register_bl618(context_bl618, callback_ctlmsg, context_bl618); mjpeg_ndk_track_set_sdk_bl618(context_bl618, sdk); mjpeg_ndk_track_callback_register_bl618(context_bl618, callback_track, context_bl618); ``` Implications: - Native UAVSDK probes both the normal path and the BL618 path. - Normal backend targets `192.168.169.1:8800`. - BL618 backend targets `192.168.169.1:8801`. - Turbodrone's `wifi_uav_uav` mode now mirrors this by sending RC and video startup/request traffic to both ports. ### Native Socket Setup Normal `create_instance()`: - creates an ACK/socket bound to `0.0.0.0` on an ephemeral local port - sets MCU target to `192.168.169.1:8800` - creates a side UDP socket using `NetworkSocket_Create(Network_UDP, 0x271a)` - `0x271a == 10010` BL618 `create_instance_bl618()`: - creates an ACK/socket bound to `0.0.0.0` on an ephemeral local port - sets MCU target to `192.168.169.1:8801` - creates a side UDP socket using `NetworkSocket_Create(Network_UDP, 0x271b)` - `0x271b == 10011` - logs the ephemeral ACK socket with `getsockname()` K417 captures showed video fragments arrive at the ephemeral ACK socket, not at `10010` or `10011`. 
The native build strings in the analyzed library are: - normal path: `BL608_V20240426` - BL618 path: `BL618_V20240426` ### Native Startup Packet The startup packet is: ```text ef 00 04 00 ``` Both native paths send this repeatedly during startup until receive state is established. Normal timer behavior: - before first frame/control receive, sends the startup packet about every `> 100 ms` - after receiving data, sends ACK/request packets about every `> 24 ms` - if no activity for about `3001 ms`, emits a two-byte callback and resets native receive state BL618 timer behavior: - before first receive, sends the startup packet about every `> 200 ms` - after receiving data, sends ACK/request packets about every `> 50 ms` - only sends an ACK packet when at least one ACK slot is present - has an extra device-config update path that packages SSID/channel/user bytes with the marker string `oGMcOfyZdIurm2kS` ### Native Command Queueing `nativeSendCtlMsg(byte[], len)` does not immediately `sendto()` the joystick packet. It copies commands shorter than `65` bytes into each backend's `user_msg_ctx`: ```c mjpeg_ndk_command_send(context, data, len, &seq); mjpeg_ndk_command_send_bl618(context_bl618, data, len, &seq); ``` Normal `mjpeg_ndk_command_send()` accepts commands only when: - context is non-null and alive - command length is `<= 64` - backend is enabled - `has_received != 0` BL618 is similar but allows `<= 128` bytes at the lower native layer. The JNI wrapper still limits `nativeSendCtlMsg()` to `< 65` bytes. The queued command is embedded into the next ACK/request packet: - `user_msg_ctx.seq` - `user_msg_ctx.length` - up to `64` bytes of user command data `nativeSendCustomMsg()` is different. It wraps the payload in an immediate custom packet: ```text ef 20 ``` but it still requires the backend to be enabled and to have received data. There are native JNI declarations for camera index, rotation, and quality parameters: ```java nativeSetCameraIndex(int index); nativeSetCameraRotate180(int value); nativeSetQPara(int q1, int q2, int t1, int t2); ``` Ghidra export of these functions confirms: - `nativeSetCameraIndex(index)` fans out to both BL608 and BL618: `mjpeg_ndk_set_active_camera_index(context, index)` and `mjpeg_ndk_set_active_camera_index_bl618(context_bl618, index)`. - `nativeSetCameraRotate180(value)` fans out to both tracking pipelines: `mjpeg_ndk_track_set_rgb_rotate_180*()`. - `nativeSetQPara(q1, q2, t1, t2)` updates only the normal/BL608 context in this exported function: `mjpeg_ndk_set_QPara(context, ...)`. Their effects are visible in exported state: - `build_send_ack*()` places quality bytes and `active_cam_idx` into every ACK/request packet. - `handle_mcu_msg_frag()` uses `active_cam_idx` to choose whether main-camera or flow-camera status is passed to the JPEG callback. - Java calls `nativeSetCameraIndex(0)` on stream start for UAV and `nativeSetCameraIndex(z ? 1 : 0)` for camera switching. `handle_mcu_msg_ack_bl618()` is present, but decompiles to a no-op apart from stack checking. BL618 type `0x02` ACK messages are recognized by the receive thread but ignored in this build. `callback_jpeg()` has one important backend-selection behavior: once either the normal context or BL618 context produces JPEG frames, it stops the other context. This is how the dual-probe `nativeStart()` collapses to the working backend at runtime. `callback_ctlmsg()` attaches to the JVM, copies native bytes into a Java `byte[]`, and calls `cbCtlMsgFromNative`. 
The native callback carries the MCU sequence; Java's decompiled `UAVSDK` then forwards the byte array to `e00.handleCtlMsg()`. ## Video Packet Format K417 captures and native `handle_mcu_msg_frag*()` agree on this layout: | Offset | Size | Meaning | | --- | ---: | --- | | `0` | 1 | `0x93` | | `1` | 1 | message type; `0x01` means JPEG fragment | | `2` | 2 | total packet length, little-endian | | `4` | 4 | message sequence / id | | `8` | 8 | image sequence | | `16` | 8 | observed duplicate/secondary sequence; K417 captures match image sequence, exported native frag handler does not read this field | | `24` | 8 | last-finished / acked-ish sequence field | | `32` | 4 | fragment index | | `36` | 4 | fragment count | | `40` | 4 | frame body length | | `44` | 2 | width | | `46` | 2 | height | | `48` | 1 | quality | | `52` | 1 | main camera status | | `53` | 1 | flow camera status | | `56+` | var | JPEG payload fragment | Observed K417 traffic: - drone sends from `192.168.169.1:1234` - PC receives on the ephemeral socket used to send ACK/request packets - typical packet length: `1088` bytes - quality: `50` - about `10-19` fragments per frame - about `15 fps` in a working capture Short capture validation from `captures/k417_wifi_uav_fld.pcapng` and `captures/k417_wifi_uav_live_after_parser.pcapng`: - outbound flow: `192.168.169.2: -> 192.168.169.1:8800` - inbound flow: `192.168.169.1:1234 -> 192.168.169.2:` - startup packets: `ef 00 04 00` - request packets alternate between `88` and `124` byte `ef 02` packets - `ef 20` custom packets were not present in these short captures - inbound fragments were `0x93 0x01`, usually `1080` bytes each - observed frame header values: `640x360`, quality `50`, fragment count `20` - the 124-byte request tail decodes as two ACK slots, for example: ```text seq=9 status=1 len=20 bitmap=ff ff ff ff seq=9 status=3 len=16 ``` Those slots match the current Turbodrone fallback/request behavior and the native ACK slot layout. ## ACK / Request Packet Format Native `build_send_ack()` and `build_send_ack_bl618()` emit this broad shape: | Offset | Size | Meaning | | --- | ---: | --- | | `0` | 1 | `0xef` | | `1` | 1 | `0x02` | | `2` | 2 | packet length, little-endian | | `4` | 4 | constant `02 02 00 01` | | `8` | 1 | ACK slot count | | `9` | 3 | padding | | `12` | 4 | queued user-command sequence | | `16` | 2 | queued user-command length | | `18` | 64 | queued user-command data | | `82` | 1 | quality1 | | `83` | 1 | quality2 | | `84` | 1 | q_threshold1 | | `85` | 1 | q_threshold2 | | `86` | 1 | active camera index | | `87` | 1 | padding | | `88+` | var | ACK slot records | Each ACK slot: | Offset | Size | Meaning | | --- | ---: | --- | | `0` | 8 | image sequence | | `8` | 4 | status | | `12` | 4 | slot record length | | `16+` | var | optional fragment ACK bitmap | Status values inferred from native: - `0`: receiving / partial - `1`: complete / delivered - `2`: dropped - `3`: future/request slot Turbodrone now generates native-shaped ACK packets and tracks in-flight frame state using `WifiUavAckState`. 
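The two tables above translate almost directly into code. The sketch below is illustrative only; the real logic lives in `backend/utils/wifi_uav_packets.py` and `backend/utils/wifi_uav_ack_state.py` and may differ in detail. The little-endian encoding of the ACK slot fields is an assumption carried over from every other multi-byte field observed in this protocol.

```python
# Illustrative sketch of the fragment and ACK/request layouts documented above;
# not the actual backend/utils/wifi_uav_packets.py implementation.
import struct
from typing import Optional

def parse_fragment(pkt: bytes) -> Optional[dict]:
    """Unpack a 0x93 0x01 JPEG fragment header at the documented offsets."""
    if len(pkt) < 56 or pkt[0] != 0x93 or pkt[1] != 0x01:
        return None
    if struct.unpack_from("<H", pkt, 2)[0] != len(pkt):
        return None                                    # length field must match datagram size
    width, height = struct.unpack_from("<HH", pkt, 44)
    return {
        "msg_seq":    struct.unpack_from("<I", pkt, 4)[0],
        "image_seq":  struct.unpack_from("<Q", pkt, 8)[0],
        "frag_index": struct.unpack_from("<I", pkt, 32)[0],
        "frag_count": struct.unpack_from("<I", pkt, 36)[0],
        "body_len":   struct.unpack_from("<I", pkt, 40)[0],
        "width": width, "height": height, "quality": pkt[48],
        "payload":    pkt[56:],
    }

def build_slot(image_seq: int, status: int, bitmap: bytes = b"") -> bytes:
    """One ACK slot record: image sequence, status, record length, optional bitmap."""
    return struct.pack("<QII", image_seq, status, 16 + len(bitmap)) + bitmap

def build_request(slots: list, user_cmd: bytes = b"", user_seq: int = 0,
                  quality=(50, 50), q_thresh=(0, 0), cam_idx: int = 0) -> bytes:
    """Assemble an ef 02 ACK/request packet with an embedded queued user command."""
    assert len(user_cmd) <= 64
    body = bytearray(88)                               # fixed header + 64-byte command area
    body[0:2] = b"\xef\x02"
    body[4:8] = b"\x02\x02\x00\x01"
    body[8] = len(slots)
    struct.pack_into("<I", body, 12, user_seq)
    struct.pack_into("<H", body, 16, len(user_cmd))
    body[18:18 + len(user_cmd)] = user_cmd
    body[82], body[83] = quality
    body[84], body[85] = q_thresh
    body[86] = cam_idx
    for slot in slots:
        body += slot
    struct.pack_into("<H", body, 2, len(body))         # total packet length, little-endian
    return bytes(body)
```

With these helpers, the 88-byte request observed in the K417 capture corresponds to zero slots, and the 124-byte request corresponds to the two decoded slots above, roughly `build_request([build_slot(9, 1, b"\xff" * 4), build_slot(9, 3)])` (88 + 20 + 16 = 124 bytes).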
### Native Frame State Machine The native receiver uses four frame slots: ```c slot_idx = (image_sequence + 3) & 3; ``` Internal native slot statuses are distinct from the ACK status values: | Internal status | Meaning | ACK status emitted | | --- | --- | --- | | `0` | empty / unused | none or future slot | | `1` | receiving fragments | `0` | | `2` | all fragments received, ready to deliver | `1` | | `3` | delivered | `1` | | `4` | dropped / skipped | `2` | For a new frame, `handle_mcu_msg_frag()`: - updates `last_finished` from the header's offset `24` field - updates `max_recv_seq` - initializes the slot's fragment count, body length, width, height, and quality - records main/flow camera status from offsets `52` and `53` - initializes a fragment ACK bitmap and sets the received fragment bit - copies payload from offset `56` into the frame buffer When all fragments are present, the frame moves to the native image queue. The queue has three entries but keeps at most two queued frames; if it is full, the oldest queued frame is dropped before adding the next one. The native `thread_ctl` then drains queued frames into Java/native callbacks: - normal path sleeps roughly `38 ms` per loop, with an extra `19 ms` wait when no frame is queued - BL618 path sleeps roughly `16 ms` per loop - both paths also call `UPUAVSDK_UpdateJPEGData()` for the tracking pipeline BL618 adds a frame-drop/recovery path that the normal BL608 path does not show in the exported decompilation. If no frame was delivered and the image queue is empty, BL618 can mark an in-progress frame at `seq_to_deliver` as dropped (`status = 4`) so later complete frames can advance. ### Native Control Callback Format Native `thread_recv()` accepts only packets with: - byte `0`: `0x93` - bytes `2..3`: packet length matching the datagram size Message type `0x01` is JPEG fragment data. Message type `0x04` is MCU/control data. For MCU/control packets, native code forwards: - payload pointer: `data + 0x0c` - payload length: little-endian `uint16` at offset `8` - sequence: little-endian `uint32` at offset `4` There are special cases for photo/record callbacks with 9-byte payloads shaped like `` or `` plus XOR checksum. These are collapsed into two-byte Java callbacks so the Java layer sees `UAV_Flight_Ctl_Picture` or `UAV_Flight_Ctl_Record`. ## Control Packet Semantics The app has two control builders: - `xx.f()` short 8-byte layout - `xx.g()` extended 20-byte layout Turbodrone embeds the extended layout (`66 14 ...`) in the longer `ef 02 ...` packet wrapper. In `xx.g()`, takeoff and land share the same one-key bit: ```java bArr[6] = (takeoff ? 1 : 0) | (land ? 1 : 0) | (stop ? 2 : 0) | (gyro ? 4 : 0) | (roll ? 8 : 0) | ((ptz & 3) << 6); ``` Meaning for Turbodrone's current WiFi-UAV extended command layout: | Action | Byte 6 bit | | --- | ---: | | takeoff / land one-key action | `0x01` | | emergency stop | `0x02` | | gyro/calibration | `0x04` | | flip/roll | `0x08` | K417 testing confirmed: - app land button descends gracefully - Turbodrone previously mapped land to `0x02`, causing motor cutoff - Turbodrone now maps both takeoff and land to `0x01`, and e-stop to `0x02` ### App Control Loop Details `ActCtrl.P1()` is the app's recurring RC send loop. It runs only when: ```java this.W.u() && this.W.f() == 1 ``` For the UAV backend, `f()` always returns `1`. For the FLD backend, `f()` is `lxSigPro.AppCtrlSt()`. 
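To make the corrected one-key semantics from the Control Packet Semantics section concrete, here is a hedged Python sketch of the byte-6 flag assembly for the extended command. The constant and argument names are hypothetical and are not claimed to match Turbodrone's model classes; the bit values follow the decompiled `xx.g()` layout above.

```python
# Illustrative byte-6 assembly for the extended 66 14 ... command, following
# the decompiled xx.g() bit layout. Names are hypothetical, not the fields
# used in backend/models/*.py.
ONE_KEY_TAKEOFF_LAND = 0x01   # takeoff and land share this bit
EMERGENCY_STOP       = 0x02   # motor cutoff; previously (incorrectly) mapped to land
GYRO_CALIBRATE       = 0x04
FLIP_ROLL            = 0x08

def extended_byte6(takeoff: bool, land: bool, estop: bool,
                   gyro: bool, flip: bool, ptz_state: int = 0) -> int:
    flags = 0
    if takeoff or land:              # one-key action: firmware decides based on flight state
        flags |= ONE_KEY_TAKEOFF_LAND
    if estop:
        flags |= EMERGENCY_STOP
    if gyro:
        flags |= GYRO_CALIBRATE
    if flip:
        flags |= FLIP_ROLL
    flags |= (ptz_state & 0x3) << 6  # PTZ / camera-tilt state in bits 6..7
    return flags
```

A byte built this way would sit at offset 6 of the extended 20-byte command (see the RC Packet Layouts below), which in turn rides in the queued user-command area of the `ef 02` request packet sketched earlier.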
When the on/off control is disabled, the app sends a neutral FLD-style packet a few times: ```java { 0x66, 0x80, 0x80, 0x80, 0x80, 0x40, 0x40, 0x99 } ``` For the UAV backend, after having sent active controls, it also sends an eight-byte all-zero packet once when turning controls off. During active control, `ActCtrl` fills `xx` from the UI: | `xx` field | Source | | --- | --- | | `a` | `getTrSliderYpercent()` or hand/track override | | `b` | `getTrSliderXpercent()` or hand/track override | | `c` | `getEaSliderXpercent() + getPathPercentX()` or trim override | | `d` | `getEaSliderYpercent() + getPathPercentY()` or trim override | | `e/f/g/h` | trim/tuning sliders | | `i` | up / one-key takeoff flag | | `j` | down / one-key land flag | | `k` | stop flag | | `l` | roll/flip flag | | `m` | back flag | | `n` | headless flag | | `o` | gyro/check flag | | `p` | high/low mode flag | | `q` | hand-flow active | | `r` | tracking active | | `s` | speed index | | `v` | PTZ up/down state; likely camera tilt servo in practice | The app chooses the packet shape with: ```java int extended = (this.W.d0() && this.W.L()) ? 1 : 0; byte[] packet = extended != 0 ? this.X.g() : this.X.f(); ``` So `xx.g()` is used only for the UAV backend when the version string says this is a FLOW device. Otherwise the app uses the short `xx.f()` packet. Both packet builders use an XOR checksum: - short packet: XOR bytes `1..5` into byte `6`, terminator `0x99` - extended packet: XOR bytes `2..17` into byte `18`, terminator `0x99` The app also parses a serial/status packet in `xx.j()`: ```text 88 00 ... 55 ``` The status flags byte exposes speed, RC-control support, PTZ support, headless/photo/video/light flags, and is forwarded through `xx.a`. ### RC Packet Layouts `xx.f()` short packet: | Offset | Meaning | | --- | --- | | `0` | Constant `0x66`. | | `1` | Rudder / yaw axis: `h(c, g, s, true)`. | | `2` | Elevator / pitch axis: `h(d, h, s, true)`. | | `3` | Throttle axis: `h(a, e, 2, p)`. | | `4` | Aileron / roll axis: `h(b, f, s, true)`. | | `5` | Button bitfield. | | `6` | XOR checksum over bytes `1..5`. | | `7` | Terminator `0x99`. | Short packet byte `5`: | Bit | Java source | Meaning | | ---: | --- | --- | | `0x01` | `i` / `getUpBtnState()` | Takeoff/up. | | `0x02` | `j` / `getDownBtnState()` | Land/down. | | `0x04` | `k` / `getStopBtnState()` | Stop. | | `0x08` | `l` / `getRollBtnState()` | Flip/roll. | | `0x10` | `n` / `getHeadlessBtnState()` | Headless. | | `0x20` | `m` / `getBackBtnState()` | Back/return. | | `0x40` | `p` / `getHlBtnState()` | High/low or throttle-center mode. | | `0x80` | `o` / `getGyoCheckBtnState()` | Gyro/calibration/check. | `xx.g()` extended packet: | Offset | Meaning | | --- | --- | | `0` | Constant `0x66`. | | `1` | Constant length `0x14`. | | `2` | Rudder / yaw axis: `h(c, g, s, true)`. | | `3` | Elevator / pitch axis: `h(d, h, s, true)`. | | `4` | Throttle axis: `h(a, e, 2, p)`. | | `5` | Aileron / roll axis: `h(b, f, s, true)`. | | `6` | Primary button/PTZ bitfield. | | `7` | Secondary mode bitfield. | | `8..17` | Zero in this app build. | | `18` | XOR checksum over bytes `2..17`. | | `19` | Terminator `0x99`. | Extended packet byte `6`: | Bits | Java source | Meaning | | ---: | --- | --- | | `0x01` | `i` or `j` | Shared one-key takeoff/land action. | | `0x02` | `k` | Emergency stop. | | `0x04` | `o` | Gyro/calibration/check. | | `0x08` | `l` | Flip/roll. | | `0xc0` | `v << 6` | PTZ up/down state; likely tilt up/down. 
| Extended packet byte `7`: | Bit | Java source | Meaning | | ---: | --- | --- | | `0x01` | `n` | Headless. | | `0x02` | `p` | High/low or throttle-center mode. | | `0x04` | `r` | Person tracking active. | | `0x80` | `q` | Hand-flow active. | Axis scaling uses `xx.h(float f, float trim, int speed, boolean centered)`. Speed indexes map to: | Speed index | Scale | | ---: | ---: | | `0` | `0.30` | | `1` | `0.60` | | `2` | `1.00` | | `3` | `0.25` | For centered axes, neutral is approximately `127.5`, stick movement is `(f - 0.5) * scale * 255`, and trim adds `(trim - 0.5) * 63`. For non-centered throttle mode, the app uses `f * scale * 255` plus the same trim term. In practice `xx.f()` and `xx.g()` pass `p` as the throttle `centered` argument, so high/low mode also changes throttle centering behavior. ### UI Button To Command Map `lxCtrlView` switches between `lxFldCtrlView` and `lxUavCtrlView`. Both expose the same `lxBasicView` state getters, but their visible controls differ. `lxFldCtrlView` mappings: | UI control | Getter / field | RC effect | | --- | --- | --- | | `takeoff_nor` button | `getUpBtnState()` / `m` | Short packet bit `0x01`. | | `landing_nor` button | `getDownBtnState()` / `n` | Short packet bit `0x02`. | | `Stop` button | `getStopBtnState()` / `o` | Short packet bit `0x04`. | | `roll360` button | `getRollBtnState()` / `l` | Short packet bit `0x08`; XY roll direction can override axis byte. | | `nohead` button | `getHeadlessBtnState()` / `F` | Short packet bit `0x10`. | | `hl` button | `getHlBtnState()` / `w` | Short packet bit `0x40`; also hides takeoff/land/stop/roll when off. | | `checkout` button | `getGyoCheckBtnState()` / `G` | Short packet bit `0x80`. | | `record` button | `getRecordBtnState()` / `s` | Local recording state; also `ActCtrl.f()` starts/stops local record. | | `photo` button | calls `ActCtrl.q()` | Triggers local snapshot path. | | `speed` button | `getSpeedBtnState()` / `M` | Cycles speed index `0..2`. | | `path` / gyro / mirror / VR | view state | Affects axes, rendering, or mode bits where applicable. | `lxUavCtrlView` mappings: | UI control | Getter / field | RC/custom effect | | --- | --- | --- | | `btn_flight_mode` | `getOnoffBtnState()` / `r` | Enables periodic RC sending. | | takeoff/land shared button | `getUpBtnState()` and `getDownBtnState()` / `S` | Extended packet shared one-key bit `0x01`. | | `btn_stop` | `getStopBtnState()` / `s` | Extended packet e-stop bit `0x02`. | | `roll360` | `getRollBtnState()` / `t` | Extended packet roll bit `0x08`; XY roll direction can override axis bytes. | | `btn_gyocheck` | `getGyoCheckBtnState()` / `I` | Extended packet gyro bit `0x04`. | | `btn_headless_mode` | `getHeadlessBtnState()` / `J` | Extended packet byte `7` bit `0x01`. | | `btn_body` / hand-flow | `getHandFlowState()` / `D` | Extended packet byte `7` bit `0x80`; starts/stops palm tracking through `nativeStartPalmTrack()` / `nativeStopPalmTrack()`. | | person-flow button | `r` flag | Extended packet byte `7` bit `0x04`; starts person tracking through `nativeStartPersonTrack(x,y)`. | | PTZ slider | `getPtzUpDnState()` / `B` | Extended packet byte `6` bits `6..7`: `1` down/one tilt direction, `2` up/other tilt direction, `0` neutral. | | `btn_hvcam` | `getHvCamBtnState()` / `v` | Triggers custom command sequence through `W()`, `P()`, and `Q()` depending on capability state. | | `btn_scene` | scene state / `w.j` | Calls `T(scene)` when scene capability is known, otherwise probes with `Q()`. 
| | front/back camera button | `setFbCamBtnState()` | Calls `ActCtrl.R()`, which uses `W.W()` when supported. | | camera switch | `getSwCamBtnState()` / `k` | Calls `e00.t(z)`, which sets `nativeSetCameraIndex(z ? 1 : 0)`. | | record/photo/speed/mirror/VR/scale | view state or `ActCtrl` callbacks | Local record/snapshot/render controls plus possible SDK custom commands. | PTZ / Camera Tilt Note: ```java getPtzUpDnState() == 0 -> neutral getPtzUpDnState() == 1 -> extended byte 6 bits 6..7 = 01b getPtzUpDnState() == 2 -> extended byte 6 bits 6..7 = 10b ``` The app labels this as PTZ, but the decompiled UI only exposes an up/down state. Based on observed toy drone hardware, this should be treated first as a camera tilt servo command, not full pan/tilt/zoom: - no pan axis has been identified in the Java UI or native command path - no optical zoom command has been identified - any "zoom" seen in the app is likely software/render zoom through `lxEglView` scaling, not a drone-side camera command - the likely hardware action is camera pitch/tilt up or down The app only samples this tilt/PTZ value into `xx.v`; it does not send a separate custom PTZ message. Therefore Turbodrone tilt support should be implemented as a persistent or momentary field in the extended `66 14 ...` RC command for UAV/FLOW-style devices. FLD views report no PTZ state in the decompiled app, even though some hardware may expose camera tilt through other SDK-specific paths. The click handlers call `lxBtn.e()`, which toggles the selected state and records the selection timestamp. The main RC loop then samples the selected states on its next `P1()` tick. ### Photo And Record Paths There are two separate concepts: - local phone snapshot/recording through `lxEglView` - drone-side SD capture/record commands through the backend SDK Local snapshot: ```text ActCtrl.g2() -> choose output file under lxConfig.ePhotoFolder -> choose resolution through d00.Z(...) -> lxEglView.snapshot(width, height, 1, jpg_path) -> W.l(1) ``` For `Fld`, `W.l(1)` maps to `wz.l(1)` and then `lxSigPro.SdCapture(1)`. For `Uav`, `e00.l()` returns `-1`, so the local snapshot is the effective behavior unless a native photo callback arrives. Local recording: ```text ActCtrl.c2() -> W.m(true) -> lxEglView.startARecord(width, height, mime_or_profile, mp4_path, false, cb) -> lxEglView.snapshot(..., mp4_path + ".pic") ``` `lxEglView.startARecord()` creates a Java hardware encoder (`zz`) and passes the encoder surface to native `lxEglView.ntvStartRec()`. If the preferred resolution fails with `-2`, the app retries `4096x2160`, `3840x2160`, `2560x1440`, `1920x1080`, then `1280x720`. For `Fld`, `W.m(true/false)` maps to `lxSigPro.SdRecord(boolean)`. For `Uav`, `e00.m()` returns `-1`, so local MP4 recording is the effective behavior. Backend callbacks can also toggle UI photo/record state: - FLD JSON config callback keys `lgDevCbkPic` and `lgDevCbkRec`. - UAV serial/control callbacks `[0, 113]` for picture and `[0, 114]` for record. - UAV nested command `[0, 102, ..., status]` with status `2` for picture and `4` for record. ### Video Feed Rendering Path All backend video frames converge at `ActCtrl.V(tz, lxPro.lgFrameInFo)`. For `FrameType == 1`: ```text FrameBuf JPEG bytes -> BitmapFactory.decodeByteArray(...) 
-> lxEglView.set(Bitmap) -> lxEglView.requestRender() ``` The app may rotate JPEG frames before rendering: - small frames (`min(width,height) < 320`) from a backend reporting `tz.v() == true` are rotated `-90` unless BRoll rendering is active - custom UAV camera/mirror state can add another `+90` - mirror state is synchronized through `a2()` For `FrameType == 2`: ```text FrameBuf YUV bytes -> lxEglView.set(byte[], width, height) -> native ntvSetYuv420p(...) -> requestRender() ``` `lxEglView` is a native OpenGL surface wrapper: - creates native state with `ntvCreate(LXLIB_SIGKEY)` - sends surface lifecycle into `ntvSurfaceCreated/Changed/Destroyed` - auto-renders from an internal thread by calling `ntvRequestRender()` - draws bitmap frames through `ntvSetBitmap()` - draws YUV frames through `ntvSetYuv420p()` or `ntvSetYuv()` - handles mirror, rotate, roll, scale, VR/split-screen, filters, snapshots, and recording in native `lxEglView` This means Turbodrone should treat decoded frame assembly separately from app rendering: the app's transport delivers JPEG/YUV frame payloads, and `lxEglView` handles presentation, local capture, and MP4 encoding. ## K417 Notes Observed K417 SSID: ```text DRONE_4C8172 ``` This prefix is not present in the decompiled app's dispatcher, but testing shows it is compatible with the `fld`-style wire path: ```env DRONE_TYPE=wifi_uav_fld ``` Working capture summary: - inbound: `192.168.169.1:1234 -> 192.168.169.2:` - outbound: ` -> 192.168.169.1:8800` - `4527` video packets in about `20s` - `295` frame sequences - `294` complete frames - approximately `15 fps` Windows Firewall can block inbound video because the drone replies from UDP source port `1234`, not from `8800`. Packet capture may show traffic even if Python does not receive it. During testing, disabling firewall allowed Python to receive and assemble frames. ## Turbodrone Implementation State Current related files: - `backend/protocols/wifi_uav_rc_protocol_adapter.py` - `backend/protocols/wifi_uav_video_protocol.py` - `backend/utils/wifi_uav_packets.py` - `backend/utils/wifi_uav_ack_state.py` - `backend/utils/wifi_uav_variants.py` Implemented: - `DRONE_TYPE=wifi_uav`, `wifi_uav_fld`, `wifi_uav_uav` - best-effort SSID mapping - `DRONE_` maps to `fld` - `wifi_uav_uav` probes `8800` and `8801` - internal WiFi-UAV capability profiles for `fld` and `uav` - corrected WiFi-UAV extended land/e-stop semantics - WiFi-UAV speed tier field (`speed_index`) using the app's speed scale table - native-shaped ACK/request packet builder - native fragment parser - ACK state tracking - duplicate delivered-frame guard - startup/request burst moved after RX thread startup Recommended implementation cleanup: - Split WiFi-UAV RC packet builders by variant instead of using the extended UAV/FLOW packet shape for every WiFi-UAV device. - Keep `wifi_uav_uav` / FLOW on the native-shaped `ef 02` ACK/request packet with embedded extended `66 14 ...` command. - Treat `wifi_uav_fld` / K417 as the empirically working compatibility path, but document that the stock app's native FLD stack actually wraps RC bytes in the `f5 e2 e3 cb ... e3 a5 cb cc` `eDgSrlOspf` envelope and targets `192.168.100.1:18881`. - Add explicit model fields for: - `flip_flag` - `camera_tilt_state` / `ptz_state` (`0`, `1`, `2`) with the UI wording biased toward "tilt" unless hardware proves full PTZ - `camera_index` or camera-switch command - Add frontend controls for `speed_index` and camera tilt once the hardware behavior is tested. 
- Build the extended command bytes from the documented `xx.g()` structure rather than from fixed packet constants, then pass that command into `build_native_ack_packet()`. - Consider a capability object per WiFi-UAV variant so the frontend can show: - one-key takeoff/land vs independent takeoff/land - camera tilt available or not - camera-switch available or not - FLOW/UAV dual-port probing vs FLD/K417 single-port behavior Remaining possible work: - Full native four-slot state machine parity, if needed. - More `wifi_uav_uav` / FLOW hardware testing, especially PTZ and camera switch behavior. - Proper Windows firewall documentation or setup helper. - Frontend capability refinement: WiFi-UAV takeoff/land is really one-key takeoff/land, not independent commands. ## Completeness Assessment The app-level map is now strong for: - Java UI-to-RC mapping. - `xx.f()` and `xx.g()` command byte layouts. - `d00` backend dispatch and lifecycle. - UAVSDK / `libuav_lib.so` startup, command queueing, ACK/request, fragment parsing, and Java callback flow. - App rendering, local snapshot, and local recording behavior. - K417 `DRONE_*` capture behavior for the FLD-compatible wire path. The remaining unknowns require artifacts not currently present in this workspace, or another hardware/capture pass: | Gap | Why it matters | What would close it | | --- | --- | --- | | Deeper `lxPro` packet decode functions | `lgPackCmdSend()` and top-level parser are mapped, but individual `DFuntc` decoders behind `lxCmdMaps` still need function-pointer/xref work. | Export/decompile the `lxCmdMaps` targets or inspect them in the saved Ghidra project. | | Native rodata constants referenced as `DAT_*` in Ghidra output | Startup/timeout bytes are known from captures for `ef 00 04 00`, but not every timeout/status string/constant is byte-exact from the export snippets. | Ghidra data export or direct binary string/rodata inspection. | | Real FLOW/UAV hardware capture | Current hardware captures are K417/`DRONE_*` FLD-compatible. | PCAP from `FLOW_` or `FlOW_` hardware, especially ports `8800` and `8801`. | | Takeoff vs land UI distinction in `lxUavCtrlView` | Decompiled Java shows both `getUpBtnState()` and `getDownBtnState()` returning the same field; behavior matches one-key action, but original intent is ambiguous. | Runtime test or cleaner decompilation/original unobfuscated source. | So the current map is complete enough to implement and debug the observed K417 path, model the UAVSDK transport shape, and understand the major FLD/lxPro native architecture. The remaining gaps are narrower: exact FLD decoder callbacks behind `lxCmdMaps`, byte-exact rodata constants, and FLOW hardware captures. ================================================ FILE: experimental/README.md ================================================ # Experimental This directory contains early-stage support for drones that are not yet integrated into the main Turbodrone architecture. Each subdirectory corresponds to a mobile app and contains control and video protocols. ## Drones and Apps | App Name | Supported Drones | Notes | |-----------------------|------------------|-------| | RC_UFO | E88 pro | PyQt5 app for flying it with a computer | ================================================ FILE: experimental/test_e88pro.py ================================================ ''' Author: jithinbp@gmail.com For the E88pro drone which is under 10$ Features: One touch takeoff, land, flip(all ways), fly around, view video feed. 
Finicky features: Switching camera feed , one touch land(falls out of the sky sometimes while landing. might be a battery thing) Controls Z : one touch takeoff X : land (careful) C : Calibrate gyro W/S : throttle A/D : YAW UP/DOWN: PITCH LEFT/RIGHT: roll F: FLIP (combine with up/down/left/right to specify direction of flip H: toggle headless mode 1,2 : camera selection. barely works. ''' import sys import socket import cv2 import time # Import time for delays from PyQt5.QtWidgets import QApplication, QMainWindow, QLabel, QPushButton, QVBoxLayout, QWidget, QMessageBox from PyQt5.QtGui import QImage, QPixmap from PyQt5.QtCore import QThread, pyqtSignal, pyqtSlot, Qt, QByteArray, QTimer # --- Constants --- RTSP_URL = "rtsp://192.168.1.1:7070/webcam" UDP_IP = "192.168.1.1" # This is the target IP for sending commands UDP_SEND_PORT = 7099 # Port for sending commands UDP_LISTEN_PORT = 7099 # Port for listening to responses (assuming same port for simplicity, adjust if different) UDP_UP_COMMAND = b'\x06\x01' # Byte sequence for UP command UDP_DOWN_COMMAND = b'\x06\x02' # Byte sequence for DOWN command UDP_BUFFER_SIZE = 1024 # Buffer size for UDP receive STREAM_REINITIALIZE_DELAY_SEC = 2 # Delay before attempting to re-open stream after disruption # --- Video Stream Thread --- class VideoStreamThread(QThread): """ A QThread subclass to handle video capture from an RTSP stream. It emits a QPixmap signal for each new frame. """ change_pixmap_signal = pyqtSignal(QPixmap) error_signal = pyqtSignal(str) def __init__(self, rtsp_url): super().__init__() self._rtsp_url = rtsp_url self._run_flag = True self._reinitialize_flag = False # New flag to trigger stream re-initialization self._cap = None self._paused = False def run(self): """ Main loop for video capture. Reads frames and emits them as QPixmap. Handles stream opening, reading, and re-initialization. """ while self._run_flag: if self._paused: continue if self._reinitialize_flag: # If re-initialization is requested, release current capture and prepare to re-open print("Re-initializing video stream...") if self._cap: self._cap.release() self._cap = None self.msleep(int(STREAM_REINITIALIZE_DELAY_SEC * 1000)) # Wait before re-opening self._reinitialize_flag = False # Clear the flag if not self._cap or not self._cap.isOpened(): print(f"Attempting to open RTSP stream: {self._rtsp_url}") self._cap = cv2.VideoCapture(self._rtsp_url) if not self._cap.isOpened(): self.error_signal.emit(f"Error: Could not open RTSP stream at {self._rtsp_url}. " "Please check the URL and network connection.") self.msleep(1000) # Wait before retrying to open continue # Try again in the next loop iteration print("RTSP stream opened successfully.") # Attempt to read a frame ret, cv_img = self._cap.read() if ret: # Convert OpenCV image to QPixmap qt_format = QImage.Format_RGB888 if len(cv_img.shape) == 3 and cv_img.shape[2] == 3: # Check if it's a color image rgb_image = cv2.cvtColor(cv_img, cv2.COLOR_BGR2RGB) # Rotate the image 90 degrees clockwise rgb_image = cv2.rotate(rgb_image, cv2.ROTATE_90_CLOCKWISE) h, w, ch = rgb_image.shape bytes_per_line = ch * w convert_to_qt_format = QImage(rgb_image.data, w, h, bytes_per_line, qt_format) p = convert_to_qt_format.scaled(640, 480, Qt.KeepAspectRatio) # Scale for display self.change_pixmap_signal.emit(QPixmap.fromImage(p)) else: print("Warning: Received non-RGB image, skipping display.") else: # If reading fails, assume stream disruption and trigger re-initialization print("Warning: Failed to read frame from RTSP stream. 
Triggering re-initialization.") self.error_signal.emit(f"Stream disrupted. Attempting to re-establish connection to {self._rtsp_url}.") self._reinitialize_flag = True # Set flag to re-initialize in next loop iteration if self._cap: # Release immediately to avoid blocking self._cap.release() self._cap = None self.msleep(30) # Small delay to prevent busy-waiting and reduce CPU usage print("Video stream thread stopping.") if self._cap: self._cap.release() print("RTSP stream released.") def stop(self): """Stops the video stream thread gracefully.""" self._run_flag = False self.wait() # Wait for the thread to finish its execution @pyqtSlot() def reinitialize_stream(self): """ Slot to trigger a full re-initialization of the video stream. This is called when a known disruption (like a camera switch) occurs. """ self._reinitialize_flag = True print("Re-initialization requested for video stream.") # --- UDP Listener Thread --- class UDPListenerThread(QThread): """ A QThread subclass to listen for incoming UDP data. It emits a signal with the received data. """ data_received_signal = pyqtSignal(bytes, tuple) # Signal to emit data and sender address error_signal = pyqtSignal(str) def __init__(self, port, buffer_size): super().__init__() self._listen_ip = "" # We bind to "" (0.0.0.0) to listen on all available local interfaces self._port = port self._buffer_size = buffer_size self._run_flag = True self._sock = None def run(self): """ Main loop for UDP listening. Receives data and emits it. """ try: self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # UDP socket self._sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) # Allow reusing the address # Bind to all available network interfaces on the specified port self._sock.bind((self._listen_ip, self._port)) self._sock.settimeout(1.0) # Set a timeout to allow graceful shutdown print(f"UDP listener started on {self._listen_ip}:{self._port} (listening on all interfaces)") except socket.error as e: self.error_signal.emit(f"Error binding UDP socket to {self._listen_ip}:{self._port}: {e}") self._run_flag = False return while self._run_flag: try: data, addr = self._sock.recvfrom(self._buffer_size) self.data_received_signal.emit(data, addr) print(f"Received UDP data from {addr}: {data.hex()}") except socket.timeout: # Timeout occurred, continue loop to check _run_flag pass except socket.error as e: if self._run_flag: # Only report error if not intentionally stopping self.error_signal.emit(f"UDP receive error: {e}") break # Exit loop on persistent error print("UDP listener thread stopping.") if self._sock: self._sock.close() print("UDP socket closed.") def stop(self): """Stops the UDP listener thread gracefully.""" self._run_flag = False if self._sock: # A small trick to unblock recvfrom if it's blocking try: self._sock.shutdown(socket.SHUT_RDWR) except OSError: pass # Socket might already be closed or in a bad state self.wait() # Wait for the thread to finish # --- Main Application Window --- class RTSPViewerApp(QMainWindow): """ Main application window for RTSP viewing and UDP control. 
""" SOMERSAULT = 8 HEADLESS = 16 def __init__(self): super().__init__() self.setWindowTitle("RTSP Stream Viewer & Device Control") self.setGeometry(100, 100, 800, 700) # Adjusted height to accommodate new label self.central_widget = QWidget() self.setCentralWidget(self.central_widget) self.layout = QVBoxLayout(self.central_widget) # Video display label self.image_label = QLabel(self) self.image_label.setFixedSize(640, 480) # Fixed size for video display self.image_label.setAlignment(Qt.AlignCenter) self.image_label.setStyleSheet("background-color: black; border: 1px solid gray;") self.layout.addWidget(self.image_label, alignment=Qt.AlignCenter) self.flip=False self.headless=False self.cam=0 # Control buttons self.up_button = QPushButton("UP") self.up_button.setFixedSize(100, 40) self.up_button.clicked.connect(self.on_up_button_clicked) self.layout.addWidget(self.up_button, alignment=Qt.AlignCenter) self.down_button = QPushButton("DOWN") self.down_button.setFixedSize(100, 40) self.down_button.clicked.connect(self.on_down_button_clicked) self.layout.addWidget(self.down_button, alignment=Qt.AlignCenter) # UDP response display label self.udp_response_label = QLabel("UDP Response: None", self) self.udp_response_label.setAlignment(Qt.AlignCenter) self.udp_response_label.setStyleSheet("font-weight: bold; color: blue;") self.layout.addWidget(self.udp_response_label, alignment=Qt.AlignCenter) # Initialize video thread video = True if video: self.video_thread = VideoStreamThread(RTSP_URL) self.video_thread.change_pixmap_signal.connect(self.update_image) self.video_thread.error_signal.connect(self.show_error_message) self.video_thread.start() # Initialize UDP listener thread self.udp_listener_thread = UDPListenerThread(UDP_LISTEN_PORT, UDP_BUFFER_SIZE) self.udp_listener_thread.data_received_signal.connect(self.update_udp_response) self.udp_listener_thread.error_signal.connect(self.show_error_message) self.udp_listener_thread.start() # Add a placeholder for when video is not loaded self.image_label.setText("Loading RTSP stream...") self.image_label.setStyleSheet("background-color: black; color: white; border: 1px solid gray;") self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # UDP socket # Ensure the widget itself can receive key press events # We also want it to be able to re-gain focus after buttons are clicked self.setFocusPolicy(Qt.StrongFocus) # Create a QTimer for periodic voltage measurement self.accel = 50 self.decel = 5 self.send_udp_command(b'\x08\x01') self.timer = QTimer(self) self.last_heartbeat = time.time() self.timer.timeout.connect(self.send) # Connect the timeout signal to the update method self.timer.start(30) # Start the timer with an interval #byte0:102,byte1:128,byte2:128,byte3:128,byte4:128,byte5:0,byte6:0,byte7:153 self.basebytes = bytearray(b'\x66\x80\x80\x80\x80\x00\x00\x99') def send(self): if self.flip: self.basebytes[5]+=self.SOMERSAULT if self.headless: self.basebytes[5]+=self.HEADLESS self.send_udp_command(bytes(bytearray(b'\x03') + self.basebytes)) if time.time() - self.last_heartbeat > 1: self.send_udp_command(b'\x01\x01') self.last_heartbeat = time.time() if self.cam>0: self.video_thread._paused = True time.sleep(1) l = bytearray(b'\x06') l.append(self.cam) self.send_udp_command(bytes(l)) self.cam=0 time.sleep(1) self.video_thread._paused = False self.video_thread.reinitialize_stream() self.basebytes[5] = 0 for a in range(1,5): if (self.basebytes[a]>128): self.basebytes[a]-=self.decel elif (self.basebytes[a]<128): self.basebytes[a]+=self.decel def 
xor(self,bs): s=0 for byte_value in bs[1:6]: s ^= byte_value bs[6]=s return bs def keyPressEvent(self, event): if event.key() == Qt.Key_Up: #Forward (pitch down) if self.basebytes[2]<200: self.basebytes[2]+=self.accel event.accept() # Crucial: tell PyQt we handled this event elif event.key() == Qt.Key_Down: #Back (pitch up) if self.basebytes[2]>50: self.basebytes[2]-=self.accel event.accept() # Crucial: tell PyQt we handled this event elif event.key() == Qt.Key_Left: #Roll left if self.basebytes[1]>50: self.basebytes[1]-=self.accel event.accept() # Crucial: tell PyQt we handled this event elif event.key() == Qt.Key_Right: #Roll right if self.basebytes[1]<200: self.basebytes[1]+=self.accel event.accept() # Crucial: tell PyQt we handled this event elif event.key() == Qt.Key_W: if self.basebytes[3]<200: self.basebytes[3]+=self.accel event.accept() # Crucial: tell PyQt we handled this event elif event.key() == Qt.Key_S: if self.basebytes[3]>50: self.basebytes[3]-=self.accel event.accept() # Crucial: tell PyQt we handled this event elif event.key() == Qt.Key_D: if self.basebytes[4]<200: self.basebytes[4]+=self.accel event.accept() # Crucial: tell PyQt we handled this event elif event.key() == Qt.Key_A: if self.basebytes[4]>50: self.basebytes[4]-=self.accel event.accept() # Crucial: tell PyQt we handled this event elif event.key() == Qt.Key_Z: #Takeoff self.basebytes[5] = 1 event.accept() # Crucial: tell PyQt we handled this event elif event.key() == Qt.Key_X: #Land self.basebytes[5] = 2 event.accept() # Crucial: tell PyQt we handled this event elif event.key() == Qt.Key_C: #Calibrate self.basebytes[5] = 128 event.accept() # Crucial: tell PyQt we handled this event elif event.key() == Qt.Key_F: #Flip 360 self.flip = True event.accept() # Crucial: tell PyQt we handled this event elif event.key() == Qt.Key_H: #Headless self.headless = not self.headless event.accept() # Crucial: tell PyQt we handled this event elif event.key() == Qt.Key_1: #cam 1 self.cam=1 event.accept() # Crucial: tell PyQt we handled this event elif event.key() == Qt.Key_2: #cam 2 self.cam=2 event.accept() # Crucial: tell PyQt we handled this event else: # For other keys, let the default behavior happen print(event.key()) super().keyPressEvent(event) @pyqtSlot(QPixmap) def update_image(self, pixmap): """Slot to update the QLabel with new video frames.""" self.image_label.setPixmap(pixmap) self.image_label.setText("") # Clear loading text once video starts @pyqtSlot(bytes, tuple) def update_udp_response(self, data, addr): """Slot to update the QLabel with received UDP data.""" self.udp_response_label.setText(f"UDP Response from {addr[0]}:{addr[1]}: {data.hex()}") @pyqtSlot(str) def show_error_message(self, message): """Slot to display error messages in a QMessageBox.""" #QMessageBox.critical(self, "Error", message) # For stream errors, keep the error message on the image label if "Stream disrupted" in message or "Could not open RTSP stream" in message: self.image_label.setText("Stream Error: " + message) self.image_label.setStyleSheet("background-color: red; color: white; border: 1px solid gray;") # Do not disable buttons for temporary stream disruptions, as they are part of control flow else: # For other errors (e.g., UDP binding) self.udp_response_label.setText("UDP Error: " + message) self.udp_response_label.setStyleSheet("font-weight: bold; color: red;") def send_udp_command(self, command): """Sends a UDP command to the specified IP and port.""" try: self.sock.sendto(command, (UDP_IP, UDP_SEND_PORT)) #print(f"Sent UDP 
command: {command.hex()} to {UDP_IP}:{UDP_SEND_PORT}") except socket.error as e: #QMessageBox.warning(self, "Network Error", f"Failed to send UDP command: {e}") print(f"UDP send error: {e}") def on_up_button_clicked(self): """Handler for the UP button click.""" self.send_udp_command(UDP_UP_COMMAND) # Give the device a moment to switch cameras and stabilize the stream time.sleep(0.5) # Small delay after sending command self.video_thread.reinitialize_stream() # Trigger full stream re-initialization def on_down_button_clicked(self): """Handler for the DOWN button click.""" self.send_udp_command(UDP_DOWN_COMMAND) # Give the device a moment to switch cameras and stabilize the stream time.sleep(0.5) # Small delay after sending command self.video_thread.reinitialize_stream() # Trigger full stream re-initialization def closeEvent(self, event): """Ensures all threads are stopped when the application closes.""" print("Closing application. Stopping video and UDP threads...") self.video_thread.stop() self.udp_listener_thread.stop() event.accept() # --- Main execution --- if __name__ == "__main__": app = QApplication(sys.argv) window = RTSPViewerApp() window.show() sys.exit(app.exec_()) ================================================ FILE: frontend/.gitignore ================================================ # Logs logs *.log npm-debug.log* yarn-debug.log* yarn-error.log* pnpm-debug.log* lerna-debug.log* node_modules dist dist-ssr *.local # Editor directories and files .vscode/* !.vscode/extensions.json .idea .DS_Store *.suo *.ntvs* *.njsproj *.sln *.sw? ================================================ FILE: frontend/eslint.config.js ================================================ import js from '@eslint/js' import globals from 'globals' import reactHooks from 'eslint-plugin-react-hooks' import reactRefresh from 'eslint-plugin-react-refresh' import tseslint from 'typescript-eslint' export default tseslint.config( { ignores: ['dist'] }, { extends: [js.configs.recommended, ...tseslint.configs.recommended], files: ['**/*.{ts,tsx}'], languageOptions: { ecmaVersion: 2020, globals: globals.browser, }, plugins: { 'react-hooks': reactHooks, 'react-refresh': reactRefresh, }, rules: { ...reactHooks.configs.recommended.rules, 'react-refresh/only-export-components': [ 'warn', { allowConstantExport: true }, ], }, }, ) ================================================ FILE: frontend/index.html ================================================ Turbodrone Web
================================================ FILE: frontend/package.json ================================================ { "name": "frontend", "private": true, "version": "0.0.0", "type": "module", "scripts": { "dev": "vite", "build": "tsc -b && vite build", "lint": "eslint .", "preview": "vite preview" }, "dependencies": { "react": "^19.1.0", "react-dom": "^19.1.0", "react-router-dom": "^7.6.2" }, "devDependencies": { "@eslint/js": "^9.25.0", "@tailwindcss/cli": "^4.1.7", "@tailwindcss/postcss": "^4.1.7", "@types/react": "^19.1.2", "@types/react-dom": "^19.1.2", "@vitejs/plugin-react": "^4.4.1", "autoprefixer": "^10.4.21", "eslint": "^9.25.0", "eslint-plugin-react-hooks": "^5.2.0", "eslint-plugin-react-refresh": "^0.4.19", "globals": "^16.0.0", "postcss": "^8.5.3", "tailwindcss": "^4.1.7", "typescript": "~5.8.3", "typescript-eslint": "^8.30.1", "vite": "^6.3.5" } } ================================================ FILE: frontend/postcss.config.cjs ================================================ // postcss.config.cjs (new) module.exports = { plugins: { '@tailwindcss/postcss': {}, // ← new v4 plugin autoprefixer: {}, // optional but still fine }, }; ================================================ FILE: frontend/src/App.css ================================================ #root { max-width: 1280px; margin: 0 auto; padding: 2rem; text-align: center; } .logo { height: 6em; padding: 1.5em; will-change: filter; transition: filter 300ms; } .logo:hover { filter: drop-shadow(0 0 2em #646cffaa); } .logo.react:hover { filter: drop-shadow(0 0 2em #61dafbaa); } @keyframes logo-spin { from { transform: rotate(0deg); } to { transform: rotate(360deg); } } @media (prefers-reduced-motion: no-preference) { a:nth-of-type(2) .logo { animation: logo-spin infinite 20s linear; } } .card { padding: 2em; } .read-the-docs { color: #888; } ================================================ FILE: frontend/src/App.tsx ================================================ import { useControls } from './hooks/useControls'; import { ControlSchemeToggle } from './components/ControlSchemeToggle'; import { VideoFeed } from './components/VideoFeed'; import ControlsOverlay from './components/ControlsOverlay'; import { PluginControls } from './components/PluginControls'; import { DrawingOverlay } from './components/DrawingOverlay'; function App() { const { axes, mode, setMode, gamepadConnected, droneType, commandCapabilities, takeOff, land, emergencyStop, speedTier, setSpeedTier, } = useControls(); return (
); } export default App; ================================================ FILE: frontend/src/components/AxisIndicator.tsx ================================================ interface Props { x: number; // -1 … +1 (left … right) y: number; // -1 … +1 (down … up) – positive y = up size?: number; label?: string; } export default function AxisIndicator({ x, y, size = 120, label }: Props) { // clamp outside callers just in case x = Math.max(-1, Math.min(1, x)); y = Math.max(-1, Math.min(1, y)); const radius = size * 0.4; // max travel inside the square return (
{/* cross-hair */}
{/* moving dot */}
{label && {label}}
); } ================================================ FILE: frontend/src/components/CommandButtons.tsx ================================================ import type { CommandCapabilities } from "../hooks/useControls"; interface CommandButtonsProps { droneType: string; capabilities: CommandCapabilities; onTakeoff: () => void; onLand: () => void; onEstop: () => void; } const disabledButtonClasses = "bg-gray-700/80 border border-gray-500/40 text-gray-400 cursor-not-allowed opacity-70"; const baseButtonClasses = "min-w-[112px] px-5 py-2.5 font-semibold rounded-lg shadow-md transition-all duration-150 ease-in-out border focus:outline-none"; export default function CommandButtons({ droneType, capabilities, onTakeoff, onLand, onEstop, }: CommandButtonsProps) { return (
{droneType} Controls
); } ================================================ FILE: frontend/src/components/ControlSchemeToggle.tsx ================================================ import React from "react"; import type { ControlMode } from "../hooks/useControls"; interface Props { mode: ControlMode; setMode: (m: ControlMode) => void; gamepadConnected: boolean; } export const ControlSchemeToggle: React.FC = ({ mode, setMode, gamepadConnected }) => { /* helpers ensure Pointer-Lock is requested inside the user gesture */ const toKeyboard = () => setMode("inc"); const toGamepad = () => { if (gamepadConnected) setMode("abs"); }; const toTrackPoint = () => { /* must be called from a user-generated event */ document.body.requestPointerLock(); setMode("mouse"); }; return (
{/* status / hints */}
Current  {mode === "inc" ? "Keyboard" : mode === "abs" ? "Gamepad" : "TrackPoint"} {mode === "mouse" &&  (Esc to release)}
); }; ================================================ FILE: frontend/src/components/ControlsOverlay.tsx ================================================ import AxisIndicator from "./AxisIndicator"; import type { Axes, CommandCapabilities, SpeedTier } from "../hooks/useControls"; import CommandButtons from "./CommandButtons"; import SpeedControl from "./SpeedControl"; interface ControlsOverlayProps { axes: Axes; droneType: string; commandCapabilities: CommandCapabilities; speedTier: SpeedTier; onTakeoff: () => void; onLand: () => void; onEstop: () => void; onSpeedChange: (tier: SpeedTier) => void; } export default function ControlsOverlay({ axes, droneType, commandCapabilities, speedTier, onTakeoff, onLand, onEstop, onSpeedChange, }: ControlsOverlayProps) { const left = { x: axes.roll, y: axes.pitch }; const right = { x: axes.yaw, y: axes.throttle }; const droneLabel = droneType === "unknown" ? "Unknown" : droneType.replace(/_/g, " ").toUpperCase(); return (
{/* title bar */}

TURBODRONE WEB

{/* Controls Area: Buttons and Sticks */}
{/* Command Buttons Cluster */} {commandCapabilities.speed_control && ( )} {/* Sticks */}
); } ================================================ FILE: frontend/src/components/DrawingOverlay.tsx ================================================ import { useOverlays } from "../hooks/useOverlays"; import { useVideoSizing } from "../hooks/useVideoSizing"; function DrawingOverlay() { const overlays = useOverlays(); const { videoRect } = useVideoSizing(); const style: React.CSSProperties = { position: "absolute", left: videoRect.x, top: videoRect.y, width: videoRect.width, height: videoRect.height, pointerEvents: "none", }; return (
{overlays.map((o, i) => o.type === "rect" ? ( ) : null )}
); } // Making the component available as both a default and named export // to resolve the import issue in App.tsx. export { DrawingOverlay }; export default DrawingOverlay; ================================================ FILE: frontend/src/components/PluginControls.tsx ================================================ import { usePlugins } from '../hooks/usePlugins'; export function PluginControls() { const { pluginsEnabled, availablePlugins, runningPlugins, isLoading, error, togglePlugin } = usePlugins(); // Feature-flagged off by backend (PLUGINS_ENABLED=false) if (!pluginsEnabled && !isLoading) { return null; } if (isLoading) { return
Loading plugins...
; } if (error) { return
Error: {error}
; } return (

Plugins

{availablePlugins.length === 0 ? (

No plugins available.

) : (
    {availablePlugins.map((name) => { const isRunning = runningPlugins.has(name); return (
  • {name}
  • ); })}
)}
); } ================================================ FILE: frontend/src/components/SpeedControl.tsx ================================================ import type { SpeedTier } from "../hooks/useControls"; interface SpeedControlProps { enabled: boolean; value: SpeedTier; onChange: (tier: SpeedTier) => void; } const TIERS: { value: SpeedTier; label: string; help: string }[] = [ { value: 0, label: "Low", help: "Slow / beginner stick scaling" }, { value: 1, label: "Med", help: "Medium stick scaling" }, { value: 2, label: "High", help: "Full stick range" }, ]; export default function SpeedControl({ enabled, value, onChange }: SpeedControlProps) { const baseSegment = "px-3 py-1.5 text-xs font-semibold uppercase tracking-wider transition-colors duration-150 ease-in-out focus:outline-none"; const disabledTitle = "Speed control is unavailable for this implementation"; return (
Speed
{TIERS.map((tier, idx) => { const active = enabled && value === tier.value; const segmentClasses = active ? "bg-blue-600 text-white" : enabled ? "bg-gray-800/80 text-gray-200 hover:bg-gray-700/80" : "bg-gray-800/60 text-gray-400 cursor-not-allowed"; const borderClasses = idx > 0 ? "border-l border-gray-600/70" : ""; return ( ); })}
); } ================================================ FILE: frontend/src/components/StaticDrawingOverlay.tsx ================================================ import { useOverlays } from "../hooks/useOverlays"; /** * A simple overlay that assumes its parent container is correctly sized * with the same aspect ratio as the video feed. It uses a normalized * (0-1) coordinate system. */ export default function StaticDrawingOverlay() { const overlays = useOverlays(); return ( {overlays.map((o, i) => o.type === "rect" ? ( ) : null )} ); } ================================================ FILE: frontend/src/components/VideoFeed.tsx ================================================ export function VideoFeed({ src }: { src: string }) { return ( Drone video feed ); } ================================================ FILE: frontend/src/hooks/useControls.ts ================================================ import { useCallback, useEffect, useRef, useState } from "react"; /* ─────────────────────────────────────────────────────────── */ /* Shared types */ export type ControlMode = "inc" | "abs" | "mouse"; export interface Axes { throttle: number; // -1 … +1 (down / up) yaw: number; // -1 … +1 (left / right) pitch: number; // -1 … +1 (back / fwd) roll: number; // -1 … +1 (left / right) } export interface CommandCapabilities { takeoff: boolean; land: boolean; estop: boolean; camera_tilt: boolean; camera_switch: boolean; speed_control: boolean; } export type SpeedTier = 0 | 1 | 2; /* ─────────────────────────────────────────────────────────── */ export function useControls() { const API_BASE_URL = "http://localhost:8000"; /* ------- state refs (mutable) ------- */ const axesRef = useRef({ throttle: 0, yaw: 0, pitch: 0, roll: 0 }); const modeRef = useRef("inc"); /* ------- NEW: websocket ref & lifecycle ------- */ const ws = useRef(null); /* ------- Plugin state (event-driven) ------- */ const pluginRunningRef = useRef(false); const stoppedPluginOnceRef = useRef(false); // rate-limit stop calls per burst // Open WS once on mount, close on unmount useEffect(() => { ws.current = new WebSocket("ws://localhost:8000/ws"); return () => { ws.current?.close(); ws.current = null; }; }, []); // Listen for plugin start/stop events (dispatched from usePlugins and auto-stop) useEffect(() => { const onStart = () => { pluginRunningRef.current = true; }; const onStop = () => { pluginRunningRef.current = false; stoppedPluginOnceRef.current = false; }; window.addEventListener('plugin:running', onStart as EventListener); window.addEventListener('plugin:stopped', onStop as EventListener); return () => { window.removeEventListener('plugin:running', onStart as EventListener); window.removeEventListener('plugin:stopped', onStop as EventListener); }; }, []); /* ------- state that triggers re-renders ------- */ const [axes, setAxes] = useState(axesRef.current); const [mode, setModeSt] = useState("inc"); const [gamepadConnected, setGamepadConnected] = useState(false); const [droneType, setDroneType] = useState("unknown"); const [commandCapabilities, setCommandCapabilities] = useState({ takeoff: true, land: true, estop: true, camera_tilt: false, camera_switch: false, speed_control: false, }); const [speedTier, setSpeedTierSt] = useState(2); // Track previous gamepad status to avoid spam const prevGamepadStatus = useRef(false); /* make setMode update both the ref (for hooks) and the state (for UI) */ const setMode = useCallback((m: ControlMode) => { modeRef.current = m; setModeSt(m); }, []); useEffect(() => { let cancelled = false; const 
fetchCapabilities = async () => { try { const response = await fetch(`${API_BASE_URL}/capabilities`); if (!response.ok) return; const data = await response.json(); if (cancelled) return; setDroneType(data?.drone_type ?? "unknown"); setCommandCapabilities({ takeoff: Boolean(data?.commands?.takeoff), land: Boolean(data?.commands?.land), estop: Boolean(data?.commands?.estop), camera_tilt: Boolean(data?.commands?.camera_tilt), camera_switch: Boolean(data?.commands?.camera_switch), speed_control: Boolean(data?.commands?.speed_control), }); } catch { // Keep optimistic defaults when the backend is temporarily unavailable. } }; fetchCapabilities(); return () => { cancelled = true; }; }, []); /* --------------- gamepad detection --------------- */ useEffect(() => { const checkGamepad = () => { const gamepads = navigator.getGamepads(); const hasGamepad = Array.from(gamepads).some(gp => gp !== null && gp.connected); // Only log when status changes if (hasGamepad !== prevGamepadStatus.current) { console.log(`Gamepad ${hasGamepad ? 'connected' : 'disconnected'}`); if (hasGamepad) { const connectedGamepads = Array.from(gamepads).filter(gp => gp !== null); console.log('Connected gamepads:', connectedGamepads.map(gp => gp?.id)); } prevGamepadStatus.current = hasGamepad; } setGamepadConnected(hasGamepad); }; // Check initially checkGamepad(); // Listen for gamepad connect/disconnect events const handleGamepadConnected = () => checkGamepad(); const handleGamepadDisconnected = () => checkGamepad(); window.addEventListener('gamepadconnected', handleGamepadConnected); window.addEventListener('gamepaddisconnected', handleGamepadDisconnected); // Also poll periodically since some browsers don't fire events reliably const pollInterval = setInterval(checkGamepad, 1000); return () => { window.removeEventListener('gamepadconnected', handleGamepadConnected); window.removeEventListener('gamepaddisconnected', handleGamepadDisconnected); clearInterval(pollInterval); }; }, []); /* --------------- keyboard (incremental) --------------- */ useEffect(() => { if (modeRef.current !== "inc") return; // ignore when in abs mode const map: Record = { w: { axis: "pitch", dir: +1 }, s: { axis: "pitch", dir: -1 }, a: { axis: "roll", dir: -1 }, d: { axis: "roll", dir: +1 }, ArrowUp: { axis: "throttle", dir: +1 }, ArrowDown: { axis: "throttle", dir: -1 }, ArrowLeft: { axis: "yaw", dir: -1 }, ArrowRight: { axis: "yaw", dir: +1 }, }; const down = (e: KeyboardEvent) => { const m = map[e.key]; if (!m) return; axesRef.current[m.axis] = m.dir; setAxes({ ...axesRef.current }); // If any plugin is running and user provides input → stop plugin once maybeStopPluginOnUserInput(); }; const up = (e: KeyboardEvent) => { const m = map[e.key]; if (!m) return; if (axesRef.current[m.axis] === m.dir) { axesRef.current[m.axis] = 0; setAxes({ ...axesRef.current }); } }; window.addEventListener("keydown", down); window.addEventListener("keyup", up); return () => { window.removeEventListener("keydown", down); window.removeEventListener("keyup", up); }; }, [mode]); // re-run effect when mode flips /* --------------- Xbox-360 game-pad (absolute) --------- */ useEffect(() => { if (modeRef.current !== "abs") return; // ignore when in inc mode const DEADZONE = 0.15; // Adjust this value as needed let raf = 0; const applyDeadzone = (value: number): number => { return Math.abs(value) < DEADZONE ? 
0 : value; }; const poll = () => { const gp = navigator.getGamepads()[0] as Gamepad | null; if (gp) { /* Xbox-360 / Chrome mapping ------------------------------------- Left stick: axes[0] (X) axes[1] (Y) Right stick: axes[2] (X) axes[3] (Y) Positive Y is *down* → invert for throttle / pitch ------------------------------------------------------------------*/ axesRef.current.roll = applyDeadzone(gp.axes[0]); // left X axesRef.current.pitch = applyDeadzone(-gp.axes[1]); // left Y (forward == -1) axesRef.current.yaw = applyDeadzone(gp.axes[2]); // right X axesRef.current.throttle = applyDeadzone(-gp.axes[3]); // right Y (up == +1) setAxes({ ...axesRef.current }); // Stop plugin when the first non-zero user sticks are detected if (Math.abs(axesRef.current.roll) > 0 || Math.abs(axesRef.current.pitch) > 0 || Math.abs(axesRef.current.yaw) > 0 || Math.abs(axesRef.current.throttle) > 0) { maybeStopPluginOnUserInput(); } } raf = requestAnimationFrame(poll); }; raf = requestAnimationFrame(poll); return () => cancelAnimationFrame(raf); }, [mode]); /* ---------- TrackPoint / Mouse (relative) ------------------ */ useEffect(() => { if (modeRef.current !== "mouse") return; const sensitivity = 0.015; // tune to taste for TrackPoint const decay = 0.90; // spring-back to centre when idle const axesState = axesRef.current; let rafId = 0; /* convert mouse deltas → roll / pitch (-y = pitch forward) */ const onMove = (e: MouseEvent) => { axesRef.current.roll = Math.max(-1, Math.min(1, axesRef.current.roll + e.movementX * sensitivity)); axesRef.current.pitch = Math.max(-1, Math.min(1, axesRef.current.pitch - e.movementY * sensitivity)); setAxes({ ...axesRef.current }); // Stop plugin when user moves mouse/trackpoint if (Math.abs(e.movementX) > 0 || Math.abs(e.movementY) > 0) { maybeStopPluginOnUserInput(); } }; /* gentle recentre so sticks don't stay deflected forever */ const tick = () => { axesRef.current.roll *= decay; axesRef.current.pitch *= decay; if (Math.abs(axesRef.current.roll) < 0.001) axesRef.current.roll = 0; if (Math.abs(axesRef.current.pitch) < 0.001) axesRef.current.pitch = 0; setAxes({ ...axesRef.current }); rafId = requestAnimationFrame(tick); }; /* when we lose pointer-lock (Esc, window blur, etc.) 
       fall back to keyboard */
    const onLockChange = () => {
      if (document.pointerLockElement === null) {
        setMode("inc");
      }
    };

    window.addEventListener("mousemove", onMove);
    document.addEventListener("pointerlockchange", onLockChange);
    rafId = requestAnimationFrame(tick);

    return () => {
      window.removeEventListener("mousemove", onMove);
      document.removeEventListener("pointerlockchange", onLockChange);
      cancelAnimationFrame(rafId);
      axesState.roll = 0;
      axesState.pitch = 0;
      setAxes((prev) => ({ ...prev, roll: 0, pitch: 0 }));
    };
  }, [mode, setMode]);

  /* ----------- network TX 30 Hz (treat mouse as "abs") ------- */
  useEffect(() => {
    const interval = setInterval(() => {
      if (ws.current?.readyState !== WebSocket.OPEN) return;
      // COMPLETELY suppress all transmissions when any plugin is running.
      // This prevents the frontend from overwriting plugin commands.
      if (pluginRunningRef.current) return;
      ws.current.send(JSON.stringify({
        type: "axes",
        mode: modeRef.current,
        ...axesRef.current,
      }));
    }, 1000 / 30);
    return () => clearInterval(interval);
  }, []);

  /* ------------- helpers / commands ------------------------- */
  const sendCommand = (type: string, payload = {}) => {
    if (ws.current?.readyState === WebSocket.OPEN) {
      ws.current.send(JSON.stringify({ type, ...payload }));
    } else {
      console.warn("Cannot send command, WebSocket not open.");
    }
  };

  const maybeStopPluginOnUserInput = async () => {
    if (!pluginRunningRef.current || stoppedPluginOnceRef.current) return;
    try {
      // Stop all running plugins for simplicity
      const res = await fetch(`${API_BASE_URL}/plugins`);
      // If plugins are disabled on backend, nothing to stop.
      if (res.status === 404) return;
      if (!res.ok) return;
      const data = await res.json();
      const running: string[] = data?.running ?? [];
      await Promise.all(running.map((name) =>
        fetch(`${API_BASE_URL}/plugins/${name}/stop`, { method: 'POST' })));
      stoppedPluginOnceRef.current = true;
      pluginRunningRef.current = false;
      // notify UI to flip OFF without polling
      window.dispatchEvent(new CustomEvent('plugin:stopped'));
    } catch {
      // Ignore errors when stopping plugins (fire-and-forget)
    }
  };

  const takeOff = () => sendCommand("takeoff");
  const land = () => sendCommand("land");
  const emergencyStop = () => sendCommand("estop");

  const setSpeedIndex = useCallback((speedIndex: number) => {
    sendCommand("set_speed_index", { speed_index: speedIndex });
  }, []);

  const setSpeedTier = useCallback((tier: SpeedTier) => {
    setSpeedTierSt(tier);
    setSpeedIndex(tier);
  }, [setSpeedIndex]);

  /* ------------- hook return ------------------------------- */
  return {
    axes,
    mode,
    setMode,
    gamepadConnected,
    droneType,
    commandCapabilities,
    takeOff,
    land,
    emergencyStop,
    setSpeedIndex,
    speedTier,
    setSpeedTier,
  };
}

================================================
FILE: frontend/src/hooks/useOverlays.ts
================================================
import { useState, useEffect, useRef } from 'react';

const OVERLAY_WS_URL = 'ws://localhost:8000/ws/overlays';

export interface OverlayObject {
  type: 'rect';
  coords: [number, number, number, number]; // [x1, y1, x2, y2]
  color: string;
}

export function useOverlays() {
  const [overlays, setOverlays] = useState<OverlayObject[]>([]);
  const ws = useRef<WebSocket | null>(null);

  useEffect(() => {
    const connect = () => {
      ws.current = new WebSocket(OVERLAY_WS_URL);
      ws.current.onopen = () => console.log('%c[Overlays] WebSocket Connected', 'color: green');
      ws.current.onmessage = (event) => {
        try {
          const data = JSON.parse(event.data);
          if (Array.isArray(data)) {
            setOverlays(data);
          }
        } catch (e) {
          console.error('[Overlays] Failed to parse data:', e);
        }
      };
      ws.current.onclose = () => {
        console.log('%c[Overlays] WebSocket Disconnected. Reconnecting...', 'color: orange');
        setTimeout(connect, 3000);
      };
      ws.current.onerror = (err) => {
        console.error('%c[Overlays] WebSocket Error', 'color: red', err);
        ws.current?.close();
      };
    };

    connect();

    return () => {
      if (ws.current) {
        ws.current.onclose = null;
        ws.current.close();
      }
    };
  }, []);

  return overlays;
}

================================================
FILE: frontend/src/hooks/usePlugins.ts
================================================
import { useState, useEffect, useCallback } from 'react';

// Assuming your backend runs on port 8000
const API_BASE_URL = 'http://localhost:8000';

interface PluginState {
  available: string[];
  running: string[];
}

export function usePlugins() {
  const [pluginsEnabled, setPluginsEnabled] = useState(false);
  const [availablePlugins, setAvailablePlugins] = useState<string[]>([]);
  const [runningPlugins, setRunningPlugins] = useState<Set<string>>(new Set());
  const [isLoading, setIsLoading] = useState(true);
  const [error, setError] = useState<string | null>(null);

  // Function to fetch the current state of plugins from the backend
  const fetchPluginState = useCallback(async () => {
    try {
      const response = await fetch(`${API_BASE_URL}/plugins`);
      if (!response.ok) {
        // Backend returns 404 when plugins are disabled.
        if (response.status === 404) {
          setPluginsEnabled(false);
          setAvailablePlugins([]);
          setRunningPlugins(new Set());
          setError(null);
          return;
        }
        throw new Error('Failed to fetch plugin status');
      }
      setPluginsEnabled(true);
      const data: PluginState = await response.json();
      setAvailablePlugins(data.available);
      setRunningPlugins(new Set(data.running));
    } catch (e) {
      setError(e instanceof Error ? e.message : 'An unknown error occurred');
    } finally {
      setIsLoading(false);
    }
  }, []);

  // Function to toggle a plugin on or off
  const togglePlugin = useCallback(async (name: string) => {
    if (!pluginsEnabled) return;
    const isRunning = runningPlugins.has(name);
    const endpoint = isRunning ? 'stop' : 'start';
    try {
      const response = await fetch(`${API_BASE_URL}/plugins/${name}/${endpoint}`, {
        method: 'POST',
      });
      if (!response.ok) {
        throw new Error(`Failed to ${endpoint} plugin ${name}`);
      }
      // After toggling, refresh the state to ensure UI is in sync
      await fetchPluginState();
    } catch (e) {
      setError(e instanceof Error ? e.message : 'An unknown error occurred');
    }
  }, [pluginsEnabled, runningPlugins, fetchPluginState]);

  // Fetch the initial state when the hook is first used
  useEffect(() => {
    fetchPluginState();
    return () => {};
  }, [fetchPluginState]);

  // Keep UI in sync when other parts of the app start/stop plugins
  useEffect(() => {
    const handler = () => { fetchPluginState(); };
    window.addEventListener('plugin:running', handler as EventListener);
    window.addEventListener('plugin:stopped', handler as EventListener);
    return () => {
      window.removeEventListener('plugin:running', handler as EventListener);
      window.removeEventListener('plugin:stopped', handler as EventListener);
    };
  }, [fetchPluginState]);

  return {
    pluginsEnabled,
    availablePlugins,
    runningPlugins,
    isLoading,
    error,
    togglePlugin,
  };
}

================================================
FILE: frontend/src/hooks/useVideoSizing.ts
================================================
import { useState, useLayoutEffect, useCallback } from "react";

const ZERO_RECT = { x: 0, y: 0, width: 0, height: 0, top: 0, left: 0, right: 0, bottom: 0 };

export function useVideoSizing() {
  const [videoRect, setVideoRect] = useState(ZERO_RECT);
  const [containerRect, setContainerRect] = useState(ZERO_RECT);

  const recalculate = useCallback(() => {
    const img = document.querySelector('img[alt="Drone video feed"]') as HTMLImageElement;
    if (!img || !img.parentElement) return;
    if (img.naturalWidth === 0 || img.naturalHeight === 0) return;

    const container = img.parentElement;
    if (container.clientWidth === 0 || container.clientHeight === 0) return;

    const containerAR = container.clientWidth / container.clientHeight;
    const videoAR = img.naturalWidth / img.naturalHeight;

    let newWidth, newHeight, newX, newY;
    if (containerAR > videoAR) {
      // Container is wider than the video: height-constrained, bars at the sides (pillarboxed)
      newHeight = container.clientHeight;
      newWidth = newHeight * videoAR;
      newX = (container.clientWidth - newWidth) / 2;
      newY = 0;
    } else {
      // Container is taller than the video: width-constrained, bars above/below (letterboxed)
      newWidth = container.clientWidth;
      newHeight = newWidth / videoAR;
      newY = (container.clientHeight - newHeight) / 2;
      newX = 0;
    }

    setVideoRect({
      x: newX,
      y: newY,
      width: newWidth,
      height: newHeight,
      top: newY,
      left: newX,
      right: newX + newWidth,
      bottom: newY + newHeight,
    });
    setContainerRect(container.getBoundingClientRect());
  }, []);

  useLayoutEffect(() => {
    const img = document.querySelector('img[alt="Drone video feed"]') as HTMLImageElement;
    if (!img) return;

    const ro = new ResizeObserver(recalculate);
    ro.observe(img.parentElement!);
    img.addEventListener('load', recalculate);

    // Initial calculation in case image is already loaded
    if (img.complete && img.naturalWidth > 0) {
      recalculate();
    }

    return () => {
      ro.disconnect();
      img.removeEventListener('load', recalculate);
    };
  }, [recalculate]);

  return { videoRect, containerRect };
}

================================================
FILE: frontend/src/index.css
================================================
@import "tailwindcss";

/* tech-y heading font */
@import url('https://fonts.googleapis.com/css2?family=Rajdhani:wght@500;700&display=swap');

/* global dark theme */
html, body {
  margin: 0;
  height: 100%;
  background-color: #1e1e1e; /* VS Code / Cursor dark */
  color: #d4d4d4;
  font-family: system-ui, sans-serif;
}

/* make the Heading font available as `font-heading` in Tailwind */
@layer utilities {
  .font-heading {
    font-family: 'Rajdhani', ui-sans-serif, sans-serif;
  }
}

:root {
  font-family: system-ui, Avenir, Helvetica, Arial, sans-serif;
  line-height: 1.5;
  font-weight: 400;
  color-scheme: light dark;
  color: rgba(255, 255, 255, 0.87);
  background-color: #242424;
  font-synthesis: none;
  text-rendering: optimizeLegibility;
  -webkit-font-smoothing: antialiased;
  -moz-osx-font-smoothing: grayscale;
}

a {
  font-weight: 500;
  color: #646cff;
  text-decoration: inherit;
}
a:hover {
  color: #535bf2;
}

body {
  margin: 0;
  /* remove flex-box centring so children can use the full width */
  min-width: 320px;
  min-height: 100vh;
}

/* make sure the React root really spans the viewport */
#root {
  width: 100%;
  min-height: 100vh;
}

h1 {
  font-size: 3.2em;
  line-height: 1.1;
}

button {
  border-radius: 8px;
  border: 1px solid transparent;
  padding: 0.6em 1.2em;
  font-size: 1em;
  font-weight: 500;
  font-family: inherit;
  /* background-color: #1a1a1a; */
  cursor: pointer;
  transition: border-color 0.25s;
}
button:hover {
  border-color: #646cff;
}
button:focus,
button:focus-visible {
  outline: 4px auto -webkit-focus-ring-color;
}

@media (prefers-color-scheme: light) {
  :root {
    color: #213547;
    background-color: #ffffff;
  }
  a:hover {
    color: #747bff;
  }
  /* background-color: #f9f9f9; */
}

================================================
FILE: frontend/src/main.tsx
================================================
import { StrictMode } from 'react'
import { createRoot } from 'react-dom/client'
import './index.css'
import App from './App.tsx'
import { BrowserRouter, Routes, Route } from "react-router-dom";
import TestPage from "./pages/TestPage";

createRoot(document.getElementById('root')!).render( } /> } /> , )

================================================
FILE: frontend/src/pages/TestPage.tsx
================================================
import StaticDrawingOverlay from "../components/StaticDrawingOverlay";
import { VideoFeed } from "../components/VideoFeed";

export default function TestPage() {
  return (

Static Video Feed (640×480)

  );
}

================================================
FILE: frontend/src/vite-env.d.ts
================================================
/// <reference types="vite/client" />

================================================
FILE: frontend/tailwind.config.ts
================================================
import type { Config } from 'tailwindcss';

export default {
  content: [
    './index.html',
    './src/**/*.{js,ts,jsx,tsx}',
  ],
  theme: {
    extend: {
      fontFamily: {
        heading: ['Rajdhani', 'ui-sans-serif', 'sans-serif'],
      },
    },
  },
  plugins: [],
} satisfies Config;

================================================
FILE: frontend/tsconfig.app.json
================================================
{
  "compilerOptions": {
    "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo",
    "target": "ES2020",
    "useDefineForClassFields": true,
    "lib": ["ES2020", "DOM", "DOM.Iterable"],
    "module": "ESNext",
    "skipLibCheck": true,

    /* Bundler mode */
    "moduleResolution": "bundler",
    "allowImportingTsExtensions": true,
    "verbatimModuleSyntax": true,
    "moduleDetection": "force",
    "noEmit": true,
    "jsx": "react-jsx",

    /* Linting */
    "strict": true,
    "noUnusedLocals": true,
    "noUnusedParameters": true,
    "erasableSyntaxOnly": true,
    "noFallthroughCasesInSwitch": true,
    "noUncheckedSideEffectImports": true
  },
  "include": ["src"]
}

================================================
FILE: frontend/tsconfig.json
================================================
{
  "files": [],
  "references": [
    { "path": "./tsconfig.app.json" },
    { "path": "./tsconfig.node.json" }
  ]
}

================================================
FILE: frontend/tsconfig.node.json
================================================
{
  "compilerOptions": {
    "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo",
    "target": "ES2022",
    "lib": ["ES2023"],
    "module": "ESNext",
    "skipLibCheck": true,

    /* Bundler mode */
    "moduleResolution": "bundler",
    "allowImportingTsExtensions": true,
    "verbatimModuleSyntax": true,
    "moduleDetection": "force",
    "noEmit": true,

    /* Linting */
    "strict": true,
    "noUnusedLocals": true,
    "noUnusedParameters": true,
    "erasableSyntaxOnly": true,
    "noFallthroughCasesInSwitch": true,
    "noUncheckedSideEffectImports": true
  },
  "include": ["vite.config.ts"]
}

================================================
FILE: frontend/vite.config.ts
================================================
import { defineConfig } from 'vite'
import react from '@vitejs/plugin-react'

// https://vite.dev/config/
export default defineConfig({
  plugins: [react()],
})
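For orientation, below is one way the four hooks above could be composed in a page component. This is a minimal sketch, not the repository's actual App.tsx: the import paths, the ControlPanelSketch component name, and the assumption that useControls is a named export taking no arguments are illustrative only.

import { useControls } from "./hooks/useControls";   // assumed named export, no arguments
import { usePlugins } from "./hooks/usePlugins";
import { useOverlays } from "./hooks/useOverlays";
import { useVideoSizing } from "./hooks/useVideoSizing";

// Illustrative composition of the hooks; the real UI lives in App.tsx and its components.
export default function ControlPanelSketch() {
  const {
    axes, mode, setMode, gamepadConnected,
    commandCapabilities, takeOff, land, emergencyStop,
  } = useControls();
  const { pluginsEnabled, availablePlugins, runningPlugins, togglePlugin } = usePlugins();
  const overlays = useOverlays();          // rectangles streamed from ws://localhost:8000/ws/overlays
  const { videoRect } = useVideoSizing();  // letterbox/pillarbox geometry of the video <img>

  return (
    <div>
      <p>mode: {mode} · gamepad: {gamepadConnected ? "connected" : "none"}</p>
      <p>roll {axes.roll.toFixed(2)} / pitch {axes.pitch.toFixed(2)}</p>

      {/* capability-gated commands, mirroring the /capabilities response */}
      {commandCapabilities.takeoff && <button onClick={takeOff}>Take off</button>}
      {commandCapabilities.land && <button onClick={land}>Land</button>}
      {commandCapabilities.estop && <button onClick={emergencyStop}>E-stop</button>}
      <button onClick={() => setMode(mode === "inc" ? "abs" : "inc")}>Toggle input mode</button>

      {/* plugin toggles; hidden entirely when the backend reports plugins disabled (404) */}
      {pluginsEnabled && availablePlugins.map((name) => (
        <button key={name} onClick={() => togglePlugin(name)}>
          {runningPlugins.has(name) ? `Stop ${name}` : `Start ${name}`}
        </button>
      ))}

      <p>{overlays.length} overlay rect(s) · video box {Math.round(videoRect.width)}×{Math.round(videoRect.height)} px</p>
    </div>
  );
}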