[
  {
    "path": ".github/FUNDING.yml",
    "content": "github: alexeyvasilyev"
  },
  {
    "path": ".gitignore",
    "content": "*.iml\n.gradle\n/local.properties\n/.idea\n/build\n.DS_Store\n"
  },
  {
    "path": "LICENSE",
    "content": "                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      
form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. 
(Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright [yyyy] [name of copyright owner]\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "README.md",
    "content": "# rtsp-client-android\n<b>Lightweight RTSP client library for Android</b> with almost zero lag video decoding (achieved 20 msec video decoding latency on some RTSP streams). Designed for lag-critical applications (e.g. video surveillance from drones, car rear view cameras, etc.).\n\nUnlike [AndroidX Media ExoPlayer](https://github.com/androidx/media) which also supports RTSP, this library does not perform any video buffering. Video frames are shown immediately when they arrive.\n\n[![Release](https://jitpack.io/v/alexeyvasilyev/rtsp-client-android.svg)](https://jitpack.io/#alexeyvasilyev/rtsp-client-android)\n\n![Screenshot](docs/images/rtsp-demo-app.webp?raw=true \"Screenshot\")\n\n## Features:\n- RTSP/RTSPS over TCP.\n- Supports majority of RTSP IP cameras.\n- Video H.264/H.265.\n- Audio AAC LC, G.711 uLaw, G.711 aLaw.\n- Support for application specific data sent via RTP, e.g. GPS data (`m=application`, see [RFC 4566 sec.5.14](https://datatracker.ietf.org/doc/html/rfc4566#section-5.14))\n- Basic/Digest authentication.\n- Uses Android's [Low-Latency MediaCodec](https://source.android.com/docs/core/media/low-latency-media) by default if available.\n- Ability to select hardware or software video decoder.\n- Ability to [rewrite SPS frame](https://github.com/alexeyvasilyev/rtsp-client-android/blob/dbea741548307b1b0e1ead0ccc6294e811fbf6fd/library-client-rtsp/src/main/java/com/alexvas/rtsp/widget/RtspProcessor.kt#L106C9-L106C55) with low-latency parameters (EXPERIMENTAL).\n- Video rotation (90, 180, 270 degrees). 
\n- Android min API 24.\n\n## Upcoming features:\n- 2-way talk.\n\n## Permissions:\n\n```xml\n<uses-permission android:name=\"android.permission.INTERNET\" />\n```\n\n## Compile\n\nTo use this library in your project add this to your build.gradle:\n```gradle\nallprojects {\n  repositories {\n    maven { url 'https://jitpack.io' }\n  }\n}\ndependencies {\n  implementation 'com.github.alexeyvasilyev:rtsp-client-android:x.x.x'\n}\n```\n\n## How to use:\nEasiest way is just to use `RtspSurfaceView` (recommended) or `RtspImageView` classes for showing video stream in UI.\n\nUse [RtspSurfaceView](https://github.com/alexeyvasilyev/rtsp-client-android/blob/master/library-client-rtsp/src/main/java/com/alexvas/rtsp/widget/RtspSurfaceView.kt) if you need best performance and less battery usage. To get bitmap from SurfaceView use [PixelCopy.request](https://developer.android.com/reference/android/view/PixelCopy) (on Pixel 8 Pro with 1440p @ 20 fps video stream, you can get 12 fps only via PixelCopy)\n\nUse [RtspImageView](https://github.com/alexeyvasilyev/rtsp-client-android/blob/master/library-client-rtsp/src/main/java/com/alexvas/rtsp/widget/RtspImageView.kt) if you need better performance than PixelCopy for getting bitmaps for further processing (e.g. 
for AI).\n\n```xml\n<com.alexvas.rtsp.widget.RtspSurfaceView\n    android:layout_width=\"match_parent\"\n    android:layout_height=\"match_parent\"\n    android:id=\"@+id/svVideo\" />\n\n<com.alexvas.rtsp.widget.RtspImageView\n    android:layout_width=\"match_parent\"\n    android:layout_height=\"match_parent\"\n    android:id=\"@+id/ivVideo\" />\n```\n\nThen in code use:\n```kotlin\nval uri = Uri.parse(\"rtsps://10.0.1.3/test.sdp\")\nval username = \"admin\"\nval password = \"secret\"\nsvVideo.init(uri, username, password)\nsvVideo.start(\n    requestVideo = true,\n    requestAudio = true,\n    requestApplication = false)\n// ...\nsvVideo.stop()\n```\n\nYou can still use library without any decoding (just for obtaining raw frames from RTSP source), e.g. for writing video stream into MP4 via muxer.\n\n```kotlin\nval rtspClientListener = object: RtspClient.RtspClientListener {\n    override fun onRtspConnecting() {}\n    override fun onRtspConnected(sdpInfo: SdpInfo) {}\n    override fun onRtspVideoNalUnitReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {\n        // Send raw H264/H265 NAL unit to decoder\n    }\n    override fun onRtspAudioSampleReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {\n        // Send raw audio to decoder\n    }\n    override fun onRtspApplicationDataReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {\n        // Send raw application data to app specific parser\n    }\n    override fun onRtspDisconnected() {}\n    override fun onRtspFailedUnauthorized() {\n        Log.e(TAG, \"RTSP failed unauthorized\");\n    }\n    override fun onRtspFailed(message: String?) 
{\n        Log.e(TAG, \"RTSP failed with message '$message'\")\n    }\n}\n\nval uri = Uri.parse(\"rtsps://10.0.1.3/test.sdp\")\nval username = \"admin\"\nval password = \"secret\"\nval stopped = AtomicBoolean(false)\nval sslSocket = NetUtils.createSslSocketAndConnect(uri.getHost(), uri.getPort(), 5000)\n\nval rtspClient = RtspClient.Builder(sslSocket, uri.toString(), stopped, rtspClientListener)\n    .requestVideo(true)\n    .requestAudio(true)\n    .withDebug(false)\n    .withUserAgent(\"RTSP client\")\n    .withCredentials(username, password)\n    .build()\n// Blocking call until stopped variable is true or connection failed\nrtspClient.execute()\n\nNetUtils.closeSocket(sslSocket)\n```\n\n## How to get lowest possible latency:\nThere are two types of latencies:\n\n### Network latency\nIf you want the lowest possible network latency, be sure that both Android device and RTSP camera are connected to the same network by the Ethernet cable (not WiFi).\n\nAnother option to try is to decrease stream bitrate on RTSP camera. Less frame size leads to less time needed for frame transfer.\n\n### Video decoder latency\nVideo decoder latency can vary significantly on different Android devices and on different RTSP camera streams.\n\nFor the same profile/level and resolution (but different cameras) the latency in best cases can be 20 msec, in worst cases 1200 msec.\n\nTo decrease latency be sure you use the lowest possible H.264 video stream profile and level (enable `debug` in the library and check SPS frame params `profile_idc` and `level_idc` in the log). `Baseline profile` should have the lowest possible decoder latency.\nCheck `max_num_reorder_frames` param as well. 
For best latency its value should be `0`.\n\nYou can also try to use [experimentalUpdateSpsFrameWithLowLatencyParams](https://github.com/alexeyvasilyev/rtsp-client-android/blob/master/library-client-rtsp/src/main/java/com/alexvas/rtsp/widget/RtspProcessor.kt#L106) library feature which rewrites the config frame at runtime with low-latency parameters.\n"
  },
  {
    "path": "app/.gitignore",
    "content": "# Created by https://www.gitignore.io/api/android,java,intellij\r\n\r\n### Android ###\r\n# Built application files\r\n*.apk\r\n*.ap_\r\n\r\n# Files for the Dalvik VM\r\n*.dex\r\n\r\n# Java class files\r\n*.class\r\n\r\n# Generated files\r\nbin/\r\ngen/\r\n\r\n# Gradle files\r\n.gradle/\r\nbuild/\r\n\r\n# Local configuration file (sdk path, etc)\r\nlocal.properties\r\n\r\n# Proguard folder generated by Eclipse\r\nproguard/\r\n\r\nxactmobile/class_files.txt\r\nxactmobile/mapping.txt\r\nxactmobile/seeds.txt\r\n\r\n# Log Files\r\n*.log\r\n\r\n# Android Studio Navigation editor temp files\r\n.navigation/\r\n\r\n### Android Patch ###\r\ngen-external-apklibs\r\n\r\n\r\n### Java ###\r\n*.class\r\n\r\n# Mobile Tools for Java (J2ME)\r\n.mtj.tmp/\r\n\r\n# Package Files #\r\n#*.jar\r\n*.war\r\n*.ear\r\n\r\n# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml\r\nhs_err_pid*\r\n\r\n\r\n### Intellij ###\r\n# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio\r\n\r\n*.iml\r\n\r\n## Directory-based project format:\r\n.idea/\r\n# if you remove the above rule, at least ignore the following:\r\n\r\n# User-specific stuff:\r\n.idea/workspace.xml\r\n.idea/tasks.xml\r\n.idea/dictionaries\r\n\r\n# Sensitive or high-churn files:\r\n.idea/dataSources.ids\r\n.idea/dataSources.xml\r\n.idea/sqlDataSources.xml\r\n.idea/dynamic.xml\r\n.idea/uiDesigner.xml\r\n\r\n# Gradle:\r\n.idea/gradle.xml\r\n.idea/libraries\r\n\r\n# Mongo Explorer plugin:\r\n.idea/mongoSettings.xml\r\n\r\n## File-based project format:\r\n*.ipr\r\n*.iws\r\n\r\n## Plugin-specific files:\r\n\r\n# IntelliJ\r\n/out/\r\n\r\n# mpeltonen/sbt-idea plugin\r\n.idea_modules/\r\n\r\n# JIRA plugin\r\natlassian-ide-plugin.xml\r\n\r\n# Crashlytics plugin (for Android Studio and 
IntelliJ)\r\ncom_crashlytics_export_strings.xml\r\ncrashlytics.properties\r\ncrashlytics-build.properties\r\n\r\nxactmobile/.DS_Store~64be78fe3602626c61b52bcbfd09e09a6107b50a\r\nxactmobile/.DS_Store~HEAD\r\noslab-viewpager/._.DS_Store\r\noslab-viewpager/src/main/.DS_Store\r\noslab-viewpager/src/main/._.DS_Store\r\noslab-viewpager/src/main/res/.DS_Store\r\noslab-viewpager/src/main/res/._.DS_Store\r\noslab-viewpager/.gitignore\r\noslab-materialdesign/.DS_Store\r\noslab-materialdesign/._.DS_Store\r\noslab-materialdesign/src/.DS_Store\r\noslab-materialdesign/src/._.DS_Store\r\noslab-materialdesign/src/main/.DS_Store\r\noslab-materialdesign/src/main/._.DS_Store\r\noslab-materialdesign/src/main/res/.DS_Store\r\noslab-materialdesign/src/main/res/._.DS_Store\r\n"
  },
  {
    "path": "app/build.gradle",
    "content": "apply plugin: 'com.android.application'\napply plugin: 'kotlin-android'\n\nandroid {\n\n    compileSdkVersion 36\n\n    defaultConfig {\n        applicationId \"com.alexvas.rtsp.demo\"\n        minSdk 24\n        targetSdk 34\n        versionCode 1\n        versionName \"1.0\"\n    }\n\n    buildTypes {\n        release {\n            minifyEnabled false\n            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'\n        }\n    }\n\n// To inline the bytecode built with JVM target 1.8 into\n// bytecode that is being built with JVM target 1.6. (e.g. navArgs)\n\n\n    compileOptions {\n        sourceCompatibility JavaVersion.VERSION_17\n        targetCompatibility JavaVersion.VERSION_17\n    }\n    kotlinOptions {\n        jvmTarget = JavaVersion.VERSION_17.toString()\n    }\n    buildFeatures {\n        viewBinding true\n    }\n    namespace 'com.alexvas.rtsp.demo'\n}\n\ndependencies {\n    implementation fileTree(dir: 'libs', include: ['*.jar'])\n    implementation \"org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version\"\n    implementation 'androidx.appcompat:appcompat:1.7.1'\n    implementation 'androidx.core:core-ktx:1.18.0'\n    implementation 'com.google.android.material:material:1.13.0'\n    implementation 'androidx.constraintlayout:constraintlayout:2.2.1'\n    implementation 'androidx.lifecycle:lifecycle-extensions:2.2.0'\n\n    def androidx_navigation_version = '2.9.7'\n    implementation \"androidx.navigation:navigation-fragment-ktx:$androidx_navigation_version\"\n    implementation \"androidx.navigation:navigation-ui-ktx:$androidx_navigation_version\"\n    implementation \"androidx.navigation:navigation-fragment-ktx:$androidx_navigation_version\"\n    implementation \"androidx.navigation:navigation-ui-ktx:$androidx_navigation_version\"\n\n    def logcat_core_version = '3.4'\n    api \"com.github.AppDevNext.Logcat:LogcatCoreLib:$logcat_core_version\"\n    api 
\"com.github.AppDevNext.Logcat:LogcatCoreUI:$logcat_core_version\"\n\n    implementation project(':library-client-rtsp')\n}\n"
  },
  {
    "path": "app/proguard-rules.pro",
    "content": "# Add project specific ProGuard rules here.\n# You can control the set of applied configuration files using the\n# proguardFiles setting in build.gradle.\n#\n# For more details, see\n#   http://developer.android.com/guide/developing/tools/proguard.html\n\n# If your project uses WebView with JS, uncomment the following\n# and specify the fully qualified class name to the JavaScript interface\n# class:\n#-keepclassmembers class fqcn.of.javascript.interface.for.webview {\n#   public *;\n#}\n\n# Uncomment this to preserve the line number information for\n# debugging stack traces.\n#-keepattributes SourceFile,LineNumberTable\n\n# If you keep the line number information, uncomment this to\n# hide the original source file name.\n#-renamesourcefileattribute SourceFile\n"
  },
  {
    "path": "app/src/main/AndroidManifest.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\">\n\n    <application\n        android:allowBackup=\"true\"\n        android:icon=\"@mipmap/ic_launcher\"\n        android:label=\"@string/app_name\"\n        android:roundIcon=\"@mipmap/ic_launcher_round\"\n        android:supportsRtl=\"true\"\n        android:theme=\"@style/AppTheme\">\n        <activity\n            android:name=\".MainActivity\"\n            android:exported=\"true\"\n            android:label=\"@string/app_name\">\n            <intent-filter>\n                <action android:name=\"android.intent.action.MAIN\" />\n                <category android:name=\"android.intent.category.LAUNCHER\" />\n            </intent-filter>\n        </activity>\n    </application>\n\n</manifest>"
  },
  {
    "path": "app/src/main/java/com/alexvas/rtsp/demo/MainActivity.kt",
    "content": "package com.alexvas.rtsp.demo\n\nimport android.os.Bundle\nimport com.google.android.material.bottomnavigation.BottomNavigationView\nimport androidx.appcompat.app.AppCompatActivity\nimport androidx.navigation.findNavController\nimport androidx.navigation.ui.setupWithNavController\n\nclass MainActivity : AppCompatActivity() {\n\n    override fun onCreate(savedInstanceState: Bundle?) {\n        super.onCreate(savedInstanceState)\n        setContentView(R.layout.activity_main)\n        val navView: BottomNavigationView = findViewById(R.id.nav_view)\n\n        val navController = findNavController(R.id.nav_host_fragment)\n        // Passing each menu ID as a set of Ids because each\n        // menu should be considered as top level destinations.\n//        val appBarConfiguration = AppBarConfiguration(setOf(\n//                R.id.navigation_live, R.id.navigation_logs))\n//        setupActionBarWithNavController(navController, appBarConfiguration)\n        navView.setupWithNavController(navController)\n    }\n}\n"
  },
  {
    "path": "app/src/main/java/com/alexvas/rtsp/demo/live/LiveFragment.kt",
    "content": "package com.alexvas.rtsp.demo.live\n\nimport android.annotation.SuppressLint\nimport android.graphics.Bitmap\nimport android.os.Bundle\nimport android.os.Handler\nimport android.os.HandlerThread\nimport android.util.Log\nimport android.view.LayoutInflater\nimport android.view.PixelCopy\nimport android.view.View\nimport android.view.ViewGroup\nimport android.view.WindowManager\nimport android.widget.Toast\nimport androidx.constraintlayout.widget.ConstraintSet\nimport androidx.core.net.toUri\nimport androidx.fragment.app.Fragment\nimport androidx.lifecycle.ViewModelProvider\nimport com.alexvas.rtsp.codec.VideoDecodeThread\nimport com.alexvas.rtsp.demo.databinding.FragmentLiveBinding\nimport com.alexvas.rtsp.widget.RtspDataListener\nimport com.alexvas.rtsp.widget.RtspImageView\nimport com.alexvas.rtsp.widget.RtspStatusListener\nimport com.alexvas.rtsp.widget.toHexString\nimport java.util.Timer\nimport java.util.TimerTask\nimport java.util.concurrent.atomic.AtomicBoolean\nimport kotlin.math.min\n\n@SuppressLint(\"LogNotTimber\")\nclass LiveFragment : Fragment() {\n\n    private lateinit var binding: FragmentLiveBinding\n    private lateinit var liveViewModel: LiveViewModel\n\n    private var statisticsTimer: Timer? 
= null\n    private var svVideoSurfaceResolution = Pair(0, 0)\n\n    private val rtspStatusSurfaceListener = object: RtspStatusListener {\n        override fun onRtspStatusConnecting() {\n            if (DEBUG) Log.v(TAG, \"onRtspStatusConnecting()\")\n            binding.apply {\n                tvStatusSurface.text = \"RTSP connecting\"\n                pbLoadingSurface.visibility = View.VISIBLE\n                vShutterSurface.visibility = View.VISIBLE\n                llRtspParams.apply {\n                    etRtspRequest.isEnabled = false\n                    etRtspUsername.isEnabled = false\n                    etRtspPassword.isEnabled = false\n                    cbVideo.isEnabled = false\n                    cbAudio.isEnabled = false\n                    cbApplication.isEnabled = false\n                    cbDebug.isEnabled = false\n                }\n                tgRotation.isEnabled = false\n            }\n        }\n\n        override fun onRtspStatusConnected() {\n            if (DEBUG) Log.v(TAG, \"onRtspStatusConnected()\")\n            binding.apply {\n                tvStatusSurface.text = \"RTSP connected\"\n                bnStartStopSurface.text = \"Stop RTSP\"\n            }\n            setKeepScreenOn(true)\n        }\n\n        override fun onRtspStatusDisconnecting() {\n            if (DEBUG) Log.v(TAG, \"onRtspStatusDisconnecting()\")\n            binding.apply {\n                tvStatusSurface.text = \"RTSP disconnecting\"\n            }\n        }\n\n        override fun onRtspStatusDisconnected() {\n            if (DEBUG) Log.v(TAG, \"onRtspStatusDisconnected()\")\n            binding.apply {\n                tvStatusSurface.text = \"RTSP disconnected\"\n                bnStartStopSurface.text = \"Start RTSP\"\n                pbLoadingSurface.visibility = View.GONE\n                vShutterSurface.visibility = View.VISIBLE\n                pbLoadingSurface.isEnabled = false\n                llRtspParams.apply {\n                    
cbVideo.isEnabled = true\n                    cbAudio.isEnabled = true\n                    cbApplication.isEnabled = true\n                    cbDebug.isEnabled = true\n                    etRtspRequest.isEnabled = true\n                    etRtspUsername.isEnabled = true\n                    etRtspPassword.isEnabled = true\n                }\n                tgRotation.isEnabled = true\n            }\n            setKeepScreenOn(false)\n        }\n\n        override fun onRtspStatusFailedUnauthorized() {\n            if (DEBUG) Log.e(TAG, \"onRtspStatusFailedUnauthorized()\")\n            if (context == null) return\n            onRtspStatusDisconnected()\n            binding.apply {\n                tvStatusSurface.text = \"RTSP username or password invalid\"\n                pbLoadingSurface.visibility = View.GONE\n            }\n        }\n\n        override fun onRtspStatusFailed(message: String?) {\n            if (DEBUG) Log.e(TAG, \"onRtspStatusFailed(message='$message')\")\n            if (context == null) return\n            onRtspStatusDisconnected()\n            binding.apply {\n                tvStatusSurface.text = \"Error: $message\"\n                pbLoadingSurface.visibility = View.GONE\n            }\n        }\n\n        override fun onRtspFirstFrameRendered() {\n            if (DEBUG) Log.v(TAG, \"onRtspFirstFrameRendered()\")\n            Log.i(TAG, \"First frame rendered\")\n            binding.apply {\n                pbLoadingSurface.visibility = View.GONE\n                vShutterSurface.visibility = View.GONE\n                bnSnapshotSurface.isEnabled = true\n            }\n        }\n\n        override fun onRtspFrameSizeChanged(width: Int, height: Int) {\n            if (DEBUG) Log.v(TAG, \"onRtspFrameSizeChanged(width=$width, height=$height)\")\n            Log.i(TAG, \"Video resolution changed to ${width}x${height}\")\n            svVideoSurfaceResolution = Pair(width, height)\n            ConstraintSet().apply {\n                
clone(binding.csVideoSurface)\n                setDimensionRatio(binding.svVideoSurface.id, \"$width:$height\")\n                applyTo(binding.csVideoSurface)\n            }\n        }\n    }\n\n    private val rtspDataListener = object: RtspDataListener {\n        override fun onRtspDataApplicationDataReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {\n            val numBytesDump = min(length, 25) // dump max 25 bytes\n            Log.i(TAG, \"RTSP app data ($length bytes): ${data.toHexString(offset, offset + numBytesDump)}\")\n        }\n    }\n\n    private val rtspStatusImageListener = object: RtspStatusListener {\n        override fun onRtspStatusConnecting() {\n            if (DEBUG) Log.v(TAG, \"onRtspStatusConnecting()\")\n            binding.apply {\n                tvStatusImage.text = \"RTSP connecting\"\n                pbLoadingImage.visibility = View.VISIBLE\n                vShutterImage.visibility = View.VISIBLE\n            }\n        }\n\n        override fun onRtspStatusConnected() {\n            if (DEBUG) Log.v(TAG, \"onRtspStatusConnected()\")\n            binding.apply {\n                tvStatusImage.text = \"RTSP connected\"\n                bnStartStopImage.text = \"Stop RTSP\"\n            }\n            setKeepScreenOn(true)\n        }\n\n        override fun onRtspStatusDisconnecting() {\n            if (DEBUG) Log.v(TAG, \"onRtspStatusDisconnecting()\")\n            binding.apply {\n                tvStatusImage.text = \"RTSP disconnecting\"\n            }\n        }\n\n        override fun onRtspStatusDisconnected() {\n            if (DEBUG) Log.v(TAG, \"onRtspStatusDisconnected()\")\n            binding.apply {\n                tvStatusImage.text = \"RTSP disconnected\"\n                bnStartStopImage.text = \"Start RTSP\"\n                pbLoadingImage.visibility = View.GONE\n                vShutterImage.visibility = View.VISIBLE\n                pbLoadingImage.isEnabled = false\n            }\n            
setKeepScreenOn(false)\n        }\n\n        override fun onRtspStatusFailedUnauthorized() {\n            if (DEBUG) Log.e(TAG, \"onRtspStatusFailedUnauthorized()\")\n            if (context == null) return\n            onRtspStatusDisconnected()\n            binding.apply {\n                tvStatusImage.text = \"RTSP username or password invalid\"\n                pbLoadingImage.visibility = View.GONE\n            }\n        }\n\n        override fun onRtspStatusFailed(message: String?) {\n            if (DEBUG) Log.e(TAG, \"onRtspStatusFailed(message='$message')\")\n            if (context == null) return\n            onRtspStatusDisconnected()\n            binding.apply {\n                tvStatusImage.text = \"Error: $message\"\n                pbLoadingImage.visibility = View.GONE\n            }\n        }\n\n        override fun onRtspFirstFrameRendered() {\n            if (DEBUG) Log.v(TAG, \"onRtspFirstFrameRendered()\")\n            Log.i(TAG, \"First frame rendered\")\n            binding.apply {\n                vShutterImage.visibility = View.GONE\n                pbLoadingImage.visibility = View.GONE\n            }\n        }\n\n        override fun onRtspFrameSizeChanged(width: Int, height: Int) {\n            if (DEBUG) Log.v(TAG, \"onRtspFrameSizeChanged(width=$width, height=$height)\")\n            Log.i(TAG, \"Video resolution changed to ${width}x${height}\")\n            ConstraintSet().apply {\n                clone(binding.csVideoImage)\n                setDimensionRatio(binding.ivVideoImage.id, \"$width:$height\")\n                applyTo(binding.csVideoImage)\n            }\n        }\n    }\n\n    private fun getSnapshot(): Bitmap? 
{\n        if (DEBUG) Log.v(TAG, \"getSnapshot()\")\n        val surfaceBitmap = Bitmap.createBitmap(\n            svVideoSurfaceResolution.first,\n            svVideoSurfaceResolution.second,\n            Bitmap.Config.ARGB_8888\n        )\n        val lock = Object()\n        val success = AtomicBoolean(false)\n        val thread = HandlerThread(\"PixelCopyHelper\")\n        thread.start()\n        val sHandler = Handler(thread.looper)\n        val listener = PixelCopy.OnPixelCopyFinishedListener { copyResult ->\n            success.set(copyResult == PixelCopy.SUCCESS)\n            synchronized (lock) {\n                lock.notify()\n            }\n        }\n        synchronized (lock) {\n            PixelCopy.request(binding.svVideoSurface.holder.surface, surfaceBitmap, listener, sHandler)\n            lock.wait()\n        }\n        thread.quitSafely()\n        return if (success.get()) surfaceBitmap else null\n    }\n\n    override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?, savedInstanceState: Bundle?): View {\n        if (DEBUG) Log.v(TAG, \"onCreateView()\")\n\n        liveViewModel = ViewModelProvider(this)[LiveViewModel::class.java]\n        binding = FragmentLiveBinding.inflate(inflater, container, false)\n\n        binding.bnVideoDecoderGroup.check(binding.bnVideoDecoderHardware.id)\n\n        binding.svVideoSurface.setStatusListener(rtspStatusSurfaceListener)\n        binding.svVideoSurface.setDataListener(rtspDataListener)\n        binding.ivVideoImage.setStatusListener(rtspStatusImageListener)\n        binding.ivVideoImage.setDataListener(rtspDataListener)\n\n        liveViewModel.initEditTexts(\n            binding.llRtspParams.etRtspRequest,\n            binding.llRtspParams.etRtspUsername,\n            binding.llRtspParams.etRtspPassword\n        )\n\n        liveViewModel.rtspRequest.observe(viewLifecycleOwner) {\n            if (binding.llRtspParams.etRtspRequest.text.toString() != it)\n                
binding.llRtspParams.etRtspRequest.setText(it)\n        }\n        liveViewModel.rtspUsername.observe(viewLifecycleOwner) {\n            if (binding.llRtspParams.etRtspUsername.text.toString() != it)\n                binding.llRtspParams.etRtspUsername.setText(it)\n        }\n        liveViewModel.rtspPassword.observe(viewLifecycleOwner) {\n            if (binding.llRtspParams.etRtspPassword.text.toString() != it)\n                binding.llRtspParams.etRtspPassword.setText(it)\n        }\n\n        binding.cbVideoFpsStabilization.setOnCheckedChangeListener { _, isChecked ->\n            binding.svVideoSurface.videoFrameRateStabilization = isChecked\n        }\n\n        binding.cbExperimentalRewriteSps.setOnCheckedChangeListener { _, isChecked ->\n            binding.svVideoSurface.experimentalUpdateSpsFrameWithLowLatencyParams = isChecked\n        }\n\n        binding.bnRotate0.setOnClickListener {\n            binding.svVideoSurface.videoRotation = 0\n            binding.ivVideoImage.videoRotation = 0\n        }\n\n        binding.bnRotate90.setOnClickListener {\n            binding.svVideoSurface.videoRotation = 90\n            binding.ivVideoImage.videoRotation = 90\n        }\n\n        binding.bnRotate180.setOnClickListener {\n            binding.svVideoSurface.videoRotation = 180\n            binding.ivVideoImage.videoRotation = 180\n        }\n\n        binding.bnRotate270.setOnClickListener {\n            binding.svVideoSurface.videoRotation = 270\n            binding.ivVideoImage.videoRotation = 270\n        }\n\n        binding.bnRotate0.performClick()\n\n        binding.bnVideoDecoderHardware.setOnClickListener {\n            binding.svVideoSurface.videoDecoderType = VideoDecodeThread.DecoderType.HARDWARE\n            binding.ivVideoImage.videoDecoderType = VideoDecodeThread.DecoderType.HARDWARE\n        }\n\n        binding.bnVideoDecoderSoftware.setOnClickListener {\n            binding.svVideoSurface.videoDecoderType = 
VideoDecodeThread.DecoderType.SOFTWARE\n            binding.ivVideoImage.videoDecoderType = VideoDecodeThread.DecoderType.SOFTWARE\n        }\n\n        binding.bnStartStopSurface.setOnClickListener {\n            if (binding.svVideoSurface.isStarted()) {\n                binding.svVideoSurface.stop()\n                stopStatistics()\n            } else {\n                val uri = liveViewModel.rtspRequest.value!!.toUri()\n                binding.svVideoSurface.apply {\n                    init(\n                        uri,\n                        username = liveViewModel.rtspUsername.value,\n                        password = liveViewModel.rtspPassword.value,\n                        userAgent = \"rtsp-client-android\"\n                    )\n                    debug = binding.llRtspParams.cbDebug.isChecked\n                    videoFrameRateStabilization = binding.cbVideoFpsStabilization.isChecked\n                    start(\n                        requestVideo = binding.llRtspParams.cbVideo.isChecked,\n                        requestAudio = binding.llRtspParams.cbAudio.isChecked,\n                        requestApplication = binding.llRtspParams.cbApplication.isChecked\n                    )\n                }\n                startStatistics()\n            }\n        }\n\n        binding.bnStartStopImage.setOnClickListener {\n            if (binding.ivVideoImage.isStarted()) {\n                binding.ivVideoImage.stop()\n                stopStatistics()\n            } else {\n                val uri = liveViewModel.rtspRequest.value!!.toUri()\n                binding.ivVideoImage.apply {\n                    init(\n                        uri,\n                        username = liveViewModel.rtspUsername.value,\n                        password = liveViewModel.rtspPassword.value,\n                        userAgent = \"rtsp-client-android\"\n                    )\n                    debug = binding.llRtspParams.cbDebug.isChecked\n                    
onRtspImageBitmapListener = object : RtspImageView.RtspImageBitmapListener {\n                        override fun onRtspImageBitmapObtained(bitmap: Bitmap) {\n                            // TODO: You can send bitmap for processing\n                        }\n                    }\n                    start(\n                        requestVideo = binding.llRtspParams.cbVideo.isChecked,\n                        requestAudio = binding.llRtspParams.cbAudio.isChecked,\n                        requestApplication = binding.llRtspParams.cbApplication.isChecked\n                    )\n                }\n                startStatistics()\n            }\n        }\n\n        binding.bnSnapshotSurface.setOnClickListener {\n            val bitmap = getSnapshot()\n            // TODO Save snapshot to DCIM folder\n            if (bitmap != null) {\n                Toast.makeText(requireContext(), \"Snapshot succeeded ${bitmap.width}x${bitmap.height}\", Toast.LENGTH_LONG).show()\n            } else {\n                Toast.makeText(requireContext(), \"Snapshot failed\", Toast.LENGTH_LONG).show()\n            }\n        }\n        return binding.root\n    }\n\n    override fun onResume() {\n        if (DEBUG) Log.v(TAG, \"onResume()\")\n        super.onResume()\n        liveViewModel.loadParams(requireContext())\n    }\n\n    override fun onPause() {\n        val started = binding.svVideoSurface.isStarted()\n        if (DEBUG) Log.v(TAG, \"onPause(), started:$started\")\n        super.onPause()\n        liveViewModel.saveParams(requireContext())\n\n        if (started) {\n            binding.svVideoSurface.stop()\n            stopStatistics()\n        }\n    }\n\n    private fun startStatistics() {\n        if (DEBUG) Log.v(TAG, \"startStatistics()\")\n        Log.i(TAG, \"Start statistics\")\n        if (statisticsTimer == null) {\n            val task: TimerTask = object : TimerTask() {\n                override fun run() {\n                    val statistics = 
binding.svVideoSurface.statistics\n                    val text =\n                        \"Video decoder: ${statistics.videoDecoderType.toString().lowercase()} ${if (statistics.videoDecoderName.isNullOrEmpty()) \"\" else \"(${statistics.videoDecoderName})\"}\" +\n                        \"\\nVideo decoder latency: ${statistics.videoDecoderLatencyMsec} ms\" +\n                        \"\\nResolution: ${svVideoSurfaceResolution.first}x${svVideoSurfaceResolution.second}\"\n//                        \"\\nNetwork latency: \"\n\n//                    // Assume that difference between current Android time and camera time cannot be more than 5 sec.\n//                    // Otherwise time need to be synchronized on both devices.\n//                    text += if (statistics.networkLatencyMsec == -1) {\n//                        \"-\"\n//                    } else if (statistics.networkLatencyMsec < 0 || statistics.networkLatencyMsec > TimeUnit.SECONDS.toMillis(5)) {\n//                        \"[time out of sync]\"\n//                    } else {\n//                        \"${statistics.networkLatencyMsec} ms\"\n//                    }\n\n                    binding.tvStatistics.post {\n                        binding.tvStatistics.text = text\n                    }\n                }\n            }\n            statisticsTimer = Timer(\"${TAG}::Statistics\").apply {\n                schedule(task, 0, 1000)\n            }\n        }\n    }\n\n    private fun stopStatistics() {\n        if (DEBUG) Log.v(TAG, \"stopStatistics()\")\n        statisticsTimer?.apply {\n            Log.i(TAG, \"Stop statistics\")\n            cancel()\n        }\n        statisticsTimer = null\n    }\n\n    private fun setKeepScreenOn(enable: Boolean) {\n        if (DEBUG) Log.v(TAG, \"setKeepScreenOn(enable=$enable)\")\n        if (enable) {\n            activity?.apply {\n                window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON)\n                Log.i(TAG, \"Enabled keep 
screen on\")\n            }\n        } else {\n            activity?.apply {\n                window.clearFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON)\n                Log.i(TAG, \"Disabled keep screen on\")\n            }\n        }\n    }\n    companion object {\n        private val TAG: String = LiveFragment::class.java.simpleName\n        private const val DEBUG = true\n    }\n\n}\n"
  },
  {
    "path": "app/src/main/java/com/alexvas/rtsp/demo/live/LiveViewModel.kt",
    "content": "package com.alexvas.rtsp.demo.live\n\nimport android.annotation.SuppressLint\nimport android.content.Context\nimport android.text.Editable\nimport android.text.TextWatcher\nimport android.util.Log\nimport android.widget.EditText\nimport androidx.lifecycle.MutableLiveData\nimport androidx.lifecycle.ViewModel\n\nprivate const val RTSP_REQUEST_KEY = \"rtsp_request\"\nprivate const val RTSP_USERNAME_KEY = \"rtsp_username\"\nprivate const val RTSP_PASSWORD_KEY = \"rtsp_password\"\n\nprivate const val DEFAULT_RTSP_REQUEST = \"rtsp://10.0.1.3:554/axis-media/media.amp\"\nprivate const val DEFAULT_RTSP_USERNAME = \"\"\nprivate const val DEFAULT_RTSP_PASSWORD = \"\"\n\nprivate const val LIVE_PARAMS_FILENAME = \"live_params\"\n\n@SuppressLint(\"LogNotTimber\")\nclass LiveViewModel : ViewModel() {\n\n    val rtspRequest = MutableLiveData<String>().apply {\n        value = DEFAULT_RTSP_REQUEST\n    }\n    val rtspUsername = MutableLiveData<String>().apply {\n        value = DEFAULT_RTSP_USERNAME\n    }\n    val rtspPassword = MutableLiveData<String>().apply {\n        value = DEFAULT_RTSP_PASSWORD\n    }\n\n//    private val _text = MutableLiveData<String>().apply {\n//        value = \"This is live Fragment\"\n//    }\n//    val text: LiveData<String> = _text\n\n//    init {\n//        // Here you could use the ID to get the user info from the DB or remote server\n//        rtspRequest.value = \"rtsp://10.0.1.3:554/axis-media/media.amp\"\n//    }\n\n    fun loadParams(context: Context) {\n        if (DEBUG) Log.v(TAG, \"loadParams()\")\n        val pref = context.getSharedPreferences(LIVE_PARAMS_FILENAME, Context.MODE_PRIVATE)\n        try {\n            rtspRequest.setValue(pref.getString(RTSP_REQUEST_KEY, DEFAULT_RTSP_REQUEST))\n        } catch (e: ClassCastException) {\n            e.printStackTrace()\n        }\n        try {\n            rtspUsername.setValue(pref.getString(RTSP_USERNAME_KEY, DEFAULT_RTSP_USERNAME))\n        } catch (e: ClassCastException) 
{\n            e.printStackTrace()\n        }\n        try {\n            rtspPassword.setValue(pref.getString(RTSP_PASSWORD_KEY, DEFAULT_RTSP_PASSWORD))\n        } catch (e: ClassCastException) {\n            e.printStackTrace()\n        }\n    }\n\n    fun saveParams(context: Context) {\n        if (DEBUG) Log.v(TAG, \"saveParams()\")\n        context.getSharedPreferences(LIVE_PARAMS_FILENAME, Context.MODE_PRIVATE).edit().apply {\n            putString(RTSP_REQUEST_KEY, rtspRequest.value)\n            putString(RTSP_USERNAME_KEY, rtspUsername.value)\n            putString(RTSP_PASSWORD_KEY, rtspPassword.value)\n            apply()\n        }\n    }\n\n    fun initEditTexts(etRtspRequest: EditText, etRtspUsername: EditText, etRtspPassword: EditText) {\n        if (DEBUG) Log.v(TAG, \"initEditTexts()\")\n        etRtspRequest.addTextChangedListener(object : TextWatcher {\n            override fun afterTextChanged(s: Editable?) {\n            }\n            override fun beforeTextChanged(s: CharSequence?, start: Int, count: Int, after: Int) {\n            }\n            override fun onTextChanged(s: CharSequence?, start: Int, before: Int, count: Int) {\n                val text = s.toString()\n                if (text != rtspRequest.value) {\n                    rtspRequest.value = text\n                }\n            }\n        })\n        etRtspUsername.addTextChangedListener(object : TextWatcher {\n            override fun afterTextChanged(s: Editable?) 
{\n            }\n            override fun beforeTextChanged(s: CharSequence?, start: Int, count: Int, after: Int) {\n            }\n            override fun onTextChanged(s: CharSequence?, start: Int, before: Int, count: Int) {\n                val text = s.toString()\n                if (text != rtspUsername.value) {\n                    rtspUsername.value = text\n                }\n            }\n        })\n        etRtspPassword.addTextChangedListener(object : TextWatcher {\n            override fun afterTextChanged(s: Editable?) {\n            }\n            override fun beforeTextChanged(s: CharSequence?, start: Int, count: Int, after: Int) {\n            }\n            override fun onTextChanged(s: CharSequence?, start: Int, before: Int, count: Int) {\n                val text = s.toString()\n                if (text != rtspPassword.value) {\n                    rtspPassword.value = text\n                }\n            }\n        })\n    }\n\n    companion object {\n        private val TAG: String = LiveViewModel::class.java.simpleName\n        private const val DEBUG = false\n\n\n    }\n\n}\n"
  },
  {
    "path": "app/src/main/java/com/alexvas/rtsp/demo/live/RawFragment.kt",
    "content": "package com.alexvas.rtsp.demo.live\n\nimport android.annotation.SuppressLint\nimport android.net.Uri\nimport android.os.Bundle\nimport android.util.Log\nimport android.view.LayoutInflater\nimport android.view.View\nimport android.view.ViewGroup\nimport androidx.fragment.app.Fragment\nimport androidx.lifecycle.ViewModelProvider\nimport com.alexvas.rtsp.RtspClient\nimport com.alexvas.rtsp.demo.databinding.FragmentRawBinding\nimport com.alexvas.rtsp.widget.toHexString\nimport com.alexvas.utils.NetUtils\nimport kotlinx.coroutines.Runnable\nimport java.net.Socket\nimport java.util.Timer\nimport java.util.TimerTask\nimport java.util.concurrent.atomic.AtomicBoolean\nimport kotlin.math.min\n\n@SuppressLint(\"LogNotTimber\")\nclass RawFragment : Fragment() {\n\n    private lateinit var binding: FragmentRawBinding\n    private lateinit var liveViewModel: LiveViewModel\n\n    private var statisticsTimer: Timer? = null\n    private val rtspStopped = AtomicBoolean(true)\n\n    private var rtspVideoBytesReceived: Long = 0\n    private var rtspVideoFramesReceived: Long = 0\n    private var rtspAudioBytesReceived: Long = 0\n    private var rtspAudioSamplesReceived: Long = 0\n    private var rtspApplicationBytesReceived: Long = 0\n    private var rtspApplicationSamplesReceived: Long = 0\n\n    private val rtspClientListener = object: RtspClient.RtspClientListener {\n        override fun onRtspConnecting() {\n            if (DEBUG) Log.v(TAG, \"onRtspConnecting()\")\n            rtspVideoBytesReceived = 0\n            rtspVideoFramesReceived = 0\n            rtspAudioBytesReceived = 0\n            rtspAudioSamplesReceived = 0\n            rtspApplicationBytesReceived = 0\n            rtspApplicationSamplesReceived = 0\n\n            binding.apply {\n                root.post {\n                    updateStatistics()\n                    llRtspParams.etRtspRequest.isEnabled = false\n                    llRtspParams.etRtspUsername.isEnabled = false\n                    
llRtspParams.etRtspPassword.isEnabled = false\n                    llRtspParams.cbVideo.isEnabled = false\n                    llRtspParams.cbAudio.isEnabled = false\n                    llRtspParams.cbApplication.isEnabled = false\n                    llRtspParams.cbDebug.isEnabled = false\n                    tvStatusSurface.text = \"RTSP connecting\"\n                    bnStartStop.text = \"Stop RTSP\"\n                }\n            }\n        }\n\n        override fun onRtspConnected(sdpInfo: RtspClient.SdpInfo) {\n            if (DEBUG) Log.v(TAG, \"onRtspConnected()\")\n            binding.apply {\n                root.post {\n                    tvStatusSurface.text = \"RTSP connected\"\n                }\n            }\n            startStatistics()\n        }\n\n        override fun onRtspVideoNalUnitReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {\n            val numBytesDump = min(length, 25) // dump max 25 bytes\n            Log.i(TAG, \"RTSP video data ($length bytes): ${data.toHexString(offset, offset + numBytesDump)}\")\n            rtspVideoBytesReceived += length\n            rtspVideoFramesReceived++\n        }\n\n        override fun onRtspAudioSampleReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {\n            val numBytesDump = min(length, 25) // dump max 25 bytes\n            Log.i(TAG, \"RTSP audio data ($length bytes): ${data.toHexString(offset, offset + numBytesDump)}\")\n            rtspAudioBytesReceived += length\n            rtspAudioSamplesReceived++\n        }\n\n        override fun onRtspApplicationDataReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {\n            val numBytesDump = min(length, 25) // dump max 25 bytes\n            Log.i(TAG, \"RTSP app data ($length bytes): ${data.toHexString(offset, offset + numBytesDump)}\")\n            rtspApplicationBytesReceived += length\n            rtspApplicationSamplesReceived++\n        }\n\n        override fun 
onRtspDisconnecting() {\n            if (DEBUG) Log.v(TAG, \"onRtspDisconnecting()\")\n            binding.apply {\n                root.post {\n                    tvStatusSurface.text = \"RTSP disconnecting\"\n                }\n            }\n            stopStatistics()\n        }\n\n        override fun onRtspDisconnected() {\n            if (DEBUG) Log.v(TAG, \"onRtspDisconnected()\")\n            binding.apply {\n                root.post {\n                    tvStatusSurface.text = \"RTSP disconnected\"\n                    bnStartStop.text = \"Start RTSP\"\n                    llRtspParams.cbVideo.isEnabled = true\n                    llRtspParams.cbAudio.isEnabled = true\n                    llRtspParams.cbApplication.isEnabled = true\n                    llRtspParams.cbDebug.isEnabled = true\n                    llRtspParams.etRtspRequest.isEnabled = true\n                    llRtspParams.etRtspUsername.isEnabled = true\n                    llRtspParams.etRtspPassword.isEnabled = true\n                }\n            }\n        }\n\n        override fun onRtspFailedUnauthorized() {\n            if (DEBUG) Log.e(TAG, \"onRtspFailedUnauthorized()\")\n            Log.e(TAG, \"RTSP failed unauthorized\")\n            if (context == null) return\n            onRtspDisconnected()\n            binding.apply {\n                root.post {\n                    tvStatusSurface.text = \"RTSP username or password invalid\"\n                }\n            }\n        }\n\n        override fun onRtspFailed(message: String?) 
{\n            if (DEBUG) Log.e(TAG, \"onRtspFailed(message='$message')\")\n            Log.e(TAG, \"RTSP failed with message '$message'\")\n            if (context == null) return\n            onRtspDisconnected()\n            binding.apply {\n                root.post {\n                    tvStatusSurface.text = \"Error: $message\"\n                }\n            }\n        }\n    }\n\n    private val threadRunnable = Runnable {\n        Log.i(TAG, \"Thread started\")\n        var socket: Socket? = null\n        try {\n            val uri = Uri.parse(liveViewModel.rtspRequest.value)\n            val port = if (uri.port == -1) DEFAULT_RTSP_PORT else uri.port\n            socket = NetUtils.createSocketAndConnect(uri.host!!, port, 5000)\n\n            val rtspClient =\n                RtspClient.Builder(\n                    socket,\n                    uri.toString(),\n                    rtspStopped,\n                    rtspClientListener\n                )\n                    .requestVideo(binding.llRtspParams.cbVideo.isChecked)\n                    .requestAudio(binding.llRtspParams.cbAudio.isChecked)\n                    .requestApplication(binding.llRtspParams.cbApplication.isChecked)\n                    .withDebug(binding.llRtspParams.cbDebug.isChecked)\n                    .withUserAgent(\"rtsp-client-android\")\n                    .withCredentials(\n                        binding.llRtspParams.etRtspUsername.text.toString(),\n                        binding.llRtspParams.etRtspPassword.text.toString())\n                    .build()\n\n            rtspClient.execute()\n        } catch (e: Exception) {\n            e.printStackTrace()\n            binding.root.post { rtspClientListener.onRtspFailed(e.message) }\n        } finally {\n            NetUtils.closeSocket(socket)\n        }\n        Log.i(TAG, \"Thread stopped\")\n    }\n\n    override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?, savedInstanceState: Bundle?): View {\n        
if (DEBUG) Log.v(TAG, \"onCreateView()\")\n\n        liveViewModel = ViewModelProvider(this)[LiveViewModel::class.java]\n        binding = FragmentRawBinding.inflate(inflater, container, false)\n\n        liveViewModel.initEditTexts(\n            binding.llRtspParams.etRtspRequest,\n            binding.llRtspParams.etRtspUsername,\n            binding.llRtspParams.etRtspPassword\n        )\n        liveViewModel.rtspRequest.observe(viewLifecycleOwner) {\n            if (binding.llRtspParams.etRtspRequest.text.toString() != it)\n                binding.llRtspParams.etRtspRequest.setText(it)\n        }\n        liveViewModel.rtspUsername.observe(viewLifecycleOwner) {\n            if (binding.llRtspParams.etRtspUsername.text.toString() != it)\n                binding.llRtspParams.etRtspUsername.setText(it)\n        }\n        liveViewModel.rtspPassword.observe(viewLifecycleOwner) {\n            if (binding.llRtspParams.etRtspPassword.text.toString() != it)\n                binding.llRtspParams.etRtspPassword.setText(it)\n        }\n\n        binding.bnStartStop.setOnClickListener {\n            if (DEBUG) Log.v(TAG, \"onClick() rtspStopped=${rtspStopped.get()}\")\n            if (rtspStopped.get()) {\n                rtspStopped.set(false)\n                Log.i(TAG, \"Thread starting...\")\n                Thread(threadRunnable).apply {\n                    name = \"RTSP raw thread\"\n                    start()\n                }\n            } else {\n                Log.i(TAG, \"Thread stopping...\")\n                rtspStopped.set(true)\n            }\n        }\n        return binding.root\n    }\n\n    override fun onResume() {\n        if (DEBUG) Log.v(TAG, \"onResume()\")\n        super.onResume()\n        liveViewModel.loadParams(requireContext())\n    }\n\n    override fun onPause() {\n        if (DEBUG) Log.v(TAG, \"onPause()\")\n        super.onPause()\n        liveViewModel.saveParams(requireContext())\n\n        stopStatistics()\n        
rtspStopped.set(true)\n    }\n\n    private fun updateStatistics() {\n//      if (DEBUG) Log.v(TAG, \"updateStatistics()\")\n        binding.apply {\n            tvStatisticsVideo.text = \"Video: $rtspVideoBytesReceived bytes, $rtspVideoFramesReceived frames\"\n            tvStatisticsAudio.text = \"Audio: $rtspAudioBytesReceived bytes, $rtspAudioSamplesReceived samples\"\n            tvStatisticsApplication.text = \"Application: $rtspApplicationBytesReceived bytes, $rtspApplicationSamplesReceived samples\"\n        }\n    }\n\n    private fun startStatistics() {\n        if (DEBUG) Log.v(TAG, \"startStatistics()\")\n        Log.i(TAG, \"Start statistics\")\n        if (statisticsTimer == null) {\n            val task: TimerTask = object : TimerTask() {\n                override fun run() {\n                    binding.root.post {\n                        updateStatistics()\n                    }\n                }\n            }\n            statisticsTimer = Timer(\"${TAG}::Statistics\").apply {\n                schedule(task, 0, 1000)\n            }\n        }\n    }\n\n    private fun stopStatistics() {\n        if (DEBUG) Log.v(TAG, \"stopStatistics()\")\n        statisticsTimer?.apply {\n            Log.i(TAG, \"Stop statistics\")\n            cancel()\n        }\n        statisticsTimer = null\n    }\n\n    companion object {\n        private val TAG: String = RawFragment::class.java.simpleName\n        private const val DEBUG = true\n\n        private const val DEFAULT_RTSP_PORT = 554\n    }\n\n}\n"
  },
  {
    "path": "app/src/main/res/drawable/ic_camera_black_24dp.xml",
    "content": "<vector xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    android:height=\"24dp\"\n    android:width=\"24dp\"\n    android:viewportWidth=\"24\"\n    android:viewportHeight=\"24\">\n    <path android:fillColor=\"#000\" android:pathData=\"M4,4H7L9,2H15L17,4H20A2,2 0 0,1 22,6V18A2,2 0 0,1 20,20H4A2,2 0 0,1 2,18V6A2,2 0 0,1 4,4M12,7A5,5 0 0,0 7,12A5,5 0 0,0 12,17A5,5 0 0,0 17,12A5,5 0 0,0 12,7M12,9A3,3 0 0,1 15,12A3,3 0 0,1 12,15A3,3 0 0,1 9,12A3,3 0 0,1 12,9Z\" />\n</vector>"
  },
  {
    "path": "app/src/main/res/drawable/ic_cctv_black_24dp.xml",
    "content": "<vector xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    android:height=\"24dp\"\n    android:width=\"24dp\"\n    android:viewportWidth=\"24\"\n    android:viewportHeight=\"24\">\n    <path android:fillColor=\"#000\" android:pathData=\"M18.15,4.94C17.77,4.91 17.37,5 17,5.2L8.35,10.2C7.39,10.76 7.07,12 7.62,12.94L9.12,15.53C9.67,16.5 10.89,16.82 11.85,16.27L13.65,15.23C13.92,15.69 14.32,16.06 14.81,16.27V18.04C14.81,19.13 15.7,20 16.81,20H22V18.04H16.81V16.27C17.72,15.87 18.31,14.97 18.31,14C18.31,13.54 18.19,13.11 17.97,12.73L20.5,11.27C21.47,10.71 21.8,9.5 21.24,8.53L19.74,5.94C19.4,5.34 18.79,5 18.15,4.94M6.22,13.17L2,13.87L2.75,15.17L4.75,18.63L5.5,19.93L8.22,16.63L6.22,13.17Z\" />\n</vector>"
  },
  {
    "path": "app/src/main/res/drawable/ic_launcher_background.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<vector xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    android:width=\"108dp\"\n    android:height=\"108dp\"\n    android:viewportWidth=\"108\"\n    android:viewportHeight=\"108\">\n    <path\n        android:fillColor=\"#3DDC84\"\n        android:pathData=\"M0,0h108v108h-108z\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M9,0L9,108\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M19,0L19,108\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M29,0L29,108\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M39,0L39,108\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M49,0L49,108\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M59,0L59,108\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M69,0L69,108\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M79,0L79,108\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M89,0L89,108\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        
android:fillColor=\"#00000000\"\n        android:pathData=\"M99,0L99,108\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M0,9L108,9\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M0,19L108,19\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M0,29L108,29\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M0,39L108,39\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M0,49L108,49\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M0,59L108,59\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M0,69L108,69\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M0,79L108,79\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M0,89L108,89\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M0,99L108,99\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        
android:pathData=\"M19,29L89,29\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M19,39L89,39\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M19,49L89,49\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M19,59L89,59\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M19,69L89,69\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M19,79L89,79\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M29,19L29,89\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M39,19L39,89\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M49,19L49,89\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M59,19L59,89\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M69,19L69,89\"\n        android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n    <path\n        android:fillColor=\"#00000000\"\n        android:pathData=\"M79,19L79,89\"\n       
 android:strokeWidth=\"0.8\"\n        android:strokeColor=\"#33FFFFFF\" />\n</vector>\n"
  },
  {
    "path": "app/src/main/res/drawable/ic_launcher_foreground.xml",
    "content": "<vector xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    android:height=\"16dp\"\n    android:width=\"16dp\"\n    android:viewportWidth=\"24\"\n    android:viewportHeight=\"24\">\n    <path android:fillColor=\"#ffffff\" android:pathData=\"M18.15,4.94C17.77,4.91 17.37,5 17,5.2L8.35,10.2C7.39,10.76 7.07,12 7.62,12.94L9.12,15.53C9.67,16.5 10.89,16.82 11.85,16.27L13.65,15.23C13.92,15.69 14.32,16.06 14.81,16.27V18.04C14.81,19.13 15.7,20 16.81,20H22V18.04H16.81V16.27C17.72,15.87 18.31,14.97 18.31,14C18.31,13.54 18.19,13.11 17.97,12.73L20.5,11.27C21.47,10.71 21.8,9.5 21.24,8.53L19.74,5.94C19.4,5.34 18.79,5 18.15,4.94M6.22,13.17L2,13.87L2.75,15.17L4.75,18.63L5.5,19.93L8.22,16.63L6.22,13.17Z\" />\n</vector>"
  },
  {
    "path": "app/src/main/res/drawable/ic_text_subject_black_24dp.xml",
    "content": "<!-- drawable/text_subject.xml -->\r\n<vector xmlns:android=\"http://schemas.android.com/apk/res/android\"\r\n    android:height=\"24dp\"\r\n    android:width=\"24dp\"\r\n    android:viewportWidth=\"24\"\r\n    android:viewportHeight=\"24\">\r\n    <path android:fillColor=\"#000\" android:pathData=\"M4,5H20V7H4V5M4,9H20V11H4V9M4,13H20V15H4V13M4,17H14V19H4V17Z\" />\r\n</vector>"
  },
  {
    "path": "app/src/main/res/layout/activity_main.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<RelativeLayout\n    xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    xmlns:app=\"http://schemas.android.com/apk/res-auto\"\n    android:id=\"@+id/container\"\n    android:layout_width=\"match_parent\"\n    android:layout_height=\"match_parent\">\n\n    <fragment\n        android:layout_above=\"@+id/nav_view\"\n        android:id=\"@+id/nav_host_fragment\"\n        android:name=\"androidx.navigation.fragment.NavHostFragment\"\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"match_parent\"\n        app:defaultNavHost=\"true\"\n        app:navGraph=\"@navigation/mobile_navigation\" />\n\n    <com.google.android.material.bottomnavigation.BottomNavigationView\n        android:id=\"@+id/nav_view\"\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"wrap_content\"\n        android:background=\"?android:attr/windowBackground\"\n        android:layout_alignParentBottom=\"true\"\n        app:menu=\"@menu/bottom_nav_menu\" />\n\n</RelativeLayout>"
  },
  {
    "path": "app/src/main/res/layout/fragment_live.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<ScrollView\n    xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    xmlns:tools=\"http://schemas.android.com/tools\"\n    xmlns:app=\"http://schemas.android.com/apk/res-auto\"\n    android:layout_width=\"match_parent\"\n    android:layout_height=\"match_parent\"\n    tools:context=\".live.LiveFragment\">\n\n    <LinearLayout\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"wrap_content\"\n        android:orientation=\"vertical\"\n        android:padding=\"20dp\">\n\n        <include\n            android:id=\"@+id/llRtspParams\"\n            layout=\"@layout/layout_rtsp_params\"/>\n\n        <CheckBox\n            android:id=\"@+id/cbVideoFpsStabilization\"\n            android:text=\"Video frame rate stabilization.\\nAdd delay up to 100ms for smoother playback. RtspSurfaceView only.\"\n            android:layout_marginStart=\"5dp\"\n            android:checked=\"false\"\n            android:layout_width=\"wrap_content\"\n            android:layout_height=\"wrap_content\" />\n\n        <CheckBox\n            android:id=\"@+id/cbExperimentalRewriteSps\"\n            android:text=\"Rewrite SPS frames w/ low-latency params (EXPERIMENTAL)\"\n            android:checked=\"false\"\n            android:layout_marginStart=\"5dp\"\n            android:layout_width=\"wrap_content\"\n            android:layout_height=\"wrap_content\" />\n\n        <Button\n            android:layout_marginTop=\"40dp\"\n            android:id=\"@+id/bnStartStopSurface\"\n            android:layout_width=\"wrap_content\"\n            android:layout_height=\"wrap_content\"\n            android:layout_gravity=\"center\"\n            android:text=\"Start\" />\n\n\n        <!-- RtspSurfaceView -->\n\n        <TextView\n            android:layout_width=\"wrap_content\"\n            android:layout_height=\"match_parent\"\n            android:paddingBottom=\"5dp\"\n            
android:text=\"RtspSurfaceView:\"/>\n\n        <androidx.constraintlayout.widget.ConstraintLayout\n            android:id=\"@+id/csVideoSurface\"\n            android:layout_width=\"match_parent\"\n            android:layout_height=\"match_parent\"\n            android:animateLayoutChanges=\"true\">\n            <com.alexvas.rtsp.widget.RtspSurfaceView\n                android:layout_width=\"0dp\"\n                android:layout_height=\"0dp\"\n                android:id=\"@+id/svVideoSurface\"\n                app:layout_constraintTop_toTopOf=\"parent\"\n                app:layout_constraintBottom_toBottomOf=\"parent\"\n                app:layout_constraintStart_toStartOf=\"parent\"\n                app:layout_constraintEnd_toEndOf=\"parent\"\n                app:layout_constraintDimensionRatio=\"16:9\"/>\n            <View\n                android:layout_width=\"0dp\"\n                android:layout_height=\"0dp\"\n                android:background=\"@android:color/black\"\n                app:layout_constraintTop_toTopOf=\"parent\"\n                app:layout_constraintBottom_toBottomOf=\"parent\"\n                app:layout_constraintStart_toStartOf=\"parent\"\n                app:layout_constraintEnd_toEndOf=\"parent\"\n                android:id=\"@+id/vShutterSurface\" />\n            <ProgressBar\n                android:layout_width=\"wrap_content\"\n                android:layout_height=\"wrap_content\"\n                android:layout_gravity=\"center\"\n                android:visibility=\"gone\"\n                android:id=\"@+id/pbLoadingSurface\"\n                app:layout_constraintTop_toTopOf=\"parent\"\n                app:layout_constraintBottom_toBottomOf=\"parent\"\n                app:layout_constraintStart_toStartOf=\"parent\"\n                app:layout_constraintEnd_toEndOf=\"parent\"/>\n        </androidx.constraintlayout.widget.ConstraintLayout>\n\n        <!-- Debug statistics -->\n        <TextView\n            
android:layout_width=\"wrap_content\"\n            android:layout_height=\"wrap_content\"\n            android:layout_marginTop=\"2dp\"\n            android:id=\"@+id/tvStatistics\"\n            android:textSize=\"12sp\"/>\n\n        <LinearLayout\n            android:layout_width=\"match_parent\"\n            android:layout_height=\"wrap_content\"\n            android:paddingStart=\"5dp\"\n            android:paddingEnd=\"5dp\"\n            android:orientation=\"horizontal\"\n            android:gravity=\"center_vertical\">\n            <Button\n                android:layout_width=\"wrap_content\"\n                android:layout_height=\"wrap_content\"\n                style=\"@style/Widget.Material3.Button.TextButton.Icon\"\n                android:id=\"@+id/bnSnapshotSurface\"\n                android:enabled=\"false\"\n                android:text=\"Snapshot\"\n                app:icon=\"@drawable/ic_camera_black_24dp\"/>\n            <TextView\n                android:layout_width=\"match_parent\"\n                android:layout_height=\"wrap_content\"\n                android:id=\"@+id/tvStatusSurface\"\n                android:gravity=\"end\"/>\n        </LinearLayout>\n\n\n        <!-- RtspImageView -->\n\n        <Button\n            android:layout_marginTop=\"30dp\"\n            android:id=\"@+id/bnStartStopImage\"\n            android:layout_width=\"wrap_content\"\n            android:layout_height=\"wrap_content\"\n            android:layout_gravity=\"center\"\n            android:text=\"Start\" />\n\n        <TextView\n            android:layout_width=\"wrap_content\"\n            android:layout_height=\"match_parent\"\n            android:paddingBottom=\"5dp\"\n            android:text=\"RtspImageView:\"/>\n\n        <androidx.constraintlayout.widget.ConstraintLayout\n            android:id=\"@+id/csVideoImage\"\n            android:layout_width=\"match_parent\"\n            android:layout_height=\"match_parent\"\n            
android:animateLayoutChanges=\"true\">\n            <com.alexvas.rtsp.widget.RtspImageView\n                android:layout_width=\"0dp\"\n                android:layout_height=\"0dp\"\n                android:scaleType=\"fitXY\"\n                android:id=\"@+id/ivVideoImage\"\n                app:layout_constraintTop_toTopOf=\"parent\"\n                app:layout_constraintBottom_toBottomOf=\"parent\"\n                app:layout_constraintStart_toStartOf=\"parent\"\n                app:layout_constraintEnd_toEndOf=\"parent\"\n                app:layout_constraintDimensionRatio=\"16:9\"/>\n            <View\n                android:layout_width=\"0dp\"\n                android:layout_height=\"0dp\"\n                android:background=\"@android:color/black\"\n                android:id=\"@+id/vShutterImage\"\n                app:layout_constraintTop_toTopOf=\"parent\"\n                app:layout_constraintBottom_toBottomOf=\"parent\"\n                app:layout_constraintStart_toStartOf=\"parent\"\n                app:layout_constraintEnd_toEndOf=\"parent\" />\n            <ProgressBar\n                android:layout_width=\"wrap_content\"\n                android:layout_height=\"wrap_content\"\n                android:layout_gravity=\"center\"\n                android:visibility=\"gone\"\n                android:id=\"@+id/pbLoadingImage\"\n                app:layout_constraintTop_toTopOf=\"parent\"\n                app:layout_constraintBottom_toBottomOf=\"parent\"\n                app:layout_constraintStart_toStartOf=\"parent\"\n                app:layout_constraintEnd_toEndOf=\"parent\" />\n        </androidx.constraintlayout.widget.ConstraintLayout>\n\n        <LinearLayout\n            android:layout_width=\"match_parent\"\n            android:layout_height=\"wrap_content\"\n            android:padding=\"5dp\"\n            android:orientation=\"horizontal\"\n            android:gravity=\"center_vertical\">\n            <TextView\n                
android:layout_width=\"match_parent\"\n                android:layout_height=\"wrap_content\"\n                android:id=\"@+id/tvStatusImage\"\n                android:gravity=\"end\"/>\n        </LinearLayout>\n\n        <LinearLayout\n            android:layout_width=\"wrap_content\"\n            android:layout_height=\"wrap_content\"\n            android:layout_marginTop=\"10dp\"\n            android:layout_gravity=\"center\"\n            android:orientation=\"vertical\">\n\n            <TextView\n                android:layout_width=\"wrap_content\"\n                android:layout_height=\"wrap_content\"\n                android:text=\"Video decoder\" />\n\n            <com.google.android.material.button.MaterialButtonToggleGroup\n                android:layout_width=\"wrap_content\"\n                android:layout_height=\"wrap_content\"\n                android:id=\"@+id/bnVideoDecoderGroup\"\n                android:layout_gravity=\"center\"\n                app:singleSelection=\"true\">\n\n                <Button\n                    android:id=\"@+id/bnVideoDecoderHardware\"\n                    style=\"?attr/materialButtonOutlinedStyle\"\n                    android:layout_width=\"wrap_content\"\n                    android:layout_height=\"wrap_content\"\n                    android:text=\"Hardware\" />\n\n                <Button\n                    android:id=\"@+id/bnVideoDecoderSoftware\"\n                    style=\"?attr/materialButtonOutlinedStyle\"\n                    android:layout_width=\"wrap_content\"\n                    android:layout_height=\"wrap_content\"\n                    android:text=\"Software\" />\n            </com.google.android.material.button.MaterialButtonToggleGroup>\n        </LinearLayout>\n\n        <LinearLayout\n            android:layout_width=\"wrap_content\"\n            android:layout_height=\"wrap_content\"\n            android:layout_gravity=\"center\"\n            android:layout_marginTop=\"10dp\"\n            
android:orientation=\"vertical\">\n\n            <TextView\n                android:layout_width=\"wrap_content\"\n                android:layout_height=\"wrap_content\"\n                android:text=\"Rotation\" />\n\n            <com.google.android.material.button.MaterialButtonToggleGroup\n                android:layout_width=\"wrap_content\"\n                android:layout_height=\"wrap_content\"\n                android:layout_gravity=\"center\"\n                android:id=\"@+id/tgRotation\"\n                app:singleSelection=\"true\">\n\n                <Button\n                    android:id=\"@+id/bnRotate0\"\n                    style=\"?attr/materialButtonOutlinedStyle\"\n                    android:layout_width=\"wrap_content\"\n                    android:layout_height=\"wrap_content\"\n                    android:text=\"0\" />\n\n                <Button\n                    android:id=\"@+id/bnRotate90\"\n                    style=\"?attr/materialButtonOutlinedStyle\"\n                    android:layout_width=\"wrap_content\"\n                    android:layout_height=\"wrap_content\"\n                    android:text=\"90\" />\n\n                <Button\n                    android:id=\"@+id/bnRotate180\"\n                    style=\"?attr/materialButtonOutlinedStyle\"\n                    android:layout_width=\"wrap_content\"\n                    android:layout_height=\"wrap_content\"\n                    android:text=\"180\" />\n\n                <Button\n                    android:id=\"@+id/bnRotate270\"\n                    style=\"?attr/materialButtonOutlinedStyle\"\n                    android:layout_width=\"wrap_content\"\n                    android:layout_height=\"wrap_content\"\n                    android:text=\"270\" />\n            </com.google.android.material.button.MaterialButtonToggleGroup>\n        </LinearLayout>\n\n    </LinearLayout>\n\n</ScrollView>"
  },
  {
    "path": "app/src/main/res/layout/fragment_logs.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<HorizontalScrollView xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    android:layout_width=\"match_parent\"\n    android:layout_height=\"match_parent\"\n    android:layout_margin=\"8dp\"\n    android:layout_weight=\"1\">\n\n    <androidx.recyclerview.widget.RecyclerView\n        android:id=\"@+id/log_recycler\"\n        android:layout_width=\"wrap_content\"\n        android:layout_height=\"match_parent\"\n        android:scrollbars=\"vertical\" />\n\n</HorizontalScrollView>\n"
  },
  {
    "path": "app/src/main/res/layout/fragment_raw.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<ScrollView\n    xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    xmlns:tools=\"http://schemas.android.com/tools\"\n    android:layout_width=\"match_parent\"\n    android:layout_height=\"match_parent\"\n    tools:context=\".live.LiveFragment\">\n\n    <LinearLayout\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"wrap_content\"\n        android:orientation=\"vertical\"\n        android:padding=\"20dp\">\n\n        <include\n            android:id=\"@+id/llRtspParams\"\n            layout=\"@layout/layout_rtsp_params\" />\n\n        <Button\n            android:layout_marginTop=\"10dp\"\n            android:id=\"@+id/bnStartStop\"\n            android:layout_width=\"wrap_content\"\n            android:layout_height=\"wrap_content\"\n            android:layout_gravity=\"center\"\n            android:layout_marginBottom=\"10dp\"\n            android:text=\"Start\" />\n\n        <TextView\n            android:layout_width=\"match_parent\"\n            android:layout_height=\"wrap_content\"\n            android:id=\"@+id/tvStatusSurface\"\n            android:gravity=\"end\"/>\n\n        <!-- Debug statistics -->\n        <TextView\n            android:id=\"@+id/tvStatisticsVideo\"\n            android:layout_width=\"wrap_content\"\n            android:layout_height=\"wrap_content\"\n            android:layout_marginTop=\"30dp\"\n            android:padding=\"5dp\"\n            android:textStyle=\"normal|bold\"\n            android:textSize=\"16sp\"/>\n        <TextView\n            android:id=\"@+id/tvStatisticsAudio\"\n            android:layout_width=\"wrap_content\"\n            android:layout_height=\"wrap_content\"\n            android:padding=\"5dp\"\n            android:textStyle=\"normal|bold\"\n            android:textSize=\"16sp\"/>\n        <TextView\n            android:id=\"@+id/tvStatisticsApplication\"\n            
android:layout_width=\"wrap_content\"\n            android:layout_height=\"wrap_content\"\n            android:padding=\"5dp\"\n            android:textStyle=\"normal|bold\"\n            android:textSize=\"16sp\"/>\n\n    </LinearLayout>\n\n</ScrollView>\n"
  },
  {
    "path": "app/src/main/res/layout/layout_rtsp_params.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<LinearLayout\n    xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    xmlns:app=\"http://schemas.android.com/apk/res-auto\"\n    android:layout_width=\"match_parent\"\n    android:layout_height=\"match_parent\"\n    android:orientation=\"vertical\">\n\n    <com.google.android.material.textfield.TextInputLayout\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"wrap_content\"\n        android:layout_marginTop=\"20dp\">\n        <EditText\n            android:id=\"@+id/etRtspRequest\"\n            android:layout_width=\"match_parent\"\n            android:layout_height=\"wrap_content\"\n            android:hint=\"RTSP request\"\n            android:inputType=\"textUri\"/>\n    </com.google.android.material.textfield.TextInputLayout>\n\n    <LinearLayout\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"wrap_content\"\n        android:layout_marginTop=\"20dp\"\n        android:orientation=\"horizontal\">\n\n        <com.google.android.material.textfield.TextInputLayout\n            android:layout_width=\"wrap_content\"\n            android:layout_height=\"wrap_content\"\n            android:layout_weight=\"1\"\n            android:layout_marginEnd=\"5dp\">\n            <EditText\n                android:id=\"@+id/etRtspUsername\"\n                android:layout_width=\"match_parent\"\n                android:layout_height=\"wrap_content\"\n                android:hint=\"RTSP username\"/>\n        </com.google.android.material.textfield.TextInputLayout>\n\n        <com.google.android.material.textfield.TextInputLayout\n            android:layout_width=\"wrap_content\"\n            android:layout_height=\"wrap_content\"\n            android:layout_weight=\"1\"\n            app:passwordToggleEnabled=\"true\">\n            <EditText\n                android:id=\"@+id/etRtspPassword\"\n                android:layout_width=\"match_parent\"\n  
              android:layout_height=\"wrap_content\"\n                android:inputType=\"textPassword\"\n                android:hint=\"RTSP password\"/>\n        </com.google.android.material.textfield.TextInputLayout>\n\n    </LinearLayout>\n\n    <LinearLayout\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"wrap_content\"\n        android:gravity=\"center\"\n        android:orientation=\"horizontal\">\n\n        <CheckBox\n            android:id=\"@+id/cbVideo\"\n            android:text=\"Video\"\n            android:checked=\"true\"\n            android:layout_margin=\"5dp\"\n            android:layout_width=\"wrap_content\"\n            android:layout_height=\"wrap_content\" />\n\n        <CheckBox\n            android:id=\"@+id/cbAudio\"\n            android:text=\"Audio\"\n            android:checked=\"false\"\n            android:layout_margin=\"5dp\"\n            android:layout_width=\"wrap_content\"\n            android:layout_height=\"wrap_content\" />\n\n        <CheckBox\n            android:id=\"@+id/cbApplication\"\n            android:text=\"Application\"\n            android:checked=\"false\"\n            android:layout_margin=\"5dp\"\n            android:layout_width=\"wrap_content\"\n            android:layout_height=\"wrap_content\" />\n\n        <CheckBox\n            android:id=\"@+id/cbDebug\"\n            android:text=\"Debug\"\n            android:checked=\"false\"\n            android:layout_marginStart=\"20dp\"\n            android:layout_width=\"wrap_content\"\n            android:layout_height=\"wrap_content\" />\n    </LinearLayout>\n\n</LinearLayout>"
  },
  {
    "path": "app/src/main/res/menu/bottom_nav_menu.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<menu xmlns:android=\"http://schemas.android.com/apk/res/android\">\n\n    <item\n        android:id=\"@+id/navigation_live\"\n        android:icon=\"@drawable/ic_cctv_black_24dp\"\n        android:title=\"@string/title_live\" />\n\n    <item\n        android:id=\"@+id/navigation_raw\"\n        android:icon=\"@drawable/ic_cctv_black_24dp\"\n        android:title=\"@string/title_raw\" />\n\n    <item\n        android:id=\"@+id/navigation_logs\"\n        android:icon=\"@drawable/ic_text_subject_black_24dp\"\n        android:title=\"@string/title_logs\" />\n\n</menu>\n"
  },
  {
    "path": "app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<adaptive-icon xmlns:android=\"http://schemas.android.com/apk/res/android\">\n    <background android:drawable=\"@drawable/ic_launcher_background\" />\n    <foreground android:drawable=\"@drawable/ic_launcher_foreground\" />\n</adaptive-icon>"
  },
  {
    "path": "app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<adaptive-icon xmlns:android=\"http://schemas.android.com/apk/res/android\">\n    <background android:drawable=\"@drawable/ic_launcher_background\" />\n    <foreground android:drawable=\"@drawable/ic_launcher_foreground\" />\n</adaptive-icon>"
  },
  {
    "path": "app/src/main/res/navigation/mobile_navigation.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<navigation xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    xmlns:app=\"http://schemas.android.com/apk/res-auto\"\n    xmlns:tools=\"http://schemas.android.com/tools\"\n    android:id=\"@+id/mobile_navigation\"\n    app:startDestination=\"@+id/navigation_live\">\n\n    <fragment\n        android:id=\"@+id/navigation_live\"\n        android:name=\"com.alexvas.rtsp.demo.live.LiveFragment\"\n        android:label=\"@string/title_live\"\n        tools:layout=\"@layout/fragment_live\" />\n\n    <fragment\n        android:id=\"@+id/navigation_raw\"\n        android:name=\"com.alexvas.rtsp.demo.live.RawFragment\"\n        android:label=\"@string/title_raw\"\n        tools:layout=\"@layout/fragment_raw\" />\n\n    <fragment\n        android:id=\"@+id/navigation_logs\"\n        android:name=\"info.hannes.logcat.ui.LogcatFragment\"\n        android:label=\"@string/title_logs\"\n        tools:layout=\"@layout/fragment_logs\" />\n\n</navigation>\n"
  },
  {
    "path": "app/src/main/res/values/colors.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<resources>\n    <color name=\"colorPrimary\">#40747A</color>\n    <color name=\"colorPrimaryDark\">#00BCD4</color>\n    <color name=\"colorAccent\">#03DAC5</color>\n</resources>\n"
  },
  {
    "path": "app/src/main/res/values/dimens.xml",
    "content": "<resources>\n    <!-- Default screen margins, per the Android Design guidelines. -->\n    <dimen name=\"activity_horizontal_margin\">16dp</dimen>\n    <dimen name=\"activity_vertical_margin\">16dp</dimen>\n</resources>\n"
  },
  {
    "path": "app/src/main/res/values/strings.xml",
    "content": "<resources>\n    <string name=\"app_name\">Rtsp demo</string>\n    <string name=\"title_live\">Live</string>\n    <string name=\"title_raw\">Raw</string>\n    <string name=\"title_logs\">Logs</string>\n</resources>\n"
  },
  {
    "path": "app/src/main/res/values/styles.xml",
    "content": "<resources>\n\n    <!-- Base application theme. -->\n    <style name=\"AppTheme\" parent=\"Theme.Material3.DayNight\">\n        <!-- Customize your theme here. -->\n        <item name=\"colorPrimary\">@color/colorPrimaryDark</item>\n        <item name=\"colorPrimaryDark\">@color/colorPrimaryDark</item>\n        <item name=\"colorAccent\">@color/colorAccent</item>\n    </style>\n\n</resources>\n"
  },
  {
    "path": "build.gradle",
    "content": "buildscript {\n\n  ext.kotlin_version = '2.2.21'\n  ext.compile_sdk_version = 36\n  ext.min_sdk_version = 24\n  ext.target_sdk_version = 35\n  ext.project_version_code = 564\n  ext.project_version_name = '5.6.4'\n\n  repositories {\n    google()\n    mavenCentral()\n  }\n  dependencies {\n    classpath 'com.android.tools.build:gradle:8.13.2'\n    classpath \"org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version\"\n  }\n}\n\nallprojects {\n  repositories {\n    google()\n    mavenCentral()\n    maven { url 'https://jitpack.io' }\n  }\n}\n\ntasks.register('clean', Delete) {\n  delete rootProject.layout.buildDirectory\n}\n"
  },
  {
    "path": "gradle/wrapper/gradle-wrapper.properties",
    "content": "distributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\ndistributionUrl=https\\://services.gradle.org/distributions/gradle-8.14.3-bin.zip\nnetworkTimeout=10000\nvalidateDistributionUrl=true\nzipStoreBase=GRADLE_USER_HOME\nzipStorePath=wrapper/dists\n"
  },
  {
    "path": "gradle.properties",
    "content": "org.gradle.jvmargs=-Xmx1g\nandroid.useAndroidX=true\n"
  },
  {
    "path": "gradlew",
    "content": "#!/bin/sh\n\n#\n# Copyright © 2015-2021 the original authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n#      https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n# SPDX-License-Identifier: Apache-2.0\n#\n\n##############################################################################\n#\n#   Gradle start up script for POSIX generated by Gradle.\n#\n#   Important for running:\n#\n#   (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is\n#       noncompliant, but you have some other compliant shell such as ksh or\n#       bash, then to run this script, type that shell name before the whole\n#       command line, like:\n#\n#           ksh Gradle\n#\n#       Busybox and similar reduced shells will NOT work, because this script\n#       requires all of these POSIX shell features:\n#         * functions;\n#         * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,\n#           «${var#prefix}», «${var%suffix}», and «$( cmd )»;\n#         * compound commands having a testable exit status, especially «case»;\n#         * various built-in commands including «command», «set», and «ulimit».\n#\n#   Important for patching:\n#\n#   (2) This script targets any POSIX shell, so it avoids extensions provided\n#       by Bash, Ksh, etc; in particular arrays are avoided.\n#\n#       The \"traditional\" practice of packing multiple parameters into a\n#       space-separated string is a well documented source of bugs and security\n#       problems, so this is (mostly) 
avoided, by progressively accumulating\n#       options in \"$@\", and eventually passing that to Java.\n#\n#       Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,\n#       and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;\n#       see the in-line comments for details.\n#\n#       There are tweaks for specific operating systems such as AIX, CygWin,\n#       Darwin, MinGW, and NonStop.\n#\n#   (3) This script is generated from the Groovy template\n#       https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt\n#       within the Gradle project.\n#\n#       You can find Gradle at https://github.com/gradle/gradle/.\n#\n##############################################################################\n\n# Attempt to set APP_HOME\n\n# Resolve links: $0 may be a link\napp_path=$0\n\n# Need this for daisy-chained symlinks.\nwhile\n    APP_HOME=${app_path%\"${app_path##*/}\"}  # leaves a trailing /; empty if no leading path\n    [ -h \"$app_path\" ]\ndo\n    ls=$( ls -ld \"$app_path\" )\n    link=${ls#*' -> '}\n    case $link in             #(\n      /*)   app_path=$link ;; #(\n      *)    app_path=$APP_HOME$link ;;\n    esac\ndone\n\n# This is normally unused\n# shellcheck disable=SC2034\nAPP_BASE_NAME=${0##*/}\n# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)\nAPP_HOME=$( cd -P \"${APP_HOME:-./}\" > /dev/null && printf '%s\\n' \"$PWD\" ) || exit\n\n# Use the maximum available, or set MAX_FD != -1 to use that value.\nMAX_FD=maximum\n\nwarn () {\n    echo \"$*\"\n} >&2\n\ndie () {\n    echo\n    echo \"$*\"\n    echo\n    exit 1\n} >&2\n\n# OS specific support (must be 'true' or 'false').\ncygwin=false\nmsys=false\ndarwin=false\nnonstop=false\ncase \"$( uname )\" in                #(\n  CYGWIN* )         cygwin=true  ;; #(\n  Darwin* )         darwin=true  ;; #(\n  MSYS* | MINGW* )  
msys=true    ;; #(\n  NONSTOP* )        nonstop=true ;;\nesac\n\nCLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar\n\n\n# Determine the Java command to use to start the JVM.\nif [ -n \"$JAVA_HOME\" ] ; then\n    if [ -x \"$JAVA_HOME/jre/sh/java\" ] ; then\n        # IBM's JDK on AIX uses strange locations for the executables\n        JAVACMD=$JAVA_HOME/jre/sh/java\n    else\n        JAVACMD=$JAVA_HOME/bin/java\n    fi\n    if [ ! -x \"$JAVACMD\" ] ; then\n        die \"ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\n    fi\nelse\n    JAVACMD=java\n    if ! command -v java >/dev/null 2>&1\n    then\n        die \"ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\n    fi\nfi\n\n# Increase the maximum file descriptors if we can.\nif ! \"$cygwin\" && ! \"$darwin\" && ! \"$nonstop\" ; then\n    case $MAX_FD in #(\n      max*)\n        # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.\n        # shellcheck disable=SC2039,SC3045\n        MAX_FD=$( ulimit -H -n ) ||\n            warn \"Could not query maximum file descriptor limit\"\n    esac\n    case $MAX_FD in  #(\n      '' | soft) :;; #(\n      *)\n        # In POSIX sh, ulimit -n is undefined. 
That's why the result is checked to see if it worked.\n        # shellcheck disable=SC2039,SC3045\n        ulimit -n \"$MAX_FD\" ||\n            warn \"Could not set maximum file descriptor limit to $MAX_FD\"\n    esac\nfi\n\n# Collect all arguments for the java command, stacking in reverse order:\n#   * args from the command line\n#   * the main class name\n#   * -classpath\n#   * -D...appname settings\n#   * --module-path (only if needed)\n#   * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.\n\n# For Cygwin or MSYS, switch paths to Windows format before running java\nif \"$cygwin\" || \"$msys\" ; then\n    APP_HOME=$( cygpath --path --mixed \"$APP_HOME\" )\n    CLASSPATH=$( cygpath --path --mixed \"$CLASSPATH\" )\n\n    JAVACMD=$( cygpath --unix \"$JAVACMD\" )\n\n    # Now convert the arguments - kludge to limit ourselves to /bin/sh\n    for arg do\n        if\n            case $arg in                                #(\n              -*)   false ;;                            # don't mess with options #(\n              /?*)  t=${arg#/} t=/${t%%/*}              # looks like a POSIX filepath\n                    [ -e \"$t\" ] ;;                      #(\n              *)    false ;;\n            esac\n        then\n            arg=$( cygpath --path --ignore --mixed \"$arg\" )\n        fi\n        # Roll the args list around exactly as many times as the number of\n        # args, so each arg winds up back in the position where it started, but\n        # possibly modified.\n        #\n        # NB: a `for` loop captures its iteration list before it begins, so\n        # changing the positional parameters here affects neither the number of\n        # iterations, nor the values presented in `arg`.\n        shift                   # remove old arg\n        set -- \"$@\" \"$arg\"      # push replacement arg\n    done\nfi\n\n\n# Add default JVM options here. 
You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\nDEFAULT_JVM_OPTS='\"-Xmx64m\" \"-Xms64m\"'\n\n# Collect all arguments for the java command:\n#   * DEFAULT_JVM_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments,\n#     and any embedded shellness will be escaped.\n#   * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be\n#     treated as '${Hostname}' itself on the command line.\n\nset -- \\\n        \"-Dorg.gradle.appname=$APP_BASE_NAME\" \\\n        -classpath \"$CLASSPATH\" \\\n        org.gradle.wrapper.GradleWrapperMain \\\n        \"$@\"\n\n# Stop when \"xargs\" is not available.\nif ! command -v xargs >/dev/null 2>&1\nthen\n    die \"xargs is not available\"\nfi\n\n# Use \"xargs\" to parse quoted args.\n#\n# With -n1 it outputs one arg per line, with the quotes and backslashes removed.\n#\n# In Bash we could simply go:\n#\n#   readarray ARGS < <( xargs -n1 <<<\"$var\" ) &&\n#   set -- \"${ARGS[@]}\" \"$@\"\n#\n# but POSIX shell has neither arrays nor command substitution, so instead we\n# post-process each arg (as a line of input to sed) to backslash-escape any\n# character that might be a shell metacharacter, then use eval to reverse\n# that process (while maintaining the separation between arguments), and wrap\n# the whole thing up as a single \"set\" statement.\n#\n# This will of course break if any of these variables contains a newline or\n# an unmatched quote.\n#\n\neval \"set -- $(\n        printf '%s\\n' \"$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS\" |\n        xargs -n1 |\n        sed ' s~[^-[:alnum:]+,./:=@_]~\\\\&~g; ' |\n        tr '\\n' ' '\n    )\" '\"$@\"'\n\nexec \"$JAVACMD\" \"$@\"\n"
  },
  {
    "path": "gradlew.bat",
    "content": "@rem\r\n@rem Copyright 2015 the original author or authors.\r\n@rem\r\n@rem Licensed under the Apache License, Version 2.0 (the \"License\");\r\n@rem you may not use this file except in compliance with the License.\r\n@rem You may obtain a copy of the License at\r\n@rem\r\n@rem      https://www.apache.org/licenses/LICENSE-2.0\r\n@rem\r\n@rem Unless required by applicable law or agreed to in writing, software\r\n@rem distributed under the License is distributed on an \"AS IS\" BASIS,\r\n@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n@rem See the License for the specific language governing permissions and\r\n@rem limitations under the License.\r\n@rem\r\n@rem SPDX-License-Identifier: Apache-2.0\r\n@rem\r\n\r\n@if \"%DEBUG%\"==\"\" @echo off\r\n@rem ##########################################################################\r\n@rem\r\n@rem  Gradle startup script for Windows\r\n@rem\r\n@rem ##########################################################################\r\n\r\n@rem Set local scope for the variables with windows NT shell\r\nif \"%OS%\"==\"Windows_NT\" setlocal\r\n\r\nset DIRNAME=%~dp0\r\nif \"%DIRNAME%\"==\"\" set DIRNAME=.\r\n@rem This is normally unused\r\nset APP_BASE_NAME=%~n0\r\nset APP_HOME=%DIRNAME%\r\n\r\n@rem Resolve any \".\" and \"..\" in APP_HOME to make it shorter.\r\nfor %%i in (\"%APP_HOME%\") do set APP_HOME=%%~fi\r\n\r\n@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\r\nset DEFAULT_JVM_OPTS=\"-Xmx64m\" \"-Xms64m\"\r\n\r\n@rem Find java.exe\r\nif defined JAVA_HOME goto findJavaFromJavaHome\r\n\r\nset JAVA_EXE=java.exe\r\n%JAVA_EXE% -version >NUL 2>&1\r\nif %ERRORLEVEL% equ 0 goto execute\r\n\r\necho. 1>&2\r\necho ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2\r\necho. 1>&2\r\necho Please set the JAVA_HOME variable in your environment to match the 1>&2\r\necho location of your Java installation. 
1>&2\r\n\r\ngoto fail\r\n\r\n:findJavaFromJavaHome\r\nset JAVA_HOME=%JAVA_HOME:\"=%\r\nset JAVA_EXE=%JAVA_HOME%/bin/java.exe\r\n\r\nif exist \"%JAVA_EXE%\" goto execute\r\n\r\necho. 1>&2\r\necho ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2\r\necho. 1>&2\r\necho Please set the JAVA_HOME variable in your environment to match the 1>&2\r\necho location of your Java installation. 1>&2\r\n\r\ngoto fail\r\n\r\n:execute\r\n@rem Setup the command line\r\n\r\nset CLASSPATH=%APP_HOME%\\gradle\\wrapper\\gradle-wrapper.jar\r\n\r\n\r\n@rem Execute Gradle\r\n\"%JAVA_EXE%\" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% \"-Dorg.gradle.appname=%APP_BASE_NAME%\" -classpath \"%CLASSPATH%\" org.gradle.wrapper.GradleWrapperMain %*\r\n\r\n:end\r\n@rem End local scope for the variables with windows NT shell\r\nif %ERRORLEVEL% equ 0 goto mainEnd\r\n\r\n:fail\r\nrem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of\r\nrem the _cmd.exe /c_ return code!\r\nset EXIT_CODE=%ERRORLEVEL%\r\nif %EXIT_CODE% equ 0 set EXIT_CODE=1\r\nif not \"\"==\"%GRADLE_EXIT_CONSOLE%\" exit %EXIT_CODE%\r\nexit /b %EXIT_CODE%\r\n\r\n:mainEnd\r\nif \"%OS%\"==\"Windows_NT\" endlocal\r\n\r\n:omega\r\n"
  },
  {
    "path": "jitpack.yml",
    "content": "jdk:\n  - openjdk17\n\ninstall:\n  - ./gradlew build :library-client-rtsp:publishToMavenLocal"
  },
  {
    "path": "library-client-rtsp/.gitignore",
    "content": "# Created by https://www.gitignore.io/api/android,java,intellij\r\n\r\n### Android ###\r\n# Built application files\r\n*.apk\r\n*.ap_\r\n\r\n# Files for the Dalvik VM\r\n*.dex\r\n\r\n# Java class files\r\n*.class\r\n\r\n# Generated files\r\nbin/\r\ngen/\r\n\r\n# Gradle files\r\n.gradle/\r\nbuild/\r\n\r\n# Local configuration file (sdk path, etc)\r\nlocal.properties\r\n\r\n# Proguard folder generated by Eclipse\r\nproguard/\r\n\r\nxactmobile/class_files.txt\r\nxactmobile/mapping.txt\r\nxactmobile/seeds.txt\r\n\r\n# Log Files\r\n*.log\r\n\r\n# Android Studio Navigation editor temp files\r\n.navigation/\r\n\r\n### Android Patch ###\r\ngen-external-apklibs\r\n\r\n\r\n### Java ###\r\n*.class\r\n\r\n# Mobile Tools for Java (J2ME)\r\n.mtj.tmp/\r\n\r\n# Package Files #\r\n#*.jar\r\n*.war\r\n*.ear\r\n\r\n# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml\r\nhs_err_pid*\r\n\r\n\r\n### Intellij ###\r\n# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio\r\n\r\n*.iml\r\n\r\n## Directory-based project format:\r\n.idea/\r\n# if you remove the above rule, at least ignore the following:\r\n\r\n# User-specific stuff:\r\n.idea/workspace.xml\r\n.idea/tasks.xml\r\n.idea/dictionaries\r\n\r\n# Sensitive or high-churn files:\r\n.idea/dataSources.ids\r\n.idea/dataSources.xml\r\n.idea/sqlDataSources.xml\r\n.idea/dynamic.xml\r\n.idea/uiDesigner.xml\r\n\r\n# Gradle:\r\n.idea/gradle.xml\r\n.idea/libraries\r\n\r\n# Mongo Explorer plugin:\r\n.idea/mongoSettings.xml\r\n\r\n## File-based project format:\r\n*.ipr\r\n*.iws\r\n\r\n## Plugin-specific files:\r\n\r\n# IntelliJ\r\n/out/\r\n\r\n# mpeltonen/sbt-idea plugin\r\n.idea_modules/\r\n\r\n# JIRA plugin\r\natlassian-ide-plugin.xml\r\n\r\n# Crashlytics plugin (for Android Studio and 
IntelliJ)\r\ncom_crashlytics_export_strings.xml\r\ncrashlytics.properties\r\ncrashlytics-build.properties\r\n\r\nxactmobile/.DS_Store~64be78fe3602626c61b52bcbfd09e09a6107b50a\r\nxactmobile/.DS_Store~HEAD\r\noslab-viewpager/._.DS_Store\r\noslab-viewpager/src/main/.DS_Store\r\noslab-viewpager/src/main/._.DS_Store\r\noslab-viewpager/src/main/res/.DS_Store\r\noslab-viewpager/src/main/res/._.DS_Store\r\noslab-viewpager/.gitignore\r\noslab-materialdesign/.DS_Store\r\noslab-materialdesign/._.DS_Store\r\noslab-materialdesign/src/.DS_Store\r\noslab-materialdesign/src/._.DS_Store\r\noslab-materialdesign/src/main/.DS_Store\r\noslab-materialdesign/src/main/._.DS_Store\r\noslab-materialdesign/src/main/res/.DS_Store\r\noslab-materialdesign/src/main/res/._.DS_Store\r\n"
  },
  {
    "path": "library-client-rtsp/build.gradle",
    "content": "plugins {\n    id 'com.android.library'\n    id 'kotlin-android'\n    id 'maven-publish'\n}\n\napply plugin: 'com.android.library'\n\nproject.afterEvaluate {\n    publishing {\n        publications {\n            release(MavenPublication) {\n                from components.release\n            }\n        }\n    }\n}\n\nandroid {\n\n    compileSdkVersion compile_sdk_version\n\n    defaultConfig {\n        minSdk min_sdk_version\n        targetSdk target_sdk_version\n    }\n\n    compileOptions {\n        sourceCompatibility JavaVersion.VERSION_17\n        targetCompatibility JavaVersion.VERSION_17\n    }\n\n    kotlinOptions {\n        jvmTarget = JavaVersion.VERSION_17.toString()\n    }\n\n    namespace 'com.alexvas.rtsp'\n}\n\ndependencies {\n    implementation 'androidx.annotation:annotation:1.9.1'\n    implementation 'androidx.media3:media3-exoplayer:1.9.3'\n    implementation 'androidx.camera:camera-core:1.5.3' // YUV -> BMP conversion\n    implementation 'org.jcodec:jcodec:0.2.5' // SPS frame modification\n}\n"
  },
  {
    "path": "library-client-rtsp/proguard-rules.txt",
    "content": "# Proguard rules.\n\n"
  },
  {
    "path": "library-client-rtsp/src/main/AndroidManifest.xml",
    "content": "<manifest\n    xmlns:android=\"http://schemas.android.com/apk/res/android\">\n    <uses-permission android:name=\"android.permission.INTERNET\" />\n</manifest>\n"
  },
  {
    "path": "library-client-rtsp/src/main/java/com/alexvas/rtsp/RtspClient.java",
    "content": "package com.alexvas.rtsp;\n\nimport android.text.TextUtils;\nimport android.util.Base64;\nimport android.util.Log;\nimport android.util.Pair;\n\nimport androidx.annotation.NonNull;\nimport androidx.annotation.Nullable;\n\nimport com.alexvas.rtsp.parser.AacParser;\nimport com.alexvas.rtsp.parser.G711Parser;\nimport com.alexvas.rtsp.parser.AudioParser;\nimport com.alexvas.rtsp.parser.RtpH264Parser;\nimport com.alexvas.rtsp.parser.RtpH265Parser;\nimport com.alexvas.rtsp.parser.RtpHeaderParser;\nimport com.alexvas.rtsp.parser.RtpParser;\nimport com.alexvas.utils.NetUtils;\nimport com.alexvas.utils.VideoCodecUtils;\n\nimport java.io.BufferedOutputStream;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.OutputStream;\nimport java.io.Serial;\nimport java.math.BigInteger;\nimport java.net.Socket;\nimport java.nio.charset.StandardCharsets;\nimport java.security.MessageDigest;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.concurrent.atomic.AtomicBoolean;\nimport java.util.concurrent.atomic.AtomicInteger;\n\n//OPTIONS rtsp://10.0.1.145:88/videoSub RTSP/1.0\n//CSeq: 1\n//User-Agent: Lavf58.29.100\n//\n//RTSP/1.0 200 OK\n//CSeq: 1\n//Date: Fri, Jan 03 2020 22:03:07 GMT\n//Public: OPTIONS, DESCRIBE, SETUP, TEARDOWN, PLAY, PAUSE, GET_PARAMETER, SET_PARAMETER\n\n//DESCRIBE rtsp://10.0.1.145:88/videoSub RTSP/1.0\n//Accept: application/sdp\n//CSeq: 2\n//User-Agent: Lavf58.29.100\n//\n//RTSP/1.0 401 Unauthorized\n//CSeq: 2\n//Date: Fri, Jan 03 2020 22:03:07 GMT\n//WWW-Authenticate: Digest realm=\"Foscam IPCam Living Video\", nonce=\"3c889dbf8371d3660aa2496789a5d130\"\n\n//DESCRIBE rtsp://10.0.1.145:88/videoSub RTSP/1.0\n//Accept: application/sdp\n//CSeq: 3\n//User-Agent: Lavf58.29.100\n//Authorization: Digest username=\"admin\", realm=\"Foscam IPCam Living Video\", nonce=\"3c889dbf8371d3660aa2496789a5d130\", uri=\"rtsp://10.0.1.145:88/videoSub\", 
response=\"4f062baec1c813ae3db15e3a14111d3d\"\n//\n//RTSP/1.0 200 OK\n//CSeq: 3\n//Date: Fri, Jan 03 2020 22:03:07 GMT\n//Content-Base: rtsp://10.0.1.145:65534/videoSub/\n//Content-Type: application/sdp\n//Content-Length: 495\n//\n//v=0\n//o=- 1578088972261172 1 IN IP4 10.0.1.145\n//s=IP Camera Video\n//i=videoSub\n//t=0 0\n//a=tool:LIVE555 Streaming Media v2014.02.10\n//a=type:broadcast\n//a=control:*\n//a=range:npt=0-\n//a=x-qt-text-nam:IP Camera Video\n//a=x-qt-text-inf:videoSub\n//m=video 0 RTP/AVP 96\n//c=IN IP4 0.0.0.0\n//b=AS:96\n//a=rtpmap:96 H264/90000\n//a=fmtp:96 packetization-mode=1;profile-level-id=420020;sprop-parameter-sets=Z0IAIJWoFAHmQA==,aM48gA==\n//a=control:track1\n//m=audio 0 RTP/AVP 0\n//c=IN IP4 0.0.0.0\n//b=AS:64\n//a=control:track2\n//SETUP rtsp://10.0.1.145:65534/videoSub/track1 RTSP/1.0\n//Transport: RTP/AVP/UDP;unicast;client_port=27452-27453\n//CSeq: 4\n//User-Agent: Lavf58.29.100\n//Authorization: Digest username=\"admin\", realm=\"Foscam IPCam Living Video\", nonce=\"3c889dbf8371d3660aa2496789a5d130\", uri=\"rtsp://10.0.1.145:65534/videoSub/track1\", response=\"1fbc50b24d582c9331dd5e89f3102a06\"\n//\n//RTSP/1.0 200 OK\n//CSeq: 4\n//Date: Fri, Jan 03 2020 22:03:07 GMT\n//Transport: RTP/AVP;unicast;destination=10.0.1.53;source=10.0.1.145;client_port=27452-27453;server_port=6972-6973\n//Session: 1F91B1B6;timeout=65\n\n//SETUP rtsp://10.0.1.145:65534/videoSub/track2 RTSP/1.0\n//Transport: RTP/AVP/UDP;unicast;client_port=27454-27455\n//CSeq: 5\n//User-Agent: Lavf58.29.100\n//Session: 1F91B1B6\n//Authorization: Digest username=\"admin\", realm=\"Foscam IPCam Living Video\", nonce=\"3c889dbf8371d3660aa2496789a5d130\", uri=\"rtsp://10.0.1.145:65534/videoSub/track2\", response=\"ad779abe070c096eff1012e7c70c986a\"\n//\n//RTSP/1.0 200 OK\n//CSeq: 5\n//Date: Fri, Jan 03 2020 22:03:07 GMT\n//Transport: RTP/AVP;unicast;destination=10.0.1.53;source=10.0.1.145;client_port=27454-27455;server_port=6974-6975\n//Session: 1F91B1B6;timeout=65\n\n//PLAY 
rtsp://10.0.1.145:65534/videoSub/ RTSP/1.0\n//Range: npt=0.000-\n//CSeq: 6\n//User-Agent: Lavf58.29.100\n//Session: 1F91B1B6\n//Authorization: Digest username=\"admin\", realm=\"Foscam IPCam Living Video\", nonce=\"3c889dbf8371d3660aa2496789a5d130\", uri=\"rtsp://10.0.1.145:65534/videoSub/\", response=\"bb52eb6938dd4e50c4fac50363ffded0\"\n//\n//RTSP/1.0 200 OK\n//CSeq: 6\n//Date: Fri, Jan 03 2020 22:03:07 GMT\n//Range: npt=0.000-\n//Session: 1F91B1B6\n//RTP-Info: url=rtsp://10.0.1.145:65534/videoSub/track1;seq=42731;rtptime=2690581590,url=rtsp://10.0.1.145:65534/videoSub/track2;seq=34051;rtptime=3328043318\n\n// https://www.ietf.org/rfc/rfc2326.txt\npublic class RtspClient {\n\n    private static final String TAG = RtspClient.class.getSimpleName();\n            static final String TAG_DEBUG = TAG + \" DBG\";\n    private static final boolean DEBUG = false;\n    private static final byte[] EMPTY_ARRAY = new byte[0];\n\n    public final static int RTSP_CAPABILITY_NONE          = 0;\n    public final static int RTSP_CAPABILITY_OPTIONS       = 1 << 1;\n    public final static int RTSP_CAPABILITY_DESCRIBE      = 1 << 2;\n    public final static int RTSP_CAPABILITY_ANNOUNCE      = 1 << 3;\n    public final static int RTSP_CAPABILITY_SETUP         = 1 << 4;\n    public final static int RTSP_CAPABILITY_PLAY          = 1 << 5;\n    public final static int RTSP_CAPABILITY_RECORD        = 1 << 6;\n    public final static int RTSP_CAPABILITY_PAUSE         = 1 << 7;\n    public final static int RTSP_CAPABILITY_TEARDOWN      = 1 << 8;\n    public final static int RTSP_CAPABILITY_SET_PARAMETER = 1 << 9;\n    public final static int RTSP_CAPABILITY_GET_PARAMETER = 1 << 10;\n    public final static int RTSP_CAPABILITY_REDIRECT      = 1 << 11;\n\n    public static boolean hasCapability(int capability, int capabilitiesMask) {\n        return (capabilitiesMask & capability) != 0;\n    }\n\n    public interface RtspClientListener {\n        void onRtspConnecting();\n        void 
onRtspConnected(@NonNull SdpInfo sdpInfo);\n        void onRtspVideoNalUnitReceived(@NonNull byte[] data, int offset, int length, long timestamp);\n        void onRtspAudioSampleReceived(@NonNull byte[] data, int offset, int length, long timestamp);\n        void onRtspApplicationDataReceived(@NonNull byte[] data, int offset, int length, long timestamp);\n        void onRtspDisconnecting();\n        void onRtspDisconnected();\n        void onRtspFailedUnauthorized();\n        void onRtspFailed(@Nullable String message);\n    }\n\n    private interface RtspClientKeepAliveListener {\n        void onRtspKeepAliveRequested();\n    }\n\n    public static class SdpInfo {\n        /**\n         * Session name (RFC 2327). In most cases RTSP server name.\n         */\n        public @Nullable String sessionName;\n\n        /**\n         * Session description (RFC 2327).\n         */\n        public @Nullable String sessionDescription;\n\n        public @Nullable VideoTrack videoTrack;\n        public @Nullable AudioTrack audioTrack;\n        public @Nullable ApplicationTrack applicationTrack;\n    }\n\n    public abstract static class Track {\n        public String request;\n        public int payloadType;\n\n        @NonNull\n        @Override\n        public String toString() {\n            return \"Track{request='\" + request + \"', payloadType=\" + payloadType + '}';\n        }\n    }\n\n    public static final int VIDEO_CODEC_H264 = 0;\n    public static final int VIDEO_CODEC_H265 = 1;\n\n    public static class VideoTrack extends Track {\n        public int videoCodec = VIDEO_CODEC_H264;\n        public @Nullable byte[] sps; // Both H.264 and H.265\n        public @Nullable byte[] pps; // Both H.264 and H.265\n        public @Nullable byte[] vps; // H.265 only\n    }\n\n    public static final int AUDIO_CODEC_UNKNOWN = -1;\n    public static final int AUDIO_CODEC_AAC = 0;\n    public static final int AUDIO_CODEC_OPUS = 1;\n    public static final int 
AUDIO_CODEC_G711_ULAW = 2;\n    public static final int AUDIO_CODEC_G711_ALAW = 3;\n\n    @NonNull\n    private static String getAudioCodecName(int codec) {\n        return switch (codec) {\n            case AUDIO_CODEC_AAC -> \"AAC\";\n            case AUDIO_CODEC_OPUS -> \"Opus\";\n            case AUDIO_CODEC_G711_ULAW -> \"G.711 uLaw\";\n            case AUDIO_CODEC_G711_ALAW -> \"G.711 aLaw\";\n            default -> \"Unknown\";\n        };\n    }\n\n    public static class AudioTrack extends Track {\n        public int audioCodec = AUDIO_CODEC_UNKNOWN;\n        public int sampleRateHz; // 16000, 8000\n        public int channels; // 1 - mono, 2 - stereo\n        public String mode; // AAC-lbr, AAC-hbr\n        public @Nullable byte[] config; // config=1210fff15081ffdffc\n    }\n\n    public static class ApplicationTrack extends Track {\n    }\n\n    private static final String CRLF = \"\\r\\n\";\n\n    // Size of buffer for reading from the connection\n    private final static int MAX_LINE_SIZE = 4098;\n\n    private static class UnauthorizedException extends IOException {\n        UnauthorizedException() {\n            super(\"Unauthorized\");\n        }\n    }\n\n    private final static class NoResponseHeadersException extends IOException {\n        @Serial\n        private static final long serialVersionUID = 1L;\n    }\n\n    private final @NonNull Socket rtspSocket;\n    private @NonNull String uriRtsp;\n    private final @NonNull AtomicBoolean exitFlag;\n    private final @NonNull RtspClientListener listener;\n\n//  private boolean sendOptionsCommand;\n    private final boolean requestVideo;\n    private final boolean requestAudio;\n    private final boolean requestApplication;\n    private final boolean debug;\n    private final @Nullable String username;\n    private final @Nullable String password;\n    private final @Nullable String userAgent;\n\n    private RtspClient(@NonNull RtspClient.Builder builder) {\n        rtspSocket = 
builder.rtspSocket;\n        uriRtsp = builder.uriRtsp;\n        exitFlag = builder.exitFlag;\n        listener = builder.listener;\n//      sendOptionsCommand = builder.sendOptionsCommand;\n        requestVideo = builder.requestVideo;\n        requestAudio = builder.requestAudio;\n        requestApplication = builder.requestApplication;\n        username = builder.username;\n        password = builder.password;\n        debug = builder.debug;\n        userAgent = builder.userAgent;\n    }\n\n    public void execute() {\n        if (DEBUG) Log.v(TAG, \"execute()\");\n        listener.onRtspConnecting();\n        try {\n            final InputStream inputStream = rtspSocket.getInputStream();\n            final OutputStream outputStream = debug ?\n                    new LoggerOutputStream(rtspSocket.getOutputStream()) :\n                    new BufferedOutputStream(rtspSocket.getOutputStream());\n\n            SdpInfo sdpInfo = new SdpInfo();\n            final AtomicInteger cSeq = new AtomicInteger(0);\n            ArrayList<Pair<String, String>> headers;\n            int status;\n\n            String authToken = null;\n            Pair<String, String> digestRealmNonce = null;\n\n// OPTIONS rtsp://10.0.1.78:8080/video/h264 RTSP/1.0\n// CSeq: 1\n// User-Agent: Lavf58.29.100\n\n// RTSP/1.0 200 OK\n// CSeq: 1\n// Public: OPTIONS, DESCRIBE, SETUP, PLAY, GET_PARAMETER, SET_PARAMETER, TEARDOWN\n//          if (sendOptionsCommand) {\n            checkExitFlag(exitFlag);\n            sendOptionsCommand(outputStream, uriRtsp, cSeq.addAndGet(1), userAgent, null);\n            status = readResponseStatusCode(inputStream);\n            headers = readResponseHeaders(inputStream);\n            dumpHeaders(headers);\n            // Try once again with credentials\n            if (status == 401) {\n                digestRealmNonce = getHeaderWwwAuthenticateDigestRealmAndNonce(headers);\n                if (digestRealmNonce == null) {\n                    String basicRealm = 
getHeaderWwwAuthenticateBasicRealm(headers);\n                    if (TextUtils.isEmpty(basicRealm)) {\n                        throw new IOException(\"Unknown authentication type\");\n                    }\n                    // Basic auth\n                    authToken = getBasicAuthHeader(username, password);\n                } else {\n                    // Digest auth\n                    authToken = getDigestAuthHeader(username, password, \"OPTIONS\", uriRtsp, digestRealmNonce.first, digestRealmNonce.second);\n                }\n                checkExitFlag(exitFlag);\n                sendOptionsCommand(outputStream, uriRtsp, cSeq.addAndGet(1), userAgent, authToken);\n                status = readResponseStatusCode(inputStream);\n                headers = readResponseHeaders(inputStream);\n                dumpHeaders(headers);\n            }\n            if (DEBUG)\n                Log.i(TAG, \"OPTIONS status: \" + status);\n            checkStatusCode(status);\n            final int capabilities = getSupportedCapabilities(headers);\n\n\n// DESCRIBE rtsp://10.0.1.78:8080/video/h264 RTSP/1.0\n// Accept: application/sdp\n// CSeq: 2\n// User-Agent: Lavf58.29.100\n\n// RTSP/1.0 200 OK\n// CSeq: 2\n// Content-Type: application/sdp\n// Content-Length: 364\n//\n// v=0\n// t=0 0\n// a=range:npt=now-\n// m=video 0 RTP/AVP 96\n// a=rtpmap:96 H264/90000\n// a=fmtp:96 packetization-mode=1;sprop-parameter-sets=Z0KAH9oBABhpSCgwMDaFCag=,aM4G4g==\n// a=control:trackID=1\n// m=audio 0 RTP/AVP 96\n// a=rtpmap:96 mpeg4-generic/48000/1\n// a=fmtp:96 profile-level-id=1;mode=AAC-hbr;sizelength=13;indexlength=3;indexdeltalength=3;config=1188\n// a=control:trackID=2\n            checkExitFlag(exitFlag);\n\n            if (digestRealmNonce != null) {\n                authToken = getDigestAuthHeader(username, password, \"DESCRIBE\", uriRtsp, digestRealmNonce.first, digestRealmNonce.second);\n            }\n            sendDescribeCommand(outputStream, uriRtsp, cSeq.addAndGet(1), 
userAgent, authToken);\n            status = readResponseStatusCode(inputStream);\n            headers = readResponseHeaders(inputStream);\n            dumpHeaders(headers);\n            // Try once again with credentials. OPTIONS command can be accepted without authentication.\n            if (status == 401) {\n                digestRealmNonce = getHeaderWwwAuthenticateDigestRealmAndNonce(headers);\n                if (digestRealmNonce == null) {\n                    String basicRealm = getHeaderWwwAuthenticateBasicRealm(headers);\n                    if (TextUtils.isEmpty(basicRealm)) {\n                        throw new IOException(\"Unknown authentication type\");\n                    }\n                    // Basic auth\n                    authToken = getBasicAuthHeader(username, password);\n                } else {\n                    // Digest auth\n                    authToken = getDigestAuthHeader(username, password, \"DESCRIBE\", uriRtsp, digestRealmNonce.first, digestRealmNonce.second);\n                }\n                checkExitFlag(exitFlag);\n                sendDescribeCommand(outputStream, uriRtsp, cSeq.addAndGet(1), userAgent, authToken);\n                status = readResponseStatusCode(inputStream);\n                headers = readResponseHeaders(inputStream);\n                dumpHeaders(headers);\n            }\n            if (DEBUG)\n                Log.i(TAG, \"DESCRIBE status: \" + status);\n            checkStatusCode(status);\n\n            String contentBaseUri = getHeaderContentBase(headers);\n            if (contentBaseUri != null) {\n                if (debug)\n                    Log.i(TAG_DEBUG, \"RTSP URI changed to '\" + uriRtsp + \"'\");\n                uriRtsp = contentBaseUri;\n            }\n\n            int contentLength = getHeaderContentLength(headers);\n            if (contentLength > 0) {\n                String content = readContentAsText(inputStream, contentLength);\n                if (debug)\n                    
Log.i(TAG_DEBUG, \"\" + content);\n                try {\n                    List<Pair<String, String>> params = getDescribeParams(content);\n                    sdpInfo = getSdpInfoFromDescribeParams(params);\n                    if (!requestVideo)\n                        sdpInfo.videoTrack = null;\n                    if (!requestAudio)\n                        sdpInfo.audioTrack = null;\n                    if (!requestApplication)\n                        sdpInfo.applicationTrack = null;\n                    // Only AAC supported\n                    if (sdpInfo.audioTrack != null && sdpInfo.audioTrack.audioCodec == AUDIO_CODEC_UNKNOWN) {\n                        Log.e(TAG_DEBUG, \"Unknown RTSP audio codec (\" + sdpInfo.audioTrack.audioCodec + \") specified in SDP\");\n                        sdpInfo.audioTrack = null;\n                    }\n                } catch (Exception e) {\n                    e.printStackTrace();\n                }\n            }\n\n\n// SETUP rtsp://10.0.1.78:8080/video/h264/trackID=1 RTSP/1.0\n// Transport: RTP/AVP/TCP;unicast;interleaved=0-1\n// CSeq: 3\n// User-Agent: Lavf58.29.100\n\n// RTSP/1.0 200 OK\n// CSeq: 3\n// Transport: RTP/AVP/TCP;unicast;interleaved=0-1\n// Session: Mzk5MzY2MzUwMTg3NTc2Mzc5NQ;timeout=30\n            String session = null;\n            int sessionTimeout = 0;\n            for (int i = 0; i < 3; i++) {\n                // 0 - video track, 1 - audio track, 2 - application track\n                checkExitFlag(exitFlag);\n                Track track;\n                switch (i) {\n                    case 0 -> track = requestVideo ? sdpInfo.videoTrack : null;\n                    case 1 -> track = requestAudio ? sdpInfo.audioTrack : null;\n                    default -> track = requestApplication ? 
sdpInfo.applicationTrack : null;\n                }\n                if (track != null) {\n                    String uriRtspSetup = getUriForSetup(uriRtsp, track);\n                    if (uriRtspSetup == null) {\n                        Log.e(TAG, \"Failed to get RTSP URI for SETUP\");\n                        continue;\n                    }\n                    if (digestRealmNonce != null)\n                        authToken = getDigestAuthHeader(\n                                username,\n                                password,\n                                \"SETUP\",\n                                uriRtspSetup,\n                                digestRealmNonce.first,\n                                digestRealmNonce.second);\n                    sendSetupCommand(\n                            outputStream,\n                            uriRtspSetup,\n                            cSeq.addAndGet(1),\n                            userAgent,\n                            authToken,\n                            session,\n                            (i == 0 ? 
\"0-1\" /*video*/ : \"2-3\" /*audio*/));\n                    status = readResponseStatusCode(inputStream);\n                    if (DEBUG)\n                        Log.i(TAG, \"SETUP status: \" + status);\n                    checkStatusCode(status);\n                    headers = readResponseHeaders(inputStream);\n                    dumpHeaders(headers);\n                    session = getHeader(headers, \"Session\");\n                    if (!TextUtils.isEmpty(session)) {\n                        // ODgyODg3MjQ1MDczODk3NDk4Nw;timeout=30\n                        String[] params = TextUtils.split(session, \";\");\n                        session = params[0];\n                        // Getting session timeout\n                        if (params.length > 1) {\n                            params = TextUtils.split(params[1], \"=\");\n                            if (params.length > 1) {\n                                try {\n                                    sessionTimeout = Integer.parseInt(params[1]);\n                                } catch (NumberFormatException e) {\n                                    Log.e(TAG, \"Failed to parse RTSP session timeout\");\n                                }\n                            }\n                        }\n                    }\n                    if (DEBUG)\n                        Log.d(TAG, \"SETUP session: \" + session + \", timeout: \" + sessionTimeout);\n                    if (TextUtils.isEmpty(session))\n                        throw new IOException(\"Failed to get RTSP session\");\n                }\n            }\n\n            if (TextUtils.isEmpty(session))\n                throw new IOException(\"Failed to get any media track\");\n\n// PLAY rtsp://10.0.1.78:8080/video/h264 RTSP/1.0\n// Range: npt=0.000-\n// CSeq: 5\n// User-Agent: Lavf58.29.100\n// Session: Mzk5MzY2MzUwMTg3NTc2Mzc5NQ\n\n// RTSP/1.0 200 OK\n// CSeq: 5\n// RTP-Info: url=/video/h264;seq=56\n// Session: Mzk5MzY2MzUwMTg3NTc2Mzc5NQ;timeout=30\n 
           checkExitFlag(exitFlag);\n            if (digestRealmNonce != null)\n                authToken = getDigestAuthHeader(username, password, \"PLAY\", uriRtsp /*?*/, digestRealmNonce.first, digestRealmNonce.second);\n            sendPlayCommand(outputStream, uriRtsp, cSeq.addAndGet(1), userAgent, authToken, session);\n            status = readResponseStatusCode(inputStream);\n            if (DEBUG)\n                Log.i(TAG, \"PLAY status: \" + status);\n            checkStatusCode(status);\n            headers = readResponseHeaders(inputStream);\n            dumpHeaders(headers);\n\n            listener.onRtspConnected(sdpInfo);\n\n            if (sdpInfo.videoTrack != null ||  sdpInfo.audioTrack != null || sdpInfo.applicationTrack != null) {\n                if (digestRealmNonce != null)\n                    authToken = getDigestAuthHeader(username, password, hasCapability(RTSP_CAPABILITY_GET_PARAMETER, capabilities) ? \"GET_PARAMETER\" : \"OPTIONS\", uriRtsp, digestRealmNonce.first, digestRealmNonce.second);\n                final String authTokenFinal = authToken;\n                final String sessionFinal = session;\n                RtspClientKeepAliveListener keepAliveListener = () -> {\n                    try {\n                        //GET_PARAMETER rtsp://10.0.1.155:554/cam/realmonitor?channel=1&subtype=1/ RTSP/1.0\n                        //CSeq: 6\n                        //User-Agent: Lavf58.45.100\n                        //Session: 4066342621205\n                        //Authorization: Digest username=\"admin\", realm=\"Login to cam\", nonce=\"8fb58500489d60f99a40b43f3c8574ef\", uri=\"rtsp://10.0.1.155:554/cam/realmonitor?channel=1&subtype=1/\", response=\"692a26124a1ee9562135785ace33a23b\"\n\n                        //RTSP/1.0 200 OK\n                        //CSeq: 6\n                        //Session: 4066342621205\n                        if (debug)\n                            Log.d(TAG_DEBUG, \"Sending keep-alive\");\n                 
       if (hasCapability(RTSP_CAPABILITY_GET_PARAMETER, capabilities))\n                            sendGetParameterCommand(outputStream, uriRtsp, cSeq.addAndGet(1), userAgent, sessionFinal, authTokenFinal);\n                        else\n                            sendOptionsCommand(outputStream, uriRtsp, cSeq.addAndGet(1), userAgent, authTokenFinal);\n\n                        // Do not read response right now, since it may contain unread RTP frames.\n                        // RtpHeader.searchForNextRtpHeader will handle that.\n                    } catch (IOException e) {\n                        e.printStackTrace();\n                    }\n                };\n\n                // Blocking call unless exitFlag set to true, thread.interrupt() called or connection closed.\n                try {\n                    readRtpData(\n                            inputStream,\n                            sdpInfo,\n                            exitFlag,\n                            listener,\n                            sessionTimeout / 2 * 1000,\n                            keepAliveListener);\n                } finally {\n                    // Cleanup resources on server side\n                    if (hasCapability(RTSP_CAPABILITY_TEARDOWN, capabilities)) {\n                        if (digestRealmNonce != null)\n                            authToken = getDigestAuthHeader(username, password, \"TEARDOWN\", uriRtsp, digestRealmNonce.first, digestRealmNonce.second);\n                        sendTeardownCommand(outputStream, uriRtsp, cSeq.addAndGet(1), userAgent, authToken, sessionFinal);\n                    }\n                }\n\n            } else {\n                listener.onRtspFailed(\"No tracks found. 
RTSP server issue.\");\n            }\n\n            listener.onRtspDisconnecting();\n            listener.onRtspDisconnected();\n        } catch (UnauthorizedException e) {\n            e.printStackTrace();\n            listener.onRtspFailedUnauthorized();\n        } catch (InterruptedException e) {\n            // Thread interrupted. Expected behavior.\n            listener.onRtspDisconnecting();\n            listener.onRtspDisconnected();\n        } catch (Exception e) {\n            e.printStackTrace();\n            listener.onRtspFailed(e.getMessage());\n        }\n        try {\n            rtspSocket.close();\n        } catch (IOException e) {\n            e.printStackTrace();\n        }\n    }\n\n    @Nullable\n    private static String getUriForSetup(@NonNull String uriRtsp, @Nullable Track track) {\n        if (DEBUG) Log.v(TAG, \"getUriForSetup(uriRtsp='\" + uriRtsp + \"', track=\" + track + \")\");\n        if (track == null)\n            return null;\n        if (track.request == null) {\n            // a=control:trackID=1 is missed\n            Log.w(TAG, \"Track request is empty. 
Skipping it.\");\n            track.request = uriRtsp;\n        }\n        String uriRtspSetup = uriRtsp;\n        // Absolute URL\n        if (track.request.startsWith(\"rtsp://\") || track.request.startsWith(\"rtsps://\")) {\n            uriRtspSetup = track.request;\n        // Relative URL\n        } else {\n            if (!track.request.startsWith(\"/\") && !uriRtspSetup.endsWith(\"/\")) {\n                track.request = \"/\" + track.request;\n            }\n            uriRtspSetup += track.request;\n        }\n        return uriRtspSetup.trim();\n    }\n\n    private static void checkExitFlag(@NonNull AtomicBoolean exitFlag) throws InterruptedException {\n        if (exitFlag.get())\n            throw new InterruptedException();\n    }\n\n    private static void checkStatusCode(int code) throws IOException {\n        switch (code) {\n            case 200:\n                break;\n            case 401:\n                throw new UnauthorizedException();\n            default:\n                throw new IOException(\"Invalid status code \" + code);\n        }\n    }\n\n    private static void readRtpData(\n            @NonNull InputStream inputStream,\n            @NonNull SdpInfo sdpInfo,\n            @NonNull AtomicBoolean exitFlag,\n            @NonNull RtspClientListener listener,\n            int keepAliveTimeout,\n            @NonNull RtspClientKeepAliveListener keepAliveListener)\n    throws IOException {\n        byte[] data = EMPTY_ARRAY; // Usually not bigger than MTU = 15KB\n\n        final RtpParser videoParser = (sdpInfo.videoTrack != null && sdpInfo.videoTrack.videoCodec == VIDEO_CODEC_H265 ?\n                new RtpH265Parser() :\n                new RtpH264Parser());\n        final AudioParser audioParser = sdpInfo.audioTrack != null\n                ? 
switch (sdpInfo.audioTrack.audioCodec) {\n                    case AUDIO_CODEC_AAC -> new AacParser(sdpInfo.audioTrack.mode);\n                    case AUDIO_CODEC_G711_ULAW,\n                         AUDIO_CODEC_G711_ALAW -> new G711Parser();\n                    default -> null;\n                }\n                : null;\n\n        byte[] nalUnitSps = (sdpInfo.videoTrack != null ? sdpInfo.videoTrack.sps : null);\n        byte[] nalUnitPps = (sdpInfo.videoTrack != null ? sdpInfo.videoTrack.pps : null);\n        byte[] nalUnitSei = EMPTY_ARRAY;\n        byte[] nalUnitAud = EMPTY_ARRAY;\n        int videoSeqNum = 0;\n\n        long keepAliveSent = System.currentTimeMillis();\n\n        while (!exitFlag.get()) {\n            RtpHeaderParser.RtpHeader header = RtpHeaderParser.readHeader(inputStream);\n            if (header == null) {\n                continue;\n//                throw new IOException(\"No RTP frames found\");\n            }\n//          header.dumpHeader();\n            if (header.payloadSize > data.length)\n                data = new byte[header.payloadSize];\n\n            NetUtils.readData(inputStream, data, 0, header.payloadSize);\n\n            // Check if keep-alive should be sent\n            long l = System.currentTimeMillis();\n            if (keepAliveTimeout > 0 && l - keepAliveSent > keepAliveTimeout) {\n                keepAliveSent = l;\n                keepAliveListener.onRtspKeepAliveRequested();\n            }\n\n            // Video\n            if (sdpInfo.videoTrack != null && header.payloadType == sdpInfo.videoTrack.payloadType) {\n                if (videoSeqNum > header.sequenceNumber)\n                    Log.w(TAG, \"Invalid video seq num \" + videoSeqNum + \"/\" + header.sequenceNumber);\n                videoSeqNum = header.sequenceNumber;\n\n                byte[] nalUnit;\n                // If extendion bit set in header, skip extension data\n                if (header.extension == 1) {\n                    int 
skipBytes = ((data[2] & 0xFF) << 8 | (data[3] & 0xFF)) * 4 + 4;\n                    nalUnit = videoParser.processRtpPacketAndGetNalUnit(Arrays.copyOfRange(data, skipBytes, data.length),\n                            header.payloadSize - skipBytes, header.marker == 1);\n                } else {\n                    nalUnit = videoParser.processRtpPacketAndGetNalUnit(data, header.payloadSize, header.marker == 1);\n                }\n\n                if (nalUnit != null) {\n                    boolean isH265 = sdpInfo.videoTrack.videoCodec == VIDEO_CODEC_H265;\n                    byte type = VideoCodecUtils.INSTANCE.getNalUnitType(nalUnit, 0, nalUnit.length, isH265);\n//                  Log.i(TAG, \"NAL u: \" + VideoCodecUtils.INSTANCE.getH265NalUnitTypeString(type));\n                    switch (type) {\n                        case VideoCodecUtils.NAL_SPS:\n                            nalUnitSps = nalUnit;\n                            // Looks like there is NAL_IDR_SLICE as well. Send it now.\n                            if (nalUnit.length > VideoCodecUtils.MAX_NAL_SPS_SIZE)\n                                listener.onRtspVideoNalUnitReceived(nalUnit, 0, nalUnit.length, header.getTimestampMsec());\n                            break;\n\n                        case VideoCodecUtils.NAL_PPS:\n                            nalUnitPps = nalUnit;\n                            // Looks like there is NAL_IDR_SLICE as well. 
Send it now.\n                            if (nalUnit.length > VideoCodecUtils.MAX_NAL_SPS_SIZE)\n                                listener.onRtspVideoNalUnitReceived(nalUnit, 0, nalUnit.length, header.getTimestampMsec());\n                            break;\n\n                        case VideoCodecUtils.NAL_AUD:\n                            nalUnitAud = nalUnit;\n                            break;\n\n                        case VideoCodecUtils.NAL_SEI:\n                            nalUnitSei = nalUnit;\n                            break;\n\n                        case VideoCodecUtils.NAL_IDR_SLICE:\n                            // Combine IDR with SPS/PPS\n                            if (nalUnitSps != null && nalUnitPps != null) {\n                                byte[] nalUnitSpsPpsIdr = new byte[nalUnitAud.length + nalUnitSps.length + nalUnitPps.length + nalUnitSei.length + nalUnit.length];\n                                int offset = 0;\n                                System.arraycopy(nalUnitSps, 0, nalUnitSpsPpsIdr, offset, nalUnitSps.length);\n                                offset += nalUnitSps.length;\n                                System.arraycopy(nalUnitPps, 0, nalUnitSpsPpsIdr, offset, nalUnitPps.length);\n                                offset += nalUnitPps.length;\n                                System.arraycopy(nalUnitAud, 0, nalUnitSpsPpsIdr, offset, nalUnitAud.length);\n                                offset += nalUnitAud.length;\n                                System.arraycopy(nalUnitSei, 0, nalUnitSpsPpsIdr, offset, nalUnitSei.length);\n                                offset += nalUnitSei.length;\n                                System.arraycopy(nalUnit, 0, nalUnitSpsPpsIdr, offset, nalUnit.length);\n                                listener.onRtspVideoNalUnitReceived(nalUnitSpsPpsIdr, 0, nalUnitSpsPpsIdr.length, header.getTimestampMsec());\n//                              listener.onRtspVideoNalUnitReceived(nalUnitSppPpsIdr, 0, 
nalUnitSppPpsIdr.length, System.currentTimeMillis());\n                                // Send it only once\n                                nalUnitSps = null;\n                                nalUnitPps = null;\n                                nalUnitSei = EMPTY_ARRAY;\n                                nalUnitAud = EMPTY_ARRAY;\n                                break;\n                            }\n\n                        default:\n                            if (nalUnitSei.length == 0 && nalUnitAud.length == 0) {\n                                listener.onRtspVideoNalUnitReceived(nalUnit, 0, nalUnit.length, header.getTimestampMsec());\n                            } else {\n                                byte[] nalUnitAudSeiSlice = new byte[nalUnitAud.length + nalUnitSei.length + nalUnit.length];\n                                int offset = 0;\n                                System.arraycopy(nalUnitAud, 0, nalUnitAudSeiSlice, offset, nalUnitAud.length);\n                                offset += nalUnitAud.length;\n                                System.arraycopy(nalUnitSei, 0, nalUnitAudSeiSlice, offset, nalUnitSei.length);\n                                offset += nalUnitSei.length;\n                                System.arraycopy(nalUnit, 0, nalUnitAudSeiSlice, offset, nalUnit.length);\n                                listener.onRtspVideoNalUnitReceived(nalUnitAudSeiSlice, 0, nalUnitAudSeiSlice.length, header.getTimestampMsec());\n                                nalUnitSei = EMPTY_ARRAY;\n                                nalUnitAud = EMPTY_ARRAY;\n                            }\n                    }\n                }\n\n            // Audio\n            } else if (sdpInfo.audioTrack != null && header.payloadType == sdpInfo.audioTrack.payloadType) {\n                if (audioParser != null) {\n                    byte[] sample = audioParser.processRtpPacketAndGetSample(data, header.payloadSize);\n                    if (sample != null)\n                   
     listener.onRtspAudioSampleReceived(sample, 0, sample.length, header.getTimestampMsec());\n                }\n\n            // Application\n            } else if (sdpInfo.applicationTrack != null && header.payloadType == sdpInfo.applicationTrack.payloadType) {\n                listener.onRtspApplicationDataReceived(data, 0, header.payloadSize, header.getTimestampMsec());\n\n            // Unknown\n            } else {\n                // https://www.iana.org/assignments/rtp-parameters/rtp-parameters.xhtml\n                if (DEBUG && header.payloadType >= 96 && header.payloadType <= 127)\n                    Log.w(TAG, \"Invalid RTP payload type \" + header.payloadType);\n            }\n        }\n    }\n\n    private static void sendSimpleCommand(\n            @NonNull String command,\n            @NonNull OutputStream outputStream,\n            @NonNull String request,\n            int cSeq,\n            @Nullable String userAgent,\n            @Nullable String session,\n            @Nullable String authToken)\n    throws IOException {\n        outputStream.write((command + \" \" + request + \" RTSP/1.0\" + CRLF).getBytes());\n        if (authToken != null)\n            outputStream.write((\"Authorization: \" + authToken + CRLF).getBytes());\n        outputStream.write((\"CSeq: \" + cSeq + CRLF).getBytes());\n        if (userAgent != null)\n            outputStream.write((\"User-Agent: \" + userAgent + CRLF).getBytes());\n        if (session != null)\n            outputStream.write((\"Session: \" + session + CRLF).getBytes());\n        outputStream.write(CRLF.getBytes());\n        outputStream.flush();\n    }\n\n    private static void sendOptionsCommand(\n            @NonNull OutputStream outputStream,\n            @NonNull String request,\n            int cSeq,\n            @Nullable String userAgent,\n            @Nullable String authToken)\n    throws IOException {\n        if (DEBUG) Log.v(TAG, \"sendOptionsCommand(request=\\\"\" + request + \"\\\", 
cSeq=\" + cSeq + \")\");\n        sendSimpleCommand(\"OPTIONS\", outputStream, request, cSeq, userAgent, null, authToken);\n    }\n\n    private static void sendGetParameterCommand(\n            @NonNull OutputStream outputStream,\n            @NonNull String request,\n            int cSeq,\n            @Nullable String userAgent,\n            @Nullable String session,\n            @Nullable String authToken)\n    throws IOException {\n        if (DEBUG) Log.v(TAG, \"sendGetParameterCommand(request=\\\"\" + request + \"\\\", cSeq=\" + cSeq + \")\");\n        sendSimpleCommand(\"GET_PARAMETER\", outputStream, request, cSeq, userAgent, session, authToken);\n    }\n\n    private static void sendDescribeCommand(\n            @NonNull OutputStream outputStream,\n            @NonNull String request,\n            int cSeq,\n            @Nullable String userAgent,\n            @Nullable String authToken)\n    throws IOException {\n        if (DEBUG) Log.v(TAG, \"sendDescribeCommand(request=\\\"\" + request + \"\\\", cSeq=\" + cSeq + \")\");\n        outputStream.write((\"DESCRIBE \" + request + \" RTSP/1.0\" + CRLF).getBytes());\n        outputStream.write((\"Accept: application/sdp\" + CRLF).getBytes());\n        if (authToken != null)\n            outputStream.write((\"Authorization: \" + authToken + CRLF).getBytes());\n        outputStream.write((\"CSeq: \" + cSeq + CRLF).getBytes());\n        if (userAgent != null)\n            outputStream.write((\"User-Agent: \" + userAgent + CRLF).getBytes());\n        outputStream.write(CRLF.getBytes());\n        outputStream.flush();\n    }\n\n    private static void sendTeardownCommand(\n            @NonNull OutputStream outputStream,\n            @NonNull String request,\n            int cSeq,\n            @Nullable String userAgent,\n            @Nullable String authToken,\n            @Nullable String session)\n    throws IOException {\n        if (DEBUG) Log.v(TAG, \"sendTeardownCommand(request=\\\"\" + request + \"\\\", 
cSeq=\" + cSeq + \")\");\n        outputStream.write((\"TEARDOWN \" + request + \" RTSP/1.0\" + CRLF).getBytes());\n        if (authToken != null)\n            outputStream.write((\"Authorization: \" + authToken + CRLF).getBytes());\n        outputStream.write((\"CSeq: \" + cSeq + CRLF).getBytes());\n        if (userAgent != null)\n            outputStream.write((\"User-Agent: \" + userAgent + CRLF).getBytes());\n        if (session != null)\n            outputStream.write((\"Session: \" + session + CRLF).getBytes());\n        outputStream.write(CRLF.getBytes());\n        outputStream.flush();\n    }\n\n    private static void sendSetupCommand(\n            @NonNull OutputStream outputStream,\n            @NonNull String request,\n            int cSeq,\n            @Nullable String userAgent,\n            @Nullable String authToken,\n            @Nullable String session,\n            @NonNull String interleaved)\n    throws IOException {\n        if (DEBUG) Log.v(TAG, \"sendSetupCommand(request=\\\"\" + request + \"\\\", cSeq=\" + cSeq + \")\");\n        outputStream.write((\"SETUP \" + request + \" RTSP/1.0\" + CRLF).getBytes());\n        outputStream.write((\"Transport: RTP/AVP/TCP;unicast;interleaved=\" + interleaved + CRLF).getBytes());\n        if (authToken != null)\n            outputStream.write((\"Authorization: \" + authToken + CRLF).getBytes());\n        outputStream.write((\"CSeq: \" + cSeq + CRLF).getBytes());\n        if (userAgent != null)\n            outputStream.write((\"User-Agent: \" + userAgent + CRLF).getBytes());\n        if (session != null)\n            outputStream.write((\"Session: \" + session + CRLF).getBytes());\n        outputStream.write(CRLF.getBytes());\n        outputStream.flush();\n    }\n\n    private static void sendPlayCommand(\n            @NonNull OutputStream outputStream,\n            @NonNull String request,\n            int cSeq,\n            @Nullable String userAgent,\n            @Nullable String authToken,\n         
   @NonNull String session)\n    throws IOException {\n        if (DEBUG) Log.v(TAG, \"sendPlayCommand(request=\\\"\" + request + \"\\\", cSeq=\" + cSeq + \")\");\n        outputStream.write((\"PLAY \" + request + \" RTSP/1.0\" + CRLF).getBytes());\n        outputStream.write((\"Range: npt=0.000-\" + CRLF).getBytes());\n        if (authToken != null)\n            outputStream.write((\"Authorization: \" + authToken + CRLF).getBytes());\n        outputStream.write((\"CSeq: \" + cSeq + CRLF).getBytes());\n        if (userAgent != null)\n            outputStream.write((\"User-Agent: \" + userAgent + CRLF).getBytes());\n        outputStream.write((\"Session: \" + session + CRLF).getBytes());\n        outputStream.write(CRLF.getBytes());\n        outputStream.flush();\n    }\n\n    private int readResponseStatusCode(@NonNull InputStream inputStream) throws IOException {\n//        String line = readLine(inputStream);\n//        if (debug)\n//            Log.d(TAG_DEBUG, \"\" + line);\n        String line;\n        byte[] rtspHeader = \"RTSP/1.0 \".getBytes();\n        // Search fpr \"RTSP/1.0 \"\n        while (!exitFlag.get() && readUntilBytesFound(inputStream, rtspHeader) && (line = readLine(inputStream)) != null) {\n            if (debug)\n                Log.d(TAG_DEBUG, \"\" + line);\n//            int indexRtsp = line.indexOf(\"TSP/1.0 \"); // 8 characters, 'R' already found\n//            if (indexRtsp >= 0) {\n            int indexCode = line.indexOf(' ');\n            String code = line.substring(0, indexCode);\n            try {\n                int statusCode = Integer.parseInt(code);\n//                if (debug)\n//                    Log.d(TAG_DEBUG, \"Status code: \" + statusCode);\n                return statusCode;\n            } catch (NumberFormatException e) {\n                // Does not fulfill standard \"RTSP/1.1 200 OK\" token\n                // Continue search for\n            }\n//            }\n        }\n        if (debug)\n            
Log.w(TAG_DEBUG, \"Could not obtain status code\");\n        return -1;\n    }\n\n    @NonNull\n    private ArrayList<Pair<String, String>> readResponseHeaders(@NonNull InputStream inputStream) throws IOException {\n        ArrayList<Pair<String, String>> headers = new ArrayList<>();\n        String line;\n        while (!exitFlag.get() && !TextUtils.isEmpty(line = readLine(inputStream))) {\n            if (debug)\n                Log.d(TAG_DEBUG, \"\" + line);\n            if (CRLF.equals(line)) {\n                return headers;\n            } else {\n                String[] pairs = line.split(\":\", 2);\n                if (pairs.length == 2) {\n                    headers.add(Pair.create(pairs[0].trim(), pairs[1].trim()));\n                }\n            }\n        }\n        return headers;\n    }\n\n    /**\n     * Get a list of tracks from SDP. Usually contains video and audio track only.\n     * @return array of 3 tracks. First is video track, second audio track, third application track.\n     */\n    @NonNull\n    private static Track[] getTracksFromDescribeParams(@NonNull List<Pair<String, String>> params) {\n        Track[] tracks = new Track[3];\n        Track currentTrack = null;\n        for (Pair<String, String> param: params) {\n            switch (param.first) {\n                case \"m\":\n                    // m=video 0 RTP/AVP 96\n                    if (param.second.startsWith(\"video\")) {\n                        currentTrack = new VideoTrack();\n                        tracks[0] = currentTrack;\n\n                    // m=audio 0 RTP/AVP 97\n                    // m=audio 0 RTP/AVP 0 8\n                    } else if (param.second.startsWith(\"audio\")) {\n                        currentTrack = new AudioTrack();\n                        tracks[1] = currentTrack;\n\n                    // m=application 0 RTP/AVP 99\n                    // a=rtpmap:99 com.my/90000\n                    } else if (param.second.startsWith(\"application\")) {\n  
                      currentTrack = new ApplicationTrack();\n                        tracks[2] = currentTrack;\n\n                    } else if (param.second.startsWith(\"text\")) {\n                        Log.w(TAG, \"Media track 'text' is not supported\");\n\n                    } else if (param.second.startsWith(\"message\")) {\n                        Log.w(TAG, \"Media track 'message' is not supported\");\n\n                    } else {\n                        currentTrack = null;\n                    }\n\n                    if (currentTrack != null) {\n                        // m=<media> <port>/<number of ports> <proto> <fmt> ...\n                        String[] values = TextUtils.split(param.second, \" \");\n                        try {\n                            currentTrack.payloadType = (values.length > 3 ? Integer.parseInt(values[3]) : -1);\n                            // Handle static PT that comes with no rtpmap\n                            if (currentTrack instanceof AudioTrack track) {\n                                switch (currentTrack.payloadType) {\n                                    case 0 -> { // uLaw\n                                        track.audioCodec = AUDIO_CODEC_G711_ULAW;\n                                        track.sampleRateHz = 8000;\n                                        track.channels = 1;\n                                    }\n                                    case 8 -> { // aLaw\n                                        track.audioCodec = AUDIO_CODEC_G711_ALAW;\n                                        track.sampleRateHz = 8000;\n                                        track.channels = 1;\n                                    }\n                                }\n                            }\n                        } catch (Exception e) {\n                            currentTrack.payloadType = -1;\n                        }\n                        if (currentTrack.payloadType == -1)\n                          
  Log.e(TAG, \"Failed to get payload type from \\\"m=\" + param.second + \"\\\"\");\n                    }\n                    break;\n\n                case \"a\":\n                    // a=control:trackID=1\n                    if (currentTrack != null) {\n                        if (param.second.startsWith(\"control:\")) {\n                            currentTrack.request = param.second.substring(8);\n\n                        // a=fmtp:96 packetization-mode=1; profile-level-id=4D4029; sprop-parameter-sets=Z01AKZpmBkCb8uAtQEBAQXpw,aO48gA==\n                        // a=fmtp:97 streamtype=5; profile-level-id=15; mode=AAC-hbr; config=1408; sizeLength=13; indexLength=3; indexDeltaLength=3; profile=1; bitrate=32000;\n                        // a=fmtp:97 streamtype=5;profile-level-id=1;mode=AAC-hbr;sizelength=13;indexlength=3;indexdeltalength=3;config=1408\n                        // a=fmtp:96 streamtype=5; profile-level-id=14; mode=AAC-lbr; config=1388; sizeLength=6; indexLength=2; indexDeltaLength=2; constantDuration=1024; maxDisplacement=5\n                        // a=fmtp:96 profile-level-id=1;mode=AAC-hbr;sizelength=13;indexlength=3;indexdeltalength=3;config=1210fff15081ffdffc\n                        // a=fmtp:96\n                        } else if (param.second.startsWith(\"fmtp:\")) {\n                            // Video\n                            if (currentTrack instanceof VideoTrack) {\n                                updateVideoTrackFromDescribeParam((VideoTrack)tracks[0], param);\n                            // Audio\n                            } else if (currentTrack instanceof AudioTrack) {\n                                updateAudioTrackFromDescribeParam((AudioTrack)tracks[1], param);\n                            }\n\n                        // a=rtpmap:96 H264/90000\n                        // a=rtpmap:97 mpeg4-generic/16000/1\n                        // a=rtpmap:97 MPEG4-GENERIC/16000\n                        // a=rtpmap:97 G726-32/8000\n      
                  // a=rtpmap:96 mpeg4-generic/44100/2\n                        } else if (param.second.startsWith(\"rtpmap:\")) {\n                            // Video\n                            String[] values = TextUtils.split(param.second, \" \");\n                            if (currentTrack instanceof VideoTrack) {\n                                if (values.length > 1) {\n                                    values = TextUtils.split(values[1], \"/\");\n                                    if (values.length > 0) {\n                                        switch (values[0].toLowerCase()) {\n                                            case \"h264\" -> ((VideoTrack) tracks[0]).videoCodec = VIDEO_CODEC_H264;\n                                            case \"h265\" -> ((VideoTrack) tracks[0]).videoCodec = VIDEO_CODEC_H265;\n                                            default -> Log.w(TAG, \"Unknown video codec \\\"\" + values[0] + \"\\\"\");\n                                        }\n                                        Log.i(TAG, \"Video: \" + values[0]);\n                                    }\n                                }\n\n                            // Audio\n                            } else if (currentTrack instanceof AudioTrack) {\n                                if (values.length > 1) {\n                                    AudioTrack track = ((AudioTrack) tracks[1]);\n                                    values = TextUtils.split(values[1], \"/\");\n                                    if (values.length > 1) {\n                                        switch (values[0].toLowerCase()) {\n                                            case \"mpeg4-generic\" -> track.audioCodec = AUDIO_CODEC_AAC;\n                                            case \"opus\" -> track.audioCodec = AUDIO_CODEC_OPUS;\n                                            case \"pcmu\" -> track.audioCodec = AUDIO_CODEC_G711_ULAW;\n                                            case \"pcma\" 
-> track.audioCodec = AUDIO_CODEC_G711_ALAW;\n                                            default -> {\n                                                Log.w(TAG, \"Unknown audio codec \\\"\" + values[0] + \"\\\"\");\n                                                track.audioCodec = AUDIO_CODEC_UNKNOWN;\n                                            }\n                                        }\n                                        track.sampleRateHz = Integer.parseInt(values[1]);\n                                        // If no channels specified, use mono, e.g. \"a=rtpmap:97 MPEG4-GENERIC/8000\"\n                                        track.channels = values.length > 2 ? Integer.parseInt(values[2]) : 1;\n                                        Log.i(TAG, \"Audio: \" + getAudioCodecName(track.audioCodec) + \", sample rate: \" + track.sampleRateHz + \" Hz, channels: \" + track.channels);\n                                    }\n                                }\n\n                            // Application\n                            } else {\n                                // Do nothing\n                            }\n                        }\n                    }\n                    break;\n            }\n        }\n        return tracks;\n    }\n\n//v=0\n//o=- 1542237507365806 1542237507365806 IN IP4 10.0.1.111\n//s=Media Presentation\n//e=NONE\n//b=AS:50032\n//t=0 0\n//a=control:*\n//a=range:npt=0.000000-\n//m=video 0 RTP/AVP 96\n//c=IN IP4 0.0.0.0\n//b=AS:50000\n//a=framerate:25.0\n//a=transform:1.000000,0.000000,0.000000;0.000000,1.000000,0.000000;0.000000,0.000000,1.000000\n//a=control:trackID=1\n//a=rtpmap:96 H264/90000\n//a=fmtp:96 packetization-mode=1; profile-level-id=4D4029; sprop-parameter-sets=Z01AKZpmBkCb8uAtQEBAQXpw,aO48gA==\n//m=audio 0 RTP/AVP 97\n//c=IN IP4 0.0.0.0\n//b=AS:32\n//a=control:trackID=2\n//a=rtpmap:97 G726-32/8000\n\n// v=0\n// o=- 14190294250618174561 14190294250618174561 IN IP4 127.0.0.1\n// s=IP Webcam\n// c=IN IP4 
0.0.0.0\n// t=0 0\n// a=range:npt=now-\n// a=control:*\n// m=video 0 RTP/AVP 96\n// a=rtpmap:96 H264/90000\n// a=control:h264\n// a=fmtp:96 packetization-mode=1;profile-level-id=42C028;sprop-parameter-sets=Z0LAKIyNQDwBEvLAPCIRqA==,aM48gA==;\n// a=cliprect:0,0,1920,1080\n// a=framerate:30.0\n// a=framesize:96 1080-1920\n\n    // Pair first - name, e.g. \"a\"; second - value, e.g \"cliprect:0,0,1920,1080\"\n    @NonNull\n    private static List<Pair<String, String>> getDescribeParams(@NonNull String text) {\n        ArrayList<Pair<String, String>> list = new ArrayList<>();\n        String[] params = TextUtils.split(text, \"\\r\\n\");\n        for (String param : params) {\n            int i = param.indexOf('=');\n            if (i > 0) {\n                String name = param.substring(0, i).trim();\n                String value = param.substring(i + 1);\n                list.add(Pair.create(name, value));\n            }\n        }\n        return list;\n    }\n\n    @NonNull\n    private static SdpInfo getSdpInfoFromDescribeParams(@NonNull List<Pair<String, String>> params) {\n        SdpInfo sdpInfo = new SdpInfo();\n\n        Track[] tracks = getTracksFromDescribeParams(params);\n        sdpInfo.videoTrack = ((VideoTrack)tracks[0]);\n        sdpInfo.audioTrack = ((AudioTrack)tracks[1]);\n        sdpInfo.applicationTrack = ((ApplicationTrack)tracks[2]);\n\n        for (Pair<String, String> param : params) {\n            switch (param.first) {\n                case \"s\" -> sdpInfo.sessionName = param.second;\n                case \"i\" -> sdpInfo.sessionDescription = param.second;\n            }\n        }\n        return sdpInfo;\n    }\n\n    // a=fmtp:97 streamtype=5;profile-level-id=1;mode=AAC-hbr;sizelength=13;indexlength=3;indexdeltalength=3;config=1408\n    @Nullable\n    private static List<Pair<String, String>> getSdpAParams(@NonNull Pair<String, String> param) {\n        if (param.first.equals(\"a\") && param.second.startsWith(\"fmtp:\") && 
param.second.length() > 8) { //\n            String value = param.second.substring(8).trim(); // fmtp can be '96' (2 chars) and '127' (3 chars)\n            String[] paramsA = TextUtils.split(value, \";\");\n            // streamtype=5\n            // profile-level-id=1\n            // mode=AAC-hbr\n            ArrayList<Pair<String, String>> retParams = new ArrayList<>();\n            for (String paramA: paramsA) {\n                paramA = paramA.trim();\n                // sprop-parameter-sets=Z0LAKIyNQDwBEvLAPCIRqA==,aM48gA==\n                int i = paramA.indexOf(\"=\");\n                if (i != -1)\n                    retParams.add(\n                            Pair.create(\n                                    paramA.substring(0, i),\n                                    paramA.substring(i + 1)));\n            }\n            return retParams;\n        } else {\n            Log.w(TAG, \"Not a valid fmtp\");\n        }\n        return null;\n    }\n\n    @NonNull\n    private static byte[] getNalUnitFromSprop(String nalBase64) {\n        byte[] nal = Base64.decode(nalBase64, Base64.NO_WRAP);\n        byte[] nalWithStart = new byte[nal.length + 4];\n        // Add 00 00 00 01 NAL unit header\n        nalWithStart[0] = 0;\n        nalWithStart[1] = 0;\n        nalWithStart[2] = 0;\n        nalWithStart[3] = 1;\n        System.arraycopy(nal, 0, nalWithStart, 4, nal.length);\n        return nalWithStart;\n    }\n\n    private static void updateVideoTrackFromDescribeParam(@NonNull VideoTrack videoTrack, @NonNull Pair<String, String> param) {\n        // a=fmtp:96 packetization-mode=1;profile-level-id=42C028;sprop-parameter-sets=Z0LAKIyNQDwBEvLAPCIRqA==,aM48gA==;\n        // a=fmtp:96 packetization-mode=1; profile-level-id=4D4029; sprop-parameter-sets=Z01AKZpmBkCb8uAtQEBAQXpw,aO48gA==\n        // a=fmtp:99 sprop-parameter-sets=Z0LgKdoBQBbpuAgIMBA=,aM4ySA==;packetization-mode=1;profile-level-id=42e029\n        // a=fmtp:98 
profile-id=1;sprop-sps=QgEBAWAAAAMAgAAAAwAAAwB4oAWCAJB/ja7tTd3Jdf+ACAAFtwUFBQQAAA+gAAGGoch3uUQD6AARlAB9AAIygg==;sprop-pps=RAHBcrAiQA==;sprop-vps=QAEMAf//AWAAAAMAgAAAAwAAAwB4rAk=\n        List<Pair<String, String>> params = getSdpAParams(param);\n        if (params != null) {\n            for (Pair<String, String> pair: params) {\n                switch (pair.first.toLowerCase()) {\n                    case \"sprop-sps\" -> {\n                        videoTrack.sps = getNalUnitFromSprop(pair.second);\n                    }\n                    case \"sprop-pps\" -> {\n                        videoTrack.pps = getNalUnitFromSprop(pair.second);\n                    }\n                    case \"sprop-vps\" -> {\n                        videoTrack.vps = getNalUnitFromSprop(pair.second);\n                    }\n                    case \"sprop-parameter-sets\" -> {\n                        String[] paramsSpsPps = TextUtils.split(pair.second, \",\");\n                        if (paramsSpsPps.length > 1) {\n                            videoTrack.sps = getNalUnitFromSprop(paramsSpsPps[0]);\n                            videoTrack.pps = getNalUnitFromSprop(paramsSpsPps[1]);\n//                            Base64.decode(paramsSpsPps[0], Base64.NO_WRAP);\n//                            byte[] pps = Base64.decode(paramsSpsPps[1], Base64.NO_WRAP);\n//                            byte[] nalSps = new byte[sps.length + 4];\n//                            byte[] nalPps = new byte[pps.length + 4];\n//                            // Add 00 00 00 01 NAL unit header\n//                            nalSps[0] = 0;\n//                            nalSps[1] = 0;\n//                            nalSps[2] = 0;\n//                            nalSps[3] = 1;\n//                            System.arraycopy(sps, 0, nalSps, 4, sps.length);\n//                            nalPps[0] = 0;\n//                            nalPps[1] = 0;\n//                            nalPps[2] = 0;\n//                            
nalPps[3] = 1;\n//                            System.arraycopy(pps, 0, nalPps, 4, pps.length);\n//                            videoTrack.sps = nalSps;\n//                            videoTrack.pps = nalPps;\n                        }\n                    }\n                    // packetization-mode=1\n                    case \"packetization-mode\" -> {\n                        // 0 - single NAL unit (default)\n                        // 1 - non-interleaved mode (STAP-A and FU-A NAL units)\n                        // 2 - interleaved mode\n                        try {\n                            int mode = Integer.parseInt(pair.second);\n                            if (mode == 2)\n                                Log.e(TAG, \"Interleaved packetization mode is not supported\");\n                        } catch (NumberFormatException ignored) {\n                        }\n                    }\n                }\n            }\n        }\n    }\n\n    @NonNull\n    private static byte[] getBytesFromHexString(@NonNull String config) {\n        // \"1210fff1\" -> [12, 10, ff, f1]\n        return new BigInteger(config ,16).toByteArray();\n    }\n\n    private static void updateAudioTrackFromDescribeParam(@NonNull AudioTrack audioTrack, @NonNull Pair<String, String> param) {\n        // a=fmtp:96 streamtype=5; profile-level-id=14; mode=AAC-lbr; config=1388; sizeLength=6; indexLength=2; indexDeltaLength=2; constantDuration=1024; maxDisplacement=5\n        // a=fmtp:97 streamtype=5;profile-level-id=1;mode=AAC-hbr;sizelength=13;indexlength=3;indexdeltalength=3;config=1408\n        // a=fmtp:96 profile-level-id=1;mode=AAC-hbr;sizelength=13;indexlength=3;indexdeltalength=3;config=1210fff15081ffdffc\n        List<Pair<String, String>> params = getSdpAParams(param);\n        if (params != null) {\n            for (Pair<String, String> pair: params) {\n                switch (pair.first.toLowerCase()) {\n                    case \"mode\" -> audioTrack.mode = pair.second;\n       
             case \"config\" -> audioTrack.config = getBytesFromHexString(pair.second);\n                }\n            }\n        }\n    }\n\n    /**\n     * Search for header \"Content-Base: rtsp://example.com/stream/\"\n     * and return \"rtsp://example.com/stream/\"\n     */\n    @Nullable\n    private static String getHeaderContentBase(@NonNull ArrayList<Pair<String, String>> headers) {\n        String contentBase = getHeader(headers, \"content-base\");\n        if (!TextUtils.isEmpty(contentBase)) {\n            return contentBase;\n        }\n        return null;\n    }\n\n    private static int getHeaderContentLength(@NonNull ArrayList<Pair<String, String>> headers) {\n        String length = getHeader(headers, \"content-length\");\n        if (!TextUtils.isEmpty(length)) {\n            try {\n                return Integer.parseInt(length);\n            } catch (NumberFormatException ignored) {\n            }\n        }\n        return -1;\n    }\n\n    private static int getSupportedCapabilities(@NonNull ArrayList<Pair<String, String>> headers) {\n        for (Pair<String, String> head: headers) {\n            String h = head.first.toLowerCase();\n            // Public: OPTIONS, DESCRIBE, SETUP, PLAY, GET_PARAMETER, SET_PARAMETER, TEARDOWN\n            if (\"public\".equals(h)) {\n                int mask = 0;\n                String[] tokens = TextUtils.split(head.second.toLowerCase(), \",\");\n                for (String token: tokens) {\n                    switch (token.trim()) {\n                        case \"options\" -> mask |= RTSP_CAPABILITY_OPTIONS;\n                        case \"describe\" -> mask |= RTSP_CAPABILITY_DESCRIBE;\n                        case \"announce\" -> mask |= RTSP_CAPABILITY_ANNOUNCE;\n                        case \"setup\" -> mask |= RTSP_CAPABILITY_SETUP;\n                        case \"play\" -> mask |= RTSP_CAPABILITY_PLAY;\n                        case \"record\" -> mask |= RTSP_CAPABILITY_RECORD;\n                   
     case \"pause\" -> mask |= RTSP_CAPABILITY_PAUSE;\n                        case \"teardown\" -> mask |= RTSP_CAPABILITY_TEARDOWN;\n                        case \"set_parameter\" -> mask |= RTSP_CAPABILITY_SET_PARAMETER;\n                        case \"get_parameter\" -> mask |= RTSP_CAPABILITY_GET_PARAMETER;\n                        case \"redirect\" -> mask |= RTSP_CAPABILITY_REDIRECT;\n                    }\n                }\n                return mask;\n            }\n        }\n        return RTSP_CAPABILITY_NONE;\n    }\n\n    @Nullable\n    private static Pair<String, String> getHeaderWwwAuthenticateDigestRealmAndNonce(@NonNull ArrayList<Pair<String, String>> headers) {\n        for (Pair<String, String> head: headers) {\n            String h = head.first.toLowerCase();\n            // WWW-Authenticate: Digest realm=\"AXIS_00408CEF081C\", nonce=\"00054cecY7165349339ae05f7017797d6b0aaad38f6ff45\", stale=FALSE\n            // WWW-Authenticate: Basic realm=\"AXIS_00408CEF081C\"\n            // WWW-Authenticate: Digest realm=\"Login to 4K049EBPAG1D7E7\", nonce=\"de4ccb15804565dc8a4fa5b115695f4f\"\n            if (\"www-authenticate\".equals(h) && head.second.toLowerCase().startsWith(\"digest\")) {\n                String v = head.second.substring(7).trim();\n                int begin, end;\n\n                begin = v.indexOf(\"realm=\");\n                begin = v.indexOf('\"', begin) + 1;\n                end = v.indexOf('\"', begin);\n                String digestRealm = v.substring(begin, end);\n\n                begin = v.indexOf(\"nonce=\");\n                begin = v.indexOf('\"', begin)+1;\n                end = v.indexOf('\"', begin);\n                String digestNonce = v.substring(begin, end);\n\n                return Pair.create(digestRealm, digestNonce);\n            }\n        }\n        return null;\n    }\n\n    @Nullable\n    private static String getHeaderWwwAuthenticateBasicRealm(@NonNull ArrayList<Pair<String, String>> headers) {\n     
   for (Pair<String, String> head: headers) {\n            // Session: ODgyODg3MjQ1MDczODk3NDk4Nw\n            String h = head.first.toLowerCase();\n            String v = head.second.toLowerCase();\n            // WWW-Authenticate: Digest realm=\"AXIS_00408CEF081C\", nonce=\"00054cecY7165349339ae05f7017797d6b0aaad38f6ff45\", stale=FALSE\n            // WWW-Authenticate: Basic realm=\"AXIS_00408CEF081C\"\n            if (\"www-authenticate\".equals(h) && v.startsWith(\"basic\")) {\n                v = v.substring(6).trim();\n                // realm=\n                // AXIS_00408CEF081C\n                String[] tokens = TextUtils.split(v, \"\\\"\");\n                if (tokens.length > 2)\n                    return tokens[1];\n            }\n        }\n        return null;\n    }\n\n    // Basic authentication\n    @NonNull\n    private static String getBasicAuthHeader(@Nullable String username, @Nullable String password) {\n        String auth = (username == null ? \"\" : username) + \":\" + (password == null ? 
\"\" : password);\n        return \"Basic \" + new String(Base64.encode(auth.getBytes(StandardCharsets.ISO_8859_1), Base64.NO_WRAP));\n    }\n\n    // Digest authentication\n    @Nullable\n    private static String getDigestAuthHeader(\n            @Nullable String username,\n            @Nullable String password,\n            @NonNull String method,\n            @NonNull String digestUri,\n            @NonNull String realm,\n            @NonNull String nonce) {\n        try {\n            MessageDigest md = MessageDigest.getInstance(\"MD5\");\n            byte[] ha1;\n\n            if (username == null)\n                username = \"\";\n            if (password == null)\n                password = \"\";\n\n            // calc A1 digest\n            md.update(username.getBytes(StandardCharsets.ISO_8859_1));\n            md.update((byte) ':');\n            md.update(realm.getBytes(StandardCharsets.ISO_8859_1));\n            md.update((byte) ':');\n            md.update(password.getBytes(StandardCharsets.ISO_8859_1));\n            ha1 = md.digest();\n\n            // calc A2 digest\n            md.reset();\n            md.update(method.getBytes(StandardCharsets.ISO_8859_1));\n            md.update((byte) ':');\n            md.update(digestUri.getBytes(StandardCharsets.ISO_8859_1));\n            byte[] ha2 = md.digest();\n\n            // calc response\n            md.update(getHexStringFromBytes(ha1).getBytes(StandardCharsets.ISO_8859_1));\n            md.update((byte) ':');\n            md.update(nonce.getBytes(StandardCharsets.ISO_8859_1));\n            md.update((byte) ':');\n            // TODO add support for more secure version of digest auth\n            //md.update(nc.getBytes(StandardCharsets.ISO_8859_1));\n            //md.update((byte) ':');\n            //md.update(cnonce.getBytes(StandardCharsets.ISO_8859_1));\n            //md.update((byte) ':');\n            //md.update(qop.getBytes(StandardCharsets.ISO_8859_1));\n            //md.update((byte) 
':');\n            md.update(getHexStringFromBytes(ha2).getBytes(StandardCharsets.ISO_8859_1));\n            String response = getHexStringFromBytes(md.digest());\n\n//            log.trace(\"username=\\\"{}\\\", realm=\\\"{}\\\", nonce=\\\"{}\\\", uri=\\\"{}\\\", response=\\\"{}\\\"\",\n//                    userName, digestRealm, digestNonce, digestUri, response);\n\n            return \"Digest username=\\\"\" + username + \"\\\", realm=\\\"\" + realm + \"\\\", nonce=\\\"\" + nonce + \"\\\", uri=\\\"\" + digestUri + \"\\\", response=\\\"\" + response + \"\\\"\";\n        } catch (Exception e) {\n            e.printStackTrace();\n        }\n        return null;\n    }\n\n    @NonNull\n    private static String getHexStringFromBytes(@NonNull byte[] bytes) {\n        StringBuilder buf = new StringBuilder();\n        for (byte b : bytes)\n            buf.append(String.format(\"%02x\", b));\n        return buf.toString();\n    }\n\n    @NonNull\n    private static String readContentAsText(@NonNull InputStream inputStream, int length) throws IOException {\n        if (length <= 0)\n            return \"\";\n        byte[] b = new byte[length];\n        int read = readData(inputStream, b, 0, length);\n        return new String(b, 0, read);\n    }\n\n    // int memcmp ( const void * ptr1, const void * ptr2, size_t num );\n    public static boolean memcmp(\n            @NonNull byte[] source1,\n            int offsetSource1,\n            @NonNull byte[] source2,\n            int offsetSource2,\n            int num) {\n        if (source1.length - offsetSource1 < num)\n            return false;\n        if (source2.length - offsetSource2 < num)\n            return false;\n\n        for (int i = 0; i < num; i++) {\n            if (source1[offsetSource1 + i] != source2[offsetSource2 + i])\n                return false;\n        }\n        return true;\n    }\n\n    private static void shiftLeftArray(@NonNull byte[] array, int num) {\n        // ABCDEF -> BCDEF\n        if 
(num - 1 >= 0)\n            System.arraycopy(array, 1, array, 0, num - 1);\n    }\n\n    private boolean readUntilBytesFound(@NonNull InputStream inputStream, @NonNull byte[] array) throws IOException {\n        byte[] buffer = new byte[array.length];\n\n        // Fill in buffer\n        if (NetUtils.readData(inputStream, buffer, 0, buffer.length) != buffer.length)\n            return false; // EOF\n\n        while (!exitFlag.get()) {\n            // Check if buffer is the same one\n            if (memcmp(buffer, 0, array, 0, buffer.length)) {\n                return true;\n            }\n            // ABCDEF -> BCDEFF\n            shiftLeftArray(buffer, buffer.length);\n            // Read 1 byte into last buffer item\n            if (NetUtils.readData(inputStream, buffer, buffer.length - 1, 1) != 1) {\n                return false; // EOF\n            }\n        }\n        return false;\n    }\n\n//    private boolean readUntilByteFound(@NonNull InputStream inputStream, byte bt) throws IOException {\n//        byte[] buffer = new byte[1];\n//        int readBytes;\n//        while (!exitFlag.get()) {\n//            readBytes = inputStream.read(buffer, 0, 1);\n//            if (readBytes == -1) // EOF\n//                return false;\n//            if (readBytes == 1 && buffer[0] == bt) {\n//                return true;\n//            }\n//        }\n//        return false;\n//    }\n\n    @Nullable\n    private String readLine(@NonNull InputStream inputStream) throws IOException {\n        byte[] bufferLine = new byte[MAX_LINE_SIZE];\n        int offset = 0;\n        int readBytes;\n        do {\n            // Didn't find \"\\r\\n\" within 4K bytes\n            if (offset >= MAX_LINE_SIZE) {\n                throw new NoResponseHeadersException();\n            }\n\n            // Read 1 byte\n            readBytes = inputStream.read(bufferLine, offset, 1);\n            if (readBytes == 1) {\n                // Check for EOL\n                // Some cameras 
like Linksys WVC200 do not send \\n instead of \\r\\n\n                if (offset > 0 && /*bufferLine[offset-1] == '\\r' &&*/ bufferLine[offset] == '\\n') {\n                    // Found empty EOL. End of header section\n                    if (offset == 1)\n                        return \"\";//break;\n\n                    // Found EOL. Add to array.\n                    return new String(bufferLine, 0, offset-1);\n                } else {\n                    offset++;\n                }\n            }\n        } while (readBytes > 0 && !exitFlag.get());\n        return null;\n    }\n\n    private static int readData(@NonNull InputStream inputStream, @NonNull byte[] buffer, int offset, int length) throws IOException {\n        if (DEBUG) Log.v(TAG, \"readData(offset=\" + offset + \", length=\" + length + \")\");\n        int readBytes;\n        int totalReadBytes = 0;\n        do {\n            readBytes = inputStream.read(buffer, offset + totalReadBytes, length - totalReadBytes);\n            if (readBytes > 0)\n                totalReadBytes += readBytes;\n        } while (readBytes >= 0 && totalReadBytes < length);\n        return totalReadBytes;\n    }\n\n    private static void dumpHeaders(@NonNull ArrayList<Pair<String, String>> headers) {\n        if (DEBUG) {\n            for (Pair<String, String> head : headers) {\n                Log.d(TAG, head.first + \": \" + head.second);\n            }\n        }\n    }\n\n    @Nullable\n    private static String getHeader(@NonNull ArrayList<Pair<String, String>> headers, @NonNull String header) {\n        for (Pair<String, String> head: headers) {\n            // Session: ODgyODg3MjQ1MDczODk3NDk4Nw\n            String h = head.first.toLowerCase();\n            if (header.toLowerCase().equals(h)) {\n                return head.second;\n            }\n        }\n        // Not found\n        return null;\n    }\n\n    public static class Builder {\n\n        private static final String DEFAULT_USER_AGENT = 
\"Lavf58.29.100\";\n\n        private final @NonNull Socket rtspSocket;\n        private final @NonNull String uriRtsp;\n        private final @NonNull AtomicBoolean exitFlag;\n        private final @NonNull RtspClientListener listener;\n//      private boolean sendOptionsCommand = true;\n        private boolean requestVideo = true;\n        private boolean requestAudio = true;\n        private boolean requestApplication = true;\n        private boolean debug = false;\n        private @Nullable String username = null;\n        private @Nullable String password = null;\n        private @Nullable String userAgent = DEFAULT_USER_AGENT;\n\n        public Builder(\n                @NonNull Socket rtspSocket,\n                @NonNull String uriRtsp,\n                @NonNull AtomicBoolean exitFlag,\n                @NonNull RtspClientListener listener) {\n            this.rtspSocket = rtspSocket;\n            this.uriRtsp = uriRtsp;\n            this.exitFlag = exitFlag;\n            this.listener = listener;\n        }\n\n        @NonNull\n        public Builder withDebug(boolean debug) {\n            this.debug = debug;\n            return this;\n        }\n\n        @NonNull\n        public Builder withCredentials(@Nullable String username, @Nullable String password) {\n            this.username = username;\n            this.password = password;\n            return this;\n        }\n\n        @NonNull\n        public Builder withUserAgent(@Nullable String userAgent) {\n            this.userAgent = userAgent;\n            return this;\n        }\n\n//        @NonNull\n//        public Builder sendOptionsCommand(boolean sendOptionsCommand) {\n//            this.sendOptionsCommand = sendOptionsCommand;\n//            return this;\n//        }\n\n        @NonNull\n        public Builder requestVideo(boolean requestVideo) {\n            this.requestVideo = requestVideo;\n            return this;\n        }\n\n        @NonNull\n        public Builder requestAudio(boolean 
requestAudio) {\n            this.requestAudio = requestAudio;\n            return this;\n        }\n\n        @NonNull\n        public Builder requestApplication(boolean requestApplication) {\n            this.requestApplication = requestApplication;\n            return this;\n        }\n\n        @NonNull\n        public RtspClient build() {\n            return new RtspClient(this);\n        }\n    }\n}\n\nclass LoggerOutputStream extends BufferedOutputStream {\n    private boolean logging = true;\n\n    public LoggerOutputStream(@NonNull OutputStream out) {\n        super(out);\n    }\n\n    public synchronized void setLogging(boolean logging) {\n        this.logging = logging;\n    }\n\n    @Override\n    public synchronized void write(byte[] b, int off, int len) throws IOException {\n        super.write(b, off, len);\n        if (logging)\n            Log.i(RtspClient.TAG_DEBUG, new String(b, off, len));\n    }\n}\n"
  },
  {
    "path": "library-client-rtsp/src/main/java/com/alexvas/rtsp/codec/AudioDecodeThread.kt",
    "content": "package com.alexvas.rtsp.codec\n\nimport android.media.*\nimport android.os.Process\nimport android.util.Log\nimport java.nio.ByteBuffer\n\n\nclass AudioDecodeThread (\n        private val mimeType: String,\n        private val sampleRate: Int,\n        private val channelCount: Int,\n        private val codecConfig: ByteArray?,\n        private val audioFrameQueue: AudioFrameQueue) : Thread() {\n\n    private var isRunning = true\n\n    fun stopAsync() {\n        if (DEBUG) Log.v(TAG, \"stopAsync()\")\n        isRunning = false\n        // Wake up sleep() code\n        interrupt()\n    }\n\n    override fun run() {\n        if (DEBUG) Log.d(TAG, \"$name started\")\n\n        Process.setThreadPriority(Process.THREAD_PRIORITY_AUDIO)\n\n        // Creating audio decoder\n        val decoder = MediaCodec.createDecoderByType(mimeType)\n        val format = MediaFormat.createAudioFormat(mimeType, sampleRate, channelCount)\n\n        if (mimeType == MediaFormat.MIMETYPE_AUDIO_AAC) {\n            val csd0 = codecConfig ?: getAacDecoderConfigData(MediaCodecInfo.CodecProfileLevel.AACObjectLC, sampleRate, channelCount)\n            format.setByteBuffer(\"csd-0\", ByteBuffer.wrap(csd0))\n            format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC)\n        } else if (mimeType == MediaFormat.MIMETYPE_AUDIO_OPUS) {\n            // TODO: Add Opus support\n\n//            val OPUS_IDENTIFICATION_HEADER = \"OpusHead\".toByteArray()\n//            val OPUS_PRE_SKIP_NSEC = ByteBuffer.allocate(8).putLong(11971).array()\n//            val OPUS_SEEK_PRE_ROLL_NSEC = ByteBuffer.allocate(8).putLong(80000000).array()\n\n//            val csd0 = ByteBuffer.allocate(8+1+1+2+4+2+1)\n//            csd0.put(\"OpusHead\".toByteArray())\n//            // Version\n//            csd0.put(1)\n//            // Number of channels\n//            csd0.put(2)\n//            // Pre-skip\n//            csd0.putShort(0)\n//            
csd0.putInt(sampleRate)\n//            // Output Gain\n//            csd0.putShort(0)\n//            // Channel Mapping Family\n//            csd0.put(0)\n            // Buffer buf = new Buffer();\n//                // Magic Signature：固定头，占8个字节，为字符串OpusHead\n//                buf.write(\"OpusHead\".getBytes(StandardCharsets.UTF_8));\n//                // Version：版本号，占1字节，固定为0x01\n//                buf.writeByte(1);\n//                // Channel Count：通道数，占1字节，根据音频流通道自行设置，如0x02\n//                buf.writeByte(1);\n//                // Pre-skip：回放的时候从解码器中丢弃的samples数量，占2字节，为小端模式，默认设置0x00,\n//                buf.writeShortLe(0);\n//                // Input Sample Rate (Hz)：音频流的Sample Rate，占4字节，为小端模式，根据实际情况自行设置\n//                buf.writeIntLe(currentFormat.HZ);\n//                //Output Gain：输出增益，占2字节，为小端模式，没有用到默认设置0x00, 0x00就好\n//                buf.writeShortLe(0);\n//                // Channel Mapping Family：通道映射系列，占1字节，默认设置0x00就好\n//                buf.writeByte(0);\n//                //Channel Mapping Table：可选参数，上面的Family默认设置0x00的时候可忽略\n//            format.setByteBuffer(\"csd-0\", ByteBuffer.wrap(OPUS_IDENTIFICATION_HEADER).order(ByteOrder.BIG_ENDIAN))\n//            format.setByteBuffer(\"csd-1\", ByteBuffer.wrap(OPUS_PRE_SKIP_NSEC).order(ByteOrder.BIG_ENDIAN))\n//            format.setByteBuffer(\"csd-2\", ByteBuffer.wrap(OPUS_SEEK_PRE_ROLL_NSEC).order(ByteOrder.LITTLE_ENDIAN))\n\n            val csd0 = byteArrayOf(\n                0x4f, 0x70, 0x75, 0x73, // \"Opus\"\n                0x48, 0x65, 0x61, 0x64, // \"Head\"\n                0x01,  // Version\n                0x02,  // Channel Count\n                0x00, 0x00,  // Pre skip\n                0x80.toByte(), 0xbb.toByte(), 0x00, 0x00, // Sample rate 48000\n                0x00, 0x00,  // Output Gain (Q7.8 in dB)\n                0x00,  // Mapping Family\n            )\n            val csd1 = byteArrayOf(0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00)\n            val csd2 = byteArrayOf(0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00)\n            format.setByteBuffer(\"csd-0\", ByteBuffer.wrap(csd0))\n            format.setByteBuffer(\"csd-1\", ByteBuffer.wrap(csd1))\n            format.setByteBuffer(\"csd-2\", ByteBuffer.wrap(csd2))\n        }\n\n        decoder.configure(format, null, null, 0)\n        decoder.start()\n\n        // Creating audio playback device\n        val outChannel = if (channelCount > 1) AudioFormat.CHANNEL_OUT_STEREO else AudioFormat.CHANNEL_OUT_MONO\n        val outAudio = AudioFormat.ENCODING_PCM_16BIT\n        val bufferSize = AudioTrack.getMinBufferSize(sampleRate, outChannel, outAudio)\n//      Log.i(TAG, \"sampleRate: $sampleRate, bufferSize: $bufferSize\".format(sampleRate, bufferSize))\n        val audioTrack = AudioTrack(\n                AudioAttributes.Builder()\n                        .setUsage(AudioAttributes.USAGE_MEDIA)\n                        .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)\n                        .build(),\n                AudioFormat.Builder()\n                        .setEncoding(outAudio)\n                        .setChannelMask(outChannel)\n                        .setSampleRate(sampleRate)\n                        .build(),\n                bufferSize,\n                AudioTrack.MODE_STREAM,\n                0)\n        audioTrack.play()\n\n        val bufferInfo = MediaCodec.BufferInfo()\n        while (isRunning) {\n            val inIndex: Int = decoder.dequeueInputBuffer(10000L)\n            if (inIndex >= 0) {\n                // fill inputBuffers[inputBufferIndex] with valid data\n                var byteBuffer: ByteBuffer?\n                try {\n                    byteBuffer = decoder.getInputBuffer(inIndex)\n                } catch (e: Exception) {\n                    e.printStackTrace()\n                    break\n                }\n                byteBuffer?.rewind()\n\n                // Preventing BufferOverflowException\n//              if (length > byteBuffer.limit()) 
throw DecoderFatalException(\"Error\")\n\n                val audioFrame: FrameQueue.Frame?\n                try {\n                    audioFrame = audioFrameQueue.pop()\n                    if (audioFrame == null) {\n                        Log.d(TAG, \"Empty audio frame\")\n                        // Release input buffer\n                        decoder.queueInputBuffer(inIndex, 0, 0, 0L, 0)\n                    } else {\n                        byteBuffer?.put(audioFrame.data, audioFrame.offset, audioFrame.length)\n                        decoder.queueInputBuffer(inIndex, audioFrame.offset, audioFrame.length, audioFrame.timestampMs, 0)\n                    }\n                } catch (e: Exception) {\n                    e.printStackTrace()\n                }\n            }\n//            Log.i(TAG, \"inIndex: ${inIndex}\")\n\n            try {\n//                Log.w(TAG, \"outIndex: ${outIndex}\")\n                if (!isRunning) break\n                when (val outIndex = decoder.dequeueOutputBuffer(bufferInfo, 10000L)) {\n                    MediaCodec.INFO_OUTPUT_FORMAT_CHANGED -> Log.d(TAG, \"Decoder format changed: ${decoder.outputFormat}\")\n                    MediaCodec.INFO_TRY_AGAIN_LATER -> if (DEBUG) Log.d(TAG, \"No output from decoder available\")\n                    else -> {\n                        if (outIndex >= 0) {\n                            val byteBuffer: ByteBuffer? 
= decoder.getOutputBuffer(outIndex)\n\n                            val chunk = ByteArray(bufferInfo.size)\n                            byteBuffer?.get(chunk)\n                            byteBuffer?.clear()\n\n                            if (chunk.isNotEmpty()) {\n                                audioTrack.write(chunk, 0, chunk.size)\n                            }\n                            decoder.releaseOutputBuffer(outIndex, false)\n                        }\n                    }\n                }\n            } catch (e: Exception) {\n                e.printStackTrace()\n            }\n\n            // All decoded frames have been rendered, we can stop playing now\n            if (bufferInfo.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM != 0) {\n                Log.d(TAG, \"OutputBuffer BUFFER_FLAG_END_OF_STREAM\")\n                break\n            }\n        }\n        audioTrack.flush()\n        audioTrack.release()\n\n        try {\n            decoder.stop()\n            decoder.release()\n        } catch (_: InterruptedException) {\n        } catch (e: Exception) {\n            e.printStackTrace()\n        }\n        audioFrameQueue.clear()\n        if (DEBUG) Log.d(TAG, \"$name stopped\")\n    }\n\n    companion object {\n        private val TAG: String = AudioDecodeThread::class.java.simpleName\n        private const val DEBUG = false\n\n        fun getAacDecoderConfigData(audioProfile: Int, sampleRate: Int, channels: Int): ByteArray {\n            // AOT_LC = 2\n            // 0001 0000 0000 0000\n            var extraDataAac = audioProfile shl 11\n            // Sample rate\n            when (sampleRate) {\n                7350 -> extraDataAac = extraDataAac or (0xC shl 7)\n                8000 -> extraDataAac = extraDataAac or (0xB shl 7)\n                11025 -> extraDataAac = extraDataAac or (0xA shl 7)\n                12000 -> extraDataAac = extraDataAac or (0x9 shl 7)\n                16000 -> extraDataAac = extraDataAac or (0x8 shl 7)\n  
              22050 -> extraDataAac = extraDataAac or (0x7 shl 7)\n                24000 -> extraDataAac = extraDataAac or (0x6 shl 7)\n                32000 -> extraDataAac = extraDataAac or (0x5 shl 7)\n                44100 -> extraDataAac = extraDataAac or (0x4 shl 7)\n                48000 -> extraDataAac = extraDataAac or (0x3 shl 7)\n                64000 -> extraDataAac = extraDataAac or (0x2 shl 7)\n                88200 -> extraDataAac = extraDataAac or (0x1 shl 7)\n                96000 -> extraDataAac = extraDataAac or (0x0 shl 7)\n            }\n            // Channels\n            extraDataAac = extraDataAac or (channels shl 3)\n            val extraData = ByteArray(2)\n            extraData[0] = (extraDataAac and 0xff00 shr 8).toByte() // high byte\n            extraData[1] = (extraDataAac and 0xff).toByte()         // low byte\n            return extraData\n        }\n    }\n\n}\n\n"
  },
  {
    "path": "library-client-rtsp/src/main/java/com/alexvas/rtsp/codec/FrameQueue.kt",
    "content": "package com.alexvas.rtsp.codec\n\nimport java.util.concurrent.ArrayBlockingQueue\nimport java.util.concurrent.TimeUnit\n\nenum class VideoCodecType {\n    H264, H265, UNKNOWN\n}\n\nenum class AudioCodecType {\n    AAC_LC, G711_ALAW, G711_MLAW, UNKNOWN\n}\n\nclass VideoFrameQueue(frameQueueCapacity: Int): FrameQueue<FrameQueue.VideoFrame>(frameQueueCapacity)\nclass AudioFrameQueue(frameQueueCapacity: Int): FrameQueue<FrameQueue.AudioFrame>(frameQueueCapacity)\n\n/**\n * Queue for concurrent adding/removing audio/video frames.\n */\nopen class FrameQueue<T>(private val frameQueueCapacity: Int) {\n\n    interface Frame {\n        val data: ByteArray\n        val offset: Int\n        val length: Int\n        val timestampMs: Long  // presentation time in msec\n    }\n\n    data class VideoFrame(\n        /** Only H264 codec supported */\n        val codecType: VideoCodecType,\n        /** Indicates whether it is a keyframe or not */\n        val isKeyframe: Boolean,\n        override val data: ByteArray,\n        override val offset: Int,\n        override val length: Int,\n        /** Video frame timestamp (msec) generated by camera */\n        override val timestampMs: Long,\n        /** Captured (received) video frame timestamp (msec). If -1, not supported. 
*/\n        val capturedTimestampMs: Long = -1\n    ) : Frame\n\n    data class AudioFrame(\n        val codecType: AudioCodecType,\n//      val sampleRate: Int,\n        override val data: ByteArray,\n        override val offset: Int,\n        override val length: Int,\n        override val timestampMs: Long,\n    ) : Frame\n\n    private val queue = ArrayBlockingQueue<T>(frameQueueCapacity)\n\n    val size: Int\n        get() = queue.size\n\n    val capacity: Int\n        get() = frameQueueCapacity\n\n    @Throws(InterruptedException::class)\n    fun push(frame: T): Boolean {\n        if (queue.offer(frame, 5, TimeUnit.MILLISECONDS)) {\n            return true\n        }\n//        Log.w(TAG, \"Cannot add frame, queue is full\")\n        return false\n    }\n\n    @Throws(InterruptedException::class)\n    open fun pop(timeout: Long = 1000): T? {\n        try {\n            val frame: T? = queue.poll(timeout, TimeUnit.MILLISECONDS)\n//            if (frame == null) {\n//                Log.w(TAG, \"Cannot get frame within 1 sec, queue is empty\")\n//            }\n            return frame\n        } catch (e: InterruptedException) {\n            Thread.currentThread().interrupt()\n        }\n        return null\n    }\n\n    fun clear() {\n        queue.clear()\n    }\n\n    fun copyInto(dstFrameQueue: FrameQueue<T>) {\n        dstFrameQueue.queue.addAll(queue)\n    }\n\n    companion object {\n        private val TAG: String = FrameQueue::class.java.simpleName\n    }\n\n}\n"
  },
  {
    "path": "library-client-rtsp/src/main/java/com/alexvas/rtsp/codec/VideoDecodeThread.kt",
    "content": "package com.alexvas.rtsp.codec\n\nimport android.annotation.SuppressLint\nimport android.media.MediaCodec\nimport android.media.MediaCodec.OnFrameRenderedListener\nimport android.media.MediaFormat\nimport android.os.Build\nimport android.os.Handler\nimport android.os.Looper\nimport android.os.Process\nimport android.util.Log\nimport com.alexvas.utils.MediaCodecUtils\nimport com.alexvas.utils.capabilitiesToString\nimport androidx.media3.common.util.Util\nimport com.alexvas.utils.VideoCodecUtils\nimport com.limelight.binding.video.MediaCodecHelper\nimport java.lang.Integer.min\nimport java.nio.ByteBuffer\nimport java.util.concurrent.TimeUnit\nimport java.util.concurrent.atomic.AtomicBoolean\n\nabstract class VideoDecodeThread (\n    protected val mimeType: String,\n    protected val width: Int,\n    protected val height: Int,\n    protected val rotation: Int, // 0, 90, 180, 270\n    protected val videoFrameQueue: VideoFrameQueue,\n    protected val videoDecoderListener: VideoDecoderListener,\n    protected var videoDecoderType: DecoderType\n) : Thread() {\n\n    enum class DecoderType {\n        HARDWARE,\n        SOFTWARE // fallback\n    }\n\n    interface VideoDecoderListener {\n        /** Video decoder successfully started */\n        fun onVideoDecoderStarted() {}\n        /** Video decoder successfully stopped */\n        fun onVideoDecoderStopped() {}\n        /** Fatal error occurred */\n        fun onVideoDecoderFailed(message: String?) 
{}\n        /** Resolution changed */\n        fun onVideoDecoderFormatChanged(width: Int, height: Int) {}\n        /** First video frame rendered */\n        fun onVideoDecoderFirstFrameRendered() {}\n    }\n\n    protected val uiHandler = Handler(Looper.getMainLooper())\n    protected var exitFlag = AtomicBoolean(false)\n    protected var firstFrameRendered = false\n\n    /** Decoder latency used for statistics */\n    @Volatile private var decoderLatency = -1\n    /** Flag for allowing calculating latency */\n    private var decoderLatencyRequested = false\n    /** Network latency used for statistics */\n    @Volatile private var networkLatency = -1\n    private var videoDecoderName: String? = null\n    private var firstFrameDecoded = false\n    @Volatile private var videoFrameRateStabilization = false\n\n    fun stopAsync() {\n        if (DEBUG) Log.v(TAG, \"stopAsync()\")\n        exitFlag.set(true)\n        // Wake up sleep() code\n        interrupt()\n    }\n\n    /**\n     * Currently used video decoder. Video decoder can be changed on runtime.\n     * If videoDecoderType set to HARDWARE, it can be switched to SOFTWARE in case of decoding issue\n     * (e.g. hardware decoder does not support the stream resolution).\n     * If videoDecoderType set to SOFTWARE, it will always remain SOFTWARE (no any changes).\n     */\n    fun getCurrentVideoDecoderType(): DecoderType {\n        return videoDecoderType\n    }\n\n    fun getCurrentVideoDecoderName(): String? {\n        return videoDecoderName\n    }\n\n    /**\n     * Get frames decoding/rendering latency in msec. Returns -1 if not supported.\n     */\n    fun getCurrentVideoDecoderLatencyMsec(): Int {\n        decoderLatencyRequested = true\n        return decoderLatency\n    }\n\n    /**\n     * Get network latency in msec. 
Returns -1 if not supported.\n     */\n    fun getCurrentNetworkLatencyMsec(): Int {\n        return networkLatency\n    }\n\n    fun setVideoFrameRateStabilization(enable: Boolean) {\n        if (DEBUG) Log.v(TAG, \"setVideoFrameRateStabilization(enable=$enable)\")\n        videoFrameRateStabilization = enable\n    }\n\n    fun hasVideoFrameRateStabilization(): Boolean {\n        return videoFrameRateStabilization\n    }\n\n    @SuppressLint(\"UnsafeOptInUsageError\")\n    private fun getDecoderSafeWidthHeight(decoder: MediaCodec): Pair<Int, Int> {\n        if (DEBUG) Log.v(TAG, \"getDecoderSafeWidthHeight()\")\n        val capabilities = decoder.codecInfo.getCapabilitiesForType(mimeType).videoCapabilities\n        return if (capabilities == null) {\n            Log.e(TAG, \"Not a video decoder\")\n            Pair(-1, -1)\n        } else if (capabilities.isSizeSupported(width, height)) {\n            Log.i(TAG, \"Video decoder frame size ${width}x${height} supported\")\n            Pair(width, height)\n        } else {\n            Log.w(TAG, \"Video decoder frame size ${width}x${height} is not supported\")\n            val widthAlignment = capabilities.widthAlignment\n            val heightAlignment = capabilities.heightAlignment\n            val w = Util.ceilDivide(width, widthAlignment) * widthAlignment\n            val h = Util.ceilDivide(height, heightAlignment) * heightAlignment\n            if (capabilities.isSizeSupported(w, h)) {\n                Log.i(TAG, \"Video decoder frame size ${w}x${h} calculated from alignment ${widthAlignment}x${heightAlignment} and original size ${width}x${height}]\")\n                Pair(w, h)\n            } else {\n                val p = Pair(capabilities.supportedWidths.upper, capabilities.supportedHeights.upper)\n                Log.i(TAG, \"Video decoder max supported frame size ${w}x${h}\")\n                p\n            }\n        }\n    }\n\n    @SuppressLint(\"InlinedApi\")\n    private fun 
getWidthHeight(mediaFormat: MediaFormat): Pair<Int, Int> {\n        // Sometimes height obtained via KEY_HEIGHT is not valid, e.g. can be 1088 instead 1080\n        // (no problems with width though). Use crop parameters to correctly determine height.\n        val hasCrop =\n            mediaFormat.containsKey(MediaFormat.KEY_CROP_RIGHT) && mediaFormat.containsKey(MediaFormat.KEY_CROP_LEFT) &&\n            mediaFormat.containsKey(MediaFormat.KEY_CROP_BOTTOM) && mediaFormat.containsKey(MediaFormat.KEY_CROP_TOP)\n        val width =\n            if (hasCrop)\n                mediaFormat.getInteger(MediaFormat.KEY_CROP_RIGHT) - mediaFormat.getInteger(MediaFormat.KEY_CROP_LEFT) + 1\n            else\n                mediaFormat.getInteger(MediaFormat.KEY_WIDTH)\n        var height =\n            if (hasCrop)\n                mediaFormat.getInteger(MediaFormat.KEY_CROP_BOTTOM) - mediaFormat.getInteger(MediaFormat.KEY_CROP_TOP) + 1\n            else\n                mediaFormat.getInteger(MediaFormat.KEY_HEIGHT)\n        // Fix for 1080p resolution for Samsung S21\n        // {crop-right=1919, max-height=4320, sar-width=1, color-format=2130708361, mime=video/raw,\n        // hdr-static-info=java.nio.HeapByteBuffer[pos=0 lim=25 cap=25],\n        // priority=0, color-standard=1, feature-secure-playback=0, color-transfer=3, sar-height=1,\n        // crop-bottom=1087, max-width=8192, crop-left=0, width=1920, color-range=2, crop-top=0,\n        // rotation-degrees=0, frame-rate=30, height=1088}\n        height = height / 16 * 16 // 1088 -> 1080\n//        if (height == 1088)\n//            height = 1080\n        return Pair(width, height)\n    }\n\n    private fun getDecoderMediaFormat(decoder: MediaCodec): MediaFormat {\n        if (DEBUG) Log.v(TAG, \"getDecoderMediaFormat()\")\n        val safeWidthHeight = getDecoderSafeWidthHeight(decoder)\n        val format = MediaFormat.createVideoFormat(mimeType, safeWidthHeight.first, safeWidthHeight.second)\n        if (DEBUG)\n    
        Log.d(TAG, \"Configuring surface ${safeWidthHeight.first}x${safeWidthHeight.second} w/ '$mimeType'\")\n        else\n            Log.i(TAG, \"Configuring surface ${safeWidthHeight.first}x${safeWidthHeight.second} w/ '$mimeType'\")\n        format.setInteger(MediaFormat.KEY_ROTATION, rotation)\n//        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {\n//            // format.setFeatureEnabled(android.media.MediaCodecInfo.CodecCapabilities.FEATURE_LowLatency, true)\n//            // Request low-latency for the decoder. Not all of the decoders support that.\n//            format.setInteger(MediaFormat.KEY_LOW_LATENCY, 1)\n//        }\n\n        val succeeded = MediaCodecHelper.setDecoderLowLatencyOptions(format, decoder.codecInfo, 1)\n        Log.i(TAG, \"Low-latency: $succeeded\")\n\n        return format\n    }\n\n    /** Decoder created */\n    abstract fun decoderCreated(mediaCodec: MediaCodec, mediaFormat: MediaFormat)\n\n    /** Frame processed */\n    abstract fun releaseOutputBuffer(\n        mediaCodec: MediaCodec,\n        outIndex: Int,\n        bufferInfo: MediaCodec.BufferInfo,\n        render: Boolean\n    )\n\n    /** Decoder stopped and released */\n    abstract fun decoderDestroyed(mediaCodec: MediaCodec)\n\n    private fun createVideoDecoderAndStart(decoderType: DecoderType): MediaCodec {\n        if (DEBUG) Log.v(TAG, \"createVideoDecoderAndStart(decoderType=$decoderType)\")\n\n        @SuppressLint(\"UnsafeOptInUsageError\")\n        val decoder = when (decoderType) {\n            DecoderType.HARDWARE -> {\n                val hwDecoders = MediaCodecUtils.getHardwareDecoders(mimeType)\n                if (hwDecoders.isEmpty()) {\n                    Log.w(TAG, \"Cannot get hardware video decoders for mime type '$mimeType'. 
Using default one.\")\n                    MediaCodec.createDecoderByType(mimeType)\n                } else {\n                    val lowLatencyDecoder = MediaCodecUtils.getLowLatencyDecoder(hwDecoders)\n                    val name = lowLatencyDecoder?.let {\n                        Log.i(TAG, \"[$name] Dedicated low-latency decoder found '${lowLatencyDecoder.name}'\")\n                        lowLatencyDecoder.name\n                    } ?: hwDecoders[0].name\n                    MediaCodec.createByCodecName(name)\n                }\n            }\n            DecoderType.SOFTWARE -> {\n                val swDecoders = MediaCodecUtils.getSoftwareDecoders(mimeType)\n                if (swDecoders.isEmpty()) {\n                    Log.w(TAG, \"Cannot get software video decoders for mime type '$mimeType'. Using default one .\")\n                    MediaCodec.createDecoderByType(mimeType)\n                } else {\n                    val name = swDecoders[0].name\n                    MediaCodec.createByCodecName(name)\n                }\n            }\n        }\n        this.videoDecoderType = decoderType\n        this.videoDecoderName = decoder.name\n\n        val frameRenderedListener = OnFrameRenderedListener { _, _, _ ->\n            if (!firstFrameRendered) {\n                firstFrameRendered = true\n                uiHandler.post {\n                    videoDecoderListener.onVideoDecoderFirstFrameRendered()\n                }\n            }\n        }\n        decoder.setOnFrameRenderedListener(frameRenderedListener, null)\n        val format = getDecoderMediaFormat(decoder)\n        decoderCreated(decoder, format)\n        decoder.start()\n\n        val capabilities = decoder.codecInfo.getCapabilitiesForType(mimeType)\n        val lowLatencySupport = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {\n            capabilities.isFeatureSupported(android.media.MediaCodecInfo.CodecCapabilities.FEATURE_LowLatency)\n        } else {\n            false\n    
    }\n        Log.i(TAG, \"[$name] Video decoder '${decoder.name}' started \" +\n                \"(${if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { if (decoder.codecInfo.isHardwareAccelerated) \"hardware\" else \"software\" } else \"\"}, \" +\n                \"${capabilities.capabilitiesToString()}, \" +\n                \"${if (lowLatencySupport) \"w/\" else \"w/o\"} low-latency support)\")\n\n        return decoder\n    }\n\n    private fun stopAndReleaseVideoDecoder(decoder: MediaCodec) {\n        if (DEBUG) Log.v(TAG, \"stopAndReleaseVideoDecoder()\")\n        val type = videoDecoderType.toString().lowercase()\n        Log.i(TAG, \"Stopping $type video decoder...\")\n        try {\n            decoder.stop()\n            Log.i(TAG, \"Decoder successfully stopped\")\n        } catch (e3: Throwable) {\n            Log.e(TAG, \"Failed to stop decoder\", e3)\n        }\n        Log.i(TAG, \"Releasing decoder...\")\n        try {\n            decoder.release()\n            Log.i(TAG, \"Decoder successfully released\")\n        } catch (e3: Throwable) {\n            Log.e(TAG, \"Failed to release decoder\", e3)\n        }\n        videoFrameQueue.clear()\n        decoderDestroyed(decoder)\n    }\n\n    override fun run() {\n        if (DEBUG) Log.d(TAG, \"$name started\")\n\n        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) {\n            Process.setThreadPriority(Process.THREAD_PRIORITY_VIDEO)\n        }\n\n        videoDecoderListener.onVideoDecoderStarted()\n\n        try {\n            Log.i(TAG, \"Starting hardware video decoder...\")\n            var decoder = try {\n                createVideoDecoderAndStart(videoDecoderType)\n            } catch (e: Throwable) {\n                Log.e(TAG, \"Failed to start $videoDecoderType video decoder (${e.message})\", e)\n                Log.i(TAG, \"Starting software video decoder...\")\n                try {\n                    createVideoDecoderAndStart(DecoderType.SOFTWARE)\n                } 
catch (e2: Throwable) {\n                    Log.e(TAG, \"Failed to start video software decoder. Exiting...\", e2)\n                    // Unexpected behavior\n                    videoDecoderListener.onVideoDecoderFailed(\"Cannot initialize video decoder for mime type '$mimeType'\")\n                    return\n                }\n            }\n            val bufferInfo = MediaCodec.BufferInfo()\n\n            try {\n                var widthHeightFromStream: Pair<Int, Int>? = null\n\n                // Map for calculating decoder rendering latency.\n                // key - original frame timestamp, value - timestamp when frame was added to the map\n                val keyframesTimestamps = HashMap<Long, Long>()\n\n                var frameQueuedMsec = System.currentTimeMillis()\n                var frameAlreadyDequeued = false\n\n                // Main loop\n                while (!exitFlag.get()) {\n                    try {\n                        val inIndex: Int = decoder.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT_US)\n                        if (inIndex >= 0) {\n                            // fill inputBuffers[inputBufferIndex] with valid data\n                            val byteBuffer: ByteBuffer? 
= decoder.getInputBuffer(inIndex)\n                            byteBuffer?.rewind()\n\n                            // Preventing BufferOverflowException\n                            // if (length > byteBuffer.limit()) throw DecoderFatalException(\"Error\")\n\n                            val frame = videoFrameQueue.pop()\n                            if (frame == null) {\n                                Log.d(TAG, \"Empty video frame\")\n                                // Release input buffer\n                                decoder.queueInputBuffer(inIndex, 0, 0, 0L, 0)\n                            } else {\n                                // Add timestamp for keyframe to calculating latency further.\n                                if ((DEBUG || decoderLatencyRequested) && frame.isKeyframe) {\n                                    if (keyframesTimestamps.size > 5) {\n                                        // Something wrong with map. Allow only 5 map entries.\n                                        keyframesTimestamps.clear()\n                                    }\n                                    val l = System.currentTimeMillis()\n                                    keyframesTimestamps[frame.timestampMs] = l\n//                                  Log.d(TAG, \"Added $l\")\n                                }\n                                // Calculate network latency\n                                networkLatency = if (frame.capturedTimestampMs > -1)\n                                    (frame.timestampMs - frame.capturedTimestampMs).toInt()\n                                else\n                                    -1\n\n                                byteBuffer?.put(frame.data, frame.offset, frame.length)\n                                if (DEBUG) {\n                                    val l = System.currentTimeMillis()\n                                    Log.i(TAG, \"\\tFrame queued (${l - frameQueuedMsec}) ${if (frame.isKeyframe) \"key frame\" else 
\"\"}\")\n                                    frameQueuedMsec = l\n                                }\n                                val flags = if (frame.isKeyframe)\n                                    (MediaCodec.BUFFER_FLAG_KEY_FRAME /*or MediaCodec.BUFFER_FLAG_CODEC_CONFIG*/) else 0\n                                decoder.queueInputBuffer(inIndex, frame.offset, frame.length, frame.timestampMs, flags)\n\n                                if (frame.isKeyframe) {\n                                    // Obtain width and height from stream\n                                    widthHeightFromStream = try {\n                                        VideoCodecUtils.getWidthHeightFromArray(\n                                            frame.data,\n                                            frame.offset,\n                                            // Check only first 100 bytes maximum. That's enough for finding SPS NAL unit.\n                                            min(frame.length, VideoCodecUtils.MAX_NAL_SPS_SIZE),\n                                            isH265 = frame.codecType == VideoCodecType.H265\n                                        )\n                                    } catch (_: Exception) {\n//                                      Log.e(TAG, \"Failed to parse width/height from SPS frame. SPS frame seems to be corrupted.\", e)\n                                        null\n                                    }\n//                                  Log.i(TAG, \"width/height: ${widthHeightFromStream?.first}x${widthHeightFromStream?.second}\")\n                                }\n                            }\n                        }\n\n                        if (exitFlag.get()) break\n\n                        // Get all output buffer frames until no buffer from decoder available (INFO_TRY_AGAIN_LATER).\n                        // Single input buffer frame can contain several frames, e.g. 
SPS + PPS + IDR.\n                        // Thus dequeueOutputBuffer should be called several times.\n                        // First time it obtains SPS + PPS, second one - IDR frame.\n                        do {\n                            // For the first time wait for a frame within 100 msec, next times no timeout\n                            val timeout = if (frameAlreadyDequeued || !firstFrameDecoded) 0L else DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US\n                            val outIndex = decoder.dequeueOutputBuffer(bufferInfo, timeout)\n                            when (outIndex) {\n                                // Resolution changed\n                                MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED, MediaCodec.INFO_OUTPUT_FORMAT_CHANGED -> {\n                                    Log.d(TAG, \"Decoder format changed: ${decoder.outputFormat}\")\n                                    // Decoder can contain different resolution (it can make downsampling).\n                                    // If resolution successfully obtained from SPS frame, use it.\n                                    val widthHeightFromDecoder = getWidthHeight(decoder.outputFormat)\n                                    val widthHeight = widthHeightFromStream ?: widthHeightFromDecoder\n                                    Log.i(TAG, \"Video decoder resolution: ${widthHeightFromDecoder.first}x${widthHeightFromDecoder.second}, stream resolution: ${widthHeightFromStream?.first}x${widthHeightFromStream?.second}\")\n\n//                                    val widthHeightFromDecoder = getWidthHeight(decoder.outputFormat)\n                                    val rotation = if (decoder.outputFormat.containsKey(MediaFormat.KEY_ROTATION)) {\n                                        decoder.outputFormat.getInteger(MediaFormat.KEY_ROTATION)\n                                    } else {\n                                        // Some devices like Samsung SM-A505U (Android 11) do not allow\n            
                            // video stream rotation on decoding for hardware decoder\n                                        Log.w(TAG, \"Video stream rotation is not supported by this Android device (${Build.MODEL} - ${Build.DEVICE}, codec: '${decoder.name}')\")\n                                        0\n                                    }\n                                    uiHandler.post {\n                                        // Run in UI thread\n                                        when (rotation) {\n                                            90, 270 -> videoDecoderListener.onVideoDecoderFormatChanged(widthHeight.second, widthHeight.first)\n                                            else -> videoDecoderListener.onVideoDecoderFormatChanged(widthHeight.first, widthHeight.second)\n                                        }\n                                    }\n                                    frameAlreadyDequeued = true\n                                }\n                                // No any frames in queue\n                                MediaCodec.INFO_TRY_AGAIN_LATER -> {\n                                    if (DEBUG) Log.d(TAG, \"No output from decoder available\")\n                                    frameAlreadyDequeued = true\n                                }\n                                // Frame decoded\n                                else -> {\n                                    if (outIndex >= 0) {\n                                        if (DEBUG || decoderLatencyRequested) {\n                                            val ts = bufferInfo.presentationTimeUs\n                                            keyframesTimestamps.remove(ts)?.apply {\n                                                decoderLatency = (System.currentTimeMillis() - this).toInt()\n//                                              Log.d(TAG, \"Removed $this\")\n                                            }\n                                        }\n\n  
                                      val render = bufferInfo.size != 0 && !exitFlag.get()\n                                        if (DEBUG) Log.i(TAG, \"\\tFrame decoded [outIndex=$outIndex, render=$render]\")\n                                        releaseOutputBuffer(decoder, outIndex, bufferInfo, render)\n                                        if (!firstFrameDecoded && render) {\n                                            firstFrameDecoded = true\n                                        }\n                                        frameAlreadyDequeued = false\n                                    } else {\n                                        Log.e(TAG, \"Obtaining frame failed w/ error code $outIndex\")\n                                    }\n                                }\n                            }\n                        // For SPS/PPS frame request another frame (IDR)\n                        } while (outIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED || outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED)\n//                      } while (outIndex != MediaCodec.INFO_TRY_AGAIN_LATER)\n\n                        // All decoded frames have been rendered, we can stop playing now\n                        if (bufferInfo.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM != 0) {\n                            if (DEBUG) Log.d(TAG, \"OutputBuffer BUFFER_FLAG_END_OF_STREAM\")\n                            break\n                        }\n                    } catch (_: InterruptedException) {\n                    } catch (e: IllegalStateException) {\n                        // Restarting decoder in software mode\n                        Log.e(TAG, \"${e.message}\", e)\n                        stopAndReleaseVideoDecoder(decoder)\n                        Log.i(TAG, \"Starting software video decoder...\")\n                        decoder = createVideoDecoderAndStart(DecoderType.SOFTWARE)\n                        Log.i(TAG, \"Software video decoder 
'${decoder.name}' started (${decoder.codecInfo.getCapabilitiesForType(mimeType).capabilitiesToString()})\")\n                    } catch (e: MediaCodec.CodecException) {\n                        Log.w(TAG, \"${e.diagnosticInfo}\\nisRecoverable: ${e.isRecoverable}, isTransient: ${e.isTransient}\")\n                        if (e.isRecoverable) {\n                            // Recoverable error.\n                            // Calling stop(), configure(), and start() to recover.\n                            Log.i(TAG, \"Recovering video decoder...\")\n                            try {\n                                decoder.stop()\n                                val format = getDecoderMediaFormat(decoder)\n                                decoderCreated(decoder, format)\n                                decoder.start()\n                                Log.i(TAG, \"Video decoder recovering succeeded\")\n                            } catch (e2: Throwable) {\n                                Log.e(TAG, \"Video decoder recovering failed\")\n                                Log.e(TAG, \"${e2.message}\", e2)\n                            }\n                        } else if (e.isTransient) {\n                            // Transient error. Resources are temporarily unavailable and\n                            // the method may be retried at a later time.\n                            Log.w(TAG, \"Video decoder resource temporarily unavailable\")\n                        } else {\n                            // Fatal error. 
Restarting decoder in software mode.\n                            stopAndReleaseVideoDecoder(decoder)\n                            Log.i(TAG, \"Starting video software decoder...\")\n                            decoder = createVideoDecoderAndStart(DecoderType.SOFTWARE)\n                            Log.i(TAG, \"Software video decoder '${decoder.name}' started (${decoder.codecInfo.getCapabilitiesForType(mimeType).capabilitiesToString()})\")\n                        }\n                    } catch (e: Throwable) {\n                        Log.e(TAG, \"${e.message}\", e)\n                    }\n                } // while\n\n                // Drain decoder\n                val inIndex: Int = decoder.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT_US)\n                if (inIndex >= 0) {\n                    decoder.queueInputBuffer(inIndex, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM)\n                } else {\n                    Log.w(TAG, \"Not able to signal end of stream\")\n                }\n\n            } catch (e2: Throwable) {\n                Log.e(TAG, \"${e2.message}\", e2)\n            } finally {\n                stopAndReleaseVideoDecoder(decoder)\n            }\n\n        } catch (e: Throwable) {\n            Log.e(TAG, \"$name stopped due to '${e.message}'\")\n            videoDecoderListener.onVideoDecoderFailed(e.message)\n            // While configuring stopAsync can be called and surface released. Just exit.\n            if (!exitFlag.get()) e.printStackTrace()\n            return\n        }\n\n        videoDecoderListener.onVideoDecoderStopped()\n        if (DEBUG) Log.d(TAG, \"$name stopped\")\n    }\n\n    companion object {\n        internal val TAG: String = VideoDecodeThread::class.java.simpleName\n        internal const val DEBUG = false\n\n        private val DEQUEUE_INPUT_TIMEOUT_US = TimeUnit.MILLISECONDS.toMicros(500)\n        private val DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = TimeUnit.MILLISECONDS.toMicros(100)\n    }\n\n}\n"
  },
  {
    "path": "library-client-rtsp/src/main/java/com/alexvas/rtsp/codec/VideoDecoderBitmapThread.kt",
    "content": "package com.alexvas.rtsp.codec\n\nimport android.graphics.Bitmap\nimport android.graphics.Matrix\nimport android.media.MediaCodec\nimport android.media.MediaFormat\nimport android.util.Log\nimport com.alexvas.rtsp.codec.color.ColorConverterImageAndroidX\n\nclass VideoDecoderBitmapThread(\n    mimeType: String,\n    rotation: Int, // 0, 90, 180, 270\n    videoFrameQueue: VideoFrameQueue,\n    videoDecoderListener: VideoDecoderListener,\n    private val videoDecoderBitmapListener: VideoDecoderBitmapListener,\n    videoDecoderType: DecoderType = DecoderType.HARDWARE\n): VideoDecodeThread(\n    mimeType,\n    1920,\n    1080,\n    rotation,\n    videoFrameQueue,\n    videoDecoderListener,\n    videoDecoderType\n) {\n\n    interface VideoDecoderBitmapListener {\n        /** Used only when OutputType.BUFFERS is used */\n        fun onVideoDecoderBitmapObtained(bitmap: Bitmap) {}\n    }\n\n    private var colorConverter: ColorConverterImageAndroidX? = null\n\n    override fun decoderCreated(mediaCodec: MediaCodec, mediaFormat: MediaFormat) {\n        if (DEBUG) Log.v(TAG, \"decoderCreated()\")\n        mediaCodec.configure(mediaFormat, null, null, 0)\n    }\n\n    override fun releaseOutputBuffer(\n        mediaCodec: MediaCodec,\n        outIndex: Int,\n        bufferInfo: MediaCodec.BufferInfo,\n        render: Boolean\n    ) {\n        mediaCodec.getOutputImage(outIndex)?.use { image ->\n            if (colorConverter == null)\n                colorConverter = ColorConverterImageAndroidX()\n            // Converting YUV 4:2:0 888 to Bitmap ARGB 8888\n            var bitmap = colorConverter!!.getBitmapFromImage(image)\n            // Rotation does not work in VideoDecoderThread since we do not use Surface there.\n            // Rotate bitmaps.\n            bitmap = if (rotation != 0) {\n                bitmap.rotateBitmap(rotation.toFloat())\n            } else {\n                bitmap.createCopy565()\n            }\n            uiHandler.post {\n       
         if (!firstFrameRendered) {\n                    firstFrameRendered = true\n                    videoDecoderListener.onVideoDecoderFirstFrameRendered()\n                }\n                videoDecoderBitmapListener.onVideoDecoderBitmapObtained(bitmap)\n            }\n        }\n        mediaCodec.releaseOutputBuffer(outIndex, false)\n    }\n\n    override fun decoderDestroyed(mediaCodec: MediaCodec) {\n        if (DEBUG) Log.v(TAG, \"decoderDestroyed()\")\n        colorConverter?.apply {\n            try {\n                Log.i(TAG, \"Releasing color converter...\")\n                release()\n                Log.i(TAG, \"Color converter successfully released\")\n            } catch (e: Throwable) {\n                Log.e(TAG, \"Failed to release color converter\", e)\n            }\n        }\n    }\n\n}\n\nfun Bitmap.createCopy565(): Bitmap {\n    return copy(\n        Bitmap.Config.RGB_565,\n        true\n    )\n}\n\nfun Bitmap.rotateBitmap(angle: Float): Bitmap {\n    val matrix = Matrix()\n    matrix.postRotate(angle)\n    return Bitmap.createBitmap(this, 0, 0, this.width, this.height, matrix, true)\n}\n"
  },
  {
    "path": "library-client-rtsp/src/main/java/com/alexvas/rtsp/codec/VideoDecoderSurfaceThread.kt",
    "content": "package com.alexvas.rtsp.codec\n\nimport android.media.MediaCodec\nimport android.media.MediaFormat\nimport android.util.Log\nimport android.view.Surface\nimport java.util.concurrent.TimeUnit\nimport kotlin.math.max\n\nclass VideoDecoderSurfaceThread(\n    private val surface: Surface,\n    mimeType: String,\n    width: Int,\n    height: Int,\n    rotation: Int, // 0, 90, 180, 270\n    videoFrameQueue: VideoFrameQueue,\n    videoDecoderListener: VideoDecoderListener,\n    videoDecoderType: DecoderType = DecoderType.HARDWARE,\n    videoFrameRateStabilization: Boolean = false,\n) : VideoDecodeThread(\n    mimeType, width, height, rotation, videoFrameQueue, videoDecoderListener, videoDecoderType\n) {\n\n    /**\n     * Presentation time (in RTP units converted to microseconds) of the first frame used as the\n     * PTS baseline.\n     */\n    private var streamStartPtsUs: Long? = null\n\n    /**\n     * Monotonic clock timestamp corresponding to streamStartPtsUs, used to map future frames\n     * to real time.\n     */\n    private var playbackStartRealtimeNs: Long? 
= null\n\n    /**\n     * Timestamp of the most recently released frame to enforce minimum spacing between consecutive\n     * frames.\n     */\n    private var lastFrameReleaseTimeNs: Long = Long.MIN_VALUE\n\n    /**\n     * Last presentation timestamp we processed; used to detect wrap-around or backwards jumps.\n     */\n    private var lastPresentationTimeUs: Long = Long.MIN_VALUE\n\n    init {\n        setVideoFrameRateStabilization(videoFrameRateStabilization)\n    }\n\n    override fun decoderCreated(mediaCodec: MediaCodec, mediaFormat: MediaFormat) {\n        if (DEBUG) Log.v(TAG, \"decoderCreated()\")\n        if (!surface.isValid) {\n            Log.e(TAG, \"Surface invalid\")\n        }\n        mediaCodec.configure(mediaFormat, surface, null, 0)\n        resetFrameTiming()\n    }\n\n    private fun releaseOutputBufferWithFrameRateStabilization(\n        mediaCodec: MediaCodec,\n        outIndex: Int,\n        bufferInfo: MediaCodec.BufferInfo\n    ) {\n        if (DEBUG) Log.v(TAG, \"releaseOutputBufferWithFrameRateStabilization(outIndex=$outIndex)\")\n\n        val ptsUs = bufferInfo.presentationTimeUs\n        val nowNs = System.nanoTime()\n\n        if (streamStartPtsUs == null || playbackStartRealtimeNs == null) {\n            // First frame (or after a reset): initialize all timing anchors.\n            streamStartPtsUs = ptsUs\n            playbackStartRealtimeNs = nowNs\n            lastFrameReleaseTimeNs = nowNs\n            lastPresentationTimeUs = ptsUs\n            mediaCodec.releaseOutputBuffer(outIndex, nowNs)\n            return\n        }\n\n        var targetNs = playbackStartRealtimeNs!! + (ptsUs - streamStartPtsUs!!) * 1000L\n        var adjustedNowNs = System.nanoTime()\n\n        if (lastPresentationTimeUs != Long.MIN_VALUE && ptsUs < lastPresentationTimeUs) {\n            // PTS went backwards (e.g. codec reordering). 
Re-base the clock to avoid negative deltas.\n            streamStartPtsUs = ptsUs\n            playbackStartRealtimeNs = adjustedNowNs\n            targetNs = adjustedNowNs\n        }\n\n        if (lastFrameReleaseTimeNs != Long.MIN_VALUE) {\n            // Ensure we never schedule two frames closer together than the min spacing.\n            targetNs = max(targetNs, lastFrameReleaseTimeNs + MIN_FRAME_SPACING_NS)\n        }\n\n        adjustedNowNs = System.nanoTime()\n        val latenessNs = adjustedNowNs - targetNs\n\n        if (latenessNs >= FRAME_DROP_THRESHOLD_NS) {\n            // Frame is critically late; drop to keep playback responsive.\n            mediaCodec.releaseOutputBuffer(outIndex, false)\n            lastFrameReleaseTimeNs = adjustedNowNs\n            return\n        }\n\n        var correctedTargetNs = targetNs\n        if (latenessNs > 0) {\n            // For mild lateness, shift the playback baseline forward so future frames stay aligned.\n            val correction = minOf(latenessNs, FRAME_DROP_THRESHOLD_NS)\n            playbackStartRealtimeNs = playbackStartRealtimeNs?.plus(correction)\n            correctedTargetNs += correction\n        }\n\n        if (correctedTargetNs <= adjustedNowNs + RENDER_EARLY_MARGIN_NS) {\n            // Already at/behind the target time: render immediately using the current VSYNC.\n            mediaCodec.releaseOutputBuffer(outIndex, true)\n            lastFrameReleaseTimeNs = adjustedNowNs\n        } else {\n            // Still early enough: hand the desired release timestamp to MediaCodec for VSYNC alignment.\n            mediaCodec.releaseOutputBuffer(outIndex, correctedTargetNs)\n            lastFrameReleaseTimeNs = correctedTargetNs\n        }\n\n        lastPresentationTimeUs = ptsUs\n    }\n\n    override fun releaseOutputBuffer(\n        mediaCodec: MediaCodec,\n        outIndex: Int,\n        bufferInfo: MediaCodec.BufferInfo,\n        render: Boolean\n    ) {\n        if (DEBUG) Log.v(TAG, 
\"releaseOutputBuffer(outIndex=$outIndex, render=$render)\")\n        if (!render || !surface.isValid) {\n            mediaCodec.releaseOutputBuffer(outIndex, false)\n            return\n        }\n\n        if (!hasVideoFrameRateStabilization()) {\n            mediaCodec.releaseOutputBuffer(outIndex, true)\n        } else {\n            releaseOutputBufferWithFrameRateStabilization(mediaCodec, outIndex, bufferInfo)\n        }\n    }\n\n    override fun decoderDestroyed(mediaCodec: MediaCodec) {\n        if (DEBUG) Log.v(TAG, \"decoderDestroyed()\")\n        resetFrameTiming()\n    }\n\n    private fun resetFrameTiming() {\n        if (DEBUG) Log.v(TAG, \"resetFrameTiming()\")\n        streamStartPtsUs = null\n        playbackStartRealtimeNs = null\n        lastFrameReleaseTimeNs = Long.MIN_VALUE\n        lastPresentationTimeUs = Long.MIN_VALUE\n    }\n\n    companion object {\n        private val FRAME_DROP_THRESHOLD_NS = TimeUnit.MILLISECONDS.toNanos(80)\n        private val MIN_FRAME_SPACING_NS = TimeUnit.MILLISECONDS.toNanos(1)\n        private val RENDER_EARLY_MARGIN_NS = TimeUnit.MILLISECONDS.toNanos(2)\n    }\n\n}\n"
  },
  {
    "path": "library-client-rtsp/src/main/java/com/alexvas/rtsp/codec/color/ColorConverter.kt",
    "content": "package com.alexvas.rtsp.codec.color\n\nimport android.annotation.SuppressLint\nimport android.graphics.Bitmap\nimport android.graphics.Matrix\nimport android.graphics.Rect\nimport android.media.Image\nimport androidx.camera.core.FlashState\nimport androidx.camera.core.ImageInfo\nimport androidx.camera.core.ImageProcessingUtil\nimport androidx.camera.core.ImageProxy\nimport androidx.camera.core.ImmutableImageInfo\nimport androidx.camera.core.impl.TagBundle\nimport java.nio.ByteBuffer\n\n/**\n * Convert Image YUV 4:2:0 888 to Bitmap ARGB 8888.\n */\nclass ColorConverterImageAndroidX: ColorConverterImage() {\n\n    @SuppressLint(\"RestrictedApi\")\n    override fun getBitmapFromImage(image: Image): Bitmap {\n        // YUV 4:2:0 888 -> ARGB 8888\n        return ImageProcessingUtil.convertYUVToBitmap(AndroidImageProxy(image))\n    }\n\n    override fun release() {\n    }\n\n}\n\ninternal class AndroidImageProxy(private val image: Image) : ImageProxy {\n\n    private val planes: Array<AndroidPlaneProxy> = if (image.planes != null) {\n        Array(image.planes.size) { i -> AndroidPlaneProxy(image.planes[i]) }\n    } else {\n        emptyArray()\n    }\n    @SuppressLint(\"RestrictedApi\")\n    private val imageInfo: ImageInfo = ImmutableImageInfo.create(\n        TagBundle.emptyBundle(),\n        image.timestamp,\n        0,\n        Matrix(),\n        FlashState.UNAVAILABLE\n    )\n\n    override fun close() {\n        image.close()\n    }\n\n    override fun getCropRect(): Rect {\n        return image.cropRect\n    }\n\n    override fun setCropRect(rect: Rect?) 
{\n        image.cropRect = rect\n    }\n\n    override fun getFormat(): Int {\n        return image.format\n    }\n\n    override fun getHeight(): Int {\n        return image.height\n    }\n\n    override fun getWidth(): Int {\n        return image.width\n    }\n\n    override fun getPlanes(): Array<ImageProxy.PlaneProxy> {\n        @Suppress(\"UNCHECKED_CAST\")\n        return planes as Array<ImageProxy.PlaneProxy>\n    }\n\n    /** An [ImageProxy.PlaneProxy] which wraps around an [Image.Plane].  */\n    private class AndroidPlaneProxy(private val mPlane: Image.Plane) : ImageProxy.PlaneProxy {\n        override fun getRowStride(): Int {\n            return mPlane.rowStride\n        }\n\n        override fun getPixelStride(): Int {\n            return mPlane.pixelStride\n        }\n\n        override fun getBuffer(): ByteBuffer {\n            return mPlane.buffer\n        }\n    }\n\n    override fun getImageInfo(): ImageInfo {\n        return imageInfo\n    }\n\n    @SuppressLint(\"UnsafeOptInUsageError\")\n    override fun getImage(): Image {\n        return image\n    }\n}\n"
  },
  {
    "path": "library-client-rtsp/src/main/java/com/alexvas/rtsp/codec/color/ColorConverterImage.kt",
    "content": "package com.alexvas.rtsp.codec.color\n\nimport android.graphics.Bitmap\nimport android.media.Image\n\nabstract class ColorConverter {\n\n    abstract fun release()\n\n}\n\nabstract class ColorConverterImage: ColorConverter() {\n\n    abstract fun getBitmapFromImage(image: Image): Bitmap\n\n}\n"
  },
  {
    "path": "library-client-rtsp/src/main/java/com/alexvas/rtsp/parser/AacParser.java",
    "content": "package com.alexvas.rtsp.parser;\n\nimport android.annotation.SuppressLint;\nimport android.util.Log;\n\nimport androidx.annotation.NonNull;\nimport androidx.annotation.Nullable;\n\nimport androidx.media3.common.util.ParsableBitArray;\nimport androidx.media3.common.util.ParsableByteArray;\n\n// https://tools.ietf.org/html/rfc3640\n//          +---------+-----------+-----------+---------------+\n//         | RTP     | AU Header | Auxiliary | Access Unit   |\n//         | Header  | Section   | Section   | Data Section  |\n//         +---------+-----------+-----------+---------------+\n//\n//                   <----------RTP Packet Payload----------->\n@SuppressLint(\"UnsafeOptInUsageError\")\npublic class AacParser extends AudioParser {\n\n    private static final String TAG = AacParser.class.getSimpleName();\n    private static final boolean DEBUG = false;\n\n    private final ParsableBitArray headerScratchBits;\n    private final ParsableByteArray headerScratchBytes;\n\n    private static final int MODE_LBR = 0;\n    private static final int MODE_HBR = 1;\n\n    // Number of bits for AAC AU sizes, indexed by mode (LBR and HBR)\n    private static final int[] NUM_BITS_AU_SIZES = {6, 13};\n\n    // Number of bits for AAC AU index(-delta), indexed by mode (LBR and HBR)\n    private static final int[] NUM_BITS_AU_INDEX = {2, 3};\n\n    // Frame Sizes for AAC AU fragments, indexed by mode (LBR and HBR)\n    private static final int[] FRAME_SIZES = {63, 8191};\n\n    private final int _aacMode;\n    private boolean completeFrameIndicator = true;\n\n    public AacParser(@NonNull String aacMode) {\n        _aacMode = aacMode.equalsIgnoreCase(\"AAC-lbr\") ? 
MODE_LBR : MODE_HBR;\n\n        headerScratchBits = new ParsableBitArray();\n        headerScratchBytes = new ParsableByteArray();\n    }\n\n    @Override\n    @Nullable\n    public byte[] processRtpPacketAndGetSample(@NonNull byte[] data, int length) {\n        if (DEBUG)\n            Log.v(TAG, \"processRtpPacketAndGetSample(length=\" + length + \")\");\n        int auHeadersCount = 1;\n        int numBitsAuSize = NUM_BITS_AU_SIZES[_aacMode];\n        int numBitsAuIndex = NUM_BITS_AU_INDEX[_aacMode];\n\n        ParsableByteArray packet = new ParsableByteArray(data, length);\n\n//      +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- .. -+-+-+-+-+-+-+-+-+-+\n//      |AU-headers-length|AU-header|AU-header|      |AU-header|padding|\n//      |                 |   (1)   |   (2)   |      |   (n)   | bits  |\n//      +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- .. -+-+-+-+-+-+-+-+-+-+\n        int auHeadersLength = packet.readShort();//((data[0] & 0xFF) << 8) | (data[1] & 0xFF);\n        int auHeadersLengthBytes = (auHeadersLength + 7) / 8;\n\n        headerScratchBytes.reset(auHeadersLengthBytes);\n        packet.readBytes(headerScratchBytes.getData(), 0, auHeadersLengthBytes);\n        headerScratchBits.reset(headerScratchBytes.getData());\n\n        int bitsAvailable = auHeadersLength - (numBitsAuSize + numBitsAuIndex);\n\n        if (bitsAvailable > 0) {// && (numBitsAuSize + numBitsAuSize) > 0) {\n            auHeadersCount +=  bitsAvailable / (numBitsAuSize + numBitsAuIndex);\n        }\n\n        if (auHeadersCount == 1) {\n            int auSize = headerScratchBits.readBits(numBitsAuSize);\n            int auIndex = headerScratchBits.readBits(numBitsAuIndex);\n\n            if (completeFrameIndicator) {\n                if (auIndex == 0) {\n                    if (packet.bytesLeft() == auSize) {\n                        return handleSingleAacFrame(packet);\n\n                    } else {\n//                        handleFragmentationAacFrame(packet, auSize);\n               
     }\n                }\n            } else {\n//                handleFragmentationAacFrame(packet, auSize);\n            }\n\n        } else {\n            if (completeFrameIndicator) {\n//                handleMultipleAacFrames(packet, auHeadersLength);\n            }\n        }\n//        byte[] auHeader = new byte[length-2-auHeadersLengthBytes];\n//        System.arraycopy(data,2-auHeadersLengthBytes, auHeader,0, auHeader.length);\n//        if (DEBUG)\n//            Log.d(TAG, \"AU headers size: \" + auHeadersLengthBytes + \", AU headers: \" + auHeadersCount + \", sample length: \" + auHeader.length);\n//        return auHeader;\n        return new byte[0];\n    }\n\n    private byte[] handleSingleAacFrame(ParsableByteArray packet) {\n        int length = packet.bytesLeft();\n        byte[] data = new byte[length];\n        System.arraycopy(packet.getData(), packet.getPosition(), data,0, data.length);\n        return data;\n    }\n\n//    private static final class AUHeader {\n//        private int size;\n//        private int index;\n//\n//        public AUHeader(int size, int index) {\n//            this.size = size;\n//            this.index = index;\n//        }\n//\n//        public int size() { return size; }\n//\n//        public int index() { return index; }\n//    }\n\n//    /**\n//     * Stores the consecutive fragment AU to reconstruct an AAC-Frame\n//     */\n//    private static final class FragmentedAacFrame {\n//        public byte[] auData;\n//        public int auLength;\n//        public int auSize;\n//\n//        private int sequence;\n//\n//        public FragmentedAacFrame(int frameSize) {\n//            // Initialize data\n//            auData = new byte[frameSize];\n//            sequence = -1;\n//        }\n//\n//        /**\n//         * Resets the buffer, clearing any data that it holds.\n//         */\n//        public void reset() {\n//            auLength = 0;\n//            auSize = 0;\n//            sequence = -1;\n//        
}\n//\n//        public void sequence(int sequence) {\n//            this.sequence = sequence;\n//        }\n//\n//        public int sequence() {\n//            return sequence;\n//        }\n//\n//        /**\n//         * Called to add a fragment unit to fragmented AU.\n//         *\n//         * @param fragment Holds the data of fragment unit being passed.\n//         * @param offset The offset of the data in {@code fragment}.\n//         * @param limit The limit (exclusive) of the data in {@code fragment}.\n//         */\n//        public void appendFragment(byte[] fragment, int offset, int limit) {\n//            if (auSize == 0) {\n//                auSize = limit;\n//            } else if (auSize != limit) {\n//                reset();\n//            }\n//\n//            if (auData.length < auLength + limit) {\n//                auData = Arrays.copyOf(auData, (auLength + limit) * 2);\n//            }\n//\n//            System.arraycopy(fragment, offset, auData, auLength, limit);\n//            auLength += limit;\n//        }\n//\n//        public boolean isCompleted() {\n//            return auSize == auLength;\n//        }\n//    }\n\n}\n"
  },
  {
    "path": "library-client-rtsp/src/main/java/com/alexvas/rtsp/parser/AudioParser.kt",
    "content": "package com.alexvas.rtsp.parser\n\nabstract class AudioParser {\n    abstract fun processRtpPacketAndGetSample(\n        data: ByteArray,\n        length: Int\n    ): ByteArray?\n}"
  },
  {
    "path": "library-client-rtsp/src/main/java/com/alexvas/rtsp/parser/G711Parser.kt",
    "content": "package com.alexvas.rtsp.parser\n\nclass G711Parser() : AudioParser() {\n    override fun processRtpPacketAndGetSample(\n        data: ByteArray,\n        length: Int\n    ): ByteArray? {\n        val g711Payload = data.copyOfRange(0, length)\n        return g711Payload\n    }\n}\n"
  },
  {
    "path": "library-client-rtsp/src/main/java/com/alexvas/rtsp/parser/RtpH264Parser.kt",
    "content": "package com.alexvas.rtsp.parser\n\nimport android.util.Log\nimport com.alexvas.utils.VideoCodecUtils\nimport com.alexvas.utils.VideoCodecUtils.getH264NalUnitTypeString\n\nclass RtpH264Parser: RtpParser() {\n\n    override fun processRtpPacketAndGetNalUnit(data: ByteArray, length: Int, marker: Boolean): ByteArray? {\n        if (DEBUG) Log.v(TAG, \"processRtpPacketAndGetNalUnit(data.size=${data.size}, length=$length, marker=$marker)\")\n\n        val nalType = (data[0].toInt() and 0x1F).toByte()\n        val packFlag = data[1].toInt() and 0xC0\n        var nalUnit: ByteArray? = null\n\n        if (DEBUG)\n            Log.d(TAG, \"\\t\\tNAL type: ${getH264NalUnitTypeString(nalType)}, pack flag: 0x${Integer.toHexString(packFlag).lowercase()}\")\n\n        when (nalType) {\n            VideoCodecUtils.NAL_STAP_A, VideoCodecUtils.NAL_STAP_B -> {\n                // Not supported\n            }\n\n            VideoCodecUtils.NAL_MTAP16, VideoCodecUtils.NAL_MTAP24 -> {\n                // Not supported\n            }\n\n            VideoCodecUtils.NAL_FU_A -> {\n                when (packFlag) {\n                    0x80 -> {\n                        addStartFragmentedPacket(data, length)\n                    }\n\n                    0x00 -> {\n                        if (marker) {\n                            // Sometimes 0x40 end packet is not arrived. 
Use marker bit in this case\n                            // to finish fragmented packet.\n                            nalUnit = addEndFragmentedPacketAndCombine(data, length)\n                        } else {\n                            addMiddleFragmentedPacket(data, length)\n                        }\n                    }\n\n                    0x40 -> {\n                        nalUnit = addEndFragmentedPacketAndCombine(data, length)\n                    }\n                }\n            }\n\n            VideoCodecUtils.NAL_FU_B -> {\n                // Not supported\n            }\n\n            else -> {\n                nalUnit = processSingleFramePacket(data, length)\n                clearFragmentedBuffer()\n                if (DEBUG) Log.d(TAG, \"Single NAL (${nalUnit.size})\")\n            }\n        }\n        return nalUnit\n    }\n\n    private fun addStartFragmentedPacket(data: ByteArray, length: Int) {\n        if (DEBUG) Log.v(TAG, \"addStartFragmentedPacket(data.size=${data.size}, length=$length)\")\n        fragmentedPackets = 0\n        fragmentedBufferLength = length - 1\n        fragmentedBuffer[0] = ByteArray(fragmentedBufferLength).apply {\n            this[0] = ((data[0].toInt() and 0xE0) or (data[1].toInt() and 0x1F)).toByte()\n        }\n        System.arraycopy(data, 2, fragmentedBuffer[0]!!, 1, length - 2)\n    }\n\n    private fun addMiddleFragmentedPacket(data: ByteArray, length: Int) {\n        if (DEBUG) Log.v(TAG, \"addMiddleFragmentedPacket(data.size=${data.size}, length=$length)\")\n        fragmentedPackets++\n        if (fragmentedPackets >= fragmentedBuffer.size) {\n            Log.e(TAG, \"Too many middle packets. No NAL FU_A end packet received. 
Skipped RTP packet.\")\n            fragmentedBuffer[0] = null\n        } else {\n            fragmentedBufferLength += length - 2\n            fragmentedBuffer[fragmentedPackets] = ByteArray(length - 2)\n            System.arraycopy(data, 2, fragmentedBuffer[fragmentedPackets]!!, 0, length - 2)\n        }\n    }\n\n    private fun addEndFragmentedPacketAndCombine(data: ByteArray, length: Int): ByteArray? {\n        if (DEBUG) Log.v(TAG, \"addEndFragmentedPacketAndCombine(data.size=${data.size}, length=$length)\")\n        var nalUnit: ByteArray? = null\n        var tmpLen: Int\n        if (fragmentedBuffer[0] == null) {\n            Log.e(TAG, \"No NAL FU_A start packet received. Skipped RTP packet.\")\n        } else {\n            nalUnit = ByteArray(fragmentedBufferLength + length + 2)\n            writeNalPrefix0001(nalUnit)\n            tmpLen = 4\n            // Write start and middle packets\n            for (i in 0 until fragmentedPackets + 1) {\n                fragmentedBuffer[i]!!.apply {\n                    System.arraycopy(\n                        this,\n                        0,\n                        nalUnit,\n                        tmpLen,\n                        this.size\n                    )\n                    tmpLen += this.size\n                }\n            }\n            // Write end packet\n            System.arraycopy(data, 2, nalUnit, tmpLen, length - 2)\n            clearFragmentedBuffer()\n            if (DEBUG) Log.d(TAG, \"Fragmented NAL (${nalUnit.size})\")\n        }\n        return nalUnit\n    }\n\n    private fun clearFragmentedBuffer() {\n        if (DEBUG) Log.v(TAG, \"clearFragmentedBuffer()\")\n        for (i in 0 until fragmentedPackets + 1) {\n            fragmentedBuffer[i] = null\n        }\n    }\n\n    companion object {\n        private val TAG: String = RtpH264Parser::class.java.simpleName\n        private const val DEBUG = false\n    }\n\n}\n"
  },
  {
    "path": "library-client-rtsp/src/main/java/com/alexvas/rtsp/parser/RtpH265Parser.kt",
    "content": "package com.alexvas.rtsp.parser\n\nimport android.util.Log\n\nclass RtpH265Parser: RtpParser() {\n\n    override fun processRtpPacketAndGetNalUnit(data: ByteArray, length: Int, marker: Boolean): ByteArray? {\n        if (DEBUG) Log.v(TAG, \"processRtpPacketAndGetNalUnit(length=$length, marker=$marker)\")\n\n        // NAL Unit Header.type (RFC7798 Section 1.1.4).\n        val nalType = ((data[0].toInt() shr 1) and 0x3F).toByte()\n        var nalUnit: ByteArray? = null\n\n//        Log.d(TAG, \"\\t\\tNAL type: ${VideoCodecUtils.getH265NalUnitTypeString(nalType)}\")\n\n        if (nalType in 0..<RTP_PACKET_TYPE_AP) {\n            nalUnit = processSingleFramePacket(data, length)\n            clearFragmentedBuffer()\n            if (DEBUG) Log.d(TAG, \"Single NAL (${nalUnit.size})\")\n        } else if (nalType == RTP_PACKET_TYPE_AP) {\n            // TODO: Support AggregationPacket mode.\n            Log.e(TAG, \"need to implement processAggregationPacket\")\n        } else if (nalType == RTP_PACKET_TYPE_FU) {\n            nalUnit = processFragmentationUnitPacket(data, length, marker)\n        } else {\n            Log.e(TAG, \"RTP H265 payload type [${nalType}] not supported.\")\n        }\n\n        return nalUnit\n    }\n\n    private fun processFragmentationUnitPacket(data: ByteArray, length: Int, marker: Boolean): ByteArray? 
{\n        if (DEBUG) Log.v(TAG, \"processFragmentationUnitPacket(length=$length, marker=$marker)\")\n\n        val fuHeader = data[2].toInt()\n        val isFirstFuPacket = (fuHeader and 0x80) > 0\n        val isLastFuPacket = (fuHeader and 0x40) > 0\n\n        if (isFirstFuPacket) {\n            addStartFragmentedPacket(data, length)\n        } else if (isLastFuPacket || marker) {\n            return addEndFragmentedPacketAndCombine(data, length)\n        } else {\n            addMiddleFragmentedPacket(data, length)\n        }\n        return null\n    }\n\n    private fun addStartFragmentedPacket(data: ByteArray, length: Int) {\n        if (DEBUG) Log.v(TAG, \"addStartFragmentedPacket(data.size=${data.size}, length=$length)\")\n        fragmentedPackets = 0\n        fragmentedBufferLength = length - 1\n        fragmentedBuffer[0] = ByteArray(fragmentedBufferLength).apply {\n\n            val tid = (data[1].toInt() and 0x7)\n            val fuHeader = data[2].toInt()\n            val nalUnitType = fuHeader and 0x3F\n\n            // Convert RTP header into HEVC NAL Unit header according to RFC7798 Section 1.1.4.\n            // RTP byte 0: ignored.\n            // RTP byte 1: repurposed as HEVC NALU byte 0, copy NALU type.\n            // RTP Byte 2: repurposed as HEVC NALU byte 1, layerId required to be zero, copying only tid.\n            // Set data position from byte 1 as byte 0 is ignored.\n            this[0] = (((nalUnitType shl 1) and 0x7F).toByte())\n            this[1] = tid.toByte()\n        }\n        System.arraycopy(data, 3, fragmentedBuffer[0]!!, 2, length - 3)\n    }\n\n    private fun addMiddleFragmentedPacket(data: ByteArray, length: Int) {\n        if (DEBUG) Log.v(TAG, \"addMiddleFragmentedPacket(data.size=${data.size}, length=$length)\")\n        fragmentedPackets++\n        if (fragmentedPackets >= fragmentedBuffer.size) {\n            Log.e(TAG, \"Too many middle packets. No RTP_PACKET_TYPE_FU end packet received. 
Skipped RTP packet.\")\n            fragmentedBuffer[0] = null\n        } else {\n            fragmentedBufferLength += length - 3\n            fragmentedBuffer[fragmentedPackets] = ByteArray(length - 3).apply {\n                System.arraycopy(data, 3, this, 0, length - 3)\n            }\n        }\n    }\n\n    private fun addEndFragmentedPacketAndCombine(data: ByteArray, length: Int): ByteArray? {\n        if (DEBUG) Log.v(TAG, \"addEndFragmentedPacketAndCombine(data.size=${data.size}, length=$length)\")\n        var nalUnit: ByteArray? = null\n        if (fragmentedBuffer[0] == null) {\n            Log.e(TAG, \"No NAL FU_A start packet received. Skipped RTP packet.\")\n        } else {\n            nalUnit = ByteArray(fragmentedBufferLength + length + 3)\n            writeNalPrefix0001(nalUnit)\n            var tmpLen = 4\n            // Write start and middle packets\n            for (i in 0 until fragmentedPackets + 1) {\n                fragmentedBuffer[i]!!.apply {\n                    System.arraycopy(\n                        this,\n                        0,\n                        nalUnit,\n                        tmpLen,\n                        this.size\n                    )\n                    tmpLen += this.size\n                }\n            }\n            // Write end packet\n            System.arraycopy(data, 3, nalUnit, tmpLen, length - 3)\n            clearFragmentedBuffer()\n            if (DEBUG) Log.d(TAG, \"Fragmented NAL (${nalUnit.size})\")\n        }\n        return nalUnit\n    }\n\n    private fun clearFragmentedBuffer() {\n        if (DEBUG) Log.v(TAG, \"clearFragmentedBuffer()\")\n        for (i in 0 until fragmentedPackets + 1) {\n            fragmentedBuffer[i] = null\n        }\n    }\n\n    companion object {\n        private val TAG: String = RtpH265Parser::class.java.simpleName\n        private const val DEBUG = false\n\n        /** Aggregation Packet. RFC7798 Section 4.4.2.  
*/\n        private const val RTP_PACKET_TYPE_AP: Byte = 48\n        /** Fragmentation Unit. RFC7798 Section 4.4.3. */\n        private const val RTP_PACKET_TYPE_FU: Byte = 49\n    }\n\n}\n"
  },
  {
    "path": "library-client-rtsp/src/main/java/com/alexvas/rtsp/parser/RtpHeaderParser.java",
    "content": "package com.alexvas.rtsp.parser;\n\nimport android.util.Log;\n\nimport androidx.annotation.NonNull;\nimport androidx.annotation.Nullable;\n\nimport com.alexvas.utils.NetUtils;\n\nimport java.io.IOException;\nimport java.io.InputStream;\n\npublic class RtpHeaderParser {\n\n    private static final String TAG = RtpHeaderParser.class.getSimpleName();\n    private static final boolean DEBUG = false;\n\n    private final static int RTP_HEADER_SIZE = 12;\n\n    public static class RtpHeader {\n        public int version;\n        public int padding;\n        public int extension;\n        public int cc;\n        public int marker;\n        public int payloadType;\n        public int sequenceNumber;\n        public long timeStamp;\n        public long ssrc;\n        public int payloadSize;\n\n        public long getTimestampMsec() {\n            return (long)(timeStamp * 11.111111);\n        }\n\n        // If RTP header found, return 4 bytes of the header\n        private static boolean searchForNextRtpHeader(@NonNull InputStream inputStream, @NonNull byte[] header /*out*/) throws IOException {\n            if (header.length < 4)\n                throw new IOException(\"Invalid allocated buffer size\");\n\n            int bytesRemaining = 100000; // 100 KB max to check\n            boolean foundFirstByte = false;\n            boolean foundSecondByte = false;\n            byte[] oneByte = new byte[1];\n            // Search for {0x24, 0x00}\n            do {\n                if (bytesRemaining-- < 0)\n                    return false;\n                // Read 1 byte\n                NetUtils.readData(inputStream, oneByte, 0, 1);\n                if (foundFirstByte) {\n                    // Found 0x24. 
Checking for 0x00-0x02.\n                    if (oneByte[0] == 0x00)\n                        foundSecondByte = true;\n                    else\n                        foundFirstByte = false;\n                }\n                if (!foundFirstByte && oneByte[0] == 0x24) {\n                    // Found 0x24\n                    foundFirstByte = true;\n                }\n            } while (!foundSecondByte);\n            header[0] = 0x24;\n            header[1] = oneByte[0];\n            // Read 2 bytes more (packet size)\n            NetUtils.readData(inputStream, header, 2, 2);\n            return true;\n        }\n\n        @Nullable\n        private static RtpHeader parseData(@NonNull byte[] header, int packetSize) {\n            RtpHeader rtpHeader = new RtpHeader();\n            rtpHeader.version = (header[0] & 0xFF) >> 6;\n            if (rtpHeader.version != 2) {\n                if (DEBUG)\n                    Log.e(TAG,\"Not a RTP packet (\" + rtpHeader.version + \")\");\n                return null;\n            }\n\n            // 80 60 40 91 fd ab d4 2a\n            // 80 c8 00 06\n            rtpHeader.padding = (header[0] & 0x20) >> 5; // 0b00100100\n            rtpHeader.extension = (header[0] & 0x10) >> 4;\n            rtpHeader.marker = (header[1] & 0x80) >> 7;\n            rtpHeader.payloadType = header[1] & 0x7F;\n            rtpHeader.sequenceNumber = (header[3] & 0xFF) + ((header[2] & 0xFF) << 8);\n            rtpHeader.timeStamp = (header[7] & 0xFF) + ((header[6] & 0xFF) << 8) + ((header[5] & 0xFF) << 16) + ((header[4] & 0xFF) << 24) & 0xffffffffL;\n            rtpHeader.ssrc = (header[7] & 0xFF) + ((header[6] & 0xFF) << 8) + ((header[5] & 0xFF) << 16) + ((header[4] & 0xFF) << 24) & 0xffffffffL;\n            rtpHeader.payloadSize = packetSize - RTP_HEADER_SIZE;\n            return rtpHeader;\n        }\n\n        private static int getPacketSize(@NonNull byte[] header) {\n            int packetSize = ((header[2] & 0xFF) << 8) | (header[3] & 
0xFF);\n            if (DEBUG)\n                Log.d(TAG, \"Packet size: \" + packetSize);\n            return packetSize;\n        }\n\n        public void dumpHeader() {\n            Log.d(\"RTP\",\"\\t\\tRTP header version: \" + version\n                    + \", padding: \" + padding\n                    + \", ext: \" + extension\n                    + \", cc: \" + cc\n                    + \", marker: \" + marker\n                    + \", payload type: \" + payloadType\n                    + \", seq num: \" + sequenceNumber\n                    + \", ts: \" + timeStamp\n                    + \", ssrc: \" + ssrc\n                    + \", payload size: \" + payloadSize);\n        }\n    }\n\n    @Nullable\n    public static RtpHeader readHeader(@NonNull InputStream inputStream) throws IOException {\n        // 24 01 00 1c 80 c8 00 06  7f 1d d2 c4\n        // 24 01 00 1c 80 c8 00 06  13 9b cf 60\n        // 24 02 01 12 80 e1 01 d2  00 07 43 f0\n        byte[] header = new byte[RTP_HEADER_SIZE];\n        // Skip 4 bytes (TCP only). No those bytes in UDP.\n        NetUtils.readData(inputStream, header, 0, 4);\n        if (DEBUG && header[0] == 0x24)\n            Log.d(TAG, header[1] == 0 ? \"RTP packet\" : \"RTCP packet\");\n\n        int packetSize = RtpHeader.getPacketSize(header);\n        if (DEBUG)\n            Log.d(TAG, \"Packet size: \" + packetSize);\n\n        if (NetUtils.readData(inputStream, header, 0, header.length) == header.length) {\n            RtpHeader rtpHeader = RtpHeader.parseData(header, packetSize);\n            if (rtpHeader == null) {\n                // Header not found. Possible keep-alive response. 
Search for another RTP header.\n                boolean foundHeader = RtpHeader.searchForNextRtpHeader(inputStream, header);\n                if (foundHeader) {\n                    packetSize = RtpHeader.getPacketSize(header);\n                    if (NetUtils.readData(inputStream, header, 0, header.length) == header.length)\n                        return RtpHeader.parseData(header, packetSize);\n                }\n            } else {\n                return rtpHeader;\n            }\n        }\n        return null;\n    }\n}\n"
  },
  {
    "path": "library-client-rtsp/src/main/java/com/alexvas/rtsp/parser/RtpParser.kt",
    "content": "package com.alexvas.rtsp.parser\n\nabstract class RtpParser {\n\n    abstract fun processRtpPacketAndGetNalUnit(data: ByteArray, length: Int, marker: Boolean): ByteArray?\n\n    // TODO Use already allocated buffer with RtpPacket.MAX_SIZE = 65507\n    // Used only for fragmented packets\n    protected val fragmentedBuffer = arrayOfNulls<ByteArray>(1024)\n    protected var fragmentedBufferLength = 0\n    protected var fragmentedPackets = 0\n\n    protected fun writeNalPrefix0001(buffer: ByteArray) {\n        buffer[0] = 0x00\n        buffer[1] = 0x00\n        buffer[2] = 0x00\n        buffer[3] = 0x01\n    }\n\n    protected fun processSingleFramePacket(data: ByteArray, length: Int): ByteArray {\n        return ByteArray(4 + length).apply {\n            writeNalPrefix0001(this)\n            System.arraycopy(data, 0, this, 4, length)\n        }\n    }\n\n}\n"
  },
  {
    "path": "library-client-rtsp/src/main/java/com/alexvas/rtsp/widget/RtspImageView.kt",
    "content": "package com.alexvas.rtsp.widget\n\nimport android.content.Context\nimport android.graphics.Bitmap\nimport android.net.Uri\nimport android.util.AttributeSet\nimport android.util.Log\nimport android.widget.ImageView\nimport com.alexvas.rtsp.codec.VideoDecodeThread\nimport com.alexvas.rtsp.codec.VideoDecoderBitmapThread\nimport com.alexvas.rtsp.widget.RtspProcessor.Statistics\nimport com.limelight.binding.video.MediaCodecHelper\n\n/**\n * Low latency RTSP stream playback on image view (bitmap).\n */\nclass RtspImageView : ImageView {\n\n    /** Optional listener to be called when bitmap obtained from video decoder. */\n    var onRtspImageBitmapListener: RtspImageBitmapListener? = null\n\n    interface RtspImageBitmapListener {\n        fun onRtspImageBitmapObtained(bitmap: Bitmap) {}\n    }\n\n    private var rtspProcessor = RtspProcessor(onVideoDecoderCreateRequested = {\n            videoMimeType, videoRotation, videoFrameQueue, videoDecoderListener, videoDecoderType, _ ->\n        VideoDecoderBitmapThread(\n            videoMimeType,\n            videoRotation,\n            videoFrameQueue,\n            videoDecoderListener,\n            videoDecoderBitmapListener,\n            videoDecoderType,\n        )\n    })\n\n    private val videoDecoderBitmapListener = object : VideoDecoderBitmapThread.VideoDecoderBitmapListener {\n        override fun onVideoDecoderBitmapObtained(bitmap: Bitmap) {\n            onRtspImageBitmapListener?.onRtspImageBitmapObtained(bitmap)\n            setImageBitmap(bitmap)\n            invalidate()\n        }\n    }\n\n    var statistics = Statistics()\n        get() = rtspProcessor.statistics\n        private set\n\n    var videoRotation: Int\n        get() = rtspProcessor.videoRotation\n        set(value) { rtspProcessor.videoRotation = value }\n\n    var videoDecoderType: VideoDecodeThread.DecoderType\n        get() = rtspProcessor.videoDecoderType\n        set(value) { rtspProcessor.videoDecoderType = value }\n\n    var 
debug: Boolean\n        get() = rtspProcessor.debug\n        set(value) { rtspProcessor.debug = value }\n\n    constructor(context: Context) : super(context) {\n        initView(context, null, 0)\n    }\n\n    constructor(context: Context, attrs: AttributeSet?) : super(context, attrs) {\n        initView(context, attrs, 0)\n    }\n\n    constructor(context: Context, attrs: AttributeSet?, defStyleAttr: Int) : super(context, attrs, defStyleAttr) {\n        initView(context, attrs, defStyleAttr)\n    }\n\n    private fun initView(context: Context, attrs: AttributeSet?, defStyleAttr: Int) {\n        if (DEBUG) Log.v(TAG, \"initView()\")\n        MediaCodecHelper.initialize(context, /*glRenderer*/ \"\")\n    }\n\n    fun init(\n        uri: Uri,\n        username: String? = null,\n        password: String? = null,\n        userAgent: String? = null,\n        socketTimeout: Int? = null\n    ) {\n        if (DEBUG) Log.v(TAG, \"init(uri='$uri', username='$username', password='$password', userAgent='$userAgent')\")\n        rtspProcessor.init(\n            uri,\n            username,\n            password,\n            userAgent,\n            socketTimeout ?: RtspProcessor.DEFAULT_SOCKET_TIMEOUT\n        )\n    }\n\n    /**\n     * Start RTSP client.\n     *\n     * @param requestVideo request video track\n     * @param requestAudio request audio track\n     * @param requestApplication request application track\n     * @see https://datatracker.ietf.org/doc/html/rfc4566#section-5.14\n     */\n    fun start(requestVideo: Boolean, requestAudio: Boolean, requestApplication: Boolean) {\n        if (DEBUG) Log.v(TAG, \"start(requestVideo=$requestVideo, requestAudio=$requestAudio, requestApplication=$requestApplication)\")\n        rtspProcessor.start(requestVideo, requestAudio, requestApplication)\n    }\n\n    /**\n     * Stop RTSP client.\n     */\n    fun stop() {\n        if (DEBUG) Log.v(TAG, \"stop()\")\n        rtspProcessor.stop()\n    }\n\n    fun isStarted(): Boolean 
{\n        return rtspProcessor.isStarted()\n    }\n\n    fun setStatusListener(listener: RtspStatusListener?) {\n        if (DEBUG) Log.v(TAG, \"setStatusListener()\")\n        rtspProcessor.statusListener = listener\n    }\n\n    fun setDataListener(listener: RtspDataListener?) {\n        if (DEBUG) Log.v(TAG, \"setDataListener()\")\n        rtspProcessor.dataListener = listener\n    }\n\n    companion object {\n        private val TAG: String = RtspImageView::class.java.simpleName\n        private const val DEBUG = false\n    }\n\n}\n"
  },
  {
    "path": "library-client-rtsp/src/main/java/com/alexvas/rtsp/widget/RtspListeners.kt",
    "content": "package com.alexvas.rtsp.widget\n\n/**\n * Listener for getting RTSP status update.\n */\ninterface RtspStatusListener {\n    fun onRtspStatusConnecting() {}\n    fun onRtspStatusConnected() {}\n    fun onRtspStatusDisconnecting() {}\n    fun onRtspStatusDisconnected() {}\n    fun onRtspStatusFailedUnauthorized() {}\n    fun onRtspStatusFailed(message: String?) {}\n    fun onRtspFirstFrameRendered() {}\n    fun onRtspFrameSizeChanged(width: Int, height: Int) {}\n}\n\n/**\n * Listener for getting RTSP raw data, e.g. for recording.\n */\ninterface RtspDataListener {\n    fun onRtspDataVideoNalUnitReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {}\n    fun onRtspDataAudioSampleReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {}\n    fun onRtspDataApplicationDataReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {}\n}\n"
  },
  {
    "path": "library-client-rtsp/src/main/java/com/alexvas/rtsp/widget/RtspProcessor.kt",
    "content": "package com.alexvas.rtsp.widget\n\nimport android.annotation.SuppressLint\nimport android.media.MediaFormat\nimport android.net.Uri\nimport android.os.Handler\nimport android.os.Looper\nimport android.util.Log\nimport androidx.media3.container.NalUnitUtil\nimport com.alexvas.rtsp.RtspClient\nimport com.alexvas.rtsp.RtspClient.SdpInfo\nimport com.alexvas.rtsp.codec.AudioCodecType\nimport com.alexvas.rtsp.codec.AudioDecodeThread\nimport com.alexvas.rtsp.codec.AudioFrameQueue\nimport com.alexvas.rtsp.codec.FrameQueue\nimport com.alexvas.rtsp.codec.VideoCodecType\nimport com.alexvas.rtsp.codec.VideoDecodeThread\nimport com.alexvas.rtsp.codec.VideoDecodeThread.DecoderType\nimport com.alexvas.rtsp.codec.VideoDecodeThread.VideoDecoderListener\nimport com.alexvas.rtsp.codec.VideoFrameQueue\nimport com.alexvas.utils.NetUtils\nimport com.alexvas.utils.VideoCodecUtils\nimport org.jcodec.codecs.h264.io.model.SeqParameterSet\nimport org.jcodec.codecs.h264.io.model.VUIParameters\nimport java.net.Socket\nimport java.nio.ByteBuffer\nimport java.util.concurrent.atomic.AtomicBoolean\nimport kotlin.math.min\n\nclass RtspProcessor(\n    private var onVideoDecoderCreateRequested: ((\n        videoMimeType: String,\n        videoRotation: Int, // 0, 90, 180, 270\n        videoFrameQueue: VideoFrameQueue,\n        videoDecoderListener: VideoDecoderListener,\n        videoDecoderType: DecoderType,\n        videoFrameRateStabilization: Boolean,\n    ) -> VideoDecodeThread)\n) {\n\n    class Statistics {\n        var videoDecoderType = DecoderType.HARDWARE\n        var videoDecoderName: String? = null\n        var videoDecoderLatencyMsec = -1\n        var networkLatencyMsec = -1\n    }\n\n    private lateinit var uri: Uri\n    private var username: String? = null\n    private var password: String? = null\n    private var userAgent: String? 
= null\n    private var requestVideo = true\n    private var requestAudio = true\n    private var requestApplication = false\n    private var rtspThread: RtspThread? = null\n    private var videoFrameQueue = VideoFrameQueue(60)\n    private var audioFrameQueue = AudioFrameQueue(10)\n    private var videoDecodeThread: VideoDecodeThread? = null\n    private var audioDecodeThread: AudioDecodeThread? = null\n    private val uiHandler = Handler(Looper.getMainLooper())\n    private var videoMimeType: String = \"video/avc\"\n    private var audioMimeType: String = \"\"\n    private var audioSampleRate: Int = 0\n    private var audioChannelCount: Int = 0\n    private var audioCodecConfig: ByteArray? = null\n    private var firstFrameRendered = false\n    var statistics = Statistics()\n        get() {\n            videoDecodeThread?.let { decoder ->\n                field.apply {\n                    networkLatencyMsec = decoder.getCurrentNetworkLatencyMsec()\n                    videoDecoderLatencyMsec = decoder.getCurrentVideoDecoderLatencyMsec()\n                    videoDecoderType = decoder.getCurrentVideoDecoderType()\n                    videoDecoderName = decoder.getCurrentVideoDecoderName()\n                }\n            }\n            return field\n        }\n        private set\n\n    /** Read and connect timeout for socket in msec. */\n    private var socketTimeoutMsec: Int = 5000\n\n    /**\n     * Show more debug info on console on runtime.\n     */\n    var debug = false\n\n    /**\n     * Video rotation in degrees. 
Allowed values: 0, 90, 180, 270.\n     * Note that not all hardware video decoders support rotation.\n     */\n    var videoRotation = 0\n        set(value) {\n            if (value == 0 || value == 90 || value == 180 || value == 270)\n                field = value\n        }\n\n    /**\n     * Requested video decoder type.\n     */\n    var videoDecoderType = DecoderType.HARDWARE\n\n    /**\n     * Try to modify SPS frame coming from camera with low-latency parameters to decrease video\n     * decoding latency.\n     * If SPS frame param num_ref_frames is equal to 1 or more, set it to 0. That should decrease\n     * decoder latency by 2x times on some hardware decoders.\n     */\n    var experimentalUpdateSpsFrameWithLowLatencyParams = false\n\n    /**\n     * Enables the playback smoothing logic inside the video decoder.\n     */\n    var videoFrameRateStabilization: Boolean = false\n        set(value) {\n            field = value\n            videoDecodeThread?.setVideoFrameRateStabilization(value)\n        }\n\n    /**\n     * Status listener for getting RTSP event updates.\n     */\n    var statusListener: RtspStatusListener? = null\n\n    /**\n     * Listener for getting raw data, e.g. for recording.\n     */\n    var dataListener: RtspDataListener? 
= null\n\n    private val proxyClientListener = object: RtspClient.RtspClientListener {\n\n        override fun onRtspConnecting() {\n            if (DEBUG) Log.v(TAG, \"onRtspConnecting()\")\n            uiHandler.post {\n                statusListener?.onRtspStatusConnecting()\n            }\n        }\n\n        override fun onRtspConnected(sdpInfo: SdpInfo) {\n            if (DEBUG) Log.v(TAG, \"onRtspConnected()\")\n            if (sdpInfo.videoTrack != null) {\n                videoFrameQueue.clear()\n                when (sdpInfo.videoTrack?.videoCodec) {\n                    RtspClient.VIDEO_CODEC_H264 -> videoMimeType = MediaFormat.MIMETYPE_VIDEO_AVC\n                    RtspClient.VIDEO_CODEC_H265 -> videoMimeType = MediaFormat.MIMETYPE_VIDEO_HEVC\n                }\n                when (sdpInfo.audioTrack?.audioCodec) {\n                    RtspClient.AUDIO_CODEC_AAC -> audioMimeType = MediaFormat.MIMETYPE_AUDIO_AAC\n                    RtspClient.AUDIO_CODEC_OPUS -> audioMimeType = MediaFormat.MIMETYPE_AUDIO_OPUS\n                    RtspClient.AUDIO_CODEC_G711_ULAW -> audioMimeType = MediaFormat.MIMETYPE_AUDIO_G711_MLAW\n                    RtspClient.AUDIO_CODEC_G711_ALAW -> audioMimeType = MediaFormat.MIMETYPE_AUDIO_G711_ALAW\n                }\n                val sps: ByteArray? = sdpInfo.videoTrack?.sps\n                val pps: ByteArray? 
= sdpInfo.videoTrack?.pps\n                // Initialize decoder\n                @SuppressLint(\"UnsafeOptInUsageError\")\n                if (sps != null && pps != null) {\n                    val vps: ByteArray = sdpInfo.videoTrack?.vps ?: ByteArray(0)\n                    val data = ByteArray(sps.size + pps.size + vps.size)\n                    var offset = 0\n                    sps.copyInto(data, offset, 0, sps.size)\n                    offset += sps.size\n                    pps.copyInto(data, offset, 0, pps.size)\n                    offset += pps.size\n                    vps.copyInto(data, offset, 0, vps.size)\n                    videoFrameQueue.push(\n                        FrameQueue.VideoFrame(\n                            VideoCodecType.H264,\n                            isKeyframe = true,\n                            data,\n                            0,\n                            data.size,\n                            0\n                        )\n                    )\n                    try {\n                        val startNalOffset = if (sps[3] == 1.toByte()) 5 else 4\n                        val spsData = NalUnitUtil.parseSpsNalUnitPayload(\n                            data, startNalOffset, data.size - startNalOffset)\n                        if (spsData.maxNumReorderFrames > 0) {\n                            Log.w(\n                                TAG, \"SPS frame param max_num_reorder_frames=\" +\n                                    \"${spsData.maxNumReorderFrames} is too high\" +\n                                    \" for low latency decoding (expecting 0).\"\n                            )\n                        }\n                        if (debug) {\n                            Log.d(TAG, \"SPS frame: ${sps.toHexString(0, sps.size)}\")\n                            Log.d(TAG, \"\\t${spsData.spsDataToString()}\")\n                            Log.d(TAG, \"PPS frame: ${pps.toHexString(0, pps.size)}\")\n                            
if (vps.isNotEmpty())\n                                Log.d(TAG, \"VPS frame: ${vps.toHexString(0, vps.size)}\")\n                        }\n                    } catch (e: Exception) {\n                        e.printStackTrace()\n                    }\n                } else {\n                    if (DEBUG) Log.d(TAG, \"RTSP SPS and PPS NAL units missed in SDP\")\n                }\n            }\n            if (sdpInfo.audioTrack != null) {\n                audioFrameQueue.clear()\n                when (sdpInfo.audioTrack?.audioCodec) {\n                    RtspClient.AUDIO_CODEC_AAC -> audioMimeType = MediaFormat.MIMETYPE_AUDIO_AAC\n                    RtspClient.AUDIO_CODEC_OPUS -> audioMimeType = MediaFormat.MIMETYPE_AUDIO_OPUS\n                }\n                audioSampleRate = sdpInfo.audioTrack?.sampleRateHz!!\n                audioChannelCount = sdpInfo.audioTrack?.channels!!\n                audioCodecConfig = sdpInfo.audioTrack?.config\n            }\n            onRtspClientConnected()\n            uiHandler.post {\n                statusListener?.onRtspStatusConnected()\n            }\n        }\n\n        private var framesPerGop = 0\n\n        override fun onRtspVideoNalUnitReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {\n            if (DEBUG) Log.v(TAG, \"onRtspVideoNalUnitReceived(data.size=${data.size}, length=$length, timestamp=$timestamp)\")\n\n            val isH265 = videoMimeType == MediaFormat.MIMETYPE_VIDEO_HEVC\n            // Search for NAL_IDR_SLICE within first 1KB maximum\n            val isKeyframe = VideoCodecUtils.isAnyKeyFrame(data, offset, min(length, 1000), isH265)\n\n            var videoFrame = FrameQueue.VideoFrame(\n                VideoCodecType.H264,\n                isKeyframe,\n                data,\n                offset,\n                length,\n                timestamp,\n                capturedTimestampMs = System.currentTimeMillis()\n            )\n            if (isKeyframe && 
experimentalUpdateSpsFrameWithLowLatencyParams) {\n                videoFrame = getNewLowLatencyFrameFromKeyFrame(videoFrame)\n            }\n\n            if (debug) {\n                nalUnitsFound.clear()\n                VideoCodecUtils.getNalUnits(videoFrame.data, videoFrame.offset, videoFrame.length, nalUnitsFound, isH265)\n                var b = StringBuilder()\n                for (nal in nalUnitsFound) {\n                    b\n                    .append(if (isH265)\n                            VideoCodecUtils.getH265NalUnitTypeString(nal.type)\n                        else\n                            VideoCodecUtils.getH264NalUnitTypeString(nal.type))\n                    .append(\" (${nal.length}), \")\n                }\n                if (b.length > 2)\n                    b = b.removeRange(b.length - 2, b.length) as StringBuilder\n                Log.d(TAG, \"NALs: $b\")\n                @SuppressLint(\"UnsafeOptInUsageError\")\n                if (isKeyframe) {\n                    val sps = VideoCodecUtils.getSpsNalUnitFromArray(\n                        videoFrame.data,\n                        videoFrame.offset,\n                        // Check only first 100 bytes maximum. 
That's enough for finding SPS NAL unit.\n                        Integer.min(videoFrame.length, VideoCodecUtils.MAX_NAL_SPS_SIZE),\n                        isH265\n                    )\n                    Log.d(TAG,\n                        \"\\tKey frame received (${videoFrame.length} bytes, ts=$timestamp,\" +\n                        \" ${sps?.width}x${sps?.height},\" +\n                        \" GoP=$framesPerGop,\" +\n                        \" profile=${sps?.profileIdc}, level=${sps?.levelIdc})\")\n                    framesPerGop = 0\n                } else {\n                    framesPerGop++\n                }\n            }\n\n            videoFrameQueue.push(videoFrame)\n            dataListener?.onRtspDataVideoNalUnitReceived(\n                videoFrame.data,\n                videoFrame.offset,\n                videoFrame.length,\n                timestamp)\n        }\n\n        override fun onRtspAudioSampleReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {\n            if (DEBUG) Log.v(TAG, \"onRtspAudioSampleReceived(length=$length, timestamp=$timestamp)\")\n            if (length > 0) {\n                audioFrameQueue.push(\n                    FrameQueue.AudioFrame(\n                        AudioCodecType.AAC_LC,\n                        data, offset,\n                        length,\n                        timestamp\n                    )\n                )\n            }\n            dataListener?.onRtspDataAudioSampleReceived(data, offset, length, timestamp)\n        }\n\n        override fun onRtspApplicationDataReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {\n            if (DEBUG) Log.v(TAG, \"onRtspApplicationDataReceived(length=$length, timestamp=$timestamp)\")\n            dataListener?.onRtspDataApplicationDataReceived(data, offset, length, timestamp)\n        }\n\n        override fun onRtspDisconnecting() {\n            if (DEBUG) Log.v(TAG, \"onRtspDisconnecting()\")\n            
uiHandler.post {\n                statusListener?.onRtspStatusDisconnecting()\n            }\n        }\n\n        override fun onRtspDisconnected() {\n            if (DEBUG) Log.v(TAG, \"onRtspDisconnected()\")\n            uiHandler.post {\n                statusListener?.onRtspStatusDisconnected()\n            }\n        }\n\n        override fun onRtspFailedUnauthorized() {\n            if (DEBUG) Log.v(TAG, \"onRtspFailedUnauthorized()\")\n            uiHandler.post {\n                statusListener?.onRtspStatusFailedUnauthorized()\n            }\n        }\n\n        override fun onRtspFailed(message: String?) {\n            if (DEBUG) Log.v(TAG, \"onRtspFailed(message='$message')\")\n            uiHandler.post {\n                statusListener?.onRtspStatusFailed(message)\n            }\n        }\n    }\n\n    inner class RtspThread: Thread() {\n        private var rtspStopped = AtomicBoolean(false)\n\n        fun stopAsync() {\n            if (DEBUG) Log.v(TAG, \"stopAsync()\")\n            rtspStopped.set(true)\n            // Wake up sleep() code\n            interrupt()\n        }\n\n        override fun run() {\n            onRtspClientStarted()\n            val port = if (uri.port == -1) DEFAULT_RTSP_PORT else uri.port\n            var socket: Socket? 
= null\n            try {\n                if (DEBUG) Log.d(TAG, \"Connecting to ${uri.host.toString()}:$port...\")\n\n                socket = if (uri.scheme?.lowercase() == \"rtsps\")\n                    NetUtils.createSslSocketAndConnect(\n                        uri.host.toString(),\n                        port,\n                        socketTimeoutMsec\n                    )\n                else\n                    NetUtils.createSocketAndConnect(\n                        uri.host.toString(),\n                        port,\n                        socketTimeoutMsec\n                    )\n\n                // Blocking call until stopped variable is true or connection failed\n                val rtspClient = RtspClient.Builder(socket, uri.toString(), rtspStopped, proxyClientListener)\n                    .requestVideo(requestVideo)\n                    .requestAudio(requestAudio)\n                    .requestApplication(requestApplication)\n                    .withDebug(debug)\n                    .withUserAgent(userAgent)\n                    .withCredentials(username, password)\n                    .build()\n                rtspClient.execute()\n            } catch (e: Exception) {\n                e.printStackTrace()\n                uiHandler.post { proxyClientListener.onRtspFailed(e.message) }\n            } finally {\n                NetUtils.closeSocket(socket)\n            }\n            onRtspClientStopped()\n        }\n    }\n\n    private val videoDecoderListener = object: VideoDecoderListener {\n        override fun onVideoDecoderStarted() {\n            if (DEBUG) Log.v(TAG, \"onVideoDecoderStarted()\")\n        }\n\n        override fun onVideoDecoderStopped() {\n            if (DEBUG) Log.v(TAG, \"onVideoDecoderStopped()\")\n        }\n\n        override fun onVideoDecoderFailed(message: String?) 
{\n            if (DEBUG) Log.e(TAG, \"onVideoDecoderFailed(message='$message')\")\n        }\n\n        override fun onVideoDecoderFormatChanged(width: Int, height: Int) {\n            if (DEBUG) Log.v(TAG, \"onVideoDecoderFormatChanged(width=$width, height=$height)\")\n            statusListener?.onRtspFrameSizeChanged(width, height)\n        }\n\n        override fun onVideoDecoderFirstFrameRendered() {\n            if (DEBUG) Log.v(TAG, \"onVideoDecoderFirstFrameDecoded()\")\n            if (!firstFrameRendered) statusListener?.onRtspFirstFrameRendered()\n            firstFrameRendered = true\n        }\n    }\n\n\n    private fun onRtspClientStarted() {\n        if (DEBUG) Log.v(TAG, \"onRtspClientStarted()\")\n//        uiHandler.post { statusListener?.onRtspStatusConnected() }\n    }\n\n    private fun onRtspClientConnected() {\n        if (DEBUG) Log.v(TAG, \"onRtspClientConnected()\")\n        if (videoMimeType.isNotEmpty()) {\n            firstFrameRendered = false\n            Log.i(TAG, \"Starting video decoder with mime type \\\"$videoMimeType\\\"\")\n            videoDecodeThread = onVideoDecoderCreateRequested.invoke(\n                videoMimeType,\n                videoRotation,\n                videoFrameQueue,\n                videoDecoderListener,\n                videoDecoderType,\n                videoFrameRateStabilization,\n            )\n            videoDecodeThread!!.apply {\n                name = \"RTSP video thread [${getUriName()}]\"\n                start()\n            }\n        }\n        if (audioMimeType.isNotEmpty() /*&& checkAudio!!.isChecked*/) {\n            Log.i(TAG, \"Starting audio decoder with mime type \\\"$audioMimeType\\\"\")\n            audioDecodeThread = AudioDecodeThread(\n                audioMimeType, audioSampleRate, audioChannelCount, audioCodecConfig, audioFrameQueue)\n            audioDecodeThread!!.apply {\n                name = \"RTSP audio thread [${getUriName()}]\"\n                start()\n           
 }\n        }\n    }\n\n    private fun onRtspClientStopped() {\n        if (DEBUG) Log.v(TAG, \"onRtspClientStopped()\")\n        stopDecoders()\n        rtspThread = null\n//        uiHandler.post { statusListener?.onRtspStatusDisconnected() }\n    }\n\n    fun init(uri: Uri, username: String?, password: String?, userAgent: String? = null, socketTimeout: Int = DEFAULT_SOCKET_TIMEOUT) {\n        if (DEBUG) Log.v(TAG, \"init(uri='$uri', username='$username', password='$password', userAgent='$userAgent', socketTimeout=$socketTimeout)\")\n        this.uri = uri\n        this.username = username\n        this.password = password\n        this.userAgent = userAgent\n        this.socketTimeoutMsec = socketTimeout\n    }\n\n    fun start(requestVideo: Boolean, requestAudio: Boolean, requestApplication: Boolean = false) {\n        if (DEBUG) Log.v(TAG, \"start(requestVideo=$requestVideo, requestAudio=$requestAudio, requestApplication=$requestApplication)\")\n        if (rtspThread != null) rtspThread?.stopAsync()\n        this.requestVideo = requestVideo\n        this.requestAudio = requestAudio\n        this.requestApplication = requestApplication\n        rtspThread = RtspThread().apply {\n            name = \"RTSP IO thread [${getUriName()}]\"\n            start()\n        }\n    }\n\n    fun stop() {\n        if (DEBUG) Log.v(TAG, \"stop()\")\n        rtspThread?.stopAsync()\n        rtspThread = null\n    }\n\n    fun isStarted(): Boolean {\n        return rtspThread != null\n    }\n\n    fun stopDecoders() {\n        if (DEBUG) Log.v(TAG, \"stopDecoders()\")\n        videoDecodeThread?.stopAsync()\n        videoDecodeThread = null\n        audioDecodeThread?.stopAsync()\n        audioDecodeThread = null\n    }\n\n// Cached values\n    private val nalUnitsFound = ArrayList<VideoCodecUtils.NalUnit>()\n    private val spsBufferReadFrame = ByteBuffer.allocate(VideoCodecUtils.MAX_NAL_SPS_SIZE)\n    private val spsBufferWriteFrame = 
ByteBuffer.allocate(VideoCodecUtils.MAX_NAL_SPS_SIZE)\n\n    /**\n     * Try to get a new frame keyframe (SPS+PPS+IDR) with low latency modified SPS frame.\n     * If modification failed, original frame will be returned.\n     * Inspired by https://webrtc.googlesource.com/src/+/refs/heads/main/common_video/h264/sps_vui_rewriter.cc#400\n     */\n    private fun getNewLowLatencyFrameFromKeyFrame(frame: FrameQueue.VideoFrame): FrameQueue.VideoFrame {\n        try {\n            // Support only H264 for now\n            if (frame.codecType == VideoCodecType.H265)\n                return frame\n\n            nalUnitsFound.clear()\n            VideoCodecUtils.getNalUnits(frame.data, frame.offset, frame.length, nalUnitsFound, isH265 = false)\n\n            val oldSpsNalUnit = nalUnitsFound.firstOrNull { it.type == VideoCodecUtils.NAL_SPS }\n\n            // SPS frame not found. Return original frame.\n            if (oldSpsNalUnit == null)\n                return frame\n\n            spsBufferReadFrame.apply {\n                rewind()\n                put(frame.data, oldSpsNalUnit.offset + 5,\n                    Integer.min(oldSpsNalUnit.length, VideoCodecUtils.MAX_NAL_SPS_SIZE)\n                )\n                rewind()\n            }\n            // Read SPS frame\n            val spsSet = SeqParameterSet.read(spsBufferReadFrame)\n\n            // adding VUI might decrease latency for some streams, if max_dec_frame_buffering is set properly\n            // https://community.intel.com/t5/Media-Intel-oneAPI-Video/h-264-decoder-gives-two-frames-latency-while-decoding-a-stream/td-p/1099694\n            // https://github.com/Consti10/LiveVideo10ms/blob/master/VideoCore/src/main/cpp/NALU/H26X.hpp\n            fun modifyVui() {\n//                spsSet.vuiParams = VUIParameters()\n                spsSet.vuiParams.apply {\n//                    videoSignalTypePresentFlag = true\n//                    videoFormat = 5\n//                    colourDescriptionPresentFlag = 
true\n//                    matrixCoefficients = 5\n//                    timingInfoPresentFlag = true\n//                    numUnitsInTick = 1\n//                    timeScale = 120\n//                    fixedFrameRateFlag = true\n                    bitstreamRestriction = VUIParameters.BitstreamRestriction().apply {\n//                        motionVectorsOverPicBoundariesFlag = true\n//                        log2MaxMvLengthHorizontal = 16\n//                        log2MaxMvLengthVertical = 16\n                        maxDecFrameBuffering = 1\n                        numReorderFrames = 0\n                    }\n                }\n            }\n            modifyVui()\n\n            // Write SPS frame\n            spsBufferWriteFrame.rewind()\n            spsSet.write(spsBufferWriteFrame)\n\n            val newSpsNalUnitSize = spsBufferWriteFrame.position()\n\n            if (oldSpsNalUnit.length > -1) {\n                val newSize = frame.length - oldSpsNalUnit.length + newSpsNalUnitSize\n                val newData = ByteArray(newSize + 5)\n                var newDataOffset = 0\n\n                for (nalUnit in nalUnitsFound) {\n                    when (nalUnit.type) {\n                        VideoCodecUtils.NAL_SPS -> {\n                            // Write NAL header + SPS frame type\n                            val b = byteArrayOf(0x00, 0x00, 0x00, 0x01, 0x27)\n                            b.copyInto(newData, newDataOffset, 0, b.size)\n                            newDataOffset += b.size\n                            // Write SPS frame body\n                            spsBufferWriteFrame.apply {\n                                rewind()\n                                get(newData, newDataOffset, newSpsNalUnitSize)\n                            }\n                            newDataOffset += newSpsNalUnitSize\n                        }\n\n                        else -> {\n                            frame.data.copyInto(\n                                
newData,\n                                newDataOffset,\n                                nalUnit.offset,\n                                nalUnit.offset + nalUnit.length\n                            )\n                            newDataOffset += nalUnit.length\n                        }\n                    }\n                }\n                // Create SPS+PPS+IDR frame with newly modified SPS frame data\n                return FrameQueue.VideoFrame(\n                    frame.codecType,\n                    frame.isKeyframe,\n                    newData,\n                    0,\n                    newData.size,\n                    frame.timestampMs,\n                    frame.capturedTimestampMs\n                )\n            }\n        } catch (e: Exception) {\n            Log.e(TAG, \"Failed to create low-latency keyframe\", e)\n        }\n        return frame\n    }\n\n    private fun getUriName(): String {\n        val port = if (uri.port == -1) DEFAULT_RTSP_PORT else uri.port\n        return \"${uri.host.toString()}:$port\"\n    }\n\n    companion object {\n        private val TAG: String = RtspProcessor::class.java.simpleName\n        private const val DEBUG = false\n\n        private const val DEFAULT_RTSP_PORT = 554\n\n        const val DEFAULT_SOCKET_TIMEOUT = 5000\n    }\n\n}\n"
  },
  {
    "path": "library-client-rtsp/src/main/java/com/alexvas/rtsp/widget/RtspSurfaceView.kt",
    "content": "package com.alexvas.rtsp.widget\n\nimport android.content.Context\nimport android.net.Uri\nimport android.util.AttributeSet\nimport android.util.Log\nimport android.view.SurfaceHolder\nimport android.view.SurfaceView\nimport androidx.annotation.OptIn\nimport androidx.media3.common.util.UnstableApi\nimport androidx.media3.container.NalUnitUtil\nimport com.alexvas.rtsp.codec.VideoDecodeThread.DecoderType\nimport com.alexvas.rtsp.codec.VideoDecoderSurfaceThread\nimport com.alexvas.rtsp.widget.RtspProcessor.Statistics\nimport com.limelight.binding.video.MediaCodecHelper\n\n/**\n * Low latency RTSP stream playback on surface view.\n */\nopen class RtspSurfaceView: SurfaceView {\n\n    private var surfaceWidth = 1920\n    private var surfaceHeight = 1080\n\n    private var rtspProcessor = RtspProcessor(\n        onVideoDecoderCreateRequested = {\n                videoMimeType,\n                videoRotation,\n                videoFrameQueue,\n                videoDecoderListener,\n                videoDecoderType,\n                videoFrameRateStabilization,\n            ->\n            VideoDecoderSurfaceThread(\n                holder.surface,\n                videoMimeType,\n                surfaceWidth,\n                surfaceHeight,\n                videoRotation,\n                videoFrameQueue,\n                videoDecoderListener,\n                videoDecoderType,\n                videoFrameRateStabilization,\n            )\n        }\n    )\n\n    var statistics = Statistics()\n        get() = rtspProcessor.statistics\n        private set\n\n    var videoRotation: Int\n        get() = rtspProcessor.videoRotation\n        set(value) { rtspProcessor.videoRotation = value }\n\n    var videoDecoderType: DecoderType\n        get() = rtspProcessor.videoDecoderType\n        set(value) { rtspProcessor.videoDecoderType = value }\n\n    var experimentalUpdateSpsFrameWithLowLatencyParams: Boolean\n        get() = 
rtspProcessor.experimentalUpdateSpsFrameWithLowLatencyParams\n        set(value) { rtspProcessor.experimentalUpdateSpsFrameWithLowLatencyParams = value }\n\n    var debug: Boolean\n        get() = rtspProcessor.debug\n        set(value) { rtspProcessor.debug = value }\n\n    /** Enables decoder-side playback smoothing. Disabled by default. */\n    var videoFrameRateStabilization: Boolean\n        get() = rtspProcessor.videoFrameRateStabilization\n        set(value) { rtspProcessor.videoFrameRateStabilization = value }\n\n    private val surfaceCallback = object: SurfaceHolder.Callback {\n        override fun surfaceCreated(holder: SurfaceHolder) {\n            if (DEBUG) Log.v(TAG, \"surfaceCreated()\")\n        }\n\n        override fun surfaceChanged(holder: SurfaceHolder, format: Int, width: Int, height: Int) {\n            if (DEBUG) Log.v(TAG, \"surfaceChanged(format=$format, width=$width, height=$height)\")\n            surfaceWidth = width\n            surfaceHeight = height\n        }\n\n        override fun surfaceDestroyed(holder: SurfaceHolder) {\n            if (DEBUG) Log.v(TAG, \"surfaceDestroyed()\")\n            rtspProcessor.stopDecoders()\n        }\n    }\n\n    constructor(context: Context) : super(context) {\n        initView(context, null, 0)\n    }\n\n    constructor(context: Context, attrs: AttributeSet?) : super(context, attrs) {\n        initView(context, attrs, 0)\n    }\n\n    constructor(context: Context, attrs: AttributeSet?, defStyleAttr: Int) : super(context, attrs, defStyleAttr) {\n        initView(context, attrs, defStyleAttr)\n    }\n\n    private fun initView(context: Context, attrs: AttributeSet?, defStyleAttr: Int) {\n        if (DEBUG) Log.v(TAG, \"initView()\")\n        MediaCodecHelper.initialize(context, /*glRenderer*/ \"\")\n        holder.addCallback(surfaceCallback)\n    }\n\n    fun init(\n        uri: Uri,\n        username: String? = null,\n        password: String? = null,\n        userAgent: String? 
= null,\n        socketTimeout: Int? = null\n    ) {\n        if (DEBUG) Log.v(TAG, \"init(uri='$uri', username='$username', password='$password', userAgent='$userAgent', socketTimeout=$socketTimeout)\")\n        rtspProcessor.init(\n            uri,\n            username,\n            password,\n            userAgent,\n            socketTimeout ?: RtspProcessor.DEFAULT_SOCKET_TIMEOUT\n        )\n    }\n\n    /**\n     * Start RTSP client.\n     *\n     * @param requestVideo request video track\n     * @param requestAudio request audio track\n     * @param requestApplication request application track\n     * @see https://datatracker.ietf.org/doc/html/rfc4566#section-5.14\n     */\n    fun start(requestVideo: Boolean, requestAudio: Boolean, requestApplication: Boolean = false) {\n        if (DEBUG) Log.v(TAG, \"start(requestVideo=$requestVideo, requestAudio=$requestAudio, requestApplication=$requestApplication)\")\n        rtspProcessor.start(requestVideo, requestAudio, requestApplication)\n    }\n\n    /**\n     * Stop RTSP client.\n     */\n    fun stop() {\n        if (DEBUG) Log.v(TAG, \"stop()\")\n        rtspProcessor.stop()\n    }\n\n    fun isStarted(): Boolean {\n        return rtspProcessor.isStarted()\n    }\n\n    fun setStatusListener(listener: RtspStatusListener?) {\n        if (DEBUG) Log.v(TAG, \"setStatusListener()\")\n        rtspProcessor.statusListener = listener\n    }\n\n    fun setDataListener(listener: RtspDataListener?) 
{\n        if (DEBUG) Log.v(TAG, \"setDataListener()\")\n        rtspProcessor.dataListener = listener\n    }\n\n    companion object {\n        private val TAG: String = RtspSurfaceView::class.java.simpleName\n        private const val DEBUG = false\n    }\n\n}\n\n@OptIn(UnstableApi::class)\nfun NalUnitUtil.SpsData.spsDataToString(): String {\n    return \"\" +\n        \"width=${this.width}, \" +\n        \"height=${this.height}, \" +\n        \"profile_idc=${this.profileIdc}, \" +\n        \"constraint_set_flags=${this.constraintsFlagsAndReservedZero2Bits}, \" +\n        \"level_idc=${this.levelIdc}, \" +\n        \"max_num_ref_frames=${this.maxNumRefFrames}, \" +\n        \"frame_mbs_only_flag=${this.frameMbsOnlyFlag}, \" +\n        \"log2_max_frame_num=${this.frameNumLength}, \" +\n        \"pic_order_cnt_type=${this.picOrderCountType}, \" +\n        \"log2_max_pic_order_cnt_lsb=${this.picOrderCntLsbLength}, \" +\n        \"delta_pic_order_always_zero_flag=${this.deltaPicOrderAlwaysZeroFlag}, \" +\n        \"max_reorder_frames=${this.maxNumReorderFrames}\"\n}\n\nfun ByteArray.toHexString(offset: Int, maxLength: Int): String {\n    val length = minOf(maxLength, size - offset)\n    return sliceArray(offset until (offset + length))\n        .joinToString(separator = \"\") { byte ->\n            \"%02x \".format(byte).uppercase()\n        }\n}\n"
  },
  {
    "path": "library-client-rtsp/src/main/java/com/alexvas/utils/ByteUtils.java",
    "content": "package com.alexvas.utils;\n\nimport androidx.annotation.NonNull;\n\nimport java.io.File;\nimport java.io.FileOutputStream;\n\npublic class ByteUtils {\n\n    // int memcmp ( const void * ptr1, const void * ptr2, size_t num );\n    public static boolean memcmp(\n            @NonNull byte[] source1,\n            int offsetSource1,\n            @NonNull byte[] source2,\n            int offsetSource2,\n            int num) {\n        if (source1.length - offsetSource1 < num)\n            return false;\n        if (source2.length - offsetSource2 < num)\n            return false;\n\n        for (int i = 0; i < num; i++) {\n            if (source1[offsetSource1 + i] != source2[offsetSource2 + i])\n                return false;\n        }\n        return true;\n    }\n\n    public static byte[] copy(@NonNull byte[] src) {\n        byte[] dest = new byte[src.length];\n        System.arraycopy(src, 0, dest, 0, src.length);\n        return dest;\n    }\n}\n"
  },
  {
    "path": "library-client-rtsp/src/main/java/com/alexvas/utils/MediaCodecUtils.kt",
    "content": "package com.alexvas.utils\n\nimport android.annotation.SuppressLint\nimport android.util.Log\nimport android.util.Range\nimport androidx.annotation.OptIn\nimport androidx.media3.common.util.UnstableApi\nimport androidx.media3.exoplayer.mediacodec.MediaCodecInfo\nimport androidx.media3.exoplayer.mediacodec.MediaCodecUtil\nimport java.lang.Exception\n\n@SuppressLint(\"UnsafeOptInUsageError\")\nobject MediaCodecUtils {\n\n    // key - codecs mime type\n    // value - list of codecs able to handle this mime type\n    private val decoderInfosMap = HashMap<String, List<MediaCodecInfo>>()\n\n    private val TAG: String = MediaCodecUtils::class.java.simpleName\n\n    private fun getDecoderInfos(mimeType: String): List<MediaCodecInfo> {\n        val list = decoderInfosMap[mimeType]\n        return if (list.isNullOrEmpty()) {\n            val decoderInfos = try {\n                MediaCodecUtil.getDecoderInfos(mimeType, false, false)\n            } catch (e: Exception) {\n                Log.e(TAG, \"Failed to initialize '$mimeType' decoders list (${e.message})\", e)\n                ArrayList()\n            }\n            decoderInfosMap[mimeType] = decoderInfos\n            decoderInfos\n        } else {\n            list\n        }\n    }\n\n    /**\n     * Get software decoders list. Usually used as fallback.\n     */\n    @Synchronized\n    fun getSoftwareDecoders(mimeType: String): List<MediaCodecInfo> {\n        val decoderInfos = getDecoderInfos(mimeType)\n        val list = ArrayList<MediaCodecInfo>()\n        for (codec in decoderInfos) {\n            if (codec.softwareOnly)\n                list.add(codec)\n        }\n        return list\n    }\n\n    /**\n     * Get hardware accelerated decoders list. 
Used as default.\n     */\n    @Synchronized\n    fun getHardwareDecoders(mimeType: String): List<MediaCodecInfo> {\n        val decoderInfos = getDecoderInfos(mimeType)\n        val list = ArrayList<MediaCodecInfo>()\n        for (codec in decoderInfos) {\n            if (codec.hardwareAccelerated)\n                list.add(codec)\n        }\n        return list\n    }\n\n    /**\n     * Look through all decoders (if there are multiple)\n     * and select the one which supports low-latency.\n     */\n    @OptIn(UnstableApi::class)\n    fun getLowLatencyDecoder(decoders: List<MediaCodecInfo>): MediaCodecInfo? {\n        // Some devices can have several decoders, e.g.\n        // Samsung Fold 5:\n        //   \"c2.qti.avc.decoder\"\n        //   \"c2.qti.avc.decoder.low_latency\"\n        for (decoder in decoders) {\n            if (decoder.name.contains(\"low_latency\"))\n                return decoder\n        }\n        // Another approach to find decoder with low-latency is to call\n        // MediaCodec.createByCodecName(name) for every decoder to get decoder instance and then call\n        // decoder.codecInfo.getCapabilitiesForType(mimeType).isFeatureSupported(MediaCodecInfo.CodecCapabilities.FEATURE_LowLatency)\n\n        // No low-latency decoder found.\n        return null\n    }\n\n}\n\nfun android.media.MediaCodecInfo.CodecCapabilities.capabilitiesToString(): String {\n    var heights = videoCapabilities?.supportedHeights\n    if (heights == null)\n        heights = Range(-1, -1)\n    var widths = videoCapabilities?.supportedWidths\n    if (widths == null)\n        widths = Range(-1, -1)\n    return \"max instances: ${maxSupportedInstances}, max resolution: ${heights.upper}x${widths.upper}\"\n}\n"
  },
  {
    "path": "library-client-rtsp/src/main/java/com/alexvas/utils/NetUtils.java",
    "content": "package com.alexvas.utils;\n\nimport android.util.Log;\n\nimport androidx.annotation.NonNull;\nimport androidx.annotation.Nullable;\n\nimport java.io.EOFException;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.net.InetSocketAddress;\nimport java.net.Socket;\nimport java.security.cert.CertificateException;\nimport java.security.cert.X509Certificate;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\n\nimport javax.net.ssl.SSLContext;\nimport javax.net.ssl.SSLSocket;\nimport javax.net.ssl.TrustManager;\nimport javax.net.ssl.X509TrustManager;\n\npublic class NetUtils {\n\n    private static final String TAG = NetUtils.class.getSimpleName();\n    private static final boolean DEBUG = false;\n    private final static int MAX_LINE_SIZE = 4098;\n\n    public static final class FakeX509TrustManager implements X509TrustManager {\n\n        /**\n         * Accepted issuers for fake trust manager\n         */\n        final static private X509Certificate[] mAcceptedIssuers = new X509Certificate[]{};\n\n        /**\n         * Constructor for FakeX509TrustManager.\n         */\n        public FakeX509TrustManager() {\n        }\n\n        /**\n         * @see javax.net.ssl.X509TrustManager#checkClientTrusted(X509Certificate[],String authType)\n         */\n        public void checkClientTrusted(X509Certificate[] certificates, String authType)\n        throws CertificateException {\n        }\n\n        /**\n         * @see javax.net.ssl.X509TrustManager#checkServerTrusted(X509Certificate[],String authType)\n         */\n        public void checkServerTrusted(X509Certificate[] certificates, String authType)\n        throws CertificateException {\n        }\n\n        // https://github.com/square/okhttp/issues/4669\n        // Called by Android via reflection in X509TrustManagerExtensions.\n        @SuppressWarnings(\"unused\")\n        public List<X509Certificate> checkServerTrusted(X509Certificate[] chain, 
String authType, String host) throws CertificateException {\n            return Arrays.asList(chain);\n        }\n\n        /**\n         * @see javax.net.ssl.X509TrustManager#getAcceptedIssuers()\n         */\n        public X509Certificate[] getAcceptedIssuers() {\n            return mAcceptedIssuers;\n        }\n    }\n\n    @NonNull\n    public static SSLSocket createSslSocketAndConnect(@NonNull String dstName, int dstPort, int timeout) throws Exception {\n        if (DEBUG)\n            Log.v(TAG, \"createSslSocketAndConnect(dstName=\" + dstName + \", dstPort=\" + dstPort + \", timeout=\" + timeout + \")\");\n\n//        TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());\n//        trustManagerFactory.init((KeyStore) null);\n//        TrustManager[] trustManagers = trustManagerFactory.getTrustManagers();\n//        if (trustManagers.length != 1 || !(trustManagers[0] instanceof X509TrustManager)) {\n//           throw new IllegalStateException(\"Unexpected default trust managers:\" + Arrays.toString(trustManagers));\n//        }\n//        X509TrustManager trustManager = (X509TrustManager) trustManagers[0];\n\n        SSLContext sslContext = SSLContext.getInstance(\"TLS\");\n        sslContext.init(null, new TrustManager[] { new FakeX509TrustManager() }, null);\n        SSLSocket sslSocket = (SSLSocket) sslContext.getSocketFactory().createSocket();\n        sslSocket.connect(new InetSocketAddress(dstName, dstPort), timeout);\n        sslSocket.setSoLinger(false, 1);\n        sslSocket.setSoTimeout(timeout);\n        return sslSocket;\n    }\n\n    @NonNull\n    public static Socket createSocketAndConnect(@NonNull String dstName, int dstPort, int timeout) throws IOException {\n        if (DEBUG)\n            Log.v(TAG, \"createSocketAndConnect(dstName=\" + dstName + \", dstPort=\" + dstPort + \", timeout=\" + timeout + \")\");\n        Socket socket = new Socket();\n        socket.connect(new 
InetSocketAddress(dstName, dstPort), timeout);\n        socket.setSoLinger(false, 1);\n        socket.setSoTimeout(timeout);\n        return socket;\n    }\n\n    @NonNull\n    public static Socket createSocket(int timeout) throws IOException {\n        Socket socket = new Socket();\n        socket.setSoLinger(false, 1);// 1 sec for flush() before close()\n        socket.setSoTimeout(timeout);  // 10 sec timeout for read(), not for write()\n        return socket;\n    }\n\n    public static void closeSocket(@Nullable Socket socket) throws IOException {\n        if (DEBUG)\n            Log.v(TAG, \"closeSocket()\");\n        if (socket != null) {\n            try {\n                socket.shutdownInput();\n            } catch (Exception ignored) {\n            }\n            try {\n                socket.shutdownOutput();\n            } catch (Exception ignored) {\n            }\n            socket.close();\n        }\n    }\n\n    @NonNull\n    public static ArrayList<String> readResponseHeaders(@NonNull InputStream inputStream) throws IOException {\n//        Assert.assertNotNull(\"Input stream should not be null\", inputStream);\n        ArrayList<String> headers = new ArrayList<>();\n        String line;\n        while (true) {\n            line = readLine(inputStream);\n            if (line != null) {\n                if (line.equals(\"\\r\\n\"))\n                    return headers;\n                else\n                    headers.add(line);\n            } else {\n                break;\n            }\n        }\n        return headers;\n    }\n\n    @Nullable\n    public static String readLine(@NonNull InputStream inputStream) throws IOException {\n//        Assert.assertNotNull(\"Input stream should not be null\", inputStream);\n        byte[] bufferLine = new byte[MAX_LINE_SIZE];\n        int offset = 0;\n        int readBytes;\n        do {\n            // Didn't find \"\\r\\n\" within 4K bytes\n            if (offset >= MAX_LINE_SIZE) {\n                
throw new IOException(\"Invalid headers\");\n            }\n\n            // Read 1 byte\n            readBytes = inputStream.read(bufferLine, offset, 1);\n            if (readBytes == 1) {\n                // Check for EOL\n                // Some cameras like Linksys WVC200 do not send \\n instead of \\r\\n\n                if (offset > 0 && /*bufferLine[offset-1] == '\\r' &&*/ bufferLine[offset] == '\\n') {\n                    // Found empty EOL. End of header section\n                    if (offset == 1)\n                        break;\n\n                    // Found EOL. Add to array.\n                    return new String(bufferLine, 0, offset-1);\n                } else {\n                    offset++;\n                }\n            }\n        } while (readBytes > 0);\n        return null;\n    }\n\n    public static int getResponseStatusCode(@NonNull ArrayList<String> headers) {\n//        Assert.assertNotNull(\"Headers should not be null\", headers);\n        // Search for HTTP status code header\n        for (String header: headers) {\n            int indexHttp = header.indexOf(\"HTTP/1.1 \"); // 9 characters\n            if (indexHttp == -1)\n                indexHttp = header.indexOf(\"HTTP/1.0 \");\n            if (indexHttp >= 0) {\n                int indexCode = header.indexOf(' ', 9);\n                String code = header.substring(9, indexCode);\n                try {\n                    return Integer.parseInt(code);\n                } catch (NumberFormatException e) {\n                    // Does not fulfill standard \"HTTP/1.1 200 Ok\" token\n                    // Continue search for\n                }\n            }\n        }\n        // Not found\n        return -1;\n    }\n\n//    @Nullable\n//    static String readContentAsText(@Nullable InputStream inputStream) throws IOException {\n//        if (inputStream == null)\n//            return null;\n//        BufferedReader r = new BufferedReader(new InputStreamReader(inputStream));\n//   
     StringBuilder total = new StringBuilder();\n//        String line;\n//        while ((line = r.readLine()) != null) {\n//            total.append(line);\n//            total.append(\"\\r\\n\");\n//        }\n//        return total.toString();\n//    }\n\n    @NonNull\n    public static String readContentAsText(@NonNull InputStream inputStream, int length) throws IOException {\n//        Assert.assertNotNull(\"Input stream should not be null\", inputStream);\n        if (length <= 0)\n            return \"\";\n        byte[] b = new byte[length];\n        int read = readData(inputStream, b, 0, length);\n        return new String(b, 0, read);\n    }\n\n    public static int readData(@NonNull InputStream inputStream, @NonNull byte[] buffer, int offset, int length) throws IOException {\n        int readBytes;\n        int totalReadBytes = 0;\n        do {\n            readBytes = inputStream.read(buffer, offset + totalReadBytes, length - totalReadBytes);\n            if (readBytes == -1) {\n                throw new EOFException(\"Stream closed, read \" + totalReadBytes + \" of \" + length + \" bytes\");\n            }\n            totalReadBytes += readBytes;\n        } while (readBytes >= 0 && totalReadBytes < length);\n        return totalReadBytes;\n    }\n}\n"
  },
  {
    "path": "library-client-rtsp/src/main/java/com/alexvas/utils/VideoCodecUtils.kt",
    "content": "package com.alexvas.utils\n\nimport android.annotation.SuppressLint\nimport android.util.Log\nimport androidx.media3.container.NalUnitUtil\nimport androidx.media3.container.NalUnitUtil.SpsData\nimport java.util.concurrent.atomic.AtomicInteger\nimport kotlin.experimental.and\n\n\nobject VideoCodecUtils {\n\n    private val TAG = VideoCodecUtils::class.java.simpleName\n\n    /** Max possible NAL SPS size in bytes */\n    const val MAX_NAL_SPS_SIZE:     Int = 500\n\n    const val NAL_SLICE:            Byte = 1\n    const val NAL_DPA:              Byte = 2\n    const val NAL_DPB:              Byte = 3\n    const val NAL_DPC:              Byte = 4\n    const val NAL_IDR_SLICE:        Byte = 5\n    const val NAL_SEI:              Byte = 6\n    const val NAL_SPS:              Byte = 7\n    const val NAL_PPS:              Byte = 8\n    const val NAL_AUD:              Byte = 9\n    const val NAL_END_SEQUENCE:     Byte = 10\n    const val NAL_END_STREAM:       Byte = 11\n    const val NAL_FILLER_DATA:      Byte = 12\n    const val NAL_SPS_EXT:          Byte = 13\n    const val NAL_AUXILIARY_SLICE:  Byte = 19\n    const val NAL_STAP_A:           Byte = 24 // https://tools.ietf.org/html/rfc3984 5.7.1\n    const val NAL_STAP_B:           Byte = 25 // 5.7.1\n    const val NAL_MTAP16:           Byte = 26 // 5.7.2\n    const val NAL_MTAP24:           Byte = 27 // 5.7.2\n    const val NAL_FU_A:             Byte = 28 // 5.8 fragmented unit\n    const val NAL_FU_B:             Byte = 29 // 5.8\n\n    // Table 7-3: NAL unit type codes\n    const val H265_NAL_TRAIL_N:     Byte = 0\n    const val H265_NAL_TRAIL_R:     Byte = 1\n    const val H265_NAL_TSA_N:       Byte = 2\n    const val H265_NAL_TSA_R:       Byte = 3\n    const val H265_NAL_STSA_N:      Byte = 4\n    const val H265_NAL_STSA_R:      Byte = 5\n    const val H265_NAL_RADL_N:      Byte = 6\n    const val H265_NAL_RADL_R:      Byte = 7\n    const val H265_NAL_RASL_N:      Byte = 8\n    const val 
H265_NAL_RASL_R:      Byte = 9\n    const val H265_NAL_BLA_W_LP:    Byte = 16\n    const val H265_NAL_BLA_W_RADL:  Byte = 17\n    const val H265_NAL_BLA_N_LP:    Byte = 18\n    const val H265_NAL_IDR_W_RADL:  Byte = 19\n    const val H265_NAL_IDR_N_LP:    Byte = 20\n    const val H265_NAL_CRA_NUT:     Byte = 21\n    const val H265_NAL_VPS:         Byte = 32\n    const val H265_NAL_SPS:         Byte = 33\n    const val H265_NAL_PPS:         Byte = 34\n    const val H265_NAL_AUD:         Byte = 35\n    const val H265_NAL_EOS_NUT:     Byte = 36\n    const val H265_NAL_EOB_NUT:     Byte = 37\n    const val H265_NAL_FD_NUT:      Byte = 38\n    const val H265_NAL_SEI_PREFIX:  Byte = 39\n    const val H265_NAL_SEI_SUFFIX:  Byte = 40\n\n    private val NAL_PREFIX1 = byteArrayOf(0x00, 0x00, 0x00, 0x01)\n    private val NAL_PREFIX2 = byteArrayOf(0x00, 0x00, 0x01)\n\n\n    /**\n     * Search for 00 00 01 or 00 00 00 01 in byte stream.\n     * @return offset to the start of NAL unit if found, otherwise -1\n     */\n    fun searchForNalUnitStart(\n        data: ByteArray,\n        offset: Int,\n        length: Int,\n        prefixSize: AtomicInteger\n    ): Int {\n        if (offset >= data.size - 3) return -1\n        for (pos in 0 until length) {\n            val prefix: Int = getNalUnitStartCodePrefixSize(data, pos + offset, length)\n            if (prefix >= 0) {\n                prefixSize.set(prefix)\n                return pos + offset\n            }\n        }\n        return -1\n    }\n\n    fun searchForH264NalUnitByType(\n        data: ByteArray,\n        offset: Int,\n        length: Int,\n        byUnitType: Int\n    ): Int {\n        var off = offset\n        val nalUnitPrefixSize = AtomicInteger(-1)\n        val timestamp = System.currentTimeMillis()\n        while (true) {\n            val nalUnitIndex = searchForNalUnitStart(data, off, length, nalUnitPrefixSize)\n            if (nalUnitIndex >= 0) {\n                val nalUnitOffset = nalUnitIndex + 
nalUnitPrefixSize.get()\n                if (nalUnitOffset >= data.size)\n                    break\n                val nalUnitTypeOctet = data[nalUnitOffset]\n                if ((nalUnitTypeOctet and 0x1f).toInt() == byUnitType) {\n                    return nalUnitIndex\n                }\n                off = nalUnitOffset\n\n                // Check that we are not too long here\n                if (System.currentTimeMillis() - timestamp > 100) {\n                    Log.w(TAG, \"Cannot process data within 100 msec in $length bytes\")\n                    break\n                }\n            } else {\n                break\n            }\n        }\n        return -1\n    }\n\n    fun getNalUnitType(data: ByteArray?, offset: Int, length: Int, isH265: Boolean): Byte {\n        if (data == null || length <= NAL_PREFIX1.size) return (-1).toByte()\n        var nalUnitTypeOctetOffset = -1\n        if (data[offset + NAL_PREFIX2.size - 1] == 1.toByte())\n            nalUnitTypeOctetOffset =\n                offset + NAL_PREFIX2.size - 1\n        else if (data[offset + NAL_PREFIX1.size - 1] == 1.toByte())\n            nalUnitTypeOctetOffset = offset + NAL_PREFIX1.size - 1\n\n        return if (nalUnitTypeOctetOffset != -1) {\n            val nalUnitTypeOctet = data[nalUnitTypeOctetOffset + 1]\n            if (isH265)\n                ((nalUnitTypeOctet.toInt() shr 1) and 0x3F).toByte()\n            else\n                (nalUnitTypeOctet and 0x1f)\n        } else {\n            (-1).toByte()\n        }\n    }\n\n    private fun getNalUnitStartCodePrefixSize(\n        data: ByteArray,\n        offset: Int,\n        length: Int\n    ): Int {\n        if (length < 4) return -1\n        return if (memcmp(data, offset, NAL_PREFIX1, 0, NAL_PREFIX1.size))\n            NAL_PREFIX1.size else\n            if (memcmp(data, offset, NAL_PREFIX2, 0, NAL_PREFIX2.size))\n                NAL_PREFIX2.size else\n                -1\n    }\n\n    private fun memcmp(\n        source1: 
ByteArray,\n        offsetSource1: Int,\n        source2: ByteArray,\n        offsetSource2: Int,\n        num: Int\n    ): Boolean {\n        if (source1.size - offsetSource1 < num) return false\n        if (source2.size - offsetSource2 < num) return false\n        for (i in 0 until num) {\n            if (source1[offsetSource1 + i] != source2[offsetSource2 + i]) return false\n        }\n        return true\n    }\n\n    data class NalUnit (val type: Byte, val offset: Int, val length: Int)\n\n\n    fun getNalUnits(\n        data: ByteArray,\n        dataOffset: Int,\n        length: Int,\n        foundNals: ArrayList<NalUnit>,\n        isH265: Boolean\n    ): Int {\n        foundNals.clear()\n        var nalUnits = 0\n        val nextNalOffset = 0\n        val nalUnitPrefixSize = AtomicInteger(-1)\n        val timestamp = System.currentTimeMillis()\n        var offset = dataOffset\n        var stopped = false\n        while (!stopped) {\n\n            // Search for first NAL unit\n            val nalUnitIndex = searchForNalUnitStart(\n                data,\n                offset + nextNalOffset,\n                length - nextNalOffset,\n                nalUnitPrefixSize\n            )\n\n            // NAL unit found\n            if (nalUnitIndex >= 0) {\n                nalUnits++\n                val nalUnitOffset = offset + nextNalOffset + nalUnitPrefixSize.get()\n                val nalUnitTypeOctet = data[nalUnitOffset]\n                val nalUnitType = if (isH265)\n                    ((nalUnitTypeOctet.toInt() shr 1) and 0x3F).toByte()\n                else\n                    (nalUnitTypeOctet and 0x1F)\n\n                // Search for second NAL unit (optional)\n                var nextNalUnitStartIndex = searchForNalUnitStart(\n                    data,\n                    nalUnitOffset,\n                    length - nalUnitOffset,\n                    nalUnitPrefixSize\n                )\n\n                // Second NAL unit not found. 
Use till the end.\n                if (nextNalUnitStartIndex < 0) {\n                    // Not found next NAL unit. Use till the end.\n//                  nextNalUnitStartIndex = length - nextNalOffset + dataOffset;\n                    nextNalUnitStartIndex = length + dataOffset\n                    stopped = true\n                }\n                val l = nextNalUnitStartIndex - offset\n//                if (DEBUG) Log.d(\n//                    TAG,\n//                    \"NAL unit type: \" + getH264NalUnitTypeString(nalUnitType.toInt()) +\n//                            \" (\" + nalUnitType + \") - \" + l + \" bytes, offset \" + offset\n//                )\n                foundNals.add(NalUnit(nalUnitType, offset, l))\n                offset = nextNalUnitStartIndex\n\n                // Check that we are not too long here\n                if (System.currentTimeMillis() - timestamp > 200) {\n                    Log.w(TAG, \"Cannot process data within 200 msec in $length bytes (NALs found: \" + foundNals.size + \")\")\n                    break\n                }\n            } else {\n                stopped = true\n            }\n        }\n        return nalUnits\n    }\n\n    private fun getNalUnitStartLengthFromArray(\n        src: ByteArray, offset: Int, length: Int,\n        isH265: Boolean,\n        nalUnitType: Byte\n    ): Pair<Int, Int>? 
{\n        val nalUnitsFound = ArrayList<NalUnit>()\n        if (getNalUnits(src, offset, length, nalUnitsFound, isH265) > 0) {\n            for (nalUnit in nalUnitsFound) {\n                if (nalUnit.type == nalUnitType) {\n                    val prefixSize = AtomicInteger()\n                    val nalUnitIndex = searchForNalUnitStart(\n                        src,\n                        nalUnit.offset,\n                        nalUnit.length,\n                        prefixSize\n                    )\n                    val nalOffset = nalUnitIndex + prefixSize.get() + 1 /* NAL unit type */\n                    return Pair(nalOffset, nalUnit.length)\n                }\n            }\n        }\n        return null\n    }\n\n    @SuppressLint(\"UnsafeOptInUsageError\")\n    fun getSpsNalUnitFromArray(src: ByteArray, offset: Int, length: Int, isH265: Boolean): SpsData? {\n        val spsStartLength = getNalUnitStartLengthFromArray(src, offset, length, isH265, NAL_SPS)\n        spsStartLength?.let {\n            return NalUnitUtil.parseSpsNalUnitPayload(\n                src, spsStartLength.first, spsStartLength.first + spsStartLength.second)\n        }\n        return null\n    }\n\n    @SuppressLint(\"UnsafeOptInUsageError\")\n    fun getWidthHeightFromArray(src: ByteArray, offset: Int, length: Int, isH265: Boolean): Pair<Int, Int>? 
{\n        val sps = getSpsNalUnitFromArray(src, offset, length, isH265)\n        sps?.let {\n            return Pair(sps.width, sps.height)\n        }\n        return null\n    }\n\n\n//    private fun isH265IRAP(nalUnitType: Byte): Boolean {\n//        return nalUnitType in 16..23\n//    }\n\n    fun isAnyKeyFrame(data: ByteArray?, offset: Int, length: Int, isH265: Boolean): Boolean {\n        if (data == null || length <= 0) return false\n        var currOffset = offset\n\n        val nalUnitPrefixSize = AtomicInteger(-1)\n        val timestamp = System.currentTimeMillis()\n        while (true) {\n            val nalUnitIndex = searchForNalUnitStart(\n                data,\n                currOffset,\n                length,\n                nalUnitPrefixSize\n            )\n\n            if (nalUnitIndex >= 0) {\n                val nalUnitOffset = nalUnitIndex + nalUnitPrefixSize.get()\n                if (nalUnitOffset >= data.size)\n                    return false\n                val nalUnitTypeOctet = data[nalUnitOffset]\n\n                if (isH265) {\n                    val nalUnitType = ((nalUnitTypeOctet.toInt() and 0x7E) shr 1).toByte()\n                    // Treat SEI_PREFIX as key frame.\n                    if (nalUnitType == H265_NAL_IDR_W_RADL || nalUnitType == H265_NAL_IDR_N_LP)\n                        return true\n                } else {\n                    val nalUnitType = (nalUnitTypeOctet.toInt() and 0x1f).toByte()\n                    when (nalUnitType) {\n                        NAL_IDR_SLICE -> return true\n                        NAL_SLICE -> return false\n                    }\n                }\n                // Continue searching\n                currOffset = nalUnitOffset\n\n                // Check that we are not too long here\n                if (System.currentTimeMillis() - timestamp > 100) {\n                    Log.w(TAG, \"Cannot process data within 100 msec in $length bytes (index=$nalUnitIndex)\")\n                
    break\n                }\n            } else {\n                break\n            }\n        }\n\n        return false\n    }\n\n    fun getH264NalUnitTypeString(nalUnitType: Byte): String {\n        return when (nalUnitType) {\n            NAL_SLICE -> \"NAL_SLICE\"\n            NAL_DPA -> \"NAL_DPA\"\n            NAL_DPB -> \"NAL_DPB\"\n            NAL_DPC -> \"NAL_DPC\"\n            NAL_IDR_SLICE -> \"NAL_IDR_SLICE\"\n            NAL_SEI -> \"NAL_SEI\"\n            NAL_SPS -> \"NAL_SPS\"\n            NAL_PPS -> \"NAL_PPS\"\n            NAL_AUD -> \"NAL_AUD\"\n            NAL_END_SEQUENCE -> \"NAL_END_SEQUENCE\"\n            NAL_END_STREAM -> \"NAL_END_STREAM\"\n            NAL_FILLER_DATA -> \"NAL_FILLER_DATA\"\n            NAL_SPS_EXT -> \"NAL_SPS_EXT\"\n            NAL_AUXILIARY_SLICE -> \"NAL_AUXILIARY_SLICE\"\n            NAL_STAP_A -> \"NAL_STAP_A\"\n            NAL_STAP_B -> \"NAL_STAP_B\"\n            NAL_MTAP16 -> \"NAL_MTAP16\"\n            NAL_MTAP24 -> \"NAL_MTAP24\"\n            NAL_FU_A -> \"NAL_FU_A\"\n            NAL_FU_B -> \"NAL_FU_B\"\n            else -> \"unknown - $nalUnitType\"\n        }\n    }\n\n    fun getH265NalUnitTypeString(nalUnitType: Byte): String {\n        return when (nalUnitType) {\n            H265_NAL_TRAIL_N -> \"NAL_TRAIL_N\"\n            H265_NAL_TRAIL_R -> \"NAL_TRAIL_R\"\n            H265_NAL_TSA_N -> \"NAL_TSA_N\"\n            H265_NAL_TSA_R -> \"NAL_TSA_R\"\n            H265_NAL_STSA_N -> \"NAL_STSA_N\"\n            H265_NAL_STSA_R -> \"NAL_STSA_R\"\n            H265_NAL_RADL_N -> \"NAL_RADL_N\"\n            H265_NAL_RADL_R -> \"NAL_RADL_R\"\n            H265_NAL_RASL_N -> \"NAL_RASL_N\"\n            H265_NAL_RASL_R -> \"NAL_RASL_R\"\n            H265_NAL_BLA_W_LP -> \"NAL_BLA_W_LP\"\n            H265_NAL_BLA_W_RADL -> \"NAL_BLA_W_RADL\"\n            H265_NAL_BLA_N_LP -> \"NAL_BLA_N_LP\"\n            H265_NAL_IDR_W_RADL -> \"NAL_IDR_W_RADL\"\n            H265_NAL_IDR_N_LP -> \"NAL_IDR_N_LP\"\n            
H265_NAL_CRA_NUT -> \"NAL_CRA_NUT\"\n            H265_NAL_VPS -> \"NAL_VPS\"\n            H265_NAL_SPS -> \"NAL_SPS\"\n            H265_NAL_PPS -> \"NAL_PPS\"\n            H265_NAL_AUD -> \"NAL_AUD\"\n            H265_NAL_EOS_NUT -> \"NAL_EOS_NUT\"\n            H265_NAL_EOB_NUT -> \"NAL_EOB_NUT\"\n            H265_NAL_FD_NUT -> \"NAL_FD_NUT\"\n            H265_NAL_SEI_PREFIX -> \"NAL_SEI_PREFIX\"\n            H265_NAL_SEI_SUFFIX -> \"NAL_SEI_SUFFIX\"\n            else -> \"unknown - $nalUnitType\"\n        }\n    }\n\n}\n"
  },
  {
    "path": "library-client-rtsp/src/main/java/com/limelight/binding/video/MediaCodecHelper.java",
    "content": "package com.limelight.binding.video;\n\nimport java.io.BufferedReader;\nimport java.io.File;\nimport java.io.FileReader;\nimport java.util.Collections;\nimport java.util.LinkedList;\nimport java.util.List;\nimport java.util.Locale;\nimport java.util.regex.Matcher;\nimport java.util.regex.Pattern;\n\nimport android.annotation.SuppressLint;\nimport android.app.ActivityManager;\nimport android.content.Context;\nimport android.content.pm.ConfigurationInfo;\nimport android.media.MediaCodec;\nimport android.media.MediaCodecInfo;\nimport android.media.MediaCodecList;\nimport android.media.MediaCodecInfo.CodecCapabilities;\nimport android.media.MediaCodecInfo.CodecProfileLevel;\nimport android.media.MediaFormat;\nimport android.os.Build;\nimport android.util.Log;\n\n// Based on https://github.com/moonlight-stream/moonlight-android/blob/master/app/src/main/java/com/limelight/binding/video/MediaCodecHelper.java\npublic class MediaCodecHelper {\n\n    private static final String TAG = MediaCodecHelper.class.getSimpleName();\n\n    private static final List<String> preferredDecoders;\n\n    private static final List<String> blacklistedDecoderPrefixes;\n    private static final List<String> spsFixupBitstreamFixupDecoderPrefixes;\n    private static final List<String> blacklistedAdaptivePlaybackPrefixes;\n    private static final List<String> baselineProfileHackPrefixes;\n    private static final List<String> directSubmitPrefixes;\n    private static final List<String> constrainedHighProfilePrefixes;\n    private static final List<String> whitelistedHevcDecoders;\n    private static final List<String> refFrameInvalidationAvcPrefixes;\n    private static final List<String> refFrameInvalidationHevcPrefixes;\n    private static final List<String> useFourSlicesPrefixes;\n    private static final List<String> qualcommDecoderPrefixes;\n    private static final List<String> kirinDecoderPrefixes;\n    private static final List<String> exynosDecoderPrefixes;\n    private 
static final List<String> amlogicDecoderPrefixes;\n    private static final List<String> knownVendorLowLatencyOptions;\n\n    public static final boolean SHOULD_BYPASS_SOFTWARE_BLOCK =\n            Build.HARDWARE.equals(\"ranchu\") || Build.HARDWARE.equals(\"cheets\") || Build.BRAND.equals(\"Android-x86\");\n\n    private static boolean isLowEndSnapdragon = false;\n    private static boolean isAdreno620 = false;\n    private static boolean initialized = false;\n\n    static {\n        directSubmitPrefixes = new LinkedList<>();\n\n        // These decoders have low enough input buffer latency that they\n        // can be directly invoked from the receive thread\n        directSubmitPrefixes.add(\"omx.qcom\");\n        directSubmitPrefixes.add(\"omx.sec\");\n        directSubmitPrefixes.add(\"omx.exynos\");\n        directSubmitPrefixes.add(\"omx.intel\");\n        directSubmitPrefixes.add(\"omx.brcm\");\n        directSubmitPrefixes.add(\"omx.TI\");\n        directSubmitPrefixes.add(\"omx.arc\");\n        directSubmitPrefixes.add(\"omx.nvidia\");\n\n        // All Codec2 decoders\n        directSubmitPrefixes.add(\"c2.\");\n    }\n\n    static {\n        refFrameInvalidationAvcPrefixes = new LinkedList<>();\n\n        refFrameInvalidationHevcPrefixes = new LinkedList<>();\n        refFrameInvalidationHevcPrefixes.add(\"omx.exynos\");\n        refFrameInvalidationHevcPrefixes.add(\"c2.exynos\");\n\n        // Qualcomm and NVIDIA may be added at runtime\n    }\n\n    static {\n        preferredDecoders = new LinkedList<>();\n    }\n\n    static {\n        blacklistedDecoderPrefixes = new LinkedList<>();\n\n        // Blacklist software decoders that don't support H264 high profile except on systems\n        // that are expected to only have software decoders (like emulators).\n        if (!SHOULD_BYPASS_SOFTWARE_BLOCK) {\n            blacklistedDecoderPrefixes.add(\"omx.google\");\n            blacklistedDecoderPrefixes.add(\"AVCDecoder\");\n\n            // We want 
to avoid ffmpeg decoders since they're usually software decoders,\n            // but we'll defer to the Android 10 isSoftwareOnly() API on newer devices\n            // to determine if we should use these or not.\n            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {\n                blacklistedDecoderPrefixes.add(\"OMX.ffmpeg\");\n            }\n        }\n\n        // Force these decoders disabled because:\n        // 1) They are software decoders, so the performance is terrible\n        // 2) They crash with our HEVC stream anyway (at least prior to CSD batching)\n        blacklistedDecoderPrefixes.add(\"OMX.qcom.video.decoder.hevcswvdec\");\n        blacklistedDecoderPrefixes.add(\"OMX.SEC.hevc.sw.dec\");\n    }\n\n    static {\n        // If a decoder qualifies for reference frame invalidation,\n        // these entries will be ignored for those decoders.\n        spsFixupBitstreamFixupDecoderPrefixes = new LinkedList<>();\n        spsFixupBitstreamFixupDecoderPrefixes.add(\"omx.nvidia\");\n        spsFixupBitstreamFixupDecoderPrefixes.add(\"omx.qcom\");\n        spsFixupBitstreamFixupDecoderPrefixes.add(\"omx.brcm\");\n\n        baselineProfileHackPrefixes = new LinkedList<>();\n        baselineProfileHackPrefixes.add(\"omx.intel\");\n\n        blacklistedAdaptivePlaybackPrefixes = new LinkedList<>();\n        // The Intel decoder on Lollipop on Nexus Player would increase latency badly\n        // if adaptive playback was enabled so let's avoid it to be safe.\n        blacklistedAdaptivePlaybackPrefixes.add(\"omx.intel\");\n        // The MediaTek decoder crashes at 1080p when adaptive playback is enabled\n        // on some Android TV devices with HEVC only.\n        blacklistedAdaptivePlaybackPrefixes.add(\"omx.mtk\");\n\n        constrainedHighProfilePrefixes = new LinkedList<>();\n        constrainedHighProfilePrefixes.add(\"omx.intel\");\n    }\n\n    static {\n        whitelistedHevcDecoders = new LinkedList<>();\n\n        // Allow 
software HEVC decoding in the official AOSP emulator\n        if (Build.HARDWARE.equals(\"ranchu\")) {\n            whitelistedHevcDecoders.add(\"omx.google\");\n        }\n\n        // Exynos seems to be the only HEVC decoder that works reliably\n        whitelistedHevcDecoders.add(\"omx.exynos\");\n\n        // On Darcy (Shield 2017), HEVC runs fine with no fixups required. For some reason,\n        // other X1 implementations require bitstream fixups. However, since numReferenceFrames\n        // has been supported in GFE since late 2017, we'll go ahead and enable HEVC for all\n        // device models.\n        //\n        // NVIDIA does partial HEVC acceleration on the Shield Tablet. I don't know\n        // whether the performance is good enough to use for streaming, but they're\n        // using the same omx.nvidia.h265.decode name as the Shield TV which has a\n        // fully accelerated HEVC pipeline. AFAIK, the only K1 devices with this\n        // partially accelerated HEVC decoder are the Shield Tablet and Xiaomi MiPad,\n        // so I'll check for those here.\n        //\n        // In case there are some that I missed, I will also exclude pre-Oreo OSes since\n        // only Shield ATV got an Oreo update and any newer Tegra devices will not ship\n        // with an old OS like Nougat.\n        if (!Build.DEVICE.equalsIgnoreCase(\"shieldtablet\") &&\n                !Build.DEVICE.equalsIgnoreCase(\"mocha\") &&\n                Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {\n            whitelistedHevcDecoders.add(\"omx.nvidia\");\n        }\n\n        // Plot twist: On newer Sony devices (BRAVIA_ATV2, BRAVIA_ATV3_4K, BRAVIA_UR1_4K) the H.264 decoder crashes\n        // on several configurations (> 60 FPS and 1440p) that work with HEVC, so we'll whitelist those devices for HEVC.\n        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O && Build.DEVICE.startsWith(\"BRAVIA_\")) {\n            whitelistedHevcDecoders.add(\"omx.mtk\");\n        }\n\n 
       // Amlogic requires 1 reference frame for HEVC to avoid hanging. Since it's been years\n        // since GFE added support for maxNumReferenceFrames, we'll just enable all Amlogic SoCs\n        // running Android 9 or later.\n        //\n        // NB: We don't do this on Sabrina (GCWGTV) because H.264 is lower latency when we use\n        // vendor.low-latency.enable. We will still use HEVC if decoderCanMeetPerformancePointWithHevcAndNotAvc()\n        // determines it's the only way to meet the performance requirements.\n        //\n        // With the Android 12 update, Sabrina now uses HEVC (with RFI) based upon FEATURE_LowLatency\n        // support, which provides equivalent latency to H.264 now.\n        //\n        // FIXME: Should we do this for all Amlogic S905X SoCs?\n        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P && !Build.DEVICE.equalsIgnoreCase(\"sabrina\")) {\n            whitelistedHevcDecoders.add(\"omx.amlogic\");\n        }\n\n        // Realtek SoCs are used inside many Android TV devices and can only do 4K60 with HEVC.\n        // We'll enable those HEVC decoders by default and see if anything breaks.\n        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) {\n            whitelistedHevcDecoders.add(\"omx.realtek\");\n        }\n\n        // These theoretically have good HEVC decoding capabilities (potentially better than\n        // their AVC decoders), but haven't been tested enough\n        //whitelistedHevcDecoders.add(\"omx.rk\");\n\n        // Let's see if HEVC decoders are finally stable with C2\n        whitelistedHevcDecoders.add(\"c2.\");\n\n        // Based on GPU attributes queried at runtime, the omx.qcom/c2.qti prefix will be added\n        // during initialization to avoid SoCs with broken HEVC decoders.\n    }\n\n    static {\n        useFourSlicesPrefixes = new LinkedList<>();\n\n        // Software decoders will use 4 slices per frame to allow for slice multithreading\n        
useFourSlicesPrefixes.add(\"omx.google\");\n        useFourSlicesPrefixes.add(\"AVCDecoder\");\n        useFourSlicesPrefixes.add(\"omx.ffmpeg\");\n        useFourSlicesPrefixes.add(\"c2.android\");\n\n        // Old Qualcomm decoders are detected at runtime\n    }\n\n    static {\n        knownVendorLowLatencyOptions = new LinkedList<>();\n\n        knownVendorLowLatencyOptions.add(\"vendor.qti-ext-dec-low-latency.enable\");\n        knownVendorLowLatencyOptions.add(\"vendor.hisi-ext-low-latency-video-dec.video-scene-for-low-latency-req\");\n        knownVendorLowLatencyOptions.add(\"vendor.rtc-ext-dec-low-latency.enable\");\n        knownVendorLowLatencyOptions.add(\"vendor.low-latency.enable\");\n    }\n\n    static {\n        qualcommDecoderPrefixes = new LinkedList<>();\n\n        qualcommDecoderPrefixes.add(\"omx.qcom\");\n        qualcommDecoderPrefixes.add(\"c2.qti\");\n    }\n\n    static {\n        kirinDecoderPrefixes = new LinkedList<>();\n\n        kirinDecoderPrefixes.add(\"omx.hisi\");\n        kirinDecoderPrefixes.add(\"c2.hisi\"); // Unconfirmed\n    }\n\n    static {\n        exynosDecoderPrefixes = new LinkedList<>();\n\n        exynosDecoderPrefixes.add(\"omx.exynos\");\n        exynosDecoderPrefixes.add(\"c2.exynos\");\n    }\n\n    static {\n        amlogicDecoderPrefixes = new LinkedList<>();\n\n        amlogicDecoderPrefixes.add(\"omx.amlogic\");\n        amlogicDecoderPrefixes.add(\"c2.amlogic\"); // Unconfirmed\n    }\n\n    private static boolean isPowerVR(String glRenderer) {\n        return glRenderer.toLowerCase().contains(\"powervr\");\n    }\n\n    private static String getAdrenoVersionString(String glRenderer) {\n        glRenderer = glRenderer.toLowerCase().trim();\n\n        if (!glRenderer.contains(\"adreno\")) {\n            return null;\n        }\n\n        Pattern modelNumberPattern = Pattern.compile(\"(.*)([0-9]{3})(.*)\");\n\n        Matcher matcher = modelNumberPattern.matcher(glRenderer);\n        if (!matcher.matches()) 
{\n            return null;\n        }\n\n        String modelNumber = matcher.group(2);\n        Log.i(TAG, \"Found Adreno GPU: \"+modelNumber);\n        return modelNumber;\n    }\n\n    private static boolean isLowEndSnapdragonRenderer(String glRenderer) {\n        String modelNumber = getAdrenoVersionString(glRenderer);\n        if (modelNumber == null) {\n            // Not an Adreno GPU\n            return false;\n        }\n\n        // The current logic is to identify low-end SoCs based on a zero in the x0x place.\n        return modelNumber.charAt(1) == '0';\n    }\n\n    private static int getAdrenoRendererModelNumber(String glRenderer) {\n        String modelNumber = getAdrenoVersionString(glRenderer);\n        if (modelNumber == null) {\n            // Not an Adreno GPU\n            return -1;\n        }\n\n        return Integer.parseInt(modelNumber);\n    }\n\n    // This is a workaround for some broken devices that report\n    // only GLES 3.0 even though the GPU is an Adreno 4xx series part.\n    // An example of such a device is the Huawei Honor 5x with the\n    // Snapdragon 616 SoC (Adreno 405).\n    private static boolean isGLES31SnapdragonRenderer(String glRenderer) {\n        // Snapdragon 4xx and higher support GLES 3.1\n        return getAdrenoRendererModelNumber(glRenderer) >= 400;\n    }\n\n    public static void initialize(Context context, String glRenderer) {\n        if (initialized) {\n            return;\n        }\n\n        // Older Sony ATVs (SVP-DTV15) have broken MediaTek codecs (decoder hangs after rendering the first frame).\n        // I know the Fire TV 2 and 3 works, so I'll whitelist Amazon devices which seem to actually be tested.\n        // We still have to check Build.MANUFACTURER to catch Amazon Fire tablets.\n        if (context.getPackageManager().hasSystemFeature(\"amazon.hardware.fire_tv\") ||\n                Build.MANUFACTURER.equalsIgnoreCase(\"Amazon\")) {\n            // HEVC and RFI have been confirmed 
working on Fire TV 2, Fire TV Stick 2, Fire TV 4K Max,\n            // Fire HD 8 2020, and Fire HD 8 2022 models.\n            //\n            // This is probably a good enough sample to conclude that all MediaTek Fire OS devices\n            // are likely to be okay.\n            whitelistedHevcDecoders.add(\"omx.mtk\");\n            refFrameInvalidationHevcPrefixes.add(\"omx.mtk\");\n            refFrameInvalidationHevcPrefixes.add(\"c2.mtk\");\n\n            // This requires setting vdec-lowlatency on the Fire TV 3, otherwise the decoder\n            // never produces any output frames. See comment above for details on why we only\n            // do this for Fire TV devices.\n            whitelistedHevcDecoders.add(\"omx.amlogic\");\n\n            // Fire TV 3 seems to produce random artifacts on HEVC streams after packet loss.\n            // Enabling RFI turns these artifacts into full decoder output hangs, so let's not enable\n            // that for Fire OS 6 Amlogic devices. We will leave HEVC enabled because that's the only\n            // way these devices can hit 4K. Hopefully this is just a problem with the BSP used in\n            // the Fire OS 6 Amlogic devices, so we will leave this enabled for Fire OS 7+.\n            //\n            // Apart from a few TV models, the main Amlogic-based Fire TV devices are the Fire TV\n            // Cubes and Fire TV 3. 
This check will exclude the Fire TV 3 and Fire TV Cube 1, but\n            // allow the newer Fire TV Cubes to use HEVC RFI.\n            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {\n                refFrameInvalidationHevcPrefixes.add(\"omx.amlogic\");\n                refFrameInvalidationHevcPrefixes.add(\"c2.amlogic\");\n            }\n        }\n\n        ActivityManager activityManager =\n                (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);\n        ConfigurationInfo configInfo = activityManager.getDeviceConfigurationInfo();\n        if (configInfo.reqGlEsVersion != ConfigurationInfo.GL_ES_VERSION_UNDEFINED) {\n            Log.i(TAG, \"OpenGL ES version: \"+configInfo.reqGlEsVersion);\n\n            isLowEndSnapdragon = isLowEndSnapdragonRenderer(glRenderer);\n            isAdreno620 = getAdrenoRendererModelNumber(glRenderer) == 620;\n\n            // Tegra K1 and later can do reference frame invalidation properly\n            if (configInfo.reqGlEsVersion >= 0x30000) {\n                Log.i(TAG, \"Added omx.nvidia/c2.nvidia to reference frame invalidation support list\");\n                refFrameInvalidationAvcPrefixes.add(\"omx.nvidia\");\n\n                // Exclude HEVC RFI on Pixel C and Tegra devices prior to Android 11. 
Misbehaving RFI\n                // on these devices can cause hundreds of milliseconds of latency, so it's not worth\n                // using it unless we're absolutely sure that it will not cause increased latency.\n                if (!Build.DEVICE.equalsIgnoreCase(\"dragon\") && Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {\n                    refFrameInvalidationHevcPrefixes.add(\"omx.nvidia\");\n                }\n\n                refFrameInvalidationAvcPrefixes.add(\"c2.nvidia\"); // Unconfirmed\n                refFrameInvalidationHevcPrefixes.add(\"c2.nvidia\"); // Unconfirmed\n\n                Log.i(TAG, \"Added omx.qcom/c2.qti to reference frame invalidation support list\");\n                refFrameInvalidationAvcPrefixes.add(\"omx.qcom\");\n                refFrameInvalidationHevcPrefixes.add(\"omx.qcom\");\n                refFrameInvalidationAvcPrefixes.add(\"c2.qti\");\n                refFrameInvalidationHevcPrefixes.add(\"c2.qti\");\n            }\n\n            // Qualcomm's early HEVC decoders break hard on our HEVC stream. 
The best check to\n            // tell the good from the bad decoders are the generation of Adreno GPU included:\n            // 3xx - bad\n            // 4xx - good\n            //\n            // The \"good\" GPUs support GLES 3.1, but we can't just check that directly\n            // (see comment on isGLES31SnapdragonRenderer).\n            //\n            if (isGLES31SnapdragonRenderer(glRenderer)) {\n                Log.i(TAG, \"Added omx.qcom/c2.qti to HEVC decoders based on GLES 3.1+ support\");\n                whitelistedHevcDecoders.add(\"omx.qcom\");\n                whitelistedHevcDecoders.add(\"c2.qti\");\n            }\n            else {\n                blacklistedDecoderPrefixes.add(\"OMX.qcom.video.decoder.hevc\");\n\n                // These older decoders need 4 slices per frame for best performance\n                useFourSlicesPrefixes.add(\"omx.qcom\");\n            }\n\n            // Older MediaTek SoCs have issues with HEVC rendering but the newer chips with\n            // PowerVR GPUs have good HEVC support.\n            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N && isPowerVR(glRenderer)) {\n                Log.i(TAG, \"Added omx.mtk to HEVC decoders based on PowerVR GPU\");\n                whitelistedHevcDecoders.add(\"omx.mtk\");\n\n                // This SoC (MT8176 in GPD XD+) supports AVC RFI too, but the maxNumReferenceFrames setting\n                // required to make it work adds a huge amount of latency. 
However, RFI on HEVC causes\n                // decoder hangs on the newer GE8100, GE8300, and GE8320 GPUs, so we limit it to the\n                // Series6XT GPUs where we know it works.\n                if (glRenderer.contains(\"GX6\")) {\n                    Log.i(TAG, \"Added omx.mtk/c2.mtk to RFI list for HEVC\");\n                    refFrameInvalidationHevcPrefixes.add(\"omx.mtk\");\n                    refFrameInvalidationHevcPrefixes.add(\"c2.mtk\");\n                }\n            }\n        }\n\n        initialized = true;\n    }\n\n    private static boolean isDecoderInList(List<String> decoderList, String decoderName) {\n        if (!initialized) {\n            throw new IllegalStateException(\"MediaCodecHelper must be initialized before use\");\n        }\n\n        for (String badPrefix : decoderList) {\n            if (decoderName.length() >= badPrefix.length()) {\n                String prefix = decoderName.substring(0, badPrefix.length());\n                if (prefix.equalsIgnoreCase(badPrefix)) {\n                    return true;\n                }\n            }\n        }\n\n        return false;\n    }\n\n    private static boolean decoderSupportsAndroidRLowLatency(MediaCodecInfo decoderInfo, String mimeType) {\n        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {\n            try {\n                if (decoderInfo.getCapabilitiesForType(mimeType).isFeatureSupported(CodecCapabilities.FEATURE_LowLatency)) {\n                    Log.i(TAG, \"Low latency decoding mode supported (FEATURE_LowLatency)\");\n                    return true;\n                }\n            } catch (Exception e) {\n                // Tolerate buggy codecs\n                e.printStackTrace();\n            }\n        }\n\n        return false;\n    }\n\n    private static boolean decoderSupportsKnownVendorLowLatencyOption(String decoderName) {\n        // It's only possible to probe vendor parameters on Android 12 and above.\n        if (Build.VERSION.SDK_INT 
>= Build.VERSION_CODES.S) {\n            MediaCodec testCodec = null;\n            try {\n                // Unfortunately we have to create an actual codec instance to get supported options.\n                testCodec = MediaCodec.createByCodecName(decoderName);\n\n                // See if any of the vendor parameters match ones we know about\n                for (String supportedOption : testCodec.getSupportedVendorParameters()) {\n                    for (String knownLowLatencyOption : knownVendorLowLatencyOptions) {\n                        if (supportedOption.equalsIgnoreCase(knownLowLatencyOption)) {\n                            Log.i(TAG, decoderName + \" supports known low latency option: \" + supportedOption);\n                            return true;\n                        }\n                    }\n                }\n            } catch (Exception e) {\n                // Tolerate buggy codecs\n                e.printStackTrace();\n            } finally {\n                if (testCodec != null) {\n                    testCodec.release();\n                }\n            }\n        }\n        return false;\n    }\n\n    private static boolean decoderSupportsMaxOperatingRate(String decoderName) {\n        // Operate at maximum rate to lower latency as much as possible on\n        // some Qualcomm platforms. We could also set KEY_PRIORITY to 0 (realtime)\n        // but that will actually result in the decoder crashing if it can't satisfy\n        // our (ludicrous) operating rate requirement. 
This seems to cause reliable\n        // crashes on the Xiaomi Mi 10 lite 5G and Redmi K30i 5G on Android 10, so\n        // we'll disable it on Snapdragon 765G and all non-Qualcomm devices to be safe.\n        //\n        // NB: Even on Android 10, this optimization still provides significant\n        // performance gains on Pixel 2.\n        return Build.VERSION.SDK_INT >= Build.VERSION_CODES.M &&\n                isDecoderInList(qualcommDecoderPrefixes, decoderName) &&\n                !isAdreno620;\n    }\n\n    public static boolean setDecoderLowLatencyOptions(MediaFormat videoFormat, MediaCodecInfo decoderInfo, int tryNumber) {\n        // Options here should be tried in the order of most to least risky. The decoder will use\n        // the first MediaFormat that doesn't fail in configure().\n\n        boolean setNewOption = false;\n\n        if (tryNumber < 1) {\n            // Official Android 11+ low latency option (KEY_LOW_LATENCY).\n            videoFormat.setInteger(\"low-latency\", 1);\n            setNewOption = true;\n\n            // If this decoder officially supports FEATURE_LowLatency, we will just use that alone\n            // for try 0. Otherwise, we'll include it as best effort with other options.\n            if (decoderSupportsAndroidRLowLatency(decoderInfo, videoFormat.getString(MediaFormat.KEY_MIME))) {\n                return true;\n            }\n        }\n\n        if (tryNumber < 2 &&\n                (!Build.MANUFACTURER.equalsIgnoreCase(\"xiaomi\") || Build.VERSION.SDK_INT > Build.VERSION_CODES.M)) {\n            // MediaTek decoders don't use vendor-defined keys for low latency mode. Instead, they have a modified\n            // version of AOSP's ACodec.cpp which supports the \"vdec-lowlatency\" option. This option is passed down\n            // to the decoder as OMX.MTK.index.param.video.LowLatencyDecode.\n            //\n            // This option is also plumbed for Amazon Amlogic-based devices like the Fire TV 3. 
Not only does it\n            // reduce latency on Amlogic, it fixes the HEVC bug that causes the decoder to not output any frames.\n            // Unfortunately, it does the exact opposite for the Xiaomi MITV4-ANSM0, breaking it in the way that\n            // Fire TV was broken prior to vdec-lowlatency :(\n            //\n            // On Fire TV 3, vdec-lowlatency is translated to OMX.amazon.fireos.index.video.lowLatencyDecode.\n            //\n            // https://github.com/yuan1617/Framwork/blob/master/frameworks/av/media/libstagefright/ACodec.cpp\n            // https://github.com/iykex/vendor_mediatek_proprietary_hardware/blob/master/libomx/video/MtkOmxVdecEx/MtkOmxVdecEx.h\n            videoFormat.setInteger(\"vdec-lowlatency\", 1);\n            setNewOption = true;\n        }\n\n        if (tryNumber < 3) {\n            if (MediaCodecHelper.decoderSupportsMaxOperatingRate(decoderInfo.getName())) {\n                videoFormat.setInteger(MediaFormat.KEY_OPERATING_RATE, Short.MAX_VALUE);\n                setNewOption = true;\n            }\n            else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {\n                videoFormat.setInteger(MediaFormat.KEY_PRIORITY, 0);\n                setNewOption = true;\n            }\n        }\n\n        // MediaCodec supports vendor-defined format keys using the \"vendor.<extension name>.<parameter name>\" syntax.\n        // These allow access to functionality that is not exposed through documented MediaFormat.KEY_* values.\n        // https://cs.android.com/android/platform/superproject/+/master:hardware/qcom/sdm845/media/mm-video-v4l2/vidc/common/inc/vidc_vendor_extensions.h;l=67\n        //\n        // MediaCodec vendor extension support was introduced in Android 8.0:\n        // https://cs.android.com/android/_/android/platform/frameworks/av/+/01c10f8cdcd58d1e7025f426a72e6e75ba5d7fc2\n        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {\n            // Try vendor-specific low latency 
options\n            //\n            // NOTE: Update knownVendorLowLatencyOptions if you modify this code!\n            if (isDecoderInList(qualcommDecoderPrefixes, decoderInfo.getName())) {\n                // Examples of Qualcomm's vendor extensions for Snapdragon 845:\n                // https://cs.android.com/android/platform/superproject/+/master:hardware/qcom/sdm845/media/mm-video-v4l2/vidc/vdec/src/omx_vdec_extensions.hpp\n                // https://cs.android.com/android/_/android/platform/hardware/qcom/sm8150/media/+/0621ceb1c1b19564999db8293574a0e12952ff6c\n                //\n                // We will first try both, then try vendor.qti-ext-dec-low-latency.enable alone if that fails\n                if (tryNumber < 4) {\n                    videoFormat.setInteger(\"vendor.qti-ext-dec-picture-order.enable\", 1);\n                    setNewOption = true;\n                }\n                if (tryNumber < 5) {\n                    videoFormat.setInteger(\"vendor.qti-ext-dec-low-latency.enable\", 1);\n                    setNewOption = true;\n                }\n            }\n            else if (isDecoderInList(kirinDecoderPrefixes, decoderInfo.getName())) {\n                if (tryNumber < 4) {\n                    // Kirin low latency options\n                    // https://developer.huawei.com/consumer/cn/forum/topic/0202325564295980115\n                    videoFormat.setInteger(\"vendor.hisi-ext-low-latency-video-dec.video-scene-for-low-latency-req\", 1);\n                    videoFormat.setInteger(\"vendor.hisi-ext-low-latency-video-dec.video-scene-for-low-latency-rdy\", -1);\n                    setNewOption = true;\n                }\n            }\n            else if (isDecoderInList(exynosDecoderPrefixes, decoderInfo.getName())) {\n                if (tryNumber < 4) {\n                    // Exynos low latency option for H.264 decoder\n                    videoFormat.setInteger(\"vendor.rtc-ext-dec-low-latency.enable\", 1);\n                    
setNewOption = true;\n                }\n            }\n            else if (isDecoderInList(amlogicDecoderPrefixes, decoderInfo.getName())) {\n                if (tryNumber < 4) {\n                    // Amlogic low latency vendor extension\n                    // https://github.com/codewalkerster/android_vendor_amlogic_common_prebuilt_libstagefrighthw/commit/41fefc4e035c476d58491324a5fe7666bfc2989e\n                    videoFormat.setInteger(\"vendor.low-latency.enable\", 1);\n                    setNewOption = true;\n                }\n            }\n        }\n\n        return setNewOption;\n    }\n\n    public static boolean decoderSupportsFusedIdrFrame(MediaCodecInfo decoderInfo, String mimeType) {\n        // If adaptive playback is supported, we can submit new CSD together with a keyframe\n        try {\n            if (decoderInfo.getCapabilitiesForType(mimeType).\n                    isFeatureSupported(CodecCapabilities.FEATURE_AdaptivePlayback)) {\n                Log.i(TAG, \"Decoder supports fused IDR frames (FEATURE_AdaptivePlayback)\");\n                return true;\n            }\n        } catch (Exception e) {\n            // Tolerate buggy codecs\n            e.printStackTrace();\n        }\n\n        return false;\n    }\n\n    public static boolean decoderSupportsAdaptivePlayback(MediaCodecInfo decoderInfo, String mimeType) {\n        if (isDecoderInList(blacklistedAdaptivePlaybackPrefixes, decoderInfo.getName())) {\n            Log.i(TAG, \"Decoder blacklisted for adaptive playback\");\n            return false;\n        }\n\n        try {\n            if (decoderInfo.getCapabilitiesForType(mimeType).\n                    isFeatureSupported(CodecCapabilities.FEATURE_AdaptivePlayback))\n            {\n                // This will make getCapabilities() return that adaptive playback is supported\n                Log.i(TAG, \"Adaptive playback supported (FEATURE_AdaptivePlayback)\");\n                return true;\n            }\n        } catch 
(Exception e) {\n            // Tolerate buggy codecs\n            e.printStackTrace();\n        }\n\n        return false;\n    }\n\n    public static boolean decoderNeedsConstrainedHighProfile(String decoderName) {\n        return isDecoderInList(constrainedHighProfilePrefixes, decoderName);\n    }\n\n    public static boolean decoderCanDirectSubmit(String decoderName) {\n        return isDecoderInList(directSubmitPrefixes, decoderName) && !isExynos4Device();\n    }\n\n    public static boolean decoderNeedsSpsBitstreamRestrictions(String decoderName) {\n        return isDecoderInList(spsFixupBitstreamFixupDecoderPrefixes, decoderName);\n    }\n\n    public static boolean decoderNeedsBaselineSpsHack(String decoderName) {\n        return isDecoderInList(baselineProfileHackPrefixes, decoderName);\n    }\n\n    public static byte getDecoderOptimalSlicesPerFrame(String decoderName) {\n        if (isDecoderInList(useFourSlicesPrefixes, decoderName)) {\n            // 4 slices per frame reduces decoding latency on older Qualcomm devices\n            return 4;\n        }\n        else {\n            // 1 slice per frame produces the optimal encoding efficiency\n            return 1;\n        }\n    }\n\n    public static boolean decoderSupportsRefFrameInvalidationAvc(String decoderName, int videoHeight) {\n        // Reference frame invalidation is broken on low-end Snapdragon SoCs at 1080p.\n        if (videoHeight > 720 && isLowEndSnapdragon) {\n            return false;\n        }\n\n        // This device seems to crash constantly at 720p, so try disabling\n        // RFI to see if we can get that under control.\n        if (Build.DEVICE.equals(\"b3\") || Build.DEVICE.equals(\"b5\")) {\n            return false;\n        }\n\n        return isDecoderInList(refFrameInvalidationAvcPrefixes, decoderName);\n    }\n\n    public static boolean decoderSupportsRefFrameInvalidationHevc(MediaCodecInfo decoderInfo) {\n        // HEVC decoders seem to universally support RFI, 
but it can have huge latency penalties\n        // for some decoders due to the number of references frames being > 1. Old Amlogic\n        // decoders are known to have this problem.\n        //\n        // If the decoder supports FEATURE_LowLatency or any vendor low latency option,\n        // we will use that as an indication that it can handle HEVC RFI without excessively\n        // buffering frames.\n        if (decoderSupportsAndroidRLowLatency(decoderInfo, \"video/hevc\") ||\n                decoderSupportsKnownVendorLowLatencyOption(decoderInfo.getName())) {\n            Log.i(TAG, \"Enabling HEVC RFI based on low latency option support\");\n            return true;\n        }\n\n        return isDecoderInList(refFrameInvalidationHevcPrefixes, decoderInfo.getName());\n    }\n\n    public static boolean decoderSupportsRefFrameInvalidationAv1(MediaCodecInfo decoderInfo) {\n        // We'll use the same heuristics as HEVC for now\n        if (decoderSupportsAndroidRLowLatency(decoderInfo, \"video/av01\") ||\n                decoderSupportsKnownVendorLowLatencyOption(decoderInfo.getName())) {\n            Log.i(TAG, \"Enabling AV1 RFI based on low latency option support\");\n            return true;\n        }\n\n        return false;\n    }\n\n    public static boolean decoderIsWhitelistedForHevc(MediaCodecInfo decoderInfo) {\n        //\n        // Software decoders are terrible and we never want to use them.\n        // We want to catch decoders like:\n        // OMX.qcom.video.decoder.hevcswvdec\n        // OMX.SEC.hevc.sw.dec\n        //\n        if (decoderInfo.getName().contains(\"sw\")) {\n            Log.i(TAG, \"Disallowing HEVC on software decoder: \" + decoderInfo.getName());\n            return false;\n        }\n        else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q && (!decoderInfo.isHardwareAccelerated() || decoderInfo.isSoftwareOnly())) {\n            Log.i(TAG, \"Disallowing HEVC on software decoder: \" + decoderInfo.getName());\n    
        return false;\n        }\n\n        // If this device is media performance class 12 or higher, we will assume any hardware\n        // HEVC decoder present is fast and modern enough for streaming.\n        //\n        // [5.3/H-1-1] MUST NOT drop more than 2 frames in 10 seconds (i.e less than 0.333 percent frame drop) for a 1080p 60 fps video session under load.\n        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {\n            Log.i(TAG, \"Media performance class: \" + Build.VERSION.MEDIA_PERFORMANCE_CLASS);\n            if (Build.VERSION.MEDIA_PERFORMANCE_CLASS >= Build.VERSION_CODES.S) {\n                Log.i(TAG, \"Allowing HEVC based on media performance class\");\n                return true;\n            }\n        }\n\n        // If the decoder supports FEATURE_LowLatency, we will assume it is fast and modern enough\n        // to be preferable for streaming over H.264 decoders.\n        if (decoderSupportsAndroidRLowLatency(decoderInfo, \"video/hevc\")) {\n            Log.i(TAG, \"Allowing HEVC based on FEATURE_LowLatency support\");\n            return true;\n        }\n\n        // Otherwise, we use our list of known working HEVC decoders\n        return isDecoderInList(whitelistedHevcDecoders, decoderInfo.getName());\n    }\n\n    public static boolean isDecoderWhitelistedForAv1(MediaCodecInfo decoderInfo) {\n        // Google didn't have official support for AV1 (or more importantly, a CTS test) until\n        // Android 10, so don't use any decoder before then.\n        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {\n            return false;\n        }\n\n        //\n        // Software decoders are terrible and we never want to use them.\n        // We want to catch decoders like:\n        // OMX.qcom.video.decoder.hevcswvdec\n        // OMX.SEC.hevc.sw.dec\n        //\n        if (decoderInfo.getName().contains(\"sw\")) {\n            Log.i(TAG, \"Disallowing AV1 on software decoder: \" + decoderInfo.getName());\n           
 return false;\n        }\n        else if (!decoderInfo.isHardwareAccelerated() || decoderInfo.isSoftwareOnly()) {\n            Log.i(TAG, \"Disallowing AV1 on software decoder: \" + decoderInfo.getName());\n            return false;\n        }\n\n        // TODO: Test some AV1 decoders\n        return false;\n    }\n\n    @SuppressWarnings(\"deprecation\")\n    @SuppressLint(\"NewApi\")\n    private static LinkedList<MediaCodecInfo> getMediaCodecList() {\n        LinkedList<MediaCodecInfo> infoList = new LinkedList<>();\n\n        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);\n        Collections.addAll(infoList, mcl.getCodecInfos());\n\n        return infoList;\n    }\n\n    @SuppressWarnings(\"RedundantThrows\")\n    public static String dumpDecoders() throws Exception {\n        String str = \"\";\n        for (MediaCodecInfo codecInfo : getMediaCodecList()) {\n            // Skip encoders\n            if (codecInfo.isEncoder()) {\n                continue;\n            }\n\n            str += \"Decoder: \"+codecInfo.getName()+\"\\n\";\n            for (String type : codecInfo.getSupportedTypes()) {\n                str += \"\\t\"+type+\"\\n\";\n                CodecCapabilities caps = codecInfo.getCapabilitiesForType(type);\n\n                for (CodecProfileLevel profile : caps.profileLevels) {\n                    str += \"\\t\\t\"+profile.profile+\" \"+profile.level+\"\\n\";\n                }\n            }\n        }\n        return str;\n    }\n\n    private static MediaCodecInfo findPreferredDecoder() {\n        // This is a different algorithm than the other findXXXDecoder functions,\n        // because we want to evaluate the decoders in our list's order\n        // rather than MediaCodecList's order\n\n        if (!initialized) {\n            throw new IllegalStateException(\"MediaCodecHelper must be initialized before use\");\n        }\n\n        for (String preferredDecoder : preferredDecoders) {\n            for 
(MediaCodecInfo codecInfo : getMediaCodecList()) {\n                // Skip encoders\n                if (codecInfo.isEncoder()) {\n                    continue;\n                }\n\n                // Check for preferred decoders\n                if (preferredDecoder.equalsIgnoreCase(codecInfo.getName())) {\n                    Log.i(TAG, \"Preferred decoder choice is \"+codecInfo.getName());\n                    return codecInfo;\n                }\n            }\n        }\n\n        return null;\n    }\n\n    private static boolean isCodecBlacklisted(MediaCodecInfo codecInfo) {\n        // Use the new isSoftwareOnly() function on Android Q\n        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {\n            if (!SHOULD_BYPASS_SOFTWARE_BLOCK && codecInfo.isSoftwareOnly()) {\n                Log.i(TAG, \"Skipping software-only decoder: \"+codecInfo.getName());\n                return true;\n            }\n        }\n\n        // Check for explicitly blacklisted decoders\n        if (isDecoderInList(blacklistedDecoderPrefixes, codecInfo.getName())) {\n            Log.i(TAG, \"Skipping blacklisted decoder: \"+codecInfo.getName());\n            return true;\n        }\n\n        return false;\n    }\n\n    public static MediaCodecInfo findFirstDecoder(String mimeType) {\n        for (MediaCodecInfo codecInfo : getMediaCodecList()) {\n            // Skip encoders\n            if (codecInfo.isEncoder()) {\n                continue;\n            }\n\n            // Skip compatibility aliases on Q+\n            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {\n                if (codecInfo.isAlias()) {\n                    continue;\n                }\n            }\n\n            // Find a decoder that supports the specified video format\n            for (String mime : codecInfo.getSupportedTypes()) {\n                if (mime.equalsIgnoreCase(mimeType)) {\n                    // Skip blacklisted codecs\n                    if 
(isCodecBlacklisted(codecInfo)) {\n                        continue;\n                    }\n\n                    Log.i(TAG, \"First decoder choice is \"+codecInfo.getName());\n                    return codecInfo;\n                }\n            }\n        }\n\n        return null;\n    }\n\n    public static MediaCodecInfo findProbableSafeDecoder(String mimeType, int requiredProfile) {\n        // First look for a preferred decoder by name\n        MediaCodecInfo info = findPreferredDecoder();\n        if (info != null) {\n            return info;\n        }\n\n        // Now look for decoders we know are safe\n        try {\n            // If this function completes, it will determine if the decoder is safe\n            return findKnownSafeDecoder(mimeType, requiredProfile);\n        } catch (Exception e) {\n            // Some buggy devices seem to throw exceptions\n            // from getCapabilitiesForType() so we'll just assume\n            // they're okay and go with the first one we find\n            return findFirstDecoder(mimeType);\n        }\n    }\n\n    // We declare this method as explicitly throwing Exception\n    // since some bad decoders can throw IllegalArgumentExceptions unexpectedly\n    // and we want to be sure all callers are handling this possibility\n    @SuppressWarnings(\"RedundantThrows\")\n    private static MediaCodecInfo findKnownSafeDecoder(String mimeType, int requiredProfile) throws Exception {\n        // Some devices (Exynos devces, at least) have two sets of decoders.\n        // The first set of decoders are C2 which do not support FEATURE_LowLatency,\n        // but the second set of OMX decoders do support FEATURE_LowLatency. 
We want\n        // to pick the OMX decoders despite the fact that C2 is listed first.\n        // On some Qualcomm devices (like Pixel 4), there are separate low latency decoders\n        // (like c2.qti.hevc.decoder.low_latency) that advertise FEATURE_LowLatency while\n        // the standard ones (like c2.qti.hevc.decoder) do not. Like Exynos, the decoders\n        // with FEATURE_LowLatency support are listed after the standard ones.\n        for (int i = 0; i < 2; i++) {\n            for (MediaCodecInfo codecInfo : getMediaCodecList()) {\n                // Skip encoders\n                if (codecInfo.isEncoder()) {\n                    continue;\n                }\n\n                // Skip compatibility aliases on Q+\n                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {\n                    if (codecInfo.isAlias()) {\n                        continue;\n                    }\n                }\n\n                // Find a decoder that supports the requested video format\n                for (String mime : codecInfo.getSupportedTypes()) {\n                    if (mime.equalsIgnoreCase(mimeType)) {\n                        Log.i(TAG, \"Examining decoder capabilities of \" + codecInfo.getName() + \" (round \" + (i + 1) + \")\");\n\n                        // Skip blacklisted codecs\n                        if (isCodecBlacklisted(codecInfo)) {\n                            continue;\n                        }\n\n                        CodecCapabilities caps = codecInfo.getCapabilitiesForType(mime);\n\n                        if (i == 0 && !decoderSupportsAndroidRLowLatency(codecInfo, mime)) {\n                            Log.i(TAG, \"Skipping decoder that lacks FEATURE_LowLatency for round 1\");\n                            continue;\n                        }\n\n                        if (requiredProfile != -1) {\n                            for (CodecProfileLevel profile : caps.profileLevels) {\n                                if 
(profile.profile == requiredProfile) {\n                                    Log.i(TAG, \"Decoder \" + codecInfo.getName() + \" supports required profile\");\n                                    return codecInfo;\n                                }\n                            }\n\n                            Log.i(TAG, \"Decoder \" + codecInfo.getName() + \" does NOT support required profile\");\n                        } else {\n                            return codecInfo;\n                        }\n                    }\n                }\n            }\n        }\n\n        return null;\n    }\n\n    public static String readCpuinfo() throws Exception {\n        StringBuilder cpuInfo = new StringBuilder();\n        try (final BufferedReader br = new BufferedReader(new FileReader(new File(\"/proc/cpuinfo\")))) {\n            for (;;) {\n                int ch = br.read();\n                if (ch == -1)\n                    break;\n                cpuInfo.append((char)ch);\n            }\n\n            return cpuInfo.toString();\n        }\n    }\n\n    private static boolean stringContainsIgnoreCase(String string, String substring) {\n        return string.toLowerCase(Locale.ENGLISH).contains(substring.toLowerCase(Locale.ENGLISH));\n    }\n\n    public static boolean isExynos4Device() {\n        try {\n            // Try reading CPU info too look for \n            String cpuInfo = readCpuinfo();\n\n            // SMDK4xxx is Exynos 4 \n            if (stringContainsIgnoreCase(cpuInfo, \"SMDK4\")) {\n                Log.i(TAG, \"Found SMDK4 in /proc/cpuinfo\");\n                return true;\n            }\n\n            // If we see \"Exynos 4\" also we'll count it\n            if (stringContainsIgnoreCase(cpuInfo, \"Exynos 4\")) {\n                Log.i(TAG, \"Found Exynos 4 in /proc/cpuinfo\");\n                return true;\n            }\n        } catch (Exception e) {\n            e.printStackTrace();\n        }\n\n        try {\n            File systemDir = 
new File(\"/sys/devices/system\");\n            File[] files = systemDir.listFiles();\n            if (files != null) {\n                for (File f : files) {\n                    if (stringContainsIgnoreCase(f.getName(), \"exynos4\")) {\n                        Log.i(TAG, \"Found exynos4 in /sys/devices/system\");\n                        return true;\n                    }\n                }\n            }\n        } catch (Exception e) {\n            e.printStackTrace();\n        }\n\n        return false;\n    }\n}\n"
  },
  {
    "path": "settings.gradle",
    "content": "include ':library-client-rtsp'\ninclude ':app'\n"
  }
]