Repository: alexeyvasilyev/rtsp-client-android
Branch: master
Commit: 0c0dd61e20eb
Files: 66
Total size: 354.9 KB
Directory structure:
gitextract_xa9itofy/
├── .github/
│ └── FUNDING.yml
├── .gitignore
├── LICENSE
├── README.md
├── app/
│ ├── .gitignore
│ ├── build.gradle
│ ├── proguard-rules.pro
│ └── src/
│ └── main/
│ ├── AndroidManifest.xml
│ ├── java/
│ │ └── com/
│ │ └── alexvas/
│ │ └── rtsp/
│ │ └── demo/
│ │ ├── MainActivity.kt
│ │ └── live/
│ │ ├── LiveFragment.kt
│ │ ├── LiveViewModel.kt
│ │ └── RawFragment.kt
│ └── res/
│ ├── drawable/
│ │ ├── ic_camera_black_24dp.xml
│ │ ├── ic_cctv_black_24dp.xml
│ │ ├── ic_launcher_background.xml
│ │ ├── ic_launcher_foreground.xml
│ │ └── ic_text_subject_black_24dp.xml
│ ├── layout/
│ │ ├── activity_main.xml
│ │ ├── fragment_live.xml
│ │ ├── fragment_logs.xml
│ │ ├── fragment_raw.xml
│ │ └── layout_rtsp_params.xml
│ ├── menu/
│ │ └── bottom_nav_menu.xml
│ ├── mipmap-anydpi-v26/
│ │ ├── ic_launcher.xml
│ │ └── ic_launcher_round.xml
│ ├── navigation/
│ │ └── mobile_navigation.xml
│ └── values/
│ ├── colors.xml
│ ├── dimens.xml
│ ├── strings.xml
│ └── styles.xml
├── build.gradle
├── gradle/
│ └── wrapper/
│ ├── gradle-wrapper.jar
│ └── gradle-wrapper.properties
├── gradle.properties
├── gradlew
├── gradlew.bat
├── jitpack.yml
├── library-client-rtsp/
│ ├── .gitignore
│ ├── build.gradle
│ ├── proguard-rules.txt
│ └── src/
│ └── main/
│ ├── AndroidManifest.xml
│ └── java/
│ └── com/
│ ├── alexvas/
│ │ ├── rtsp/
│ │ │ ├── RtspClient.java
│ │ │ ├── codec/
│ │ │ │ ├── AudioDecodeThread.kt
│ │ │ │ ├── FrameQueue.kt
│ │ │ │ ├── VideoDecodeThread.kt
│ │ │ │ ├── VideoDecoderBitmapThread.kt
│ │ │ │ ├── VideoDecoderSurfaceThread.kt
│ │ │ │ └── color/
│ │ │ │ ├── ColorConverter.kt
│ │ │ │ └── ColorConverterImage.kt
│ │ │ ├── parser/
│ │ │ │ ├── AacParser.java
│ │ │ │ ├── AudioParser.kt
│ │ │ │ ├── G711Parser.kt
│ │ │ │ ├── RtpH264Parser.kt
│ │ │ │ ├── RtpH265Parser.kt
│ │ │ │ ├── RtpHeaderParser.java
│ │ │ │ └── RtpParser.kt
│ │ │ └── widget/
│ │ │ ├── RtspImageView.kt
│ │ │ ├── RtspListeners.kt
│ │ │ ├── RtspProcessor.kt
│ │ │ └── RtspSurfaceView.kt
│ │ └── utils/
│ │ ├── ByteUtils.java
│ │ ├── MediaCodecUtils.kt
│ │ ├── NetUtils.java
│ │ └── VideoCodecUtils.kt
│ └── limelight/
│ └── binding/
│ └── video/
│ └── MediaCodecHelper.java
└── settings.gradle
================================================
FILE CONTENTS
================================================
================================================
FILE: .github/FUNDING.yml
================================================
github: alexeyvasilyev
================================================
FILE: .gitignore
================================================
*.iml
.gradle
/local.properties
/.idea
/build
.DS_Store
================================================
FILE: LICENSE
================================================
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
================================================
FILE: README.md
================================================
# rtsp-client-android
Lightweight RTSP client library for Android with almost zero lag video decoding (achieved 20 msec video decoding latency on some RTSP streams). Designed for lag-critical applications (e.g. video surveillance from drones, car rear view cameras, etc.).
Unlike [AndroidX Media ExoPlayer](https://github.com/androidx/media) which also supports RTSP, this library does not make any video buffering. Video frames are shown immediately when they arrive.
[](https://jitpack.io/#alexeyvasilyev/rtsp-client-android)

## Features:
- RTSP/RTSPS over TCP.
- Supports majority of RTSP IP cameras.
- Video H.264/H.265.
- Audio AAC LC, G.711 uLaw, G.711 aLaw.
- Support for application specific data sent via RTP, e.g. GPS data (`m=application`, see [RFC 4566 sec.5.14](https://datatracker.ietf.org/doc/html/rfc4566#section-5.14))
- Basic/Digest authentication.
- Uses Android's [Low-Latency MediaCodec](https://source.android.com/docs/core/media/low-latency-media) by default if available.
- Ability to select hardware or software video decoder.
- Ability to [rewrite SPS frame](https://github.com/alexeyvasilyev/rtsp-client-android/blob/dbea741548307b1b0e1ead0ccc6294e811fbf6fd/library-client-rtsp/src/main/java/com/alexvas/rtsp/widget/RtspProcessor.kt#L106C9-L106C55) with low-latency parameters (EXPERIMENTAL).
- Video rotation (90, 180, 270 degrees).
- Android min API 24.
## Upcoming features:
- 2-way talk.
## Permissions:
```xml
```
## Compile
To use this library in your project add this to your build.gradle:
```gradle
allprojects {
repositories {
maven { url 'https://jitpack.io' }
}
}
dependencies {
implementation 'com.github.alexeyvasilyev:rtsp-client-android:x.x.x'
}
```
## How to use:
Easiest way is just to use `RtspSurfaceView` (recommended) or `RtspImageView` classes for showing video stream in UI.
Use [RtspSurfaceView](https://github.com/alexeyvasilyev/rtsp-client-android/blob/master/library-client-rtsp/src/main/java/com/alexvas/rtsp/widget/RtspSurfaceView.kt) if you need best performance and less battery usage. To get bitmap from SurfaceView use [PixelCopy.request](https://developer.android.com/reference/android/view/PixelCopy) (on Pixel 8 Pro with 1440p @ 20 fps video stream, you can get 12 fps only via PixelCopy)
Use [RtspImageView](https://github.com/alexeyvasilyev/rtsp-client-android/blob/master/library-client-rtsp/src/main/java/com/alexvas/rtsp/widget/RtspImageView.kt) if you need better performance than PixelCopy for getting bitmaps for further processing (e.g. for AI).
```xml
```
Then in code use:
```kotlin
val uri = Uri.parse("rtsps://10.0.1.3/test.sdp")
val username = "admin"
val password = "secret"
svVideo.init(uri, username, password)
svVideo.start(
requestVideo = true,
requestAudio = true,
requestApplication = false)
// ...
svVideo.stop()
```
You can still use library without any decoding (just for obtaining raw frames from RTSP source), e.g. for writing video stream into MP4 via muxer.
```kotlin
val rtspClientListener = object: RtspClient.RtspClientListener {
override fun onRtspConnecting() {}
override fun onRtspConnected(sdpInfo: SdpInfo) {}
override fun onRtspVideoNalUnitReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {
// Send raw H264/H265 NAL unit to decoder
}
override fun onRtspAudioSampleReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {
// Send raw audio to decoder
}
override fun onRtspApplicationDataReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {
// Send raw application data to app specific parser
}
override fun onRtspDisconnected() {}
override fun onRtspFailedUnauthorized() {
Log.e(TAG, "RTSP failed unauthorized");
}
override fun onRtspFailed(message: String?) {
Log.e(TAG, "RTSP failed with message '$message'")
}
}
val uri = Uri.parse("rtsps://10.0.1.3/test.sdp")
val username = "admin"
val password = "secret"
val stopped = new AtomicBoolean(false)
val sslSocket = NetUtils.createSslSocketAndConnect(uri.getHost(), uri.getPort(), 5000)
val rtspClient = RtspClient.Builder(sslSocket, uri.toString(), stopped, rtspClientListener)
.requestVideo(true)
.requestAudio(true)
.withDebug(false)
.withUserAgent("RTSP client")
.withCredentials(username, password)
.build()
// Blocking call until stopped variable is true or connection failed
rtspClient.execute()
NetUtils.closeSocket(sslSocket)
```
## How to get lowest possible latency:
There are two types of latencies:
### Network latency
If you want the lowest possible network latency, be sure that both Android device and RTSP camera are connected to the same network by the Ethernet cable (not WiFi).
Another option to try is to decrease stream bitrate on RTSP camera. Less frame size leads to less time needed for frame transfer.
### Video decoder latency
Video decoder latency can vary significantly on different Android devices and on different RTSP camera streams.
For the same profile/level and resolution (but different cameras) the latency in best cases can be 20 msec, in worst cases 1200 msec.
To decrease latency be sure you use the lowest possible H.264 video stream profile and level (enable `debug` in the library and check SPS frame params `profile_idc` and `level_idc` in the log). `Baseline profile` should have the lowest possible decoder latency.
Check `max_num_reorder_frames` param as well. For best latency its value should be `0`.
You can also try to use [experimentalUpdateSpsFrameWithLowLatencyParams](https://github.com/alexeyvasilyev/rtsp-client-android/blob/master/library-client-rtsp/src/main/java/com/alexvas/rtsp/widget/RtspProcessor.kt#L106) library feature which rewrites config frame on runtime with low-latency parameters.
================================================
FILE: app/.gitignore
================================================
# Created by https://www.gitignore.io/api/android,java,intellij
### Android ###
# Built application files
*.apk
*.ap_
# Files for the Dalvik VM
*.dex
# Java class files
*.class
# Generated files
bin/
gen/
# Gradle files
.gradle/
build/
# Local configuration file (sdk path, etc)
local.properties
# Proguard folder generated by Eclipse
proguard/
xactmobile/class_files.txt
xactmobile/mapping.txt
xactmobile/seeds.txt
# Log Files
*.log
# Android Studio Navigation editor temp files
.navigation/
### Android Patch ###
gen-external-apklibs
### Java ###
*.class
# Mobile Tools for Java (J2ME)
.mtj.tmp/
# Package Files #
#*.jar
*.war
*.ear
# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
hs_err_pid*
### Intellij ###
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio
*.iml
## Directory-based project format:
.idea/
# if you remove the above rule, at least ignore the following:
# User-specific stuff:
.idea/workspace.xml
.idea/tasks.xml
.idea/dictionaries
# Sensitive or high-churn files:
.idea/dataSources.ids
.idea/dataSources.xml
.idea/sqlDataSources.xml
.idea/dynamic.xml
.idea/uiDesigner.xml
# Gradle:
.idea/gradle.xml
.idea/libraries
# Mongo Explorer plugin:
.idea/mongoSettings.xml
## File-based project format:
*.ipr
*.iws
## Plugin-specific files:
# IntelliJ
/out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
xactmobile/.DS_Store~64be78fe3602626c61b52bcbfd09e09a6107b50a
xactmobile/.DS_Store~HEAD
oslab-viewpager/._.DS_Store
oslab-viewpager/src/main/.DS_Store
oslab-viewpager/src/main/._.DS_Store
oslab-viewpager/src/main/res/.DS_Store
oslab-viewpager/src/main/res/._.DS_Store
oslab-viewpager/.gitignore
oslab-materialdesign/.DS_Store
oslab-materialdesign/._.DS_Store
oslab-materialdesign/src/.DS_Store
oslab-materialdesign/src/._.DS_Store
oslab-materialdesign/src/main/.DS_Store
oslab-materialdesign/src/main/._.DS_Store
oslab-materialdesign/src/main/res/.DS_Store
oslab-materialdesign/src/main/res/._.DS_Store
================================================
FILE: app/build.gradle
================================================
apply plugin: 'com.android.application'
apply plugin: 'kotlin-android'

android {
    // Use the new-style DSL consistently (matches minSdk/targetSdk below).
    compileSdk 36

    defaultConfig {
        applicationId "com.alexvas.rtsp.demo"
        minSdk 24
        targetSdk 34
        versionCode 1
        versionName "1.0"
    }

    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
        }
    }

    // Java and Kotlin sources are compiled against the same JVM 17 target.
    compileOptions {
        sourceCompatibility JavaVersion.VERSION_17
        targetCompatibility JavaVersion.VERSION_17
    }
    kotlinOptions {
        jvmTarget = JavaVersion.VERSION_17.toString()
    }

    buildFeatures {
        viewBinding true
    }

    namespace 'com.alexvas.rtsp.demo'
}

dependencies {
    implementation fileTree(dir: 'libs', include: ['*.jar'])
    implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"

    implementation 'androidx.appcompat:appcompat:1.7.1'
    implementation 'androidx.core:core-ktx:1.18.0'
    implementation 'com.google.android.material:material:1.13.0'
    implementation 'androidx.constraintlayout:constraintlayout:2.2.1'
    implementation 'androidx.lifecycle:lifecycle-extensions:2.2.0'

    // NOTE(review): the two navigation artifacts were previously declared
    // twice each; the duplicate declarations have been removed.
    def androidx_navigation_version = '2.9.7'
    implementation "androidx.navigation:navigation-fragment-ktx:$androidx_navigation_version"
    implementation "androidx.navigation:navigation-ui-ktx:$androidx_navigation_version"

    def logcat_core_version = '3.4'
    api "com.github.AppDevNext.Logcat:LogcatCoreLib:$logcat_core_version"
    api "com.github.AppDevNext.Logcat:LogcatCoreUI:$logcat_core_version"

    implementation project(':library-client-rtsp')
}
================================================
FILE: app/proguard-rules.pro
================================================
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile
================================================
FILE: app/src/main/AndroidManifest.xml
================================================
================================================
FILE: app/src/main/java/com/alexvas/rtsp/demo/MainActivity.kt
================================================
package com.alexvas.rtsp.demo
import android.os.Bundle
import com.google.android.material.bottomnavigation.BottomNavigationView
import androidx.appcompat.app.AppCompatActivity
import androidx.navigation.findNavController
import androidx.navigation.ui.setupWithNavController
/**
 * Single-activity entry point hosting the navigation graph and the
 * bottom navigation bar.
 */
class MainActivity : AppCompatActivity() {

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_main)
        // Wire the bottom navigation bar to the NavController so that
        // selecting a menu item navigates to the matching destination.
        val navView: BottomNavigationView = findViewById(R.id.nav_view)
        val navController = findNavController(R.id.nav_host_fragment)
        // An AppBarConfiguration (top-level destinations + action bar) is
        // intentionally not set up here; left as reference:
        // val appBarConfiguration = AppBarConfiguration(setOf(
        //     R.id.navigation_live, R.id.navigation_logs))
        // setupActionBarWithNavController(navController, appBarConfiguration)
        navView.setupWithNavController(navController)
    }
}
================================================
FILE: app/src/main/java/com/alexvas/rtsp/demo/live/LiveFragment.kt
================================================
package com.alexvas.rtsp.demo.live
import android.annotation.SuppressLint
import android.graphics.Bitmap
import android.os.Bundle
import android.os.Handler
import android.os.HandlerThread
import android.util.Log
import android.view.LayoutInflater
import android.view.PixelCopy
import android.view.View
import android.view.ViewGroup
import android.view.WindowManager
import android.widget.Toast
import androidx.constraintlayout.widget.ConstraintSet
import androidx.core.net.toUri
import androidx.fragment.app.Fragment
import androidx.lifecycle.ViewModelProvider
import com.alexvas.rtsp.codec.VideoDecodeThread
import com.alexvas.rtsp.demo.databinding.FragmentLiveBinding
import com.alexvas.rtsp.widget.RtspDataListener
import com.alexvas.rtsp.widget.RtspImageView
import com.alexvas.rtsp.widget.RtspStatusListener
import com.alexvas.rtsp.widget.toHexString
import java.util.Timer
import java.util.TimerTask
import java.util.concurrent.atomic.AtomicBoolean
import kotlin.math.min
@SuppressLint("LogNotTimber")
class LiveFragment : Fragment() {
private lateinit var binding: FragmentLiveBinding
private lateinit var liveViewModel: LiveViewModel
private var statisticsTimer: Timer? = null
private var svVideoSurfaceResolution = Pair(0, 0)
// Drives the SurfaceView-based player UI from RTSP connection state changes.
// Callbacks mutate views directly, so they are presumably delivered on the
// main thread — NOTE(review): confirm against RtspSurfaceView's contract.
private val rtspStatusSurfaceListener = object: RtspStatusListener {
    override fun onRtspStatusConnecting() {
        if (DEBUG) Log.v(TAG, "onRtspStatusConnecting()")
        binding.apply {
            tvStatusSurface.text = "RTSP connecting"
            pbLoadingSurface.visibility = View.VISIBLE
            // Shutter hides the stale last frame while (re)connecting.
            vShutterSurface.visibility = View.VISIBLE
            // Lock all connection parameters while a session is in progress.
            llRtspParams.apply {
                etRtspRequest.isEnabled = false
                etRtspUsername.isEnabled = false
                etRtspPassword.isEnabled = false
                cbVideo.isEnabled = false
                cbAudio.isEnabled = false
                cbApplication.isEnabled = false
                cbDebug.isEnabled = false
            }
            tgRotation.isEnabled = false
        }
    }
    override fun onRtspStatusConnected() {
        if (DEBUG) Log.v(TAG, "onRtspStatusConnected()")
        binding.apply {
            tvStatusSurface.text = "RTSP connected"
            // Same button toggles between start and stop.
            bnStartStopSurface.text = "Stop RTSP"
        }
        setKeepScreenOn(true)
    }
    override fun onRtspStatusDisconnecting() {
        if (DEBUG) Log.v(TAG, "onRtspStatusDisconnecting()")
        binding.apply {
            tvStatusSurface.text = "RTSP disconnecting"
        }
    }
    override fun onRtspStatusDisconnected() {
        if (DEBUG) Log.v(TAG, "onRtspStatusDisconnected()")
        binding.apply {
            tvStatusSurface.text = "RTSP disconnected"
            bnStartStopSurface.text = "Start RTSP"
            pbLoadingSurface.visibility = View.GONE
            vShutterSurface.visibility = View.VISIBLE
            pbLoadingSurface.isEnabled = false
            // Re-enable the connection parameters edited by the user.
            llRtspParams.apply {
                cbVideo.isEnabled = true
                cbAudio.isEnabled = true
                cbApplication.isEnabled = true
                cbDebug.isEnabled = true
                etRtspRequest.isEnabled = true
                etRtspUsername.isEnabled = true
                etRtspPassword.isEnabled = true
            }
            tgRotation.isEnabled = true
        }
        setKeepScreenOn(false)
    }
    override fun onRtspStatusFailedUnauthorized() {
        if (DEBUG) Log.e(TAG, "onRtspStatusFailedUnauthorized()")
        // Fragment may already be detached; binding views would be unsafe.
        if (context == null) return
        // Reuse the disconnect path to restore controls, then override the status text.
        onRtspStatusDisconnected()
        binding.apply {
            tvStatusSurface.text = "RTSP username or password invalid"
            pbLoadingSurface.visibility = View.GONE
        }
    }
    override fun onRtspStatusFailed(message: String?) {
        if (DEBUG) Log.e(TAG, "onRtspStatusFailed(message='$message')")
        if (context == null) return
        onRtspStatusDisconnected()
        binding.apply {
            tvStatusSurface.text = "Error: $message"
            pbLoadingSurface.visibility = View.GONE
        }
    }
    override fun onRtspFirstFrameRendered() {
        if (DEBUG) Log.v(TAG, "onRtspFirstFrameRendered()")
        Log.i(TAG, "First frame rendered")
        binding.apply {
            pbLoadingSurface.visibility = View.GONE
            // Reveal the live video and allow snapshots only once a real
            // frame has been decoded.
            vShutterSurface.visibility = View.GONE
            bnSnapshotSurface.isEnabled = true
        }
    }
    override fun onRtspFrameSizeChanged(width: Int, height: Int) {
        if (DEBUG) Log.v(TAG, "onRtspFrameSizeChanged(width=$width, height=$height)")
        Log.i(TAG, "Video resolution changed to ${width}x${height}")
        // Remember the stream resolution for getSnapshot()'s bitmap size.
        svVideoSurfaceResolution = Pair(width, height)
        // Constrain the SurfaceView to the stream's aspect ratio.
        ConstraintSet().apply {
            clone(binding.csVideoSurface)
            setDimensionRatio(binding.svVideoSurface.id, "$width:$height")
            applyTo(binding.csVideoSurface)
        }
    }
}
/**
 * Logs application-specific RTP payloads (e.g. GPS data sent alongside
 * audio/video) as a short hex dump.
 */
private val rtspDataListener = object : RtspDataListener {
    override fun onRtspDataApplicationDataReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {
        // Cap the dump at 25 bytes so log lines stay readable.
        val dumpLen = if (length < 25) length else 25
        Log.i(TAG, "RTSP app data ($length bytes): ${data.toHexString(offset, offset + dumpLen)}")
    }
}
/**
 * UI state callbacks for the ImageView-based (bitmap decoding) RTSP player.
 * Mirrors the SurfaceView listener but drives the *Image widgets instead.
 */
private val rtspStatusImageListener = object : RtspStatusListener {

    override fun onRtspStatusConnecting() {
        if (DEBUG) Log.v(TAG, "onRtspStatusConnecting()")
        with(binding) {
            tvStatusImage.text = "RTSP connecting"
            pbLoadingImage.visibility = View.VISIBLE
            // Shutter hides the stale last frame while (re)connecting.
            vShutterImage.visibility = View.VISIBLE
        }
    }

    override fun onRtspStatusConnected() {
        if (DEBUG) Log.v(TAG, "onRtspStatusConnected()")
        with(binding) {
            tvStatusImage.text = "RTSP connected"
            bnStartStopImage.text = "Stop RTSP"
        }
        setKeepScreenOn(true)
    }

    override fun onRtspStatusDisconnecting() {
        if (DEBUG) Log.v(TAG, "onRtspStatusDisconnecting()")
        binding.tvStatusImage.text = "RTSP disconnecting"
    }

    override fun onRtspStatusDisconnected() {
        if (DEBUG) Log.v(TAG, "onRtspStatusDisconnected()")
        with(binding) {
            tvStatusImage.text = "RTSP disconnected"
            bnStartStopImage.text = "Start RTSP"
            pbLoadingImage.visibility = View.GONE
            vShutterImage.visibility = View.VISIBLE
            pbLoadingImage.isEnabled = false
        }
        setKeepScreenOn(false)
    }

    override fun onRtspStatusFailedUnauthorized() {
        if (DEBUG) Log.e(TAG, "onRtspStatusFailedUnauthorized()")
        // Fragment may already be detached; touching views would be unsafe.
        context ?: return
        // Restore controls via the disconnect path, then override the status.
        onRtspStatusDisconnected()
        with(binding) {
            tvStatusImage.text = "RTSP username or password invalid"
            pbLoadingImage.visibility = View.GONE
        }
    }

    override fun onRtspStatusFailed(message: String?) {
        if (DEBUG) Log.e(TAG, "onRtspStatusFailed(message='$message')")
        context ?: return
        onRtspStatusDisconnected()
        with(binding) {
            tvStatusImage.text = "Error: $message"
            pbLoadingImage.visibility = View.GONE
        }
    }

    override fun onRtspFirstFrameRendered() {
        if (DEBUG) Log.v(TAG, "onRtspFirstFrameRendered()")
        Log.i(TAG, "First frame rendered")
        with(binding) {
            vShutterImage.visibility = View.GONE
            pbLoadingImage.visibility = View.GONE
        }
    }

    override fun onRtspFrameSizeChanged(width: Int, height: Int) {
        if (DEBUG) Log.v(TAG, "onRtspFrameSizeChanged(width=$width, height=$height)")
        Log.i(TAG, "Video resolution changed to ${width}x${height}")
        // Constrain the ImageView to the stream's aspect ratio.
        ConstraintSet().run {
            clone(binding.csVideoImage)
            setDimensionRatio(binding.ivVideoImage.id, "$width:$height")
            applyTo(binding.csVideoImage)
        }
    }
}
/**
 * Synchronously copies the currently displayed video frame from the surface via [PixelCopy].
 *
 * @return the captured bitmap on success, or null when the copy failed, timed out
 *         or the calling thread was interrupted.
 */
private fun getSnapshot(): Bitmap? {
    if (DEBUG) Log.v(TAG, "getSnapshot()")
    val surfaceBitmap = Bitmap.createBitmap(
        svVideoSurfaceResolution.first,
        svVideoSurfaceResolution.second,
        Bitmap.Config.ARGB_8888
    )
    val lock = Object()
    val finished = AtomicBoolean(false)
    val success = AtomicBoolean(false)
    val thread = HandlerThread("PixelCopyHelper")
    thread.start()
    val sHandler = Handler(thread.looper)
    val listener = PixelCopy.OnPixelCopyFinishedListener { copyResult ->
        success.set(copyResult == PixelCopy.SUCCESS)
        synchronized(lock) {
            finished.set(true)
            lock.notifyAll()
        }
    }
    try {
        synchronized(lock) {
            PixelCopy.request(binding.svVideoSurface.holder.surface, surfaceBitmap, listener, sHandler)
            // Wait in a loop guarded by a completion flag: Object.wait() can wake up
            // spuriously, and a bare wait() would hang forever if the callback never fires.
            val deadlineMsec = System.currentTimeMillis() + 3000L
            while (!finished.get()) {
                val remainingMsec = deadlineMsec - System.currentTimeMillis()
                if (remainingMsec <= 0) break // timed out waiting for PixelCopy
                lock.wait(remainingMsec)
            }
        }
    } catch (e: InterruptedException) {
        // Restore the interrupt status and report failure instead of crashing.
        Thread.currentThread().interrupt()
    } finally {
        // Always release the helper thread, even on timeout or interruption.
        thread.quitSafely()
    }
    return if (success.get()) surfaceBitmap else null
}
/**
 * Inflates the live-view UI and wires it up: binds the surface and image video views to
 * their RTSP status/data listeners, two-way syncs the RTSP request/username/password
 * EditTexts with the shared [LiveViewModel], and installs click handlers for rotation,
 * decoder selection, start/stop and snapshot.
 */
override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?, savedInstanceState: Bundle?): View {
    if (DEBUG) Log.v(TAG, "onCreateView()")
    liveViewModel = ViewModelProvider(this)[LiveViewModel::class.java]
    binding = FragmentLiveBinding.inflate(inflater, container, false)
    // Default to the hardware decoder toggle being selected.
    binding.bnVideoDecoderGroup.check(binding.bnVideoDecoderHardware.id)
    binding.svVideoSurface.setStatusListener(rtspStatusSurfaceListener)
    binding.svVideoSurface.setDataListener(rtspDataListener)
    binding.ivVideoImage.setStatusListener(rtspStatusImageListener)
    binding.ivVideoImage.setDataListener(rtspDataListener)
    // EditText -> ViewModel direction.
    liveViewModel.initEditTexts(
        binding.llRtspParams.etRtspRequest,
        binding.llRtspParams.etRtspUsername,
        binding.llRtspParams.etRtspPassword
    )
    // ViewModel -> EditText direction; the equality guard avoids update loops.
    liveViewModel.rtspRequest.observe(viewLifecycleOwner) {
        if (binding.llRtspParams.etRtspRequest.text.toString() != it)
            binding.llRtspParams.etRtspRequest.setText(it)
    }
    liveViewModel.rtspUsername.observe(viewLifecycleOwner) {
        if (binding.llRtspParams.etRtspUsername.text.toString() != it)
            binding.llRtspParams.etRtspUsername.setText(it)
    }
    liveViewModel.rtspPassword.observe(viewLifecycleOwner) {
        if (binding.llRtspParams.etRtspPassword.text.toString() != it)
            binding.llRtspParams.etRtspPassword.setText(it)
    }
    binding.cbVideoFpsStabilization.setOnCheckedChangeListener { _, isChecked ->
        binding.svVideoSurface.videoFrameRateStabilization = isChecked
    }
    binding.cbExperimentalRewriteSps.setOnCheckedChangeListener { _, isChecked ->
        binding.svVideoSurface.experimentalUpdateSpsFrameWithLowLatencyParams = isChecked
    }
    // Rotation buttons apply the same rotation to both renderers.
    binding.bnRotate0.setOnClickListener {
        binding.svVideoSurface.videoRotation = 0
        binding.ivVideoImage.videoRotation = 0
    }
    binding.bnRotate90.setOnClickListener {
        binding.svVideoSurface.videoRotation = 90
        binding.ivVideoImage.videoRotation = 90
    }
    binding.bnRotate180.setOnClickListener {
        binding.svVideoSurface.videoRotation = 180
        binding.ivVideoImage.videoRotation = 180
    }
    binding.bnRotate270.setOnClickListener {
        binding.svVideoSurface.videoRotation = 270
        binding.ivVideoImage.videoRotation = 270
    }
    // Select 0 degrees as the initial rotation.
    binding.bnRotate0.performClick()
    binding.bnVideoDecoderHardware.setOnClickListener {
        binding.svVideoSurface.videoDecoderType = VideoDecodeThread.DecoderType.HARDWARE
        binding.ivVideoImage.videoDecoderType = VideoDecodeThread.DecoderType.HARDWARE
    }
    binding.bnVideoDecoderSoftware.setOnClickListener {
        binding.svVideoSurface.videoDecoderType = VideoDecodeThread.DecoderType.SOFTWARE
        binding.ivVideoImage.videoDecoderType = VideoDecodeThread.DecoderType.SOFTWARE
    }
    // Start/stop toggle for the surface-based renderer.
    binding.bnStartStopSurface.setOnClickListener {
        if (binding.svVideoSurface.isStarted()) {
            binding.svVideoSurface.stop()
            stopStatistics()
        } else {
            val uri = liveViewModel.rtspRequest.value!!.toUri()
            binding.svVideoSurface.apply {
                init(
                    uri,
                    username = liveViewModel.rtspUsername.value,
                    password = liveViewModel.rtspPassword.value,
                    userAgent = "rtsp-client-android"
                )
                debug = binding.llRtspParams.cbDebug.isChecked
                videoFrameRateStabilization = binding.cbVideoFpsStabilization.isChecked
                start(
                    requestVideo = binding.llRtspParams.cbVideo.isChecked,
                    requestAudio = binding.llRtspParams.cbAudio.isChecked,
                    requestApplication = binding.llRtspParams.cbApplication.isChecked
                )
            }
            startStatistics()
        }
    }
    // Start/stop toggle for the bitmap/ImageView-based renderer.
    binding.bnStartStopImage.setOnClickListener {
        if (binding.ivVideoImage.isStarted()) {
            binding.ivVideoImage.stop()
            stopStatistics()
        } else {
            val uri = liveViewModel.rtspRequest.value!!.toUri()
            binding.ivVideoImage.apply {
                init(
                    uri,
                    username = liveViewModel.rtspUsername.value,
                    password = liveViewModel.rtspPassword.value,
                    userAgent = "rtsp-client-android"
                )
                debug = binding.llRtspParams.cbDebug.isChecked
                onRtspImageBitmapListener = object : RtspImageView.RtspImageBitmapListener {
                    override fun onRtspImageBitmapObtained(bitmap: Bitmap) {
                        // TODO: You can send bitmap for processing
                    }
                }
                start(
                    requestVideo = binding.llRtspParams.cbVideo.isChecked,
                    requestAudio = binding.llRtspParams.cbAudio.isChecked,
                    requestApplication = binding.llRtspParams.cbApplication.isChecked
                )
            }
            startStatistics()
        }
    }
    binding.bnSnapshotSurface.setOnClickListener {
        val bitmap = getSnapshot()
        // TODO Save snapshot to DCIM folder
        if (bitmap != null) {
            Toast.makeText(requireContext(), "Snapshot succeeded ${bitmap.width}x${bitmap.height}", Toast.LENGTH_LONG).show()
        } else {
            Toast.makeText(requireContext(), "Snapshot failed", Toast.LENGTH_LONG).show()
        }
    }
    return binding.root
}
override fun onResume() {
    if (DEBUG) Log.v(TAG, "onResume()")
    super.onResume()
    // Reload the persisted RTSP connection parameters whenever the fragment becomes visible.
    liveViewModel.loadParams(requireContext())
}
override fun onPause() {
    // Persist the current RTSP parameters and, if the surface renderer is running,
    // stop playback and the statistics timer while the fragment is in the background.
    val wasStarted = binding.svVideoSurface.isStarted()
    if (DEBUG) Log.v(TAG, "onPause(), started:$wasStarted")
    super.onPause()
    liveViewModel.saveParams(requireContext())
    if (!wasStarted) return
    binding.svVideoSurface.stop()
    stopStatistics()
}
/**
 * Starts a once-per-second timer that reads decoder statistics from the surface view
 * and posts the formatted text to the statistics TextView. No-op if already running.
 */
private fun startStatistics() {
    if (DEBUG) Log.v(TAG, "startStatistics()")
    Log.i(TAG, "Start statistics")
    if (statisticsTimer == null) {
        val task: TimerTask = object : TimerTask() {
            override fun run() {
                // Runs on the Timer thread; UI update below is posted to the main thread.
                val statistics = binding.svVideoSurface.statistics
                val text =
                    "Video decoder: ${statistics.videoDecoderType.toString().lowercase()} ${if (statistics.videoDecoderName.isNullOrEmpty()) "" else "(${statistics.videoDecoderName})"}" +
                    "\nVideo decoder latency: ${statistics.videoDecoderLatencyMsec} ms" +
                    "\nResolution: ${svVideoSurfaceResolution.first}x${svVideoSurfaceResolution.second}"
                // "\nNetwork latency: "
                // // Assume that difference between current Android time and camera time cannot be more than 5 sec.
                // // Otherwise time need to be synchronized on both devices.
                // text += if (statistics.networkLatencyMsec == -1) {
                //     "-"
                // } else if (statistics.networkLatencyMsec < 0 || statistics.networkLatencyMsec > TimeUnit.SECONDS.toMillis(5)) {
                //     "[time out of sync]"
                // } else {
                //     "${statistics.networkLatencyMsec} ms"
                // }
                binding.tvStatistics.post {
                    binding.tvStatistics.text = text
                }
            }
        }
        statisticsTimer = Timer("${TAG}::Statistics").apply {
            // Fire immediately, then every second.
            schedule(task, 0, 1000)
        }
    }
}
private fun stopStatistics() {
    // Cancels the statistics timer if one is running; safe to call repeatedly.
    if (DEBUG) Log.v(TAG, "stopStatistics()")
    val timer = statisticsTimer
    if (timer != null) {
        Log.i(TAG, "Stop statistics")
        timer.cancel()
    }
    statisticsTimer = null
}
private fun setKeepScreenOn(enable: Boolean) {
    // Toggles FLAG_KEEP_SCREEN_ON on the hosting activity's window.
    // No-op (and no log) when the fragment is not attached to an activity.
    if (DEBUG) Log.v(TAG, "setKeepScreenOn(enable=$enable)")
    val window = activity?.window ?: return
    if (enable) {
        window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON)
        Log.i(TAG, "Enabled keep screen on")
    } else {
        window.clearFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON)
        Log.i(TAG, "Disabled keep screen on")
    }
}
companion object {
    // Logcat tag derived from the class name.
    private val TAG: String = LiveFragment::class.java.simpleName
    // Enables the verbose lifecycle/status logging in this demo fragment.
    private const val DEBUG = true
}
}
================================================
FILE: app/src/main/java/com/alexvas/rtsp/demo/live/LiveViewModel.kt
================================================
package com.alexvas.rtsp.demo.live
import android.annotation.SuppressLint
import android.content.Context
import android.text.Editable
import android.text.TextWatcher
import android.util.Log
import android.widget.EditText
import androidx.lifecycle.MutableLiveData
import androidx.lifecycle.ViewModel
// SharedPreferences keys for the persisted RTSP connection parameters.
private const val RTSP_REQUEST_KEY = "rtsp_request"
private const val RTSP_USERNAME_KEY = "rtsp_username"
private const val RTSP_PASSWORD_KEY = "rtsp_password"
// Defaults used on first launch or when a preference cannot be read.
private const val DEFAULT_RTSP_REQUEST = "rtsp://10.0.1.3:554/axis-media/media.amp"
private const val DEFAULT_RTSP_USERNAME = ""
private const val DEFAULT_RTSP_PASSWORD = ""
// Name of the SharedPreferences file backing loadParams()/saveParams().
private const val LIVE_PARAMS_FILENAME = "live_params"
/**
 * Shared view model holding the RTSP request URL, username and password as [MutableLiveData],
 * persisting them to SharedPreferences and two-way syncing them with EditTexts.
 */
@SuppressLint("LogNotTimber")
class LiveViewModel : ViewModel() {

    /** RTSP request URL, e.g. "rtsp://host:554/path". */
    val rtspRequest = MutableLiveData<String>().apply {
        value = DEFAULT_RTSP_REQUEST
    }

    /** RTSP username; empty string when no authentication is used. */
    val rtspUsername = MutableLiveData<String>().apply {
        value = DEFAULT_RTSP_USERNAME
    }

    /** RTSP password; empty string when no authentication is used. */
    val rtspPassword = MutableLiveData<String>().apply {
        value = DEFAULT_RTSP_PASSWORD
    }

    /**
     * Restores the three RTSP parameters from the "live_params" SharedPreferences file.
     * A ClassCastException (preference previously written with another type) is logged
     * and the affected parameter keeps its current value.
     */
    fun loadParams(context: Context) {
        if (DEBUG) Log.v(TAG, "loadParams()")
        val pref = context.getSharedPreferences(LIVE_PARAMS_FILENAME, Context.MODE_PRIVATE)
        try {
            rtspRequest.setValue(pref.getString(RTSP_REQUEST_KEY, DEFAULT_RTSP_REQUEST))
        } catch (e: ClassCastException) {
            e.printStackTrace()
        }
        try {
            rtspUsername.setValue(pref.getString(RTSP_USERNAME_KEY, DEFAULT_RTSP_USERNAME))
        } catch (e: ClassCastException) {
            e.printStackTrace()
        }
        try {
            rtspPassword.setValue(pref.getString(RTSP_PASSWORD_KEY, DEFAULT_RTSP_PASSWORD))
        } catch (e: ClassCastException) {
            e.printStackTrace()
        }
    }

    /** Persists the three RTSP parameters to the "live_params" SharedPreferences file. */
    fun saveParams(context: Context) {
        if (DEBUG) Log.v(TAG, "saveParams()")
        context.getSharedPreferences(LIVE_PARAMS_FILENAME, Context.MODE_PRIVATE).edit().apply {
            putString(RTSP_REQUEST_KEY, rtspRequest.value)
            putString(RTSP_USERNAME_KEY, rtspUsername.value)
            putString(RTSP_PASSWORD_KEY, rtspPassword.value)
            apply()
        }
    }

    /**
     * Propagates user edits from the three EditTexts into the corresponding LiveData.
     * The reverse direction (LiveData -> EditText) is wired by the fragments via observers.
     */
    fun initEditTexts(etRtspRequest: EditText, etRtspUsername: EditText, etRtspPassword: EditText) {
        if (DEBUG) Log.v(TAG, "initEditTexts()")
        syncToLiveData(etRtspRequest, rtspRequest)
        syncToLiveData(etRtspUsername, rtspUsername)
        syncToLiveData(etRtspPassword, rtspPassword)
    }

    /** Copies every text change of [editText] into [liveData]; skips no-op updates to avoid loops. */
    private fun syncToLiveData(editText: EditText, liveData: MutableLiveData<String>) {
        editText.addTextChangedListener(object : TextWatcher {
            override fun afterTextChanged(s: Editable?) {
            }
            override fun beforeTextChanged(s: CharSequence?, start: Int, count: Int, after: Int) {
            }
            override fun onTextChanged(s: CharSequence?, start: Int, before: Int, count: Int) {
                val text = s.toString()
                if (text != liveData.value) {
                    liveData.value = text
                }
            }
        })
    }

    companion object {
        private val TAG: String = LiveViewModel::class.java.simpleName
        private const val DEBUG = false
    }
}
================================================
FILE: app/src/main/java/com/alexvas/rtsp/demo/live/RawFragment.kt
================================================
package com.alexvas.rtsp.demo.live
import android.annotation.SuppressLint
import android.net.Uri
import android.os.Bundle
import android.util.Log
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.fragment.app.Fragment
import androidx.lifecycle.ViewModelProvider
import com.alexvas.rtsp.RtspClient
import com.alexvas.rtsp.demo.databinding.FragmentRawBinding
import com.alexvas.rtsp.widget.toHexString
import com.alexvas.utils.NetUtils
import kotlinx.coroutines.Runnable
import java.net.Socket
import java.util.Timer
import java.util.TimerTask
import java.util.concurrent.atomic.AtomicBoolean
import kotlin.math.min
/**
 * Demo fragment driving [RtspClient] directly over a raw [Socket]: received RTSP
 * video/audio/application payloads are dumped to logcat (first bytes only) and
 * byte/frame counters are shown in the UI once per second.
 */
@SuppressLint("LogNotTimber")
class RawFragment : Fragment() {

    private lateinit var binding: FragmentRawBinding
    private lateinit var liveViewModel: LiveViewModel

    // Timer periodically pushing the received-bytes statistics into the UI; null when stopped.
    private var statisticsTimer: Timer? = null

    // Shared stop flag: the RtspClient worker polls it; the UI thread flips it on start/stop.
    private val rtspStopped = AtomicBoolean(true)

    // Counters written from the RtspClient worker thread and read on the UI thread.
    // NOTE(review): plain Long fields without synchronization — values shown may lag
    // slightly behind; confirm this is acceptable for a statistics display.
    private var rtspVideoBytesReceived: Long = 0
    private var rtspVideoFramesReceived: Long = 0
    private var rtspAudioBytesReceived: Long = 0
    private var rtspAudioSamplesReceived: Long = 0
    private var rtspApplicationBytesReceived: Long = 0
    private var rtspApplicationSamplesReceived: Long = 0

    // Callbacks arrive on the RTSP worker thread; every UI touch is posted via root.post { }.
    private val rtspClientListener = object: RtspClient.RtspClientListener {

        override fun onRtspConnecting() {
            if (DEBUG) Log.v(TAG, "onRtspConnecting()")
            // Reset all counters for the new session.
            rtspVideoBytesReceived = 0
            rtspVideoFramesReceived = 0
            rtspAudioBytesReceived = 0
            rtspAudioSamplesReceived = 0
            rtspApplicationBytesReceived = 0
            rtspApplicationSamplesReceived = 0
            binding.apply {
                root.post {
                    updateStatistics()
                    // Lock the connection parameters while a session is active.
                    llRtspParams.etRtspRequest.isEnabled = false
                    llRtspParams.etRtspUsername.isEnabled = false
                    llRtspParams.etRtspPassword.isEnabled = false
                    llRtspParams.cbVideo.isEnabled = false
                    llRtspParams.cbAudio.isEnabled = false
                    llRtspParams.cbApplication.isEnabled = false
                    llRtspParams.cbDebug.isEnabled = false
                    tvStatusSurface.text = "RTSP connecting"
                    bnStartStop.text = "Stop RTSP"
                }
            }
        }

        override fun onRtspConnected(sdpInfo: RtspClient.SdpInfo) {
            if (DEBUG) Log.v(TAG, "onRtspConnected()")
            binding.apply {
                root.post {
                    tvStatusSurface.text = "RTSP connected"
                }
            }
            startStatistics()
        }

        override fun onRtspVideoNalUnitReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {
            val numBytesDump = min(length, 25) // dump max 25 bytes
            Log.i(TAG, "RTSP video data ($length bytes): ${data.toHexString(offset, offset + numBytesDump)}")
            rtspVideoBytesReceived += length
            rtspVideoFramesReceived++
        }

        override fun onRtspAudioSampleReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {
            val numBytesDump = min(length, 25) // dump max 25 bytes
            Log.i(TAG, "RTSP audio data ($length bytes): ${data.toHexString(offset, offset + numBytesDump)}")
            rtspAudioBytesReceived += length
            rtspAudioSamplesReceived++
        }

        override fun onRtspApplicationDataReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {
            val numBytesDump = min(length, 25) // dump max 25 bytes
            Log.i(TAG, "RTSP app data ($length bytes): ${data.toHexString(offset, offset + numBytesDump)}")
            rtspApplicationBytesReceived += length
            rtspApplicationSamplesReceived++
        }

        override fun onRtspDisconnecting() {
            if (DEBUG) Log.v(TAG, "onRtspDisconnecting()")
            binding.apply {
                root.post {
                    tvStatusSurface.text = "RTSP disconnecting"
                }
            }
            stopStatistics()
        }

        override fun onRtspDisconnected() {
            if (DEBUG) Log.v(TAG, "onRtspDisconnected()")
            binding.apply {
                root.post {
                    tvStatusSurface.text = "RTSP disconnected"
                    bnStartStop.text = "Start RTSP"
                    // Unlock the connection parameters again.
                    llRtspParams.cbVideo.isEnabled = true
                    llRtspParams.cbAudio.isEnabled = true
                    llRtspParams.cbApplication.isEnabled = true
                    llRtspParams.cbDebug.isEnabled = true
                    llRtspParams.etRtspRequest.isEnabled = true
                    llRtspParams.etRtspUsername.isEnabled = true
                    llRtspParams.etRtspPassword.isEnabled = true
                }
            }
        }

        override fun onRtspFailedUnauthorized() {
            if (DEBUG) Log.e(TAG, "onRtspFailedUnauthorized()")
            Log.e(TAG, "RTSP failed unauthorized")
            // Skip UI updates when the fragment is already detached.
            if (context == null) return
            onRtspDisconnected()
            binding.apply {
                root.post {
                    tvStatusSurface.text = "RTSP username or password invalid"
                }
            }
        }

        override fun onRtspFailed(message: String?) {
            if (DEBUG) Log.e(TAG, "onRtspFailed(message='$message')")
            Log.e(TAG, "RTSP failed with message '$message'")
            // Skip UI updates when the fragment is already detached.
            if (context == null) return
            onRtspDisconnected()
            binding.apply {
                root.post {
                    tvStatusSurface.text = "Error: $message"
                }
            }
        }
    }

    // Worker body: opens a TCP socket to the server, runs the blocking RtspClient.execute()
    // loop until rtspStopped is set or an error occurs, then closes the socket.
    private val threadRunnable = Runnable {
        Log.i(TAG, "Thread started")
        var socket: Socket? = null
        try {
            val uri = Uri.parse(liveViewModel.rtspRequest.value)
            // Fall back to the standard RTSP port when the URL does not specify one.
            val port = if (uri.port == -1) DEFAULT_RTSP_PORT else uri.port
            socket = NetUtils.createSocketAndConnect(uri.host!!, port, 5000)
            val rtspClient =
                RtspClient.Builder(
                    socket,
                    uri.toString(),
                    rtspStopped,
                    rtspClientListener
                )
                    .requestVideo(binding.llRtspParams.cbVideo.isChecked)
                    .requestAudio(binding.llRtspParams.cbAudio.isChecked)
                    .requestApplication(binding.llRtspParams.cbApplication.isChecked)
                    .withDebug(binding.llRtspParams.cbDebug.isChecked)
                    .withUserAgent("rtsp-client-android")
                    .withCredentials(
                        binding.llRtspParams.etRtspUsername.text.toString(),
                        binding.llRtspParams.etRtspPassword.text.toString())
                    .build()
            rtspClient.execute()
        } catch (e: Exception) {
            e.printStackTrace()
            // Report connection/parse errors through the same listener path as RtspClient.
            binding.root.post { rtspClientListener.onRtspFailed(e.message) }
        } finally {
            NetUtils.closeSocket(socket)
        }
        Log.i(TAG, "Thread stopped")
    }

    /**
     * Inflates the raw-data UI, two-way syncs the RTSP parameter EditTexts with the shared
     * [LiveViewModel], and installs the start/stop handler that launches the worker thread.
     */
    override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?, savedInstanceState: Bundle?): View {
        if (DEBUG) Log.v(TAG, "onCreateView()")
        liveViewModel = ViewModelProvider(this)[LiveViewModel::class.java]
        binding = FragmentRawBinding.inflate(inflater, container, false)
        // EditText -> ViewModel direction.
        liveViewModel.initEditTexts(
            binding.llRtspParams.etRtspRequest,
            binding.llRtspParams.etRtspUsername,
            binding.llRtspParams.etRtspPassword
        )
        // ViewModel -> EditText direction; equality guards avoid update loops.
        liveViewModel.rtspRequest.observe(viewLifecycleOwner) {
            if (binding.llRtspParams.etRtspRequest.text.toString() != it)
                binding.llRtspParams.etRtspRequest.setText(it)
        }
        liveViewModel.rtspUsername.observe(viewLifecycleOwner) {
            if (binding.llRtspParams.etRtspUsername.text.toString() != it)
                binding.llRtspParams.etRtspUsername.setText(it)
        }
        liveViewModel.rtspPassword.observe(viewLifecycleOwner) {
            if (binding.llRtspParams.etRtspPassword.text.toString() != it)
                binding.llRtspParams.etRtspPassword.setText(it)
        }
        binding.bnStartStop.setOnClickListener {
            if (DEBUG) Log.v(TAG, "onClick() rtspStopped=${rtspStopped.get()}")
            if (rtspStopped.get()) {
                rtspStopped.set(false)
                Log.i(TAG, "Thread starting...")
                Thread(threadRunnable).apply {
                    name = "RTSP raw thread"
                    start()
                }
            } else {
                // The worker observes the flag and shuts the session down itself.
                Log.i(TAG, "Thread stopping...")
                rtspStopped.set(true)
            }
        }
        return binding.root
    }

    override fun onResume() {
        if (DEBUG) Log.v(TAG, "onResume()")
        super.onResume()
        // Reload persisted RTSP parameters whenever the fragment becomes visible.
        liveViewModel.loadParams(requireContext())
    }

    override fun onPause() {
        if (DEBUG) Log.v(TAG, "onPause()")
        super.onPause()
        // Persist parameters and signal the worker (if any) to stop.
        liveViewModel.saveParams(requireContext())
        stopStatistics()
        rtspStopped.set(true)
    }

    // Renders the current counters into the three statistics TextViews. Must run on the UI thread.
    private fun updateStatistics() {
        // if (DEBUG) Log.v(TAG, "updateStatistics()")
        binding.apply {
            tvStatisticsVideo.text = "Video: $rtspVideoBytesReceived bytes, $rtspVideoFramesReceived frames"
            tvStatisticsAudio.text = "Audio: $rtspAudioBytesReceived bytes, $rtspAudioSamplesReceived samples"
            tvStatisticsApplication.text = "Application: $rtspApplicationBytesReceived bytes, $rtspApplicationSamplesReceived samples"
        }
    }

    // Starts a once-per-second timer posting updateStatistics() to the UI thread; no-op if running.
    private fun startStatistics() {
        if (DEBUG) Log.v(TAG, "startStatistics()")
        Log.i(TAG, "Start statistics")
        if (statisticsTimer == null) {
            val task: TimerTask = object : TimerTask() {
                override fun run() {
                    binding.root.post {
                        updateStatistics()
                    }
                }
            }
            statisticsTimer = Timer("${TAG}::Statistics").apply {
                schedule(task, 0, 1000)
            }
        }
    }

    // Cancels the statistics timer if one is running; safe to call repeatedly.
    private fun stopStatistics() {
        if (DEBUG) Log.v(TAG, "stopStatistics()")
        statisticsTimer?.apply {
            Log.i(TAG, "Stop statistics")
            cancel()
        }
        statisticsTimer = null
    }

    companion object {
        // Logcat tag derived from the class name.
        private val TAG: String = RawFragment::class.java.simpleName
        // Enables the verbose lifecycle/status logging in this demo fragment.
        private const val DEBUG = true
        // Standard RTSP port used when the URL omits one.
        private const val DEFAULT_RTSP_PORT = 554
    }
}
================================================
FILE: app/src/main/res/drawable/ic_camera_black_24dp.xml
================================================
================================================
FILE: app/src/main/res/drawable/ic_cctv_black_24dp.xml
================================================
================================================
FILE: app/src/main/res/drawable/ic_launcher_background.xml
================================================
================================================
FILE: app/src/main/res/drawable/ic_launcher_foreground.xml
================================================
================================================
FILE: app/src/main/res/drawable/ic_text_subject_black_24dp.xml
================================================
================================================
FILE: app/src/main/res/layout/activity_main.xml
================================================
================================================
FILE: app/src/main/res/layout/fragment_live.xml
================================================
================================================
FILE: app/src/main/res/layout/fragment_logs.xml
================================================
================================================
FILE: app/src/main/res/layout/fragment_raw.xml
================================================
================================================
FILE: app/src/main/res/layout/layout_rtsp_params.xml
================================================
================================================
FILE: app/src/main/res/menu/bottom_nav_menu.xml
================================================
================================================
FILE: app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml
================================================
================================================
FILE: app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml
================================================
================================================
FILE: app/src/main/res/navigation/mobile_navigation.xml
================================================
================================================
FILE: app/src/main/res/values/colors.xml
================================================
#40747A
#00BCD4
#03DAC5
================================================
FILE: app/src/main/res/values/dimens.xml
================================================
16dp
16dp
================================================
FILE: app/src/main/res/values/strings.xml
================================================
Rtsp demo
Live
Raw
Logs
================================================
FILE: app/src/main/res/values/styles.xml
================================================
================================================
FILE: build.gradle
================================================
// Root Gradle build script shared by the demo app and the rtsp client library modules.
buildscript {
    // Versions referenced from the module build.gradle files via project.ext.
    ext.kotlin_version = '2.2.21'
    ext.compile_sdk_version = 36
    ext.min_sdk_version = 24
    ext.target_sdk_version = 35
    ext.project_version_code = 564
    ext.project_version_name = '5.6.4'
    repositories {
        google()
        mavenCentral()
    }
    dependencies {
        classpath 'com.android.tools.build:gradle:8.13.2'
        classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
    }
}

allprojects {
    repositories {
        google()
        mavenCentral()
        // jitpack.io resolves artifacts published via JitPack (see module dependencies).
        maven { url 'https://jitpack.io' }
    }
}

// Standard clean task removing the root project's build directory.
tasks.register('clean', Delete) {
    delete rootProject.layout.buildDirectory
}
================================================
FILE: gradle/wrapper/gradle-wrapper.properties
================================================
# Gradle wrapper configuration: the wrapper downloads and caches the distribution below.
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.3-bin.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
================================================
FILE: gradle.properties
================================================
# Maximum heap for the Gradle daemon JVM.
org.gradle.jvmargs=-Xmx1g
# Use AndroidX libraries (required by the androidx.* dependencies in the modules).
android.useAndroidX=true
================================================
FILE: gradlew
================================================
#!/bin/sh
#
# Copyright © 2015-2021 the original authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
#
##############################################################################
#
# Gradle start up script for POSIX generated by Gradle.
#
# Important for running:
#
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
# noncompliant, but you have some other compliant shell such as ksh or
# bash, then to run this script, type that shell name before the whole
# command line, like:
#
# ksh Gradle
#
# Busybox and similar reduced shells will NOT work, because this script
# requires all of these POSIX shell features:
# * functions;
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
# * compound commands having a testable exit status, especially «case»;
# * various built-in commands including «command», «set», and «ulimit».
#
# Important for patching:
#
# (2) This script targets any POSIX shell, so it avoids extensions provided
# by Bash, Ksh, etc; in particular arrays are avoided.
#
# The "traditional" practice of packing multiple parameters into a
# space-separated string is a well documented source of bugs and security
# problems, so this is (mostly) avoided, by progressively accumulating
# options in "$@", and eventually passing that to Java.
#
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
# see the in-line comments for details.
#
# There are tweaks for specific operating systems such as AIX, CygWin,
# Darwin, MinGW, and NonStop.
#
# (3) This script is generated from the Groovy template
# https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# within the Gradle project.
#
# You can find Gradle at https://github.com/gradle/gradle/.
#
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
app_path=$0
# Need this for daisy-chained symlinks.
while
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
[ -h "$app_path" ]
do
ls=$( ls -ld "$app_path" )
link=${ls#*' -> '}
case $link in #(
/*) app_path=$link ;; #(
*) app_path=$APP_HOME$link ;;
esac
done
# This is normally unused
# shellcheck disable=SC2034
APP_BASE_NAME=${0##*/}
# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s\n' "$PWD" ) || exit
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
warn () {
echo "$*"
} >&2
die () {
echo
echo "$*"
echo
exit 1
} >&2
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "$( uname )" in #(
CYGWIN* ) cygwin=true ;; #(
Darwin* ) darwin=true ;; #(
MSYS* | MINGW* ) msys=true ;; #(
NONSTOP* ) nonstop=true ;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD=$JAVA_HOME/jre/sh/java
else
JAVACMD=$JAVA_HOME/bin/java
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD=java
if ! command -v java >/dev/null 2>&1
then
die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
fi
# Increase the maximum file descriptors if we can.
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
# In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC2039,SC3045
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
case $MAX_FD in #(
'' | soft) :;; #(
*)
# In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC2039,SC3045
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
fi
# Collect all arguments for the java command, stacking in reverse order:
# * args from the command line
# * the main class name
# * -classpath
# * -D...appname settings
# * --module-path (only if needed)
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
# For Cygwin or MSYS, switch paths to Windows format before running java
if "$cygwin" || "$msys" ; then
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
JAVACMD=$( cygpath --unix "$JAVACMD" )
# Now convert the arguments - kludge to limit ourselves to /bin/sh
for arg do
if
case $arg in #(
-*) false ;; # don't mess with options #(
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
[ -e "$t" ] ;; #(
*) false ;;
esac
then
arg=$( cygpath --path --ignore --mixed "$arg" )
fi
# Roll the args list around exactly as many times as the number of
# args, so each arg winds up back in the position where it started, but
# possibly modified.
#
# NB: a `for` loop captures its iteration list before it begins, so
# changing the positional parameters here affects neither the number of
# iterations, nor the values presented in `arg`.
shift # remove old arg
set -- "$@" "$arg" # push replacement arg
done
fi
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Collect all arguments for the java command:
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments,
# and any embedded shellness will be escaped.
# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be
# treated as '${Hostname}' itself on the command line.
set -- \
"-Dorg.gradle.appname=$APP_BASE_NAME" \
-classpath "$CLASSPATH" \
org.gradle.wrapper.GradleWrapperMain \
"$@"
# Stop when "xargs" is not available.
if ! command -v xargs >/dev/null 2>&1
then
die "xargs is not available"
fi
# Use "xargs" to parse quoted args.
#
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
#
# In Bash we could simply go:
#
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
# set -- "${ARGS[@]}" "$@"
#
# but POSIX shell has neither arrays nor command substitution, so instead we
# post-process each arg (as a line of input to sed) to backslash-escape any
# character that might be a shell metacharacter, then use eval to reverse
# that process (while maintaining the separation between arguments), and wrap
# the whole thing up as a single "set" statement.
#
# This will of course break if any of these variables contains a newline or
# an unmatched quote.
#
eval "set -- $(
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
xargs -n1 |
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
tr '\n' ' '
)" '"$@"'
exec "$JAVACMD" "$@"
================================================
FILE: gradlew.bat
================================================
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@rem SPDX-License-Identifier: Apache-2.0
@rem
@if "%DEBUG%"=="" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
@rem %~dp0 expands to the drive letter and directory of this script (with trailing backslash).
set DIRNAME=%~dp0
if "%DIRNAME%"=="" set DIRNAME=.
@rem This is normally unused
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
@rem Probe for a working "java" on the PATH; fall through to the error below when absent.
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if %ERRORLEVEL% equ 0 goto execute
echo. 1>&2
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2
echo. 1>&2
echo Please set the JAVA_HOME variable in your environment to match the 1>&2
echo location of your Java installation. 1>&2
goto fail
:findJavaFromJavaHome
@rem Strip any surrounding double quotes from JAVA_HOME before composing the path.
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
echo. 1>&2
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2
echo. 1>&2
echo Please set the JAVA_HOME variable in your environment to match the 1>&2
echo location of your Java installation. 1>&2
goto fail
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
:end
@rem End local scope for the variables with windows NT shell
if %ERRORLEVEL% equ 0 goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
set EXIT_CODE=%ERRORLEVEL%
if %EXIT_CODE% equ 0 set EXIT_CODE=1
if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
exit /b %EXIT_CODE%
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega
================================================
FILE: jitpack.yml
================================================
# JitPack build configuration: build with JDK 17 and publish the RTSP library locally.
jdk:
  - openjdk17
install:
  - ./gradlew build :library-client-rtsp:publishToMavenLocal
================================================
FILE: library-client-rtsp/.gitignore
================================================
# Created by https://www.gitignore.io/api/android,java,intellij
### Android ###
# Built application files
*.apk
*.ap_
# Files for the Dalvik VM
*.dex
# Java class files
*.class
# Generated files
bin/
gen/
# Gradle files
.gradle/
build/
# Local configuration file (sdk path, etc)
local.properties
# Proguard folder generated by Eclipse
proguard/
xactmobile/class_files.txt
xactmobile/mapping.txt
xactmobile/seeds.txt
# Log Files
*.log
# Android Studio Navigation editor temp files
.navigation/
### Android Patch ###
gen-external-apklibs
### Java ###
*.class
# Mobile Tools for Java (J2ME)
.mtj.tmp/
# Package Files #
#*.jar
*.war
*.ear
# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
hs_err_pid*
### Intellij ###
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio
*.iml
## Directory-based project format:
.idea/
# if you remove the above rule, at least ignore the following:
# User-specific stuff:
.idea/workspace.xml
.idea/tasks.xml
.idea/dictionaries
# Sensitive or high-churn files:
.idea/dataSources.ids
.idea/dataSources.xml
.idea/sqlDataSources.xml
.idea/dynamic.xml
.idea/uiDesigner.xml
# Gradle:
.idea/gradle.xml
.idea/libraries
# Mongo Explorer plugin:
.idea/mongoSettings.xml
## File-based project format:
*.ipr
*.iws
## Plugin-specific files:
# IntelliJ
/out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
xactmobile/.DS_Store~64be78fe3602626c61b52bcbfd09e09a6107b50a
xactmobile/.DS_Store~HEAD
oslab-viewpager/._.DS_Store
oslab-viewpager/src/main/.DS_Store
oslab-viewpager/src/main/._.DS_Store
oslab-viewpager/src/main/res/.DS_Store
oslab-viewpager/src/main/res/._.DS_Store
oslab-viewpager/.gitignore
oslab-materialdesign/.DS_Store
oslab-materialdesign/._.DS_Store
oslab-materialdesign/src/.DS_Store
oslab-materialdesign/src/._.DS_Store
oslab-materialdesign/src/main/.DS_Store
oslab-materialdesign/src/main/._.DS_Store
oslab-materialdesign/src/main/res/.DS_Store
oslab-materialdesign/src/main/res/._.DS_Store
================================================
FILE: library-client-rtsp/build.gradle
================================================
plugins {
    id 'com.android.library'
    id 'kotlin-android'
    id 'maven-publish'
}

// FIX: removed redundant "apply plugin: 'com.android.library'" — the plugin is
// already applied in the plugins {} block above; applying it twice is a no-op
// and confuses readers about which mechanism is authoritative.

project.afterEvaluate {
    publishing {
        publications {
            // Publish the release AAR produced by the Android library component.
            release(MavenPublication) {
                from components.release
            }
        }
    }
}

android {
    // SDK versions come from project-level properties (see root build.gradle).
    compileSdkVersion compile_sdk_version

    defaultConfig {
        minSdk min_sdk_version
        targetSdk target_sdk_version
    }

    compileOptions {
        sourceCompatibility JavaVersion.VERSION_17
        targetCompatibility JavaVersion.VERSION_17
    }

    kotlinOptions {
        jvmTarget = JavaVersion.VERSION_17.toString()
    }

    namespace 'com.alexvas.rtsp'
}

dependencies {
    implementation 'androidx.annotation:annotation:1.9.1'
    implementation 'androidx.media3:media3-exoplayer:1.9.3'
    implementation 'androidx.camera:camera-core:1.5.3' // YUV -> BMP conversion
    implementation 'org.jcodec:jcodec:0.2.5' // SPS frame modification
}
================================================
FILE: library-client-rtsp/proguard-rules.txt
================================================
# Proguard rules.
================================================
FILE: library-client-rtsp/src/main/AndroidManifest.xml
================================================
================================================
FILE: library-client-rtsp/src/main/java/com/alexvas/rtsp/RtspClient.java
================================================
package com.alexvas.rtsp;
import android.text.TextUtils;
import android.util.Base64;
import android.util.Log;
import android.util.Pair;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.alexvas.rtsp.parser.AacParser;
import com.alexvas.rtsp.parser.G711Parser;
import com.alexvas.rtsp.parser.AudioParser;
import com.alexvas.rtsp.parser.RtpH264Parser;
import com.alexvas.rtsp.parser.RtpH265Parser;
import com.alexvas.rtsp.parser.RtpHeaderParser;
import com.alexvas.rtsp.parser.RtpParser;
import com.alexvas.utils.NetUtils;
import com.alexvas.utils.VideoCodecUtils;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Serial;
import java.math.BigInteger;
import java.net.Socket;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
//OPTIONS rtsp://10.0.1.145:88/videoSub RTSP/1.0
//CSeq: 1
//User-Agent: Lavf58.29.100
//
//RTSP/1.0 200 OK
//CSeq: 1
//Date: Fri, Jan 03 2020 22:03:07 GMT
//Public: OPTIONS, DESCRIBE, SETUP, TEARDOWN, PLAY, PAUSE, GET_PARAMETER, SET_PARAMETER
//DESCRIBE rtsp://10.0.1.145:88/videoSub RTSP/1.0
//Accept: application/sdp
//CSeq: 2
//User-Agent: Lavf58.29.100
//
//RTSP/1.0 401 Unauthorized
//CSeq: 2
//Date: Fri, Jan 03 2020 22:03:07 GMT
//WWW-Authenticate: Digest realm="Foscam IPCam Living Video", nonce="3c889dbf8371d3660aa2496789a5d130"
//DESCRIBE rtsp://10.0.1.145:88/videoSub RTSP/1.0
//Accept: application/sdp
//CSeq: 3
//User-Agent: Lavf58.29.100
//Authorization: Digest username="admin", realm="Foscam IPCam Living Video", nonce="3c889dbf8371d3660aa2496789a5d130", uri="rtsp://10.0.1.145:88/videoSub", response="4f062baec1c813ae3db15e3a14111d3d"
//
//RTSP/1.0 200 OK
//CSeq: 3
//Date: Fri, Jan 03 2020 22:03:07 GMT
//Content-Base: rtsp://10.0.1.145:65534/videoSub/
//Content-Type: application/sdp
//Content-Length: 495
//
//v=0
//o=- 1578088972261172 1 IN IP4 10.0.1.145
//s=IP Camera Video
//i=videoSub
//t=0 0
//a=tool:LIVE555 Streaming Media v2014.02.10
//a=type:broadcast
//a=control:*
//a=range:npt=0-
//a=x-qt-text-nam:IP Camera Video
//a=x-qt-text-inf:videoSub
//m=video 0 RTP/AVP 96
//c=IN IP4 0.0.0.0
//b=AS:96
//a=rtpmap:96 H264/90000
//a=fmtp:96 packetization-mode=1;profile-level-id=420020;sprop-parameter-sets=Z0IAIJWoFAHmQA==,aM48gA==
//a=control:track1
//m=audio 0 RTP/AVP 0
//c=IN IP4 0.0.0.0
//b=AS:64
//a=control:track2
//SETUP rtsp://10.0.1.145:65534/videoSub/track1 RTSP/1.0
//Transport: RTP/AVP/UDP;unicast;client_port=27452-27453
//CSeq: 4
//User-Agent: Lavf58.29.100
//Authorization: Digest username="admin", realm="Foscam IPCam Living Video", nonce="3c889dbf8371d3660aa2496789a5d130", uri="rtsp://10.0.1.145:65534/videoSub/track1", response="1fbc50b24d582c9331dd5e89f3102a06"
//
//RTSP/1.0 200 OK
//CSeq: 4
//Date: Fri, Jan 03 2020 22:03:07 GMT
//Transport: RTP/AVP;unicast;destination=10.0.1.53;source=10.0.1.145;client_port=27452-27453;server_port=6972-6973
//Session: 1F91B1B6;timeout=65
//SETUP rtsp://10.0.1.145:65534/videoSub/track2 RTSP/1.0
//Transport: RTP/AVP/UDP;unicast;client_port=27454-27455
//CSeq: 5
//User-Agent: Lavf58.29.100
//Session: 1F91B1B6
//Authorization: Digest username="admin", realm="Foscam IPCam Living Video", nonce="3c889dbf8371d3660aa2496789a5d130", uri="rtsp://10.0.1.145:65534/videoSub/track2", response="ad779abe070c096eff1012e7c70c986a"
//
//RTSP/1.0 200 OK
//CSeq: 5
//Date: Fri, Jan 03 2020 22:03:07 GMT
//Transport: RTP/AVP;unicast;destination=10.0.1.53;source=10.0.1.145;client_port=27454-27455;server_port=6974-6975
//Session: 1F91B1B6;timeout=65
//PLAY rtsp://10.0.1.145:65534/videoSub/ RTSP/1.0
//Range: npt=0.000-
//CSeq: 6
//User-Agent: Lavf58.29.100
//Session: 1F91B1B6
//Authorization: Digest username="admin", realm="Foscam IPCam Living Video", nonce="3c889dbf8371d3660aa2496789a5d130", uri="rtsp://10.0.1.145:65534/videoSub/", response="bb52eb6938dd4e50c4fac50363ffded0"
//
//RTSP/1.0 200 OK
//CSeq: 6
//Date: Fri, Jan 03 2020 22:03:07 GMT
//Range: npt=0.000-
//Session: 1F91B1B6
//RTP-Info: url=rtsp://10.0.1.145:65534/videoSub/track1;seq=42731;rtptime=2690581590,url=rtsp://10.0.1.145:65534/videoSub/track2;seq=34051;rtptime=3328043318
// https://www.ietf.org/rfc/rfc2326.txt
public class RtspClient {
private static final String TAG = RtspClient.class.getSimpleName();
static final String TAG_DEBUG = TAG + " DBG";
private static final boolean DEBUG = false;
private static final byte[] EMPTY_ARRAY = new byte[0];
public final static int RTSP_CAPABILITY_NONE = 0;
public final static int RTSP_CAPABILITY_OPTIONS = 1 << 1;
public final static int RTSP_CAPABILITY_DESCRIBE = 1 << 2;
public final static int RTSP_CAPABILITY_ANNOUNCE = 1 << 3;
public final static int RTSP_CAPABILITY_SETUP = 1 << 4;
public final static int RTSP_CAPABILITY_PLAY = 1 << 5;
public final static int RTSP_CAPABILITY_RECORD = 1 << 6;
public final static int RTSP_CAPABILITY_PAUSE = 1 << 7;
public final static int RTSP_CAPABILITY_TEARDOWN = 1 << 8;
public final static int RTSP_CAPABILITY_SET_PARAMETER = 1 << 9;
public final static int RTSP_CAPABILITY_GET_PARAMETER = 1 << 10;
public final static int RTSP_CAPABILITY_REDIRECT = 1 << 11;
/**
 * Tests whether a given RTSP capability bit is present in a capabilities bitmask.
 *
 * @param capability       one of the RTSP_CAPABILITY_* flags
 * @param capabilitiesMask bitmask of capabilities reported by the server
 * @return true when the capability bit is set in the mask
 */
public static boolean hasCapability(int capability, int capabilitiesMask) {
    final int common = capabilitiesMask & capability;
    return common != 0;
}
/**
 * Callbacks reporting RTSP connection lifecycle events and received media data.
 * All methods are invoked from the thread running {@link #execute()}.
 */
public interface RtspClientListener {
/** Called before the RTSP handshake (OPTIONS/DESCRIBE/SETUP/PLAY) starts. */
void onRtspConnecting();
/** Called after PLAY succeeded; sdpInfo describes the negotiated tracks. */
void onRtspConnected(@NonNull SdpInfo sdpInfo);
/** Delivers one video NAL unit (possibly a combined SPS+PPS+IDR access unit). */
void onRtspVideoNalUnitReceived(@NonNull byte[] data, int offset, int length, long timestamp);
/** Delivers one decoded-from-RTP audio sample (e.g. an AAC frame). */
void onRtspAudioSampleReceived(@NonNull byte[] data, int offset, int length, long timestamp);
/** Delivers raw payload of the application (metadata) track, unparsed. */
void onRtspApplicationDataReceived(@NonNull byte[] data, int offset, int length, long timestamp);
/** Called when the client starts shutting the session down. */
void onRtspDisconnecting();
/** Called after the session has been torn down. */
void onRtspDisconnected();
/** Called when the server rejects the provided credentials (401). */
void onRtspFailedUnauthorized();
/** Called on any other failure; message may be null. */
void onRtspFailed(@Nullable String message);
}
/**
 * Internal callback fired when a keep-alive request (GET_PARAMETER or OPTIONS)
 * should be sent to prevent the server from timing out the session.
 */
private interface RtspClientKeepAliveListener {
void onRtspKeepAliveRequested();
}
/**
 * Session parameters parsed from the SDP returned by the DESCRIBE command.
 */
public static class SdpInfo {
/**
* Session name (RFC 2327). In most cases RTSP server name.
*/
public @Nullable String sessionName;
/**
* Session description (RFC 2327).
*/
public @Nullable String sessionDescription;
// Tracks are null when absent from the SDP or not requested by the client.
public @Nullable VideoTrack videoTrack;
public @Nullable AudioTrack audioTrack;
public @Nullable ApplicationTrack applicationTrack;
}
/**
 * Base class for a single SDP media track (video, audio or application).
 */
public abstract static class Track {
// Track control URI from "a=control:..." — absolute, or relative to the base RTSP URI.
public String request;
// RTP payload type from the SDP "m=..." line (e.g. 96).
public int payloadType;
@NonNull
@Override
public String toString() {
return "Track{request='" + request + "', payloadType=" + payloadType + '}';
}
}
public static final int VIDEO_CODEC_H264 = 0;
public static final int VIDEO_CODEC_H265 = 1;
/** Video track parameters parsed from SDP. */
public static class VideoTrack extends Track {
// VIDEO_CODEC_H264 or VIDEO_CODEC_H265.
public int videoCodec = VIDEO_CODEC_H264;
public @Nullable byte[] sps; // Both H.264 and H.265
public @Nullable byte[] pps; // Both H.264 and H.265
public @Nullable byte[] vps; // H.265 only
}
public static final int AUDIO_CODEC_UNKNOWN = -1;
public static final int AUDIO_CODEC_AAC = 0;
public static final int AUDIO_CODEC_OPUS = 1;
public static final int AUDIO_CODEC_G711_ULAW = 2;
public static final int AUDIO_CODEC_G711_ALAW = 3;
/**
 * Maps an AUDIO_CODEC_* constant to a human-readable codec name.
 *
 * @param codec one of the AUDIO_CODEC_* constants
 * @return codec display name, or "Unknown" for an unrecognized value
 */
@NonNull
private static String getAudioCodecName(int codec) {
    switch (codec) {
        case AUDIO_CODEC_AAC:
            return "AAC";
        case AUDIO_CODEC_OPUS:
            return "Opus";
        case AUDIO_CODEC_G711_ULAW:
            return "G.711 uLaw";
        case AUDIO_CODEC_G711_ALAW:
            return "G.711 aLaw";
        default:
            return "Unknown";
    }
}
/** Audio track parameters parsed from SDP. */
public static class AudioTrack extends Track {
// One of the AUDIO_CODEC_* constants; AUDIO_CODEC_UNKNOWN if unrecognized.
public int audioCodec = AUDIO_CODEC_UNKNOWN;
public int sampleRateHz; // 16000, 8000
public int channels; // 1 - mono, 2 - stereo
public String mode; // AAC-lbr, AAC-hbr
public @Nullable byte[] config; // config=1210fff15081ffdffc
}
/** Application (metadata) track; its payload is passed through to the listener unparsed. */
public static class ApplicationTrack extends Track {
}
private static final String CRLF = "\r\n";
// Size of buffer for reading from the connection
private final static int MAX_LINE_SIZE = 4098;
/**
 * Thrown when the server responds 401 Unauthorized even after credentials
 * were supplied (or none were available).
 */
private static class UnauthorizedException extends IOException {
    // FIX: added serialVersionUID for consistency with NoResponseHeadersException
    // below — IOException is Serializable, so all subclasses should declare it.
    @Serial
    private static final long serialVersionUID = 1L;

    UnauthorizedException() {
        super("Unauthorized");
    }
}
/**
 * Thrown when an RTSP response contains no headers at all.
 */
private final static class NoResponseHeadersException extends IOException {
@Serial
private static final long serialVersionUID = 1L;
}
private final @NonNull Socket rtspSocket;
private @NonNull String uriRtsp;
private final @NonNull AtomicBoolean exitFlag;
private final @NonNull RtspClientListener listener;
// private boolean sendOptionsCommand;
private final boolean requestVideo;
private final boolean requestAudio;
private final boolean requestApplication;
private final boolean debug;
private final @Nullable String username;
private final @Nullable String password;
private final @Nullable String userAgent;
/**
 * Copies all configuration from the builder. Instances are created via the
 * Builder only; the constructor itself performs no I/O.
 */
private RtspClient(@NonNull RtspClient.Builder builder) {
rtspSocket = builder.rtspSocket;
uriRtsp = builder.uriRtsp;
exitFlag = builder.exitFlag;
listener = builder.listener;
// sendOptionsCommand = builder.sendOptionsCommand;
requestVideo = builder.requestVideo;
requestAudio = builder.requestAudio;
requestApplication = builder.requestApplication;
username = builder.username;
password = builder.password;
debug = builder.debug;
userAgent = builder.userAgent;
}
public void execute() {
if (DEBUG) Log.v(TAG, "execute()");
listener.onRtspConnecting();
try {
final InputStream inputStream = rtspSocket.getInputStream();
final OutputStream outputStream = debug ?
new LoggerOutputStream(rtspSocket.getOutputStream()) :
new BufferedOutputStream(rtspSocket.getOutputStream());
SdpInfo sdpInfo = new SdpInfo();
final AtomicInteger cSeq = new AtomicInteger(0);
ArrayList> headers;
int status;
String authToken = null;
Pair digestRealmNonce = null;
// OPTIONS rtsp://10.0.1.78:8080/video/h264 RTSP/1.0
// CSeq: 1
// User-Agent: Lavf58.29.100
// RTSP/1.0 200 OK
// CSeq: 1
// Public: OPTIONS, DESCRIBE, SETUP, PLAY, GET_PARAMETER, SET_PARAMETER, TEARDOWN
// if (sendOptionsCommand) {
checkExitFlag(exitFlag);
sendOptionsCommand(outputStream, uriRtsp, cSeq.addAndGet(1), userAgent, null);
status = readResponseStatusCode(inputStream);
headers = readResponseHeaders(inputStream);
dumpHeaders(headers);
// Try once again with credentials
if (status == 401) {
digestRealmNonce = getHeaderWwwAuthenticateDigestRealmAndNonce(headers);
if (digestRealmNonce == null) {
String basicRealm = getHeaderWwwAuthenticateBasicRealm(headers);
if (TextUtils.isEmpty(basicRealm)) {
throw new IOException("Unknown authentication type");
}
// Basic auth
authToken = getBasicAuthHeader(username, password);
} else {
// Digest auth
authToken = getDigestAuthHeader(username, password, "OPTIONS", uriRtsp, digestRealmNonce.first, digestRealmNonce.second);
}
checkExitFlag(exitFlag);
sendOptionsCommand(outputStream, uriRtsp, cSeq.addAndGet(1), userAgent, authToken);
status = readResponseStatusCode(inputStream);
headers = readResponseHeaders(inputStream);
dumpHeaders(headers);
}
if (DEBUG)
Log.i(TAG, "OPTIONS status: " + status);
checkStatusCode(status);
final int capabilities = getSupportedCapabilities(headers);
// DESCRIBE rtsp://10.0.1.78:8080/video/h264 RTSP/1.0
// Accept: application/sdp
// CSeq: 2
// User-Agent: Lavf58.29.100
// RTSP/1.0 200 OK
// CSeq: 2
// Content-Type: application/sdp
// Content-Length: 364
//
// v=0
// t=0 0
// a=range:npt=now-
// m=video 0 RTP/AVP 96
// a=rtpmap:96 H264/90000
// a=fmtp:96 packetization-mode=1;sprop-parameter-sets=Z0KAH9oBABhpSCgwMDaFCag=,aM4G4g==
// a=control:trackID=1
// m=audio 0 RTP/AVP 96
// a=rtpmap:96 mpeg4-generic/48000/1
// a=fmtp:96 profile-level-id=1;mode=AAC-hbr;sizelength=13;indexlength=3;indexdeltalength=3;config=1188
// a=control:trackID=2
checkExitFlag(exitFlag);
if (digestRealmNonce != null) {
authToken = getDigestAuthHeader(username, password, "DESCRIBE", uriRtsp, digestRealmNonce.first, digestRealmNonce.second);
}
sendDescribeCommand(outputStream, uriRtsp, cSeq.addAndGet(1), userAgent, authToken);
status = readResponseStatusCode(inputStream);
headers = readResponseHeaders(inputStream);
dumpHeaders(headers);
// Try once again with credentials. OPTIONS command can be accepted without authentication.
if (status == 401) {
digestRealmNonce = getHeaderWwwAuthenticateDigestRealmAndNonce(headers);
if (digestRealmNonce == null) {
String basicRealm = getHeaderWwwAuthenticateBasicRealm(headers);
if (TextUtils.isEmpty(basicRealm)) {
throw new IOException("Unknown authentication type");
}
// Basic auth
authToken = getBasicAuthHeader(username, password);
} else {
// Digest auth
authToken = getDigestAuthHeader(username, password, "DESCRIBE", uriRtsp, digestRealmNonce.first, digestRealmNonce.second);
}
checkExitFlag(exitFlag);
sendDescribeCommand(outputStream, uriRtsp, cSeq.addAndGet(1), userAgent, authToken);
status = readResponseStatusCode(inputStream);
headers = readResponseHeaders(inputStream);
dumpHeaders(headers);
}
if (DEBUG)
Log.i(TAG, "DESCRIBE status: " + status);
checkStatusCode(status);
String contentBaseUri = getHeaderContentBase(headers);
if (contentBaseUri != null) {
if (debug)
Log.i(TAG_DEBUG, "RTSP URI changed to '" + uriRtsp + "'");
uriRtsp = contentBaseUri;
}
int contentLength = getHeaderContentLength(headers);
if (contentLength > 0) {
String content = readContentAsText(inputStream, contentLength);
if (debug)
Log.i(TAG_DEBUG, "" + content);
try {
List> params = getDescribeParams(content);
sdpInfo = getSdpInfoFromDescribeParams(params);
if (!requestVideo)
sdpInfo.videoTrack = null;
if (!requestAudio)
sdpInfo.audioTrack = null;
if (!requestApplication)
sdpInfo.applicationTrack = null;
// Only AAC supported
if (sdpInfo.audioTrack != null && sdpInfo.audioTrack.audioCodec == AUDIO_CODEC_UNKNOWN) {
Log.e(TAG_DEBUG, "Unknown RTSP audio codec (" + sdpInfo.audioTrack.audioCodec + ") specified in SDP");
sdpInfo.audioTrack = null;
}
} catch (Exception e) {
e.printStackTrace();
}
}
// SETUP rtsp://10.0.1.78:8080/video/h264/trackID=1 RTSP/1.0
// Transport: RTP/AVP/TCP;unicast;interleaved=0-1
// CSeq: 3
// User-Agent: Lavf58.29.100
// RTSP/1.0 200 OK
// CSeq: 3
// Transport: RTP/AVP/TCP;unicast;interleaved=0-1
// Session: Mzk5MzY2MzUwMTg3NTc2Mzc5NQ;timeout=30
String session = null;
int sessionTimeout = 0;
for (int i = 0; i < 3; i++) {
// 0 - video track, 1 - audio track, 2 - application track
checkExitFlag(exitFlag);
Track track;
switch (i) {
case 0 -> track = requestVideo ? sdpInfo.videoTrack : null;
case 1 -> track = requestAudio ? sdpInfo.audioTrack : null;
default -> track = requestApplication ? sdpInfo.applicationTrack : null;
}
if (track != null) {
String uriRtspSetup = getUriForSetup(uriRtsp, track);
if (uriRtspSetup == null) {
Log.e(TAG, "Failed to get RTSP URI for SETUP");
continue;
}
if (digestRealmNonce != null)
authToken = getDigestAuthHeader(
username,
password,
"SETUP",
uriRtspSetup,
digestRealmNonce.first,
digestRealmNonce.second);
sendSetupCommand(
outputStream,
uriRtspSetup,
cSeq.addAndGet(1),
userAgent,
authToken,
session,
(i == 0 ? "0-1" /*video*/ : "2-3" /*audio*/));
status = readResponseStatusCode(inputStream);
if (DEBUG)
Log.i(TAG, "SETUP status: " + status);
checkStatusCode(status);
headers = readResponseHeaders(inputStream);
dumpHeaders(headers);
session = getHeader(headers, "Session");
if (!TextUtils.isEmpty(session)) {
// ODgyODg3MjQ1MDczODk3NDk4Nw;timeout=30
String[] params = TextUtils.split(session, ";");
session = params[0];
// Getting session timeout
if (params.length > 1) {
params = TextUtils.split(params[1], "=");
if (params.length > 1) {
try {
sessionTimeout = Integer.parseInt(params[1]);
} catch (NumberFormatException e) {
Log.e(TAG, "Failed to parse RTSP session timeout");
}
}
}
}
if (DEBUG)
Log.d(TAG, "SETUP session: " + session + ", timeout: " + sessionTimeout);
if (TextUtils.isEmpty(session))
throw new IOException("Failed to get RTSP session");
}
}
if (TextUtils.isEmpty(session))
throw new IOException("Failed to get any media track");
// PLAY rtsp://10.0.1.78:8080/video/h264 RTSP/1.0
// Range: npt=0.000-
// CSeq: 5
// User-Agent: Lavf58.29.100
// Session: Mzk5MzY2MzUwMTg3NTc2Mzc5NQ
// RTSP/1.0 200 OK
// CSeq: 5
// RTP-Info: url=/video/h264;seq=56
// Session: Mzk5MzY2MzUwMTg3NTc2Mzc5NQ;timeout=30
checkExitFlag(exitFlag);
if (digestRealmNonce != null)
authToken = getDigestAuthHeader(username, password, "PLAY", uriRtsp /*?*/, digestRealmNonce.first, digestRealmNonce.second);
sendPlayCommand(outputStream, uriRtsp, cSeq.addAndGet(1), userAgent, authToken, session);
status = readResponseStatusCode(inputStream);
if (DEBUG)
Log.i(TAG, "PLAY status: " + status);
checkStatusCode(status);
headers = readResponseHeaders(inputStream);
dumpHeaders(headers);
listener.onRtspConnected(sdpInfo);
if (sdpInfo.videoTrack != null || sdpInfo.audioTrack != null || sdpInfo.applicationTrack != null) {
if (digestRealmNonce != null)
authToken = getDigestAuthHeader(username, password, hasCapability(RTSP_CAPABILITY_GET_PARAMETER, capabilities) ? "GET_PARAMETER" : "OPTIONS", uriRtsp, digestRealmNonce.first, digestRealmNonce.second);
final String authTokenFinal = authToken;
final String sessionFinal = session;
RtspClientKeepAliveListener keepAliveListener = () -> {
try {
//GET_PARAMETER rtsp://10.0.1.155:554/cam/realmonitor?channel=1&subtype=1/ RTSP/1.0
//CSeq: 6
//User-Agent: Lavf58.45.100
//Session: 4066342621205
//Authorization: Digest username="admin", realm="Login to cam", nonce="8fb58500489d60f99a40b43f3c8574ef", uri="rtsp://10.0.1.155:554/cam/realmonitor?channel=1&subtype=1/", response="692a26124a1ee9562135785ace33a23b"
//RTSP/1.0 200 OK
//CSeq: 6
//Session: 4066342621205
if (debug)
Log.d(TAG_DEBUG, "Sending keep-alive");
if (hasCapability(RTSP_CAPABILITY_GET_PARAMETER, capabilities))
sendGetParameterCommand(outputStream, uriRtsp, cSeq.addAndGet(1), userAgent, sessionFinal, authTokenFinal);
else
sendOptionsCommand(outputStream, uriRtsp, cSeq.addAndGet(1), userAgent, authTokenFinal);
// Do not read response right now, since it may contain unread RTP frames.
// RtpHeader.searchForNextRtpHeader will handle that.
} catch (IOException e) {
e.printStackTrace();
}
};
// Blocking call unless exitFlag set to true, thread.interrupt() called or connection closed.
try {
readRtpData(
inputStream,
sdpInfo,
exitFlag,
listener,
sessionTimeout / 2 * 1000,
keepAliveListener);
} finally {
// Cleanup resources on server side
if (hasCapability(RTSP_CAPABILITY_TEARDOWN, capabilities)) {
if (digestRealmNonce != null)
authToken = getDigestAuthHeader(username, password, "TEARDOWN", uriRtsp, digestRealmNonce.first, digestRealmNonce.second);
sendTeardownCommand(outputStream, uriRtsp, cSeq.addAndGet(1), userAgent, authToken, sessionFinal);
}
}
} else {
listener.onRtspFailed("No tracks found. RTSP server issue.");
}
listener.onRtspDisconnecting();
listener.onRtspDisconnected();
} catch (UnauthorizedException e) {
e.printStackTrace();
listener.onRtspFailedUnauthorized();
} catch (InterruptedException e) {
// Thread interrupted. Expected behavior.
listener.onRtspDisconnecting();
listener.onRtspDisconnected();
} catch (Exception e) {
e.printStackTrace();
listener.onRtspFailed(e.getMessage());
}
try {
rtspSocket.close();
} catch (IOException e) {
e.printStackTrace();
}
}
/**
 * Builds the URI used in a SETUP request for the given track. An absolute
 * "a=control:" URI is used as-is; a relative one is appended to the base
 * RTSP URI (inserting a "/" separator when neither side provides one).
 * When the track has no control URI at all, the base URI itself is used.
 *
 * @param uriRtsp base RTSP URI (possibly updated from Content-Base)
 * @param track   track to set up; may be null
 * @return trimmed SETUP URI, or null when track is null
 */
@Nullable
private static String getUriForSetup(@NonNull String uriRtsp, @Nullable Track track) {
    if (DEBUG) Log.v(TAG, "getUriForSetup(uriRtsp='" + uriRtsp + "', track=" + track + ")");
    if (track == null)
        return null;
    if (track.request == null) {
        // a=control:trackID=1 is missed
        Log.w(TAG, "Track request is empty. Skipping it.");
        track.request = uriRtsp;
    }
    // Absolute control URI — take it verbatim.
    if (track.request.startsWith("rtsp://") || track.request.startsWith("rtsps://")) {
        return track.request.trim();
    }
    // Relative control URI — concatenate with the base, adding "/" when needed.
    if (!track.request.startsWith("/") && !uriRtsp.endsWith("/")) {
        track.request = "/" + track.request;
    }
    final String combined = uriRtsp + track.request;
    return combined.trim();
}
/**
 * Cooperative cancellation point: throws when the owner has requested the
 * client to stop, so the RTSP handshake aborts between commands.
 *
 * @param exitFlag flag raised by the client owner to request termination
 * @throws InterruptedException when the flag is set
 */
private static void checkExitFlag(@NonNull AtomicBoolean exitFlag) throws InterruptedException {
    if (!exitFlag.get())
        return;
    throw new InterruptedException();
}
/**
 * Validates an RTSP response status code.
 *
 * @param code RTSP status code from the response line
 * @throws UnauthorizedException on 401
 * @throws IOException           on any other non-200 code
 */
private static void checkStatusCode(int code) throws IOException {
    if (code == 200)
        return;
    if (code == 401)
        throw new UnauthorizedException();
    throw new IOException("Invalid status code " + code);
}
/**
 * Blocking loop reading interleaved RTP packets from the RTSP TCP stream and
 * dispatching parsed video NAL units / audio samples / application payloads to
 * the listener. Returns when exitFlag is raised; throws IOException when the
 * connection drops. Also triggers periodic keep-alive callbacks.
 *
 * @param keepAliveTimeout interval in msec between keep-alive requests; values <= 0 disable them
 */
private static void readRtpData(
@NonNull InputStream inputStream,
@NonNull SdpInfo sdpInfo,
@NonNull AtomicBoolean exitFlag,
@NonNull RtspClientListener listener,
int keepAliveTimeout,
@NonNull RtspClientKeepAliveListener keepAliveListener)
throws IOException {
byte[] data = EMPTY_ARRAY; // Usually not bigger than MTU = 15KB
// Select the video depacketizer from the codec declared in SDP (H.265 vs H.264).
final RtpParser videoParser = (sdpInfo.videoTrack != null && sdpInfo.videoTrack.videoCodec == VIDEO_CODEC_H265 ?
new RtpH265Parser() :
new RtpH264Parser());
// Audio parser depends on the codec; null when audio is absent or unsupported.
final AudioParser audioParser = sdpInfo.audioTrack != null
? switch (sdpInfo.audioTrack.audioCodec) {
case AUDIO_CODEC_AAC -> new AacParser(sdpInfo.audioTrack.mode);
case AUDIO_CODEC_G711_ULAW,
AUDIO_CODEC_G711_ALAW -> new G711Parser();
default -> null;
}
: null;
// SPS/PPS from SDP are held here so they can be prepended to the first IDR frame.
byte[] nalUnitSps = (sdpInfo.videoTrack != null ? sdpInfo.videoTrack.sps : null);
byte[] nalUnitPps = (sdpInfo.videoTrack != null ? sdpInfo.videoTrack.pps : null);
byte[] nalUnitSei = EMPTY_ARRAY;
byte[] nalUnitAud = EMPTY_ARRAY;
int videoSeqNum = 0;
long keepAliveSent = System.currentTimeMillis();
while (!exitFlag.get()) {
RtpHeaderParser.RtpHeader header = RtpHeaderParser.readHeader(inputStream);
if (header == null) {
continue;
// throw new IOException("No RTP frames found");
}
// header.dumpHeader();
// Grow the reusable payload buffer only when the packet does not fit.
if (header.payloadSize > data.length)
data = new byte[header.payloadSize];
NetUtils.readData(inputStream, data, 0, header.payloadSize);
// Check if keep-alive should be sent
long l = System.currentTimeMillis();
if (keepAliveTimeout > 0 && l - keepAliveSent > keepAliveTimeout) {
keepAliveSent = l;
keepAliveListener.onRtspKeepAliveRequested();
}
// Video
if (sdpInfo.videoTrack != null && header.payloadType == sdpInfo.videoTrack.payloadType) {
// NOTE(review): 16-bit RTP sequence numbers wrap at 65535, so this warning
// also fires once per wrap-around — confirm that is acceptable.
if (videoSeqNum > header.sequenceNumber)
Log.w(TAG, "Invalid video seq num " + videoSeqNum + "/" + header.sequenceNumber);
videoSeqNum = header.sequenceNumber;
byte[] nalUnit;
// If extension bit set in header, skip extension data
if (header.extension == 1) {
// RTP extension header: 2 bytes profile + 2 bytes length in 32-bit words.
int skipBytes = ((data[2] & 0xFF) << 8 | (data[3] & 0xFF)) * 4 + 4;
nalUnit = videoParser.processRtpPacketAndGetNalUnit(Arrays.copyOfRange(data, skipBytes, data.length),
header.payloadSize - skipBytes, header.marker == 1);
} else {
nalUnit = videoParser.processRtpPacketAndGetNalUnit(data, header.payloadSize, header.marker == 1);
}
if (nalUnit != null) {
boolean isH265 = sdpInfo.videoTrack.videoCodec == VIDEO_CODEC_H265;
byte type = VideoCodecUtils.INSTANCE.getNalUnitType(nalUnit, 0, nalUnit.length, isH265);
// Log.i(TAG, "NAL u: " + VideoCodecUtils.INSTANCE.getH265NalUnitTypeString(type));
switch (type) {
case VideoCodecUtils.NAL_SPS:
nalUnitSps = nalUnit;
// Looks like there is NAL_IDR_SLICE as well. Send it now.
if (nalUnit.length > VideoCodecUtils.MAX_NAL_SPS_SIZE)
listener.onRtspVideoNalUnitReceived(nalUnit, 0, nalUnit.length, header.getTimestampMsec());
break;
case VideoCodecUtils.NAL_PPS:
nalUnitPps = nalUnit;
// Looks like there is NAL_IDR_SLICE as well. Send it now.
if (nalUnit.length > VideoCodecUtils.MAX_NAL_SPS_SIZE)
listener.onRtspVideoNalUnitReceived(nalUnit, 0, nalUnit.length, header.getTimestampMsec());
break;
case VideoCodecUtils.NAL_AUD:
nalUnitAud = nalUnit;
break;
case VideoCodecUtils.NAL_SEI:
nalUnitSei = nalUnit;
break;
case VideoCodecUtils.NAL_IDR_SLICE:
// Combine IDR with SPS/PPS
if (nalUnitSps != null && nalUnitPps != null) {
// Emit SPS+PPS+AUD+SEI+IDR as one contiguous buffer.
byte[] nalUnitSpsPpsIdr = new byte[nalUnitAud.length + nalUnitSps.length + nalUnitPps.length + nalUnitSei.length + nalUnit.length];
int offset = 0;
System.arraycopy(nalUnitSps, 0, nalUnitSpsPpsIdr, offset, nalUnitSps.length);
offset += nalUnitSps.length;
System.arraycopy(nalUnitPps, 0, nalUnitSpsPpsIdr, offset, nalUnitPps.length);
offset += nalUnitPps.length;
System.arraycopy(nalUnitAud, 0, nalUnitSpsPpsIdr, offset, nalUnitAud.length);
offset += nalUnitAud.length;
System.arraycopy(nalUnitSei, 0, nalUnitSpsPpsIdr, offset, nalUnitSei.length);
offset += nalUnitSei.length;
System.arraycopy(nalUnit, 0, nalUnitSpsPpsIdr, offset, nalUnit.length);
listener.onRtspVideoNalUnitReceived(nalUnitSpsPpsIdr, 0, nalUnitSpsPpsIdr.length, header.getTimestampMsec());
// listener.onRtspVideoNalUnitReceived(nalUnitSppPpsIdr, 0, nalUnitSppPpsIdr.length, System.currentTimeMillis());
// Send it only once
nalUnitSps = null;
nalUnitPps = null;
nalUnitSei = EMPTY_ARRAY;
nalUnitAud = EMPTY_ARRAY;
break;
}
// Deliberate fall-through to default when SPS/PPS were already sent (null).
default:
// Non-IDR slice: prepend any pending AUD/SEI units.
if (nalUnitSei.length == 0 && nalUnitAud.length == 0) {
listener.onRtspVideoNalUnitReceived(nalUnit, 0, nalUnit.length, header.getTimestampMsec());
} else {
byte[] nalUnitAudSeiSlice = new byte[nalUnitAud.length + nalUnitSei.length + nalUnit.length];
int offset = 0;
System.arraycopy(nalUnitAud, 0, nalUnitAudSeiSlice, offset, nalUnitAud.length);
offset += nalUnitAud.length;
System.arraycopy(nalUnitSei, 0, nalUnitAudSeiSlice, offset, nalUnitSei.length);
offset += nalUnitSei.length;
System.arraycopy(nalUnit, 0, nalUnitAudSeiSlice, offset, nalUnit.length);
listener.onRtspVideoNalUnitReceived(nalUnitAudSeiSlice, 0, nalUnitAudSeiSlice.length, header.getTimestampMsec());
nalUnitSei = EMPTY_ARRAY;
nalUnitAud = EMPTY_ARRAY;
}
}
}
// Audio
} else if (sdpInfo.audioTrack != null && header.payloadType == sdpInfo.audioTrack.payloadType) {
if (audioParser != null) {
byte[] sample = audioParser.processRtpPacketAndGetSample(data, header.payloadSize);
if (sample != null)
listener.onRtspAudioSampleReceived(sample, 0, sample.length, header.getTimestampMsec());
}
// Application
} else if (sdpInfo.applicationTrack != null && header.payloadType == sdpInfo.applicationTrack.payloadType) {
listener.onRtspApplicationDataReceived(data, 0, header.payloadSize, header.getTimestampMsec());
// Unknown
} else {
// https://www.iana.org/assignments/rtp-parameters/rtp-parameters.xhtml
if (DEBUG && header.payloadType >= 96 && header.payloadType <= 127)
Log.w(TAG, "Invalid RTP payload type " + header.payloadType);
}
}
}
/**
 * Writes a bodyless RTSP request with the given method and flushes it.
 * Optional headers (Authorization, User-Agent, Session) are emitted only
 * when the corresponding argument is non-null.
 *
 * @param command   RTSP method name, e.g. "OPTIONS", "GET_PARAMETER", "TEARDOWN"
 * @param request   request URI
 * @param cSeq      CSeq sequence number of this request
 * @param userAgent User-Agent header value, or null to omit
 * @param session   Session header value, or null to omit
 * @param authToken Authorization header value, or null to omit
 * @throws IOException on socket write failure
 */
private static void sendSimpleCommand(
        @NonNull String command,
        @NonNull OutputStream outputStream,
        @NonNull String request,
        int cSeq,
        @Nullable String userAgent,
        @Nullable String session,
        @Nullable String authToken)
        throws IOException {
    // FIX: encode with an explicit charset — bare getBytes() depends on the
    // platform default encoding, which is not guaranteed to be UTF-8 everywhere.
    outputStream.write((command + " " + request + " RTSP/1.0" + CRLF).getBytes(StandardCharsets.UTF_8));
    if (authToken != null)
        outputStream.write(("Authorization: " + authToken + CRLF).getBytes(StandardCharsets.UTF_8));
    outputStream.write(("CSeq: " + cSeq + CRLF).getBytes(StandardCharsets.UTF_8));
    if (userAgent != null)
        outputStream.write(("User-Agent: " + userAgent + CRLF).getBytes(StandardCharsets.UTF_8));
    if (session != null)
        outputStream.write(("Session: " + session + CRLF).getBytes(StandardCharsets.UTF_8));
    // Empty line terminates the request header block.
    outputStream.write(CRLF.getBytes(StandardCharsets.UTF_8));
    outputStream.flush();
}
/**
 * Sends an RTSP OPTIONS request. No Session header is used: OPTIONS is
 * typically issued before a session exists.
 *
 * @throws IOException on socket write failure
 */
private static void sendOptionsCommand(
        @NonNull OutputStream outputStream,
        @NonNull String request,
        int cSeq,
        @Nullable String userAgent,
        @Nullable String authToken)
        throws IOException {
    if (DEBUG) Log.v(TAG, "sendOptionsCommand(request=\"" + request + "\", cSeq=" + cSeq + ")");
    sendSimpleCommand("OPTIONS", outputStream, request, cSeq, userAgent, /*session*/ null, authToken);
}
/**
 * Sends an RTSP GET_PARAMETER request. Used as a keep-alive for servers that
 * advertise this capability.
 *
 * @param session active RTSP session id, or null if none
 * @throws IOException on socket write failure
 */
private static void sendGetParameterCommand(
        @NonNull OutputStream outputStream,
        @NonNull String request,
        int cSeq,
        @Nullable String userAgent,
        @Nullable String session,
        @Nullable String authToken)
        throws IOException {
    if (DEBUG) Log.v(TAG, "sendGetParameterCommand(request=\"" + request + "\", cSeq=" + cSeq + ")");
    sendSimpleCommand("GET_PARAMETER", outputStream, request, cSeq, userAgent, session, authToken);
}
/**
 * Sends an RTSP DESCRIBE request asking for an SDP session description
 * ("Accept: application/sdp").
 *
 * @throws IOException on socket write failure
 */
private static void sendDescribeCommand(
        @NonNull OutputStream outputStream,
        @NonNull String request,
        int cSeq,
        @Nullable String userAgent,
        @Nullable String authToken)
        throws IOException {
    if (DEBUG) Log.v(TAG, "sendDescribeCommand(request=\"" + request + "\", cSeq=" + cSeq + ")");
    // Explicit charset avoids platform-dependent encoding (see sendSimpleCommand)
    outputStream.write(("DESCRIBE " + request + " RTSP/1.0" + CRLF).getBytes(StandardCharsets.UTF_8));
    outputStream.write(("Accept: application/sdp" + CRLF).getBytes(StandardCharsets.UTF_8));
    if (authToken != null)
        outputStream.write(("Authorization: " + authToken + CRLF).getBytes(StandardCharsets.UTF_8));
    outputStream.write(("CSeq: " + cSeq + CRLF).getBytes(StandardCharsets.UTF_8));
    if (userAgent != null)
        outputStream.write(("User-Agent: " + userAgent + CRLF).getBytes(StandardCharsets.UTF_8));
    outputStream.write(CRLF.getBytes(StandardCharsets.UTF_8));
    outputStream.flush();
}
/**
 * Sends an RTSP TEARDOWN request to close the session.
 * The emitted header sequence (request line, Authorization, CSeq, User-Agent,
 * Session, empty line) is exactly what {@code sendSimpleCommand} produces,
 * so delegate instead of duplicating the serialization logic.
 *
 * @throws IOException on socket write failure
 */
private static void sendTeardownCommand(
        @NonNull OutputStream outputStream,
        @NonNull String request,
        int cSeq,
        @Nullable String userAgent,
        @Nullable String authToken,
        @Nullable String session)
        throws IOException {
    if (DEBUG) Log.v(TAG, "sendTeardownCommand(request=\"" + request + "\", cSeq=" + cSeq + ")");
    sendSimpleCommand("TEARDOWN", outputStream, request, cSeq, userAgent, session, authToken);
}
/**
 * Sends an RTSP SETUP request for a single track, asking for interleaved
 * RTP-over-TCP transport on the given channel pair (e.g. "0-1").
 *
 * @throws IOException on socket write failure
 */
private static void sendSetupCommand(
        @NonNull OutputStream outputStream,
        @NonNull String request,
        int cSeq,
        @Nullable String userAgent,
        @Nullable String authToken,
        @Nullable String session,
        @NonNull String interleaved)
        throws IOException {
    if (DEBUG) Log.v(TAG, "sendSetupCommand(request=\"" + request + "\", cSeq=" + cSeq + ")");
    // Optional headers are null entries and get skipped below.
    String[] lines = {
            "SETUP " + request + " RTSP/1.0" + CRLF,
            "Transport: RTP/AVP/TCP;unicast;interleaved=" + interleaved + CRLF,
            authToken != null ? "Authorization: " + authToken + CRLF : null,
            "CSeq: " + cSeq + CRLF,
            userAgent != null ? "User-Agent: " + userAgent + CRLF : null,
            session != null ? "Session: " + session + CRLF : null,
            CRLF, // empty line terminates the request
    };
    for (String line : lines) {
        if (line != null)
            outputStream.write(line.getBytes());
    }
    outputStream.flush();
}
/**
 * Sends an RTSP PLAY request for an established session, starting playback
 * from the beginning of the live range ("Range: npt=0.000-").
 *
 * @param session active RTSP session id (mandatory for PLAY)
 * @throws IOException on socket write failure
 */
private static void sendPlayCommand(
        @NonNull OutputStream outputStream,
        @NonNull String request,
        int cSeq,
        @Nullable String userAgent,
        @Nullable String authToken,
        @NonNull String session)
        throws IOException {
    if (DEBUG) Log.v(TAG, "sendPlayCommand(request=\"" + request + "\", cSeq=" + cSeq + ")");
    // Optional headers are null entries and get skipped below.
    String[] lines = {
            "PLAY " + request + " RTSP/1.0" + CRLF,
            "Range: npt=0.000-" + CRLF,
            authToken != null ? "Authorization: " + authToken + CRLF : null,
            "CSeq: " + cSeq + CRLF,
            userAgent != null ? "User-Agent: " + userAgent + CRLF : null,
            "Session: " + session + CRLF,
            CRLF, // empty line terminates the request
    };
    for (String line : lines) {
        if (line != null)
            outputStream.write(line.getBytes());
    }
    outputStream.flush();
}
/**
 * Scans the input stream for an RTSP status line ("RTSP/1.0 200 OK") and
 * returns the numeric status code.
 *
 * Fix: the previous code called {@code line.substring(0, line.indexOf(' '))}
 * unconditionally, throwing StringIndexOutOfBoundsException for a status line
 * without a reason phrase (no space after the code). Now the whole line is
 * parsed when no space is present.
 *
 * @return HTTP-style status code, or -1 if none could be found before EOF/exit
 * @throws IOException on socket read failure
 */
private int readResponseStatusCode(@NonNull InputStream inputStream) throws IOException {
    String line;
    byte[] rtspHeader = "RTSP/1.0 ".getBytes();
    // Search for "RTSP/1.0 "; readLine() then returns the remainder, e.g. "200 OK"
    while (!exitFlag.get() && readUntilBytesFound(inputStream, rtspHeader) && (line = readLine(inputStream)) != null) {
        if (debug)
            Log.d(TAG_DEBUG, "" + line);
        // Status code is the token before the first space ("200 OK" -> "200"),
        // or the whole line if no reason phrase was sent.
        int indexCode = line.indexOf(' ');
        String code = (indexCode >= 0) ? line.substring(0, indexCode) : line;
        try {
            return Integer.parseInt(code);
        } catch (NumberFormatException e) {
            // Does not fulfill standard "RTSP/1.0 200 OK" token. Continue search.
        }
    }
    if (debug)
        Log.w(TAG_DEBUG, "Could not obtain status code");
    return -1;
}
@NonNull
private ArrayList> readResponseHeaders(@NonNull InputStream inputStream) throws IOException {
ArrayList> headers = new ArrayList<>();
String line;
while (!exitFlag.get() && !TextUtils.isEmpty(line = readLine(inputStream))) {
if (debug)
Log.d(TAG_DEBUG, "" + line);
if (CRLF.equals(line)) {
return headers;
} else {
String[] pairs = line.split(":", 2);
if (pairs.length == 2) {
headers.add(Pair.create(pairs[0].trim(), pairs[1].trim()));
}
}
}
return headers;
}
/**
* Get a list of tracks from SDP. Usually contains video and audio track only.
* @return array of 3 tracks. First is video track, second audio track, third application track.
*/
@NonNull
private static Track[] getTracksFromDescribeParams(@NonNull List> params) {
Track[] tracks = new Track[3];
Track currentTrack = null;
for (Pair param: params) {
switch (param.first) {
case "m":
// m=video 0 RTP/AVP 96
if (param.second.startsWith("video")) {
currentTrack = new VideoTrack();
tracks[0] = currentTrack;
// m=audio 0 RTP/AVP 97
// m=audio 0 RTP/AVP 0 8
} else if (param.second.startsWith("audio")) {
currentTrack = new AudioTrack();
tracks[1] = currentTrack;
// m=application 0 RTP/AVP 99
// a=rtpmap:99 com.my/90000
} else if (param.second.startsWith("application")) {
currentTrack = new ApplicationTrack();
tracks[2] = currentTrack;
} else if (param.second.startsWith("text")) {
Log.w(TAG, "Media track 'text' is not supported");
} else if (param.second.startsWith("message")) {
Log.w(TAG, "Media track 'message' is not supported");
} else {
currentTrack = null;
}
if (currentTrack != null) {
// m= / ...
String[] values = TextUtils.split(param.second, " ");
try {
currentTrack.payloadType = (values.length > 3 ? Integer.parseInt(values[3]) : -1);
// Handle static PT that comes with no rtpmap
if (currentTrack instanceof AudioTrack track) {
switch (currentTrack.payloadType) {
case 0 -> { // uLaw
track.audioCodec = AUDIO_CODEC_G711_ULAW;
track.sampleRateHz = 8000;
track.channels = 1;
}
case 8 -> { // aLaw
track.audioCodec = AUDIO_CODEC_G711_ALAW;
track.sampleRateHz = 8000;
track.channels = 1;
}
}
}
} catch (Exception e) {
currentTrack.payloadType = -1;
}
if (currentTrack.payloadType == -1)
Log.e(TAG, "Failed to get payload type from \"m=" + param.second + "\"");
}
break;
case "a":
// a=control:trackID=1
if (currentTrack != null) {
if (param.second.startsWith("control:")) {
currentTrack.request = param.second.substring(8);
// a=fmtp:96 packetization-mode=1; profile-level-id=4D4029; sprop-parameter-sets=Z01AKZpmBkCb8uAtQEBAQXpw,aO48gA==
// a=fmtp:97 streamtype=5; profile-level-id=15; mode=AAC-hbr; config=1408; sizeLength=13; indexLength=3; indexDeltaLength=3; profile=1; bitrate=32000;
// a=fmtp:97 streamtype=5;profile-level-id=1;mode=AAC-hbr;sizelength=13;indexlength=3;indexdeltalength=3;config=1408
// a=fmtp:96 streamtype=5; profile-level-id=14; mode=AAC-lbr; config=1388; sizeLength=6; indexLength=2; indexDeltaLength=2; constantDuration=1024; maxDisplacement=5
// a=fmtp:96 profile-level-id=1;mode=AAC-hbr;sizelength=13;indexlength=3;indexdeltalength=3;config=1210fff15081ffdffc
// a=fmtp:96
} else if (param.second.startsWith("fmtp:")) {
// Video
if (currentTrack instanceof VideoTrack) {
updateVideoTrackFromDescribeParam((VideoTrack)tracks[0], param);
// Audio
} else if (currentTrack instanceof AudioTrack) {
updateAudioTrackFromDescribeParam((AudioTrack)tracks[1], param);
}
// a=rtpmap:96 H264/90000
// a=rtpmap:97 mpeg4-generic/16000/1
// a=rtpmap:97 MPEG4-GENERIC/16000
// a=rtpmap:97 G726-32/8000
// a=rtpmap:96 mpeg4-generic/44100/2
} else if (param.second.startsWith("rtpmap:")) {
// Video
String[] values = TextUtils.split(param.second, " ");
if (currentTrack instanceof VideoTrack) {
if (values.length > 1) {
values = TextUtils.split(values[1], "/");
if (values.length > 0) {
switch (values[0].toLowerCase()) {
case "h264" -> ((VideoTrack) tracks[0]).videoCodec = VIDEO_CODEC_H264;
case "h265" -> ((VideoTrack) tracks[0]).videoCodec = VIDEO_CODEC_H265;
default -> Log.w(TAG, "Unknown video codec \"" + values[0] + "\"");
}
Log.i(TAG, "Video: " + values[0]);
}
}
// Audio
} else if (currentTrack instanceof AudioTrack) {
if (values.length > 1) {
AudioTrack track = ((AudioTrack) tracks[1]);
values = TextUtils.split(values[1], "/");
if (values.length > 1) {
switch (values[0].toLowerCase()) {
case "mpeg4-generic" -> track.audioCodec = AUDIO_CODEC_AAC;
case "opus" -> track.audioCodec = AUDIO_CODEC_OPUS;
case "pcmu" -> track.audioCodec = AUDIO_CODEC_G711_ULAW;
case "pcma" -> track.audioCodec = AUDIO_CODEC_G711_ALAW;
default -> {
Log.w(TAG, "Unknown audio codec \"" + values[0] + "\"");
track.audioCodec = AUDIO_CODEC_UNKNOWN;
}
}
track.sampleRateHz = Integer.parseInt(values[1]);
// If no channels specified, use mono, e.g. "a=rtpmap:97 MPEG4-GENERIC/8000"
track.channels = values.length > 2 ? Integer.parseInt(values[2]) : 1;
Log.i(TAG, "Audio: " + getAudioCodecName(track.audioCodec) + ", sample rate: " + track.sampleRateHz + " Hz, channels: " + track.channels);
}
}
// Application
} else {
// Do nothing
}
}
}
break;
}
}
return tracks;
}
//v=0
//o=- 1542237507365806 1542237507365806 IN IP4 10.0.1.111
//s=Media Presentation
//e=NONE
//b=AS:50032
//t=0 0
//a=control:*
//a=range:npt=0.000000-
//m=video 0 RTP/AVP 96
//c=IN IP4 0.0.0.0
//b=AS:50000
//a=framerate:25.0
//a=transform:1.000000,0.000000,0.000000;0.000000,1.000000,0.000000;0.000000,0.000000,1.000000
//a=control:trackID=1
//a=rtpmap:96 H264/90000
//a=fmtp:96 packetization-mode=1; profile-level-id=4D4029; sprop-parameter-sets=Z01AKZpmBkCb8uAtQEBAQXpw,aO48gA==
//m=audio 0 RTP/AVP 97
//c=IN IP4 0.0.0.0
//b=AS:32
//a=control:trackID=2
//a=rtpmap:97 G726-32/8000
// v=0
// o=- 14190294250618174561 14190294250618174561 IN IP4 127.0.0.1
// s=IP Webcam
// c=IN IP4 0.0.0.0
// t=0 0
// a=range:npt=now-
// a=control:*
// m=video 0 RTP/AVP 96
// a=rtpmap:96 H264/90000
// a=control:h264
// a=fmtp:96 packetization-mode=1;profile-level-id=42C028;sprop-parameter-sets=Z0LAKIyNQDwBEvLAPCIRqA==,aM48gA==;
// a=cliprect:0,0,1920,1080
// a=framerate:30.0
// a=framesize:96 1080-1920
// Pair first - name, e.g. "a"; second - value, e.g "cliprect:0,0,1920,1080"
@NonNull
private static List> getDescribeParams(@NonNull String text) {
ArrayList> list = new ArrayList<>();
String[] params = TextUtils.split(text, "\r\n");
for (String param : params) {
int i = param.indexOf('=');
if (i > 0) {
String name = param.substring(0, i).trim();
String value = param.substring(i + 1);
list.add(Pair.create(name, value));
}
}
return list;
}
@NonNull
private static SdpInfo getSdpInfoFromDescribeParams(@NonNull List> params) {
SdpInfo sdpInfo = new SdpInfo();
Track[] tracks = getTracksFromDescribeParams(params);
sdpInfo.videoTrack = ((VideoTrack)tracks[0]);
sdpInfo.audioTrack = ((AudioTrack)tracks[1]);
sdpInfo.applicationTrack = ((ApplicationTrack)tracks[2]);
for (Pair param : params) {
switch (param.first) {
case "s" -> sdpInfo.sessionName = param.second;
case "i" -> sdpInfo.sessionDescription = param.second;
}
}
return sdpInfo;
}
// a=fmtp:97 streamtype=5;profile-level-id=1;mode=AAC-hbr;sizelength=13;indexlength=3;indexdeltalength=3;config=1408
@Nullable
private static List> getSdpAParams(@NonNull Pair param) {
if (param.first.equals("a") && param.second.startsWith("fmtp:") && param.second.length() > 8) { //
String value = param.second.substring(8).trim(); // fmtp can be '96' (2 chars) and '127' (3 chars)
String[] paramsA = TextUtils.split(value, ";");
// streamtype=5
// profile-level-id=1
// mode=AAC-hbr
ArrayList> retParams = new ArrayList<>();
for (String paramA: paramsA) {
paramA = paramA.trim();
// sprop-parameter-sets=Z0LAKIyNQDwBEvLAPCIRqA==,aM48gA==
int i = paramA.indexOf("=");
if (i != -1)
retParams.add(
Pair.create(
paramA.substring(0, i),
paramA.substring(i + 1)));
}
return retParams;
} else {
Log.w(TAG, "Not a valid fmtp");
}
return null;
}
/**
 * Decodes a Base64-encoded NAL unit from an SDP sprop attribute and prepends
 * the 4-byte Annex-B start code 00 00 00 01.
 */
@NonNull
private static byte[] getNalUnitFromSprop(String nalBase64) {
    byte[] nal = Base64.decode(nalBase64, Base64.NO_WRAP);
    byte[] annexB = new byte[nal.length + 4];
    // Java arrays are zero-initialized, so bytes 0..2 are already 0.
    annexB[3] = 1;
    System.arraycopy(nal, 0, annexB, 4, nal.length);
    return annexB;
}
/**
 * Fills in the SPS/PPS/VPS NAL units and validates the packetization mode on
 * a video track from an SDP "a=fmtp:..." attribute.
 *
 * Fix: restored the generic type parameters that were corrupted in this
 * revision; removed dead commented-out code. Logic is unchanged.
 */
private static void updateVideoTrackFromDescribeParam(@NonNull VideoTrack videoTrack, @NonNull Pair<String, String> param) {
    // a=fmtp:96 packetization-mode=1;profile-level-id=42C028;sprop-parameter-sets=Z0LAKIyNQDwBEvLAPCIRqA==,aM48gA==;
    // a=fmtp:98 profile-id=1;sprop-sps=...;sprop-pps=...;sprop-vps=...
    List<Pair<String, String>> params = getSdpAParams(param);
    if (params != null) {
        for (Pair<String, String> pair : params) {
            switch (pair.first.toLowerCase()) {
                // H.265 sends parameter sets as individual attributes
                case "sprop-sps" -> videoTrack.sps = getNalUnitFromSprop(pair.second);
                case "sprop-pps" -> videoTrack.pps = getNalUnitFromSprop(pair.second);
                case "sprop-vps" -> videoTrack.vps = getNalUnitFromSprop(pair.second);
                // H.264 sends "SPS,PPS" as one comma-separated Base64 pair
                case "sprop-parameter-sets" -> {
                    String[] paramsSpsPps = TextUtils.split(pair.second, ",");
                    if (paramsSpsPps.length > 1) {
                        videoTrack.sps = getNalUnitFromSprop(paramsSpsPps[0]);
                        videoTrack.pps = getNalUnitFromSprop(paramsSpsPps[1]);
                    }
                }
                // packetization-mode=1
                case "packetization-mode" -> {
                    // 0 - single NAL unit (default)
                    // 1 - non-interleaved mode (STAP-A and FU-A NAL units)
                    // 2 - interleaved mode
                    try {
                        int mode = Integer.parseInt(pair.second);
                        if (mode == 2)
                            Log.e(TAG, "Interleaved packetization mode is not supported");
                    } catch (NumberFormatException ignored) {
                    }
                }
            }
        }
    }
}
/**
 * Converts a hex string to bytes: "1210fff1" -> [0x12, 0x10, 0xff, 0xf1].
 *
 * Fix: the previous BigInteger-based implementation dropped leading zero
 * bytes (e.g. "0040" -> [0x40]) and prepended a 0x00 sign byte when the first
 * nibble was >= 8 (e.g. "f8e8" -> [0x00, 0xf8, 0xe8]) — both corrupt AAC
 * AudioSpecificConfig blobs. Parsing two hex digits per byte preserves the
 * exact byte layout. Odd-length input is padded with a leading zero nibble,
 * matching the old behavior for valid inputs.
 *
 * @throws NumberFormatException if a non-hex character is found (as before)
 */
@NonNull
private static byte[] getBytesFromHexString(@NonNull String config) {
    String hex = (config.length() % 2 == 0) ? config : "0" + config;
    byte[] bytes = new byte[hex.length() / 2];
    for (int i = 0; i < bytes.length; i++) {
        int hi = Character.digit(hex.charAt(2 * i), 16);
        int lo = Character.digit(hex.charAt(2 * i + 1), 16);
        if (hi < 0 || lo < 0)
            throw new NumberFormatException("Invalid hex string \"" + config + "\"");
        bytes[i] = (byte) ((hi << 4) | lo);
    }
    return bytes;
}
/**
 * Fills in the AAC mode ("AAC-hbr"/"AAC-lbr") and the AudioSpecificConfig
 * bytes on an audio track from an SDP "a=fmtp:..." attribute.
 *
 * Fix: restored the generic type parameters that were corrupted in this
 * revision of the file. Logic is unchanged.
 */
private static void updateAudioTrackFromDescribeParam(@NonNull AudioTrack audioTrack, @NonNull Pair<String, String> param) {
    // a=fmtp:97 streamtype=5;profile-level-id=1;mode=AAC-hbr;sizelength=13;indexlength=3;indexdeltalength=3;config=1408
    // a=fmtp:96 profile-level-id=1;mode=AAC-hbr;sizelength=13;indexlength=3;indexdeltalength=3;config=1210fff15081ffdffc
    List<Pair<String, String>> params = getSdpAParams(param);
    if (params != null) {
        for (Pair<String, String> pair : params) {
            switch (pair.first.toLowerCase()) {
                case "mode" -> audioTrack.mode = pair.second;
                // "config" is the AudioSpecificConfig encoded as hex
                case "config" -> audioTrack.config = getBytesFromHexString(pair.second);
            }
        }
    }
}
/**
* Search for header "Content-Base: rtsp://example.com/stream/"
* and return "rtsp://example.com/stream/"
*/
@Nullable
private static String getHeaderContentBase(@NonNull ArrayList> headers) {
String contentBase = getHeader(headers, "content-base");
if (!TextUtils.isEmpty(contentBase)) {
return contentBase;
}
return null;
}
private static int getHeaderContentLength(@NonNull ArrayList> headers) {
String length = getHeader(headers, "content-length");
if (!TextUtils.isEmpty(length)) {
try {
return Integer.parseInt(length);
} catch (NumberFormatException ignored) {
}
}
return -1;
}
private static int getSupportedCapabilities(@NonNull ArrayList> headers) {
for (Pair head: headers) {
String h = head.first.toLowerCase();
// Public: OPTIONS, DESCRIBE, SETUP, PLAY, GET_PARAMETER, SET_PARAMETER, TEARDOWN
if ("public".equals(h)) {
int mask = 0;
String[] tokens = TextUtils.split(head.second.toLowerCase(), ",");
for (String token: tokens) {
switch (token.trim()) {
case "options" -> mask |= RTSP_CAPABILITY_OPTIONS;
case "describe" -> mask |= RTSP_CAPABILITY_DESCRIBE;
case "announce" -> mask |= RTSP_CAPABILITY_ANNOUNCE;
case "setup" -> mask |= RTSP_CAPABILITY_SETUP;
case "play" -> mask |= RTSP_CAPABILITY_PLAY;
case "record" -> mask |= RTSP_CAPABILITY_RECORD;
case "pause" -> mask |= RTSP_CAPABILITY_PAUSE;
case "teardown" -> mask |= RTSP_CAPABILITY_TEARDOWN;
case "set_parameter" -> mask |= RTSP_CAPABILITY_SET_PARAMETER;
case "get_parameter" -> mask |= RTSP_CAPABILITY_GET_PARAMETER;
case "redirect" -> mask |= RTSP_CAPABILITY_REDIRECT;
}
}
return mask;
}
}
return RTSP_CAPABILITY_NONE;
}
@Nullable
private static Pair getHeaderWwwAuthenticateDigestRealmAndNonce(@NonNull ArrayList> headers) {
for (Pair head: headers) {
String h = head.first.toLowerCase();
// WWW-Authenticate: Digest realm="AXIS_00408CEF081C", nonce="00054cecY7165349339ae05f7017797d6b0aaad38f6ff45", stale=FALSE
// WWW-Authenticate: Basic realm="AXIS_00408CEF081C"
// WWW-Authenticate: Digest realm="Login to 4K049EBPAG1D7E7", nonce="de4ccb15804565dc8a4fa5b115695f4f"
if ("www-authenticate".equals(h) && head.second.toLowerCase().startsWith("digest")) {
String v = head.second.substring(7).trim();
int begin, end;
begin = v.indexOf("realm=");
begin = v.indexOf('"', begin) + 1;
end = v.indexOf('"', begin);
String digestRealm = v.substring(begin, end);
begin = v.indexOf("nonce=");
begin = v.indexOf('"', begin)+1;
end = v.indexOf('"', begin);
String digestNonce = v.substring(begin, end);
return Pair.create(digestRealm, digestNonce);
}
}
return null;
}
@Nullable
private static String getHeaderWwwAuthenticateBasicRealm(@NonNull ArrayList> headers) {
for (Pair head: headers) {
// Session: ODgyODg3MjQ1MDczODk3NDk4Nw
String h = head.first.toLowerCase();
String v = head.second.toLowerCase();
// WWW-Authenticate: Digest realm="AXIS_00408CEF081C", nonce="00054cecY7165349339ae05f7017797d6b0aaad38f6ff45", stale=FALSE
// WWW-Authenticate: Basic realm="AXIS_00408CEF081C"
if ("www-authenticate".equals(h) && v.startsWith("basic")) {
v = v.substring(6).trim();
// realm=
// AXIS_00408CEF081C
String[] tokens = TextUtils.split(v, "\"");
if (tokens.length > 2)
return tokens[1];
}
}
return null;
}
// Basic authentication
/**
 * Builds the value for an "Authorization" request header using HTTP Basic
 * authentication: "user:password" Base64-encoded. Null credentials are
 * treated as empty strings.
 */
@NonNull
private static String getBasicAuthHeader(@Nullable String username, @Nullable String password) {
    String user = (username == null) ? "" : username;
    String pass = (password == null) ? "" : password;
    byte[] credentials = (user + ":" + pass).getBytes(StandardCharsets.ISO_8859_1);
    return "Basic " + new String(Base64.encode(credentials, Base64.NO_WRAP));
}
// Digest authentication
/**
 * Builds the value for an "Authorization" request header using RFC 2069-style
 * MD5 Digest authentication (no qop/cnonce/nc support — see TODO below):
 *
 *   response = MD5( hex(MD5(user:realm:password)) ":" nonce ":" hex(MD5(method:uri)) )
 *
 * @param username  user name; null is treated as empty
 * @param password  password; null is treated as empty
 * @param method    RTSP method the header is generated for, e.g. "DESCRIBE"
 * @param digestUri request URI included in the digest
 * @param realm     realm from the server's WWW-Authenticate challenge
 * @param nonce     nonce from the server's WWW-Authenticate challenge
 * @return header value, or null if hashing failed (e.g. MD5 unavailable)
 */
@Nullable
private static String getDigestAuthHeader(
        @Nullable String username,
        @Nullable String password,
        @NonNull String method,
        @NonNull String digestUri,
        @NonNull String realm,
        @NonNull String nonce) {
    try {
        MessageDigest md = MessageDigest.getInstance("MD5");
        byte[] ha1;
        if (username == null)
            username = "";
        if (password == null)
            password = "";
        // calc A1 digest: MD5(username:realm:password)
        md.update(username.getBytes(StandardCharsets.ISO_8859_1));
        md.update((byte) ':');
        md.update(realm.getBytes(StandardCharsets.ISO_8859_1));
        md.update((byte) ':');
        md.update(password.getBytes(StandardCharsets.ISO_8859_1));
        ha1 = md.digest();
        // calc A2 digest: MD5(method:digestUri)
        md.reset();
        md.update(method.getBytes(StandardCharsets.ISO_8859_1));
        md.update((byte) ':');
        md.update(digestUri.getBytes(StandardCharsets.ISO_8859_1));
        byte[] ha2 = md.digest();
        // calc response: MD5(hex(HA1):nonce:hex(HA2)) — digests must be fed in
        // as lowercase hex strings, not raw bytes
        md.update(getHexStringFromBytes(ha1).getBytes(StandardCharsets.ISO_8859_1));
        md.update((byte) ':');
        md.update(nonce.getBytes(StandardCharsets.ISO_8859_1));
        md.update((byte) ':');
        // TODO add support for more secure version of digest auth
        //md.update(nc.getBytes(StandardCharsets.ISO_8859_1));
        //md.update((byte) ':');
        //md.update(cnonce.getBytes(StandardCharsets.ISO_8859_1));
        //md.update((byte) ':');
        //md.update(qop.getBytes(StandardCharsets.ISO_8859_1));
        //md.update((byte) ':');
        md.update(getHexStringFromBytes(ha2).getBytes(StandardCharsets.ISO_8859_1));
        String response = getHexStringFromBytes(md.digest());
        return "Digest username=\"" + username + "\", realm=\"" + realm + "\", nonce=\"" + nonce + "\", uri=\"" + digestUri + "\", response=\"" + response + "\"";
    } catch (Exception e) {
        e.printStackTrace();
    }
    return null;
}
/**
 * Converts bytes to a lowercase hex string: [0x00, 0xff] -> "00ff".
 *
 * Improvement: direct nibble lookup instead of {@code String.format("%02x")}
 * per byte — same output without format-string parsing on every byte (this
 * runs on every digest computation).
 */
@NonNull
private static String getHexStringFromBytes(@NonNull byte[] bytes) {
    final char[] digits = "0123456789abcdef".toCharArray();
    StringBuilder buf = new StringBuilder(bytes.length * 2);
    for (byte b : bytes)
        buf.append(digits[(b >> 4) & 0xF]).append(digits[b & 0xF]);
    return buf.toString();
}
/**
 * Reads {@code length} bytes of response body and decodes them as UTF-8 text
 * (the body here is SDP, which is ASCII/UTF-8). If EOF is hit early, only the
 * bytes actually read are decoded.
 *
 * Fix: use an explicit charset instead of the platform default when decoding.
 *
 * @throws IOException on socket read failure
 */
@NonNull
private static String readContentAsText(@NonNull InputStream inputStream, int length) throws IOException {
    if (length <= 0)
        return "";
    byte[] b = new byte[length];
    int read = readData(inputStream, b, 0, length);
    return new String(b, 0, read, StandardCharsets.UTF_8);
}
// int memcmp ( const void * ptr1, const void * ptr2, size_t num );
/**
 * Compares {@code num} bytes of two arrays starting at the given offsets,
 * like C's memcmp reduced to an equality check.
 *
 * @return true when both ranges are in bounds and byte-wise equal
 */
public static boolean memcmp(
        @NonNull byte[] source1,
        int offsetSource1,
        @NonNull byte[] source2,
        int offsetSource2,
        int num) {
    // Either range running past its array end counts as "not equal"
    if (num > source1.length - offsetSource1 || num > source2.length - offsetSource2)
        return false;
    int i1 = offsetSource1;
    int i2 = offsetSource2;
    for (int n = 0; n < num; n++) {
        if (source1[i1++] != source2[i2++])
            return false;
    }
    return true;
}
/**
 * Shifts the first {@code num} bytes of the array one position toward index 0
 * (ABCDEF -> BCDEFF); the last of the {@code num} bytes keeps its old value
 * and is expected to be overwritten by the caller.
 */
private static void shiftLeftArray(@NonNull byte[] array, int num) {
    int count = num - 1;
    if (count > 0)
        System.arraycopy(array, 1, array, 0, count);
}
/**
 * Consumes the stream until the given byte pattern has been read, using a
 * sliding window of the pattern's length.
 *
 * @return true when the pattern was found; false on EOF or when exitFlag is set
 * @throws IOException on socket read failure
 */
private boolean readUntilBytesFound(@NonNull InputStream inputStream, @NonNull byte[] array) throws IOException {
    byte[] window = new byte[array.length];
    // Prime the window with the first array.length bytes
    if (NetUtils.readData(inputStream, window, 0, window.length) != window.length)
        return false; // EOF before the window could be filled
    while (!exitFlag.get()) {
        if (memcmp(window, 0, array, 0, window.length))
            return true; // pattern matched
        // Slide one byte: drop the first byte, read one new byte into the end
        shiftLeftArray(window, window.length);
        if (NetUtils.readData(inputStream, window, window.length - 1, 1) != 1)
            return false; // EOF
    }
    return false; // stopped via exitFlag
}
// private boolean readUntilByteFound(@NonNull InputStream inputStream, byte bt) throws IOException {
// byte[] buffer = new byte[1];
// int readBytes;
// while (!exitFlag.get()) {
// readBytes = inputStream.read(buffer, 0, 1);
// if (readBytes == -1) // EOF
// return false;
// if (readBytes == 1 && buffer[0] == bt) {
// return true;
// }
// }
// return false;
// }
/**
 * Reads one '\n'-terminated text line from the stream (byte-by-byte).
 *
 * @return line content without the trailing "\r\n"; "" for an empty line
 *         (end of the header section); null on EOF or when exitFlag is set
 * @throws IOException on socket read failure
 * @throws NoResponseHeadersException if no EOL appears within MAX_LINE_SIZE bytes
 */
@Nullable
private String readLine(@NonNull InputStream inputStream) throws IOException {
    byte[] bufferLine = new byte[MAX_LINE_SIZE];
    int offset = 0;
    int readBytes;
    do {
        // Didn't find "\r\n" within MAX_LINE_SIZE bytes
        if (offset >= MAX_LINE_SIZE) {
            throw new NoResponseHeadersException();
        }
        // Read 1 byte
        readBytes = inputStream.read(bufferLine, offset, 1);
        if (readBytes == 1) {
            // Check for EOL. The '\r' check is disabled because some cameras
            // (e.g. Linksys WVC200) send bare '\n' instead of "\r\n".
            if (offset > 0 && /*bufferLine[offset-1] == '\r' &&*/ bufferLine[offset] == '\n') {
                // Found empty EOL ("\r\n" alone). End of header section
                if (offset == 1)
                    return "";//break;
                // Found EOL. Return content without the trailing "\r\n".
                // NOTE(review): offset-1 assumes the byte before '\n' was '\r';
                // for bare-'\n' servers this drops the last content byte — confirm intended.
                return new String(bufferLine, 0, offset-1);
            } else {
                offset++;
            }
        }
    } while (readBytes > 0 && !exitFlag.get());
    // EOF (readBytes <= 0) or cooperative stop
    return null;
}
/**
 * Blocking read of exactly {@code length} bytes into {@code buffer} at
 * {@code offset}, looping over short reads.
 *
 * @return number of bytes actually read (less than {@code length} only on EOF)
 * @throws IOException on socket read failure
 */
private static int readData(@NonNull InputStream inputStream, @NonNull byte[] buffer, int offset, int length) throws IOException {
    if (DEBUG) Log.v(TAG, "readData(offset=" + offset + ", length=" + length + ")");
    int totalReadBytes = 0;
    while (true) {
        int readBytes = inputStream.read(buffer, offset + totalReadBytes, length - totalReadBytes);
        if (readBytes < 0)
            break; // EOF
        totalReadBytes += readBytes;
        if (totalReadBytes >= length)
            break; // requested amount fully read
    }
    return totalReadBytes;
}
private static void dumpHeaders(@NonNull ArrayList> headers) {
if (DEBUG) {
for (Pair head : headers) {
Log.d(TAG, head.first + ": " + head.second);
}
}
}
@Nullable
private static String getHeader(@NonNull ArrayList> headers, @NonNull String header) {
for (Pair head: headers) {
// Session: ODgyODg3MjQ1MDczODk3NDk4Nw
String h = head.first.toLowerCase();
if (header.toLowerCase().equals(h)) {
return head.second;
}
}
// Not found
return null;
}
/**
 * Builder for {@link RtspClient}. The socket, URI, exit flag and listener are
 * mandatory; everything else has a sensible default (video/audio/application
 * all requested, debug off, no credentials, ffmpeg-style User-Agent).
 */
public static class Builder {
    // Default mimics ffmpeg/libav so servers treat the client as a known player
    private static final String DEFAULT_USER_AGENT = "Lavf58.29.100";

    // Mandatory parameters, fixed at construction time
    private final @NonNull Socket rtspSocket;
    private final @NonNull String uriRtsp;
    private final @NonNull AtomicBoolean exitFlag;
    private final @NonNull RtspClientListener listener;

    // private boolean sendOptionsCommand = true;

    // Optional parameters with defaults
    private boolean requestVideo = true;
    private boolean requestAudio = true;
    private boolean requestApplication = true;
    private boolean debug = false;
    private @Nullable String username = null;
    private @Nullable String password = null;
    private @Nullable String userAgent = DEFAULT_USER_AGENT;

    /**
     * @param rtspSocket connected socket to the RTSP server
     * @param uriRtsp    RTSP URI to request
     * @param exitFlag   cooperative stop flag polled by the client loop
     * @param listener   callbacks for connection status, SDP info and media data
     */
    public Builder(
            @NonNull Socket rtspSocket,
            @NonNull String uriRtsp,
            @NonNull AtomicBoolean exitFlag,
            @NonNull RtspClientListener listener) {
        this.rtspSocket = rtspSocket;
        this.uriRtsp = uriRtsp;
        this.exitFlag = exitFlag;
        this.listener = listener;
    }

    /** Enables verbose logging of the RTSP exchange. */
    @NonNull
    public Builder withDebug(boolean debug) {
        this.debug = debug;
        return this;
    }

    /** Sets credentials for Basic/Digest authentication; null disables auth. */
    @NonNull
    public Builder withCredentials(@Nullable String username, @Nullable String password) {
        this.username = username;
        this.password = password;
        return this;
    }

    /** Overrides the User-Agent header; null omits the header entirely. */
    @NonNull
    public Builder withUserAgent(@Nullable String userAgent) {
        this.userAgent = userAgent;
        return this;
    }

    // @NonNull
    // public Builder sendOptionsCommand(boolean sendOptionsCommand) {
    //     this.sendOptionsCommand = sendOptionsCommand;
    //     return this;
    // }

    /** Whether to SETUP the video track (default true). */
    @NonNull
    public Builder requestVideo(boolean requestVideo) {
        this.requestVideo = requestVideo;
        return this;
    }

    /** Whether to SETUP the audio track (default true). */
    @NonNull
    public Builder requestAudio(boolean requestAudio) {
        this.requestAudio = requestAudio;
        return this;
    }

    /** Whether to SETUP the application (data) track (default true). */
    @NonNull
    public Builder requestApplication(boolean requestApplication) {
        this.requestApplication = requestApplication;
        return this;
    }

    /** Creates the configured client. */
    @NonNull
    public RtspClient build() {
        return new RtspClient(this);
    }
}
}
/**
 * BufferedOutputStream that optionally mirrors every written chunk to logcat.
 * Logging can be switched off (e.g. to avoid dumping binary RTP payload).
 */
class LoggerOutputStream extends BufferedOutputStream {
    // Guarded by "this" — toggled from other threads via setLogging()
    private boolean logging = true;

    public LoggerOutputStream(@NonNull OutputStream out) {
        super(out);
    }

    /** Enables or disables mirroring of written data to logcat. */
    public synchronized void setLogging(boolean logging) {
        this.logging = logging;
    }

    @Override
    public synchronized void write(byte[] b, int off, int len) throws IOException {
        super.write(b, off, len);
        if (!logging)
            return;
        String chunk = new String(b, off, len);
        Log.i(RtspClient.TAG_DEBUG, chunk);
    }
}
================================================
FILE: library-client-rtsp/src/main/java/com/alexvas/rtsp/codec/AudioDecodeThread.kt
================================================
package com.alexvas.rtsp.codec
import android.media.*
import android.os.Process
import android.util.Log
import java.nio.ByteBuffer
class AudioDecodeThread (
private val mimeType: String,
private val sampleRate: Int,
private val channelCount: Int,
private val codecConfig: ByteArray?,
private val audioFrameQueue: AudioFrameQueue) : Thread() {
private var isRunning = true
/**
 * Requests the decoder thread to stop asynchronously. Safe to call from any
 * thread; returns immediately without joining.
 */
fun stopAsync() {
    if (DEBUG) Log.v(TAG, "stopAsync()")
    // Tell the run() loop to finish on its next iteration
    isRunning = false
    // Wake up sleep() code — interrupt any blocking call inside run()
    interrupt()
}
/**
 * Decoder main loop: configures a MediaCodec audio decoder (AAC or Opus),
 * creates an AudioTrack playback sink, then repeatedly feeds encoded frames
 * from [audioFrameQueue] into the decoder and writes decoded PCM to the
 * AudioTrack until [stopAsync] is called or end-of-stream is signaled.
 */
override fun run() {
    if (DEBUG) Log.d(TAG, "$name started")
    // Audio-priority scheduling keeps decode/playback responsive
    Process.setThreadPriority(Process.THREAD_PRIORITY_AUDIO)

    // Creating audio decoder
    val decoder = MediaCodec.createDecoderByType(mimeType)
    val format = MediaFormat.createAudioFormat(mimeType, sampleRate, channelCount)
    if (mimeType == MediaFormat.MIMETYPE_AUDIO_AAC) {
        // csd-0 is the AudioSpecificConfig: either the SDP "config=..." bytes
        // or one synthesized for AAC-LC at the given rate/channels
        val csd0 = codecConfig ?: getAacDecoderConfigData(MediaCodecInfo.CodecProfileLevel.AACObjectLC, sampleRate, channelCount)
        format.setByteBuffer("csd-0", ByteBuffer.wrap(csd0))
        format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC)
    } else if (mimeType == MediaFormat.MIMETYPE_AUDIO_OPUS) {
        // TODO: Add Opus support
        // val OPUS_IDENTIFICATION_HEADER = "OpusHead".toByteArray()
        // val OPUS_PRE_SKIP_NSEC = ByteBuffer.allocate(8).putLong(11971).array()
        // val OPUS_SEEK_PRE_ROLL_NSEC = ByteBuffer.allocate(8).putLong(80000000).array()
        // val csd0 = ByteBuffer.allocate(8+1+1+2+4+2+1)
        // csd0.put("OpusHead".toByteArray())
        // // Version
        // csd0.put(1)
        // // Number of channels
        // csd0.put(2)
        // // Pre-skip
        // csd0.putShort(0)
        // csd0.putInt(sampleRate)
        // // Output Gain
        // csd0.putShort(0)
        // // Channel Mapping Family
        // csd0.put(0)
        // Buffer buf = new Buffer();
        // // Magic Signature: fixed 8-byte header, the string "OpusHead"
        // buf.write("OpusHead".getBytes(StandardCharsets.UTF_8));
        // // Version: 1 byte, fixed 0x01
        // buf.writeByte(1);
        // // Channel Count: 1 byte, set from the audio stream, e.g. 0x02
        // buf.writeByte(1);
        // // Pre-skip: samples the decoder discards at playback start, 2 bytes little-endian, default 0x00
        // buf.writeShortLe(0);
        // // Input Sample Rate (Hz): 4 bytes little-endian, set from the stream
        // buf.writeIntLe(currentFormat.HZ);
        // // Output Gain: 2 bytes little-endian; 0x00 0x00 when unused
        // buf.writeShortLe(0);
        // // Channel Mapping Family: 1 byte, default 0x00
        // buf.writeByte(0);
        // // Channel Mapping Table: optional, omitted when Family is 0x00
        // format.setByteBuffer("csd-0", ByteBuffer.wrap(OPUS_IDENTIFICATION_HEADER).order(ByteOrder.BIG_ENDIAN))
        // format.setByteBuffer("csd-1", ByteBuffer.wrap(OPUS_PRE_SKIP_NSEC).order(ByteOrder.BIG_ENDIAN))
        // format.setByteBuffer("csd-2", ByteBuffer.wrap(OPUS_SEEK_PRE_ROLL_NSEC).order(ByteOrder.LITTLE_ENDIAN))
        // Hard-coded OpusHead (csd-0) for stereo @ 48000 Hz; csd-1/csd-2 are
        // pre-skip and seek-pre-roll (zeros).
        // NOTE(review): stereo/48000 is hard-coded here and ignores the
        // sampleRate/channelCount constructor parameters — confirm intended.
        val csd0 = byteArrayOf(
            0x4f, 0x70, 0x75, 0x73, // "Opus"
            0x48, 0x65, 0x61, 0x64, // "Head"
            0x01, // Version
            0x02, // Channel Count
            0x00, 0x00, // Pre skip
            0x80.toByte(), 0xbb.toByte(), 0x00, 0x00, // Sample rate 48000
            0x00, 0x00, // Output Gain (Q7.8 in dB)
            0x00, // Mapping Family
        )
        val csd1 = byteArrayOf(0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00)
        val csd2 = byteArrayOf(0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00)
        format.setByteBuffer("csd-0", ByteBuffer.wrap(csd0))
        format.setByteBuffer("csd-1", ByteBuffer.wrap(csd1))
        format.setByteBuffer("csd-2", ByteBuffer.wrap(csd2))
    }
    decoder.configure(format, null, null, 0)
    decoder.start()

    // Creating audio playback device
    val outChannel = if (channelCount > 1) AudioFormat.CHANNEL_OUT_STEREO else AudioFormat.CHANNEL_OUT_MONO
    val outAudio = AudioFormat.ENCODING_PCM_16BIT
    val bufferSize = AudioTrack.getMinBufferSize(sampleRate, outChannel, outAudio)
    // Log.i(TAG, "sampleRate: $sampleRate, bufferSize: $bufferSize".format(sampleRate, bufferSize))
    val audioTrack = AudioTrack(
        AudioAttributes.Builder()
            .setUsage(AudioAttributes.USAGE_MEDIA)
            .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
            .build(),
        AudioFormat.Builder()
            .setEncoding(outAudio)
            .setChannelMask(outChannel)
            .setSampleRate(sampleRate)
            .build(),
        bufferSize,
        AudioTrack.MODE_STREAM,
        0)
    audioTrack.play()

    val bufferInfo = MediaCodec.BufferInfo()
    while (isRunning) {
        // Step 1: feed one encoded frame from the queue into a free input buffer
        val inIndex: Int = decoder.dequeueInputBuffer(10000L)
        if (inIndex >= 0) {
            // fill inputBuffers[inputBufferIndex] with valid data
            var byteBuffer: ByteBuffer?
            try {
                byteBuffer = decoder.getInputBuffer(inIndex)
            } catch (e: Exception) {
                e.printStackTrace()
                break
            }
            byteBuffer?.rewind()
            // Preventing BufferOverflowException
            // if (length > byteBuffer.limit()) throw DecoderFatalException("Error")
            val audioFrame: FrameQueue.Frame?
            try {
                audioFrame = audioFrameQueue.pop()
                if (audioFrame == null) {
                    Log.d(TAG, "Empty audio frame")
                    // Release input buffer without data (queue timed out / empty)
                    decoder.queueInputBuffer(inIndex, 0, 0, 0L, 0)
                } else {
                    byteBuffer?.put(audioFrame.data, audioFrame.offset, audioFrame.length)
                    // NOTE(review): data is put() at codec-buffer position 0, but
                    // queueInputBuffer is given audioFrame.offset as the buffer
                    // offset — correct only when offset == 0; confirm.
                    decoder.queueInputBuffer(inIndex, audioFrame.offset, audioFrame.length, audioFrame.timestampMs, 0)
                }
            } catch (e: Exception) {
                e.printStackTrace()
            }
        }
        // Log.i(TAG, "inIndex: ${inIndex}")

        // Step 2: drain decoded PCM and write it to the AudioTrack
        try {
            // Log.w(TAG, "outIndex: ${outIndex}")
            if (!isRunning) break
            when (val outIndex = decoder.dequeueOutputBuffer(bufferInfo, 10000L)) {
                MediaCodec.INFO_OUTPUT_FORMAT_CHANGED -> Log.d(TAG, "Decoder format changed: ${decoder.outputFormat}")
                MediaCodec.INFO_TRY_AGAIN_LATER -> if (DEBUG) Log.d(TAG, "No output from decoder available")
                else -> {
                    if (outIndex >= 0) {
                        val byteBuffer: ByteBuffer? = decoder.getOutputBuffer(outIndex)
                        val chunk = ByteArray(bufferInfo.size)
                        byteBuffer?.get(chunk)
                        byteBuffer?.clear()
                        if (chunk.isNotEmpty()) {
                            // Blocking write; paces the loop to the playback rate
                            audioTrack.write(chunk, 0, chunk.size)
                        }
                        decoder.releaseOutputBuffer(outIndex, false)
                    }
                }
            }
        } catch (e: Exception) {
            e.printStackTrace()
        }

        // All decoded frames have been rendered, we can stop playing now
        if (bufferInfo.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM != 0) {
            Log.d(TAG, "OutputBuffer BUFFER_FLAG_END_OF_STREAM")
            break
        }
    }

    // Cleanup: release playback device and codec, drop any queued frames
    audioTrack.flush()
    audioTrack.release()
    try {
        decoder.stop()
        decoder.release()
    } catch (_: InterruptedException) {
    } catch (e: Exception) {
        e.printStackTrace()
    }
    audioFrameQueue.clear()
    if (DEBUG) Log.d(TAG, "$name stopped")
}
companion object {
    private val TAG: String = AudioDecodeThread::class.java.simpleName
    private const val DEBUG = false

    // AAC sampling frequencies ordered by their AudioSpecificConfig
    // "samplingFrequencyIndex" (index 0 = 96000 Hz ... index 12 (0xC) = 7350 Hz).
    private val AAC_SAMPLE_RATES = intArrayOf(
        96000, 88200, 64000, 48000, 44100, 32000, 24000,
        22050, 16000, 12000, 11025, 8000, 7350,
    )

    /**
     * Builds the 2 byte AAC AudioSpecificConfig blob (used as "csd-0" for MediaCodec).
     *
     * Bit layout (16 bits total):
     *   5 bits audio object type (e.g. 2 = AOT_LC),
     *   4 bits sampling frequency index,
     *   4 bits channel configuration,
     *   3 bits zero padding.
     *
     * @param audioProfile AAC audio object type (profile), e.g. 2 for AAC-LC
     * @param sampleRate   sampling rate in Hz; an unsupported rate falls back to index 0 (96000 Hz)
     * @param channels     channel configuration (1 = mono, 2 = stereo, ...)
     * @return two bytes, big-endian (high byte first)
     */
    fun getAacDecoderConfigData(audioProfile: Int, sampleRate: Int, channels: Int): ByteArray {
        // indexOf() returns -1 for an unknown rate; coercing to 0 keeps the original
        // behavior of leaving the frequency bits unset (which equals index 0).
        val freqIndex = AAC_SAMPLE_RATES.indexOf(sampleRate).coerceAtLeast(0)
        val config = (audioProfile shl 11) or (freqIndex shl 7) or (channels shl 3)
        return byteArrayOf(
            ((config shr 8) and 0xff).toByte(), // high byte
            (config and 0xff).toByte(),         // low byte
        )
    }
}
}
================================================
FILE: library-client-rtsp/src/main/java/com/alexvas/rtsp/codec/FrameQueue.kt
================================================
package com.alexvas.rtsp.codec
import java.util.concurrent.ArrayBlockingQueue
import java.util.concurrent.TimeUnit
/** Video codec carried by a [FrameQueue.VideoFrame]. */
enum class VideoCodecType {
    H264, H265, UNKNOWN
}
/** Audio codec carried by a [FrameQueue.AudioFrame]. */
enum class AudioCodecType {
    AAC_LC, G711_ALAW, G711_MLAW, UNKNOWN
}
/** Frame queue specialized for video frames. */
class VideoFrameQueue(frameQueueCapacity: Int): FrameQueue<FrameQueue.VideoFrame>(frameQueueCapacity)

/** Frame queue specialized for audio frames. */
class AudioFrameQueue(frameQueueCapacity: Int): FrameQueue<FrameQueue.AudioFrame>(frameQueueCapacity)

/**
 * Queue for concurrent adding/removing audio/video frames.
 *
 * Backed by a bounded [ArrayBlockingQueue]; [push] gives up after a short timeout when
 * the queue is full (frames are dropped rather than blocking the producer indefinitely).
 *
 * @param T type of frames stored in the queue
 * @param frameQueueCapacity maximum number of frames held at once
 */
open class FrameQueue<T : FrameQueue.Frame>(private val frameQueueCapacity: Int) {

    /** Common contract for a single encoded audio or video frame. */
    interface Frame {
        val data: ByteArray
        val offset: Int
        val length: Int
        val timestampMs: Long // presentation time in msec
    }

    data class VideoFrame(
        /** Only H264 codec supported */
        val codecType: VideoCodecType,
        /** Indicates whether it is a keyframe or not */
        val isKeyframe: Boolean,
        override val data: ByteArray,
        override val offset: Int,
        override val length: Int,
        /** Video frame timestamp (msec) generated by camera */
        override val timestampMs: Long,
        /** Captured (received) video frame timestamp (msec). If -1, not supported. */
        val capturedTimestampMs: Long = -1
    ) : Frame

    data class AudioFrame(
        val codecType: AudioCodecType,
//      val sampleRate: Int,
        override val data: ByteArray,
        override val offset: Int,
        override val length: Int,
        override val timestampMs: Long,
    ) : Frame

    private val queue = ArrayBlockingQueue<T>(frameQueueCapacity)

    /** Current number of frames in the queue. */
    val size: Int
        get() = queue.size

    /** Maximum number of frames the queue can hold. */
    val capacity: Int
        get() = frameQueueCapacity

    /**
     * Adds a frame, waiting up to 5 msec for free space.
     * @return true if the frame was added, false if the queue stayed full (frame dropped)
     */
    @Throws(InterruptedException::class)
    fun push(frame: T): Boolean {
        if (queue.offer(frame, 5, TimeUnit.MILLISECONDS)) {
            return true
        }
        // Log.w(TAG, "Cannot add frame, queue is full")
        return false
    }

    /**
     * Removes and returns the head frame, waiting up to [timeout] msec.
     * Returns null on timeout. If interrupted while waiting, re-interrupts the
     * current thread and returns null instead of propagating the exception.
     */
    @Throws(InterruptedException::class)
    open fun pop(timeout: Long = 1000): T? {
        try {
            val frame: T? = queue.poll(timeout, TimeUnit.MILLISECONDS)
            // if (frame == null) {
            //     Log.w(TAG, "Cannot get frame within 1 sec, queue is empty")
            // }
            return frame
        } catch (e: InterruptedException) {
            Thread.currentThread().interrupt()
        }
        return null
    }

    /** Removes all frames from the queue. */
    fun clear() {
        queue.clear()
    }

    /** Copies all queued frames into [dstFrameQueue] without removing them from this queue. */
    fun copyInto(dstFrameQueue: FrameQueue<T>) {
        dstFrameQueue.queue.addAll(queue)
    }

    companion object {
        private val TAG: String = FrameQueue::class.java.simpleName
    }
}
================================================
FILE: library-client-rtsp/src/main/java/com/alexvas/rtsp/codec/VideoDecodeThread.kt
================================================
package com.alexvas.rtsp.codec
import android.annotation.SuppressLint
import android.media.MediaCodec
import android.media.MediaCodec.OnFrameRenderedListener
import android.media.MediaFormat
import android.os.Build
import android.os.Handler
import android.os.Looper
import android.os.Process
import android.util.Log
import com.alexvas.utils.MediaCodecUtils
import com.alexvas.utils.capabilitiesToString
import androidx.media3.common.util.Util
import com.alexvas.utils.VideoCodecUtils
import com.limelight.binding.video.MediaCodecHelper
import java.lang.Integer.min
import java.nio.ByteBuffer
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.AtomicBoolean
/**
 * Base video decoding thread. Pops encoded H.264/H.265 frames from [videoFrameQueue],
 * feeds them into a MediaCodec decoder and delegates output buffer handling to subclasses
 * (surface rendering or bitmap conversion). Falls back from hardware to software decoding
 * on fatal decoder errors.
 */
abstract class VideoDecodeThread (
    protected val mimeType: String,
    protected val width: Int,
    protected val height: Int,
    protected val rotation: Int, // 0, 90, 180, 270
    protected val videoFrameQueue: VideoFrameQueue,
    protected val videoDecoderListener: VideoDecoderListener,
    protected var videoDecoderType: DecoderType
) : Thread() {

    enum class DecoderType {
        HARDWARE,
        SOFTWARE // fallback
    }

    interface VideoDecoderListener {
        /** Video decoder successfully started */
        fun onVideoDecoderStarted() {}
        /** Video decoder successfully stopped */
        fun onVideoDecoderStopped() {}
        /** Fatal error occurred */
        fun onVideoDecoderFailed(message: String?) {}
        /** Resolution changed */
        fun onVideoDecoderFormatChanged(width: Int, height: Int) {}
        /** First video frame rendered */
        fun onVideoDecoderFirstFrameRendered() {}
    }

    protected val uiHandler = Handler(Looper.getMainLooper())
    protected var exitFlag = AtomicBoolean(false)
    protected var firstFrameRendered = false

    /** Decoder latency used for statistics */
    @Volatile private var decoderLatency = -1
    /** Flag for allowing calculating latency */
    private var decoderLatencyRequested = false
    /** Network latency used for statistics */
    @Volatile private var networkLatency = -1
    private var videoDecoderName: String? = null
    private var firstFrameDecoded = false
    @Volatile private var videoFrameRateStabilization = false

    /** Requests the thread to stop; wakes up any blocking wait via interrupt. */
    fun stopAsync() {
        if (DEBUG) Log.v(TAG, "stopAsync()")
        exitFlag.set(true)
        // Wake up sleep() code
        interrupt()
    }

    /**
     * Currently used video decoder. Video decoder can be changed on runtime.
     * If videoDecoderType set to HARDWARE, it can be switched to SOFTWARE in case of decoding issue
     * (e.g. hardware decoder does not support the stream resolution).
     * If videoDecoderType set to SOFTWARE, it will always remain SOFTWARE (no any changes).
     */
    fun getCurrentVideoDecoderType(): DecoderType {
        return videoDecoderType
    }

    /** Name of the MediaCodec decoder currently in use, or null if none started yet. */
    fun getCurrentVideoDecoderName(): String? {
        return videoDecoderName
    }

    /**
     * Get frames decoding/rendering latency in msec. Returns -1 if not supported.
     */
    fun getCurrentVideoDecoderLatencyMsec(): Int {
        // Calling this enables keyframe timestamp bookkeeping in the decode loop.
        decoderLatencyRequested = true
        return decoderLatency
    }

    /**
     * Get network latency in msec. Returns -1 if not supported.
     */
    fun getCurrentNetworkLatencyMsec(): Int {
        return networkLatency
    }

    /** Enables/disables frame rate stabilization (used by surface rendering subclass). */
    fun setVideoFrameRateStabilization(enable: Boolean) {
        if (DEBUG) Log.v(TAG, "setVideoFrameRateStabilization(enable=$enable)")
        videoFrameRateStabilization = enable
    }

    fun hasVideoFrameRateStabilization(): Boolean {
        return videoFrameRateStabilization
    }

    /**
     * Returns a width/height pair supported by the decoder, starting from the requested
     * [width]x[height] and falling back to alignment-rounded or maximum supported sizes.
     * Returns (-1, -1) if the codec is not a video decoder.
     */
    @SuppressLint("UnsafeOptInUsageError")
    private fun getDecoderSafeWidthHeight(decoder: MediaCodec): Pair<Int, Int> {
        if (DEBUG) Log.v(TAG, "getDecoderSafeWidthHeight()")
        val capabilities = decoder.codecInfo.getCapabilitiesForType(mimeType).videoCapabilities
        return if (capabilities == null) {
            Log.e(TAG, "Not a video decoder")
            Pair(-1, -1)
        } else if (capabilities.isSizeSupported(width, height)) {
            Log.i(TAG, "Video decoder frame size ${width}x${height} supported")
            Pair(width, height)
        } else {
            Log.w(TAG, "Video decoder frame size ${width}x${height} is not supported")
            val widthAlignment = capabilities.widthAlignment
            val heightAlignment = capabilities.heightAlignment
            // Round size up to the decoder's alignment requirements.
            val w = Util.ceilDivide(width, widthAlignment) * widthAlignment
            val h = Util.ceilDivide(height, heightAlignment) * heightAlignment
            if (capabilities.isSizeSupported(w, h)) {
                Log.i(TAG, "Video decoder frame size ${w}x${h} calculated from alignment ${widthAlignment}x${heightAlignment} and original size ${width}x${height}")
                Pair(w, h)
            } else {
                val p = Pair(capabilities.supportedWidths.upper, capabilities.supportedHeights.upper)
                // Log the size actually returned, not the rejected aligned size.
                Log.i(TAG, "Video decoder max supported frame size ${p.first}x${p.second}")
                p
            }
        }
    }

    /**
     * Extracts width/height from the decoder output format, preferring crop parameters
     * over KEY_WIDTH/KEY_HEIGHT which can report padded values (e.g. 1088 instead of 1080).
     */
    @SuppressLint("InlinedApi")
    private fun getWidthHeight(mediaFormat: MediaFormat): Pair<Int, Int> {
        // Sometimes height obtained via KEY_HEIGHT is not valid, e.g. can be 1088 instead 1080
        // (no problems with width though). Use crop parameters to correctly determine height.
        val hasCrop =
            mediaFormat.containsKey(MediaFormat.KEY_CROP_RIGHT) && mediaFormat.containsKey(MediaFormat.KEY_CROP_LEFT) &&
            mediaFormat.containsKey(MediaFormat.KEY_CROP_BOTTOM) && mediaFormat.containsKey(MediaFormat.KEY_CROP_TOP)
        val width =
            if (hasCrop)
                mediaFormat.getInteger(MediaFormat.KEY_CROP_RIGHT) - mediaFormat.getInteger(MediaFormat.KEY_CROP_LEFT) + 1
            else
                mediaFormat.getInteger(MediaFormat.KEY_WIDTH)
        var height =
            if (hasCrop)
                mediaFormat.getInteger(MediaFormat.KEY_CROP_BOTTOM) - mediaFormat.getInteger(MediaFormat.KEY_CROP_TOP) + 1
            else
                mediaFormat.getInteger(MediaFormat.KEY_HEIGHT)
        // Fix for 1080p resolution for Samsung S21
        // {crop-right=1919, max-height=4320, sar-width=1, color-format=2130708361, mime=video/raw,
        //  hdr-static-info=java.nio.HeapByteBuffer[pos=0 lim=25 cap=25],
        //  priority=0, color-standard=1, feature-secure-playback=0, color-transfer=3, sar-height=1,
        //  crop-bottom=1087, max-width=8192, crop-left=0, width=1920, color-range=2, crop-top=0,
        //  rotation-degrees=0, frame-rate=30, height=1088}
        height = height / 16 * 16 // 1088 -> 1080
        // if (height == 1088)
        //     height = 1080
        return Pair(width, height)
    }

    /** Builds the MediaFormat used to configure the decoder (size, rotation, low-latency options). */
    private fun getDecoderMediaFormat(decoder: MediaCodec): MediaFormat {
        if (DEBUG) Log.v(TAG, "getDecoderMediaFormat()")
        val safeWidthHeight = getDecoderSafeWidthHeight(decoder)
        val format = MediaFormat.createVideoFormat(mimeType, safeWidthHeight.first, safeWidthHeight.second)
        if (DEBUG)
            Log.d(TAG, "Configuring surface ${safeWidthHeight.first}x${safeWidthHeight.second} w/ '$mimeType'")
        else
            Log.i(TAG, "Configuring surface ${safeWidthHeight.first}x${safeWidthHeight.second} w/ '$mimeType'")
        format.setInteger(MediaFormat.KEY_ROTATION, rotation)
        // if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
        //     // format.setFeatureEnabled(android.media.MediaCodecInfo.CodecCapabilities.FEATURE_LowLatency, true)
        //     // Request low-latency for the decoder. Not all of the decoders support that.
        //     format.setInteger(MediaFormat.KEY_LOW_LATENCY, 1)
        // }
        val succeeded = MediaCodecHelper.setDecoderLowLatencyOptions(format, decoder.codecInfo, 1)
        Log.i(TAG, "Low-latency: $succeeded")
        return format
    }

    /** Decoder created */
    abstract fun decoderCreated(mediaCodec: MediaCodec, mediaFormat: MediaFormat)

    /** Frame processed */
    abstract fun releaseOutputBuffer(
        mediaCodec: MediaCodec,
        outIndex: Int,
        bufferInfo: MediaCodec.BufferInfo,
        render: Boolean
    )

    /** Decoder stopped and released */
    abstract fun decoderDestroyed(mediaCodec: MediaCodec)

    /**
     * Creates, configures and starts a decoder of the requested [decoderType].
     * Prefers a dedicated low-latency hardware decoder when available.
     */
    private fun createVideoDecoderAndStart(decoderType: DecoderType): MediaCodec {
        if (DEBUG) Log.v(TAG, "createVideoDecoderAndStart(decoderType=$decoderType)")
        @SuppressLint("UnsafeOptInUsageError")
        val decoder = when (decoderType) {
            DecoderType.HARDWARE -> {
                val hwDecoders = MediaCodecUtils.getHardwareDecoders(mimeType)
                if (hwDecoders.isEmpty()) {
                    Log.w(TAG, "Cannot get hardware video decoders for mime type '$mimeType'. Using default one.")
                    MediaCodec.createDecoderByType(mimeType)
                } else {
                    val lowLatencyDecoder = MediaCodecUtils.getLowLatencyDecoder(hwDecoders)
                    val name = lowLatencyDecoder?.let {
                        Log.i(TAG, "[$name] Dedicated low-latency decoder found '${lowLatencyDecoder.name}'")
                        lowLatencyDecoder.name
                    } ?: hwDecoders[0].name
                    MediaCodec.createByCodecName(name)
                }
            }
            DecoderType.SOFTWARE -> {
                val swDecoders = MediaCodecUtils.getSoftwareDecoders(mimeType)
                if (swDecoders.isEmpty()) {
                    Log.w(TAG, "Cannot get software video decoders for mime type '$mimeType'. Using default one.")
                    MediaCodec.createDecoderByType(mimeType)
                } else {
                    val name = swDecoders[0].name
                    MediaCodec.createByCodecName(name)
                }
            }
        }
        this.videoDecoderType = decoderType
        this.videoDecoderName = decoder.name
        val frameRenderedListener = OnFrameRenderedListener { _, _, _ ->
            if (!firstFrameRendered) {
                firstFrameRendered = true
                uiHandler.post {
                    videoDecoderListener.onVideoDecoderFirstFrameRendered()
                }
            }
        }
        decoder.setOnFrameRenderedListener(frameRenderedListener, null)
        val format = getDecoderMediaFormat(decoder)
        decoderCreated(decoder, format)
        decoder.start()
        val capabilities = decoder.codecInfo.getCapabilitiesForType(mimeType)
        val lowLatencySupport = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
            capabilities.isFeatureSupported(android.media.MediaCodecInfo.CodecCapabilities.FEATURE_LowLatency)
        } else {
            false
        }
        Log.i(TAG, "[$name] Video decoder '${decoder.name}' started " +
                "(${if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { if (decoder.codecInfo.isHardwareAccelerated) "hardware" else "software" } else ""}, " +
                "${capabilities.capabilitiesToString()}, " +
                "${if (lowLatencySupport) "w/" else "w/o"} low-latency support)")
        return decoder
    }

    /** Stops and releases the decoder, clears the frame queue and notifies the subclass. */
    private fun stopAndReleaseVideoDecoder(decoder: MediaCodec) {
        if (DEBUG) Log.v(TAG, "stopAndReleaseVideoDecoder()")
        val type = videoDecoderType.toString().lowercase()
        Log.i(TAG, "Stopping $type video decoder...")
        try {
            decoder.stop()
            Log.i(TAG, "Decoder successfully stopped")
        } catch (e3: Throwable) {
            Log.e(TAG, "Failed to stop decoder", e3)
        }
        Log.i(TAG, "Releasing decoder...")
        try {
            decoder.release()
            Log.i(TAG, "Decoder successfully released")
        } catch (e3: Throwable) {
            Log.e(TAG, "Failed to release decoder", e3)
        }
        videoFrameQueue.clear()
        decoderDestroyed(decoder)
    }

    override fun run() {
        if (DEBUG) Log.d(TAG, "$name started")
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) {
            Process.setThreadPriority(Process.THREAD_PRIORITY_VIDEO)
        }
        videoDecoderListener.onVideoDecoderStarted()
        try {
            Log.i(TAG, "Starting hardware video decoder...")
            var decoder = try {
                createVideoDecoderAndStart(videoDecoderType)
            } catch (e: Throwable) {
                Log.e(TAG, "Failed to start $videoDecoderType video decoder (${e.message})", e)
                Log.i(TAG, "Starting software video decoder...")
                try {
                    createVideoDecoderAndStart(DecoderType.SOFTWARE)
                } catch (e2: Throwable) {
                    Log.e(TAG, "Failed to start video software decoder. Exiting...", e2)
                    // Unexpected behavior
                    videoDecoderListener.onVideoDecoderFailed("Cannot initialize video decoder for mime type '$mimeType'")
                    return
                }
            }
            val bufferInfo = MediaCodec.BufferInfo()
            try {
                var widthHeightFromStream: Pair<Int, Int>? = null
                // Map for calculating decoder rendering latency.
                // key - original frame timestamp, value - timestamp when frame was added to the map
                val keyframesTimestamps = HashMap<Long, Long>()
                var frameQueuedMsec = System.currentTimeMillis()
                var frameAlreadyDequeued = false
                // Main loop
                while (!exitFlag.get()) {
                    try {
                        val inIndex: Int = decoder.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT_US)
                        if (inIndex >= 0) {
                            // fill inputBuffers[inputBufferIndex] with valid data
                            val byteBuffer: ByteBuffer? = decoder.getInputBuffer(inIndex)
                            byteBuffer?.rewind()
                            // Preventing BufferOverflowException
                            // if (length > byteBuffer.limit()) throw DecoderFatalException("Error")
                            val frame = videoFrameQueue.pop()
                            if (frame == null) {
                                Log.d(TAG, "Empty video frame")
                                // Release input buffer
                                decoder.queueInputBuffer(inIndex, 0, 0, 0L, 0)
                            } else {
                                // Add timestamp for keyframe to calculating latency further.
                                if ((DEBUG || decoderLatencyRequested) && frame.isKeyframe) {
                                    if (keyframesTimestamps.size > 5) {
                                        // Something wrong with map. Allow only 5 map entries.
                                        keyframesTimestamps.clear()
                                    }
                                    val l = System.currentTimeMillis()
                                    keyframesTimestamps[frame.timestampMs] = l
                                    // Log.d(TAG, "Added $l")
                                }
                                // Calculate network latency
                                networkLatency = if (frame.capturedTimestampMs > -1)
                                    (frame.timestampMs - frame.capturedTimestampMs).toInt()
                                else
                                    -1
                                byteBuffer?.put(frame.data, frame.offset, frame.length)
                                if (DEBUG) {
                                    val l = System.currentTimeMillis()
                                    Log.i(TAG, "\tFrame queued (${l - frameQueuedMsec}) ${if (frame.isKeyframe) "key frame" else ""}")
                                    frameQueuedMsec = l
                                }
                                val flags = if (frame.isKeyframe)
                                    (MediaCodec.BUFFER_FLAG_KEY_FRAME /*or MediaCodec.BUFFER_FLAG_CODEC_CONFIG*/) else 0
                                decoder.queueInputBuffer(inIndex, frame.offset, frame.length, frame.timestampMs, flags)
                                if (frame.isKeyframe) {
                                    // Obtain width and height from stream
                                    widthHeightFromStream = try {
                                        VideoCodecUtils.getWidthHeightFromArray(
                                            frame.data,
                                            frame.offset,
                                            // Check only first 100 bytes maximum. That's enough for finding SPS NAL unit.
                                            min(frame.length, VideoCodecUtils.MAX_NAL_SPS_SIZE),
                                            isH265 = frame.codecType == VideoCodecType.H265
                                        )
                                    } catch (_: Exception) {
                                        // Log.e(TAG, "Failed to parse width/height from SPS frame. SPS frame seems to be corrupted.", e)
                                        null
                                    }
                                    // Log.i(TAG, "width/height: ${widthHeightFromStream?.first}x${widthHeightFromStream?.second}")
                                }
                            }
                        }
                        if (exitFlag.get()) break

                        // Get all output buffer frames until no buffer from decoder available (INFO_TRY_AGAIN_LATER).
                        // Single input buffer frame can contain several frames, e.g. SPS + PPS + IDR.
                        // Thus dequeueOutputBuffer should be called several times.
                        // First time it obtains SPS + PPS, second one - IDR frame.
                        do {
                            // For the first time wait for a frame within 100 msec, next times no timeout
                            val timeout = if (frameAlreadyDequeued || !firstFrameDecoded) 0L else DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US
                            val outIndex = decoder.dequeueOutputBuffer(bufferInfo, timeout)
                            when (outIndex) {
                                // Resolution changed
                                MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED, MediaCodec.INFO_OUTPUT_FORMAT_CHANGED -> {
                                    Log.d(TAG, "Decoder format changed: ${decoder.outputFormat}")
                                    // Decoder can contain different resolution (it can make downsampling).
                                    // If resolution successfully obtained from SPS frame, use it.
                                    val widthHeightFromDecoder = getWidthHeight(decoder.outputFormat)
                                    val widthHeight = widthHeightFromStream ?: widthHeightFromDecoder
                                    Log.i(TAG, "Video decoder resolution: ${widthHeightFromDecoder.first}x${widthHeightFromDecoder.second}, stream resolution: ${widthHeightFromStream?.first}x${widthHeightFromStream?.second}")
                                    // val widthHeightFromDecoder = getWidthHeight(decoder.outputFormat)
                                    val rotation = if (decoder.outputFormat.containsKey(MediaFormat.KEY_ROTATION)) {
                                        decoder.outputFormat.getInteger(MediaFormat.KEY_ROTATION)
                                    } else {
                                        // Some devices like Samsung SM-A505U (Android 11) do not allow
                                        // video stream rotation on decoding for hardware decoder
                                        Log.w(TAG, "Video stream rotation is not supported by this Android device (${Build.MODEL} - ${Build.DEVICE}, codec: '${decoder.name}')")
                                        0
                                    }
                                    uiHandler.post {
                                        // Run in UI thread
                                        when (rotation) {
                                            90, 270 -> videoDecoderListener.onVideoDecoderFormatChanged(widthHeight.second, widthHeight.first)
                                            else -> videoDecoderListener.onVideoDecoderFormatChanged(widthHeight.first, widthHeight.second)
                                        }
                                    }
                                    frameAlreadyDequeued = true
                                }
                                // No any frames in queue
                                MediaCodec.INFO_TRY_AGAIN_LATER -> {
                                    if (DEBUG) Log.d(TAG, "No output from decoder available")
                                    frameAlreadyDequeued = true
                                }
                                // Frame decoded
                                else -> {
                                    if (outIndex >= 0) {
                                        if (DEBUG || decoderLatencyRequested) {
                                            val ts = bufferInfo.presentationTimeUs
                                            keyframesTimestamps.remove(ts)?.apply {
                                                decoderLatency = (System.currentTimeMillis() - this).toInt()
                                                // Log.d(TAG, "Removed $this")
                                            }
                                        }
                                        val render = bufferInfo.size != 0 && !exitFlag.get()
                                        if (DEBUG) Log.i(TAG, "\tFrame decoded [outIndex=$outIndex, render=$render]")
                                        releaseOutputBuffer(decoder, outIndex, bufferInfo, render)
                                        if (!firstFrameDecoded && render) {
                                            firstFrameDecoded = true
                                        }
                                        frameAlreadyDequeued = false
                                    } else {
                                        Log.e(TAG, "Obtaining frame failed w/ error code $outIndex")
                                    }
                                }
                            }
                            // For SPS/PPS frame request another frame (IDR)
                        } while (outIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED || outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED)
                        // } while (outIndex != MediaCodec.INFO_TRY_AGAIN_LATER)

                        // All decoded frames have been rendered, we can stop playing now
                        if (bufferInfo.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM != 0) {
                            if (DEBUG) Log.d(TAG, "OutputBuffer BUFFER_FLAG_END_OF_STREAM")
                            break
                        }
                    } catch (_: InterruptedException) {
                    } catch (e: IllegalStateException) {
                        // Restarting decoder in software mode
                        Log.e(TAG, "${e.message}", e)
                        stopAndReleaseVideoDecoder(decoder)
                        Log.i(TAG, "Starting software video decoder...")
                        decoder = createVideoDecoderAndStart(DecoderType.SOFTWARE)
                        Log.i(TAG, "Software video decoder '${decoder.name}' started (${decoder.codecInfo.getCapabilitiesForType(mimeType).capabilitiesToString()})")
                    } catch (e: MediaCodec.CodecException) {
                        Log.w(TAG, "${e.diagnosticInfo}\nisRecoverable: ${e.isRecoverable}, isTransient: ${e.isTransient}")
                        if (e.isRecoverable) {
                            // Recoverable error.
                            // Calling stop(), configure(), and start() to recover.
                            Log.i(TAG, "Recovering video decoder...")
                            try {
                                decoder.stop()
                                val format = getDecoderMediaFormat(decoder)
                                decoderCreated(decoder, format)
                                decoder.start()
                                Log.i(TAG, "Video decoder recovering succeeded")
                            } catch (e2: Throwable) {
                                Log.e(TAG, "Video decoder recovering failed")
                                Log.e(TAG, "${e2.message}", e2)
                            }
                        } else if (e.isTransient) {
                            // Transient error. Resources are temporarily unavailable and
                            // the method may be retried at a later time.
                            Log.w(TAG, "Video decoder resource temporarily unavailable")
                        } else {
                            // Fatal error. Restarting decoder in software mode.
                            stopAndReleaseVideoDecoder(decoder)
                            Log.i(TAG, "Starting video software decoder...")
                            decoder = createVideoDecoderAndStart(DecoderType.SOFTWARE)
                            Log.i(TAG, "Software video decoder '${decoder.name}' started (${decoder.codecInfo.getCapabilitiesForType(mimeType).capabilitiesToString()})")
                        }
                    } catch (e: Throwable) {
                        Log.e(TAG, "${e.message}", e)
                    }
                } // while

                // Drain decoder
                val inIndex: Int = decoder.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT_US)
                if (inIndex >= 0) {
                    decoder.queueInputBuffer(inIndex, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM)
                } else {
                    Log.w(TAG, "Not able to signal end of stream")
                }
            } catch (e2: Throwable) {
                Log.e(TAG, "${e2.message}", e2)
            } finally {
                stopAndReleaseVideoDecoder(decoder)
            }
        } catch (e: Throwable) {
            Log.e(TAG, "$name stopped due to '${e.message}'")
            videoDecoderListener.onVideoDecoderFailed(e.message)
            // While configuring stopAsync can be called and surface released. Just exit.
            if (!exitFlag.get()) e.printStackTrace()
            return
        }
        videoDecoderListener.onVideoDecoderStopped()
        if (DEBUG) Log.d(TAG, "$name stopped")
    }

    companion object {
        internal val TAG: String = VideoDecodeThread::class.java.simpleName
        internal const val DEBUG = false
        private val DEQUEUE_INPUT_TIMEOUT_US = TimeUnit.MILLISECONDS.toMicros(500)
        private val DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = TimeUnit.MILLISECONDS.toMicros(100)
    }
}
================================================
FILE: library-client-rtsp/src/main/java/com/alexvas/rtsp/codec/VideoDecoderBitmapThread.kt
================================================
package com.alexvas.rtsp.codec
import android.graphics.Bitmap
import android.graphics.Matrix
import android.media.MediaCodec
import android.media.MediaFormat
import android.util.Log
import com.alexvas.rtsp.codec.color.ColorConverterImageAndroidX
/**
 * Video decoding thread that converts every decoded frame into a [Bitmap] and delivers
 * it on the UI thread via [VideoDecoderBitmapListener]. No [android.view.Surface] is used,
 * so rotation is applied by rotating the bitmap itself.
 */
class VideoDecoderBitmapThread(
    mimeType: String,
    rotation: Int, // 0, 90, 180, 270
    videoFrameQueue: VideoFrameQueue,
    videoDecoderListener: VideoDecoderListener,
    private val videoDecoderBitmapListener: VideoDecoderBitmapListener,
    videoDecoderType: DecoderType = DecoderType.HARDWARE
): VideoDecodeThread(
    mimeType,
    1920,
    1080,
    rotation,
    videoFrameQueue,
    videoDecoderListener,
    videoDecoderType
) {

    interface VideoDecoderBitmapListener {
        /** Used only when OutputType.BUFFERS is used */
        fun onVideoDecoderBitmapObtained(bitmap: Bitmap) {}
    }

    // Lazily created on the first decoded frame, released in decoderDestroyed().
    private var colorConverter: ColorConverterImageAndroidX? = null

    override fun decoderCreated(mediaCodec: MediaCodec, mediaFormat: MediaFormat) {
        if (DEBUG) Log.v(TAG, "decoderCreated()")
        // No surface - decoded frames are read back as images.
        mediaCodec.configure(mediaFormat, null, null, 0)
    }

    override fun releaseOutputBuffer(
        mediaCodec: MediaCodec,
        outIndex: Int,
        bufferInfo: MediaCodec.BufferInfo,
        render: Boolean
    ) {
        mediaCodec.getOutputImage(outIndex)?.use { image ->
            val converter = colorConverter
                ?: ColorConverterImageAndroidX().also { colorConverter = it }
            // Converting YUV 4:2:0 888 to Bitmap ARGB 8888
            val decoded = converter.getBitmapFromImage(image)
            // Rotation does not work in VideoDecoderThread since we do not use Surface there.
            // Rotate bitmaps.
            val bitmap = when (rotation) {
                0 -> decoded.createCopy565()
                else -> decoded.rotateBitmap(rotation.toFloat())
            }
            uiHandler.post {
                if (!firstFrameRendered) {
                    firstFrameRendered = true
                    videoDecoderListener.onVideoDecoderFirstFrameRendered()
                }
                videoDecoderBitmapListener.onVideoDecoderBitmapObtained(bitmap)
            }
        }
        // Never render to a surface - the bitmap was already delivered above.
        mediaCodec.releaseOutputBuffer(outIndex, false)
    }

    override fun decoderDestroyed(mediaCodec: MediaCodec) {
        if (DEBUG) Log.v(TAG, "decoderDestroyed()")
        val converter = colorConverter ?: return
        try {
            Log.i(TAG, "Releasing color converter...")
            converter.release()
            Log.i(TAG, "Color converter successfully released")
        } catch (e: Throwable) {
            Log.e(TAG, "Failed to release color converter", e)
        }
    }
}
/** Returns a mutable copy of this bitmap converted to RGB 565. */
fun Bitmap.createCopy565(): Bitmap = copy(Bitmap.Config.RGB_565, true)
/** Returns a new bitmap rotated clockwise by [angle] degrees. */
fun Bitmap.rotateBitmap(angle: Float): Bitmap {
    val transform = Matrix().apply { postRotate(angle) }
    return Bitmap.createBitmap(this, 0, 0, width, height, transform, true)
}
================================================
FILE: library-client-rtsp/src/main/java/com/alexvas/rtsp/codec/VideoDecoderSurfaceThread.kt
================================================
package com.alexvas.rtsp.codec
import android.media.MediaCodec
import android.media.MediaFormat
import android.util.Log
import android.view.Surface
import java.util.concurrent.TimeUnit
import kotlin.math.max
/**
 * Video decoding thread that renders decoded frames directly onto the given [Surface].
 * Optionally applies frame rate stabilization: frames are released to the surface at
 * timestamps derived from their RTP presentation times instead of immediately,
 * smoothing out network jitter. Critically late frames are dropped.
 */
class VideoDecoderSurfaceThread(
    private val surface: Surface,
    mimeType: String,
    width: Int,
    height: Int,
    rotation: Int, // 0, 90, 180, 270
    videoFrameQueue: VideoFrameQueue,
    videoDecoderListener: VideoDecoderListener,
    videoDecoderType: DecoderType = DecoderType.HARDWARE,
    videoFrameRateStabilization: Boolean = false,
) : VideoDecodeThread(
    mimeType, width, height, rotation, videoFrameQueue, videoDecoderListener, videoDecoderType
) {

    /**
     * Presentation time (in RTP units converted to microseconds) of the first frame used as the
     * PTS baseline.
     */
    private var streamStartPtsUs: Long? = null

    /**
     * Monotonic clock timestamp corresponding to streamStartPtsUs, used to map future frames
     * to real time.
     */
    private var playbackStartRealtimeNs: Long? = null

    /**
     * Timestamp of the most recently released frame to enforce minimum spacing between consecutive
     * frames.
     */
    private var lastFrameReleaseTimeNs: Long = Long.MIN_VALUE

    /**
     * Last presentation timestamp we processed; used to detect wrap-around or backwards jumps.
     */
    private var lastPresentationTimeUs: Long = Long.MIN_VALUE

    init {
        setVideoFrameRateStabilization(videoFrameRateStabilization)
    }

    override fun decoderCreated(mediaCodec: MediaCodec, mediaFormat: MediaFormat) {
        if (DEBUG) Log.v(TAG, "decoderCreated()")
        if (!surface.isValid) {
            // Configuration proceeds anyway; MediaCodec.configure will fail if surface is unusable.
            Log.e(TAG, "Surface invalid")
        }
        mediaCodec.configure(mediaFormat, surface, null, 0)
        resetFrameTiming()
    }

    /**
     * Releases the output buffer at a target time computed from the frame's PTS relative to the
     * first frame, re-basing the clock on backwards PTS jumps, enforcing minimum inter-frame
     * spacing, and dropping frames that are critically late.
     */
    private fun releaseOutputBufferWithFrameRateStabilization(
        mediaCodec: MediaCodec,
        outIndex: Int,
        bufferInfo: MediaCodec.BufferInfo
    ) {
        if (DEBUG) Log.v(TAG, "releaseOutputBufferWithFrameRateStabilization(outIndex=$outIndex)")
        val ptsUs = bufferInfo.presentationTimeUs
        val nowNs = System.nanoTime()
        if (streamStartPtsUs == null || playbackStartRealtimeNs == null) {
            // First frame (or after a reset): initialize all timing anchors.
            streamStartPtsUs = ptsUs
            playbackStartRealtimeNs = nowNs
            lastFrameReleaseTimeNs = nowNs
            lastPresentationTimeUs = ptsUs
            mediaCodec.releaseOutputBuffer(outIndex, nowNs)
            return
        }
        // Map the frame's PTS delta (usec -> nsec) onto the monotonic clock baseline.
        var targetNs = playbackStartRealtimeNs!! + (ptsUs - streamStartPtsUs!!) * 1000L
        var adjustedNowNs = System.nanoTime()
        if (lastPresentationTimeUs != Long.MIN_VALUE && ptsUs < lastPresentationTimeUs) {
            // PTS went backwards (e.g. codec reordering). Re-base the clock to avoid negative deltas.
            streamStartPtsUs = ptsUs
            playbackStartRealtimeNs = adjustedNowNs
            targetNs = adjustedNowNs
        }
        if (lastFrameReleaseTimeNs != Long.MIN_VALUE) {
            // Ensure we never schedule two frames closer together than the min spacing.
            targetNs = max(targetNs, lastFrameReleaseTimeNs + MIN_FRAME_SPACING_NS)
        }
        adjustedNowNs = System.nanoTime()
        val latenessNs = adjustedNowNs - targetNs
        if (latenessNs >= FRAME_DROP_THRESHOLD_NS) {
            // Frame is critically late; drop to keep playback responsive.
            mediaCodec.releaseOutputBuffer(outIndex, false)
            lastFrameReleaseTimeNs = adjustedNowNs
            return
        }
        var correctedTargetNs = targetNs
        if (latenessNs > 0) {
            // For mild lateness, shift the playback baseline forward so future frames stay aligned.
            val correction = minOf(latenessNs, FRAME_DROP_THRESHOLD_NS)
            playbackStartRealtimeNs = playbackStartRealtimeNs?.plus(correction)
            correctedTargetNs += correction
        }
        if (correctedTargetNs <= adjustedNowNs + RENDER_EARLY_MARGIN_NS) {
            // Already at/behind the target time: render immediately using the current VSYNC.
            mediaCodec.releaseOutputBuffer(outIndex, true)
            lastFrameReleaseTimeNs = adjustedNowNs
        } else {
            // Still early enough: hand the desired release timestamp to MediaCodec for VSYNC alignment.
            mediaCodec.releaseOutputBuffer(outIndex, correctedTargetNs)
            lastFrameReleaseTimeNs = correctedTargetNs
        }
        lastPresentationTimeUs = ptsUs
    }

    override fun releaseOutputBuffer(
        mediaCodec: MediaCodec,
        outIndex: Int,
        bufferInfo: MediaCodec.BufferInfo,
        render: Boolean
    ) {
        if (DEBUG) Log.v(TAG, "releaseOutputBuffer(outIndex=$outIndex, render=$render)")
        if (!render || !surface.isValid) {
            // Nothing to show (empty buffer, exiting, or surface gone): just recycle the buffer.
            mediaCodec.releaseOutputBuffer(outIndex, false)
            return
        }
        if (!hasVideoFrameRateStabilization()) {
            mediaCodec.releaseOutputBuffer(outIndex, true)
        } else {
            releaseOutputBufferWithFrameRateStabilization(mediaCodec, outIndex, bufferInfo)
        }
    }

    override fun decoderDestroyed(mediaCodec: MediaCodec) {
        if (DEBUG) Log.v(TAG, "decoderDestroyed()")
        resetFrameTiming()
    }

    /** Clears all timing anchors so the next frame re-establishes the PTS baseline. */
    private fun resetFrameTiming() {
        if (DEBUG) Log.v(TAG, "resetFrameTiming()")
        streamStartPtsUs = null
        playbackStartRealtimeNs = null
        lastFrameReleaseTimeNs = Long.MIN_VALUE
        lastPresentationTimeUs = Long.MIN_VALUE
    }

    companion object {
        // Frames later than this are dropped instead of rendered.
        private val FRAME_DROP_THRESHOLD_NS = TimeUnit.MILLISECONDS.toNanos(80)
        // Minimum spacing enforced between two consecutive frame releases.
        private val MIN_FRAME_SPACING_NS = TimeUnit.MILLISECONDS.toNanos(1)
        // Frames within this margin of "now" are rendered immediately.
        private val RENDER_EARLY_MARGIN_NS = TimeUnit.MILLISECONDS.toNanos(2)
    }
}
================================================
FILE: library-client-rtsp/src/main/java/com/alexvas/rtsp/codec/color/ColorConverter.kt
================================================
package com.alexvas.rtsp.codec.color
import android.annotation.SuppressLint
import android.graphics.Bitmap
import android.graphics.Matrix
import android.graphics.Rect
import android.media.Image
import androidx.camera.core.FlashState
import androidx.camera.core.ImageInfo
import androidx.camera.core.ImageProcessingUtil
import androidx.camera.core.ImageProxy
import androidx.camera.core.ImmutableImageInfo
import androidx.camera.core.impl.TagBundle
import java.nio.ByteBuffer
/**
* Convert Image YUV 4:2:0 888 to Bitmap ARGB 8888.
*/
class ColorConverterImageAndroidX: ColorConverterImage() {

    /** Converts a YUV 4:2:0 888 [Image] into an ARGB 8888 [Bitmap] via CameraX utilities. */
    @SuppressLint("RestrictedApi")
    override fun getBitmapFromImage(image: Image): Bitmap =
        ImageProcessingUtil.convertYUVToBitmap(AndroidImageProxy(image))

    /** This converter holds no resources, so there is nothing to free. */
    override fun release() = Unit
}
/** Adapts an [android.media.Image] to the CameraX [ImageProxy] interface. */
internal class AndroidImageProxy(private val image: Image) : ImageProxy {

    // Wrap each Image.Plane once up-front so repeated getPlanes() calls return stable objects.
    // Generic type argument restored: the property cannot compile without it, and declaring
    // the element type here also removes the need for an unchecked cast in getPlanes().
    private val planes: Array<ImageProxy.PlaneProxy> = if (image.planes != null) {
        Array(image.planes.size) { i -> AndroidPlaneProxy(image.planes[i]) }
    } else {
        emptyArray()
    }

    // Minimal ImageInfo: no tags, rotation 0, identity transform, flash state unknown.
    @SuppressLint("RestrictedApi")
    private val imageInfo: ImageInfo = ImmutableImageInfo.create(
        TagBundle.emptyBundle(),
        image.timestamp,
        0,
        Matrix(),
        FlashState.UNAVAILABLE
    )

    override fun close() {
        image.close()
    }

    override fun getCropRect(): Rect {
        return image.cropRect
    }

    override fun setCropRect(rect: Rect?) {
        image.cropRect = rect
    }

    override fun getFormat(): Int {
        return image.format
    }

    override fun getHeight(): Int {
        return image.height
    }

    override fun getWidth(): Int {
        return image.width
    }

    override fun getPlanes(): Array<ImageProxy.PlaneProxy> {
        return planes
    }

    /** An [ImageProxy.PlaneProxy] which wraps around an [Image.Plane]. */
    private class AndroidPlaneProxy(private val mPlane: Image.Plane) : ImageProxy.PlaneProxy {
        override fun getRowStride(): Int {
            return mPlane.rowStride
        }

        override fun getPixelStride(): Int {
            return mPlane.pixelStride
        }

        override fun getBuffer(): ByteBuffer {
            return mPlane.buffer
        }
    }

    override fun getImageInfo(): ImageInfo {
        return imageInfo
    }

    @SuppressLint("UnsafeOptInUsageError")
    override fun getImage(): Image {
        return image
    }
}
================================================
FILE: library-client-rtsp/src/main/java/com/alexvas/rtsp/codec/color/ColorConverterImage.kt
================================================
package com.alexvas.rtsp.codec.color
import android.graphics.Bitmap
import android.media.Image
/** Base class for color converters that may hold releasable resources. */
abstract class ColorConverter {
    /** Frees any resources held by this converter. */
    abstract fun release()
}
/** Color converter producing a [Bitmap] from a camera [Image]. */
abstract class ColorConverterImage: ColorConverter() {
    /** Converts a YUV 4:2:0 888 [Image] into an ARGB 8888 [Bitmap]. */
    abstract fun getBitmapFromImage(image: Image): Bitmap
}
================================================
FILE: library-client-rtsp/src/main/java/com/alexvas/rtsp/parser/AacParser.java
================================================
package com.alexvas.rtsp.parser;
import android.annotation.SuppressLint;
import android.util.Log;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.media3.common.util.ParsableBitArray;
import androidx.media3.common.util.ParsableByteArray;
// https://tools.ietf.org/html/rfc3640
// +---------+-----------+-----------+---------------+
// | RTP | AU Header | Auxiliary | Access Unit |
// | Header | Section | Section | Data Section |
// +---------+-----------+-----------+---------------+
//
// <----------RTP Packet Payload----------->
@SuppressLint("UnsafeOptInUsageError")
public class AacParser extends AudioParser {

    private static final String TAG = AacParser.class.getSimpleName();
    private static final boolean DEBUG = false;

    // Scratch buffers reused across packets to parse the AU header section.
    private final ParsableBitArray headerScratchBits;
    private final ParsableByteArray headerScratchBytes;

    private static final int MODE_LBR = 0;
    private static final int MODE_HBR = 1;

    // Number of bits for AAC AU sizes, indexed by mode (LBR and HBR)
    private static final int[] NUM_BITS_AU_SIZES = {6, 13};

    // Number of bits for AAC AU index(-delta), indexed by mode (LBR and HBR)
    private static final int[] NUM_BITS_AU_INDEX = {2, 3};

    // Frame sizes for AAC AU fragments, indexed by mode (LBR and HBR).
    // NOTE(review): currently unused; kept for future fragmented-AU support.
    private static final int[] FRAME_SIZES = {63, 8191};

    private final int _aacMode;

    // Always true for now; fragmented AU reassembly is not implemented.
    private boolean completeFrameIndicator = true;

    /**
     * @param aacMode "AAC-lbr" (case-insensitive) selects low bit rate mode;
     *                any other value selects high bit rate mode
     */
    public AacParser(@NonNull String aacMode) {
        _aacMode = aacMode.equalsIgnoreCase("AAC-lbr") ? MODE_LBR : MODE_HBR;
        headerScratchBits = new ParsableBitArray();
        headerScratchBytes = new ParsableByteArray();
    }

    /**
     * Extracts an AAC access unit from an RFC 3640 RTP payload.
     * Only a single, complete AU per packet is supported; fragmented and
     * multiplexed AUs are skipped (an empty array is returned for them).
     */
    @Override
    @Nullable
    public byte[] processRtpPacketAndGetSample(@NonNull byte[] data, int length) {
        if (DEBUG)
            Log.v(TAG, "processRtpPacketAndGetSample(length=" + length + ")");

        int auHeadersCount = 1;
        int numBitsAuSize = NUM_BITS_AU_SIZES[_aacMode];
        int numBitsAuIndex = NUM_BITS_AU_INDEX[_aacMode];
        ParsableByteArray packet = new ParsableByteArray(data, length);

        // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- .. -+-+-+-+-+-+-+-+-+-+
        // |AU-headers-length|AU-header|AU-header|      |AU-header|padding|
        // |                 |   (1)   |   (2)   |      |   (n)   | bits  |
        // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- .. -+-+-+-+-+-+-+-+-+-+
        // AU-headers-length is an unsigned 16-bit value in bits (RFC 3640 Section 3.2.1).
        // Read it unsigned so values >= 0x8000 do not become negative.
        int auHeadersLength = packet.readUnsignedShort();
        int auHeadersLengthBytes = (auHeadersLength + 7) / 8;

        headerScratchBytes.reset(auHeadersLengthBytes);
        packet.readBytes(headerScratchBytes.getData(), 0, auHeadersLengthBytes);
        headerScratchBits.reset(headerScratchBytes.getData());

        // Each extra AU header consumes (size bits + index-delta bits).
        int bitsAvailable = auHeadersLength - (numBitsAuSize + numBitsAuIndex);
        if (bitsAvailable > 0) {
            auHeadersCount += bitsAvailable / (numBitsAuSize + numBitsAuIndex);
        }
        if (auHeadersCount == 1) {
            int auSize = headerScratchBits.readBits(numBitsAuSize);
            int auIndex = headerScratchBits.readBits(numBitsAuIndex);
            if (completeFrameIndicator) {
                // AU-Index must be 0 for the first AU header (RFC 3640 Section 3.2.1.1).
                // When the remaining payload exactly matches the advertised AU size
                // this is a single complete AAC frame.
                if (auIndex == 0 && packet.bytesLeft() == auSize) {
                    return handleSingleAacFrame(packet);
                }
                // TODO: handle fragmented AAC frames.
            }
            // TODO: handle fragmented AAC frames (incomplete frame indicator).
        } else if (completeFrameIndicator) {
            // TODO: handle multiple AAC frames per packet.
        }
        // Unsupported packetization: emit an empty sample instead of null.
        return new byte[0];
    }

    // Copies the remaining packet bytes (a single complete AU) into a new array.
    private byte[] handleSingleAacFrame(ParsableByteArray packet) {
        int length = packet.bytesLeft();
        byte[] data = new byte[length];
        System.arraycopy(packet.getData(), packet.getPosition(), data, 0, data.length);
        return data;
    }
}
================================================
FILE: library-client-rtsp/src/main/java/com/alexvas/rtsp/parser/AudioParser.kt
================================================
package com.alexvas.rtsp.parser
/** Base class for RTP audio depacketizers. */
abstract class AudioParser {
    /**
     * Depacketizes one RTP payload into an audio sample.
     *
     * @param data RTP payload bytes
     * @param length number of valid bytes in [data]
     * @return extracted audio sample, or null if none could be extracted
     */
    abstract fun processRtpPacketAndGetSample(
        data: ByteArray,
        length: Int
    ): ByteArray?
}
================================================
FILE: library-client-rtsp/src/main/java/com/alexvas/rtsp/parser/G711Parser.kt
================================================
package com.alexvas.rtsp.parser
class G711Parser() : AudioParser() {
    /**
     * G.711 RTP payloads carry raw PCMU/PCMA samples, so no depacketization is
     * required: the sample is simply the first [length] payload bytes.
     */
    override fun processRtpPacketAndGetSample(
        data: ByteArray,
        length: Int
    ): ByteArray? = data.copyOfRange(0, length)
}
================================================
FILE: library-client-rtsp/src/main/java/com/alexvas/rtsp/parser/RtpH264Parser.kt
================================================
package com.alexvas.rtsp.parser
import android.util.Log
import com.alexvas.utils.VideoCodecUtils
import com.alexvas.utils.VideoCodecUtils.getH264NalUnitTypeString
class RtpH264Parser: RtpParser() {

    /**
     * Processes one RTP payload (RFC 6184) and returns a complete H.264 NAL unit
     * prefixed with an Annex B start code, or null while a fragmented unit is still
     * incomplete or the packetization type is unsupported.
     */
    override fun processRtpPacketAndGetNalUnit(data: ByteArray, length: Int, marker: Boolean): ByteArray? {
        if (DEBUG) Log.v(TAG, "processRtpPacketAndGetNalUnit(data.size=${data.size}, length=$length, marker=$marker)")
        val nalType = (data[0].toInt() and 0x1F).toByte()
        // FU-A start (0x80) / end (0x40) flags live in the top bits of the second payload byte.
        val packFlag = data[1].toInt() and 0xC0
        var nalUnit: ByteArray? = null
        if (DEBUG)
            Log.d(TAG, "\t\tNAL type: ${getH264NalUnitTypeString(nalType)}, pack flag: 0x${Integer.toHexString(packFlag).lowercase()}")
        when (nalType) {
            VideoCodecUtils.NAL_STAP_A, VideoCodecUtils.NAL_STAP_B -> {
                // Aggregation packets are not supported
            }
            VideoCodecUtils.NAL_MTAP16, VideoCodecUtils.NAL_MTAP24 -> {
                // Multi-time aggregation packets are not supported
            }
            VideoCodecUtils.NAL_FU_A -> {
                when (packFlag) {
                    0x80 -> {
                        addStartFragmentedPacket(data, length)
                    }
                    0x00 -> {
                        if (marker) {
                            // Sometimes 0x40 end packet is not arrived. Use marker bit in this case
                            // to finish fragmented packet.
                            nalUnit = addEndFragmentedPacketAndCombine(data, length)
                        } else {
                            addMiddleFragmentedPacket(data, length)
                        }
                    }
                    0x40 -> {
                        nalUnit = addEndFragmentedPacketAndCombine(data, length)
                    }
                }
            }
            VideoCodecUtils.NAL_FU_B -> {
                // FU-B fragmentation is not supported
            }
            else -> {
                // Single NAL unit packet: prefix with a start code and emit as-is.
                nalUnit = processSingleFramePacket(data, length)
                clearFragmentedBuffer()
                if (DEBUG) Log.d(TAG, "Single NAL (${nalUnit.size})")
            }
        }
        return nalUnit
    }

    // Stores the first FU-A fragment at slot 0, reconstructing the original 1-byte NAL
    // header from the FU indicator (F/NRI bits) and FU header (type bits).
    private fun addStartFragmentedPacket(data: ByteArray, length: Int) {
        if (DEBUG) Log.v(TAG, "addStartFragmentedPacket(data.size=${data.size}, length=$length)")
        fragmentedPackets = 0
        fragmentedBufferLength = length - 1
        fragmentedBuffer[0] = ByteArray(fragmentedBufferLength).apply {
            this[0] = ((data[0].toInt() and 0xE0) or (data[1].toInt() and 0x1F)).toByte()
        }
        System.arraycopy(data, 2, fragmentedBuffer[0]!!, 1, length - 2)
    }

    // Appends a middle FU-A fragment; drops the whole unit if capacity is exceeded.
    private fun addMiddleFragmentedPacket(data: ByteArray, length: Int) {
        if (DEBUG) Log.v(TAG, "addMiddleFragmentedPacket(data.size=${data.size}, length=$length)")
        fragmentedPackets++
        if (fragmentedPackets >= fragmentedBuffer.size) {
            Log.e(TAG, "Too many middle packets. No NAL FU_A end packet received. Skipped RTP packet.")
            fragmentedBuffer[0] = null
        } else {
            fragmentedBufferLength += length - 2
            fragmentedBuffer[fragmentedPackets] = ByteArray(length - 2)
            System.arraycopy(data, 2, fragmentedBuffer[fragmentedPackets]!!, 0, length - 2)
        }
    }

    // Combines the accumulated fragments plus this end fragment into one Annex B NAL unit.
    // Returns null if the start fragment was never received.
    private fun addEndFragmentedPacketAndCombine(data: ByteArray, length: Int): ByteArray? {
        if (DEBUG) Log.v(TAG, "addEndFragmentedPacketAndCombine(data.size=${data.size}, length=$length)")
        var nalUnit: ByteArray? = null
        var tmpLen: Int
        if (fragmentedBuffer[0] == null) {
            Log.e(TAG, "No NAL FU_A start packet received. Skipped RTP packet.")
        } else {
            // 4-byte prefix + accumulated fragments + end payload (length - 2)
            // = fragmentedBufferLength + length + 2.
            nalUnit = ByteArray(fragmentedBufferLength + length + 2)
            writeNalPrefix0001(nalUnit)
            tmpLen = 4
            // Write start and middle packets
            for (i in 0 until fragmentedPackets + 1) {
                fragmentedBuffer[i]!!.apply {
                    System.arraycopy(
                        this,
                        0,
                        nalUnit,
                        tmpLen,
                        this.size
                    )
                    tmpLen += this.size
                }
            }
            // Write end packet
            System.arraycopy(data, 2, nalUnit, tmpLen, length - 2)
            clearFragmentedBuffer()
            if (DEBUG) Log.d(TAG, "Fragmented NAL (${nalUnit.size})")
        }
        return nalUnit
    }

    private fun clearFragmentedBuffer() {
        if (DEBUG) Log.v(TAG, "clearFragmentedBuffer()")
        // Fix: fragmentedPackets keeps incrementing past the buffer capacity when no
        // FU-A end packet ever arrives, so the loop bound must be clamped to avoid
        // an ArrayIndexOutOfBoundsException on the next single-NAL packet.
        val last = minOf(fragmentedPackets, fragmentedBuffer.size - 1)
        for (i in 0..last) {
            fragmentedBuffer[i] = null
        }
    }

    companion object {
        private val TAG: String = RtpH264Parser::class.java.simpleName
        private const val DEBUG = false
    }
}
================================================
FILE: library-client-rtsp/src/main/java/com/alexvas/rtsp/parser/RtpH265Parser.kt
================================================
package com.alexvas.rtsp.parser
import android.util.Log
class RtpH265Parser: RtpParser() {

    /**
     * Processes one RTP payload (RFC 7798) and returns a complete H.265 NAL unit
     * prefixed with an Annex B start code, or null while a fragmentation unit is
     * still incomplete or the packet type is unsupported.
     */
    override fun processRtpPacketAndGetNalUnit(data: ByteArray, length: Int, marker: Boolean): ByteArray? {
        if (DEBUG) Log.v(TAG, "processRtpPacketAndGetNalUnit(length=$length, marker=$marker)")
        // NAL Unit Header.type (RFC7798 Section 1.1.4). Masking with 0x3F keeps it in 0..63.
        val nalType = ((data[0].toInt() shr 1) and 0x3F).toByte()
        // Types below 48 are plain single NAL unit packets (RFC7798 Section 4.4.1).
        if (nalType < RTP_PACKET_TYPE_AP) {
            val nalUnit = processSingleFramePacket(data, length)
            clearFragmentedBuffer()
            if (DEBUG) Log.d(TAG, "Single NAL (${nalUnit.size})")
            return nalUnit
        }
        if (nalType != RTP_PACKET_TYPE_FU) {
            // Aggregation packets (48) and other payload types are not supported.
            return null
        }
        // Fragmentation unit: FU header is the third payload byte (RFC7798 Section 4.4.3).
        val fuHeader = data[2].toInt()
        val isFirstFuPacket = (fuHeader and 0x80) > 0
        val isLastFuPacket = (fuHeader and 0x40) > 0
        if (isFirstFuPacket) {
            addStartFragmentedPacket(data, length)
        } else if (isLastFuPacket || marker) {
            // Some cameras never set the FU end bit; the RTP marker bit is the fallback.
            return addEndFragmentedPacketAndCombine(data, length)
        } else {
            addMiddleFragmentedPacket(data, length)
        }
        return null
    }

    // Stores the first FU fragment at slot 0, rebuilding the 2-byte HEVC NAL unit header.
    private fun addStartFragmentedPacket(data: ByteArray, length: Int) {
        if (DEBUG) Log.v(TAG, "addStartFragmentedPacket(data.size=${data.size}, length=$length)")
        fragmentedPackets = 0
        fragmentedBufferLength = length - 1
        fragmentedBuffer[0] = ByteArray(fragmentedBufferLength).apply {
            val tid = (data[1].toInt() and 0x7)
            val fuHeader = data[2].toInt()
            val nalUnitType = fuHeader and 0x3F
            // Convert RTP header into HEVC NAL Unit header according to RFC7798 Section 1.1.4.
            // RTP byte 0: ignored.
            // RTP byte 1: repurposed as HEVC NALU byte 0, copy NALU type.
            // RTP byte 2: repurposed as HEVC NALU byte 1, layerId required to be zero, copying only tid.
            this[0] = (((nalUnitType shl 1) and 0x7F).toByte())
            this[1] = tid.toByte()
        }
        System.arraycopy(data, 3, fragmentedBuffer[0]!!, 2, length - 3)
    }

    // Appends a middle FU fragment; drops the whole unit if capacity is exceeded.
    private fun addMiddleFragmentedPacket(data: ByteArray, length: Int) {
        if (DEBUG) Log.v(TAG, "addMiddleFragmentedPacket(data.size=${data.size}, length=$length)")
        fragmentedPackets++
        if (fragmentedPackets >= fragmentedBuffer.size) {
            Log.e(TAG, "Too many middle packets. No RTP_PACKET_TYPE_FU end packet received. Skipped RTP packet.")
            fragmentedBuffer[0] = null
        } else {
            fragmentedBufferLength += length - 3
            fragmentedBuffer[fragmentedPackets] = ByteArray(length - 3).apply {
                System.arraycopy(data, 3, this, 0, length - 3)
            }
        }
    }

    // Combines the accumulated fragments plus this end fragment into one Annex B NAL unit.
    // Returns null if the start fragment was never received.
    private fun addEndFragmentedPacketAndCombine(data: ByteArray, length: Int): ByteArray? {
        if (DEBUG) Log.v(TAG, "addEndFragmentedPacketAndCombine(data.size=${data.size}, length=$length)")
        var nalUnit: ByteArray? = null
        if (fragmentedBuffer[0] == null) {
            Log.e(TAG, "No NAL FU_A start packet received. Skipped RTP packet.")
        } else {
            // Fix: exact size is 4 (prefix) + fragmentedBufferLength + (length - 3)
            // = fragmentedBufferLength + length + 1. The previous "+ 3" allocation
            // appended two stray zero bytes to every fragmented NAL unit.
            nalUnit = ByteArray(fragmentedBufferLength + length + 1)
            writeNalPrefix0001(nalUnit)
            var tmpLen = 4
            // Write start and middle packets
            for (i in 0 until fragmentedPackets + 1) {
                fragmentedBuffer[i]!!.apply {
                    System.arraycopy(
                        this,
                        0,
                        nalUnit,
                        tmpLen,
                        this.size
                    )
                    tmpLen += this.size
                }
            }
            // Write end packet
            System.arraycopy(data, 3, nalUnit, tmpLen, length - 3)
            clearFragmentedBuffer()
            if (DEBUG) Log.d(TAG, "Fragmented NAL (${nalUnit.size})")
        }
        return nalUnit
    }

    private fun clearFragmentedBuffer() {
        if (DEBUG) Log.v(TAG, "clearFragmentedBuffer()")
        // Fix: clamp the bound — fragmentedPackets can exceed the buffer capacity when
        // no FU end packet ever arrives, which previously risked an
        // ArrayIndexOutOfBoundsException here.
        val last = minOf(fragmentedPackets, fragmentedBuffer.size - 1)
        for (i in 0..last) {
            fragmentedBuffer[i] = null
        }
    }

    companion object {
        private val TAG: String = RtpH265Parser::class.java.simpleName
        private const val DEBUG = false
        /** Aggregation Packet. RFC7798 Section 4.4.2. */
        private const val RTP_PACKET_TYPE_AP: Byte = 48
        /** Fragmentation Unit. RFC7798 Section 4.4.3. */
        private const val RTP_PACKET_TYPE_FU: Byte = 49
    }
}
================================================
FILE: library-client-rtsp/src/main/java/com/alexvas/rtsp/parser/RtpHeaderParser.java
================================================
package com.alexvas.rtsp.parser;
import android.util.Log;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.alexvas.utils.NetUtils;
import java.io.IOException;
import java.io.InputStream;
/**
 * Reads and parses RTP fixed headers (RFC 3550 Section 5.1) from a TCP
 * interleaved RTSP stream.
 */
public class RtpHeaderParser {

    private static final String TAG = RtpHeaderParser.class.getSimpleName();
    private static final boolean DEBUG = false;

    private final static int RTP_HEADER_SIZE = 12;

    /** Parsed fields of the 12-byte fixed RTP header. */
    public static class RtpHeader {
        public int version;
        public int padding;
        public int extension;
        public int cc; // CSRC count
        public int marker;
        public int payloadType;
        public int sequenceNumber;
        public long timeStamp;
        public long ssrc;
        public int payloadSize;

        // NOTE(review): for a 90 kHz video clock msec = timeStamp / 90; the 11.111111
        // multiplier (= 1e6 / 90000) yields microseconds instead — confirm intended units.
        public long getTimestampMsec() {
            return (long)(timeStamp * 11.111111);
        }

        // If RTP header found, return 4 bytes of the header
        private static boolean searchForNextRtpHeader(@NonNull InputStream inputStream, @NonNull byte[] header /*out*/) throws IOException {
            if (header.length < 4)
                throw new IOException("Invalid allocated buffer size");
            int bytesRemaining = 100000; // 100 KB max to check
            boolean foundFirstByte = false;
            boolean foundSecondByte = false;
            byte[] oneByte = new byte[1];
            // Search for the interleaved frame marker {0x24, 0x00}.
            // NOTE(review): only channel 0x00 is accepted although the comment below
            // mentions 0x00-0x02 — confirm whether other channels should match too.
            do {
                if (bytesRemaining-- < 0)
                    return false;
                // Read 1 byte
                NetUtils.readData(inputStream, oneByte, 0, 1);
                if (foundFirstByte) {
                    // Found 0x24. Checking for 0x00-0x02.
                    if (oneByte[0] == 0x00)
                        foundSecondByte = true;
                    else
                        foundFirstByte = false;
                }
                if (!foundFirstByte && oneByte[0] == 0x24) {
                    // Found 0x24
                    foundFirstByte = true;
                }
            } while (!foundSecondByte);
            header[0] = 0x24;
            header[1] = oneByte[0];
            // Read 2 bytes more (packet size)
            NetUtils.readData(inputStream, header, 2, 2);
            return true;
        }

        /** Parses the 12-byte fixed RTP header; returns null if the version field is not 2. */
        @Nullable
        private static RtpHeader parseData(@NonNull byte[] header, int packetSize) {
            RtpHeader rtpHeader = new RtpHeader();
            rtpHeader.version = (header[0] & 0xFF) >> 6;
            if (rtpHeader.version != 2) {
                if (DEBUG)
                    Log.e(TAG,"Not a RTP packet (" + rtpHeader.version + ")");
                return null;
            }
            rtpHeader.padding = (header[0] & 0x20) >> 5;
            rtpHeader.extension = (header[0] & 0x10) >> 4;
            rtpHeader.cc = header[0] & 0x0F; // CSRC count (previously never parsed, always 0)
            rtpHeader.marker = (header[1] & 0x80) >> 7;
            rtpHeader.payloadType = header[1] & 0x7F;
            rtpHeader.sequenceNumber = (header[3] & 0xFF) + ((header[2] & 0xFF) << 8);
            rtpHeader.timeStamp = (header[7] & 0xFF) + ((header[6] & 0xFF) << 8) + ((header[5] & 0xFF) << 16) + ((header[4] & 0xFF) << 24) & 0xffffffffL;
            // Fix: SSRC occupies bytes 8..11 (RFC 3550 Section 5.1). The previous code
            // mistakenly re-read the timestamp bytes 4..7 here (copy/paste duplicate).
            rtpHeader.ssrc = (header[11] & 0xFF) + ((header[10] & 0xFF) << 8) + ((header[9] & 0xFF) << 16) + ((header[8] & 0xFF) << 24) & 0xffffffffL;
            rtpHeader.payloadSize = packetSize - RTP_HEADER_SIZE;
            return rtpHeader;
        }

        // Interleaved frame length is a big-endian 16-bit value at bytes 2..3.
        private static int getPacketSize(@NonNull byte[] header) {
            int packetSize = ((header[2] & 0xFF) << 8) | (header[3] & 0xFF);
            if (DEBUG)
                Log.d(TAG, "Packet size: " + packetSize);
            return packetSize;
        }

        public void dumpHeader() {
            Log.d("RTP","\t\tRTP header version: " + version
                    + ", padding: " + padding
                    + ", ext: " + extension
                    + ", cc: " + cc
                    + ", marker: " + marker
                    + ", payload type: " + payloadType
                    + ", seq num: " + sequenceNumber
                    + ", ts: " + timeStamp
                    + ", ssrc: " + ssrc
                    + ", payload size: " + payloadSize);
        }
    }

    /**
     * Reads the next RTP header from a TCP interleaved RTSP stream: consumes the
     * 4-byte interleaved framing, then the 12-byte RTP header. If the data does not
     * look like RTP (e.g. a keep-alive response), searches forward for the next
     * interleaved frame marker.
     *
     * @return the parsed header, or null if no valid header could be found
     */
    @Nullable
    public static RtpHeader readHeader(@NonNull InputStream inputStream) throws IOException {
        // 24 01 00 1c 80 c8 00 06 7f 1d d2 c4
        // 24 01 00 1c 80 c8 00 06 13 9b cf 60
        // 24 02 01 12 80 e1 01 d2 00 07 43 f0
        byte[] header = new byte[RTP_HEADER_SIZE];
        // Skip 4 bytes (TCP only). No those bytes in UDP.
        NetUtils.readData(inputStream, header, 0, 4);
        if (DEBUG && header[0] == 0x24)
            Log.d(TAG, header[1] == 0 ? "RTP packet" : "RTCP packet");
        int packetSize = RtpHeader.getPacketSize(header);
        if (DEBUG)
            Log.d(TAG, "Packet size: " + packetSize);
        if (NetUtils.readData(inputStream, header, 0, header.length) == header.length) {
            RtpHeader rtpHeader = RtpHeader.parseData(header, packetSize);
            if (rtpHeader == null) {
                // Header not found. Possible keep-alive response. Search for another RTP header.
                boolean foundHeader = RtpHeader.searchForNextRtpHeader(inputStream, header);
                if (foundHeader) {
                    packetSize = RtpHeader.getPacketSize(header);
                    if (NetUtils.readData(inputStream, header, 0, header.length) == header.length)
                        return RtpHeader.parseData(header, packetSize);
                }
            } else {
                return rtpHeader;
            }
        }
        return null;
    }
}
================================================
FILE: library-client-rtsp/src/main/java/com/alexvas/rtsp/parser/RtpParser.kt
================================================
package com.alexvas.rtsp.parser
/** Base class for RTP video depacketizers producing Annex B NAL units. */
abstract class RtpParser {

    /**
     * Processes one RTP payload.
     *
     * @return a complete NAL unit (with 0x00000001 prefix), or null while a
     *         fragmented unit is still being assembled or the packet is unsupported
     */
    abstract fun processRtpPacketAndGetNalUnit(data: ByteArray, length: Int, marker: Boolean): ByteArray?

    // TODO Use already allocated buffer with RtpPacket.MAX_SIZE = 65507
    // Used only for fragmented packets: slot 0 is the start fragment, middle
    // fragments follow. Explicit element type restored (it cannot be inferred here).
    protected val fragmentedBuffer = arrayOfNulls<ByteArray>(1024)
    protected var fragmentedBufferLength = 0
    protected var fragmentedPackets = 0

    /** Writes the 4-byte Annex B start code 0x00000001 at the beginning of [buffer]. */
    protected fun writeNalPrefix0001(buffer: ByteArray) {
        buffer[0] = 0x00
        buffer[1] = 0x00
        buffer[2] = 0x00
        buffer[3] = 0x01
    }

    /** Returns the first [length] bytes of [data] prefixed with the Annex B start code. */
    protected fun processSingleFramePacket(data: ByteArray, length: Int): ByteArray {
        return ByteArray(4 + length).apply {
            writeNalPrefix0001(this)
            System.arraycopy(data, 0, this, 4, length)
        }
    }
}
================================================
FILE: library-client-rtsp/src/main/java/com/alexvas/rtsp/widget/RtspImageView.kt
================================================
package com.alexvas.rtsp.widget
import android.content.Context
import android.graphics.Bitmap
import android.net.Uri
import android.util.AttributeSet
import android.util.Log
import android.widget.ImageView
import com.alexvas.rtsp.codec.VideoDecodeThread
import com.alexvas.rtsp.codec.VideoDecoderBitmapThread
import com.alexvas.rtsp.widget.RtspProcessor.Statistics
import com.limelight.binding.video.MediaCodecHelper
/**
* Low latency RTSP stream playback on image view (bitmap).
*/
class RtspImageView : ImageView {

    /** Optional listener to be called when bitmap obtained from video decoder. */
    var onRtspImageBitmapListener: RtspImageBitmapListener? = null

    interface RtspImageBitmapListener {
        /** Called with every decoded frame bitmap before it is shown in this view. */
        fun onRtspImageBitmapObtained(bitmap: Bitmap) {}
    }

    // Decoder factory: decodes frames into bitmaps (delivered via videoDecoderBitmapListener)
    // instead of rendering to a surface.
    private var rtspProcessor = RtspProcessor(onVideoDecoderCreateRequested = {
        videoMimeType, videoRotation, videoFrameQueue, videoDecoderListener, videoDecoderType, _ ->
        VideoDecoderBitmapThread(
            videoMimeType,
            videoRotation,
            videoFrameQueue,
            videoDecoderListener,
            videoDecoderBitmapListener,
            videoDecoderType,
        )
    })

    private val videoDecoderBitmapListener = object : VideoDecoderBitmapThread.VideoDecoderBitmapListener {
        override fun onVideoDecoderBitmapObtained(bitmap: Bitmap) {
            // Notify the client first, then show the frame in this ImageView.
            onRtspImageBitmapListener?.onRtspImageBitmapObtained(bitmap)
            setImageBitmap(bitmap)
            invalidate()
        }
    }

    // Delegated to RtspProcessor; the local backing field initializer is never read.
    var statistics = Statistics()
        get() = rtspProcessor.statistics
        private set

    // Video rotation in degrees (0, 90, 180, 270); delegated to RtspProcessor.
    var videoRotation: Int
        get() = rtspProcessor.videoRotation
        set(value) { rtspProcessor.videoRotation = value }

    // Requested decoder type (hardware/software); delegated to RtspProcessor.
    var videoDecoderType: VideoDecodeThread.DecoderType
        get() = rtspProcessor.videoDecoderType
        set(value) { rtspProcessor.videoDecoderType = value }

    // Extra runtime debug logging; delegated to RtspProcessor.
    var debug: Boolean
        get() = rtspProcessor.debug
        set(value) { rtspProcessor.debug = value }

    constructor(context: Context) : super(context) {
        initView(context, null, 0)
    }

    constructor(context: Context, attrs: AttributeSet?) : super(context, attrs) {
        initView(context, attrs, 0)
    }

    constructor(context: Context, attrs: AttributeSet?, defStyleAttr: Int) : super(context, attrs, defStyleAttr) {
        initView(context, attrs, defStyleAttr)
    }

    // NOTE(review): attrs/defStyleAttr are currently unused; MediaCodecHelper.initialize
    // presumably loads decoder quirks (Moonlight binding) — semantics not visible here.
    private fun initView(context: Context, attrs: AttributeSet?, defStyleAttr: Int) {
        if (DEBUG) Log.v(TAG, "initView()")
        MediaCodecHelper.initialize(context, /*glRenderer*/ "")
    }

    /**
     * Initializes the RTSP connection parameters. Must be called before [start].
     */
    fun init(
        uri: Uri,
        username: String? = null,
        password: String? = null,
        userAgent: String? = null,
        socketTimeout: Int? = null
    ) {
        if (DEBUG) Log.v(TAG, "init(uri='$uri', username='$username', password='$password', userAgent='$userAgent')")
        rtspProcessor.init(
            uri,
            username,
            password,
            userAgent,
            socketTimeout ?: RtspProcessor.DEFAULT_SOCKET_TIMEOUT
        )
    }

    /**
     * Start RTSP client.
     *
     * @param requestVideo request video track
     * @param requestAudio request audio track
     * @param requestApplication request application track
     * @see https://datatracker.ietf.org/doc/html/rfc4566#section-5.14
     */
    fun start(requestVideo: Boolean, requestAudio: Boolean, requestApplication: Boolean) {
        if (DEBUG) Log.v(TAG, "start(requestVideo=$requestVideo, requestAudio=$requestAudio, requestApplication=$requestApplication)")
        rtspProcessor.start(requestVideo, requestAudio, requestApplication)
    }

    /**
     * Stop RTSP client.
     */
    fun stop() {
        if (DEBUG) Log.v(TAG, "stop()")
        rtspProcessor.stop()
    }

    /** Returns true if the RTSP client is currently started. */
    fun isStarted(): Boolean {
        return rtspProcessor.isStarted()
    }

    /** Sets (or clears with null) the RTSP status listener. */
    fun setStatusListener(listener: RtspStatusListener?) {
        if (DEBUG) Log.v(TAG, "setStatusListener()")
        rtspProcessor.statusListener = listener
    }

    /** Sets (or clears with null) the raw RTSP data listener. */
    fun setDataListener(listener: RtspDataListener?) {
        if (DEBUG) Log.v(TAG, "setDataListener()")
        rtspProcessor.dataListener = listener
    }

    companion object {
        private val TAG: String = RtspImageView::class.java.simpleName
        private const val DEBUG = false
    }
}
================================================
FILE: library-client-rtsp/src/main/java/com/alexvas/rtsp/widget/RtspListeners.kt
================================================
package com.alexvas.rtsp.widget
/**
* Listener for getting RTSP status update.
*/
interface RtspStatusListener {
    /** Client started connecting to the server. */
    fun onRtspStatusConnecting() {}
    /** Connection established. */
    fun onRtspStatusConnected() {}
    /** Client started disconnecting. */
    fun onRtspStatusDisconnecting() {}
    /** Connection closed. */
    fun onRtspStatusDisconnected() {}
    /** Connection failed due to wrong credentials. */
    fun onRtspStatusFailedUnauthorized() {}
    /** Connection failed for any other reason; [message] may describe the cause. */
    fun onRtspStatusFailed(message: String?) {}
    /** First video frame was rendered. */
    fun onRtspFirstFrameRendered() {}
    /** Video frame size became known or changed. */
    fun onRtspFrameSizeChanged(width: Int, height: Int) {}
}
/**
* Listener for getting RTSP raw data, e.g. for recording.
*/
interface RtspDataListener {
    /** Raw video NAL unit received from the server. */
    fun onRtspDataVideoNalUnitReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {}
    /** Raw audio sample received from the server. */
    fun onRtspDataAudioSampleReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {}
    /** Raw application-track data received from the server. */
    fun onRtspDataApplicationDataReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {}
}
================================================
FILE: library-client-rtsp/src/main/java/com/alexvas/rtsp/widget/RtspProcessor.kt
================================================
package com.alexvas.rtsp.widget
import android.annotation.SuppressLint
import android.media.MediaFormat
import android.net.Uri
import android.os.Handler
import android.os.Looper
import android.util.Log
import androidx.media3.container.NalUnitUtil
import com.alexvas.rtsp.RtspClient
import com.alexvas.rtsp.RtspClient.SdpInfo
import com.alexvas.rtsp.codec.AudioCodecType
import com.alexvas.rtsp.codec.AudioDecodeThread
import com.alexvas.rtsp.codec.AudioFrameQueue
import com.alexvas.rtsp.codec.FrameQueue
import com.alexvas.rtsp.codec.VideoCodecType
import com.alexvas.rtsp.codec.VideoDecodeThread
import com.alexvas.rtsp.codec.VideoDecodeThread.DecoderType
import com.alexvas.rtsp.codec.VideoDecodeThread.VideoDecoderListener
import com.alexvas.rtsp.codec.VideoFrameQueue
import com.alexvas.utils.NetUtils
import com.alexvas.utils.VideoCodecUtils
import org.jcodec.codecs.h264.io.model.SeqParameterSet
import org.jcodec.codecs.h264.io.model.VUIParameters
import java.net.Socket
import java.nio.ByteBuffer
import java.util.concurrent.atomic.AtomicBoolean
import kotlin.math.min
class RtspProcessor(
private var onVideoDecoderCreateRequested: ((
videoMimeType: String,
videoRotation: Int, // 0, 90, 180, 270
videoFrameQueue: VideoFrameQueue,
videoDecoderListener: VideoDecoderListener,
videoDecoderType: DecoderType,
videoFrameRateStabilization: Boolean,
) -> VideoDecodeThread)
) {
class Statistics {
    // Decoder type actually in use (refreshed from the running decode thread).
    var videoDecoderType = DecoderType.HARDWARE
    // Name of the video decoder in use, or null if unknown.
    var videoDecoderName: String? = null
    // Video decoder latency in msec; -1 until measured.
    var videoDecoderLatencyMsec = -1
    // Network latency in msec; -1 until measured.
    var networkLatencyMsec = -1
}
// Connection parameters set via init().
private lateinit var uri: Uri
private var username: String? = null
private var password: String? = null
private var userAgent: String? = null
// Which SDP tracks to request from the server.
private var requestVideo = true
private var requestAudio = true
private var requestApplication = false
private var rtspThread: RtspThread? = null
// Frame queues feeding the decoder threads.
private var videoFrameQueue = VideoFrameQueue(60)
private var audioFrameQueue = AudioFrameQueue(10)
private var videoDecodeThread: VideoDecodeThread? = null
private var audioDecodeThread: AudioDecodeThread? = null
// All listener callbacks are posted to the main thread through this handler.
private val uiHandler = Handler(Looper.getMainLooper())
// Media parameters resolved from the SDP on connect.
private var videoMimeType: String = "video/avc"
private var audioMimeType: String = ""
private var audioSampleRate: Int = 0
private var audioChannelCount: Int = 0
private var audioCodecConfig: ByteArray? = null
private var firstFrameRendered = false
// Statistics snapshot, refreshed from the video decode thread on every read.
var statistics = Statistics()
    get() {
        videoDecodeThread?.let { decoder ->
            field.apply {
                networkLatencyMsec = decoder.getCurrentNetworkLatencyMsec()
                videoDecoderLatencyMsec = decoder.getCurrentVideoDecoderLatencyMsec()
                videoDecoderType = decoder.getCurrentVideoDecoderType()
                videoDecoderName = decoder.getCurrentVideoDecoderName()
            }
        }
        return field
    }
    private set
/** Read and connect timeout for socket in msec. */
private var socketTimeoutMsec: Int = 5000
/**
 * Show more debug info on console on runtime.
 */
var debug = false
/**
 * Video rotation in degrees. Allowed values: 0, 90, 180, 270.
 * Note that not all hardware video decoders support rotation.
 * Invalid values are silently ignored.
 */
var videoRotation = 0
    set(value) {
        if (value == 0 || value == 90 || value == 180 || value == 270)
            field = value
    }
/**
 * Requested video decoder type.
 */
var videoDecoderType = DecoderType.HARDWARE
/**
 * Try to modify SPS frame coming from camera with low-latency parameters to decrease video
 * decoding latency.
 * If SPS frame param num_ref_frames is equal to 1 or more, set it to 0. That should decrease
 * decoder latency by 2x times on some hardware decoders.
 */
var experimentalUpdateSpsFrameWithLowLatencyParams = false
/**
 * Enables the playback smoothing logic inside the video decoder.
 * Forwarded live to a running decode thread when toggled.
 */
var videoFrameRateStabilization: Boolean = false
    set(value) {
        field = value
        videoDecodeThread?.setVideoFrameRateStabilization(value)
    }
/**
 * Status listener for getting RTSP event updates.
 */
var statusListener: RtspStatusListener? = null
/**
 * Listener for getting raw data, e.g. for recording.
 */
var dataListener: RtspDataListener? = null
private val proxyClientListener = object: RtspClient.RtspClientListener {
override fun onRtspConnecting() {
    if (DEBUG) Log.v(TAG, "onRtspConnecting()")
    // Forward the status callback on the main thread.
    uiHandler.post { statusListener?.onRtspStatusConnecting() }
}
/**
 * Resolves codec mime types from the SDP, pushes the SPS/PPS/VPS codec config
 * into the video queue, and notifies the status listener on the main thread.
 */
override fun onRtspConnected(sdpInfo: SdpInfo) {
    if (DEBUG) Log.v(TAG, "onRtspConnected()")
    if (sdpInfo.videoTrack != null) {
        videoFrameQueue.clear()
        when (sdpInfo.videoTrack?.videoCodec) {
            RtspClient.VIDEO_CODEC_H264 -> videoMimeType = MediaFormat.MIMETYPE_VIDEO_AVC
            RtspClient.VIDEO_CODEC_H265 -> videoMimeType = MediaFormat.MIMETYPE_VIDEO_HEVC
        }
        // NOTE(review): the audio mime is also resolved in the audioTrack branch below;
        // this duplicate mapping runs only when a video track is present.
        when (sdpInfo.audioTrack?.audioCodec) {
            RtspClient.AUDIO_CODEC_AAC -> audioMimeType = MediaFormat.MIMETYPE_AUDIO_AAC
            RtspClient.AUDIO_CODEC_OPUS -> audioMimeType = MediaFormat.MIMETYPE_AUDIO_OPUS
            RtspClient.AUDIO_CODEC_G711_ULAW -> audioMimeType = MediaFormat.MIMETYPE_AUDIO_G711_MLAW
            RtspClient.AUDIO_CODEC_G711_ALAW -> audioMimeType = MediaFormat.MIMETYPE_AUDIO_G711_ALAW
        }
        val sps: ByteArray? = sdpInfo.videoTrack?.sps
        val pps: ByteArray? = sdpInfo.videoTrack?.pps
        // Initialize decoder with the codec config (SPS + PPS + VPS) from the SDP.
        @SuppressLint("UnsafeOptInUsageError")
        if (sps != null && pps != null) {
            val vps: ByteArray = sdpInfo.videoTrack?.vps ?: ByteArray(0)
            val data = ByteArray(sps.size + pps.size + vps.size)
            var offset = 0
            sps.copyInto(data, offset, 0, sps.size)
            offset += sps.size
            pps.copyInto(data, offset, 0, pps.size)
            offset += pps.size
            vps.copyInto(data, offset, 0, vps.size)
            // TODO(review): codec type is hard-coded to H264 even for HEVC streams —
            // confirm whether VideoFrame.codecType is used downstream.
            videoFrameQueue.push(
                FrameQueue.VideoFrame(
                    VideoCodecType.H264,
                    isKeyframe = true,
                    data,
                    0,
                    data.size,
                    0
                )
            )
            try {
                // Skip the Annex B start code (3 or 4 bytes) plus the 1-byte NAL header.
                val startNalOffset = if (sps[3] == 1.toByte()) 5 else 4
                val spsData = NalUnitUtil.parseSpsNalUnitPayload(
                    data, startNalOffset, data.size - startNalOffset)
                if (spsData.maxNumReorderFrames > 0) {
                    Log.w(
                        TAG, "SPS frame param max_num_reorder_frames=" +
                        "${spsData.maxNumReorderFrames} is too high" +
                        " for low latency decoding (expecting 0)."
                    )
                }
                if (debug) {
                    Log.d(TAG, "SPS frame: ${sps.toHexString(0, sps.size)}")
                    Log.d(TAG, "\t${spsData.spsDataToString()}")
                    Log.d(TAG, "PPS frame: ${pps.toHexString(0, pps.size)}")
                    if (vps.isNotEmpty())
                        Log.d(TAG, "VPS frame: ${vps.toHexString(0, vps.size)}")
                }
            } catch (e: Exception) {
                e.printStackTrace()
            }
        } else {
            if (DEBUG) Log.d(TAG, "RTSP SPS and PPS NAL units missed in SDP")
        }
    }
    if (sdpInfo.audioTrack != null) {
        audioFrameQueue.clear()
        // Fix: the G.711 u-law/a-law cases were missing here, so audioMimeType was
        // never set for G.711 streams without a video track. Keep this mapping in
        // sync with the one in the video branch above.
        when (sdpInfo.audioTrack?.audioCodec) {
            RtspClient.AUDIO_CODEC_AAC -> audioMimeType = MediaFormat.MIMETYPE_AUDIO_AAC
            RtspClient.AUDIO_CODEC_OPUS -> audioMimeType = MediaFormat.MIMETYPE_AUDIO_OPUS
            RtspClient.AUDIO_CODEC_G711_ULAW -> audioMimeType = MediaFormat.MIMETYPE_AUDIO_G711_MLAW
            RtspClient.AUDIO_CODEC_G711_ALAW -> audioMimeType = MediaFormat.MIMETYPE_AUDIO_G711_ALAW
        }
        audioSampleRate = sdpInfo.audioTrack?.sampleRateHz!!
        audioChannelCount = sdpInfo.audioTrack?.channels!!
        audioCodecConfig = sdpInfo.audioTrack?.config
    }
    onRtspClientConnected()
    uiHandler.post {
        statusListener?.onRtspStatusConnected()
    }
}
private var framesPerGop = 0
override fun onRtspVideoNalUnitReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {
if (DEBUG) Log.v(TAG, "onRtspVideoNalUnitReceived(data.size=${data.size}, length=$length, timestamp=$timestamp)")
val isH265 = videoMimeType == MediaFormat.MIMETYPE_VIDEO_HEVC
// Search for NAL_IDR_SLICE within first 1KB maximum
val isKeyframe = VideoCodecUtils.isAnyKeyFrame(data, offset, min(length, 1000), isH265)
var videoFrame = FrameQueue.VideoFrame(
VideoCodecType.H264,
isKeyframe,
data,
offset,
length,
timestamp,
capturedTimestampMs = System.currentTimeMillis()
)
if (isKeyframe && experimentalUpdateSpsFrameWithLowLatencyParams) {
videoFrame = getNewLowLatencyFrameFromKeyFrame(videoFrame)
}
if (debug) {
nalUnitsFound.clear()
VideoCodecUtils.getNalUnits(videoFrame.data, videoFrame.offset, videoFrame.length, nalUnitsFound, isH265)
var b = StringBuilder()
for (nal in nalUnitsFound) {
b
.append(if (isH265)
VideoCodecUtils.getH265NalUnitTypeString(nal.type)
else
VideoCodecUtils.getH264NalUnitTypeString(nal.type))
.append(" (${nal.length}), ")
}
if (b.length > 2)
b = b.removeRange(b.length - 2, b.length) as StringBuilder
Log.d(TAG, "NALs: $b")
@SuppressLint("UnsafeOptInUsageError")
if (isKeyframe) {
val sps = VideoCodecUtils.getSpsNalUnitFromArray(
videoFrame.data,
videoFrame.offset,
// Check only first 100 bytes maximum. That's enough for finding SPS NAL unit.
Integer.min(videoFrame.length, VideoCodecUtils.MAX_NAL_SPS_SIZE),
isH265
)
Log.d(TAG,
"\tKey frame received (${videoFrame.length} bytes, ts=$timestamp," +
" ${sps?.width}x${sps?.height}," +
" GoP=$framesPerGop," +
" profile=${sps?.profileIdc}, level=${sps?.levelIdc})")
framesPerGop = 0
} else {
framesPerGop++
}
}
videoFrameQueue.push(videoFrame)
dataListener?.onRtspDataVideoNalUnitReceived(
videoFrame.data,
videoFrame.offset,
videoFrame.length,
timestamp)
}
override fun onRtspAudioSampleReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {
if (DEBUG) Log.v(TAG, "onRtspAudioSampleReceived(length=$length, timestamp=$timestamp)")
if (length > 0) {
audioFrameQueue.push(
FrameQueue.AudioFrame(
AudioCodecType.AAC_LC,
data, offset,
length,
timestamp
)
)
}
dataListener?.onRtspDataAudioSampleReceived(data, offset, length, timestamp)
}
override fun onRtspApplicationDataReceived(data: ByteArray, offset: Int, length: Int, timestamp: Long) {
if (DEBUG) Log.v(TAG, "onRtspApplicationDataReceived(length=$length, timestamp=$timestamp)")
dataListener?.onRtspDataApplicationDataReceived(data, offset, length, timestamp)
}
override fun onRtspDisconnecting() {
if (DEBUG) Log.v(TAG, "onRtspDisconnecting()")
uiHandler.post {
statusListener?.onRtspStatusDisconnecting()
}
}
override fun onRtspDisconnected() {
if (DEBUG) Log.v(TAG, "onRtspDisconnected()")
uiHandler.post {
statusListener?.onRtspStatusDisconnected()
}
}
override fun onRtspFailedUnauthorized() {
if (DEBUG) Log.v(TAG, "onRtspFailedUnauthorized()")
uiHandler.post {
statusListener?.onRtspStatusFailedUnauthorized()
}
}
override fun onRtspFailed(message: String?) {
if (DEBUG) Log.v(TAG, "onRtspFailed(message='$message')")
uiHandler.post {
statusListener?.onRtspStatusFailed(message)
}
}
}
// Worker thread owning the RTSP socket lifecycle: connect (plain or TLS),
// run the blocking RtspClient.execute() loop, then clean up the socket.
inner class RtspThread: Thread() {

    // Shared stop flag; also observed by RtspClient.execute() to exit its loop.
    private var rtspStopped = AtomicBoolean(false)

    /** Requests the thread to stop without blocking the caller. */
    fun stopAsync() {
        if (DEBUG) Log.v(TAG, "stopAsync()")
        rtspStopped.set(true)
        // Wake up sleep() code
        interrupt()
    }

    override fun run() {
        onRtspClientStarted()
        // Fall back to the default RTSP port when the URI does not specify one.
        val port = if (uri.port == -1) DEFAULT_RTSP_PORT else uri.port
        var socket: Socket? = null
        try {
            if (DEBUG) Log.d(TAG, "Connecting to ${uri.host.toString()}:$port...")
            // "rtsps" scheme selects a TLS socket; plain TCP socket otherwise.
            socket = if (uri.scheme?.lowercase() == "rtsps")
                NetUtils.createSslSocketAndConnect(
                    uri.host.toString(),
                    port,
                    socketTimeoutMsec
                )
            else
                NetUtils.createSocketAndConnect(
                    uri.host.toString(),
                    port,
                    socketTimeoutMsec
                )
            // Blocking call until stopped variable is true or connection failed
            val rtspClient = RtspClient.Builder(socket, uri.toString(), rtspStopped, proxyClientListener)
                .requestVideo(requestVideo)
                .requestAudio(requestAudio)
                .requestApplication(requestApplication)
                .withDebug(debug)
                .withUserAgent(userAgent)
                .withCredentials(username, password)
                .build()
            rtspClient.execute()
        } catch (e: Exception) {
            e.printStackTrace()
            // NOTE(review): onRtspFailed() itself posts to uiHandler, so this
            // results in a double hop to the UI thread; harmless but redundant.
            uiHandler.post { proxyClientListener.onRtspFailed(e.message) }
        } finally {
            NetUtils.closeSocket(socket)
        }
        onRtspClientStopped()
    }
}
// Receives lifecycle events from the video decoder thread.
// NOTE(review): unlike the RTSP callbacks above, onRtspFrameSizeChanged() and
// onRtspFirstFrameRendered() are invoked directly on the decoder thread rather
// than posted to uiHandler — confirm listeners tolerate that.
private val videoDecoderListener = object: VideoDecoderListener {
    override fun onVideoDecoderStarted() {
        if (DEBUG) Log.v(TAG, "onVideoDecoderStarted()")
    }
    override fun onVideoDecoderStopped() {
        if (DEBUG) Log.v(TAG, "onVideoDecoderStopped()")
    }
    override fun onVideoDecoderFailed(message: String?) {
        if (DEBUG) Log.e(TAG, "onVideoDecoderFailed(message='$message')")
    }
    override fun onVideoDecoderFormatChanged(width: Int, height: Int) {
        if (DEBUG) Log.v(TAG, "onVideoDecoderFormatChanged(width=$width, height=$height)")
        statusListener?.onRtspFrameSizeChanged(width, height)
    }
    override fun onVideoDecoderFirstFrameRendered() {
        if (DEBUG) Log.v(TAG, "onVideoDecoderFirstFrameDecoded()")
        // Report only once per session; the flag is reset on decoder (re)start.
        if (!firstFrameRendered) statusListener?.onRtspFirstFrameRendered()
        firstFrameRendered = true
    }
}
/** Invoked when the RTSP I/O thread begins execution. */
private fun onRtspClientStarted() {
    if (DEBUG) {
        Log.v(TAG, "onRtspClientStarted()")
    }
    // Connection status is reported via proxyClientListener callbacks, not here.
}
// Starts the video and/or audio decoder threads once the RTSP session is
// connected and the mime types are known from the SDP.
private fun onRtspClientConnected() {
    if (DEBUG) Log.v(TAG, "onRtspClientConnected()")
    if (videoMimeType.isNotEmpty()) {
        firstFrameRendered = false
        Log.i(TAG, "Starting video decoder with mime type \"$videoMimeType\"")
        // Decoder thread creation is delegated to the owner (surface vs bitmap view).
        videoDecodeThread = onVideoDecoderCreateRequested.invoke(
            videoMimeType,
            videoRotation,
            videoFrameQueue,
            videoDecoderListener,
            videoDecoderType,
            videoFrameRateStabilization,
        )
        videoDecodeThread!!.apply {
            name = "RTSP video thread [${getUriName()}]"
            start()
        }
    }
    if (audioMimeType.isNotEmpty() /*&& checkAudio!!.isChecked*/) {
        Log.i(TAG, "Starting audio decoder with mime type \"$audioMimeType\"")
        audioDecodeThread = AudioDecodeThread(
            audioMimeType, audioSampleRate, audioChannelCount, audioCodecConfig, audioFrameQueue)
        audioDecodeThread!!.apply {
            name = "RTSP audio thread [${getUriName()}]"
            start()
        }
    }
}
/** Invoked when the RTSP I/O thread finishes; stops decoders and clears the thread ref. */
private fun onRtspClientStopped() {
    if (DEBUG) {
        Log.v(TAG, "onRtspClientStopped()")
    }
    stopDecoders()
    rtspThread = null
}
/**
 * Configures the RTSP connection parameters. Must be called before [start].
 *
 * @param uri RTSP(S) URI of the stream, e.g. "rtsp://host:554/stream"
 * @param username optional username for authentication
 * @param password optional password for authentication
 * @param userAgent optional User-Agent header value
 * @param socketTimeout socket connect/read timeout in milliseconds
 */
fun init(uri: Uri, username: String?, password: String?, userAgent: String? = null, socketTimeout: Int = DEFAULT_SOCKET_TIMEOUT) {
    // Security: never log the password itself, only whether one was provided.
    if (DEBUG) Log.v(TAG, "init(uri='$uri', username='$username', password=${if (password.isNullOrEmpty()) "<none>" else "<redacted>"}, userAgent='$userAgent', socketTimeout=$socketTimeout)")
    this.uri = uri
    this.username = username
    this.password = password
    this.userAgent = userAgent
    this.socketTimeoutMsec = socketTimeout
}
/**
 * Starts the RTSP client on a dedicated I/O thread, stopping any previous session first.
 *
 * @param requestVideo request the video track
 * @param requestAudio request the audio track
 * @param requestApplication request the application track
 */
fun start(requestVideo: Boolean, requestAudio: Boolean, requestApplication: Boolean = false) {
    if (DEBUG) Log.v(TAG, "start(requestVideo=$requestVideo, requestAudio=$requestAudio, requestApplication=$requestApplication)")
    // Terminate any previous session before starting a new one.
    rtspThread?.stopAsync()
    this.requestVideo = requestVideo
    this.requestAudio = requestAudio
    this.requestApplication = requestApplication
    val thread = RtspThread()
    thread.name = "RTSP IO thread [${getUriName()}]"
    thread.start()
    rtspThread = thread
}
/** Stops the RTSP client asynchronously and forgets the session thread. */
fun stop() {
    if (DEBUG) {
        Log.v(TAG, "stop()")
    }
    rtspThread?.apply { stopAsync() }
    rtspThread = null
}
/** True while an RTSP session thread exists, i.e. between [start] and [stop]. */
fun isStarted(): Boolean = rtspThread != null
/** Asynchronously stops both decoder threads and clears their references. */
fun stopDecoders() {
    if (DEBUG) {
        Log.v(TAG, "stopDecoders()")
    }
    videoDecodeThread?.apply { stopAsync() }
    videoDecodeThread = null
    audioDecodeThread?.apply { stopAsync() }
    audioDecodeThread = null
}
// Cached values, reused between frames to avoid per-frame allocations.
// NAL units found in the current frame; element type restored (it was missing,
// which cannot be inferred for a bare ArrayList()).
private val nalUnitsFound = ArrayList<VideoCodecUtils.NalUnit>()
// Scratch buffers for reading and writing a (modified) SPS NAL unit.
private val spsBufferReadFrame = ByteBuffer.allocate(VideoCodecUtils.MAX_NAL_SPS_SIZE)
private val spsBufferWriteFrame = ByteBuffer.allocate(VideoCodecUtils.MAX_NAL_SPS_SIZE)
/**
 * Try to get a new frame keyframe (SPS+PPS+IDR) with low latency modified SPS frame.
 * If modification failed, original frame will be returned.
 * Inspired by https://webrtc.googlesource.com/src/+/refs/heads/main/common_video/h264/sps_vui_rewriter.cc#400
 */
private fun getNewLowLatencyFrameFromKeyFrame(frame: FrameQueue.VideoFrame): FrameQueue.VideoFrame {
    try {
        // Support only H264 for now
        if (frame.codecType == VideoCodecType.H265)
            return frame
        nalUnitsFound.clear()
        VideoCodecUtils.getNalUnits(frame.data, frame.offset, frame.length, nalUnitsFound, isH265 = false)
        val oldSpsNalUnit = nalUnitsFound.firstOrNull { it.type == VideoCodecUtils.NAL_SPS }
        // SPS frame not found. Return original frame.
        if (oldSpsNalUnit == null)
            return frame
        // Copy the SPS payload into the scratch buffer, skipping 5 bytes —
        // presumably the 4-byte start code plus the 1-byte NAL header.
        spsBufferReadFrame.apply {
            rewind()
            put(frame.data, oldSpsNalUnit.offset + 5,
                Integer.min(oldSpsNalUnit.length, VideoCodecUtils.MAX_NAL_SPS_SIZE)
            )
            rewind()
        }
        // Read SPS frame
        val spsSet = SeqParameterSet.read(spsBufferReadFrame)
        // adding VUI might decrease latency for some streams, if max_dec_frame_buffering is set properly
        // https://community.intel.com/t5/Media-Intel-oneAPI-Video/h-264-decoder-gives-two-frames-latency-while-decoding-a-stream/td-p/1099694
        // https://github.com/Consti10/LiveVideo10ms/blob/master/VideoCore/src/main/cpp/NALU/H26X.hpp
        fun modifyVui() {
            // spsSet.vuiParams = VUIParameters()
            spsSet.vuiParams.apply {
                // videoSignalTypePresentFlag = true
                // videoFormat = 5
                // colourDescriptionPresentFlag = true
                // matrixCoefficients = 5
                // timingInfoPresentFlag = true
                // numUnitsInTick = 1
                // timeScale = 120
                // fixedFrameRateFlag = true
                // Force minimal decoder buffering for low-latency output.
                bitstreamRestriction = VUIParameters.BitstreamRestriction().apply {
                    // motionVectorsOverPicBoundariesFlag = true
                    // log2MaxMvLengthHorizontal = 16
                    // log2MaxMvLengthVertical = 16
                    maxDecFrameBuffering = 1
                    numReorderFrames = 0
                }
            }
        }
        modifyVui()
        // Write SPS frame
        spsBufferWriteFrame.rewind()
        spsSet.write(spsBufferWriteFrame)
        val newSpsNalUnitSize = spsBufferWriteFrame.position()
        if (oldSpsNalUnit.length > -1) {
            // New frame size = old size minus old SPS unit plus rewritten SPS.
            val newSize = frame.length - oldSpsNalUnit.length + newSpsNalUnitSize
            // NOTE(review): the +5 presumably compensates for the start code +
            // NAL header re-added before the rewritten SPS below; verify against
            // NalUnit.length semantics — TODO confirm.
            val newData = ByteArray(newSize + 5)
            var newDataOffset = 0
            for (nalUnit in nalUnitsFound) {
                when (nalUnit.type) {
                    VideoCodecUtils.NAL_SPS -> {
                        // Write NAL header + SPS frame type
                        val b = byteArrayOf(0x00, 0x00, 0x00, 0x01, 0x27)
                        b.copyInto(newData, newDataOffset, 0, b.size)
                        newDataOffset += b.size
                        // Write SPS frame body
                        spsBufferWriteFrame.apply {
                            rewind()
                            get(newData, newDataOffset, newSpsNalUnitSize)
                        }
                        newDataOffset += newSpsNalUnitSize
                    }
                    else -> {
                        // Copy all other NAL units through unchanged.
                        frame.data.copyInto(
                            newData,
                            newDataOffset,
                            nalUnit.offset,
                            nalUnit.offset + nalUnit.length
                        )
                        newDataOffset += nalUnit.length
                    }
                }
            }
            // Create SPS+PPS+IDR frame with newly modified SPS frame data
            return FrameQueue.VideoFrame(
                frame.codecType,
                frame.isKeyframe,
                newData,
                0,
                newData.size,
                frame.timestampMs,
                frame.capturedTimestampMs
            )
        }
    } catch (e: Exception) {
        Log.e(TAG, "Failed to create low-latency keyframe", e)
    }
    return frame
}
/** Returns "host:port" for thread naming, falling back to the default RTSP port. */
private fun getUriName(): String {
    val host = uri.host.toString()
    val resolvedPort = uri.port.takeIf { it != -1 } ?: DEFAULT_RTSP_PORT
    return "$host:$resolvedPort"
}
companion object {
    private val TAG: String = RtspProcessor::class.java.simpleName
    // Verbose logging switch for this class.
    private const val DEBUG = false
    // Default RTSP port used when the URI does not specify one.
    private const val DEFAULT_RTSP_PORT = 554
    // Default socket connect/read timeout, milliseconds.
    const val DEFAULT_SOCKET_TIMEOUT = 5000
}
}
================================================
FILE: library-client-rtsp/src/main/java/com/alexvas/rtsp/widget/RtspSurfaceView.kt
================================================
package com.alexvas.rtsp.widget
import android.content.Context
import android.net.Uri
import android.util.AttributeSet
import android.util.Log
import android.view.SurfaceHolder
import android.view.SurfaceView
import androidx.annotation.OptIn
import androidx.media3.common.util.UnstableApi
import androidx.media3.container.NalUnitUtil
import com.alexvas.rtsp.codec.VideoDecodeThread.DecoderType
import com.alexvas.rtsp.codec.VideoDecoderSurfaceThread
import com.alexvas.rtsp.widget.RtspProcessor.Statistics
import com.limelight.binding.video.MediaCodecHelper
/**
 * Low latency RTSP stream playback on surface view.
 *
 * Delegates all RTSP/decoder logic to [RtspProcessor]; this view only supplies
 * a surface-backed video decoder factory and tracks the surface size.
 */
open class RtspSurfaceView: SurfaceView {

    private var surfaceWidth = 1920
    private var surfaceHeight = 1080

    private var rtspProcessor = RtspProcessor(
        onVideoDecoderCreateRequested = {
            videoMimeType,
            videoRotation,
            videoFrameQueue,
            videoDecoderListener,
            videoDecoderType,
            videoFrameRateStabilization,
            ->
            VideoDecoderSurfaceThread(
                holder.surface,
                videoMimeType,
                surfaceWidth,
                surfaceHeight,
                videoRotation,
                videoFrameQueue,
                videoDecoderListener,
                videoDecoderType,
                videoFrameRateStabilization,
            )
        }
    )

    /**
     * Playback statistics, delegated to the processor.
     * Fix: was `var statistics = Statistics()` with a custom getter — the
     * initializer allocated a backing object that the getter never used.
     */
    val statistics: Statistics
        get() = rtspProcessor.statistics

    /** Video rotation in degrees, forwarded to the processor. */
    var videoRotation: Int
        get() = rtspProcessor.videoRotation
        set(value) { rtspProcessor.videoRotation = value }

    /** Hardware/software decoder selection, forwarded to the processor. */
    var videoDecoderType: DecoderType
        get() = rtspProcessor.videoDecoderType
        set(value) { rtspProcessor.videoDecoderType = value }

    /** Experimental SPS low-latency rewrite flag, forwarded to the processor. */
    var experimentalUpdateSpsFrameWithLowLatencyParams: Boolean
        get() = rtspProcessor.experimentalUpdateSpsFrameWithLowLatencyParams
        set(value) { rtspProcessor.experimentalUpdateSpsFrameWithLowLatencyParams = value }

    /** Verbose debug logging flag, forwarded to the processor. */
    var debug: Boolean
        get() = rtspProcessor.debug
        set(value) { rtspProcessor.debug = value }

    /** Enables decoder-side playback smoothing. Disabled by default. */
    var videoFrameRateStabilization: Boolean
        get() = rtspProcessor.videoFrameRateStabilization
        set(value) { rtspProcessor.videoFrameRateStabilization = value }

    private val surfaceCallback = object: SurfaceHolder.Callback {
        override fun surfaceCreated(holder: SurfaceHolder) {
            if (DEBUG) Log.v(TAG, "surfaceCreated()")
        }
        override fun surfaceChanged(holder: SurfaceHolder, format: Int, width: Int, height: Int) {
            if (DEBUG) Log.v(TAG, "surfaceChanged(format=$format, width=$width, height=$height)")
            // Remember the surface size for future decoder creation.
            surfaceWidth = width
            surfaceHeight = height
        }
        override fun surfaceDestroyed(holder: SurfaceHolder) {
            if (DEBUG) Log.v(TAG, "surfaceDestroyed()")
            // The decoder cannot render to a destroyed surface.
            rtspProcessor.stopDecoders()
        }
    }

    constructor(context: Context) : super(context) {
        initView(context, null, 0)
    }

    constructor(context: Context, attrs: AttributeSet?) : super(context, attrs) {
        initView(context, attrs, 0)
    }

    constructor(context: Context, attrs: AttributeSet?, defStyleAttr: Int) : super(context, attrs, defStyleAttr) {
        initView(context, attrs, defStyleAttr)
    }

    private fun initView(context: Context, attrs: AttributeSet?, defStyleAttr: Int) {
        if (DEBUG) Log.v(TAG, "initView()")
        MediaCodecHelper.initialize(context, /*glRenderer*/ "")
        holder.addCallback(surfaceCallback)
    }

    /**
     * Configures the RTSP connection parameters. Must be called before [start].
     *
     * @param uri RTSP(S) URI of the stream
     * @param username optional username for authentication
     * @param password optional password for authentication
     * @param userAgent optional User-Agent header value
     * @param socketTimeout socket timeout in milliseconds, or null for default
     */
    fun init(
        uri: Uri,
        username: String? = null,
        password: String? = null,
        userAgent: String? = null,
        socketTimeout: Int? = null
    ) {
        // Security: never log the password itself, only whether one was provided.
        if (DEBUG) Log.v(TAG, "init(uri='$uri', username='$username', password=${if (password.isNullOrEmpty()) "<none>" else "<redacted>"}, userAgent='$userAgent', socketTimeout=$socketTimeout)")
        rtspProcessor.init(
            uri,
            username,
            password,
            userAgent,
            socketTimeout ?: RtspProcessor.DEFAULT_SOCKET_TIMEOUT
        )
    }

    /**
     * Start RTSP client.
     *
     * @param requestVideo request video track
     * @param requestAudio request audio track
     * @param requestApplication request application track
     * @see https://datatracker.ietf.org/doc/html/rfc4566#section-5.14
     */
    fun start(requestVideo: Boolean, requestAudio: Boolean, requestApplication: Boolean = false) {
        if (DEBUG) Log.v(TAG, "start(requestVideo=$requestVideo, requestAudio=$requestAudio, requestApplication=$requestApplication)")
        rtspProcessor.start(requestVideo, requestAudio, requestApplication)
    }

    /**
     * Stop RTSP client.
     */
    fun stop() {
        if (DEBUG) Log.v(TAG, "stop()")
        rtspProcessor.stop()
    }

    /** True while an RTSP session is running. */
    fun isStarted(): Boolean {
        return rtspProcessor.isStarted()
    }

    /** Sets the listener for RTSP status updates (or null to clear). */
    fun setStatusListener(listener: RtspStatusListener?) {
        if (DEBUG) Log.v(TAG, "setStatusListener()")
        rtspProcessor.statusListener = listener
    }

    /** Sets the listener for raw RTSP data, e.g. for recording (or null to clear). */
    fun setDataListener(listener: RtspDataListener?) {
        if (DEBUG) Log.v(TAG, "setDataListener()")
        rtspProcessor.dataListener = listener
    }

    companion object {
        private val TAG: String = RtspSurfaceView::class.java.simpleName
        private const val DEBUG = false
    }
}
/** Renders the parsed SPS fields as a single comma-separated debug string. */
@OptIn(UnstableApi::class)
fun NalUnitUtil.SpsData.spsDataToString(): String {
    val fields = listOf(
        "width=$width",
        "height=$height",
        "profile_idc=$profileIdc",
        "constraint_set_flags=$constraintsFlagsAndReservedZero2Bits",
        "level_idc=$levelIdc",
        "max_num_ref_frames=$maxNumRefFrames",
        "frame_mbs_only_flag=$frameMbsOnlyFlag",
        "log2_max_frame_num=$frameNumLength",
        "pic_order_cnt_type=$picOrderCountType",
        "log2_max_pic_order_cnt_lsb=$picOrderCntLsbLength",
        "delta_pic_order_always_zero_flag=$deltaPicOrderAlwaysZeroFlag",
        "max_reorder_frames=$maxNumReorderFrames",
    )
    return fields.joinToString(", ")
}
/**
 * Renders up to [maxLength] bytes starting at [offset] as upper-case hex,
 * each byte followed by a space (e.g. "01 AB ").
 * Fix: out-of-range [offset] (negative or past the end) no longer throws;
 * the range is clamped to the array bounds instead.
 */
fun ByteArray.toHexString(offset: Int, maxLength: Int): String {
    if (isEmpty() || maxLength <= 0) return ""
    val from = offset.coerceIn(0, size)
    val to = (from + maxLength).coerceIn(from, size)
    val sb = StringBuilder((to - from) * 3)
    for (i in from until to) {
        // %02X on a Byte formats the unsigned 8-bit value, matching the
        // previous "%02x".format(...).uppercase() output exactly.
        sb.append(String.format("%02X ", this[i]))
    }
    return sb.toString()
}
================================================
FILE: library-client-rtsp/src/main/java/com/alexvas/utils/ByteUtils.java
================================================
package com.alexvas.utils;
import androidx.annotation.NonNull;
import java.io.File;
import java.io.FileOutputStream;
public class ByteUtils {

    /**
     * C-style memcmp equality check: returns true iff {@code num} bytes at the
     * given offsets are identical. Returns false when either region does not
     * contain {@code num} bytes.
     */
    // int memcmp ( const void * ptr1, const void * ptr2, size_t num );
    public static boolean memcmp(
            @NonNull byte[] source1,
            int offsetSource1,
            @NonNull byte[] source2,
            int offsetSource2,
            int num) {
        // Both regions must fully contain num bytes.
        if (source1.length - offsetSource1 < num || source2.length - offsetSource2 < num)
            return false;
        int i = 0;
        while (i < num) {
            if (source1[offsetSource1 + i] != source2[offsetSource2 + i])
                return false;
            i++;
        }
        return true;
    }

    /** Returns a new array containing a full copy of {@code src}. */
    public static byte[] copy(@NonNull byte[] src) {
        return src.clone();
    }
}
================================================
FILE: library-client-rtsp/src/main/java/com/alexvas/utils/MediaCodecUtils.kt
================================================
package com.alexvas.utils
import android.annotation.SuppressLint
import android.util.Log
import android.util.Range
import androidx.annotation.OptIn
import androidx.media3.common.util.UnstableApi
import androidx.media3.exoplayer.mediacodec.MediaCodecInfo
import androidx.media3.exoplayer.mediacodec.MediaCodecUtil
import java.lang.Exception
@SuppressLint("UnsafeOptInUsageError")
object MediaCodecUtils {

    // key - codecs mime type
    // value - list of codecs able to handle this mime type
    // Fix: generic type arguments restored — they were missing, making the
    // declarations uncompilable/uninferable.
    private val decoderInfosMap = HashMap<String, List<MediaCodecInfo>>()

    private val TAG: String = MediaCodecUtils::class.java.simpleName

    /** Returns (and caches) the decoders able to handle the given mime type. */
    private fun getDecoderInfos(mimeType: String): List<MediaCodecInfo> {
        val list = decoderInfosMap[mimeType]
        return if (list.isNullOrEmpty()) {
            val decoderInfos = try {
                MediaCodecUtil.getDecoderInfos(mimeType, false, false)
            } catch (e: Exception) {
                Log.e(TAG, "Failed to initialize '$mimeType' decoders list (${e.message})", e)
                ArrayList()
            }
            decoderInfosMap[mimeType] = decoderInfos
            decoderInfos
        } else {
            list
        }
    }

    /**
     * Get software decoders list. Usually used as fallback.
     */
    @Synchronized
    fun getSoftwareDecoders(mimeType: String): List<MediaCodecInfo> {
        val decoderInfos = getDecoderInfos(mimeType)
        val list = ArrayList<MediaCodecInfo>()
        for (codec in decoderInfos) {
            if (codec.softwareOnly)
                list.add(codec)
        }
        return list
    }

    /**
     * Get hardware accelerated decoders list. Used as default.
     */
    @Synchronized
    fun getHardwareDecoders(mimeType: String): List<MediaCodecInfo> {
        val decoderInfos = getDecoderInfos(mimeType)
        val list = ArrayList<MediaCodecInfo>()
        for (codec in decoderInfos) {
            if (codec.hardwareAccelerated)
                list.add(codec)
        }
        return list
    }

    /**
     * Look through all decoders (if there are multiple)
     * and select the one which supports low-latency.
     */
    @OptIn(UnstableApi::class)
    fun getLowLatencyDecoder(decoders: List<MediaCodecInfo>): MediaCodecInfo? {
        // Some devices can have several decoders, e.g.
        // Samsung Fold 5:
        //   "c2.qti.avc.decoder"
        //   "c2.qti.avc.decoder.low_latency"
        for (decoder in decoders) {
            if (decoder.name.contains("low_latency"))
                return decoder
        }
        // Another approach to find decoder with low-latency is to call
        // MediaCodec.createByCodecName(name) for every decoder to get decoder instance and then call
        // decoder.codecInfo.getCapabilitiesForType(mimeType).isFeatureSupported(MediaCodecInfo.CodecCapabilities.FEATURE_LowLatency)
        // No low-latency decoder found.
        return null
    }
}
/** Renders codec capabilities (instance count and max resolution) as a debug string. */
fun android.media.MediaCodecInfo.CodecCapabilities.capabilitiesToString(): String {
    // (-1, -1) when the codec reports no video capabilities (e.g. audio codec).
    val heights = videoCapabilities?.supportedHeights ?: Range(-1, -1)
    val widths = videoCapabilities?.supportedWidths ?: Range(-1, -1)
    return "max instances: ${maxSupportedInstances}, max resolution: ${heights.upper}x${widths.upper}"
}
================================================
FILE: library-client-rtsp/src/main/java/com/alexvas/utils/NetUtils.java
================================================
package com.alexvas.utils;
import android.util.Log;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSocket;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
public class NetUtils {

    private static final String TAG = NetUtils.class.getSimpleName();
    private static final boolean DEBUG = false;
    // Max length of a single header line before giving up.
    private final static int MAX_LINE_SIZE = 4098;

    /**
     * Trust manager which accepts ANY server certificate.
     * SECURITY WARNING: this disables TLS certificate validation entirely and
     * makes "rtsps" connections vulnerable to man-in-the-middle attacks.
     * Kept for compatibility with self-signed camera certificates.
     */
    public static final class FakeX509TrustManager implements X509TrustManager {
        /**
         * Accepted issuers for fake trust manager
         */
        final static private X509Certificate[] mAcceptedIssuers = new X509Certificate[]{};
        /**
         * Constructor for FakeX509TrustManager.
         */
        public FakeX509TrustManager() {
        }
        /**
         * @see javax.net.ssl.X509TrustManager#checkClientTrusted(X509Certificate[],String authType)
         */
        public void checkClientTrusted(X509Certificate[] certificates, String authType)
                throws CertificateException {
        }
        /**
         * @see javax.net.ssl.X509TrustManager#checkServerTrusted(X509Certificate[],String authType)
         */
        public void checkServerTrusted(X509Certificate[] certificates, String authType)
                throws CertificateException {
        }
        // https://github.com/square/okhttp/issues/4669
        // Called by Android via reflection in X509TrustManagerExtensions.
        @SuppressWarnings("unused")
        public List<X509Certificate> checkServerTrusted(X509Certificate[] chain, String authType, String host) throws CertificateException {
            return Arrays.asList(chain);
        }
        /**
         * @see javax.net.ssl.X509TrustManager#getAcceptedIssuers()
         */
        public X509Certificate[] getAcceptedIssuers() {
            return mAcceptedIssuers;
        }
    }

    /**
     * Creates a TLS socket connected to the given host/port.
     * Note: uses {@link FakeX509TrustManager}, i.e. no certificate validation.
     */
    @NonNull
    public static SSLSocket createSslSocketAndConnect(@NonNull String dstName, int dstPort, int timeout) throws Exception {
        if (DEBUG)
            Log.v(TAG, "createSslSocketAndConnect(dstName=" + dstName + ", dstPort=" + dstPort + ", timeout=" + timeout + ")");
        SSLContext sslContext = SSLContext.getInstance("TLS");
        sslContext.init(null, new TrustManager[] { new FakeX509TrustManager() }, null);
        SSLSocket sslSocket = (SSLSocket) sslContext.getSocketFactory().createSocket();
        sslSocket.connect(new InetSocketAddress(dstName, dstPort), timeout);
        sslSocket.setSoLinger(false, 1);
        sslSocket.setSoTimeout(timeout);
        return sslSocket;
    }

    /** Creates a plain TCP socket connected to the given host/port. */
    @NonNull
    public static Socket createSocketAndConnect(@NonNull String dstName, int dstPort, int timeout) throws IOException {
        if (DEBUG)
            Log.v(TAG, "createSocketAndConnect(dstName=" + dstName + ", dstPort=" + dstPort + ", timeout=" + timeout + ")");
        Socket socket = new Socket();
        socket.connect(new InetSocketAddress(dstName, dstPort), timeout);
        socket.setSoLinger(false, 1);
        socket.setSoTimeout(timeout);
        return socket;
    }

    /** Creates an unconnected TCP socket with linger/read timeouts preset. */
    @NonNull
    public static Socket createSocket(int timeout) throws IOException {
        Socket socket = new Socket();
        socket.setSoLinger(false, 1); // 1 sec for flush() before close()
        socket.setSoTimeout(timeout); // timeout for read(), not for write()
        return socket;
    }

    /** Shuts down both directions (best effort) and closes the socket, if any. */
    public static void closeSocket(@Nullable Socket socket) throws IOException {
        if (DEBUG)
            Log.v(TAG, "closeSocket()");
        if (socket != null) {
            try {
                socket.shutdownInput();
            } catch (Exception ignored) {
                // Best effort; socket may already be half-closed.
            }
            try {
                socket.shutdownOutput();
            } catch (Exception ignored) {
                // Best effort; socket may already be half-closed.
            }
            socket.close();
        }
    }

    /**
     * Reads header lines until an empty line or EOF.
     * Note: readLine() signals the empty "\r\n" terminator line by returning
     * null, so both branches below end the header section.
     */
    @NonNull
    public static ArrayList<String> readResponseHeaders(@NonNull InputStream inputStream) throws IOException {
        ArrayList<String> headers = new ArrayList<>();
        String line;
        while (true) {
            line = readLine(inputStream);
            if (line != null) {
                if (line.equals("\r\n"))
                    return headers;
                else
                    headers.add(line);
            } else {
                break;
            }
        }
        return headers;
    }

    /**
     * Reads a single header line terminated by '\n' (tolerating bare '\n'
     * without '\r'). Returns the line without its terminator, or null on EOF
     * or on the empty line that ends the header section.
     *
     * @throws IOException if no line terminator is found within MAX_LINE_SIZE bytes
     */
    @Nullable
    public static String readLine(@NonNull InputStream inputStream) throws IOException {
        byte[] bufferLine = new byte[MAX_LINE_SIZE];
        int offset = 0;
        int readBytes;
        do {
            // Didn't find "\r\n" within 4K bytes
            if (offset >= MAX_LINE_SIZE) {
                throw new IOException("Invalid headers");
            }
            // Read 1 byte
            readBytes = inputStream.read(bufferLine, offset, 1);
            if (readBytes == 1) {
                // Check for EOL
                // Some cameras like Linksys WVC200 do send \n instead of \r\n
                if (offset > 0 && /*bufferLine[offset-1] == '\r' &&*/ bufferLine[offset] == '\n') {
                    // Found empty EOL. End of header section
                    if (offset == 1)
                        break;
                    // Found EOL. Return line without "\r\n".
                    return new String(bufferLine, 0, offset - 1);
                } else {
                    offset++;
                }
            }
        } while (readBytes > 0);
        return null;
    }

    /**
     * Extracts the numeric HTTP status code from a list of response headers.
     * Fixes: the code index is now relative to where "HTTP/1.x " was found
     * (not a fixed offset 9), and a status line without a reason phrase
     * (e.g. "HTTP/1.1 200") no longer throws StringIndexOutOfBoundsException.
     *
     * @return the status code, or -1 if no valid status line was found
     */
    public static int getResponseStatusCode(@NonNull ArrayList<String> headers) {
        // Search for HTTP status code header
        for (String header : headers) {
            int indexHttp = header.indexOf("HTTP/1.1 "); // 9 characters
            if (indexHttp == -1)
                indexHttp = header.indexOf("HTTP/1.0 ");
            if (indexHttp >= 0) {
                int codeStart = indexHttp + 9;
                int indexCode = header.indexOf(' ', codeStart);
                // Status line may lack a reason phrase, e.g. "HTTP/1.1 200".
                if (indexCode == -1)
                    indexCode = header.length();
                String code = header.substring(codeStart, indexCode);
                try {
                    return Integer.parseInt(code);
                } catch (NumberFormatException e) {
                    // Does not fulfill standard "HTTP/1.1 200 Ok" token
                    // Continue search for
                }
            }
        }
        // Not found
        return -1;
    }

    /** Reads exactly {@code length} bytes and decodes them with the platform charset. */
    @NonNull
    public static String readContentAsText(@NonNull InputStream inputStream, int length) throws IOException {
        if (length <= 0)
            return "";
        byte[] b = new byte[length];
        int read = readData(inputStream, b, 0, length);
        return new String(b, 0, read);
    }

    /**
     * Reads exactly {@code length} bytes into {@code buffer} at {@code offset}.
     *
     * @throws EOFException if the stream ends before {@code length} bytes were read
     */
    public static int readData(@NonNull InputStream inputStream, @NonNull byte[] buffer, int offset, int length) throws IOException {
        int readBytes;
        int totalReadBytes = 0;
        do {
            readBytes = inputStream.read(buffer, offset + totalReadBytes, length - totalReadBytes);
            if (readBytes == -1) {
                throw new EOFException("Stream closed, read " + totalReadBytes + " of " + length + " bytes");
            }
            totalReadBytes += readBytes;
        } while (readBytes >= 0 && totalReadBytes < length);
        return totalReadBytes;
    }
}
================================================
FILE: library-client-rtsp/src/main/java/com/alexvas/utils/VideoCodecUtils.kt
================================================
package com.alexvas.utils
import android.annotation.SuppressLint
import android.util.Log
import androidx.media3.container.NalUnitUtil
import androidx.media3.container.NalUnitUtil.SpsData
import java.util.concurrent.atomic.AtomicInteger
import kotlin.experimental.and
object VideoCodecUtils {
private val TAG = VideoCodecUtils::class.java.simpleName

/** Max possible NAL SPS size in bytes */
const val MAX_NAL_SPS_SIZE: Int = 500

// H.264 NAL unit type codes (low 5 bits of the NAL header octet).
const val NAL_SLICE: Byte = 1
const val NAL_DPA: Byte = 2
const val NAL_DPB: Byte = 3
const val NAL_DPC: Byte = 4
const val NAL_IDR_SLICE: Byte = 5
const val NAL_SEI: Byte = 6
const val NAL_SPS: Byte = 7
const val NAL_PPS: Byte = 8
const val NAL_AUD: Byte = 9
const val NAL_END_SEQUENCE: Byte = 10
const val NAL_END_STREAM: Byte = 11
const val NAL_FILLER_DATA: Byte = 12
const val NAL_SPS_EXT: Byte = 13
const val NAL_AUXILIARY_SLICE: Byte = 19
// RTP payload aggregation/fragmentation types (not part of the H.264 spec).
const val NAL_STAP_A: Byte = 24 // https://tools.ietf.org/html/rfc3984 5.7.1
const val NAL_STAP_B: Byte = 25 // 5.7.1
const val NAL_MTAP16: Byte = 26 // 5.7.2
const val NAL_MTAP24: Byte = 27 // 5.7.2
const val NAL_FU_A: Byte = 28 // 5.8 fragmented unit
const val NAL_FU_B: Byte = 29 // 5.8

// H.265 NAL unit types. Table 7-3: NAL unit type codes
const val H265_NAL_TRAIL_N: Byte = 0
const val H265_NAL_TRAIL_R: Byte = 1
const val H265_NAL_TSA_N: Byte = 2
const val H265_NAL_TSA_R: Byte = 3
const val H265_NAL_STSA_N: Byte = 4
const val H265_NAL_STSA_R: Byte = 5
const val H265_NAL_RADL_N: Byte = 6
const val H265_NAL_RADL_R: Byte = 7
const val H265_NAL_RASL_N: Byte = 8
const val H265_NAL_RASL_R: Byte = 9
const val H265_NAL_BLA_W_LP: Byte = 16
const val H265_NAL_BLA_W_RADL: Byte = 17
const val H265_NAL_BLA_N_LP: Byte = 18
const val H265_NAL_IDR_W_RADL: Byte = 19
const val H265_NAL_IDR_N_LP: Byte = 20
const val H265_NAL_CRA_NUT: Byte = 21
const val H265_NAL_VPS: Byte = 32
const val H265_NAL_SPS: Byte = 33
const val H265_NAL_PPS: Byte = 34
const val H265_NAL_AUD: Byte = 35
const val H265_NAL_EOS_NUT: Byte = 36
const val H265_NAL_EOB_NUT: Byte = 37
const val H265_NAL_FD_NUT: Byte = 38
const val H265_NAL_SEI_PREFIX: Byte = 39
const val H265_NAL_SEI_SUFFIX: Byte = 40

// Annex B start codes: 4-byte and 3-byte variants.
private val NAL_PREFIX1 = byteArrayOf(0x00, 0x00, 0x00, 0x01)
private val NAL_PREFIX2 = byteArrayOf(0x00, 0x00, 0x01)
/**
 * Search for 00 00 01 or 00 00 00 01 in byte stream.
 * On success the detected start code length (3 or 4) is stored in [prefixSize].
 * @return offset to the start of NAL unit if found, otherwise -1
 */
fun searchForNalUnitStart(
    data: ByteArray,
    offset: Int,
    length: Int,
    prefixSize: AtomicInteger
): Int {
    // Not enough bytes left for even the shortest start code.
    if (offset >= data.size - 3) return -1
    for (pos in offset until offset + length) {
        val prefix = getNalUnitStartCodePrefixSize(data, pos, length)
        if (prefix >= 0) {
            prefixSize.set(prefix)
            return pos
        }
    }
    return -1
}
// Searches for the first H.264 NAL unit of the given type within data.
// Returns the offset of the unit's start code, or -1 if not found or if the
// scan exceeds the 100 ms wall-clock budget.
fun searchForH264NalUnitByType(
    data: ByteArray,
    offset: Int,
    length: Int,
    byUnitType: Int
): Int {
    var off = offset
    val nalUnitPrefixSize = AtomicInteger(-1)
    // Wall-clock guard against pathologically large/degenerate inputs.
    val timestamp = System.currentTimeMillis()
    while (true) {
        val nalUnitIndex = searchForNalUnitStart(data, off, length, nalUnitPrefixSize)
        if (nalUnitIndex >= 0) {
            val nalUnitOffset = nalUnitIndex + nalUnitPrefixSize.get()
            if (nalUnitOffset >= data.size)
                break
            val nalUnitTypeOctet = data[nalUnitOffset]
            // H.264: NAL type is the low 5 bits of the octet after the start code.
            if ((nalUnitTypeOctet and 0x1f).toInt() == byUnitType) {
                return nalUnitIndex
            }
            // Continue scanning right after this unit's header octet.
            off = nalUnitOffset
            // Check that we are not too long here
            if (System.currentTimeMillis() - timestamp > 100) {
                Log.w(TAG, "Cannot process data within 100 msec in $length bytes")
                break
            }
        } else {
            break
        }
    }
    return -1
}
/**
 * Read the NAL unit type of the NAL unit whose Annex B start code begins at [offset].
 *
 * @param data   buffer containing the NAL unit (null returns -1)
 * @param offset absolute offset of the start code (00 00 01 or 00 00 00 01)
 * @param length number of valid bytes; must exceed the 4-byte prefix size
 * @param isH265 true for H.265 (6-bit type field), false for H.264 (5-bit type field)
 * @return the NAL unit type, or -1 when no start code fits at [offset]
 */
fun getNalUnitType(data: ByteArray?, offset: Int, length: Int, isH265: Boolean): Byte {
    if (data == null || offset < 0 || length <= NAL_PREFIX1.size) return (-1).toByte()
    var nalUnitTypeOctetOffset = -1
    // Fixed: bounds checks added so a start code near the end of the buffer can no
    // longer throw ArrayIndexOutOfBoundsException (the old code read data[offset + 3]
    // and data[nalUnitTypeOctetOffset + 1] without checking data.size).
    if (offset + NAL_PREFIX2.size < data.size && data[offset + NAL_PREFIX2.size - 1] == 1.toByte())
        nalUnitTypeOctetOffset =
            offset + NAL_PREFIX2.size - 1
    else if (offset + NAL_PREFIX1.size < data.size && data[offset + NAL_PREFIX1.size - 1] == 1.toByte())
        nalUnitTypeOctetOffset = offset + NAL_PREFIX1.size - 1
    return if (nalUnitTypeOctetOffset != -1) {
        val nalUnitTypeOctet = data[nalUnitTypeOctetOffset + 1]
        if (isH265)
            ((nalUnitTypeOctet.toInt() shr 1) and 0x3F).toByte()
        else
            (nalUnitTypeOctet and 0x1f)
    } else {
        (-1).toByte()
    }
}
/**
 * Detect an Annex B start code at [offset].
 * @return 4 for 00 00 00 01, 3 for 00 00 01, otherwise -1
 */
private fun getNalUnitStartCodePrefixSize(
    data: ByteArray,
    offset: Int,
    length: Int
): Int = when {
    length < 4 -> -1
    memcmp(data, offset, NAL_PREFIX1, 0, NAL_PREFIX1.size) -> NAL_PREFIX1.size
    memcmp(data, offset, NAL_PREFIX2, 0, NAL_PREFIX2.size) -> NAL_PREFIX2.size
    else -> -1
}
/**
 * Compare [num] bytes of two arrays starting at the given offsets
 * (equivalent to C's memcmp(...) == 0, with bounds checking).
 * @return true when both ranges are within bounds and every byte matches
 */
private fun memcmp(
    source1: ByteArray,
    offsetSource1: Int,
    source2: ByteArray,
    offsetSource2: Int,
    num: Int
): Boolean {
    if (num > source1.size - offsetSource1 || num > source2.size - offsetSource2) return false
    var i = 0
    while (i < num) {
        if (source1[offsetSource1 + i] != source2[offsetSource2 + i]) return false
        i++
    }
    return true
}
// Descriptor of one NAL unit found in a buffer: its type code, the absolute
// offset of its start code and its total length (start code included).
data class NalUnit (val type: Byte, val offset: Int, val length: Int)
/**
 * Split the buffer into NAL units delimited by Annex B start codes.
 * Gives up after 200 msec to avoid stalling the caller.
 *
 * Fixed: the [foundNals] parameter was missing its type argument
 * (raw `ArrayList` is not valid Kotlin); logic is otherwise unchanged.
 *
 * @param data       buffer to scan
 * @param dataOffset absolute offset to start scanning at
 * @param length     number of bytes to scan
 * @param foundNals  receives one NalUnit per unit found (cleared first)
 * @param isH265     true for H.265 type extraction, false for H.264
 * @return number of NAL units found
 */
fun getNalUnits(
    data: ByteArray,
    dataOffset: Int,
    length: Int,
    foundNals: ArrayList<NalUnit>,
    isH265: Boolean
): Int {
    foundNals.clear()
    var nalUnits = 0
    val nextNalOffset = 0
    val nalUnitPrefixSize = AtomicInteger(-1)
    val timestamp = System.currentTimeMillis()
    var offset = dataOffset
    var stopped = false
    while (!stopped) {
        // Search for first NAL unit
        val nalUnitIndex = searchForNalUnitStart(
            data,
            offset + nextNalOffset,
            length - nextNalOffset,
            nalUnitPrefixSize
        )
        // NAL unit found
        if (nalUnitIndex >= 0) {
            nalUnits++
            val nalUnitOffset = offset + nextNalOffset + nalUnitPrefixSize.get()
            val nalUnitTypeOctet = data[nalUnitOffset]
            val nalUnitType = if (isH265)
                ((nalUnitTypeOctet.toInt() shr 1) and 0x3F).toByte()
            else
                (nalUnitTypeOctet and 0x1F)
            // Search for second NAL unit (optional)
            // NOTE(review): `length - nalUnitOffset` subtracts an absolute offset from a
            // relative length; verify against callers that pass dataOffset != 0.
            var nextNalUnitStartIndex = searchForNalUnitStart(
                data,
                nalUnitOffset,
                length - nalUnitOffset,
                nalUnitPrefixSize
            )
            // Second NAL unit not found. Use till the end.
            if (nextNalUnitStartIndex < 0) {
                // Not found next NAL unit. Use till the end.
                nextNalUnitStartIndex = length + dataOffset
                stopped = true
            }
            val l = nextNalUnitStartIndex - offset
            foundNals.add(NalUnit(nalUnitType, offset, l))
            offset = nextNalUnitStartIndex
            // Check that we are not too long here
            if (System.currentTimeMillis() - timestamp > 200) {
                Log.w(TAG, "Cannot process data within 200 msec in $length bytes (NALs found: " + foundNals.size + ")")
                break
            }
        } else {
            stopped = true
        }
    }
    return nalUnits
}
/**
 * Find the first NAL unit of [nalUnitType] and return the offset of its payload
 * (just past the start code and the NAL type octet) paired with the unit length.
 *
 * Fixed: the return type and local list were missing their type arguments
 * (raw `Pair?` / `ArrayList()` are not valid Kotlin).
 *
 * @return Pair(payload offset, NAL unit length) or null when no such unit exists
 */
private fun getNalUnitStartLengthFromArray(
    src: ByteArray, offset: Int, length: Int,
    isH265: Boolean,
    nalUnitType: Byte
): Pair<Int, Int>? {
    val nalUnitsFound = ArrayList<NalUnit>()
    if (getNalUnits(src, offset, length, nalUnitsFound, isH265) > 0) {
        for (nalUnit in nalUnitsFound) {
            if (nalUnit.type == nalUnitType) {
                val prefixSize = AtomicInteger()
                val nalUnitIndex = searchForNalUnitStart(
                    src,
                    nalUnit.offset,
                    nalUnit.length,
                    prefixSize
                )
                // Skip the start code and the NAL unit type octet to reach the payload.
                val nalOffset = nalUnitIndex + prefixSize.get() + 1 /* NAL unit type */
                return Pair(nalOffset, nalUnit.length)
            }
        }
    }
    return null
}
/**
 * Locate and parse the SPS NAL unit in the buffer.
 * @return parsed SPS data, or null when no SPS NAL unit is present
 */
@SuppressLint("UnsafeOptInUsageError")
fun getSpsNalUnitFromArray(src: ByteArray, offset: Int, length: Int, isH265: Boolean): SpsData? {
    val startLength = getNalUnitStartLengthFromArray(src, offset, length, isH265, NAL_SPS) ?: return null
    // Parse only the SPS payload (start code and type octet already skipped).
    return NalUnitUtil.parseSpsNalUnitPayload(
        src, startLength.first, startLength.first + startLength.second)
}
/**
 * Extract the video width/height from the SPS NAL unit found in the buffer.
 *
 * Fixed: the return type was missing its type arguments
 * (raw `Pair?` is not valid Kotlin).
 *
 * @return Pair(width, height) or null when no SPS NAL unit is found
 */
@SuppressLint("UnsafeOptInUsageError")
fun getWidthHeightFromArray(src: ByteArray, offset: Int, length: Int, isH265: Boolean): Pair<Int, Int>? {
    val sps = getSpsNalUnitFromArray(src, offset, length, isH265)
    sps?.let {
        return Pair(sps.width, sps.height)
    }
    return null
}
// private fun isH265IRAP(nalUnitType: Byte): Boolean {
// return nalUnitType in 16..23
// }
/**
 * Scan the buffer for a key frame (IDR) NAL unit.
 * Gives up after 100 msec of scanning to avoid stalling the caller.
 *
 * @param data   buffer to scan (null returns false)
 * @param offset absolute offset to start scanning at
 * @param length number of bytes to scan
 * @param isH265 true for H.265 (HEVC), false for H.264 (AVC)
 * @return true if an IDR NAL unit is found; false if a non-IDR H.264 slice is
 *         seen first, the data runs out, or the time budget is exceeded
 */
fun isAnyKeyFrame(data: ByteArray?, offset: Int, length: Int, isH265: Boolean): Boolean {
if (data == null || length <= 0) return false
var currOffset = offset
val nalUnitPrefixSize = AtomicInteger(-1)
val timestamp = System.currentTimeMillis()
while (true) {
val nalUnitIndex = searchForNalUnitStart(
data,
currOffset,
length,
nalUnitPrefixSize
)
if (nalUnitIndex >= 0) {
val nalUnitOffset = nalUnitIndex + nalUnitPrefixSize.get()
if (nalUnitOffset >= data.size)
return false
val nalUnitTypeOctet = data[nalUnitOffset]
if (isH265) {
// H.265: the 6-bit type sits in bits 1..6 of the first octet.
val nalUnitType = ((nalUnitTypeOctet.toInt() and 0x7E) shr 1).toByte()
// Treat SEI_PREFIX as key frame.
// NOTE(review): the comment above looks stale — only IDR_W_RADL/IDR_N_LP
// are actually treated as key frames here.
if (nalUnitType == H265_NAL_IDR_W_RADL || nalUnitType == H265_NAL_IDR_N_LP)
return true
} else {
// H.264: the 5-bit type sits in the low bits of the first octet.
val nalUnitType = (nalUnitTypeOctet.toInt() and 0x1f).toByte()
when (nalUnitType) {
NAL_IDR_SLICE -> return true
NAL_SLICE -> return false
}
}
// Continue searching
currOffset = nalUnitOffset
// Check that we are not too long here
if (System.currentTimeMillis() - timestamp > 100) {
Log.w(TAG, "Cannot process data within 100 msec in $length bytes (index=$nalUnitIndex)")
break
}
} else {
break
}
}
return false
}
/** Human-readable name of an H.264 NAL unit type code, for logging. */
fun getH264NalUnitTypeString(nalUnitType: Byte): String {
return when (nalUnitType) {
NAL_SLICE -> "NAL_SLICE"
NAL_DPA -> "NAL_DPA"
NAL_DPB -> "NAL_DPB"
NAL_DPC -> "NAL_DPC"
NAL_IDR_SLICE -> "NAL_IDR_SLICE"
NAL_SEI -> "NAL_SEI"
NAL_SPS -> "NAL_SPS"
NAL_PPS -> "NAL_PPS"
NAL_AUD -> "NAL_AUD"
NAL_END_SEQUENCE -> "NAL_END_SEQUENCE"
NAL_END_STREAM -> "NAL_END_STREAM"
NAL_FILLER_DATA -> "NAL_FILLER_DATA"
NAL_SPS_EXT -> "NAL_SPS_EXT"
NAL_AUXILIARY_SLICE -> "NAL_AUXILIARY_SLICE"
NAL_STAP_A -> "NAL_STAP_A"
NAL_STAP_B -> "NAL_STAP_B"
NAL_MTAP16 -> "NAL_MTAP16"
NAL_MTAP24 -> "NAL_MTAP24"
NAL_FU_A -> "NAL_FU_A"
NAL_FU_B -> "NAL_FU_B"
else -> "unknown - $nalUnitType"
}
}
/** Human-readable name of an H.265 NAL unit type code, for logging. */
fun getH265NalUnitTypeString(nalUnitType: Byte): String {
return when (nalUnitType) {
H265_NAL_TRAIL_N -> "NAL_TRAIL_N"
H265_NAL_TRAIL_R -> "NAL_TRAIL_R"
H265_NAL_TSA_N -> "NAL_TSA_N"
H265_NAL_TSA_R -> "NAL_TSA_R"
H265_NAL_STSA_N -> "NAL_STSA_N"
H265_NAL_STSA_R -> "NAL_STSA_R"
H265_NAL_RADL_N -> "NAL_RADL_N"
H265_NAL_RADL_R -> "NAL_RADL_R"
H265_NAL_RASL_N -> "NAL_RASL_N"
H265_NAL_RASL_R -> "NAL_RASL_R"
H265_NAL_BLA_W_LP -> "NAL_BLA_W_LP"
H265_NAL_BLA_W_RADL -> "NAL_BLA_W_RADL"
H265_NAL_BLA_N_LP -> "NAL_BLA_N_LP"
H265_NAL_IDR_W_RADL -> "NAL_IDR_W_RADL"
H265_NAL_IDR_N_LP -> "NAL_IDR_N_LP"
H265_NAL_CRA_NUT -> "NAL_CRA_NUT"
H265_NAL_VPS -> "NAL_VPS"
H265_NAL_SPS -> "NAL_SPS"
H265_NAL_PPS -> "NAL_PPS"
H265_NAL_AUD -> "NAL_AUD"
H265_NAL_EOS_NUT -> "NAL_EOS_NUT"
H265_NAL_EOB_NUT -> "NAL_EOB_NUT"
H265_NAL_FD_NUT -> "NAL_FD_NUT"
H265_NAL_SEI_PREFIX -> "NAL_SEI_PREFIX"
H265_NAL_SEI_SUFFIX -> "NAL_SEI_SUFFIX"
else -> "unknown - $nalUnitType"
}
}
}
================================================
FILE: library-client-rtsp/src/main/java/com/limelight/binding/video/MediaCodecHelper.java
================================================
package com.limelight.binding.video;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import android.annotation.SuppressLint;
import android.app.ActivityManager;
import android.content.Context;
import android.content.pm.ConfigurationInfo;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecInfo.CodecProfileLevel;
import android.media.MediaFormat;
import android.os.Build;
import android.util.Log;
// Based on https://github.com/moonlight-stream/moonlight-android/blob/master/app/src/main/java/com/limelight/binding/video/MediaCodecHelper.java
public class MediaCodecHelper {
private static final String TAG = MediaCodecHelper.class.getSimpleName();

// Decoder-name prefix lists used to classify decoders by vendor and quirk.
// Fixed: restored the String type arguments — the raw List declarations do not
// compile cleanly against the typed iteration below.
private static final List<String> preferredDecoders;
private static final List<String> blacklistedDecoderPrefixes;
private static final List<String> spsFixupBitstreamFixupDecoderPrefixes;
private static final List<String> blacklistedAdaptivePlaybackPrefixes;
private static final List<String> baselineProfileHackPrefixes;
private static final List<String> directSubmitPrefixes;
private static final List<String> constrainedHighProfilePrefixes;
private static final List<String> whitelistedHevcDecoders;
private static final List<String> refFrameInvalidationAvcPrefixes;
private static final List<String> refFrameInvalidationHevcPrefixes;
private static final List<String> useFourSlicesPrefixes;
private static final List<String> qualcommDecoderPrefixes;
private static final List<String> kirinDecoderPrefixes;
private static final List<String> exynosDecoderPrefixes;
private static final List<String> amlogicDecoderPrefixes;
private static final List<String> knownVendorLowLatencyOptions;

// True on emulators/Chromebook-like environments that only have software decoders.
public static final boolean SHOULD_BYPASS_SOFTWARE_BLOCK =
        Build.HARDWARE.equals("ranchu") || Build.HARDWARE.equals("cheets") || Build.BRAND.equals("Android-x86");

// GPU-derived flags, filled in by initialize().
private static boolean isLowEndSnapdragon = false;
private static boolean isAdreno620 = false;
private static boolean initialized = false;
static {
    directSubmitPrefixes = new LinkedList<>();
    // These decoders have input-buffer latency low enough that they can be
    // fed directly from the receive thread.
    Collections.addAll(directSubmitPrefixes,
            "omx.qcom", "omx.sec", "omx.exynos", "omx.intel",
            "omx.brcm", "omx.TI", "omx.arc", "omx.nvidia");
    // All Codec2 decoders qualify as well.
    directSubmitPrefixes.add("c2.");
}
static {
    refFrameInvalidationAvcPrefixes = new LinkedList<>();
    refFrameInvalidationHevcPrefixes = new LinkedList<>();
    // Exynos supports HEVC reference frame invalidation out of the box.
    // Qualcomm and NVIDIA may be added at runtime in initialize().
    Collections.addAll(refFrameInvalidationHevcPrefixes, "omx.exynos", "c2.exynos");
}
static {
    preferredDecoders = new LinkedList<>();
}
static {
    blacklistedDecoderPrefixes = new LinkedList<>();
    // Software decoders generally lack H.264 high profile support, so block them
    // everywhere except environments expected to have nothing else (emulators).
    if (!SHOULD_BYPASS_SOFTWARE_BLOCK) {
        blacklistedDecoderPrefixes.add("omx.google");
        blacklistedDecoderPrefixes.add("AVCDecoder");
        // ffmpeg decoders are usually software; on Android 10+ we defer to the
        // isSoftwareOnly() API instead of blacklisting them outright.
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
            blacklistedDecoderPrefixes.add("OMX.ffmpeg");
        }
    }
    // Always disabled: software decoders with terrible performance that also
    // crash on our HEVC stream (at least prior to CSD batching).
    Collections.addAll(blacklistedDecoderPrefixes,
            "OMX.qcom.video.decoder.hevcswvdec", "OMX.SEC.hevc.sw.dec");
}
static {
    // Ignored for decoders that qualify for reference frame invalidation.
    spsFixupBitstreamFixupDecoderPrefixes = new LinkedList<>();
    Collections.addAll(spsFixupBitstreamFixupDecoderPrefixes,
            "omx.nvidia", "omx.qcom", "omx.brcm");

    baselineProfileHackPrefixes = new LinkedList<>();
    baselineProfileHackPrefixes.add("omx.intel");

    blacklistedAdaptivePlaybackPrefixes = new LinkedList<>();
    // Intel on Lollipop (Nexus Player): adaptive playback badly increases latency.
    blacklistedAdaptivePlaybackPrefixes.add("omx.intel");
    // MediaTek: crashes at 1080p with adaptive playback on some Android TV
    // devices (HEVC only).
    blacklistedAdaptivePlaybackPrefixes.add("omx.mtk");

    constrainedHighProfilePrefixes = new LinkedList<>();
    constrainedHighProfilePrefixes.add("omx.intel");
}
static {
whitelistedHevcDecoders = new LinkedList<>();
// Allow software HEVC decoding in the official AOSP emulator
if (Build.HARDWARE.equals("ranchu")) {
whitelistedHevcDecoders.add("omx.google");
}
// Exynos seems to be the only HEVC decoder that works reliably
whitelistedHevcDecoders.add("omx.exynos");
// On Darcy (Shield 2017), HEVC runs fine with no fixups required. For some reason,
// other X1 implementations require bitstream fixups. However, since numReferenceFrames
// has been supported in GFE since late 2017, we'll go ahead and enable HEVC for all
// device models.
//
// NVIDIA does partial HEVC acceleration on the Shield Tablet. I don't know
// whether the performance is good enough to use for streaming, but they're
// using the same omx.nvidia.h265.decode name as the Shield TV which has a
// fully accelerated HEVC pipeline. AFAIK, the only K1 devices with this
// partially accelerated HEVC decoder are the Shield Tablet and Xiaomi MiPad,
// so I'll check for those here.
//
// In case there are some that I missed, I will also exclude pre-Oreo OSes since
// only Shield ATV got an Oreo update and any newer Tegra devices will not ship
// with an old OS like Nougat.
if (!Build.DEVICE.equalsIgnoreCase("shieldtablet") &&
!Build.DEVICE.equalsIgnoreCase("mocha") &&
Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
whitelistedHevcDecoders.add("omx.nvidia");
}
// Plot twist: On newer Sony devices (BRAVIA_ATV2, BRAVIA_ATV3_4K, BRAVIA_UR1_4K) the H.264 decoder crashes
// on several configurations (> 60 FPS and 1440p) that work with HEVC, so we'll whitelist those devices for HEVC.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O && Build.DEVICE.startsWith("BRAVIA_")) {
whitelistedHevcDecoders.add("omx.mtk");
}
// Amlogic requires 1 reference frame for HEVC to avoid hanging. Since it's been years
// since GFE added support for maxNumReferenceFrames, we'll just enable all Amlogic SoCs
// running Android 9 or later.
//
// NB: We don't do this on Sabrina (GCWGTV) because H.264 is lower latency when we use
// vendor.low-latency.enable. We will still use HEVC if decoderCanMeetPerformancePointWithHevcAndNotAvc()
// determines it's the only way to meet the performance requirements.
//
// With the Android 12 update, Sabrina now uses HEVC (with RFI) based upon FEATURE_LowLatency
// support, which provides equivalent latency to H.264 now.
//
// FIXME: Should we do this for all Amlogic S905X SoCs?
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P && !Build.DEVICE.equalsIgnoreCase("sabrina")) {
whitelistedHevcDecoders.add("omx.amlogic");
}
// Realtek SoCs are used inside many Android TV devices and can only do 4K60 with HEVC.
// We'll enable those HEVC decoders by default and see if anything breaks.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) {
whitelistedHevcDecoders.add("omx.realtek");
}
// These theoretically have good HEVC decoding capabilities (potentially better than
// their AVC decoders), but haven't been tested enough
//whitelistedHevcDecoders.add("omx.rk");
// Let's see if HEVC decoders are finally stable with C2
whitelistedHevcDecoders.add("c2.");
// Based on GPU attributes queried at runtime, the omx.qcom/c2.qti prefix will be added
// during initialization to avoid SoCs with broken HEVC decoders.
}
static {
    // Software decoders get 4 slices per frame to enable slice multithreading.
    useFourSlicesPrefixes = new LinkedList<>();
    Collections.addAll(useFourSlicesPrefixes,
            "omx.google", "AVCDecoder", "omx.ffmpeg", "c2.android");
    // Old Qualcomm decoders are appended at runtime in initialize().
}
static {
    // Vendor-specific MediaFormat keys known to enable low latency decoding.
    // Keep in sync with setDecoderLowLatencyOptions().
    knownVendorLowLatencyOptions = new LinkedList<>();
    Collections.addAll(knownVendorLowLatencyOptions,
            "vendor.qti-ext-dec-low-latency.enable",
            "vendor.hisi-ext-low-latency-video-dec.video-scene-for-low-latency-req",
            "vendor.rtc-ext-dec-low-latency.enable",
            "vendor.low-latency.enable");
}
static {
    qualcommDecoderPrefixes = new LinkedList<>();
    Collections.addAll(qualcommDecoderPrefixes, "omx.qcom", "c2.qti");
}
static {
    kirinDecoderPrefixes = new LinkedList<>();
    Collections.addAll(kirinDecoderPrefixes,
            "omx.hisi",
            "c2.hisi" /* unconfirmed */);
}
static {
    exynosDecoderPrefixes = new LinkedList<>();
    Collections.addAll(exynosDecoderPrefixes, "omx.exynos", "c2.exynos");
}
static {
    amlogicDecoderPrefixes = new LinkedList<>();
    Collections.addAll(amlogicDecoderPrefixes,
            "omx.amlogic",
            "c2.amlogic" /* unconfirmed */);
}
/** Returns true when the GL renderer string identifies a PowerVR GPU. */
private static boolean isPowerVR(String glRenderer) {
    String renderer = glRenderer.toLowerCase();
    return renderer.contains("powervr");
}
// Compiled once instead of on every call (Pattern.compile is not cheap).
private static final Pattern ADRENO_MODEL_NUMBER_PATTERN = Pattern.compile("(.*)([0-9]{3})(.*)");

/**
 * Extracts the 3-digit Adreno model number from a GL renderer string.
 *
 * @param glRenderer GL_RENDERER string reported by the GPU driver
 * @return the model number as a string, or null when this is not an Adreno GPU
 *         or no 3-digit model number is present
 */
private static String getAdrenoVersionString(String glRenderer) {
    // Locale.ROOT avoids locale-dependent lowercasing (e.g. Turkish dotless I)
    // breaking the "adreno" substring match.
    glRenderer = glRenderer.toLowerCase(Locale.ROOT).trim();
    if (!glRenderer.contains("adreno")) {
        return null;
    }
    Matcher matcher = ADRENO_MODEL_NUMBER_PATTERN.matcher(glRenderer);
    if (!matcher.matches()) {
        return null;
    }
    String modelNumber = matcher.group(2);
    Log.i(TAG, "Found Adreno GPU: "+modelNumber);
    return modelNumber;
}
/**
 * True when the GPU is an Adreno with a zero in the tens place (x0x),
 * which identifies low-end Snapdragon SoCs.
 */
private static boolean isLowEndSnapdragonRenderer(String glRenderer) {
    String model = getAdrenoVersionString(glRenderer);
    // Non-Adreno GPUs are never classified as low-end Snapdragon.
    return model != null && model.charAt(1) == '0';
}
/** Adreno model number as an int, or -1 when the GPU is not an Adreno part. */
private static int getAdrenoRendererModelNumber(String glRenderer) {
    String model = getAdrenoVersionString(glRenderer);
    return (model == null) ? -1 : Integer.parseInt(model);
}
/**
 * True for Adreno 4xx and newer, which support GLES 3.1.
 *
 * This checks the GPU model number rather than the reported GLES version
 * because some broken devices report only GLES 3.0 even though the GPU is an
 * Adreno 4xx part (e.g. Huawei Honor 5x, Snapdragon 616 / Adreno 405).
 */
private static boolean isGLES31SnapdragonRenderer(String glRenderer) {
    int model = getAdrenoRendererModelNumber(glRenderer);
    return model >= 400;
}
/**
 * One-time runtime initialization: augments the static decoder quirk lists with
 * entries that depend on the device model, OS version and GPU (GL renderer
 * string). Subsequent calls are no-ops.
 *
 * @param context    used to query package features and system services
 * @param glRenderer GL_RENDERER string reported by the device GPU
 */
public static void initialize(Context context, String glRenderer) {
if (initialized) {
return;
}
// Older Sony ATVs (SVP-DTV15) have broken MediaTek codecs (decoder hangs after rendering the first frame).
// I know the Fire TV 2 and 3 works, so I'll whitelist Amazon devices which seem to actually be tested.
// We still have to check Build.MANUFACTURER to catch Amazon Fire tablets.
if (context.getPackageManager().hasSystemFeature("amazon.hardware.fire_tv") ||
Build.MANUFACTURER.equalsIgnoreCase("Amazon")) {
// HEVC and RFI have been confirmed working on Fire TV 2, Fire TV Stick 2, Fire TV 4K Max,
// Fire HD 8 2020, and Fire HD 8 2022 models.
//
// This is probably a good enough sample to conclude that all MediaTek Fire OS devices
// are likely to be okay.
whitelistedHevcDecoders.add("omx.mtk");
refFrameInvalidationHevcPrefixes.add("omx.mtk");
refFrameInvalidationHevcPrefixes.add("c2.mtk");
// This requires setting vdec-lowlatency on the Fire TV 3, otherwise the decoder
// never produces any output frames. See comment above for details on why we only
// do this for Fire TV devices.
whitelistedHevcDecoders.add("omx.amlogic");
// Fire TV 3 seems to produce random artifacts on HEVC streams after packet loss.
// Enabling RFI turns these artifacts into full decoder output hangs, so let's not enable
// that for Fire OS 6 Amlogic devices. We will leave HEVC enabled because that's the only
// way these devices can hit 4K. Hopefully this is just a problem with the BSP used in
// the Fire OS 6 Amlogic devices, so we will leave this enabled for Fire OS 7+.
//
// Apart from a few TV models, the main Amlogic-based Fire TV devices are the Fire TV
// Cubes and Fire TV 3. This check will exclude the Fire TV 3 and Fire TV Cube 1, but
// allow the newer Fire TV Cubes to use HEVC RFI.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
refFrameInvalidationHevcPrefixes.add("omx.amlogic");
refFrameInvalidationHevcPrefixes.add("c2.amlogic");
}
}
// GPU-specific quirks below depend on the reported GLES version and renderer string.
ActivityManager activityManager =
(ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
ConfigurationInfo configInfo = activityManager.getDeviceConfigurationInfo();
if (configInfo.reqGlEsVersion != ConfigurationInfo.GL_ES_VERSION_UNDEFINED) {
Log.i(TAG, "OpenGL ES version: "+configInfo.reqGlEsVersion);
isLowEndSnapdragon = isLowEndSnapdragonRenderer(glRenderer);
isAdreno620 = getAdrenoRendererModelNumber(glRenderer) == 620;
// Tegra K1 and later can do reference frame invalidation properly
if (configInfo.reqGlEsVersion >= 0x30000) {
Log.i(TAG, "Added omx.nvidia/c2.nvidia to reference frame invalidation support list");
refFrameInvalidationAvcPrefixes.add("omx.nvidia");
// Exclude HEVC RFI on Pixel C and Tegra devices prior to Android 11. Misbehaving RFI
// on these devices can cause hundreds of milliseconds of latency, so it's not worth
// using it unless we're absolutely sure that it will not cause increased latency.
if (!Build.DEVICE.equalsIgnoreCase("dragon") && Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
refFrameInvalidationHevcPrefixes.add("omx.nvidia");
}
refFrameInvalidationAvcPrefixes.add("c2.nvidia"); // Unconfirmed
refFrameInvalidationHevcPrefixes.add("c2.nvidia"); // Unconfirmed
Log.i(TAG, "Added omx.qcom/c2.qti to reference frame invalidation support list");
refFrameInvalidationAvcPrefixes.add("omx.qcom");
refFrameInvalidationHevcPrefixes.add("omx.qcom");
refFrameInvalidationAvcPrefixes.add("c2.qti");
refFrameInvalidationHevcPrefixes.add("c2.qti");
}
// Qualcomm's early HEVC decoders break hard on our HEVC stream. The best check to
// tell the good from the bad decoders are the generation of Adreno GPU included:
// 3xx - bad
// 4xx - good
//
// The "good" GPUs support GLES 3.1, but we can't just check that directly
// (see comment on isGLES31SnapdragonRenderer).
//
if (isGLES31SnapdragonRenderer(glRenderer)) {
Log.i(TAG, "Added omx.qcom/c2.qti to HEVC decoders based on GLES 3.1+ support");
whitelistedHevcDecoders.add("omx.qcom");
whitelistedHevcDecoders.add("c2.qti");
}
else {
blacklistedDecoderPrefixes.add("OMX.qcom.video.decoder.hevc");
// These older decoders need 4 slices per frame for best performance
useFourSlicesPrefixes.add("omx.qcom");
}
// Older MediaTek SoCs have issues with HEVC rendering but the newer chips with
// PowerVR GPUs have good HEVC support.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N && isPowerVR(glRenderer)) {
Log.i(TAG, "Added omx.mtk to HEVC decoders based on PowerVR GPU");
whitelistedHevcDecoders.add("omx.mtk");
// This SoC (MT8176 in GPD XD+) supports AVC RFI too, but the maxNumReferenceFrames setting
// required to make it work adds a huge amount of latency. However, RFI on HEVC causes
// decoder hangs on the newer GE8100, GE8300, and GE8320 GPUs, so we limit it to the
// Series6XT GPUs where we know it works.
if (glRenderer.contains("GX6")) {
Log.i(TAG, "Added omx.mtk/c2.mtk to RFI list for HEVC");
refFrameInvalidationHevcPrefixes.add("omx.mtk");
refFrameInvalidationHevcPrefixes.add("c2.mtk");
}
}
}
initialized = true;
}
/**
 * Returns true when decoderName starts (case-insensitively) with any prefix in
 * the list.
 *
 * Fixed: the parameter was a raw {@code List}, which makes the typed for-each
 * below a compile error; restored {@code List<String>}. Also switched to
 * {@link String#regionMatches} to avoid allocating a substring per comparison.
 *
 * @throws IllegalStateException if initialize() has not been called yet
 */
private static boolean isDecoderInList(List<String> decoderList, String decoderName) {
    if (!initialized) {
        throw new IllegalStateException("MediaCodecHelper must be initialized before use");
    }
    for (String badPrefix : decoderList) {
        // regionMatches returns false when decoderName is shorter than the prefix,
        // matching the old explicit length check.
        if (decoderName.regionMatches(true, 0, badPrefix, 0, badPrefix.length())) {
            return true;
        }
    }
    return false;
}
/**
 * Checks whether the decoder advertises the official FEATURE_LowLatency
 * capability (available on Android 11+ only).
 */
private static boolean decoderSupportsAndroidRLowLatency(MediaCodecInfo decoderInfo, String mimeType) {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.R) {
        return false;
    }
    try {
        CodecCapabilities caps = decoderInfo.getCapabilitiesForType(mimeType);
        if (caps.isFeatureSupported(CodecCapabilities.FEATURE_LowLatency)) {
            Log.i(TAG, "Low latency decoding mode supported (FEATURE_LowLatency)");
            return true;
        }
    } catch (Exception e) {
        // Tolerate buggy codecs that throw from getCapabilitiesForType().
        e.printStackTrace();
    }
    return false;
}
/**
 * Probes (Android 12+) whether the decoder exposes any vendor parameter we
 * know enables low latency decoding. Creates and releases a real codec
 * instance, so it is not free to call.
 */
private static boolean decoderSupportsKnownVendorLowLatencyOption(String decoderName) {
    // Vendor parameters can only be probed on Android 12 and above.
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.S) {
        return false;
    }
    MediaCodec testCodec = null;
    try {
        // Supported options can only be queried from an actual codec instance.
        testCodec = MediaCodec.createByCodecName(decoderName);
        for (String supportedOption : testCodec.getSupportedVendorParameters()) {
            for (String knownOption : knownVendorLowLatencyOptions) {
                if (supportedOption.equalsIgnoreCase(knownOption)) {
                    Log.i(TAG, decoderName + " supports known low latency option: " + supportedOption);
                    return true;
                }
            }
        }
    } catch (Exception e) {
        // Tolerate buggy codecs
        e.printStackTrace();
    } finally {
        // Always release the probe instance, including on the early return above.
        if (testCodec != null) {
            testCodec.release();
        }
    }
    return false;
}
/**
 * Whether KEY_OPERATING_RATE should be maxed out for this decoder to reduce
 * latency. Limited to Qualcomm decoders (where the gain is significant, e.g.
 * Pixel 2) and excludes Adreno 620 — setting it there crashes reliably on
 * Snapdragon 765G devices (Xiaomi Mi 10 Lite 5G, Redmi K30i 5G, Android 10).
 * We deliberately avoid KEY_PRIORITY 0 (realtime) here, which would crash the
 * decoder if it cannot satisfy the ludicrous operating rate.
 */
private static boolean decoderSupportsMaxOperatingRate(String decoderName) {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
        return false;
    }
    return isDecoderInList(qualcommDecoderPrefixes, decoderName) && !isAdreno620;
}
/**
 * Applies low latency options to the given MediaFormat, escalating from most to
 * least risky as tryNumber increases. The caller retries configure() with an
 * incremented tryNumber until configuration succeeds.
 *
 * @param videoFormat format mutated in place with low latency keys
 * @param decoderInfo decoder the format will be used with
 * @param tryNumber   0-based attempt counter; higher values set fewer, safer options
 * @return true when at least one new option was set for this try number
 */
public static boolean setDecoderLowLatencyOptions(MediaFormat videoFormat, MediaCodecInfo decoderInfo, int tryNumber) {
// Options here should be tried in the order of most to least risky. The decoder will use
// the first MediaFormat that doesn't fail in configure().
boolean setNewOption = false;
if (tryNumber < 1) {
// Official Android 11+ low latency option (KEY_LOW_LATENCY).
videoFormat.setInteger("low-latency", 1);
setNewOption = true;
// If this decoder officially supports FEATURE_LowLatency, we will just use that alone
// for try 0. Otherwise, we'll include it as best effort with other options.
if (decoderSupportsAndroidRLowLatency(decoderInfo, videoFormat.getString(MediaFormat.KEY_MIME))) {
return true;
}
}
if (tryNumber < 2 &&
(!Build.MANUFACTURER.equalsIgnoreCase("xiaomi") || Build.VERSION.SDK_INT > Build.VERSION_CODES.M)) {
// MediaTek decoders don't use vendor-defined keys for low latency mode. Instead, they have a modified
// version of AOSP's ACodec.cpp which supports the "vdec-lowlatency" option. This option is passed down
// to the decoder as OMX.MTK.index.param.video.LowLatencyDecode.
//
// This option is also plumbed for Amazon Amlogic-based devices like the Fire TV 3. Not only does it
// reduce latency on Amlogic, it fixes the HEVC bug that causes the decoder to not output any frames.
// Unfortunately, it does the exact opposite for the Xiaomi MITV4-ANSM0, breaking it in the way that
// Fire TV was broken prior to vdec-lowlatency :(
//
// On Fire TV 3, vdec-lowlatency is translated to OMX.amazon.fireos.index.video.lowLatencyDecode.
//
// https://github.com/yuan1617/Framwork/blob/master/frameworks/av/media/libstagefright/ACodec.cpp
// https://github.com/iykex/vendor_mediatek_proprietary_hardware/blob/master/libomx/video/MtkOmxVdecEx/MtkOmxVdecEx.h
videoFormat.setInteger("vdec-lowlatency", 1);
setNewOption = true;
}
if (tryNumber < 3) {
// Prefer maxing out the operating rate where it's known safe; otherwise fall
// back to realtime priority on Marshmallow+.
if (MediaCodecHelper.decoderSupportsMaxOperatingRate(decoderInfo.getName())) {
videoFormat.setInteger(MediaFormat.KEY_OPERATING_RATE, Short.MAX_VALUE);
setNewOption = true;
}
else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
videoFormat.setInteger(MediaFormat.KEY_PRIORITY, 0);
setNewOption = true;
}
}
// MediaCodec supports vendor-defined format keys using the "vendor.." syntax.
// These allow access to functionality that is not exposed through documented MediaFormat.KEY_* values.
// https://cs.android.com/android/platform/superproject/+/master:hardware/qcom/sdm845/media/mm-video-v4l2/vidc/common/inc/vidc_vendor_extensions.h;l=67
//
// MediaCodec vendor extension support was introduced in Android 8.0:
// https://cs.android.com/android/_/android/platform/frameworks/av/+/01c10f8cdcd58d1e7025f426a72e6e75ba5d7fc2
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
// Try vendor-specific low latency options
//
// NOTE: Update knownVendorLowLatencyOptions if you modify this code!
if (isDecoderInList(qualcommDecoderPrefixes, decoderInfo.getName())) {
// Examples of Qualcomm's vendor extensions for Snapdragon 845:
// https://cs.android.com/android/platform/superproject/+/master:hardware/qcom/sdm845/media/mm-video-v4l2/vidc/vdec/src/omx_vdec_extensions.hpp
// https://cs.android.com/android/_/android/platform/hardware/qcom/sm8150/media/+/0621ceb1c1b19564999db8293574a0e12952ff6c
//
// We will first try both, then try vendor.qti-ext-dec-low-latency.enable alone if that fails
if (tryNumber < 4) {
videoFormat.setInteger("vendor.qti-ext-dec-picture-order.enable", 1);
setNewOption = true;
}
if (tryNumber < 5) {
videoFormat.setInteger("vendor.qti-ext-dec-low-latency.enable", 1);
setNewOption = true;
}
}
else if (isDecoderInList(kirinDecoderPrefixes, decoderInfo.getName())) {
if (tryNumber < 4) {
// Kirin low latency options
// https://developer.huawei.com/consumer/cn/forum/topic/0202325564295980115
videoFormat.setInteger("vendor.hisi-ext-low-latency-video-dec.video-scene-for-low-latency-req", 1);
videoFormat.setInteger("vendor.hisi-ext-low-latency-video-dec.video-scene-for-low-latency-rdy", -1);
setNewOption = true;
}
}
else if (isDecoderInList(exynosDecoderPrefixes, decoderInfo.getName())) {
if (tryNumber < 4) {
// Exynos low latency option for H.264 decoder
videoFormat.setInteger("vendor.rtc-ext-dec-low-latency.enable", 1);
setNewOption = true;
}
}
else if (isDecoderInList(amlogicDecoderPrefixes, decoderInfo.getName())) {
if (tryNumber < 4) {
// Amlogic low latency vendor extension
// https://github.com/codewalkerster/android_vendor_amlogic_common_prebuilt_libstagefrighthw/commit/41fefc4e035c476d58491324a5fe7666bfc2989e
videoFormat.setInteger("vendor.low-latency.enable", 1);
setNewOption = true;
}
}
}
return setNewOption;
}
/**
 * Adaptive playback support implies new CSD can be submitted together with a
 * keyframe (a "fused" IDR frame).
 */
public static boolean decoderSupportsFusedIdrFrame(MediaCodecInfo decoderInfo, String mimeType) {
    try {
        boolean adaptive = decoderInfo.getCapabilitiesForType(mimeType)
                .isFeatureSupported(CodecCapabilities.FEATURE_AdaptivePlayback);
        if (adaptive) {
            Log.i(TAG, "Decoder supports fused IDR frames (FEATURE_AdaptivePlayback)");
            return true;
        }
    } catch (Exception e) {
        // Tolerate buggy codecs
        e.printStackTrace();
    }
    return false;
}
/**
 * Whether adaptive playback may be enabled for this decoder. Blacklisted
 * decoder prefixes always return false regardless of reported capabilities.
 */
public static boolean decoderSupportsAdaptivePlayback(MediaCodecInfo decoderInfo, String mimeType) {
    if (isDecoderInList(blacklistedAdaptivePlaybackPrefixes, decoderInfo.getName())) {
        Log.i(TAG, "Decoder blacklisted for adaptive playback");
        return false;
    }
    try {
        boolean supported = decoderInfo.getCapabilitiesForType(mimeType)
                .isFeatureSupported(CodecCapabilities.FEATURE_AdaptivePlayback);
        if (supported) {
            // This will make getCapabilities() return that adaptive playback is supported
            Log.i(TAG, "Adaptive playback supported (FEATURE_AdaptivePlayback)");
            return true;
        }
    } catch (Exception e) {
        // Tolerate buggy codecs
        e.printStackTrace();
    }
    return false;
}
/**
 * Returns true if the named decoder should be given an H.264 Constrained High
 * profile SPS rather than plain High profile (prefix-list-based quirk lookup).
 */
public static boolean decoderNeedsConstrainedHighProfile(String decoderName) {
    return isDecoderInList(constrainedHighProfilePrefixes, decoderName);
}
/**
 * Returns true if the named decoder is on the direct-submit whitelist.
 * Exynos 4 devices are always excluded, regardless of the decoder name.
 */
public static boolean decoderCanDirectSubmit(String decoderName) {
    return isDecoderInList(directSubmitPrefixes, decoderName) && !isExynos4Device();
}
/**
 * Returns true if the named decoder requires bitstream restrictions to be
 * present in the SPS (prefix-list-based quirk lookup).
 */
public static boolean decoderNeedsSpsBitstreamRestrictions(String decoderName) {
    return isDecoderInList(spsFixupBitstreamFixupDecoderPrefixes, decoderName);
}
/**
 * Returns true if the named decoder needs the Baseline profile SPS hack
 * (prefix-list-based quirk lookup).
 */
public static boolean decoderNeedsBaselineSpsHack(String decoderName) {
    return isDecoderInList(baselineProfileHackPrefixes, decoderName);
}
/**
 * Returns the number of slices per frame that works best with this decoder.
 *
 * 4 slices per frame reduces decoding latency on older Qualcomm devices,
 * while 1 slice per frame produces the optimal encoding efficiency for
 * everything else.
 *
 * @param decoderName full decoder name from MediaCodecInfo
 * @return the recommended slice count (4 or 1)
 */
public static byte getDecoderOptimalSlicesPerFrame(String decoderName) {
    return isDecoderInList(useFourSlicesPrefixes, decoderName) ? (byte) 4 : (byte) 1;
}
/**
 * Determines whether H.264 reference frame invalidation (RFI) may be used
 * with this decoder at the given video height.
 *
 * @param decoderName full decoder name from MediaCodecInfo
 * @param videoHeight stream height in pixels
 * @return true if AVC RFI may be enabled
 */
public static boolean decoderSupportsRefFrameInvalidationAvc(String decoderName, int videoHeight) {
    // Reference frame invalidation is broken on low-end Snapdragon SoCs at 1080p.
    if (isLowEndSnapdragon && videoHeight > 720) {
        return false;
    }
    // These devices seem to crash constantly at 720p, so try disabling
    // RFI to see if we can get that under control.
    switch (Build.DEVICE) {
        case "b3":
        case "b5":
            return false;
        default:
            break;
    }
    return isDecoderInList(refFrameInvalidationAvcPrefixes, decoderName);
}
/**
 * Determines whether HEVC reference frame invalidation (RFI) may be used.
 *
 * HEVC decoders seem to universally support RFI, but it can have huge latency
 * penalties for some decoders due to the number of reference frames being > 1.
 * Old Amlogic decoders are known to have this problem. Support for
 * FEATURE_LowLatency or a known vendor low-latency option is taken as an
 * indication that the decoder can handle HEVC RFI without excessively
 * buffering frames; otherwise a prefix whitelist is consulted.
 *
 * @param decoderInfo decoder to query
 * @return true if HEVC RFI may be enabled
 */
public static boolean decoderSupportsRefFrameInvalidationHevc(MediaCodecInfo decoderInfo) {
    String decoderName = decoderInfo.getName();
    boolean lowLatency = decoderSupportsAndroidRLowLatency(decoderInfo, "video/hevc")
            || decoderSupportsKnownVendorLowLatencyOption(decoderName);
    if (lowLatency) {
        Log.i(TAG, "Enabling HEVC RFI based on low latency option support");
        return true;
    }
    return isDecoderInList(refFrameInvalidationHevcPrefixes, decoderName);
}
/**
 * Determines whether AV1 reference frame invalidation (RFI) may be used,
 * applying the same low-latency heuristics as the HEVC variant.
 *
 * @param decoderInfo decoder to query
 * @return true if AV1 RFI may be enabled
 */
public static boolean decoderSupportsRefFrameInvalidationAv1(MediaCodecInfo decoderInfo) {
    boolean lowLatency = decoderSupportsAndroidRLowLatency(decoderInfo, "video/av01")
            || decoderSupportsKnownVendorLowLatencyOption(decoderInfo.getName());
    if (lowLatency) {
        Log.i(TAG, "Enabling AV1 RFI based on low latency option support");
        return true;
    }
    // No whitelist fallback for AV1 yet
    return false;
}
/**
 * Decides whether HEVC should be preferred on this decoder.
 *
 * Software decoders are always rejected. Hardware decoders are accepted when
 * the device reports a sufficient media performance class, when the decoder
 * supports FEATURE_LowLatency, or when it appears in the known-good whitelist.
 *
 * @param decoderInfo decoder to query
 * @return true if HEVC may be used with this decoder
 */
public static boolean decoderIsWhitelistedForHevc(MediaCodecInfo decoderInfo) {
    String decoderName = decoderInfo.getName();
    //
    // Software decoders are terrible and we never want to use them.
    // We want to catch decoders like:
    // OMX.qcom.video.decoder.hevcswvdec
    // OMX.SEC.hevc.sw.dec
    //
    boolean software = decoderName.contains("sw");
    if (!software && Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
        software = !decoderInfo.isHardwareAccelerated() || decoderInfo.isSoftwareOnly();
    }
    if (software) {
        Log.i(TAG, "Disallowing HEVC on software decoder: " + decoderName);
        return false;
    }
    // If this device is media performance class 12 or higher, we will assume any hardware
    // HEVC decoder present is fast and modern enough for streaming.
    //
    // [5.3/H-1-1] MUST NOT drop more than 2 frames in 10 seconds (i.e less than 0.333 percent frame drop) for a 1080p 60 fps video session under load.
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
        int perfClass = Build.VERSION.MEDIA_PERFORMANCE_CLASS;
        Log.i(TAG, "Media performance class: " + perfClass);
        if (perfClass >= Build.VERSION_CODES.S) {
            Log.i(TAG, "Allowing HEVC based on media performance class");
            return true;
        }
    }
    // If the decoder supports FEATURE_LowLatency, we will assume it is fast and modern enough
    // to be preferable for streaming over H.264 decoders.
    if (decoderSupportsAndroidRLowLatency(decoderInfo, "video/hevc")) {
        Log.i(TAG, "Allowing HEVC based on FEATURE_LowLatency support");
        return true;
    }
    // Otherwise, we use our list of known working HEVC decoders
    return isDecoderInList(whitelistedHevcDecoders, decoderName);
}
/**
 * Decides whether AV1 should be used on this decoder.
 *
 * Currently always returns false except to log why a decoder was rejected;
 * no AV1 decoders have been validated yet.
 *
 * @param decoderInfo decoder to query
 * @return true if AV1 may be used with this decoder (currently never)
 */
public static boolean isDecoderWhitelistedForAv1(MediaCodecInfo decoderInfo) {
    // Google didn't have official support for AV1 (or more importantly, a CTS test) until
    // Android 10, so don't use any decoder before then.
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) {
        return false;
    }
    String decoderName = decoderInfo.getName();
    //
    // Software decoders are terrible and we never want to use them.
    // We want to catch decoders like:
    // OMX.qcom.video.decoder.hevcswvdec
    // OMX.SEC.hevc.sw.dec
    //
    // isHardwareAccelerated()/isSoftwareOnly() are safe here: we already
    // returned above for anything below Android Q.
    if (decoderName.contains("sw")
            || !decoderInfo.isHardwareAccelerated()
            || decoderInfo.isSoftwareOnly()) {
        Log.i(TAG, "Disallowing AV1 on software decoder: " + decoderName);
        return false;
    }
    // TODO: Test some AV1 decoders
    return false;
}
/**
 * Returns all codecs known to MediaCodecList (decoders and encoders).
 * Callers are responsible for filtering out encoders and aliases.
 *
 * @return a mutable list of every MediaCodecInfo on the device
 */
@SuppressWarnings("deprecation")
@SuppressLint("NewApi")
private static LinkedList<MediaCodecInfo> getMediaCodecList() {
    // Fix: parameterize the list instead of using the raw LinkedList type,
    // which generated unchecked warnings in every caller's for-each loop.
    LinkedList<MediaCodecInfo> infoList = new LinkedList<>();
    MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
    Collections.addAll(infoList, mcl.getCodecInfos());
    return infoList;
}
/**
 * Produces a human-readable dump of every decoder on the device, listing
 * its supported MIME types and profile/level pairs.
 *
 * @return one line per decoder/type/profile, tab-indented
 * @throws Exception if a buggy codec throws from getCapabilitiesForType()
 */
@SuppressWarnings("RedundantThrows")
public static String dumpDecoders() throws Exception {
    // Fix: accumulate into a StringBuilder instead of repeated String
    // concatenation, which was O(n^2) across the nested loops.
    StringBuilder str = new StringBuilder();
    for (MediaCodecInfo codecInfo : getMediaCodecList()) {
        // Skip encoders
        if (codecInfo.isEncoder()) {
            continue;
        }
        str.append("Decoder: ").append(codecInfo.getName()).append("\n");
        for (String type : codecInfo.getSupportedTypes()) {
            str.append("\t").append(type).append("\n");
            CodecCapabilities caps = codecInfo.getCapabilitiesForType(type);
            for (CodecProfileLevel profile : caps.profileLevels) {
                str.append("\t\t").append(profile.profile).append(" ").append(profile.level).append("\n");
            }
        }
    }
    return str.toString();
}
/**
 * Searches for a decoder from the hardcoded preferred decoder list.
 *
 * This is a different algorithm than the other findXXXDecoder functions,
 * because we want to evaluate the decoders in our list's order
 * rather than MediaCodecList's order.
 *
 * @return the first matching preferred decoder, or null if none is present
 * @throws IllegalStateException if called before initialization
 */
private static MediaCodecInfo findPreferredDecoder() {
    if (!initialized) {
        throw new IllegalStateException("MediaCodecHelper must be initialized before use");
    }
    // Fix: query the codec list once instead of rebuilding it inside the
    // outer loop — the list is invariant across iterations.
    LinkedList<MediaCodecInfo> codecInfos = getMediaCodecList();
    for (String preferredDecoder : preferredDecoders) {
        for (MediaCodecInfo codecInfo : codecInfos) {
            // Skip encoders
            if (codecInfo.isEncoder()) {
                continue;
            }
            // Check for preferred decoders
            if (preferredDecoder.equalsIgnoreCase(codecInfo.getName())) {
                Log.i(TAG, "Preferred decoder choice is "+codecInfo.getName());
                return codecInfo;
            }
        }
    }
    return null;
}
/**
 * Checks whether a codec must be avoided, either because it is
 * software-only (Android Q+) or explicitly blacklisted by name prefix.
 *
 * @param codecInfo codec to check
 * @return true if the codec must not be used
 */
private static boolean isCodecBlacklisted(MediaCodecInfo codecInfo) {
    String codecName = codecInfo.getName();
    // Use the new isSoftwareOnly() function on Android Q
    boolean softwareOnly = Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q
            && !SHOULD_BYPASS_SOFTWARE_BLOCK
            && codecInfo.isSoftwareOnly();
    if (softwareOnly) {
        Log.i(TAG, "Skipping software-only decoder: " + codecName);
        return true;
    }
    // Check for explicitly blacklisted decoders
    if (isDecoderInList(blacklistedDecoderPrefixes, codecName)) {
        Log.i(TAG, "Skipping blacklisted decoder: " + codecName);
        return true;
    }
    return false;
}
/**
 * Returns the first non-blacklisted decoder (in MediaCodecList order) that
 * advertises support for the given MIME type.
 *
 * @param mimeType video MIME type to match (case-insensitive)
 * @return the first matching decoder, or null if none exists
 */
public static MediaCodecInfo findFirstDecoder(String mimeType) {
    for (MediaCodecInfo codecInfo : getMediaCodecList()) {
        // Encoders are never candidates
        if (codecInfo.isEncoder()) {
            continue;
        }
        // Skip compatibility aliases on Q+
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q && codecInfo.isAlias()) {
            continue;
        }
        // Find a decoder that supports the specified video format
        for (String mime : codecInfo.getSupportedTypes()) {
            if (!mime.equalsIgnoreCase(mimeType)) {
                continue;
            }
            // Skip blacklisted codecs
            if (isCodecBlacklisted(codecInfo)) {
                continue;
            }
            Log.i(TAG, "First decoder choice is " + codecInfo.getName());
            return codecInfo;
        }
    }
    return null;
}
/**
 * Picks a decoder for the given MIME type, preferring (in order): a decoder
 * from the preferred list, a known-safe decoder matching the required
 * profile, and finally the first decoder found at all.
 *
 * @param mimeType video MIME type to match
 * @param requiredProfile required CodecProfileLevel profile, or -1 for any
 * @return the chosen decoder, or null if none exists
 */
public static MediaCodecInfo findProbableSafeDecoder(String mimeType, int requiredProfile) {
    // First look for a preferred decoder by name
    MediaCodecInfo preferred = findPreferredDecoder();
    if (preferred != null) {
        return preferred;
    }
    try {
        // If this function completes, it will determine if the decoder is safe
        return findKnownSafeDecoder(mimeType, requiredProfile);
    } catch (Exception e) {
        // Some buggy devices seem to throw exceptions
        // from getCapabilitiesForType() so we'll just assume
        // they're okay and go with the first one we find
        return findFirstDecoder(mimeType);
    }
}
// We declare this method as explicitly throwing Exception
// since some bad decoders can throw IllegalArgumentExceptions unexpectedly
// and we want to be sure all callers are handling this possibility
/**
 * Finds a decoder for the given MIME type using a two-round search:
 * round 1 accepts only decoders with FEATURE_LowLatency, round 2 accepts any
 * matching decoder. If requiredProfile != -1, the decoder must also advertise
 * that profile in its CodecProfileLevel list.
 *
 * @param mimeType video MIME type to match (case-insensitive)
 * @param requiredProfile required CodecProfileLevel profile, or -1 for any
 * @return the chosen decoder, or null if none qualifies
 * @throws Exception if a buggy codec throws from getCapabilitiesForType()
 */
@SuppressWarnings("RedundantThrows")
private static MediaCodecInfo findKnownSafeDecoder(String mimeType, int requiredProfile) throws Exception {
    // Some devices (Exynos devces, at least) have two sets of decoders.
    // The first set of decoders are C2 which do not support FEATURE_LowLatency,
    // but the second set of OMX decoders do support FEATURE_LowLatency. We want
    // to pick the OMX decoders despite the fact that C2 is listed first.
    // On some Qualcomm devices (like Pixel 4), there are separate low latency decoders
    // (like c2.qti.hevc.decoder.low_latency) that advertise FEATURE_LowLatency while
    // the standard ones (like c2.qti.hevc.decoder) do not. Like Exynos, the decoders
    // with FEATURE_LowLatency support are listed after the standard ones.
    for (int i = 0; i < 2; i++) {
        for (MediaCodecInfo codecInfo : getMediaCodecList()) {
            // Skip encoders
            if (codecInfo.isEncoder()) {
                continue;
            }
            // Skip compatibility aliases on Q+
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
                if (codecInfo.isAlias()) {
                    continue;
                }
            }
            // Find a decoder that supports the requested video format
            for (String mime : codecInfo.getSupportedTypes()) {
                if (mime.equalsIgnoreCase(mimeType)) {
                    Log.i(TAG, "Examining decoder capabilities of " + codecInfo.getName() + " (round " + (i + 1) + ")");
                    // Skip blacklisted codecs
                    if (isCodecBlacklisted(codecInfo)) {
                        continue;
                    }
                    // NOTE: capabilities are fetched BEFORE the round-1 skip below
                    // so that a buggy codec throws here (and is handled by the
                    // caller) regardless of which round we are in.
                    CodecCapabilities caps = codecInfo.getCapabilitiesForType(mime);
                    // Round 1 (i == 0) only accepts low-latency decoders; round 2 takes anything.
                    if (i == 0 && !decoderSupportsAndroidRLowLatency(codecInfo, mime)) {
                        Log.i(TAG, "Skipping decoder that lacks FEATURE_LowLatency for round 1");
                        continue;
                    }
                    if (requiredProfile != -1) {
                        for (CodecProfileLevel profile : caps.profileLevels) {
                            if (profile.profile == requiredProfile) {
                                Log.i(TAG, "Decoder " + codecInfo.getName() + " supports required profile");
                                return codecInfo;
                            }
                        }
                        Log.i(TAG, "Decoder " + codecInfo.getName() + " does NOT support required profile");
                    } else {
                        return codecInfo;
                    }
                }
            }
        }
    }
    return null;
}
/**
 * Reads the entire contents of /proc/cpuinfo into a string.
 *
 * @return the full file contents
 * @throws Exception if the file cannot be opened or read
 */
public static String readCpuinfo() throws Exception {
    StringBuilder cpuInfo = new StringBuilder();
    try (final BufferedReader br = new BufferedReader(new FileReader(new File("/proc/cpuinfo")))) {
        // Fix: read in chunks rather than one character per call — the old
        // loop made a method call per character of the file.
        char[] buffer = new char[4096];
        int count;
        while ((count = br.read(buffer)) != -1) {
            cpuInfo.append(buffer, 0, count);
        }
        return cpuInfo.toString();
    }
}
/**
 * Case-insensitive substring test using English locale casing rules
 * (locale-pinned to avoid surprises like Turkish dotted/dotless I).
 */
private static boolean stringContainsIgnoreCase(String string, String substring) {
    final String haystack = string.toLowerCase(Locale.ENGLISH);
    final String needle = substring.toLowerCase(Locale.ENGLISH);
    return haystack.contains(needle);
}
/**
 * Heuristically detects Exynos 4 SoCs by scanning /proc/cpuinfo for known
 * markers and, failing that, looking for an "exynos4" entry under
 * /sys/devices/system. Any I/O failure is logged and treated as "not Exynos 4".
 *
 * @return true if this looks like an Exynos 4 device
 */
public static boolean isExynos4Device() {
    // Heuristic 1: known markers in /proc/cpuinfo
    try {
        String cpuInfo = readCpuinfo();
        // SMDK4xxx is Exynos 4
        if (stringContainsIgnoreCase(cpuInfo, "SMDK4")) {
            Log.i(TAG, "Found SMDK4 in /proc/cpuinfo");
            return true;
        }
        // If we see "Exynos 4" also we'll count it
        if (stringContainsIgnoreCase(cpuInfo, "Exynos 4")) {
            Log.i(TAG, "Found Exynos 4 in /proc/cpuinfo");
            return true;
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    // Heuristic 2: exynos4 entry under /sys/devices/system
    try {
        File[] systemDevices = new File("/sys/devices/system").listFiles();
        if (systemDevices != null) {
            for (File device : systemDevices) {
                if (stringContainsIgnoreCase(device.getName(), "exynos4")) {
                    Log.i(TAG, "Found exynos4 in /sys/devices/system");
                    return true;
                }
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    return false;
}
}
================================================
FILE: settings.gradle
================================================
include ':library-client-rtsp'
include ':app'