Repository: buhe/react-native-pili
Branch: master
Commit: 8914e02cfe15
Files: 51
Total size: 159.2 KB
Directory structure:
gitextract_h14nr57v/
├── .gitignore
├── .npmignore
├── AudioStreaming.js
├── LICENSE
├── Player.js
├── README.md
├── Streaming.js
├── StreamingConst.js
├── android/
│ ├── build.gradle
│ ├── gradle/
│ │ └── wrapper/
│ │ ├── gradle-wrapper.jar
│ │ └── gradle-wrapper.properties
│ ├── gradlew
│ ├── gradlew.bat
│ ├── libs/
│ │ ├── pldroid-media-streaming-2.0.4.jar
│ │ └── pldroid-player-1.3.2.jar
│ ├── proguard-rules.pro
│ └── src/
│ ├── androidTest/
│ │ └── java/
│ │ └── com/
│ │ └── pili/
│ │ └── rnpili/
│ │ └── ApplicationTest.java
│ ├── main/
│ │ ├── AndroidManifest.xml
│ │ └── java/
│ │ └── com/
│ │ └── pili/
│ │ └── rnpili/
│ │ ├── CameraPreviewFrameView.java
│ │ ├── PiliAudioStreamingViewManager.java
│ │ ├── PiliPackage.java
│ │ ├── PiliPlayerViewManager.java
│ │ ├── PiliStreamingViewManager.java
│ │ └── support/
│ │ ├── Config.java
│ │ ├── FocusIndicatorRotateLayout.java
│ │ ├── Jsons.java
│ │ ├── MediaController.java
│ │ ├── Rotatable.java
│ │ └── RotateLayout.java
│ └── test/
│ └── java/
│ └── com/
│ └── pili/
│ └── rnpili/
│ └── ExampleUnitTest.java
├── index.js
├── ios/
│ └── RCTPili/
│ ├── RCTPili/
│ │ ├── Info.plist
│ │ ├── RCTAudioStreaming.h
│ │ ├── RCTAudioStreaming.m
│ │ ├── RCTAudioStreamingManager.h
│ │ ├── RCTAudioStreamingManager.m
│ │ ├── RCTPili.h
│ │ ├── RCTPili.m
│ │ ├── RCTPlayer.h
│ │ ├── RCTPlayer.m
│ │ ├── RCTPlayerManager.h
│ │ ├── RCTPlayerManager.m
│ │ ├── RCTStreaming.h
│ │ ├── RCTStreaming.m
│ │ ├── RCTStreamingManager.h
│ │ ├── RCTStreamingManager.m
│ │ ├── Reachability.h
│ │ └── Reachability.m
│ ├── RCTPili.podspec
│ └── RCTPili.xcodeproj/
│ └── project.pbxproj
└── package.json
================================================
FILE CONTENTS
================================================
================================================
FILE: .gitignore
================================================
# OSX
#
.DS_Store
.vscode
# Xcode
#
build/
*.pbxuser
!default.pbxuser
*.mode1v3
!default.mode1v3
*.mode2v3
!default.mode2v3
*.perspectivev3
!default.perspectivev3
xcuserdata
*.xccheckout
*.moved-aside
DerivedData
*.hmap
*.ipa
*.xcuserstate
project.xcworkspace
# Android/IJ
#
.idea
*.iml
.gradle
local.properties
# node.js
#
node_modules/
npm-debug.log
================================================
FILE: .npmignore
================================================
.*.swp
._*
.DS_Store
.git
.hg
.npmrc
.lock-wscript
.svn
.wafpickle-*
config.gypi
CVS
npm-debug.log
================================================
FILE: AudioStreaming.js
================================================
/**
* Created by buhe on 16/4/29.
*/
import React, {
Component,
PropTypes
} from 'react';
import {
requireNativeComponent,
View,
} from 'react-native';
class AudioStreaming extends Component {
constructor(props, context) {
super(props, context);
this._onReady = this._onReady.bind(this);
this._onConnecting = this._onConnecting.bind(this);
this._onStreaming = this._onStreaming.bind(this);
this._onShutdown = this._onShutdown.bind(this);
this._onIOError = this._onIOError.bind(this);
this._onDisconnected = this._onDisconnected.bind(this);
}
_onReady(event) {
this.props.onReady && this.props.onReady(event.nativeEvent);
}
_onConnecting(event) {
this.props.onConnecting && this.props.onConnecting(event.nativeEvent);
}
_onStreaming(event) {
this.props.onStreaming && this.props.onStreaming(event.nativeEvent);
}
_onShutdown(event) {
this.props.onShutdown && this.props.onShutdown(event.nativeEvent);
}
_onIOError(event) {
this.props.onIOError && this.props.onIOError(event.nativeEvent);
}
_onDisconnected(event) {
this.props.onDisconnected && this.props.onDisconnected(event.nativeEvent);
}
render() {
const nativeProps = Object.assign({}, this.props);
Object.assign(nativeProps, {
onReady: this._onReady,
onConnecting: this._onConnecting,
onStreaming: this._onStreaming,
onShutdown: this._onShutdown,
onIOError: this._onIOError,
onDisconnected: this._onDisconnected,
});
return (
<RCTAudioStreaming
{...nativeProps}
/>
)
}
}
AudioStreaming.propTypes = {
  rtmpURL: PropTypes.string, // publish URL for the audio stream
  muted: PropTypes.bool,
  profile: PropTypes.shape({ // object must match this shape
    audio: PropTypes.shape({
      rate: PropTypes.number.isRequired, // audio sample rate
      bitrate: PropTypes.number.isRequired, // audio bitrate
    }).isRequired,
  }).isRequired,
  started: PropTypes.bool, // true => start streaming, false => stop
  // Native event callbacks; each receives the event's nativeEvent payload.
  onReady: PropTypes.func,
  onConnecting: PropTypes.func,
  onStreaming: PropTypes.func,
  onShutdown: PropTypes.func,
  onIOError: PropTypes.func,
  onDisconnected: PropTypes.func,
  ...View.propTypes,
}

// Bridge to the native view registered as "RCTAudioStreaming" on both platforms.
const RCTAudioStreaming = requireNativeComponent('RCTAudioStreaming', AudioStreaming);

module.exports = AudioStreaming;
================================================
FILE: LICENSE
================================================
The MIT License (MIT)
Copyright (c) 2016 buhe
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
================================================
FILE: Player.js
================================================
/**
* Created by buhe on 16/5/4.
*/
import React, {
Component,
PropTypes
} from 'react';
import {
requireNativeComponent,
View,
} from 'react-native';
class Player extends Component {
constructor(props, context) {
super(props, context);
this._onLoading = this._onLoading.bind(this);
this._onPaused = this._onPaused.bind(this);
this._onShutdown = this._onShutdown.bind(this);
this._onError = this._onError.bind(this);
this._onPlaying = this._onPlaying.bind(this);
}
_onLoading(event) {
this.props.onLoading && this.props.onLoading(event.nativeEvent);
}
_onPaused(event) {
this.props.onPaused && this.props.onPaused(event.nativeEvent);
}
_onShutdown(event) {
this.props.onShutdown && this.props.onShutdown(event.nativeEvent);
}
_onError(event) {
this.props.onPlayerError && this.props.onPlayerError(event.nativeEvent);
}
_onPlaying(event) {
this.props.onPlaying && this.props.onPlaying(event.nativeEvent);
}
render() {
const nativeProps = Object.assign({}, this.props);
Object.assign(nativeProps, {
onLoading: this._onLoading,
onPaused: this._onPaused,
onShutdown: this._onShutdown,
onPlayerError: this._onPlayerError,
onPlaying: this._onPlaying,
});
return (
<RCTPlayer
{...nativeProps}
/>
)
}
}
Player.propTypes = {
  source: PropTypes.shape({ // object must match this shape
    uri: PropTypes.string.isRequired, // media/stream URL (e.g. rtmp://...)
    controller: PropTypes.bool, //Android only
    timeout: PropTypes.number, //Android only
    hardCodec: PropTypes.bool, //Android only
    live: PropTypes.bool, //Android only
  }).isRequired,
  started:PropTypes.bool,
  muted:PropTypes.bool, //iOS only
  aspectRatio: PropTypes.oneOf([0, 1, 2, 3, 4]),
  // Native event callbacks; each receives the event's nativeEvent payload.
  onLoading: PropTypes.func,
  onPaused: PropTypes.func,
  onShutdown: PropTypes.func,
  onPlayerError: PropTypes.func,
  onPlaying: PropTypes.func,
  ...View.propTypes,
}

// Bridge to the native view registered as "RCTPlayer" on both platforms.
const RCTPlayer = requireNativeComponent('RCTPlayer', Player);

module.exports = Player;
================================================
FILE: README.md
================================================
# Deprecated
# Pili Streaming Cloud React Native SDK
## Introduction
### Warning
This library depends on PLMediaStreamingKit (2.1.1) and PLPlayerKit (2.2.4). The latest native SDKs break the original API; support for the latest versions will be added when time permits.
这*可能*是**第一个**在 React Native 上实现全功能的直播 SDK 了,底层基于 [Pili-SDK](https://github.com/pili-engineering),把 iOS 和 Android 的 API 尽量统一。
2.0 版本为了更容易集成第三方 SDK ,对原有的 React Native 项目进行了改造,iOS 的依赖采用了 Cocoapod 进行管理,当然你也可以采用原来的方式,毕竟是可以共存的。具体可以参考 [AirApps](https://github.com/airapps/airapps) 可以查看如何进行集成。项目的简单的例子是 [react-native-pili](https://github.com/airapps/react-native-living)
This may be the **first** React Native to achieve full-featured live SDK, the bottom based on [Pili-SDK] (https://github.com/pili-engineering), the iOS and Android API as unified as possible.
2.0 version In order to more easily integrate third-party SDK, the original React Native project has been modified, iOS rely on the use of Cocoapod management, of course, you can also use the original way, after all, can coexist. Specifically, you can see how to integrate with AirApps (https://github.com/airapps/airapps). A simple example of a project is [react-native-pili] (https://github.com/airapps/react-native-living)
## Installation
```bash
git clone https://github.com/buhe/pili-startkit YourProjectName
cd YourProjectName/js && npm install
cd ../ios && pod install
```
### Javascript
```bash
cd YourProjectName/js
npm start
```
### iOS
1. Open ios/YourProjectName.xcworkspace (这里请注意是打开 .xcworkspace!请确认) (Please note that it opens .xcworkspace! Confirm it does)
2. Just run your project (Cmd+R)
3. 如果是 iOS 10 需要在 info 中额外添加如下权限: (If it is iOS 10 you need to add the following additional auth in info:)
```
<key>NSCameraUsageDescription</key>
<string>cameraDesciption</string>
<key>NSContactsUsageDescription</key>
<string>contactsDesciption</string>
<key>NSMicrophoneUsageDescription</key>
<string>microphoneDesciption</string>
```
ref: [iOS 10](http://www.jianshu.com/p/c212cde86877)
### Android
1. Open android use Android Studio
2. Just run your project
## TODO
- [x] Android Player
- [x] Android Streaming
- [x] iOS Player
- [x] iOS Streaming
- [ ] 美颜和水印支持 (Beauty filter and watermark support)
## Usage
### 1. 推流 (Streaming)
```javascript
<Streaming
rtmpURL={"rtmp://pili-publish.pilitest.qiniucdn.com/pilitest/demo_test?key=6eeee8a82246636e"}
style={{
height:400,
width:400,
}}
zoom={1} //zoom
muted={true} //muted
focus={false} //focus
profile={{ //video and audio profile
  video:{
    fps:30,
    bps:1000 * 1024,
    maxFrameInterval:48
  },
  audio:{
    rate:44100,
    bitrate:96 * 1024
  },
  encodingSize:StreamingConst.encodingSize._480 //required, see StreamingConst.js
}}
started={false} //streaming status
onReady={()=>{}} //onReady event
onConnecting={()=>{}} //onConnecting event
onStreaming={()=>{}} //onStreaming event
onShutdown={()=>{}} //onShutdown event
onIOError={()=>{}} //onIOError event
onDisconnected={()=>{}} //onDisconnected event
/>
```
### 2. 直播播放 (Live player)
```javascript
<Player
source={{
uri:"rtmp://pili-live-rtmp.pilitest.qiniucdn.com/pilitest/xxx",
timeout: 10 * 1000, //live streaming timeout (ms) Android only
live:true, //live streaming ? Android only
hardCodec:false, //hard codec [recommended false] Android only
}}
started={true} //iOS only
muted={false} //iOS only
style={{
height:200,
width:200,
}}
onLoading={()=>{}} //loading from remote or local
onPaused={()=>{}} //pause event
onShutdown={()=>{}} //stopped event
onPlayerError={()=>{}} //error event
onPlaying={()=>{}} //play event
/>
```
## Release Note
## 2.1.1
- [x] Android Player
- [x] Android Streaming
- [x] iOS Player
- [x] iOS Streaming
================================================
FILE: Streaming.js
================================================
/**
* Created by buhe on 16/4/29.
*/
import React, {
Component,
PropTypes
} from 'react';
import {
requireNativeComponent,
View,
} from 'react-native';
import StreamingConst from './StreamingConst';
class Streaming extends Component {
constructor(props, context) {
super(props, context);
this._onReady = this._onReady.bind(this);
this._onConnecting = this._onConnecting.bind(this);
this._onStreaming = this._onStreaming.bind(this);
this._onShutdown = this._onShutdown.bind(this);
this._onIOError = this._onIOError.bind(this);
this._onDisconnected = this._onDisconnected.bind(this);
}
_onReady(event) {
this.props.onReady && this.props.onReady(event.nativeEvent);
}
_onConnecting(event) {
this.props.onConnecting && this.props.onConnecting(event.nativeEvent);
}
_onStreaming(event) {
this.props.onStreaming && this.props.onStreaming(event.nativeEvent);
}
_onShutdown(event) {
this.props.onShutdown && this.props.onShutdown(event.nativeEvent);
}
_onIOError(event) {
this.props.onIOError && this.props.onIOError(event.nativeEvent);
}
_onDisconnected(event) {
this.props.onDisconnected && this.props.onDisconnected(event.nativeEvent);
}
render() {
const nativeProps = Object.assign({}, this.props);
Object.assign(nativeProps, {
onReady: this._onReady,
onConnecting: this._onConnecting,
onStreaming: this._onStreaming,
onShutdown: this._onShutdown,
onIOError: this._onIOError,
onDisconnected: this._onDisconnected,
});
return (
<RCTStreaming
{...nativeProps}
/>
)
}
}
Streaming.propTypes = {
  rtmpURL: PropTypes.string, // publish URL for the stream
  camera: PropTypes.oneOf(['front','back']), // which camera feeds the stream
  muted: PropTypes.bool,
  zoom: PropTypes.number,
  focus: PropTypes.bool,
  profile: PropTypes.shape({ // object must match this shape
    video: PropTypes.shape({
      fps: PropTypes.number.isRequired, // frames per second
      bps: PropTypes.number.isRequired, // video bitrate
      maxFrameInterval: PropTypes.number.isRequired
    }).isRequired,
    audio: PropTypes.shape({
      rate: PropTypes.number.isRequired, // audio sample rate
      bitrate: PropTypes.number.isRequired, // audio bitrate
    }).isRequired,
    // One of the StreamingConst.encodingSize values (_240 .. _1088).
    encodingSize: PropTypes.oneOf([StreamingConst.encodingSize._240, StreamingConst.encodingSize._480, StreamingConst.encodingSize._544, StreamingConst.encodingSize._720, StreamingConst.encodingSize._1088]).isRequired
  }).isRequired,
  started: PropTypes.bool, // true => start streaming, false => stop
  settings: PropTypes.object,
  // Native event callbacks; each receives the event's nativeEvent payload.
  onReady: PropTypes.func,
  onConnecting: PropTypes.func,
  onStreaming: PropTypes.func,
  onShutdown: PropTypes.func,
  onIOError: PropTypes.func,
  onDisconnected: PropTypes.func,
  ...View.propTypes,
}

// Bridge to the native view registered as "RCTStreaming" on both platforms.
const RCTStreaming = requireNativeComponent('RCTStreaming', Streaming);

module.exports = Streaming;
================================================
FILE: StreamingConst.js
================================================
/**
* Created by buhe on 16/7/14.
*/
// Supported video encoding sizes. The numeric values are the ordinals the
// native side expects for `profile.encodingSize`; the object is frozen so
// the constants cannot be reassigned (same read-only behavior as the
// original getter-only properties).
const video_encoding = Object.freeze({
  _240: 0,
  _480: 1,
  _544: 2,
  _720: 3,
  _1088: 4,
});
// Public constants consumed by Streaming.js (profile.encodingSize values).
module.exports = {
  encodingSize:video_encoding
};
================================================
FILE: android/build.gradle
================================================
// Gradle build for the react-native-pili Android library module.
apply plugin: 'com.android.library'

android {
    compileSdkVersion 23
    buildToolsVersion "23.0.2"

    defaultConfig {
        minSdkVersion 16
        targetSdkVersion 22
        versionCode 1
        versionName "1.0"
    }
//    buildTypes {
//        release {
//            minifyEnabled false
//            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
//        }
//    }
}

allprojects {
    repositories {
        jcenter()
        // Resolve the React Native artifact from the host app's node_modules.
        maven { url "$projectDir/../../react-native/android" }
    }
}

dependencies {
    testCompile 'junit:junit:4.12'
    compile 'com.android.support:appcompat-v7:23.1.1'
    compile 'com.facebook.react:react-native:+'
    // Qiniu DNS + QoS libraries, and the bundled PLDroid streaming/player jars.
    compile 'com.qiniu:happy-dns:0.2.+'
    compile 'com.qiniu.pili:pili-android-qos:0.8.13'
    compile files('libs/pldroid-media-streaming-2.0.4.jar')
    compile files('libs/pldroid-player-1.3.2.jar')
}
================================================
FILE: android/gradle/wrapper/gradle-wrapper.properties
================================================
#Mon Dec 28 10:00:20 PST 2015
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-2.10-all.zip
================================================
FILE: android/gradlew
================================================
#!/usr/bin/env bash
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn ( ) {
echo "$*"
}
die ( ) {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
esac
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
function splitJvmOpts() {
JVM_OPTS=("$@")
}
eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
================================================
FILE: android/gradlew.bat
================================================
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windowz variants
if not "%OS%" == "Windows_NT" goto win9xME_args
if "%@eval[2+2]" == "4" goto 4NT_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
goto execute
:4NT_args
@rem Get arguments from the 4NT Shell from JP Software
set CMD_LINE_ARGS=%$
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega
================================================
FILE: android/proguard-rules.pro
================================================
# Add project specific ProGuard rules here.
# By default, the flags in this file are appended to flags specified
# in /Users/guguyanhua/sdk/tools/proguard/proguard-android.txt
# You can edit the include path and order by changing the proguardFiles
# directive in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# Add any project specific keep options here:
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
================================================
FILE: android/src/androidTest/java/com/pili/rnpili/ApplicationTest.java
================================================
package com.pili.rnpili;
import android.app.Application;
import android.test.ApplicationTestCase;
/**
* <a href="http://d.android.com/tools/testing/testing_android.html">Testing Fundamentals</a>
*/
// Default Android Studio instrumentation-test scaffold; exercises no library code.
public class ApplicationTest extends ApplicationTestCase<Application> {
    public ApplicationTest() {
        super(Application.class);
    }
}
================================================
FILE: android/src/main/AndroidManifest.xml
================================================
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.pili.rnpili">
</manifest>
================================================
FILE: android/src/main/java/com/pili/rnpili/CameraPreviewFrameView.java
================================================
package com.pili.rnpili;
import android.content.Context;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.util.Log;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
import android.view.View;
import com.qiniu.android.dns.DnsManager;
import com.qiniu.android.dns.IResolver;
import com.qiniu.android.dns.NetworkInfo;
import com.qiniu.android.dns.http.DnspodFree;
import com.qiniu.android.dns.local.AndroidDnsServer;
import com.qiniu.android.dns.local.Resolver;
import java.io.IOException;
import java.net.InetAddress;
/**
* Created by buhe on 16/4/29.
* A Pili Streaming View
*/
public class CameraPreviewFrameView extends GLSurfaceView {
    private static final String TAG = "CameraPreviewFrameView";

    /** Receiver for gestures detected on the preview surface. */
    public interface Listener {
        /** Called on a single tap (return value is ignored by this view). */
        boolean onSingleTapUp(MotionEvent e);

        /** Called as the pinch-zoom factor changes; factor is clamped to [0.01, 1.0]. */
        boolean onZoomValueChanged(float factor);
    }

    private Listener mListener;
    private ScaleGestureDetector mScaleDetector;
    private GestureDetector mGestureDetector;

    public CameraPreviewFrameView(Context context) {
        super(context);
        initialize(context);
    }

    public CameraPreviewFrameView(Context context, AttributeSet attrs) {
        super(context, attrs);
        initialize(context);
    }

    public void setListener(Listener listener) {
        mListener = listener;
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        // Give the tap detector first shot; fall back to the pinch detector.
        // NOTE(review): returns false when the tap detector consumed the event,
        // so parents may also see the touch — confirm this is intentional.
        if (!mGestureDetector.onTouchEvent(event)) {
            return mScaleDetector.onTouchEvent(event);
        }
        return false;
    }

    private GestureDetector.SimpleOnGestureListener mGestureListener = new GestureDetector.SimpleOnGestureListener() {
        @Override
        public boolean onSingleTapUp(MotionEvent e) {
            if (mListener != null) {
                mListener.onSingleTapUp(e);
            }
            return false;
        }
    };

    private ScaleGestureDetector.SimpleOnScaleGestureListener mScaleListener = new ScaleGestureDetector.SimpleOnScaleGestureListener() {
        private float mScaleFactor = 1.0f; // zoom accumulated across the whole gesture

        @Override
        public boolean onScaleBegin(ScaleGestureDetector detector) {
            return true;
        }

        @Override
        public boolean onScale(ScaleGestureDetector detector) {
            // factor > 1, zoom
            // factor < 1, pinch
            mScaleFactor *= detector.getScaleFactor();
            // Don't let the object get too small or too large.
            mScaleFactor = Math.max(0.01f, Math.min(mScaleFactor, 1.0f));
            return mListener != null && mListener.onZoomValueChanged(mScaleFactor);
        }
    };

    // Shared constructor body: wire up both gesture detectors.
    private void initialize(Context context) {
        Log.i(TAG, "initialize");
        mScaleDetector = new ScaleGestureDetector(context, mScaleListener);
        mGestureDetector = new GestureDetector(context, mGestureListener);
    }
}
================================================
FILE: android/src/main/java/com/pili/rnpili/PiliAudioStreamingViewManager.java
================================================
package com.pili.rnpili;
import android.hardware.Camera;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.LifecycleEventListener;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.common.MapBuilder;
import com.facebook.react.uimanager.SimpleViewManager;
import com.facebook.react.uimanager.ThemedReactContext;
import com.facebook.react.uimanager.annotations.ReactProp;
import com.facebook.react.uimanager.events.RCTEventEmitter;
import com.pili.rnpili.support.Config;
import com.pili.rnpili.support.RotateLayout;
import com.qiniu.android.dns.DnsManager;
import com.qiniu.android.dns.IResolver;
import com.qiniu.android.dns.NetworkInfo;
import com.qiniu.android.dns.http.DnspodFree;
import com.qiniu.android.dns.local.AndroidDnsServer;
import com.qiniu.android.dns.local.Resolver;
import com.qiniu.pili.droid.streaming.AVCodecType;
import com.qiniu.pili.droid.streaming.CameraStreamingSetting;
import com.qiniu.pili.droid.streaming.CameraStreamingSetting.CAMERA_FACING_ID;
import com.qiniu.pili.droid.streaming.MediaStreamingManager;
import com.qiniu.pili.droid.streaming.MicrophoneStreamingSetting;
import com.qiniu.pili.droid.streaming.StreamingEnv;
import com.qiniu.pili.droid.streaming.StreamingProfile;
import com.qiniu.pili.droid.streaming.StreamingSessionListener;
import com.qiniu.pili.droid.streaming.StreamingState;
import com.qiniu.pili.droid.streaming.StreamingStateChangedListener;
import com.qiniu.pili.droid.streaming.widget.AspectFrameLayout;
import java.io.IOException;
import java.net.InetAddress;
import java.net.URISyntaxException;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
/**
* Created by buhe on 16/4/29.
*/
public class PiliAudioStreamingViewManager extends SimpleViewManager<View>
implements
// CameraPreviewFrameView.Listener,
StreamingSessionListener,
StreamingStateChangedListener,
LifecycleEventListener
{
public enum Events {
READY("onReady"),
CONNECTING("onConnecting"),
STREAMING("onStreaming"),
SHUTDOWN("onShutdown"),
IOERROR("onIOError"),
DISCONNECTED("onDisconnected");
private final String mName;
Events(final String name) {
mName = name;
}
@Override
public String toString() {
return mName;
}
}
    private static final String TAG = "PiliStreamingView";

    // Handler message codes (the handler itself is declared later in this class).
    protected static final int MSG_START_STREAMING = 0;
    protected static final int MSG_STOP_STREAMING = 1;
    private static final int MSG_SET_ZOOM = 2;
    private static final int MSG_MUTE = 3;
    private static final int ZOOM_MINIMUM_WAIT_MILLIS = 33; //ms

    protected MediaStreamingManager mMediaStreamingManager;
    protected boolean mIsReady = false; // set once StreamingState.READY is observed
    private int mCurrentZoom = 0;
    private int mMaxZoom = 0;
    private StreamingProfile mProfile;
//    private CameraStreamingSetting setting;
//    private MicrophoneStreamingSetting microphoneSetting;
    private ThemedReactContext context;
    private RotateLayout mRotateLayout;
    private CameraPreviewFrameView previewFrameView;
    private View piliStreamPreview; // placeholder view returned to React
    private boolean focus = false;
    private boolean started = false; // desired streaming state requested from JS; default stopped
    private RCTEventEmitter mEventEmitter;
    /**
     * Lazily create and configure the audio-only MediaStreamingManager.
     * Safe to call repeatedly; only the first call (while the manager is
     * null) has any effect. Also registers this manager for host lifecycle
     * events on the React context.
     */
    private void initializeStreamingSessionIfNeeded(View view) {
        if (mMediaStreamingManager == null) {
            mMediaStreamingManager = new MediaStreamingManager(
                    context,
                    AVCodecType.SW_AUDIO_CODEC); // soft codec
            mProfile = new StreamingProfile();
            mProfile
                    .setAudioQuality(StreamingProfile.AUDIO_QUALITY_LOW1)
                    .setEncoderRCMode(StreamingProfile.EncoderRCModes.QUALITY_PRIORITY)
                    .setDnsManager(getMyDnsManager())
                    .setStreamStatusConfig(new StreamingProfile.StreamStatusConfig(3))
                    .setSendingBufferProfile(new StreamingProfile.SendingBufferProfile(0.2f, 0.8f, 3.0f, 20 * 1000))
            ;
            mMediaStreamingManager.setStreamingStateListener(this);
            mMediaStreamingManager.setStreamingSessionListener(this);
            mMediaStreamingManager.prepare(mProfile);
            context.addLifecycleEventListener(this);
        }
    }
    /**
     * Export every {@link Events} entry as a direct event whose JS
     * registration name equals the event name (e.g. "onReady" -> onReady prop).
     */
    @Override
    @Nullable
    public Map getExportedCustomDirectEventTypeConstants() {
        MapBuilder.Builder builder = MapBuilder.builder();
        for (Events event : Events.values()) {
            builder.put(event.toString(), MapBuilder.of("registrationName", event.toString()));
        }
        return builder.build();
    }
    /**
     * Create the placeholder view for the JS <AudioStreaming /> component.
     * Audio-only streaming needs no camera preview, so a plain View is used.
     * The streaming session is resumed when the view attaches to a window
     * and fully torn down when it detaches.
     */
    @Override
    public View createViewInstance(ThemedReactContext context) {
        this.context = context;
        StreamingEnv.init(context.getApplicationContext());
        mEventEmitter = context.getJSModule(RCTEventEmitter.class);
        piliStreamPreview = new View(context);
        initializeStreamingSessionIfNeeded(piliStreamPreview);
        piliStreamPreview.addOnAttachStateChangeListener(new View.OnAttachStateChangeListener() {
            @Override
            public void onViewAttachedToWindow(View v) {
                mMediaStreamingManager.resume();
            }

            @Override
            public void onViewDetachedFromWindow(View v) {
                // Destroy the whole session on detach; a fresh one is built
                // by the next createViewInstance call.
                mHandler.removeCallbacksAndMessages(null);
                mMediaStreamingManager.pause();
                mMediaStreamingManager.stopStreaming();
                mMediaStreamingManager.destroy();
                mMediaStreamingManager = null;
            }
        });
        return piliStreamPreview;
    }
@Override
/**
* Native component name for the JS <AudioStreaming /> component.
*/
public String getName() {
return "RCTAudioStreaming";
}
@ReactProp(name = "rtmpURL")
public void setRtmpURL(View view, @Nullable String rtmpURL) {
    // Apply a new publish URL. Only push the profile to the streaming
    // manager when parsing succeeded; previously the (unchanged) profile
    // was pushed even after a URISyntaxException, which was a no-op at
    // best and misleading at worst.
    try {
        mProfile.setPublishUrl(rtmpURL);
        mMediaStreamingManager.setStreamingProfile(mProfile);
    } catch (URISyntaxException e) {
        // Include the offending URL and the exception for diagnosis.
        Log.e(TAG, "RTMP URL is syntax error: " + rtmpURL, e);
    }
}
@ReactProp(name = "profile")
public void setProfile(View view, @Nullable ReadableMap profile) {
    // The prop is @Nullable but was dereferenced unconditionally; guard
    // against a missing profile or missing "audio" section.
    if (profile == null || !profile.hasKey("audio")) {
        return;
    }
    ReadableMap audio = profile.getMap("audio");
    if (audio == null) {
        return;
    }
    // The SDK requires a video profile even for audio-only streaming;
    // pass a zeroed one (hack for bad API).
    StreamingProfile.VideoProfile vProfile =
            new StreamingProfile.VideoProfile(0, 0, 0);
    StreamingProfile.AudioProfile aProfile =
            new StreamingProfile.AudioProfile(audio.getInt("rate"), audio.getInt("bitrate")); //audio sample rate, audio bitrate
    StreamingProfile.AVProfile avProfile = new StreamingProfile.AVProfile(vProfile, aProfile);
    mProfile.setAVProfile(avProfile);
    mMediaStreamingManager.setStreamingProfile(mProfile);
}
// Mutes/unmutes the microphone feed; forwarded directly to the SDK.
@ReactProp(name = "muted")
public void setMuted(View view, boolean muted) {
mMediaStreamingManager.mute(muted);
}
@ReactProp(name = "started")
public void setStarted(View view, boolean started) {
    // No-op when the prop did not actually change.
    if (started == this.started) {
        return;
    }
    this.started = started;
    // If the session is not READY yet, only record the flag here;
    // onStateChanged() starts streaming once READY arrives.
    if (!mIsReady) {
        return;
    }
    if (started) {
        startStreaming();
    } else {
        stopStreaming();
    }
}
// React tag of the managed view; used as the target for all JS events.
public int getTargetId() {
return piliStreamPreview.getId();
}
// Maps SDK streaming-state transitions onto JS events. States with no
// matching JS event (UNKNOWN, buffer states, camera/audio failures) are
// logged or ignored.
@Override
public void onStateChanged(StreamingState state, Object extra) {
switch (state) {
case PREPARING:
break;
case READY:
// Session is ready to stream; honour a `started` prop that was set
// before READY arrived, then notify JS.
mIsReady = true;
mMaxZoom = mMediaStreamingManager.getMaxZoom();
if (started) {
startStreaming();
}
mEventEmitter.receiveEvent(getTargetId(), Events.READY.toString(), Arguments.createMap());
break;
case CONNECTING:
mEventEmitter.receiveEvent(getTargetId(), Events.CONNECTING.toString(), Arguments.createMap());
break;
case STREAMING:
mEventEmitter.receiveEvent(getTargetId(), Events.STREAMING.toString(), Arguments.createMap());
break;
case SHUTDOWN:
mEventEmitter.receiveEvent(getTargetId(), Events.SHUTDOWN.toString(), Arguments.createMap());
break;
case IOERROR:
mEventEmitter.receiveEvent(getTargetId(), Events.IOERROR.toString(), Arguments.createMap());
break;
case UNKNOWN:
break;
case SENDING_BUFFER_EMPTY:
break;
case SENDING_BUFFER_FULL:
break;
case AUDIO_RECORDING_FAIL:
break;
case OPEN_CAMERA_FAIL:
break;
case DISCONNECTED:
mEventEmitter.receiveEvent(getTargetId(), Events.DISCONNECTED.toString(), Arguments.createMap());
break;
case CAMERA_SWITCHED:
// `extra` carries the new camera id when present.
if (extra != null) {
Log.i(TAG, "current camera id:" + (Integer) extra);
}
Log.i(TAG, "camera switched");
break;
case TORCH_INFO:
// `extra` carries torch support; only logged here.
if (extra != null) {
final boolean isSupportedTorch = (Boolean) extra;
Log.i(TAG, "isSupportedTorch=" + isSupportedTorch);
// this.runOnUiThread(new Runnable() {
// @Override
// public void run() {
// if (isSupportedTorch) {
// mTorchBtn.setVisibility(View.VISIBLE);
// } else {
// mTorchBtn.setVisibility(View.GONE);
// }
// }
// });
}
break;
}
}
// Called by the SDK when audio recording fails; switches encoding type
// and retries streaming. Returning true tells the SDK we handled it.
// NOTE(review): SW_VIDEO_CODEC in this audio-only manager looks copied
// from the video streaming manager — confirm the intended codec type.
@Override
public boolean onRecordAudioFailedHandled(int err) {
mMediaStreamingManager.updateEncodingType(AVCodecType.SW_VIDEO_CODEC);
mMediaStreamingManager.startStreaming();
return true;
}
@Override
public boolean onRestartStreamingHandled(int err) {
    // SDK asked us to restart after a recoverable error; report whether
    // the restart call was accepted.
    Log.i(TAG, "onRestartStreamingHandled");
    boolean restarted = mMediaStreamingManager.startStreaming();
    return restarted;
}
@Override
public Camera.Size onPreviewSizeSelected(List<Camera.Size> list) {
    // Returning null lets the SDK pick its default preview size
    // (irrelevant for audio-only streaming anyway).
    return null;
}
// Host Activity resumed: resume capture/streaming.
// NOTE(review): mMediaStreamingManager may be null after view detach —
// confirm lifecycle ordering cannot NPE here.
@Override
public void onHostResume() {
mMediaStreamingManager.resume();
}
// Host Activity paused: drop pending start/stop/zoom messages and pause
// the session.
@Override
public void onHostPause() {
mHandler.removeCallbacksAndMessages(null);
mMediaStreamingManager.pause();
}
// Host Activity destroyed: release the streaming session.
@Override
public void onHostDestroy() {
mMediaStreamingManager.destroy();
}
// Main-looper handler that serializes start/stop/zoom requests.
// startStreaming() is a blocking SDK call, so MSG_START_STREAMING hops to
// a background thread; stop and zoom run directly on the main thread.
protected Handler mHandler = new Handler(Looper.getMainLooper()) {
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case MSG_START_STREAMING:
new Thread(new Runnable() {
@Override
public void run() {
boolean res = mMediaStreamingManager.startStreaming();
Log.i(TAG, "res:" + res);
}
}).start();
break;
case MSG_STOP_STREAMING:
boolean res = mMediaStreamingManager.stopStreaming();
break;
case MSG_SET_ZOOM:
mMediaStreamingManager.setZoomValue(mCurrentZoom);
break;
default:
Log.e(TAG, "Invalid message");
}
}
};
private void startStreaming() {
    // Cancel any queued start/stop/zoom work, then schedule a fresh
    // start shortly after, off the current call stack.
    mHandler.removeCallbacksAndMessages(null);
    Message start = mHandler.obtainMessage(MSG_START_STREAMING);
    mHandler.sendMessageDelayed(start, 50);
}
private void stopStreaming() {
    // Cancel any queued start/stop/zoom work, then schedule a stop
    // shortly after, off the current call stack.
    mHandler.removeCallbacksAndMessages(null);
    Message stop = mHandler.obtainMessage(MSG_STOP_STREAMING);
    mHandler.sendMessageDelayed(stop, 50);
}
// Builds the DNS resolver chain: DNSPod's free HTTP resolver, the system
// resolver, and a direct UDP resolver against DNSPod's public IP.
private DnsManager getMyDnsManager() {
    IResolver r0 = new DnspodFree();
    IResolver r1 = AndroidDnsServer.defaultResolver();
    IResolver r2 = null;
    try {
        r2 = new Resolver(InetAddress.getByName("119.29.29.29"));
    } catch (IOException ex) {
        ex.printStackTrace();
    }
    // Fix: do not hand DnsManager an array containing a null resolver
    // when construction of the 119.29.29.29 resolver failed.
    IResolver[] resolvers = (r2 != null)
            ? new IResolver[]{r0, r1, r2}
            : new IResolver[]{r0, r1};
    return new DnsManager(NetworkInfo.normal, resolvers);
}
}
================================================
FILE: android/src/main/java/com/pili/rnpili/PiliPackage.java
================================================
package com.pili.rnpili;
import android.app.Activity;
import com.facebook.react.ReactPackage;
import com.facebook.react.bridge.JavaScriptModule;
import com.facebook.react.bridge.NativeModule;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.uimanager.ViewManager;
import com.qiniu.pili.droid.streaming.StreamingEnv;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
* Created by buhe on 16/4/29.
*/
/**
 * React Native package that registers the Pili view managers:
 * camera streaming, video player, and audio-only streaming.
 */
public class PiliPackage implements ReactPackage {

    public PiliPackage() {
    }

    @Override
    public List<NativeModule> createNativeModules(ReactApplicationContext reactContext) {
        // This package contributes view managers only — no native modules.
        return Collections.emptyList();
    }

    @Override
    public List<Class<? extends JavaScriptModule>> createJSModules() {
        // No custom JS modules.
        return Collections.emptyList();
    }

    @Override
    public List<ViewManager> createViewManagers(ReactApplicationContext reactContext) {
        ViewManager streaming = new PiliStreamingViewManager();
        ViewManager player = new PiliPlayerViewManager();
        ViewManager audioStreaming = new PiliAudioStreamingViewManager();
        return Arrays.<ViewManager>asList(streaming, player, audioStreaming);
    }
}
================================================
FILE: android/src/main/java/com/pili/rnpili/PiliPlayerViewManager.java
================================================
package com.pili.rnpili;
import android.util.Log;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.LifecycleEventListener;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.common.MapBuilder;
import com.facebook.react.uimanager.SimpleViewManager;
import com.facebook.react.uimanager.ThemedReactContext;
import com.facebook.react.uimanager.annotations.ReactProp;
import com.facebook.react.uimanager.events.RCTEventEmitter;
import com.pili.pldroid.player.AVOptions;
import com.pili.pldroid.player.PLMediaPlayer;
import com.pili.pldroid.player.widget.PLVideoView;
//import com.pili.rnpili.support.MediaController;
import java.util.Map;
import javax.annotation.Nullable;
/**
* Created by buhe on 16/4/29.
*/
/**
 * View manager backing the JS <Player /> component ("RCTPlayer").
 * Wraps a PLVideoView, forwards player props (source, aspectRatio,
 * started, muted) to it, and emits lifecycle events back to JS.
 */
public class PiliPlayerViewManager extends SimpleViewManager<PLVideoView> implements LifecycleEventListener {

    private ThemedReactContext reactContext;
    private static final String TAG = PiliPlayerViewManager.class.getSimpleName();
    private PLVideoView mVideoView;
    private RCTEventEmitter mEventEmitter;

    // PLMediaPlayer "info" codes handled in mOnInfoListener.
    private static final int MEDIA_INFO_UNKNOWN = 1;
    private static final int MEDIA_INFO_VIDEO_RENDERING_START = 3;
    private static final int MEDIA_INFO_BUFFERING_START = 701;
    private static final int MEDIA_INFO_BUFFERING_END = 702;
    private static final int MEDIA_INFO_AUDIO_RENDERING_START = 10002;

    private boolean started;
    private int aspectRatio;

    /** JS event names exposed to the <Player /> component. */
    public enum Events {
        LOADING("onLoading"),
        PAUSE("onPaused"),
        SHUTDOWN("onShutdown"),
        ERROR("onPlayerError"),
        PLAYING("onPlaying");

        private final String mName;

        Events(final String name) {
            mName = name;
        }

        @Override
        public String toString() {
            return mName;
        }
    }

    @Override
    public String getName() {
        return "RCTPlayer";
    }

    @Override
    @Nullable
    public Map getExportedCustomDirectEventTypeConstants() {
        // Register every Events value as a direct event so JS can bind
        // the matching on* props.
        MapBuilder.Builder builder = MapBuilder.builder();
        for (Events event : Events.values()) {
            builder.put(event.toString(), MapBuilder.of("registrationName", event.toString()));
        }
        return builder.build();
    }

    @Override
    protected PLVideoView createViewInstance(ThemedReactContext reactContext) {
        this.reactContext = reactContext;
        mEventEmitter = reactContext.getJSModule(RCTEventEmitter.class);
        mVideoView = new PLVideoView(reactContext);
        // Wire up all player callbacks so state changes reach JS.
        mVideoView.setOnPreparedListener(mOnPreparedListener);
        mVideoView.setOnInfoListener(mOnInfoListener);
        mVideoView.setOnVideoSizeChangedListener(mOnVideoSizeChangedListener);
        mVideoView.setOnBufferingUpdateListener(mOnBufferingUpdateListener);
        mVideoView.setOnCompletionListener(mOnCompletionListener);
        mVideoView.setOnSeekCompleteListener(mOnSeekCompleteListener);
        mVideoView.setOnErrorListener(mOnErrorListener);
        // Pause/resume playback with the host Activity lifecycle.
        reactContext.addLifecycleEventListener(this);
        return mVideoView;
    }

    // True for RTMP or HTTP HLS/FLV URLs — the usual live formats.
    private boolean isLiveStreaming(String url) {
        if (url.startsWith("rtmp://")
                || (url.startsWith("http://") && url.endsWith(".m3u8"))
                || (url.startsWith("http://") && url.endsWith(".flv"))) {
            return true;
        }
        return false;
    }

    @ReactProp(name = "source")
    public void setSource(PLVideoView mVideoView, ReadableMap source) {
        AVOptions options = new AVOptions();
        String uri = source.getString("uri");
        boolean mediaController = source.hasKey("controller") && source.getBoolean("controller");
        int avFrameTimeout = source.hasKey("timeout") ? source.getInt("timeout") : -1; //10 * 1000 ms
        boolean liveStreaming = source.hasKey("live") && source.getBoolean("live"); //1 or 0 // 1 -> live
        boolean codec = source.hasKey("hardCodec") && source.getBoolean("hardCodec"); //1 or 0 // 1 -> hw codec enable, 0 -> disable [recommended]
        // the unit of timeout is ms
        if (avFrameTimeout >= 0) {
            options.setInteger(AVOptions.KEY_GET_AV_FRAME_TIMEOUT, avFrameTimeout);
        }
        // Some optimization with buffering mechanism when be set to 1
        if (liveStreaming) {
            options.setInteger(AVOptions.KEY_LIVE_STREAMING, 1);
        } else {
            options.setInteger(AVOptions.KEY_LIVE_STREAMING, 0);
        }
        // 1 -> hw codec enable, 0 -> disable [recommended]
        if (codec) {
            options.setInteger(AVOptions.KEY_MEDIACODEC, 1);
        } else {
            options.setInteger(AVOptions.KEY_MEDIACODEC, 0);
        }
        mVideoView.setAVOptions(options);
        // After setVideoPath, the play will start automatically
        // mVideoView.start() is not required
        mVideoView.setVideoPath(uri);
    }

    @ReactProp(name = "aspectRatio")
    public void setAspectRatio(PLVideoView mVideoView, int aspectRatio) {
        /**
         * ASPECT_RATIO_ORIGIN = 0;
         * ASPECT_RATIO_FIT_PARENT = 1
         * ASPECT_RATIO_PAVED_PARENT = 2
         * ASPECT_RATIO_16_9 = 3
         * ASPECT_RATIO_4_3 = 4
         */
        this.aspectRatio = aspectRatio;
        mVideoView.setDisplayAspectRatio(aspectRatio);
    }

    @ReactProp(name = "started")
    public void setStarted(PLVideoView mVideoView, boolean started) {
        this.started = started;
        if (started) {
            mVideoView.start();
        } else {
            mVideoView.pause();
            mEventEmitter.receiveEvent(getTargetId(), Events.PAUSE.toString(), Arguments.createMap());
        }
    }

    @ReactProp(name = "muted")
    public void setMuted(PLVideoView mVideoView, boolean muted) {
        // mVideoView.mute
        //Android not implements
    }

    private PLMediaPlayer.OnPreparedListener mOnPreparedListener = new PLMediaPlayer.OnPreparedListener() {
        @Override
        public void onPrepared(PLMediaPlayer plMediaPlayer) {
            Log.d(TAG, "onPrepared ! ");
            mEventEmitter.receiveEvent(getTargetId(), Events.LOADING.toString(), Arguments.createMap());
        }
    };

    private PLMediaPlayer.OnInfoListener mOnInfoListener = new PLMediaPlayer.OnInfoListener() {
        @Override
        public boolean onInfo(PLMediaPlayer plMediaPlayer, int what, int extra) {
            Log.d(TAG, "onInfo: " + what + ", " + extra);
            switch (what) {
                case MEDIA_INFO_VIDEO_RENDERING_START:
                    mEventEmitter.receiveEvent(getTargetId(), Events.PLAYING.toString(), Arguments.createMap());
                    break;
                case MEDIA_INFO_BUFFERING_START:
                    mEventEmitter.receiveEvent(getTargetId(), Events.LOADING.toString(), Arguments.createMap());
                    break;
                case MEDIA_INFO_BUFFERING_END:
                    mEventEmitter.receiveEvent(getTargetId(), Events.PLAYING.toString(), Arguments.createMap());
                    break;
            }
            return true;
        }
    };

    private PLMediaPlayer.OnErrorListener mOnErrorListener = new PLMediaPlayer.OnErrorListener() {
        @Override
        public boolean onError(PLMediaPlayer plMediaPlayer, int errorCode) {
            Log.e(TAG, "Error happened, errorCode = " + errorCode);
            WritableMap event = Arguments.createMap();
            event.putInt("errorCode", errorCode);
            // Fix: send the populated payload. The original built `event`
            // but then emitted a fresh empty map, dropping the errorCode.
            mEventEmitter.receiveEvent(getTargetId(), Events.ERROR.toString(), event);
            return true;
        }
    };

    private PLMediaPlayer.OnCompletionListener mOnCompletionListener = new PLMediaPlayer.OnCompletionListener() {
        @Override
        public void onCompletion(PLMediaPlayer plMediaPlayer) {
            Log.d(TAG, "Play Completed !");
            mEventEmitter.receiveEvent(getTargetId(), Events.SHUTDOWN.toString(), Arguments.createMap());
        }
    };

    private PLMediaPlayer.OnBufferingUpdateListener mOnBufferingUpdateListener = new PLMediaPlayer.OnBufferingUpdateListener() {
        @Override
        public void onBufferingUpdate(PLMediaPlayer plMediaPlayer, int precent) {
            Log.d(TAG, "onBufferingUpdate: " + precent);
        }
    };

    private PLMediaPlayer.OnSeekCompleteListener mOnSeekCompleteListener = new PLMediaPlayer.OnSeekCompleteListener() {
        @Override
        public void onSeekComplete(PLMediaPlayer plMediaPlayer) {
            Log.d(TAG, "onSeekComplete !");
        }
    };

    private PLMediaPlayer.OnVideoSizeChangedListener mOnVideoSizeChangedListener = new PLMediaPlayer.OnVideoSizeChangedListener() {
        @Override
        public void onVideoSizeChanged(PLMediaPlayer plMediaPlayer, int width, int height) {
            Log.d(TAG, "onVideoSizeChanged: " + width + "," + height);
        }
    };

    @Override
    public void onHostResume() {
        mVideoView.start();
    }

    @Override
    public void onHostPause() {
        mVideoView.pause();
    }

    @Override
    public void onHostDestroy() {
        mVideoView.stopPlayback();
    }

    // React tag of the video view; target for all JS events.
    public int getTargetId() {
        return mVideoView.getId();
    }
}
================================================
FILE: android/src/main/java/com/pili/rnpili/PiliStreamingViewManager.java
================================================
package com.pili.rnpili;
import android.app.Activity;
import android.hardware.Camera;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.util.Log;
import android.view.Gravity;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.LifecycleEventListener;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.common.MapBuilder;
import com.facebook.react.uimanager.SimpleViewManager;
import com.facebook.react.uimanager.ThemedReactContext;
import com.facebook.react.uimanager.annotations.ReactProp;
import com.facebook.react.uimanager.events.RCTEventEmitter;
import com.pili.rnpili.support.Config;
import com.pili.rnpili.support.Jsons;
import com.pili.rnpili.support.RotateLayout;
import com.qiniu.android.dns.DnsManager;
import com.qiniu.android.dns.IResolver;
import com.qiniu.android.dns.NetworkInfo;
import com.qiniu.android.dns.http.DnspodFree;
import com.qiniu.android.dns.local.AndroidDnsServer;
import com.qiniu.android.dns.local.Resolver;
import com.qiniu.pili.droid.streaming.AVCodecType;
import com.qiniu.pili.droid.streaming.CameraStreamingSetting;
import com.qiniu.pili.droid.streaming.MediaStreamingManager;
import com.qiniu.pili.droid.streaming.MicrophoneStreamingSetting;
import com.qiniu.pili.droid.streaming.StreamingEnv;
import com.qiniu.pili.droid.streaming.StreamingProfile;
import com.qiniu.pili.droid.streaming.widget.AspectFrameLayout;
import com.qiniu.pili.droid.streaming.CameraStreamingSetting;
import com.qiniu.pili.droid.streaming.CameraStreamingSetting.CAMERA_FACING_ID;
import com.qiniu.pili.droid.streaming.FrameCapturedCallback;
import com.qiniu.pili.droid.streaming.MediaStreamingManager;
import com.qiniu.pili.droid.streaming.MicrophoneStreamingSetting;
import com.qiniu.pili.droid.streaming.StreamStatusCallback;
import com.qiniu.pili.droid.streaming.StreamingPreviewCallback;
import com.qiniu.pili.droid.streaming.StreamingProfile;
import com.qiniu.pili.droid.streaming.StreamingSessionListener;
import com.qiniu.pili.droid.streaming.StreamingState;
import com.qiniu.pili.droid.streaming.StreamingStateChangedListener;
import com.qiniu.pili.droid.streaming.SurfaceTextureCallback;
import org.json.JSONObject;
import java.io.IOException;
import java.net.InetAddress;
import java.net.URISyntaxException;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
/**
* Created by buhe on 16/4/29.
*/
/**
 * View manager backing the JS <Streaming /> component ("RCTStreaming").
 * Hosts the camera preview inside an AspectFrameLayout, drives the Pili
 * MediaStreamingManager, and emits streaming lifecycle events to JS.
 */
public class PiliStreamingViewManager extends SimpleViewManager<AspectFrameLayout>
        implements
        CameraPreviewFrameView.Listener,
        StreamingSessionListener,
        StreamingStateChangedListener,
        LifecycleEventListener {

    /** JS event names exposed to the <Streaming /> component. */
    public enum Events {
        READY("onReady"),
        CONNECTING("onConnecting"),
        STREAMING("onStreaming"),
        SHUTDOWN("onShutdown"),
        IOERROR("onIOError"),
        DISCONNECTED("onDisconnected");

        private final String mName;

        Events(final String name) {
            mName = name;
        }

        @Override
        public String toString() {
            return mName;
        }
    }

    private static final String TAG = "PiliStreamingView";
    protected static final int MSG_START_STREAMING = 0;
    protected static final int MSG_STOP_STREAMING = 1;
    private static final int MSG_SET_ZOOM = 2;
    private static final int MSG_MUTE = 3;
    // Throttle for pinch-zoom updates sent to the camera.
    private static final int ZOOM_MINIMUM_WAIT_MILLIS = 33; //ms

    protected MediaStreamingManager mMediaStreamingManager;
    protected boolean mIsReady = false;
    private int mCurrentZoom = 0;
    private int mMaxZoom = 0;
    private StreamingProfile mProfile;
    private CameraStreamingSetting setting;
    private MicrophoneStreamingSetting microphoneSetting;
    private ThemedReactContext context;
    private RotateLayout mRotateLayout;
    private CameraPreviewFrameView previewFrameView;
    private AspectFrameLayout piliStreamPreview;
    private boolean focus = false;
    private boolean started = false;//default start attach on parent view
    private RCTEventEmitter mEventEmitter;

    // Lazily creates and configures the camera+microphone streaming session.
    private void initializeStreamingSessionIfNeeded(AspectFrameLayout afl, CameraPreviewFrameView previewFrameView) throws URISyntaxException {
        if (mMediaStreamingManager == null) {
            mMediaStreamingManager = new MediaStreamingManager(
                    context,
                    afl,
                    previewFrameView,
                    AVCodecType.SW_VIDEO_WITH_SW_AUDIO_CODEC); // soft codec
            mProfile = new StreamingProfile();
            StreamingProfile.AudioProfile aProfile = new StreamingProfile.AudioProfile(44100, 96 * 1024); //audio sample rate, audio bitrate
            StreamingProfile.VideoProfile vProfile = new StreamingProfile.VideoProfile(30, 1000 * 1024, 48);//fps bps maxFrameInterval
            StreamingProfile.AVProfile avProfile = new StreamingProfile.AVProfile(vProfile, aProfile);
            mProfile.setVideoQuality(StreamingProfile.VIDEO_QUALITY_HIGH3)
                    .setAudioQuality(StreamingProfile.AUDIO_QUALITY_MEDIUM2)
                    .setEncodingSizeLevel(Config.ENCODING_LEVEL)
                    .setEncoderRCMode(StreamingProfile.EncoderRCModes.QUALITY_PRIORITY)
                    .setAVProfile(avProfile)
                    .setDnsManager(getMyDnsManager())
                    .setStreamStatusConfig(new StreamingProfile.StreamStatusConfig(3))
                    .setSendingBufferProfile(new StreamingProfile.SendingBufferProfile(0.2f, 0.8f, 3.0f, 20 * 1000));
            setting = new CameraStreamingSetting();
            setting.setCameraId(Camera.CameraInfo.CAMERA_FACING_BACK)
                    .setContinuousFocusModeEnabled(true)
                    .setRecordingHint(false)
                    .setResetTouchFocusDelayInMs(3000)
                    .setFocusMode(CameraStreamingSetting.FOCUS_MODE_CONTINUOUS_PICTURE)
                    .setCameraPrvSizeLevel(CameraStreamingSetting.PREVIEW_SIZE_LEVEL.MEDIUM)
                    .setCameraPrvSizeRatio(CameraStreamingSetting.PREVIEW_SIZE_RATIO.RATIO_16_9);
            microphoneSetting = new MicrophoneStreamingSetting();
            microphoneSetting.setBluetoothSCOEnabled(false);
            // Receive state transitions and session callbacks on this instance.
            mMediaStreamingManager.setStreamingStateListener(this);
            mMediaStreamingManager.setStreamingSessionListener(this);
            mMediaStreamingManager.prepare(setting, microphoneSetting, mProfile);
            // Pause/resume/destroy with the host Activity lifecycle.
            context.addLifecycleEventListener(this);
        }
    }

    @Override
    @Nullable
    public Map getExportedCustomDirectEventTypeConstants() {
        // Register every Events value as a direct event so JS can bind
        // the matching on* props.
        MapBuilder.Builder builder = MapBuilder.builder();
        for (Events event : Events.values()) {
            builder.put(event.toString(), MapBuilder.of("registrationName", event.toString()));
        }
        return builder.build();
    }

    @Override
    public AspectFrameLayout createViewInstance(ThemedReactContext context) {
        this.context = context;
        StreamingEnv.init(context.getApplicationContext());
        mEventEmitter = context.getJSModule(RCTEventEmitter.class);
        piliStreamPreview = new AspectFrameLayout(context);
        piliStreamPreview.setShowMode(AspectFrameLayout.SHOW_MODE.REAL);
        previewFrameView = new CameraPreviewFrameView(context);
        previewFrameView.setListener(this);
        previewFrameView.setLayoutParams(new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));
        piliStreamPreview.addView(previewFrameView);
        try {
            initializeStreamingSessionIfNeeded(piliStreamPreview, previewFrameView);
        } catch (URISyntaxException e) {
            e.printStackTrace();
        }
        // Resume streaming while the view is in the window; fully tear the
        // session down on detach.
        piliStreamPreview.addOnAttachStateChangeListener(new View.OnAttachStateChangeListener() {
            @Override
            public void onViewAttachedToWindow(View v) {
                mMediaStreamingManager.resume();
            }

            @Override
            public void onViewDetachedFromWindow(View v) {
                mHandler.removeCallbacksAndMessages(null);
                mMediaStreamingManager.pause();
                mMediaStreamingManager.stopStreaming();
                mMediaStreamingManager.destroy();
                mMediaStreamingManager = null;
            }
        });
        return piliStreamPreview;
    }

    @Override
    /**
     * <Streaming />
     */
    public String getName() {
        return "RCTStreaming";
    }

    @ReactProp(name = "camera")
    public void setCamera(AspectFrameLayout view, @Nullable String camera) {
        if (camera.equals("front")) {
            mMediaStreamingManager.switchCamera(CAMERA_FACING_ID.CAMERA_FACING_FRONT);
        } else if (camera.equals("back")) {
            mMediaStreamingManager.switchCamera(CAMERA_FACING_ID.CAMERA_FACING_BACK);
        } else {
            // Unknown value: keep the current camera.
        }
    }

    @ReactProp(name = "rtmpURL")
    public void setRtmpURL(AspectFrameLayout view, @Nullable String rtmpURL) {
        try {
            mProfile.setPublishUrl(rtmpURL);
        } catch (URISyntaxException e) {
            Log.e(TAG, "RTMP URL is syntax error.");
        }
        mMediaStreamingManager.setStreamingProfile(mProfile);
    }

    @ReactProp(name = "profile")
    public void setProfile(AspectFrameLayout view, @Nullable ReadableMap profile) {
        ReadableMap video = profile.getMap("video");
        ReadableMap audio = profile.getMap("audio");
        int encodingSize = profile.getInt("encodingSize");
        StreamingProfile.AudioProfile aProfile =
                new StreamingProfile.AudioProfile(audio.getInt("rate"), audio.getInt("bitrate")); //audio sample rate, audio bitrate
        StreamingProfile.VideoProfile vProfile =
                new StreamingProfile.VideoProfile(video.getInt("fps"), video.getInt("bps"), video.getInt("maxFrameInterval"));//fps bps maxFrameInterval
        StreamingProfile.AVProfile avProfile = new StreamingProfile.AVProfile(vProfile, aProfile);
        mProfile.setAVProfile(avProfile);
        mProfile.setEncodingSizeLevel(encodingSize);
        mMediaStreamingManager.setStreamingProfile(mProfile);
    }

    @ReactProp(name = "muted")
    public void setMuted(AspectFrameLayout view, boolean muted) {
        mMediaStreamingManager.mute(muted);
    }

    @ReactProp(name = "zoom")
    public void setZoom(AspectFrameLayout view, int zoom) {
        // Clamp to [0, mMaxZoom] and apply the clamped value.
        // Fix: the original computed the clamped mCurrentZoom but then
        // passed the raw `zoom` to the SDK, bypassing the clamp.
        mCurrentZoom = Math.max(0, Math.min(zoom, mMaxZoom));
        mMediaStreamingManager.setZoomValue(mCurrentZoom);
    }

    @ReactProp(name = "focus")
    public void setFocus(AspectFrameLayout view, boolean focus) {
        // Enables tap-to-focus handling in onSingleTapUp().
        this.focus = focus;
    }

    @ReactProp(name = "started")
    public void setStarted(AspectFrameLayout view, boolean started) {
        if (this.started == started) {
            //ignore
            return;
        }
        this.started = started;
        // If the session is not READY yet, only record the flag here;
        // onStateChanged() starts streaming once READY arrives.
        if (mIsReady) {
            if (started) {
                startStreaming();
            } else {
                stopStreaming();
            }
        }
    }

    protected void setFocusAreaIndicator() {
        // if (mRotateLayout == null) {
        // mRotateLayout = new FocusIndicatorRotateLayout(context, null);
        // mRotateLayout
        // .setLayoutParams(new FrameLayout.LayoutParams(
        // FrameLayout.LayoutParams.WRAP_CONTENT,
        // FrameLayout.LayoutParams.WRAP_CONTENT,
        // Gravity.CENTER
        // ));
        // View indicator = new View(context);
        // indicator.setLayoutParams(new ViewGroup.LayoutParams(120, 120));
        // mRotateLayout.addView(indicator);
        // mRotateLayout.setChild(indicator);
        // piliStreamPreview.addView(mRotateLayout);
        // mMediaStreamingManager.setFocusAreaIndicator(mRotateLayout,
        // indicator);
        // }
    }

    // React tag of the preview view; target for all JS events.
    public int getTargetId() {
        return piliStreamPreview.getId();
    }

    // Maps SDK streaming-state transitions onto JS events; states with no
    // matching JS event are logged or ignored.
    @Override
    public void onStateChanged(StreamingState state, Object extra) {
        switch (state) {
            case PREPARING:
                break;
            case READY:
                // Session can now stream; honour a `started` prop that
                // arrived before READY, then notify JS.
                mIsReady = true;
                mMaxZoom = mMediaStreamingManager.getMaxZoom();
                if (started) {
                    startStreaming();
                }
                mEventEmitter.receiveEvent(getTargetId(), Events.READY.toString(), Arguments.createMap());
                break;
            case CONNECTING:
                mEventEmitter.receiveEvent(getTargetId(), Events.CONNECTING.toString(), Arguments.createMap());
                break;
            case STREAMING:
                mEventEmitter.receiveEvent(getTargetId(), Events.STREAMING.toString(), Arguments.createMap());
                break;
            case SHUTDOWN:
                mEventEmitter.receiveEvent(getTargetId(), Events.SHUTDOWN.toString(), Arguments.createMap());
                break;
            case IOERROR:
                mEventEmitter.receiveEvent(getTargetId(), Events.IOERROR.toString(), Arguments.createMap());
                break;
            case UNKNOWN:
                break;
            case SENDING_BUFFER_EMPTY:
                break;
            case SENDING_BUFFER_FULL:
                break;
            case AUDIO_RECORDING_FAIL:
                break;
            case OPEN_CAMERA_FAIL:
                break;
            case DISCONNECTED:
                mEventEmitter.receiveEvent(getTargetId(), Events.DISCONNECTED.toString(), Arguments.createMap());
                break;
            case CAMERA_SWITCHED:
                if (extra != null) {
                    Log.i(TAG, "current camera id:" + (Integer) extra);
                }
                Log.i(TAG, "camera switched");
                break;
            case TORCH_INFO:
                if (extra != null) {
                    final boolean isSupportedTorch = (Boolean) extra;
                    Log.i(TAG, "isSupportedTorch=" + isSupportedTorch);
                }
                break;
        }
    }

    // Audio recording failed: fall back to video-only encoding and retry.
    @Override
    public boolean onRecordAudioFailedHandled(int err) {
        mMediaStreamingManager.updateEncodingType(AVCodecType.SW_VIDEO_CODEC);
        mMediaStreamingManager.startStreaming();
        return true;
    }

    @Override
    public boolean onRestartStreamingHandled(int err) {
        Log.i(TAG, "onRestartStreamingHandled");
        return mMediaStreamingManager.startStreaming();
    }

    // Returning null lets the SDK pick its default preview size.
    @Override
    public Camera.Size onPreviewSizeSelected(List<Camera.Size> list) {
        return null;
    }

    // Tap-to-focus: only when the session is ready and the `focus` prop is on.
    @Override
    public boolean onSingleTapUp(MotionEvent e) {
        Log.i(TAG, "onSingleTapUp X:" + e.getX() + ",Y:" + e.getY());
        if (mIsReady && focus) {
            setFocusAreaIndicator();
            try {
                mMediaStreamingManager.doSingleTapUp((int) e.getX(), (int) e.getY());
            } catch (Exception ex) {
                Log.e(TAG, ex.getMessage());
            }
            return true;
        }
        return false;
    }

    // Pinch zoom: clamp the scaled value and throttle updates to the camera.
    @Override
    public boolean onZoomValueChanged(float factor) {
        if (mIsReady && mMediaStreamingManager.isZoomSupported()) {
            mCurrentZoom = (int) (mMaxZoom * factor);
            mCurrentZoom = Math.min(mCurrentZoom, mMaxZoom);
            mCurrentZoom = Math.max(0, mCurrentZoom);
            Log.d(TAG, "zoom ongoing, scale: " + mCurrentZoom + ",factor:" + factor + ",maxZoom:" + mMaxZoom);
            if (!mHandler.hasMessages(MSG_SET_ZOOM)) {
                mHandler.sendMessageDelayed(mHandler.obtainMessage(MSG_SET_ZOOM), ZOOM_MINIMUM_WAIT_MILLIS);
                return true;
            }
        }
        return false;
    }

    @Override
    public void onHostResume() {
        mMediaStreamingManager.resume();
    }

    @Override
    public void onHostPause() {
        mHandler.removeCallbacksAndMessages(null);
        mMediaStreamingManager.pause();
    }

    @Override
    public void onHostDestroy() {
        mMediaStreamingManager.destroy();
    }

    // Main-looper handler serializing start/stop/zoom requests. The
    // blocking startStreaming() SDK call runs on a background thread.
    protected Handler mHandler = new Handler(Looper.getMainLooper()) {
        @Override
        public void handleMessage(Message msg) {
            switch (msg.what) {
                case MSG_START_STREAMING:
                    new Thread(new Runnable() {
                        @Override
                        public void run() {
                            boolean res = mMediaStreamingManager.startStreaming();
                            Log.i(TAG, "res:" + res);
                        }
                    }).start();
                    break;
                case MSG_STOP_STREAMING:
                    boolean res = mMediaStreamingManager.stopStreaming();
                    break;
                case MSG_SET_ZOOM:
                    mMediaStreamingManager.setZoomValue(mCurrentZoom);
                    break;
                default:
                    Log.e(TAG, "Invalid message");
            }
        }
    };

    private void startStreaming() {
        mHandler.removeCallbacksAndMessages(null);
        mHandler.sendMessageDelayed(mHandler.obtainMessage(MSG_START_STREAMING), 50);
    }

    private void stopStreaming() {
        mHandler.removeCallbacksAndMessages(null);
        mHandler.sendMessageDelayed(mHandler.obtainMessage(MSG_STOP_STREAMING), 50);
    }

    // Resolver chain: DNSPod free HTTP resolver, system DNS, direct UDP
    // resolver against DNSPod's public IP.
    private DnsManager getMyDnsManager() {
        IResolver r0 = new DnspodFree();
        IResolver r1 = AndroidDnsServer.defaultResolver();
        IResolver r2 = null;
        try {
            r2 = new Resolver(InetAddress.getByName("119.29.29.29"));
        } catch (IOException ex) {
            ex.printStackTrace();
        }
        // Fix: do not hand DnsManager an array containing a null resolver
        // when construction of the UDP resolver failed.
        IResolver[] resolvers = (r2 != null)
                ? new IResolver[]{r0, r1, r2}
                : new IResolver[]{r0, r1};
        return new DnsManager(NetworkInfo.normal, resolvers);
    }
}
================================================
FILE: android/src/main/java/com/pili/rnpili/support/Config.java
================================================
package com.pili.rnpili.support;
import android.content.pm.ActivityInfo;
import com.qiniu.pili.droid.streaming.StreamingProfile;
/**
* Created by jerikc on 15/12/8.
*/
// Static build-time configuration for the streaming module.
public class Config {
// Enables SDK debug logging when true.
public static final boolean DEBUG_MODE = false;
// Enables the preview filter pipeline when true.
public static final boolean FILTER_ENABLED = false;
// Default encoding size level passed to StreamingProfile.setEncodingSizeLevel().
public static final int ENCODING_LEVEL = StreamingProfile.VIDEO_ENCODING_HEIGHT_480;
// Fixed screen orientation for the streaming Activity.
public static final int SCREEN_ORIENTATION = ActivityInfo.SCREEN_ORIENTATION_PORTRAIT;
// Intent extra key carrying the stream description JSON.
public static final String EXTRA_KEY_STREAM_JSON = "stream_json_str";
public static final String HINT_ENCODING_ORIENTATION_CHANGED =
"Encoding orientation had been changed. Stop streaming first and restart streaming will take effect";
}
================================================
FILE: android/src/main/java/com/pili/rnpili/support/FocusIndicatorRotateLayout.java
================================================
//package com.pili.rnpili.support;
//
//import android.annotation.TargetApi;
//import android.content.Context;
//import android.os.Build;
//import android.util.AttributeSet;
//import android.util.Log;
//
//import com.pili.rnpili.R;
//import com.qiniu.pili.droid.streaming.ui.FocusIndicator;
//
//// A view that indicates the focus area or the metering area.
//public class FocusIndicatorRotateLayout extends RotateLayout implements FocusIndicator {
// private static final String TAG = "FocusIndicatorLayout";
//
// // Sometimes continuous autofucus starts and stops several times quickly.
// // These states are used to make sure the animation is run for at least some
// // time.
// private int mState;
// private static final int STATE_IDLE = 0;
// private static final int STATE_FOCUSING = 1;
// private static final int STATE_FINISHING = 2;
//
// private Runnable mDisappear = new Disappear();
// private Runnable mEndAction = new EndAction();
//
// private static final int SCALING_UP_TIME = 1000;
// private static final int SCALING_DOWN_TIME = 200;
// private static final int DISAPPEAR_TIMEOUT = 200;
//
// public FocusIndicatorRotateLayout(Context context, AttributeSet attrs) {
// super(context, attrs);
// }
//
// private void setDrawable(int resid) {
// mChild.setBackgroundDrawable(getResources().getDrawable(resid));
// }
//
// @Override
// @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
// public void showStart() {
// Log.i(TAG, "showStart");
// if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) {
// return;
// }
// if (mState == STATE_IDLE) {
// setDrawable(R.drawable.ic_focus_focusing);
// animate().withLayer().setDuration(SCALING_UP_TIME)
// .scaleX(1.5f).scaleY(1.5f);
// mState = STATE_FOCUSING;
// }
// }
//
// @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
// @Override
// public void showSuccess(boolean timeout) {
// Log.i(TAG, "showSuccess");
// if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) {
// return;
// }
// if (mState == STATE_FOCUSING) {
// setDrawable(R.drawable.ic_focus_focused);
// animate().withLayer().setDuration(SCALING_DOWN_TIME).scaleX(1f)
// .scaleY(1f).withEndAction(timeout ? mEndAction : null);
// mState = STATE_FINISHING;
// }
// }
//
// @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
// @Override
// public void showFail(boolean timeout) {
// Log.i(TAG, "showFail");
// if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) {
// return;
// }
// if (mState == STATE_FOCUSING) {
// setDrawable(R.drawable.ic_focus_failed);
// animate().withLayer().setDuration(SCALING_DOWN_TIME).scaleX(1f)
// .scaleY(1f).withEndAction(timeout ? mEndAction : null);
// mState = STATE_FINISHING;
// }
// }
//
// @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
// @Override
// public void clear() {
// Log.i(TAG, "clear");
// if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) {
// return;
// }
// animate().cancel();
// removeCallbacks(mDisappear);
// mDisappear.run();
// setScaleX(1f);
// setScaleY(1f);
// }
//
// private class EndAction implements Runnable {
// @Override
// public void run() {
// // Keep the focus indicator for some time.
// postDelayed(mDisappear, DISAPPEAR_TIMEOUT);
// }
// }
//
// private class Disappear implements Runnable {
// @Override
// public void run() {
// mChild.setBackgroundDrawable(null);
// mState = STATE_IDLE;
// }
// }
//}
================================================
FILE: android/src/main/java/com/pili/rnpili/support/Jsons.java
================================================
package com.pili.rnpili.support;
import com.facebook.react.bridge.ReadableArray;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.ReadableMapKeySetIterator;
import com.facebook.react.bridge.ReadableType;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
/**
* Created by buhe on 16/5/5.
*/
public class Jsons {

    /**
     * Converts a React Native {@link ReadableMap} into an org.json
     * {@link JSONObject}.
     *
     * <p>Entries that cannot be encoded are skipped silently (best-effort
     * conversion, matching the original behavior).</p>
     *
     * @param readableMap the map to convert; may be null
     * @return the converted object, or null when the input is null or empty
     *         (historical contract preserved: empty maps yield null, not {})
     */
    public static JSONObject readableMapToJson(ReadableMap readableMap) {
        if (readableMap == null) {
            return null;
        }
        ReadableMapKeySetIterator iterator = readableMap.keySetIterator();
        if (!iterator.hasNextKey()) {
            // Preserve the original contract: an empty map yields null.
            return null;
        }
        JSONObject jsonObject = new JSONObject();
        while (iterator.hasNextKey()) {
            String key = iterator.nextKey();
            try {
                putEntry(jsonObject, key, readableMap);
            } catch (JSONException ex) {
                // Best-effort: skip entries that cannot be encoded instead of
                // failing the whole conversion (original behavior).
            }
        }
        return jsonObject;
    }

    // Encodes a single map entry into the target JSON object.
    private static void putEntry(JSONObject target, String key, ReadableMap source)
            throws JSONException {
        switch (source.getType(key)) {
            case Null:
                // JSONObject.put(key, (Object) null) would REMOVE the key;
                // use the explicit NULL sentinel so the key is kept as null.
                target.put(key, JSONObject.NULL);
                break;
            case Boolean:
                target.put(key, source.getBoolean(key));
                break;
            case Number:
                // A JS number can be int or double; getInt() fails for
                // fractional values. Read as double and narrow only when the
                // value is integral so 1 is emitted as 1, not 1.0.
                target.put(key, narrowNumber(source.getDouble(key)));
                break;
            case String:
                target.put(key, source.getString(key));
                break;
            case Map:
                target.put(key, readableMapToJson(source.getMap(key)));
                break;
            case Array:
                // Convert element by element; putting the ReadableArray object
                // directly would serialize via toString() into garbage. Also
                // fixes the missing 'break' of the original (fell through to
                // default, which was harmless but accidental).
                target.put(key, readableArrayToJsonArray(source.getArray(key)));
                break;
            default:
                // Unknown type: do nothing and fail silently, as before.
                break;
        }
    }

    // Emits an integral double as a long so JSON output has no spurious ".0".
    private static Object narrowNumber(double value) {
        if (!Double.isNaN(value) && !Double.isInfinite(value) && value == Math.rint(value)) {
            return (long) value;
        }
        return value;
    }

    /**
     * Converts a React Native {@link ReadableArray} into a {@link JSONArray},
     * recursing into nested maps and arrays.
     *
     * @param array the array to convert; may be null
     * @return the converted array (empty when the input is null)
     * @throws JSONException if a value cannot be encoded (e.g. NaN/Infinity)
     */
    private static JSONArray readableArrayToJsonArray(ReadableArray array) throws JSONException {
        JSONArray jsonArray = new JSONArray();
        if (array == null) {
            return jsonArray;
        }
        for (int i = 0; i < array.size(); i++) {
            switch (array.getType(i)) {
                case Null:
                    jsonArray.put(JSONObject.NULL);
                    break;
                case Boolean:
                    jsonArray.put(array.getBoolean(i));
                    break;
                case Number:
                    jsonArray.put(narrowNumber(array.getDouble(i)));
                    break;
                case String:
                    jsonArray.put(array.getString(i));
                    break;
                case Map:
                    jsonArray.put(readableMapToJson(array.getMap(i)));
                    break;
                case Array:
                    jsonArray.put(readableArrayToJsonArray(array.getArray(i)));
                    break;
                default:
                    break;
            }
        }
        return jsonArray;
    }
}
================================================
FILE: android/src/main/java/com/pili/rnpili/support/MediaController.java
================================================
//package com.pili.rnpili.support;
//
//import android.annotation.SuppressLint;
//import android.content.Context;
//import android.content.res.Resources;
//import android.graphics.Rect;
//import android.media.AudioManager;
//import android.os.Build;
//import android.os.Handler;
//import android.os.Message;
//import android.util.AttributeSet;
//import android.util.Log;
//import android.view.Gravity;
//import android.view.KeyEvent;
//import android.view.LayoutInflater;
//import android.view.MotionEvent;
//import android.view.View;
//import android.widget.FrameLayout;
//import android.widget.ImageButton;
//import android.widget.PopupWindow;
//import android.widget.ProgressBar;
//import android.widget.SeekBar;
//import android.widget.TextView;
//
//import com.pili.pldroid.player.IMediaController;
//
//import java.util.Locale;
//
///**
// * You can write a custom MediaController instead of this class
// * A MediaController widget must implement all the interface defined by com.pili.pldroid.player.IMediaController
// */
//public class MediaController extends FrameLayout implements IMediaController {
//
// private static final String TAG = "PLMediaController";
// private IMediaController.MediaPlayerControl mPlayer;
// private Context mContext;
// private PopupWindow mWindow;
// private int mAnimStyle;
// private View mAnchor;
// private View mRoot;
// private ProgressBar mProgress;
// private TextView mEndTime, mCurrentTime;
// private long mDuration;
// private boolean mShowing;
// private boolean mDragging;
// private boolean mInstantSeeking = true;
// private static int sDefaultTimeout = 3000;
// private static final int SEEK_TO_POST_DELAY_MILLIS = 200;
//
// private static final int FADE_OUT = 1;
// private static final int SHOW_PROGRESS = 2;
// private boolean mFromXml = false;
// private ImageButton mPauseButton;
// private ImageButton mFfwdButton;
// private ImageButton mRewButton;
// private ImageButton mNextButton;
// private ImageButton mPrevButton;
//
// private boolean mUseFastForward;
//
// private static final int IC_MEDIA_PAUSE_ID = Resources.getSystem().getIdentifier("ic_media_pause","drawable", "android");
// private static final int IC_MEDIA_PLAY_ID = Resources.getSystem().getIdentifier("ic_media_play","drawable", "android");
// private static final int MEDIA_CONTROLLER_ID = Resources.getSystem().getIdentifier("media_controller", "layout", "android");
// private static final int PRV_BUTTON_ID = Resources.getSystem().getIdentifier("prev","id", "android");
// private static final int FFWD_BUTTON_ID = Resources.getSystem().getIdentifier("ffwd","id", "android");
// private static final int NEXT_BUTTON_ID = Resources.getSystem().getIdentifier("next","id", "android");
// private static final int REW_BUTTON_ID = Resources.getSystem().getIdentifier("rew","id", "android");
// private static final int PAUSE_BUTTON_ID = Resources.getSystem().getIdentifier("pause","id", "android");
// private static final int MEDIACONTROLLER_PROGRESS_ID = Resources.getSystem().getIdentifier("mediacontroller_progress","id", "android");
// private static final int END_TIME_ID = Resources.getSystem().getIdentifier("time","id", "android");
// private static final int CURRENT_TIME_ID = Resources.getSystem().getIdentifier("time_current","id", "android");
//
// private AudioManager mAM;
// private Runnable mLastSeekBarRunnable;
// private boolean mDisableProgress = false;
//
// public MediaController(Context context, AttributeSet attrs) {
// super(context, attrs);
// mRoot = this;
// mFromXml = true;
// initController(context);
// }
//
// public MediaController(Context context) {
// super(context);
// if (!mFromXml && initController(context))
// initFloatingWindow();
// }
//
// public MediaController(Context context, boolean useFastForward, boolean disableProgressBar) {
// this(context);
// mUseFastForward = useFastForward;
// mDisableProgress = disableProgressBar;
// }
//
// public MediaController(Context context, boolean useFastForward) {
// this(context);
// mUseFastForward = useFastForward;
// }
//
// private boolean initController(Context context) {
// mUseFastForward = true;
// mContext = context;
// mAM = (AudioManager) mContext.getSystemService(Context.AUDIO_SERVICE);
// return true;
// }
//
// @Override
// public void onFinishInflate() {
// if (mRoot != null)
// initControllerView(mRoot);
// super.onFinishInflate();
// }
//
// private void initFloatingWindow() {
// mWindow = new PopupWindow(mContext);
// mWindow.setFocusable(false);
// mWindow.setBackgroundDrawable(null);
// mWindow.setOutsideTouchable(true);
// mAnimStyle = android.R.style.Animation;
// }
//
// /**
// * Create the view that holds the widgets that control playback. Derived
// * classes can override this to create their own.
// *
// * @return The controller view.
// */
// protected View makeControllerView() {
// return ((LayoutInflater) mContext
// .getSystemService(Context.LAYOUT_INFLATER_SERVICE)).inflate(MEDIA_CONTROLLER_ID, this);
// }
//
// private void initControllerView(View v) {
// // By default these are hidden.
// mPrevButton = (ImageButton) v.findViewById(PRV_BUTTON_ID);
// if (mPrevButton != null) {
// mPrevButton.setVisibility(View.GONE);
// }
// mNextButton = (ImageButton) v.findViewById(NEXT_BUTTON_ID);
// if (mNextButton != null) {
// mNextButton.setVisibility(View.GONE);
// }
//
// mFfwdButton = (ImageButton) v.findViewById(FFWD_BUTTON_ID);
// if (mFfwdButton != null) {
// mFfwdButton.setOnClickListener(mFfwdListener);
// if (!mFromXml) {
// mFfwdButton.setVisibility(mUseFastForward ? View.VISIBLE : View.GONE);
// }
// }
//
// mRewButton = (ImageButton) v.findViewById(REW_BUTTON_ID);
// if (mRewButton != null) {
// mRewButton.setOnClickListener(mRewListener);
// if (!mFromXml) {
// mRewButton.setVisibility(mUseFastForward ? View.VISIBLE : View.GONE);
// }
// }
// mPauseButton = (ImageButton) v.findViewById(PAUSE_BUTTON_ID);
// if (mPauseButton != null) {
// mPauseButton.requestFocus();
// mPauseButton.setOnClickListener(mPauseListener);
// }
//
// mProgress = (ProgressBar) v.findViewById(MEDIACONTROLLER_PROGRESS_ID);
// if (mProgress != null) {
// if (mProgress instanceof SeekBar) {
// SeekBar seeker = (SeekBar) mProgress;
// seeker.setOnSeekBarChangeListener(mSeekListener);
// seeker.setThumbOffset(1);
// }
// mProgress.setMax(1000);
// mProgress.setEnabled(!mDisableProgress);
// }
//
// mEndTime = (TextView) v.findViewById(END_TIME_ID);
// mCurrentTime = (TextView) v.findViewById(CURRENT_TIME_ID);
// }
//
// /**
// * Control the action when the seekbar dragged by user
// *
// * @param seekWhenDragging
// * True the media will seek periodically
// */
// public void setInstantSeeking(boolean seekWhenDragging) {
// mInstantSeeking = seekWhenDragging;
// }
//
// private void disableUnsupportedButtons() {
// try {
// if (mPauseButton != null && !mPlayer.canPause())
// mPauseButton.setEnabled(false);
// } catch (IncompatibleClassChangeError ex) {
// }
// }
//
// /**
// * <p>
// * Change the animation style resource for this controller.
// * </p>
// *
// * <p>
// * If the controller is showing, calling this method will take effect only
// * the next time the controller is shown.
// * </p>
// *
// * @param animationStyle
// * animation style to use when the controller appears and disappears.
// * Set to -1 for the default animation, 0 for no animation,
// * or a resource identifier for an explicit animation.
// *
// */
// public void setAnimationStyle(int animationStyle) {
// mAnimStyle = animationStyle;
// }
//
// public interface OnShownListener {
// public void onShown();
// }
//
// private OnShownListener mShownListener;
//
// public void setOnShownListener(OnShownListener l) {
// mShownListener = l;
// }
//
// public interface OnHiddenListener {
// public void onHidden();
// }
//
// private OnHiddenListener mHiddenListener;
//
// public void setOnHiddenListener(OnHiddenListener l) {
// mHiddenListener = l;
// }
//
// @SuppressLint("HandlerLeak")
// private Handler mHandler = new Handler() {
// @Override
// public void handleMessage(Message msg) {
// long pos;
// switch (msg.what) {
// case FADE_OUT:
// hide();
// break;
// case SHOW_PROGRESS:
// pos = setProgress();
// if (!mDragging && mShowing) {
// msg = obtainMessage(SHOW_PROGRESS);
// sendMessageDelayed(msg, 1000 - (pos % 1000));
// updatePausePlay();
// }
// break;
// }
// }
// };
//
// private long setProgress() {
// if (mPlayer == null || mDragging)
// return 0;
//
// long position = mPlayer.getCurrentPosition();
// long duration = mPlayer.getDuration();
// if (mProgress != null) {
// if (duration > 0) {
// long pos = 1000L * position / duration;
// mProgress.setProgress((int) pos);
// }
// int percent = mPlayer.getBufferPercentage();
// mProgress.setSecondaryProgress(percent * 10);
// }
//
// mDuration = duration;
//
// if (mEndTime != null)
// mEndTime.setText(generateTime(mDuration));
// if (mCurrentTime != null)
// mCurrentTime.setText(generateTime(position));
//
// return position;
// }
//
// private static String generateTime(long position) {
// int totalSeconds = (int) (position / 1000);
//
// int seconds = totalSeconds % 60;
// int minutes = (totalSeconds / 60) % 60;
// int hours = totalSeconds / 3600;
//
// if (hours > 0) {
// return String.format(Locale.US, "%02d:%02d:%02d", hours, minutes,
// seconds).toString();
// } else {
// return String.format(Locale.US, "%02d:%02d", minutes, seconds)
// .toString();
// }
// }
//
// @Override
// public boolean onTouchEvent(MotionEvent event) {
// show(sDefaultTimeout);
// return true;
// }
//
// @Override
// public boolean onTrackballEvent(MotionEvent ev) {
// show(sDefaultTimeout);
// return false;
// }
//
// @Override
// public boolean dispatchKeyEvent(KeyEvent event) {
// int keyCode = event.getKeyCode();
// if (event.getRepeatCount() == 0
// && (keyCode == KeyEvent.KEYCODE_HEADSETHOOK
// || keyCode == KeyEvent.KEYCODE_MEDIA_PLAY_PAUSE || keyCode == KeyEvent.KEYCODE_SPACE)) {
// doPauseResume();
// show(sDefaultTimeout);
// if (mPauseButton != null)
// mPauseButton.requestFocus();
// return true;
// } else if (keyCode == KeyEvent.KEYCODE_MEDIA_STOP) {
// if (mPlayer.isPlaying()) {
// mPlayer.pause();
// updatePausePlay();
// }
// return true;
// } else if (keyCode == KeyEvent.KEYCODE_BACK
// || keyCode == KeyEvent.KEYCODE_MENU) {
// hide();
// return true;
// } else {
// show(sDefaultTimeout);
// }
// return super.dispatchKeyEvent(event);
// }
//
// private OnClickListener mPauseListener = new OnClickListener() {
// public void onClick(View v) {
// doPauseResume();
// show(sDefaultTimeout);
// }
// };
//
// private void updatePausePlay() {
// if (mRoot == null || mPauseButton == null)
// return;
//
// if (mPlayer.isPlaying())
// mPauseButton.setImageResource(IC_MEDIA_PAUSE_ID);
// else
// mPauseButton.setImageResource(IC_MEDIA_PLAY_ID);
// }
//
// private void doPauseResume() {
// if (mPlayer.isPlaying())
// mPlayer.pause();
// else
// mPlayer.start();
// updatePausePlay();
// }
//
// private SeekBar.OnSeekBarChangeListener mSeekListener = new SeekBar.OnSeekBarChangeListener() {
//
// public void onStartTrackingTouch(SeekBar bar) {
// mDragging = true;
// show(3600000);
// mHandler.removeMessages(SHOW_PROGRESS);
// if (mInstantSeeking)
// mAM.setStreamMute(AudioManager.STREAM_MUSIC, true);
// }
//
// public void onProgressChanged(SeekBar bar, int progress, boolean fromuser) {
// if (!fromuser)
// return;
//
// final int newposition = (int) (mDuration * progress) / 1000;
// String time = generateTime(newposition);
// if (mInstantSeeking) {
// mHandler.removeCallbacks(mLastSeekBarRunnable);
// mLastSeekBarRunnable = new Runnable() {
// @Override
// public void run() {
// mPlayer.seekTo(newposition);
// }
// };
// mHandler.postDelayed(mLastSeekBarRunnable, SEEK_TO_POST_DELAY_MILLIS);
// }
// if (mCurrentTime != null)
// mCurrentTime.setText(time);
// }
//
// public void onStopTrackingTouch(SeekBar bar) {
// if (!mInstantSeeking)
// mPlayer.seekTo((int)(mDuration * bar.getProgress()) / 1000);
//
// show(sDefaultTimeout);
// mHandler.removeMessages(SHOW_PROGRESS);
// mAM.setStreamMute(AudioManager.STREAM_MUSIC, false);
// mDragging = false;
// mHandler.sendEmptyMessageDelayed(SHOW_PROGRESS, 1000);
// }
// };
//
// private OnClickListener mRewListener = new OnClickListener() {
// public void onClick(View v) {
// int pos = (int)mPlayer.getCurrentPosition();
// pos -= 5000; // milliseconds
// mPlayer.seekTo(pos);
// setProgress();
//
// show(sDefaultTimeout);
// }
// };
//
// private OnClickListener mFfwdListener = new OnClickListener() {
// public void onClick(View v) {
// int pos = (int)mPlayer.getCurrentPosition();
// pos += 15000; // milliseconds
// mPlayer.seekTo(pos);
// setProgress();
//
// show(sDefaultTimeout);
// }
// };
//
// /**
// * Set the view that acts as the anchor for the control view.
// *
// * - This can for example be a VideoView, or your Activity's main view.
// * - AudioPlayer has no anchor view, so the view parameter will be null.
// *
// * @param view
// * The view to which to anchor the controller when it is visible.
// */
// @Override
// public void setAnchorView(View view) {
// mAnchor = view;
// if (mAnchor == null) {
// sDefaultTimeout = 0; // show forever
// }
// if (!mFromXml) {
// removeAllViews();
// mRoot = makeControllerView();
// mWindow.setContentView(mRoot);
// mWindow.setWidth(LayoutParams.MATCH_PARENT);
// mWindow.setHeight(LayoutParams.WRAP_CONTENT);
// }
// initControllerView(mRoot);
// }
//
// @Override
// public void setMediaPlayer(MediaPlayerControl player) {
// mPlayer = player;
// updatePausePlay();
// }
//
// @Override
// public void show() {
// show(sDefaultTimeout);
// }
//
// /**
// * Show the controller on screen. It will go away automatically after
// * 'timeout' milliseconds of inactivity.
// *
// * @param timeout
// * The timeout in milliseconds. Use 0 to show the controller until hide() is called.
// */
// @Override
// public void show(int timeout) {
// if (!mShowing) {
// if (mAnchor != null && mAnchor.getWindowToken() != null) {
// if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
// mAnchor.setSystemUiVisibility(View.SYSTEM_UI_FLAG_VISIBLE);
// }
// }
// if (mPauseButton != null)
// mPauseButton.requestFocus();
// disableUnsupportedButtons();
//
// if (mFromXml) {
// setVisibility(View.VISIBLE);
// } else {
// int[] location = new int[2];
//
// if (mAnchor != null) {
// mAnchor.getLocationOnScreen(location);
// Rect anchorRect = new Rect(location[0], location[1],
// location[0] + mAnchor.getWidth(), location[1]
// + mAnchor.getHeight());
//
// mWindow.setAnimationStyle(mAnimStyle);
// mWindow.showAtLocation(mAnchor, Gravity.BOTTOM,
// anchorRect.left, 0);
// } else {
// Rect anchorRect = new Rect(location[0], location[1],
// location[0] + mRoot.getWidth(), location[1]
// + mRoot.getHeight());
//
// mWindow.setAnimationStyle(mAnimStyle);
// mWindow.showAtLocation(mRoot, Gravity.BOTTOM,
// anchorRect.left, 0);
// }
// }
// mShowing = true;
// if (mShownListener != null)
// mShownListener.onShown();
// }
// updatePausePlay();
// mHandler.sendEmptyMessage(SHOW_PROGRESS);
//
// if (timeout != 0) {
// mHandler.removeMessages(FADE_OUT);
// mHandler.sendMessageDelayed(mHandler.obtainMessage(FADE_OUT),
// timeout);
// }
// }
//
// @Override
// public boolean isShowing() {
// return mShowing;
// }
//
// @Override
// public void hide() {
// if (mShowing) {
// if (mAnchor != null) {
// if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
// //mAnchor.setSystemUiVisibility(View.SYSTEM_UI_FLAG_HIDE_NAVIGATION);
// }
// }
// try {
// mHandler.removeMessages(SHOW_PROGRESS);
// if (mFromXml)
// setVisibility(View.GONE);
// else
// mWindow.dismiss();
// } catch (IllegalArgumentException ex) {
// Log.d(TAG, "MediaController already removed");
// }
// mShowing = false;
// if (mHiddenListener != null)
// mHiddenListener.onHidden();
// }
// }
//
// @Override
// public void setEnabled(boolean enabled) {
// if (mPauseButton != null) {
// mPauseButton.setEnabled(enabled);
// }
// if (mFfwdButton != null) {
// mFfwdButton.setEnabled(enabled);
// }
// if (mRewButton != null) {
// mRewButton.setEnabled(enabled);
// }
// if (mProgress != null && !mDisableProgress)
// mProgress.setEnabled(enabled);
// disableUnsupportedButtons();
// super.setEnabled(enabled);
// }
//}
================================================
FILE: android/src/main/java/com/pili/rnpili/support/Rotatable.java
================================================
package com.pili.rnpili.support;
/**
* Created by jerikc on 16/2/5.
*/
/** A component whose content can be rotated to a given orientation. */
public interface Rotatable {
    /**
     * Rotates the component.
     *
     * @param orientation target orientation in degrees (0/90/180/270 in
     *                    practice; see RotateLayout)
     * @param animation   set to true to have animation when rotating
     */
    void setOrientation(int orientation, boolean animation);
}
================================================
FILE: android/src/main/java/com/pili/rnpili/support/RotateLayout.java
================================================
package com.pili.rnpili.support;
import android.annotation.TargetApi;
import android.content.Context;
import android.os.Build;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewGroup;
// A RotateLayout is designed to display a single item and provides the
// capabilities to rotate the item.
public class RotateLayout extends ViewGroup implements Rotatable {
    @SuppressWarnings("unused")
    private static final String TAG = "RotateLayout";

    // Current rotation in degrees; only 0/90/180/270 are handled below.
    private int mOrientation;
    // The single child this layout rotates.
    protected View mChild;

    public RotateLayout(Context context, AttributeSet attrs) {
        super(context, attrs);
        // The transparent background here is a workaround of the render issue
        // happened when the view is rotated as the device's orientation
        // changed. The view looks fine in landscape. After rotation, the view
        // is invisible.
        setBackgroundResource(android.R.color.transparent);
    }

    @TargetApi(Build.VERSION_CODES.HONEYCOMB)
    @Override
    protected void onFinishInflate() {
        super.onFinishInflate();
        // Rotation pivots on the child's top-left corner; onMeasure() below
        // compensates with translation so the child stays inside the bounds.
        mChild = getChildAt(0);
        mChild.setPivotX(0);
        mChild.setPivotY(0);
    }

    // Allows supplying the child programmatically instead of via inflation.
    // NOTE(review): unlike onFinishInflate(), this does not set the pivot
    // point on the child — confirm callers handle that.
    public void setChild(View mChild) {
        this.mChild = mChild;
    }

    @Override
    protected void onLayout(
            boolean change, int left, int top, int right, int bottom) {
        int width = right - left;
        int height = bottom - top;
        switch (mOrientation) {
            case 0:
            case 180:
                mChild.layout(0, 0, width, height);
                break;
            case 90:
            case 270:
                // Width/height swap for the side orientations.
                mChild.layout(0, 0, height, width);
                break;
        }
    }

    @TargetApi(Build.VERSION_CODES.HONEYCOMB)
    @Override
    protected void onMeasure(int widthSpec, int heightSpec) {
        int w = 0, h = 0;
        switch(mOrientation) {
            case 0:
            case 180:
                measureChild(mChild, widthSpec, heightSpec);
                w = mChild.getMeasuredWidth();
                h = mChild.getMeasuredHeight();
                break;
            case 90:
            case 270:
                // Measure with the specs swapped, then report swapped sizes.
                measureChild(mChild, heightSpec, widthSpec);
                w = mChild.getMeasuredHeight();
                h = mChild.getMeasuredWidth();
                break;
        }
        setMeasuredDimension(w, h);
        // Translate the child so that, after rotating around its top-left
        // corner, it still lands inside this view's bounds.
        switch (mOrientation) {
            case 0:
                mChild.setTranslationX(0);
                mChild.setTranslationY(0);
                break;
            case 90:
                mChild.setTranslationX(0);
                mChild.setTranslationY(h);
                break;
            case 180:
                mChild.setTranslationX(w);
                mChild.setTranslationY(h);
                break;
            case 270:
                mChild.setTranslationX(w);
                mChild.setTranslationY(0);
                break;
        }
        mChild.setRotation(-mOrientation);
    }

    @Override
    public boolean shouldDelayChildPressedState() {
        return false;
    }

    // Rotate the view counter-clockwise
    @Override
    public void setOrientation(int orientation, boolean animation) {
        // The 'animation' flag is accepted for the Rotatable contract but is
        // not used by this implementation.
        orientation = orientation % 360;
        if (mOrientation == orientation) return;
        mOrientation = orientation;
        requestLayout();
    }
}
================================================
FILE: android/src/test/java/com/pili/rnpili/ExampleUnitTest.java
================================================
package com.pili.rnpili;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* To work on unit tests, switch the Test Artifact in the Build Variants view.
*/
public class ExampleUnitTest {
    // Template sanity test generated by Android Studio; verifies the unit
    // test toolchain runs, not any project behavior.
    @Test
    public void addition_isCorrect() throws Exception {
        assertEquals(4, 2 + 2);
    }
}
================================================
FILE: index.js
================================================
/**
* Created by buhe on 16/4/28.
*/
module.exports = {
Streaming: require('./Streaming'),
AudioStreaming: require('./AudioStreaming'),
Player: require('./Player'),
StreamingConst: require('./StreamingConst')
};
================================================
FILE: ios/RCTPili/RCTPili/Info.plist
================================================
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>FMWK</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>$(CURRENT_PROJECT_VERSION)</string>
<key>NSPrincipalClass</key>
<string></string>
</dict>
</plist>
================================================
FILE: ios/RCTPili/RCTPili/RCTAudioStreaming.h
================================================
//
// RCTStreaming.h
// RCTPili
//
// Created by guguyanhua on 16/5/26.
// Copyright © 2016年 pili. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "RCTView.h"
#import "PLCameraStreamingKit.h"
#import "Reachability.h"
#import <asl.h>
@class RCTEventDispatcher;
// Native view backing the React Native AudioStreaming component. Despite the
// camera-flavored SDK types, the implementation creates the session with nil
// video configurations, i.e. audio-only streaming.
@interface RCTAudioStreaming : UIView<PLCameraStreamingSessionDelegate,PLStreamingSendingBufferDelegate>
// Underlying Pili streaming session (built once rtmpURL and profile are set).
@property (nonatomic, strong) PLCameraStreamingSession *session;
// Serial queue on which all session start/stop work runs.
@property (nonatomic, strong) dispatch_queue_t sessionQueue;
// Connectivity monitor; streaming is stopped when the network is unreachable.
@property (nonatomic, strong) Reachability *internetReachability;
// Stream settings supplied from JS via the 'profile' prop.
@property (nonatomic, strong) NSDictionary *profile;
// RTMP push endpoint supplied from JS via the 'rtmpURL' prop.
@property (nonatomic, strong) NSString *rtmpURL;
- (instancetype)initWithEventDispatcher:(RCTEventDispatcher *)eventDispatcher NS_DESIGNATED_INITIALIZER;
@end
================================================
FILE: ios/RCTPili/RCTPili/RCTAudioStreaming.m
================================================
//
// RCTStreaming.m
// RCTPili
//
// Created by guguyanhua on 16/5/26.
// Copyright © 2016年 pili. All rights reserved.
//
#import "RCTAudioStreaming.h"
#import "RCTBridgeModule.h"
#import "UIView+React.h"
#import "RCTEventDispatcher.h"
@implementation RCTAudioStreaming{
RCTEventDispatcher *_eventDispatcher;
BOOL _started;
BOOL _muted;
}
const char *audioStateNames[] = {
"Unknow",
"Connecting",
"Connected",
"Disconnecting",
"Disconnected",
"Error"
};
const char *audioNetworkStatus[] = {
"Not Reachable",
"Reachable via WiFi",
"Reachable via CELL"
};
- (instancetype)initWithEventDispatcher:(RCTEventDispatcher *)eventDispatcher
{
if ((self = [super init])) {
[PLStreamingEnv initEnv];
_eventDispatcher = eventDispatcher;
_started = YES;
_muted = NO;
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(reachabilityChanged:) name:kReachabilityChangedNotification object:nil];
self.internetReachability = [Reachability reachabilityForInternetConnection];
[self.internetReachability startNotifier];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(handleInterruption:)
name:AVAudioSessionInterruptionNotification
object:[AVAudioSession sharedInstance]];
CGSize videoSize = CGSizeMake(480 , 640);
UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation];
if (orientation <= AVCaptureVideoOrientationLandscapeLeft) {
if (orientation > AVCaptureVideoOrientationPortraitUpsideDown) {
videoSize = CGSizeMake(640 , 480);
}
}
self.sessionQueue = dispatch_queue_create("pili.queue.streaming", DISPATCH_QUEUE_SERIAL);
}
return self;
};
- (void) setRtmpURL:(NSString *)rtmpURL
{
_rtmpURL = rtmpURL;
[self setSourceAndProfile];
}
- (void)setProfile:(NSDictionary *)profile{
_profile = profile;
[self setSourceAndProfile];
}
- (void) setSourceAndProfile{
if(self.profile && self.rtmpURL){
void (^permissionBlock)(void) = ^{
dispatch_async(self.sessionQueue, ^{
NSDictionary *audio = self.profile[@"audio"];
PLAudioCaptureConfiguration *audioCaptureConfiguration = [PLAudioCaptureConfiguration defaultConfiguration];
// 音频编码配置
PLAudioStreamingConfiguration *audioStreamingConfiguration = [PLAudioStreamingConfiguration defaultConfiguration];
AVCaptureVideoOrientation orientation = (AVCaptureVideoOrientation)(([[UIDevice currentDevice] orientation] <= UIDeviceOrientationLandscapeRight && [[UIDevice currentDevice] orientation] != UIDeviceOrientationUnknown) ? [[UIDevice currentDevice] orientation]: UIDeviceOrientationPortrait);
// 推流 session
self.session = [[PLCameraStreamingSession alloc] initWithVideoCaptureConfiguration:nil audioCaptureConfiguration:audioCaptureConfiguration videoStreamingConfiguration:nil audioStreamingConfiguration:audioStreamingConfiguration stream:nil videoOrientation:orientation];
self.session.delegate = self;
dispatch_async(dispatch_get_main_queue(), ^{
if(_muted){
[self setMuted:_muted];
}
[self startSession];
});
});
};
void (^noAccessBlock)(void) = ^{
UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:NSLocalizedString(@"No Access", nil)
message:NSLocalizedString(@"!", nil)
delegate:nil
cancelButtonTitle:NSLocalizedString(@"Cancel", nil)
otherButtonTitles:nil];
[alertView show];
};
switch ([PLCameraStreamingSession cameraAuthorizationStatus]) {
case PLAuthorizationStatusAuthorized:
permissionBlock();
break;
case PLAuthorizationStatusNotDetermined: {
[PLCameraStreamingSession requestCameraAccessWithCompletionHandler:^(BOOL granted) {
granted ? permissionBlock() : noAccessBlock();
}];
}
break;
default:
noAccessBlock();
break;
}
}
}
- (void)setStarted:(BOOL) started {
if(started != _started){
if(started){
[self startSession];
_started = started;
}else{
[self stopSession];
_started = started;
}
}
}
-(void)setMuted:(BOOL) muted {
_muted = muted;
[self.session setMuted:muted];
}
- (void)streamingSessionSendingBufferDidFull:(id)session {
NSString *log = @"Buffer is full";
NSLog(@"%@", log);
}
- (void)streamingSession:(id)session sendingBufferDidDropItems:(NSArray *)items {
NSString *log = @"Frame dropped";
NSLog(@"%@", log);
}
- (void)stopSession {
dispatch_async(self.sessionQueue, ^{
[self.session stop];
});
}
- (void)startSession {
dispatch_async(self.sessionQueue, ^{
NSURL *streamURL = [NSURL URLWithString:self.rtmpURL];
[self.session startWithPushURL:streamURL feedback:^(PLStreamStartStateFeedback feedback) {
dispatch_async(dispatch_get_main_queue(), ^{
NSLog(@"success ");
});
}];
});
}
- (void)cameraStreamingSession:(PLCameraStreamingSession *)session streamStatusDidUpdate:(PLStreamStatus *)status {
NSString *log = [NSString stringWithFormat:@"Stream Status: %@", status];
NSLog(@"%@", log);
}
- (void)cameraStreamingSession:(PLCameraStreamingSession *)session streamStateDidChange:(PLStreamState)state {
NSString *log = [NSString stringWithFormat:@"Stream State: %s", audioStateNames[state]];
NSLog(@"%@", log);
switch (state) {
case PLStreamStateUnknow:
[_eventDispatcher sendInputEventWithName:@"onLoading" body:@{@"target": self.reactTag}];
break;
case PLStreamStateConnecting:
[_eventDispatcher sendInputEventWithName:@"onConnecting" body:@{@"target": self.reactTag}];
break;
case PLStreamStateConnected:
[_eventDispatcher sendInputEventWithName:@"onStreaming" body:@{@"target": self.reactTag}];
break;
case PLStreamStateDisconnecting:
break;
case PLStreamStateDisconnected:
[_eventDispatcher sendInputEventWithName:@"onDisconnected" body:@{@"target": self.reactTag}];
[_eventDispatcher sendInputEventWithName:@"onShutdown" body:@{@"target": self.reactTag}]; //FIXME
break;
case PLStreamStateError:
[_eventDispatcher sendInputEventWithName:@"onIOError" body:@{@"target": self.reactTag}];
break;
default:
break;
}
}
- (void)cameraStreamingSession:(PLCameraStreamingSession *)session didDisconnectWithError:(NSError *)error {
NSString *log = [NSString stringWithFormat:@"Stream State: Error. %@", error];
NSLog(@"%@", log);
[self startSession];
}
// Reachability notification handler: stop the push session when connectivity
// is lost. NOTE(review): nothing restarts the session when the network comes
// back; reconnection is only driven by didDisconnectWithError:.
- (void)reachabilityChanged:(NSNotification *)notif{
    Reachability *curReach = [notif object];
    NSParameterAssert([curReach isKindOfClass:[Reachability class]]);
    NetworkStatus status = [curReach currentReachabilityStatus];
    if (NotReachable == status) {
        // Network gone: tear down the streaming session.
        [self stopSession];
    }
    // FIX: corrected log typo "Networkt" -> "Network".
    NSString *log = [NSString stringWithFormat:@"Network Status: %s", audioNetworkStatus[status]];
    NSLog(@"%@", log);
}
// AVAudioSession interruption handler (phone call, FaceTime, alarm, etc.).
- (void)handleInterruption:(NSNotification *)notification {
    if ([notification.name isEqualToString:AVAudioSessionInterruptionNotification]) {
        NSLog(@"Interruption notification");
        // Interruption began: only logged here — presumably the SDK pauses
        // capture itself; TODO confirm against PLCameraStreamingKit docs.
        if ([[notification.userInfo valueForKey:AVAudioSessionInterruptionTypeKey] isEqualToNumber:[NSNumber numberWithInt:AVAudioSessionInterruptionTypeBegan]]) {
            NSLog(@"InterruptionTypeBegan");
        } else {
            // the facetime iOS 9 has a bug: 1 does not send interrupt end 2 you can use application become active, and repeat set audio session acitve until success. ref http://blog.corywiles.com/broken-facetime-audio-interruptions-in-ios-9
            NSLog(@"InterruptionTypeEnded");
            // Interruption ended: reactivate the shared audio session so
            // capture/playback can resume.
            AVAudioSession *session = [AVAudioSession sharedInstance];
            [session setActive:YES error:nil];
        }
    }
}
@end
================================================
FILE: ios/RCTPili/RCTPili/RCTAudioStreamingManager.h
================================================
//
//  RCTAudioStreamingManager.h  (comment fixed: previously said "RCTStreamingManager.h")
//  RCTPili
//
//  Created by guguyanhua on 16/5/26.
//  Copyright © 2016年 pili. All rights reserved.
//
#import "RCTViewManager.h"

// React Native view manager that vends RCTAudioStreaming views (audio-only RTMP push).
@interface RCTAudioStreamingManager : RCTViewManager
@end
================================================
FILE: ios/RCTPili/RCTPili/RCTAudioStreamingManager.m
================================================
//
//  RCTAudioStreamingManager.m  (comment fixed: previously said "RCTStreamingManager.m")
//  RCTPili
//
//  Created by guguyanhua on 16/5/26.
//  Copyright © 2016年 pili. All rights reserved.
//
#import "RCTAudioStreamingManager.h"
#import "RCTAudioStreaming.h"

// Bridges the native audio-streaming view into React Native.
@implementation RCTAudioStreamingManager

RCT_EXPORT_MODULE();

@synthesize bridge = _bridge;

// Create the native view backing the JS <AudioStreaming/> component.
- (UIView *)view
{
    return [[RCTAudioStreaming alloc] initWithEventDispatcher:self.bridge.eventDispatcher];
}

// Direct events the view may dispatch to JS.
- (NSArray *)customDirectEventTypes
{
    return @[
             @"onReady",
             @"onConnecting",
             @"onStreaming",
             @"onShutdown",
             @"onIOError",
             @"onDisconnected"
             ];
}

// View creation touches UIKit, so run on the main queue.
- (dispatch_queue_t)methodQueue
{
    return dispatch_get_main_queue();
}

// Props settable from JS.
RCT_EXPORT_VIEW_PROPERTY(rtmpURL, NSString);
RCT_EXPORT_VIEW_PROPERTY(profile, NSDictionary);
RCT_EXPORT_VIEW_PROPERTY(started, BOOL);
RCT_EXPORT_VIEW_PROPERTY(muted, BOOL);

@end
================================================
FILE: ios/RCTPili/RCTPili/RCTPili.h
================================================
//
// RCTPili.h
// RCTPili
//
// Created by buhe on 16/5/11.
// Copyright © 2016年 pili. All rights reserved.
//
#import <Foundation/Foundation.h>
// Empty placeholder class for the RCTPili static-library target; it exists so
// the target has at least one compiled class.
@interface RCTPili : NSObject
@end
================================================
FILE: ios/RCTPili/RCTPili/RCTPili.m
================================================
//
// RCTPili.m
// RCTPili
//
// Created by buhe on 16/5/11.
// Copyright © 2016年 pili. All rights reserved.
//
#import "RCTPili.h"
// Empty placeholder implementation — see RCTPili.h.
@implementation RCTPili
@end
================================================
FILE: ios/RCTPili/RCTPili/RCTPlayer.h
================================================
//
// RCTPlayer.h
// RCTPili
//
// Created by buhe on 16/5/12.
// Copyright © 2016年 pili. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "RCTView.h"
#import "PLPlayer.h"
@class RCTEventDispatcher;
// Native view wrapping PLPlayer to play a live stream inside React Native.
@interface RCTPlayer : UIView<PLPlayerDelegate>
// Number of reconnect attempts made after a playback error (capped in RCTPlayer.m).
@property (nonatomic, assign) int reconnectCount;
- (instancetype)initWithEventDispatcher:(RCTEventDispatcher *)eventDispatcher NS_DESIGNATED_INITIALIZER;
@end
================================================
FILE: ios/RCTPili/RCTPili/RCTPlayer.m
================================================
//
// RCTPlayer.m
// RCTPili
//
// Created by buhe on 16/5/12.
// Copyright © 2016年 pili. All rights reserved.
//
#import "RCTPlayer.h"
#import "RCTBridgeModule.h"
#import "RCTEventDispatcher.h"
#import "UIView+React.h"
@implementation RCTPlayer{
    RCTEventDispatcher *_eventDispatcher;  // emits direct events to JS
    PLPlayer *_plplayer;                   // underlying Pili player instance
    bool _started;                         // mirrors the `started` prop
    bool _muted;                           // mirrors the `muted` prop
}

// Human-readable names for PLPlayerStatus values (debug logging only).
static NSString *status[] = {
    @"PLPlayerStatusUnknow",
    @"PLPlayerStatusPreparing",
    @"PLPlayerStatusReady",
    @"PLPlayerStatusCaching",
    @"PLPlayerStatusPlaying",
    @"PLPlayerStatusPaused",
    @"PLPlayerStatusStopped",
    @"PLPlayerStatusError"
};

// Designated initializer: keeps the RN event dispatcher for JS events and
// configures the shared audio session for playback.
- (instancetype)initWithEventDispatcher:(RCTEventDispatcher *)eventDispatcher
{
    if ((self = [super init])) {
        _eventDispatcher = eventDispatcher;
        _started = YES;
        _muted = NO;
        [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:nil];
        self.reconnectCount = 0;
    }
    return self;
};

// `source` prop setter: {uri: NSString, backgroundPlay: BOOL (optional)}.
// Tears down any existing player, builds a new PLPlayer, and starts playback.
- (void) setSource:(NSDictionary *)source
{
    NSString *uri = source[@"uri"];
    // FIX: the original tested the NSNumber *pointer* for truthiness, so any
    // non-nil value — including @NO — enabled background play. -boolValue on a
    // nil receiver returns NO, preserving the old "absent means off" default.
    BOOL backgroundPlay = [source[@"backgroundPlay"] boolValue];
    PLPlayerOption *option = [PLPlayerOption defaultOption];
    // Give up after 15 seconds without receiving media packets.
    [option setOptionValue:@15 forKey:PLPlayerOptionKeyTimeoutIntervalForMediaPackets];
    if(_plplayer){
        [_plplayer stop]; //TODO: must also be called when the view is unmounted
    }
    _plplayer = [PLPlayer playerWithURL:[[NSURL alloc] initWithString:uri] option:option];
    _plplayer.delegate = self;
    _plplayer.delegateQueue = dispatch_get_main_queue();
    _plplayer.backgroundPlayEnable = backgroundPlay;
    if(backgroundPlay){
        // Resume playback when the app returns to the foreground.
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(startPlayer) name:UIApplicationWillEnterForegroundNotification object:nil];
    }
    [self setupUI];
    [self startPlayer];
}

// FIX: unregister from NSNotificationCenter; the observer added in setSource:
// was never removed. Removing a never-added observer is harmless.
- (void)dealloc
{
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}

// Embed the SDK's player view, pinned to this view's bounds via Auto Layout.
- (void)setupUI {
    if (_plplayer.status != PLPlayerStatusError) {
        // add player view
        UIView *playerView = _plplayer.playerView;
        [self addSubview:playerView];
        [playerView setTranslatesAutoresizingMaskIntoConstraints:NO];
        NSLayoutConstraint *centerX = [NSLayoutConstraint constraintWithItem:playerView attribute:NSLayoutAttributeCenterX relatedBy:NSLayoutRelationEqual toItem:self attribute:NSLayoutAttributeCenterX multiplier:1.0 constant:0];
        NSLayoutConstraint *centerY = [NSLayoutConstraint constraintWithItem:playerView attribute:NSLayoutAttributeCenterY relatedBy:NSLayoutRelationEqual toItem:self attribute:NSLayoutAttributeCenterY multiplier:1.0 constant:0];
        NSLayoutConstraint *width = [NSLayoutConstraint constraintWithItem:playerView attribute:NSLayoutAttributeWidth relatedBy:NSLayoutRelationEqual toItem:self attribute:NSLayoutAttributeWidth multiplier:1.0 constant:0];
        NSLayoutConstraint *height = [NSLayoutConstraint constraintWithItem:playerView attribute:NSLayoutAttributeHeight relatedBy:NSLayoutRelationEqual toItem:self attribute:NSLayoutAttributeHeight multiplier:1.0 constant:0];
        NSArray *constraints = [NSArray arrayWithObjects:centerX, centerY,width,height, nil];
        [self addConstraints: constraints];
    }
}

// `started` prop setter: resume/pause without rebuilding the player.
- (void) setStarted:(BOOL) started{
    if(started != _started){
        if(started){
            [_plplayer resume];
            _started = started;
        }else{
            [_plplayer pause];
            _started = started;
        }
    }
}

// `muted` prop setter: forward to the player.
- (void) setMuted:(BOOL) muted {
    _muted = muted;
    [_plplayer setMute:muted];
}

// Start playback and keep the screen awake while playing.
- (void)startPlayer {
    [UIApplication sharedApplication].idleTimerDisabled = YES;
    [_plplayer play];
    _started = true;
}

#pragma mark - <PLPlayerDelegate>

// Map PLPlayer status transitions onto RN direct events.
- (void)player:(nonnull PLPlayer *)player statusDidChange:(PLPlayerStatus)state {
    switch (state) {
        case PLPlayerStatusCaching:
            [_eventDispatcher sendInputEventWithName:@"onLoading" body:@{@"target": self.reactTag}];
            break;
        case PLPlayerStatusPlaying:
            [_eventDispatcher sendInputEventWithName:@"onPlaying" body:@{@"target": self.reactTag}];
            break;
        case PLPlayerStatusPaused:
            [_eventDispatcher sendInputEventWithName:@"onPaused" body:@{@"target": self.reactTag}];
            break;
        case PLPlayerStatusStopped:
            [_eventDispatcher sendInputEventWithName:@"onShutdown" body:@{@"target": self.reactTag}];
            break;
        case PLPlayerStatusError:
            [_eventDispatcher sendInputEventWithName:@"onError" body:@{@"target": self.reactTag , @"errorCode": [NSNumber numberWithUnsignedInt:0]}];
            break;
        default:
            break;
    }
    NSLog(@"%@", status[state]);
}

// Playback stopped with an error: attempt a bounded reconnect.
- (void)player:(nonnull PLPlayer *)player stoppedWithError:(nullable NSError *)error {
    [self tryReconnect:error];
}

// Retry playback up to 3 times with exponential backoff (0.5 * 2^count s).
// NOTE(review): the alert text shows 0.5*2^(count-1) while the actual delay is
// 0.5*2^count — left as-is to preserve existing behavior.
- (void)tryReconnect:(nullable NSError *)error {
    if (self.reconnectCount < 3) {
        _reconnectCount ++;
        UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"错误" message:[NSString stringWithFormat:@"错误 %@,播放器将在%.1f秒后进行第 %d 次重连", error.localizedDescription,0.5 * pow(2, self.reconnectCount - 1), _reconnectCount] delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
        [alert show];
        dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.5 * pow(2, self.reconnectCount) * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
            [_plplayer play];
        });
    }else {
        // Out of retries: let the screen sleep again and give up.
        [UIApplication sharedApplication].idleTimerDisabled = NO;
        NSLog(@"%@", error);
    }
}
@end
================================================
FILE: ios/RCTPili/RCTPili/RCTPlayerManager.h
================================================
//
// RCTPlayerManger.h
// RCTPili
//
// Created by buhe on 16/5/12.
// Copyright © 2016年 pili. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "RCTViewManager.h"
// React Native view manager that vends RCTPlayer views.
@interface RCTPlayerManager : RCTViewManager
@end
================================================
FILE: ios/RCTPili/RCTPili/RCTPlayerManager.m
================================================
//
// RCTPlayerManger.m
// RCTPili
//
// Created by buhe on 16/5/12.
// Copyright © 2016年 pili. All rights reserved.
//
#import "RCTPlayerManager.h"
#import "RCTPlayer.h"
// Bridges the native player view into React Native.
@implementation RCTPlayerManager

RCT_EXPORT_MODULE();

@synthesize bridge = _bridge;

// Create the native view backing the JS <Player/> component.
- (UIView *)view
{
    return [[RCTPlayer alloc] initWithEventDispatcher:self.bridge.eventDispatcher];
}

// Direct events the player view may dispatch to JS.
- (NSArray *)customDirectEventTypes
{
    return @[
             @"onLoading",
             @"onPaused",
             @"onShutdown",
             @"onError",
             @"onPlaying"
             ];
}

// View creation touches UIKit, so run on the main queue.
- (dispatch_queue_t)methodQueue
{
    return dispatch_get_main_queue();
}

// Props settable from JS.
RCT_EXPORT_VIEW_PROPERTY(source, NSDictionary);
RCT_EXPORT_VIEW_PROPERTY(started, BOOL);
RCT_EXPORT_VIEW_PROPERTY(muted, BOOL);

@end
================================================
FILE: ios/RCTPili/RCTPili/RCTStreaming.h
================================================
//
// RCTStreaming.h
// RCTPili
//
// Created by guguyanhua on 16/5/26.
// Copyright © 2016年 pili. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "RCTView.h"
#import "PLCameraStreamingKit.h"
#import "Reachability.h"
#import <asl.h>
@class RCTEventDispatcher;
// Native camera-streaming view: wraps PLCameraStreamingSession and pushes to an RTMP URL.
@interface RCTStreaming : UIView<PLCameraStreamingSessionDelegate,PLStreamingSendingBufferDelegate>
// Underlying Pili camera streaming session.
@property (nonatomic, strong) PLCameraStreamingSession *session;
// Serial queue for session start/stop work, kept off the main thread.
@property (nonatomic, strong) dispatch_queue_t sessionQueue;
// Connectivity monitor; streaming is stopped when the network drops.
@property (nonatomic, strong) Reachability *internetReachability;
// Encoding profile from JS; the video entry supplies fps/bps/maxFrameInterval.
@property (nonatomic, strong) NSDictionary *profile;
// RTMP push URL.
@property (nonatomic, strong) NSString *rtmpURL;
- (instancetype)initWithEventDispatcher:(RCTEventDispatcher *)eventDispatcher NS_DESIGNATED_INITIALIZER;
@end
================================================
FILE: ios/RCTPili/RCTPili/RCTStreaming.m
================================================
//
// RCTStreaming.m
// RCTPili
//
// Created by guguyanhua on 16/5/26.
// Copyright © 2016年 pili. All rights reserved.
//
#import "RCTStreaming.h"
#import "RCTBridgeModule.h"
#import "UIView+React.h"
#import "RCTEventDispatcher.h"
@implementation RCTStreaming{
    RCTEventDispatcher *_eventDispatcher;  // emits direct events to JS
    BOOL _started;                         // mirrors the `started` prop
    BOOL _muted;                           // mirrors the `muted` prop
    BOOL _focus;                           // mirrors the `focus` prop
    NSString *_camera;                     // "front" or "back"
}

// Human-readable names for PLStreamState values (debug logging only).
const char *stateNames[] = {
    "Unknow",
    "Connecting",
    "Connected",
    "Disconnecting",
    "Disconnected",
    "Error"
};

// Human-readable names for NetworkStatus values (debug logging only).
const char *networkStatus[] = {
    "Not Reachable",
    "Reachable via WiFi",
    "Reachable via CELL"
};

// Designated initializer: initializes the streaming environment, reachability
// monitoring, audio-interruption handling, and the serial session queue.
- (instancetype)initWithEventDispatcher:(RCTEventDispatcher *)eventDispatcher
{
    if ((self = [super init])) {
        [PLStreamingEnv initEnv];
        _eventDispatcher = eventDispatcher;
        _started = YES;
        _muted = NO;
        _focus = NO;
        _camera = @"front";
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(reachabilityChanged:) name:kReachabilityChangedNotification object:nil];
        self.internetReachability = [Reachability reachabilityForInternetConnection];
        [self.internetReachability startNotifier];
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(handleInterruption:)
                                                     name:AVAudioSessionInterruptionNotification
                                                   object:[AVAudioSession sharedInstance]];
        // FIX: removed a dead orientation-dependent `videoSize` computation —
        // the value was never read; the actual size is set in setSourceAndProfile.
        self.sessionQueue = dispatch_queue_create("pili.queue.streaming", DISPATCH_QUEUE_SERIAL);
    }
    return self;
};

// `rtmpURL` prop setter; session is (re)built once both URL and profile are set.
- (void) setRtmpURL:(NSString *)rtmpURL
{
    _rtmpURL = rtmpURL;
    [self setSourceAndProfile];
}

// `profile` prop setter; session is (re)built once both URL and profile are set.
- (void)setProfile:(NSDictionary *)profile{
    _profile = profile;
    [self setSourceAndProfile];
}

// Build the streaming session from the current profile + rtmpURL, embed its
// preview view, and start pushing. No-op until both props have arrived.
- (void) setSourceAndProfile{
    if(self.profile && self.rtmpURL){
        void (^permissionBlock)(void) = ^{
            dispatch_async(self.sessionQueue, ^{
                NSDictionary *video = self.profile[@"video"];
                // FIX: these were declared `int *` and received NSInteger values,
                // so pointer-sized garbage was passed to the encoder config.
                int fps = (int)[video[@"fps"] integerValue];
                int bps = (int)[video[@"bps"] integerValue];
                int maxFrameInterval = (int)[video[@"maxFrameInterval"] integerValue];
                // NOTE(review): profile[@"audio"] is currently ignored; audio
                // uses the SDK default configuration below.
                //TODO
                double height = 800;
                double width = 640;
                //TODO: videoProfileLevel should be chosen based on the resolution
                PLVideoStreamingConfiguration *videoStreamingConfiguration = [[PLVideoStreamingConfiguration alloc] initWithVideoSize:CGSizeMake(width, height) expectedSourceVideoFrameRate:fps videoMaxKeyframeInterval:maxFrameInterval averageVideoBitRate:bps videoProfileLevel:AVVideoProfileLevelH264Baseline31];
                PLVideoCaptureConfiguration *videoCaptureConfiguration = [PLVideoCaptureConfiguration defaultConfiguration];
                PLAudioCaptureConfiguration *audioCaptureConfiguration = [PLAudioCaptureConfiguration defaultConfiguration];
                // Audio encoding uses the SDK defaults.
                PLAudioStreamingConfiguration *audioStreamingConfiguration = [PLAudioStreamingConfiguration defaultConfiguration];
                AVCaptureVideoOrientation orientation = (AVCaptureVideoOrientation)(([[UIDevice currentDevice] orientation] <= UIDeviceOrientationLandscapeRight && [[UIDevice currentDevice] orientation] != UIDeviceOrientationUnknown) ? [[UIDevice currentDevice] orientation]: UIDeviceOrientationPortrait);
                // Create the push session.
                self.session = [[PLCameraStreamingSession alloc] initWithVideoCaptureConfiguration:videoCaptureConfiguration audioCaptureConfiguration:audioCaptureConfiguration videoStreamingConfiguration:videoStreamingConfiguration audioStreamingConfiguration:audioStreamingConfiguration stream:nil videoOrientation:orientation];
                self.session.delegate = self;
                //                UIImage *waterMark = [UIImage imageNamed:@"qiniu.png"];
                //                PLFilterHandler handler = [self.session addWaterMark:waterMark origin:CGPointMake(100, 300)];
                //                self.filterHandlers = [@[handler] mutableCopy];//TODO - watermark disabled for now
                dispatch_async(dispatch_get_main_queue(), ^{
                    // Pin the camera preview to this view's bounds via Auto Layout.
                    UIView *previewView = self.session.previewView;
                    [self addSubview:previewView];
                    [previewView setTranslatesAutoresizingMaskIntoConstraints:NO];
                    NSLayoutConstraint *centerX = [NSLayoutConstraint constraintWithItem:previewView attribute:NSLayoutAttributeCenterX relatedBy:NSLayoutRelationEqual toItem:self attribute:NSLayoutAttributeCenterX multiplier:1.0 constant:0];
                    NSLayoutConstraint *centerY = [NSLayoutConstraint constraintWithItem:previewView attribute:NSLayoutAttributeCenterY relatedBy:NSLayoutRelationEqual toItem:self attribute:NSLayoutAttributeCenterY multiplier:1.0 constant:0];
                    NSLayoutConstraint *width = [NSLayoutConstraint constraintWithItem:previewView attribute:NSLayoutAttributeWidth relatedBy:NSLayoutRelationEqual toItem:self attribute:NSLayoutAttributeWidth multiplier:1.0 constant:0];
                    NSLayoutConstraint *height = [NSLayoutConstraint constraintWithItem:previewView attribute:NSLayoutAttributeHeight relatedBy:NSLayoutRelationEqual toItem:self attribute:NSLayoutAttributeHeight multiplier:1.0 constant:0];
                    NSArray *constraints = [NSArray arrayWithObjects:centerX, centerY,width,height, nil];
                    [self addConstraints: constraints];
                    NSString *log = [NSString stringWithFormat:@"Zoom Range: [1..%.0f]", self.session.videoActiveFormat.videoMaxZoomFactor];
                    NSLog(@"%@", log);
                    // Re-apply props that may have been set before the session existed.
                    if(_focus){
                        [self.session setSmoothAutoFocusEnabled:_focus];
                        [self.session setTouchToFocusEnable:_focus];
                    }
                    if(_muted){
                        [self setMuted:_muted];
                    }
                    [self startSession];
                });
            });
        };
        void (^noAccessBlock)(void) = ^{
            UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:NSLocalizedString(@"No Access", nil)
                                                                message:NSLocalizedString(@"!", nil)
                                                               delegate:nil
                                                      cancelButtonTitle:NSLocalizedString(@"Cancel", nil)
                                                      otherButtonTitles:nil];
            [alertView show];
        };
        // Gate everything on camera permission.
        switch ([PLCameraStreamingSession cameraAuthorizationStatus]) {
            case PLAuthorizationStatusAuthorized:
                permissionBlock();
                break;
            case PLAuthorizationStatusNotDetermined: {
                [PLCameraStreamingSession requestCameraAccessWithCompletionHandler:^(BOOL granted) {
                    granted ? permissionBlock() : noAccessBlock();
                }];
            }
                break;
            default:
                noAccessBlock();
                break;
        }
    }
}

// `started` prop setter: start/stop the push session.
- (void)setStarted:(BOOL) started {
    if(started != _started){
        if(started){
            [self startSession];
            _started = started;
        }else{
            [self stopSession];
            _started = started;
        }
    }
}

// `muted` prop setter: forward to the session.
-(void)setMuted:(BOOL) muted {
    _muted = muted;
    [self.session setMuted:muted];
}

// `focus` prop setter: toggles smooth autofocus and tap-to-focus together.
-(void)setFocus:(BOOL) focus {
    _focus = focus;
    [self.session setSmoothAutoFocusEnabled:focus];
    [self.session setTouchToFocusEnable:focus];
}

// `zoom` prop setter. NOTE(review): -integerValue truncates fractional zoom
// factors; kept to preserve existing behavior.
-(void)setZoom:(NSNumber*) zoom {
    self.session.videoZoomFactor = [zoom integerValue];
}

// `camera` prop setter: accepts "front"/"back" and toggles only on change.
-(void)setCamera:(NSString*)camera{
    if([camera isEqualToString:@"front"] || [camera isEqualToString:@"back"]){
        if(![camera isEqualToString:_camera]){
            _camera = camera;
            [self.session toggleCamera];
        }
    }
}

// Sending buffer is full — frames will start dropping; logged for debugging.
- (void)streamingSessionSendingBufferDidFull:(id)session {
    NSString *log = @"Buffer is full";
    NSLog(@"%@", log);
}

// The SDK dropped buffered frames; logged for debugging.
- (void)streamingSession:(id)session sendingBufferDidDropItems:(NSArray *)items {
    NSString *log = @"Frame dropped";
    NSLog(@"%@", log);
}

// Stop pushing, on the session queue.
- (void)stopSession {
    dispatch_async(self.sessionQueue, ^{
        [self.session stop];
    });
}

// Begin pushing to rtmpURL, on the session queue.
- (void)startSession {
    dispatch_async(self.sessionQueue, ^{
        NSURL *streamURL = [NSURL URLWithString:self.rtmpURL];
        [self.session startWithPushURL:streamURL feedback:^(PLStreamStartStateFeedback feedback) {
            dispatch_async(dispatch_get_main_queue(), ^{
                NSLog(@"success ");
            });
        }];
    });
}

// Delegate callback: periodic stream statistics, logged for debugging.
- (void)cameraStreamingSession:(PLCameraStreamingSession *)session streamStatusDidUpdate:(PLStreamStatus *)status {
    NSString *log = [NSString stringWithFormat:@"Stream Status: %@", status];
    NSLog(@"%@", log);
}

// Delegate callback: translate PLStreamState transitions into RN direct events.
- (void)cameraStreamingSession:(PLCameraStreamingSession *)session streamStateDidChange:(PLStreamState)state {
    NSString *log = [NSString stringWithFormat:@"Stream State: %s", stateNames[state]];
    NSLog(@"%@", log);
    switch (state) {
        case PLStreamStateUnknow:
            [_eventDispatcher sendInputEventWithName:@"onLoading" body:@{@"target": self.reactTag}];
            break;
        case PLStreamStateConnecting:
            [_eventDispatcher sendInputEventWithName:@"onConnecting" body:@{@"target": self.reactTag}];
            break;
        case PLStreamStateConnected:
            [_eventDispatcher sendInputEventWithName:@"onStreaming" body:@{@"target": self.reactTag}];
            break;
        case PLStreamStateDisconnecting:
            break;
        case PLStreamStateDisconnected:
            [_eventDispatcher sendInputEventWithName:@"onDisconnected" body:@{@"target": self.reactTag}];
            [_eventDispatcher sendInputEventWithName:@"onShutdown" body:@{@"target": self.reactTag}]; //FIXME
            break;
        case PLStreamStateError:
            [_eventDispatcher sendInputEventWithName:@"onIOError" body:@{@"target": self.reactTag}];
            break;
        default:
            break;
    }
}

// Delegate callback: connection dropped with an error — log and retry immediately.
- (void)cameraStreamingSession:(PLCameraStreamingSession *)session didDisconnectWithError:(NSError *)error {
    NSString *log = [NSString stringWithFormat:@"Stream State: Error. %@", error];
    NSLog(@"%@", log);
    [self startSession];
}

// Reachability handler: stop the session when the network is lost.
// NOTE(review): nothing restarts it on reconnect; that comes only from
// didDisconnectWithError:.
- (void)reachabilityChanged:(NSNotification *)notif{
    Reachability *curReach = [notif object];
    NSParameterAssert([curReach isKindOfClass:[Reachability class]]);
    NetworkStatus status = [curReach currentReachabilityStatus];
    if (NotReachable == status) {
        // Network gone: tear down the push session.
        [self stopSession];
    }
    // FIX: corrected log typo "Networkt" -> "Network".
    NSString *log = [NSString stringWithFormat:@"Network Status: %s", networkStatus[status]];
    NSLog(@"%@", log);
}

// AVAudioSession interruption handler (phone call, FaceTime, etc.).
- (void)handleInterruption:(NSNotification *)notification {
    if ([notification.name isEqualToString:AVAudioSessionInterruptionNotification]) {
        NSLog(@"Interruption notification");
        if ([[notification.userInfo valueForKey:AVAudioSessionInterruptionTypeKey] isEqualToNumber:[NSNumber numberWithInt:AVAudioSessionInterruptionTypeBegan]]) {
            NSLog(@"InterruptionTypeBegan");
        } else {
            // the facetime iOS 9 has a bug: 1 does not send interrupt end 2 you can use application become active, and repeat set audio session acitve until success. ref http://blog.corywiles.com/broken-facetime-audio-interruptions-in-ios-9
            NSLog(@"InterruptionTypeEnded");
            AVAudioSession *session = [AVAudioSession sharedInstance];
            [session setActive:YES error:nil];
        }
    }
}
@end
================================================
FILE: ios/RCTPili/RCTPili/RCTStreamingManager.h
================================================
//
// RCTStreamingManager.h
// RCTPili
//
// Created by guguyanhua on 16/5/26.
// Copyright © 2016年 pili. All rights reserved.
//
#import "RCTViewManager.h"
// React Native view manager that vends RCTStreaming (camera push) views.
@interface RCTStreamingManager : RCTViewManager
@end
================================================
FILE: ios/RCTPili/RCTPili/RCTStreamingManager.m
================================================
//
// RCTStreamingManager.m
// RCTPili
//
// Created by guguyanhua on 16/5/26.
// Copyright © 2016年 pili. All rights reserved.
//
#import "RCTStreamingManager.h"
#import "RCTStreaming.h"
// Bridges the native camera-streaming view into React Native.
@implementation RCTStreamingManager

RCT_EXPORT_MODULE();

@synthesize bridge = _bridge;

// Create the native view backing the JS <Streaming/> component.
- (UIView *)view
{
    return [[RCTStreaming alloc] initWithEventDispatcher:self.bridge.eventDispatcher];
}

// Direct events the streaming view may dispatch to JS.
- (NSArray *)customDirectEventTypes
{
    return @[
             @"onReady",
             @"onConnecting",
             @"onStreaming",
             @"onShutdown",
             @"onIOError",
             @"onDisconnected"
             ];
}

// View creation touches UIKit, so run on the main queue.
- (dispatch_queue_t)methodQueue
{
    return dispatch_get_main_queue();
}

// Props settable from JS.
RCT_EXPORT_VIEW_PROPERTY(rtmpURL, NSString);
RCT_EXPORT_VIEW_PROPERTY(profile, NSDictionary);
RCT_EXPORT_VIEW_PROPERTY(started, BOOL);
RCT_EXPORT_VIEW_PROPERTY(muted, BOOL);
RCT_EXPORT_VIEW_PROPERTY(zoom, NSNumber);
RCT_EXPORT_VIEW_PROPERTY(focus, BOOL);
RCT_EXPORT_VIEW_PROPERTY(camera, NSString);

@end
================================================
FILE: ios/RCTPili/RCTPili/Reachability.h
================================================
/*
File: Reachability.h
Abstract: Basic demonstration of how to use the SystemConfiguration Reachablity APIs.
Version: 3.5
Disclaimer: IMPORTANT: This Apple software is supplied to you by Apple
Inc. ("Apple") in consideration of your agreement to the following
terms, and your use, installation, modification or redistribution of
this Apple software constitutes acceptance of these terms. If you do
not agree with these terms, please do not use, install, modify or
redistribute this Apple software.
In consideration of your agreement to abide by the following terms, and
subject to these terms, Apple grants you a personal, non-exclusive
license, under Apple's copyrights in this original Apple software (the
"Apple Software"), to use, reproduce, modify and redistribute the Apple
Software, with or without modifications, in source and/or binary forms;
provided that if you redistribute the Apple Software in its entirety and
without modifications, you must retain this notice and the following
text and disclaimers in all such redistributions of the Apple Software.
Neither the name, trademarks, service marks or logos of Apple Inc. may
be used to endorse or promote products derived from the Apple Software
without specific prior written permission from Apple. Except as
expressly stated in this notice, no other rights or licenses, express or
implied, are granted by Apple herein, including but not limited to any
patent rights that may be infringed by your derivative works or by other
works in which the Apple Software may be incorporated.
The Apple Software is provided by Apple on an "AS IS" basis. APPLE
MAKES NO WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
THE IMPLIED WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND
OPERATION ALONE OR IN COMBINATION WITH YOUR PRODUCTS.
IN NO EVENT SHALL APPLE BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION,
MODIFICATION AND/OR DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER CAUSED
AND WHETHER UNDER THEORY OF CONTRACT, TORT (INCLUDING NEGLIGENCE),
STRICT LIABILITY OR OTHERWISE, EVEN IF APPLE HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
Copyright (C) 2014 Apple Inc. All Rights Reserved.
*/
#import <Foundation/Foundation.h>
#import <SystemConfiguration/SystemConfiguration.h>
#import <netinet/in.h>
// Coarse reachability states reported by the Reachability class.
typedef enum : NSInteger {
    NotReachable = 0,   // no route to the target
    ReachableViaWiFi,   // reachable over Wi-Fi
    ReachableViaWWAN    // reachable over cellular (WWAN)
} NetworkStatus;

// Posted via NSNotificationCenter whenever reachability changes.
extern NSString *kReachabilityChangedNotification;
// Apple's SystemConfiguration-based reachability helper (sample code, v3.5).
@interface Reachability : NSObject

/*!
 * Use to check the reachability of a given host name.
 */
+ (instancetype)reachabilityWithHostName:(NSString *)hostName;

/*!
 * Use to check the reachability of a given IP address.
 */
+ (instancetype)reachabilityWithAddress:(const struct sockaddr_in *)hostAddress;

/*!
 * Checks whether the default route is available. Should be used by applications that do not connect to a particular host.
 */
+ (instancetype)reachabilityForInternetConnection;

/*!
 * Checks whether a local WiFi connection is available.
 */
+ (instancetype)reachabilityForLocalWiFi;

/*!
 * Start listening for reachability notifications on the current run loop.
 */
- (BOOL)startNotifier;
- (void)stopNotifier;

// Current coarse network state (see NetworkStatus above).
- (NetworkStatus)currentReachabilityStatus;

/*!
 * WWAN may be available, but not active until a connection has been established. WiFi may require a connection for VPN on Demand.
 */
- (BOOL)connectionRequired;

@end
================================================
FILE: ios/RCTPili/RCTPili/Reachability.m
================================================
/*
File: Reachability.m
Abstract: Basic demonstration of how to use the SystemConfiguration Reachablity APIs.
Version: 3.5
Disclaimer: IMPORTANT: This Apple software is supplied to you by Apple
Inc. ("Apple") in consideration of your agreement to the following
terms, and your use, installation, modification or redistribution of
this Apple software constitutes acceptance of these terms. If you do
not agree with these terms, please do not use, install, modify or
redistribute this Apple software.
In consideration of your agreement to abide by the following terms, and
subject to these terms, Apple grants you a personal, non-exclusive
license, under Apple's copyrights in this original Apple software (the
"Apple Software"), to use, reproduce, modify and redistribute the Apple
Software, with or without modifications, in source and/or binary forms;
provided that if you redistribute the Apple Software in its entirety and
without modifications, you must retain this notice and the following
text and disclaimers in all such redistributions of the Apple Software.
Neither the name, trademarks, service marks or logos of Apple Inc. may
be used to endorse or promote products derived from the Apple Software
without specific prior written permission from Apple. Except as
expressly stated in this notice, no other rights or licenses, express or
implied, are granted by Apple herein, including but not limited to any
patent rights that may be infringed by your derivative works or by other
works in which the Apple Software may be incorporated.
The Apple Software is provided by Apple on an "AS IS" basis. APPLE
MAKES NO WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
THE IMPLIED WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND
OPERATION ALONE OR IN COMBINATION WITH YOUR PRODUCTS.
IN NO EVENT SHALL APPLE BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION,
MODIFICATION AND/OR DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER CAUSED
AND WHETHER UNDER THEORY OF CONTRACT, TORT (INCLUDING NEGLIGENCE),
STRICT LIABILITY OR OTHERWISE, EVEN IF APPLE HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
Copyright (C) 2014 Apple Inc. All Rights Reserved.
*/
#import <arpa/inet.h>
#import <ifaddrs.h>
#import <netdb.h>
#import <sys/socket.h>
#import <CoreFoundation/CoreFoundation.h>
#import "Reachability.h"
// Notification name posted by ReachabilityCallback when the network state changes.
NSString *kReachabilityChangedNotification = @"kNetworkReachabilityChangedNotification";

#pragma mark - Supporting functions

// Set to 1 to log raw SCNetworkReachabilityFlags for debugging.
#define kShouldPrintReachabilityFlags 0
// Debug helper: log each SCNetworkReachabilityFlags bit as a single character.
// Compiled to a no-op unless kShouldPrintReachabilityFlags is 1.
static void PrintReachabilityFlags(SCNetworkReachabilityFlags flags, const char* comment)
{
#if kShouldPrintReachabilityFlags
    NSLog(@"Reachability Flag Status: %c%c %c%c%c%c%c%c%c %s\n",
          (flags & kSCNetworkReachabilityFlagsIsWWAN)               ? 'W' : '-',
          (flags & kSCNetworkReachabilityFlagsReachable)            ? 'R' : '-',
          (flags & kSCNetworkReachabilityFlagsTransientConnection)  ? 't' : '-',
          (flags & kSCNetworkReachabilityFlagsConnectionRequired)   ? 'c' : '-',
          (flags & kSCNetworkReachabilityFlagsConnectionOnTraffic)  ? 'C' : '-',
          (flags & kSCNetworkReachabilityFlagsInterventionRequired) ? 'i' : '-',
          (flags & kSCNetworkReachabilityFlagsConnectionOnDemand)   ? 'D' : '-',
          (flags & kSCNetworkReachabilityFlagsIsLocalAddress)       ? 'l' : '-',
          (flags & kSCNetworkReachabilityFlagsIsDirect)             ? 'd' : '-',
          comment
          );
#endif
}
// SCNetworkReachability C callback: `info` carries the Reachability instance
// registered in -startNotifier; rebroadcast the change as an NSNotification.
static void ReachabilityCallback(SCNetworkReachabilityRef target, SCNetworkReachabilityFlags flags, void* info)
{
#pragma unused (target, flags)
    NSCAssert(info != NULL, @"info was NULL in ReachabilityCallback");
    NSCAssert([(__bridge NSObject*) info isKindOfClass: [Reachability class]], @"info was wrong class in ReachabilityCallback");
    Reachability* noteObject = (__bridge Reachability *)info;
    // Post a notification to notify the client that the network reachability changed.
    [[NSNotificationCenter defaultCenter] postNotificationName: kReachabilityChangedNotification object: noteObject];
}
#pragma mark - Reachability implementation
@implementation Reachability
{
BOOL _alwaysReturnLocalWiFiStatus; //default is NO
SCNetworkReachabilityRef _reachabilityRef;
}
// Create a Reachability instance that monitors the given host name.
// Returns nil when the underlying SCNetworkReachability ref cannot be created.
+ (instancetype)reachabilityWithHostName:(NSString *)hostName
{
    Reachability* result = NULL;
    SCNetworkReachabilityRef ref = SCNetworkReachabilityCreateWithName(NULL, [hostName UTF8String]);
    if (ref != NULL)
    {
        result = [[self alloc] init];
        if (result != NULL)
        {
            result->_reachabilityRef = ref;
            result->_alwaysReturnLocalWiFiStatus = NO;
        }
    }
    return result;
}
// Create a Reachability instance that monitors the given IPv4 address.
// Returns nil when the underlying SCNetworkReachability ref cannot be created.
+ (instancetype)reachabilityWithAddress:(const struct sockaddr_in *)hostAddress
{
    Reachability* result = NULL;
    SCNetworkReachabilityRef ref = SCNetworkReachabilityCreateWithAddress(kCFAllocatorDefault, (const struct sockaddr *)hostAddress);
    if (ref != NULL)
    {
        result = [[self alloc] init];
        if (result != NULL)
        {
            result->_reachabilityRef = ref;
            result->_alwaysReturnLocalWiFiStatus = NO;
        }
    }
    return result;
}
// Monitor the default route: the zero address (0.0.0.0) tells
// SystemConfiguration to track general internet reachability.
+ (instancetype)reachabilityForInternetConnection
{
    struct sockaddr_in zeroAddress;
    bzero(&zeroAddress, sizeof(zeroAddress));
    zeroAddress.sin_family = AF_INET;
    zeroAddress.sin_len = sizeof(zeroAddress);
    return [self reachabilityWithAddress:&zeroAddress];
}
// Monitor local Wi-Fi only, by probing the link-local range
// 169.254.0.0 (IN_LINKLOCALNETNUM, defined in <netinet/in.h>).
+ (instancetype)reachabilityForLocalWiFi
{
    struct sockaddr_in localWifiAddress;
    bzero(&localWifiAddress, sizeof(localWifiAddress));
    localWifiAddress.sin_family = AF_INET;
    localWifiAddress.sin_len = sizeof(localWifiAddress);
    localWifiAddress.sin_addr.s_addr = htonl(IN_LINKLOCALNETNUM);
    Reachability* result = [self reachabilityWithAddress: &localWifiAddress];
    if (result != NULL)
    {
        // Restrict status reporting to the local-Wi-Fi interpretation of flags.
        result->_alwaysReturnLocalWiFiStatus = YES;
    }
    return result;
}
#pragma mark - Start and stop notifier
- (BOOL)startNotifier
{
BOOL returnValue = NO;
SCNetworkReachabilityContext context = {0, (__bridge void *)(self), NULL, NULL, NULL};
if (SCNetworkReachabilitySetCallback(_reachabilityRef, ReachabilityCallback, &context))
{
if (SCNetworkReachabilityScheduleWithRunLoop(_reachabilityRef, CFRunLoopGetCurrent(), kCFRunLoopDefaultMode))
{
returnValue = YES;
}
}
return returnValue;
}
- (void)stopNotifier
{
if (_reachabilityRef != NULL)
{
SCNetworkReachabilityUnscheduleFromRunLoop(_reachabilityRef, CFRunLoopGetCurrent(), kCFRunLoopDefaultMode);
}
}
- (void)dealloc
{
[self stopNotifier];
if (_reachabilityRef != NULL)
{
CFRelease(_reachabilityRef);
}
}
#pragma mark - Network Flag Handling
- (NetworkStatus)localWiFiStatusForFlags:(SCNetworkReachabilityFlags)flags
{
PrintReachabilityFlags(flags, "localWiFiStatusForFlags");
NetworkStatus returnValue = NotReachable;
if ((flags & kSCNetworkReachabilityFlagsReachable) && (flags & kSCNetworkReachabilityFlagsIsDirect))
{
returnValue = ReachableViaWiFi;
}
return returnValue;
}
- (NetworkStatus)networkStatusForFlags:(SCNetworkReachabilityFlags)flags
{
PrintReachabilityFlags(flags, "networkStatusForFlags");
if ((flags & kSCNetworkReachabilityFlagsReachable) == 0)
{
// The target host is not reachable.
return NotReachable;
}
NetworkStatus returnValue = NotReachable;
if ((flags & kSCNetworkReachabilityFlagsConnectionRequired) == 0)
{
/*
If the target host is reachable and no connection is required then we'll assume (for now) that you're on Wi-Fi...
*/
returnValue = ReachableViaWiFi;
}
if ((((flags & kSCNetworkReachabilityFlagsConnectionOnDemand ) != 0) ||
(flags & kSCNetworkReachabilityFlagsConnectionOnTraffic) != 0))
{
/*
... and the connection is on-demand (or on-traffic) if the calling application is using the CFSocketStream or higher APIs...
*/
if ((flags & kSCNetworkReachabilityFlagsInterventionRequired) == 0)
{
/*
... and no [user] intervention is needed...
*/
returnValue = ReachableViaWiFi;
}
}
if ((flags & kSCNetworkReachabilityFlagsIsWWAN) == kSCNetworkReachabilityFlagsIsWWAN)
{
/*
... but WWAN connections are OK if the calling application is using the CFNetwork APIs.
*/
returnValue = ReachableViaWWAN;
}
return returnValue;
}
- (BOOL)connectionRequired
{
NSAssert(_reachabilityRef != NULL, @"connectionRequired called with NULL reachabilityRef");
SCNetworkReachabilityFlags flags;
if (SCNetworkReachabilityGetFlags(_reachabilityRef, &flags))
{
return (flags & kSCNetworkReachabilityFlagsConnectionRequired);
}
return NO;
}
- (NetworkStatus)currentReachabilityStatus
{
NSAssert(_reachabilityRef != NULL, @"currentNetworkStatus called with NULL SCNetworkReachabilityRef");
NetworkStatus returnValue = NotReachable;
SCNetworkReachabilityFlags flags;
if (SCNetworkReachabilityGetFlags(_reachabilityRef, &flags))
{
if (_alwaysReturnLocalWiFiStatus)
{
returnValue = [self localWiFiStatusForFlags:flags];
}
else
{
returnValue = [self networkStatusForFlags:flags];
}
}
return returnValue;
}
@end
================================================
FILE: ios/RCTPili/RCTPili.podspec
================================================
#
# Be sure to run `pod spec lint RCTPili.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |s|
  s.name         = "RCTPili"
  s.version      = "2.0.0"
  s.summary      = "React Native Pili SDK Bridge"

  # This description is used to generate tags and improve search results.
  s.description  = <<-DESC
                   React Native Pili SDK Bridge
                   DESC

  s.homepage     = "http://www.airapps.cn/package/react-native-pili"
  s.license      = "MIT"
  # s.license    = { :type => "MIT", :file => "FILE_LICENSE" }
  s.author       = { "buhe" => "bugu1986@126.com" }
  s.platform     = :ios, "7.0"

  # Fix: "master" is a branch, not a tag — :tag => "master" cannot resolve.
  # Use :branch so `pod install` checks out the intended revision.
  s.source       = { :git => "https://github.com/buhe/react-native-piliv2.git", :branch => "master" }

  s.source_files = "RCTPili/**/*.{h,m}"
  s.requires_arc = true
  # s.xcconfig   = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" }

  s.dependency "React"
  s.dependency "PLMediaStreamingKit"
  s.dependency "PLPlayerKit"
end
================================================
FILE: ios/RCTPili/RCTPili.xcodeproj/project.pbxproj
================================================
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 46;
objects = {
/* Begin PBXBuildFile section */
78CFC6511D8942EC00386E6B /* RCTPili.h in Headers */ = {isa = PBXBuildFile; fileRef = 78CFC6451D8942EC00386E6B /* RCTPili.h */; };
78CFC6521D8942EC00386E6B /* RCTPili.m in Sources */ = {isa = PBXBuildFile; fileRef = 78CFC6461D8942EC00386E6B /* RCTPili.m */; };
78CFC6531D8942EC00386E6B /* RCTPlayer.h in Headers */ = {isa = PBXBuildFile; fileRef = 78CFC6471D8942EC00386E6B /* RCTPlayer.h */; };
78CFC6541D8942EC00386E6B /* RCTPlayer.m in Sources */ = {isa = PBXBuildFile; fileRef = 78CFC6481D8942EC00386E6B /* RCTPlayer.m */; };
78CFC6551D8942EC00386E6B /* RCTPlayerManager.h in Headers */ = {isa = PBXBuildFile; fileRef = 78CFC6491D8942EC00386E6B /* RCTPlayerManager.h */; };
78CFC6561D8942EC00386E6B /* RCTPlayerManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 78CFC64A1D8942EC00386E6B /* RCTPlayerManager.m */; };
78CFC6571D8942EC00386E6B /* RCTStreaming.h in Headers */ = {isa = PBXBuildFile; fileRef = 78CFC64B1D8942EC00386E6B /* RCTStreaming.h */; };
78CFC6581D8942EC00386E6B /* RCTStreaming.m in Sources */ = {isa = PBXBuildFile; fileRef = 78CFC64C1D8942EC00386E6B /* RCTStreaming.m */; };
78CFC6591D8942EC00386E6B /* RCTStreamingManager.h in Headers */ = {isa = PBXBuildFile; fileRef = 78CFC64D1D8942EC00386E6B /* RCTStreamingManager.h */; };
78CFC65A1D8942EC00386E6B /* RCTStreamingManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 78CFC64E1D8942EC00386E6B /* RCTStreamingManager.m */; };
78CFC65B1D8942EC00386E6B /* Reachability.h in Headers */ = {isa = PBXBuildFile; fileRef = 78CFC64F1D8942EC00386E6B /* Reachability.h */; };
78CFC65C1D8942EC00386E6B /* Reachability.m in Sources */ = {isa = PBXBuildFile; fileRef = 78CFC6501D8942EC00386E6B /* Reachability.m */; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
78CFC63A1D89426D00386E6B /* RCTPili.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = RCTPili.framework; sourceTree = BUILT_PRODUCTS_DIR; };
78CFC63F1D89426D00386E6B /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
78CFC6451D8942EC00386E6B /* RCTPili.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RCTPili.h; sourceTree = "<group>"; };
78CFC6461D8942EC00386E6B /* RCTPili.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = RCTPili.m; sourceTree = "<group>"; };
78CFC6471D8942EC00386E6B /* RCTPlayer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RCTPlayer.h; sourceTree = "<group>"; };
78CFC6481D8942EC00386E6B /* RCTPlayer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = RCTPlayer.m; sourceTree = "<group>"; };
78CFC6491D8942EC00386E6B /* RCTPlayerManager.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RCTPlayerManager.h; sourceTree = "<group>"; };
78CFC64A1D8942EC00386E6B /* RCTPlayerManager.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = RCTPlayerManager.m; sourceTree = "<group>"; };
78CFC64B1D8942EC00386E6B /* RCTStreaming.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RCTStreaming.h; sourceTree = "<group>"; };
78CFC64C1D8942EC00386E6B /* RCTStreaming.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = RCTStreaming.m; sourceTree = "<group>"; };
78CFC64D1D8942EC00386E6B /* RCTStreamingManager.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RCTStreamingManager.h; sourceTree = "<group>"; };
78CFC64E1D8942EC00386E6B /* RCTStreamingManager.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = RCTStreamingManager.m; sourceTree = "<group>"; };
78CFC64F1D8942EC00386E6B /* Reachability.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = Reachability.h; sourceTree = "<group>"; };
78CFC6501D8942EC00386E6B /* Reachability.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = Reachability.m; sourceTree = "<group>"; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
78CFC6361D89426D00386E6B /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
78CFC6301D89426D00386E6B = {
isa = PBXGroup;
children = (
78CFC63C1D89426D00386E6B /* RCTPili */,
78CFC63B1D89426D00386E6B /* Products */,
);
sourceTree = "<group>";
};
78CFC63B1D89426D00386E6B /* Products */ = {
isa = PBXGroup;
children = (
78CFC63A1D89426D00386E6B /* RCTPili.framework */,
);
name = Products;
sourceTree = "<group>";
};
78CFC63C1D89426D00386E6B /* RCTPili */ = {
isa = PBXGroup;
children = (
78CFC6451D8942EC00386E6B /* RCTPili.h */,
78CFC6461D8942EC00386E6B /* RCTPili.m */,
78CFC6471D8942EC00386E6B /* RCTPlayer.h */,
78CFC6481D8942EC00386E6B /* RCTPlayer.m */,
78CFC6491D8942EC00386E6B /* RCTPlayerManager.h */,
78CFC64A1D8942EC00386E6B /* RCTPlayerManager.m */,
78CFC64B1D8942EC00386E6B /* RCTStreaming.h */,
78CFC64C1D8942EC00386E6B /* RCTStreaming.m */,
78CFC64D1D8942EC00386E6B /* RCTStreamingManager.h */,
78CFC64E1D8942EC00386E6B /* RCTStreamingManager.m */,
78CFC64F1D8942EC00386E6B /* Reachability.h */,
78CFC6501D8942EC00386E6B /* Reachability.m */,
78CFC63F1D89426D00386E6B /* Info.plist */,
);
path = RCTPili;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXHeadersBuildPhase section */
78CFC6371D89426D00386E6B /* Headers */ = {
isa = PBXHeadersBuildPhase;
buildActionMask = 2147483647;
files = (
78CFC6511D8942EC00386E6B /* RCTPili.h in Headers */,
78CFC6591D8942EC00386E6B /* RCTStreamingManager.h in Headers */,
78CFC6551D8942EC00386E6B /* RCTPlayerManager.h in Headers */,
78CFC65B1D8942EC00386E6B /* Reachability.h in Headers */,
78CFC6531D8942EC00386E6B /* RCTPlayer.h in Headers */,
78CFC6571D8942EC00386E6B /* RCTStreaming.h in Headers */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXHeadersBuildPhase section */
/* Begin PBXNativeTarget section */
78CFC6391D89426D00386E6B /* RCTPili */ = {
isa = PBXNativeTarget;
buildConfigurationList = 78CFC6421D89426D00386E6B /* Build configuration list for PBXNativeTarget "RCTPili" */;
buildPhases = (
78CFC6351D89426D00386E6B /* Sources */,
78CFC6361D89426D00386E6B /* Frameworks */,
78CFC6371D89426D00386E6B /* Headers */,
78CFC6381D89426D00386E6B /* Resources */,
);
buildRules = (
);
dependencies = (
);
name = RCTPili;
productName = RCTPili;
productReference = 78CFC63A1D89426D00386E6B /* RCTPili.framework */;
productType = "com.apple.product-type.framework";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
78CFC6311D89426D00386E6B /* Project object */ = {
isa = PBXProject;
attributes = {
LastUpgradeCheck = 0730;
ORGANIZATIONNAME = airapps;
TargetAttributes = {
78CFC6391D89426D00386E6B = {
CreatedOnToolsVersion = 7.3.1;
};
};
};
buildConfigurationList = 78CFC6341D89426D00386E6B /* Build configuration list for PBXProject "RCTPili" */;
compatibilityVersion = "Xcode 3.2";
developmentRegion = English;
hasScannedForEncodings = 0;
knownRegions = (
en,
);
mainGroup = 78CFC6301D89426D00386E6B;
productRefGroup = 78CFC63B1D89426D00386E6B /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
78CFC6391D89426D00386E6B /* RCTPili */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
78CFC6381D89426D00386E6B /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
78CFC6351D89426D00386E6B /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
78CFC6581D8942EC00386E6B /* RCTStreaming.m in Sources */,
78CFC65A1D8942EC00386E6B /* RCTStreamingManager.m in Sources */,
78CFC6541D8942EC00386E6B /* RCTPlayer.m in Sources */,
78CFC6521D8942EC00386E6B /* RCTPili.m in Sources */,
78CFC65C1D8942EC00386E6B /* Reachability.m in Sources */,
78CFC6561D8942EC00386E6B /* RCTPlayerManager.m in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin XCBuildConfiguration section */
78CFC6401D89426D00386E6B /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
CURRENT_PROJECT_VERSION = 1;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 9.3;
MTL_ENABLE_DEBUG_INFO = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
};
name = Debug;
};
78CFC6411D89426D00386E6B /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
CURRENT_PROJECT_VERSION = 1;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 9.3;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
};
name = Release;
};
78CFC6431D89426D00386E6B /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
DEFINES_MODULE = YES;
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
HEADER_SEARCH_PATHS = (
"$(inherited)",
"\"${PODS_ROOT}/Headers/Public/React\"",
);
INFOPLIST_FILE = RCTPili/Info.plist;
INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = cn.airapps.RCTPili;
PRODUCT_NAME = "$(TARGET_NAME)";
SKIP_INSTALL = YES;
};
name = Debug;
};
78CFC6441D89426D00386E6B /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
DEFINES_MODULE = YES;
DYLIB_COMPATIBILITY_VERSION = 1;
DYLIB_CURRENT_VERSION = 1;
DYLIB_INSTALL_NAME_BASE = "@rpath";
HEADER_SEARCH_PATHS = (
"$(inherited)",
"\"${PODS_ROOT}/Headers/Public/React\"",
);
INFOPLIST_FILE = RCTPili/Info.plist;
INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = cn.airapps.RCTPili;
PRODUCT_NAME = "$(TARGET_NAME)";
SKIP_INSTALL = YES;
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
78CFC6341D89426D00386E6B /* Build configuration list for PBXProject "RCTPili" */ = {
isa = XCConfigurationList;
buildConfigurations = (
78CFC6401D89426D00386E6B /* Debug */,
78CFC6411D89426D00386E6B /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
78CFC6421D89426D00386E6B /* Build configuration list for PBXNativeTarget "RCTPili" */ = {
isa = XCConfigurationList;
buildConfigurations = (
78CFC6431D89426D00386E6B /* Debug */,
78CFC6441D89426D00386E6B /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 78CFC6311D89426D00386E6B /* Project object */;
}
================================================
FILE: package.json
================================================
{
"name": "react-native-pili",
"version": "2.3.0",
"description": "Pili Streaming Cloud React Native SDK",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"repository": {
"type": "git",
"url": "git+https://github.com/buhe/react-native-pili.git"
},
"keywords": [
"React",
"Native",
"SDK",
"Qiniu",
"Pili",
"Stream",
"Cloud"
],
"author": "buhe",
"license": "MIT",
"bugs": {
"url": "https://github.com/buhe/react-native-pili/issues"
},
"homepage": "https://github.com/buhe/react-native-pili#readme",
"devDependencies": {
"react": "^0.14.8",
"react-native": "^0.24.1"
}
}
gitextract_h14nr57v/ ├── .gitignore ├── .npmignore ├── AudioStreaming.js ├── LICENSE ├── Player.js ├── README.md ├── Streaming.js ├── StreamingConst.js ├── android/ │ ├── build.gradle │ ├── gradle/ │ │ └── wrapper/ │ │ ├── gradle-wrapper.jar │ │ └── gradle-wrapper.properties │ ├── gradlew │ ├── gradlew.bat │ ├── libs/ │ │ ├── pldroid-media-streaming-2.0.4.jar │ │ └── pldroid-player-1.3.2.jar │ ├── proguard-rules.pro │ └── src/ │ ├── androidTest/ │ │ └── java/ │ │ └── com/ │ │ └── pili/ │ │ └── rnpili/ │ │ └── ApplicationTest.java │ ├── main/ │ │ ├── AndroidManifest.xml │ │ └── java/ │ │ └── com/ │ │ └── pili/ │ │ └── rnpili/ │ │ ├── CameraPreviewFrameView.java │ │ ├── PiliAudioStreamingViewManager.java │ │ ├── PiliPackage.java │ │ ├── PiliPlayerViewManager.java │ │ ├── PiliStreamingViewManager.java │ │ └── support/ │ │ ├── Config.java │ │ ├── FocusIndicatorRotateLayout.java │ │ ├── Jsons.java │ │ ├── MediaController.java │ │ ├── Rotatable.java │ │ └── RotateLayout.java │ └── test/ │ └── java/ │ └── com/ │ └── pili/ │ └── rnpili/ │ └── ExampleUnitTest.java ├── index.js ├── ios/ │ └── RCTPili/ │ ├── RCTPili/ │ │ ├── Info.plist │ │ ├── RCTAudioStreaming.h │ │ ├── RCTAudioStreaming.m │ │ ├── RCTAudioStreamingManager.h │ │ ├── RCTAudioStreamingManager.m │ │ ├── RCTPili.h │ │ ├── RCTPili.m │ │ ├── RCTPlayer.h │ │ ├── RCTPlayer.m │ │ ├── RCTPlayerManager.h │ │ ├── RCTPlayerManager.m │ │ ├── RCTStreaming.h │ │ ├── RCTStreaming.m │ │ ├── RCTStreamingManager.h │ │ ├── RCTStreamingManager.m │ │ ├── Reachability.h │ │ └── Reachability.m │ ├── RCTPili.podspec │ └── RCTPili.xcodeproj/ │ └── project.pbxproj └── package.json
SYMBOL INDEX (144 symbols across 16 files)
FILE: AudioStreaming.js
class AudioStreaming (line 13) | class AudioStreaming extends Component {
method constructor (line 15) | constructor(props, context) {
method _onReady (line 25) | _onReady(event) {
method _onConnecting (line 29) | _onConnecting(event) {
method _onStreaming (line 33) | _onStreaming(event) {
method _onShutdown (line 37) | _onShutdown(event) {
method _onIOError (line 41) | _onIOError(event) {
method _onDisconnected (line 45) | _onDisconnected(event) {
method render (line 49) | render() {
FILE: Player.js
class Player (line 13) | class Player extends Component {
method constructor (line 15) | constructor(props, context) {
method _onLoading (line 24) | _onLoading(event) {
method _onPaused (line 28) | _onPaused(event) {
method _onShutdown (line 32) | _onShutdown(event) {
method _onError (line 37) | _onError(event) {
method _onPlaying (line 41) | _onPlaying(event) {
method render (line 45) | render() {
FILE: Streaming.js
class Streaming (line 15) | class Streaming extends Component {
method constructor (line 17) | constructor(props, context) {
method _onReady (line 27) | _onReady(event) {
method _onConnecting (line 31) | _onConnecting(event) {
method _onStreaming (line 35) | _onStreaming(event) {
method _onShutdown (line 39) | _onShutdown(event) {
method _onIOError (line 43) | _onIOError(event) {
method _onDisconnected (line 47) | _onDisconnected(event) {
method render (line 51) | render() {
FILE: StreamingConst.js
method _240 (line 5) | get _240(){
method _480 (line 8) | get _480(){
method _544 (line 11) | get _544(){
method _720 (line 14) | get _720(){
method _1088 (line 17) | get _1088(){
FILE: android/src/androidTest/java/com/pili/rnpili/ApplicationTest.java
class ApplicationTest (line 9) | public class ApplicationTest extends ApplicationTestCase<Application> {
method ApplicationTest (line 10) | public ApplicationTest() {
FILE: android/src/main/java/com/pili/rnpili/CameraPreviewFrameView.java
class CameraPreviewFrameView (line 26) | public class CameraPreviewFrameView extends GLSurfaceView {
type Listener (line 30) | public interface Listener {
method onSingleTapUp (line 31) | boolean onSingleTapUp(MotionEvent e);
method onZoomValueChanged (line 32) | boolean onZoomValueChanged(float factor);
method CameraPreviewFrameView (line 39) | public CameraPreviewFrameView(Context context) {
method CameraPreviewFrameView (line 44) | public CameraPreviewFrameView(Context context, AttributeSet attrs) {
method setListener (line 49) | public void setListener(Listener listener) {
method onTouchEvent (line 53) | @Override
method onSingleTapUp (line 62) | @Override
method onScaleBegin (line 75) | @Override
method onScale (line 80) | @Override
method initialize (line 93) | private void initialize(Context context) {
FILE: android/src/main/java/com/pili/rnpili/PiliAudioStreamingViewManager.java
class PiliAudioStreamingViewManager (line 52) | public class PiliAudioStreamingViewManager extends SimpleViewManager<View>
type Events (line 61) | public enum Events {
method Events (line 71) | Events(final String name) {
method toString (line 75) | @Override
method initializeStreamingSessionIfNeeded (line 105) | private void initializeStreamingSessionIfNeeded(View view) {
method getExportedCustomDirectEventTypeConstants (line 128) | @Override
method createViewInstance (line 138) | @Override
method getName (line 167) | @Override
method setRtmpURL (line 175) | @ReactProp(name = "rtmpURL")
method setProfile (line 185) | @ReactProp(name = "profile")
method setMuted (line 200) | @ReactProp(name = "muted")
method setStarted (line 206) | @ReactProp(name = "started")
method getTargetId (line 223) | public int getTargetId() {
method onStateChanged (line 227) | @Override
method onRecordAudioFailedHandled (line 291) | @Override
method onRestartStreamingHandled (line 298) | @Override
method onPreviewSizeSelected (line 304) | @Override
method onHostResume (line 316) | @Override
method onHostPause (line 321) | @Override
method onHostDestroy (line 328) | @Override
method handleMessage (line 335) | @Override
method startStreaming (line 359) | private void startStreaming() {
method stopStreaming (line 364) | private void stopStreaming() {
method getMyDnsManager (line 369) | private DnsManager getMyDnsManager() {
FILE: android/src/main/java/com/pili/rnpili/PiliPackage.java
class PiliPackage (line 19) | public class PiliPackage implements ReactPackage {
method PiliPackage (line 22) | public PiliPackage() {
method createNativeModules (line 25) | @Override
method createJSModules (line 30) | @Override
method createViewManagers (line 35) | @Override
FILE: android/src/main/java/com/pili/rnpili/PiliPlayerViewManager.java
class PiliPlayerViewManager (line 26) | public class PiliPlayerViewManager extends SimpleViewManager<PLVideoView...
type Events (line 40) | public enum Events {
method Events (line 49) | Events(final String name) {
method toString (line 53) | @Override
method getName (line 59) | @Override
method getExportedCustomDirectEventTypeConstants (line 64) | @Override
method createViewInstance (line 74) | @Override
method isLiveStreaming (line 92) | private boolean isLiveStreaming(String url) {
method setSource (line 101) | @ReactProp(name = "source")
method setAspectRatio (line 143) | @ReactProp(name = "aspectRatio")
method setStarted (line 156) | @ReactProp(name = "started")
method setMuted (line 167) | @ReactProp(name = "muted")
method onPrepared (line 174) | @Override
method onInfo (line 182) | @Override
method onError (line 202) | @Override
method onCompletion (line 213) | @Override
method onBufferingUpdate (line 221) | @Override
method onSeekComplete (line 228) | @Override
method onVideoSizeChanged (line 237) | @Override
method onHostResume (line 243) | @Override
method onHostPause (line 248) | @Override
method onHostDestroy (line 253) | @Override
method getTargetId (line 258) | public int getTargetId() {
FILE: android/src/main/java/com/pili/rnpili/PiliStreamingViewManager.java
class PiliStreamingViewManager (line 65) | public class PiliStreamingViewManager extends SimpleViewManager<AspectFr...
type Events (line 74) | public enum Events {
method Events (line 84) | Events(final String name) {
method toString (line 88) | @Override
method initializeStreamingSessionIfNeeded (line 118) | private void initializeStreamingSessionIfNeeded(AspectFrameLayout afl,...
method getExportedCustomDirectEventTypeConstants (line 163) | @Override
method createViewInstance (line 173) | @Override
method getName (line 212) | @Override
method setCamera (line 220) | @ReactProp(name = "camera")
method setRtmpURL (line 232) | @ReactProp(name = "rtmpURL")
method setProfile (line 242) | @ReactProp(name = "profile")
method setMuted (line 259) | @ReactProp(name = "muted")
method setZoom (line 264) | @ReactProp(name = "zoom")
method setFocus (line 272) | @ReactProp(name = "focus")
method setStarted (line 277) | @ReactProp(name = "started")
method setFocusAreaIndicator (line 293) | protected void setFocusAreaIndicator() {
method getTargetId (line 312) | public int getTargetId() {
method onStateChanged (line 316) | @Override
method onRecordAudioFailedHandled (line 380) | @Override
method onRestartStreamingHandled (line 387) | @Override
method onPreviewSizeSelected (line 393) | @Override
method onSingleTapUp (line 405) | @Override
method onZoomValueChanged (line 422) | @Override
method onHostResume (line 451) | @Override
method onHostPause (line 456) | @Override
method onHostDestroy (line 463) | @Override
method handleMessage (line 470) | @Override
method startStreaming (line 494) | private void startStreaming() {
method stopStreaming (line 499) | private void stopStreaming() {
method getMyDnsManager (line 504) | private DnsManager getMyDnsManager() {
FILE: android/src/main/java/com/pili/rnpili/support/Config.java
class Config (line 11) | public class Config {
FILE: android/src/main/java/com/pili/rnpili/support/Jsons.java
class Jsons (line 13) | public class Jsons {
method readableMapToJson (line 14) | public static JSONObject readableMapToJson(ReadableMap readableMap) {
FILE: android/src/main/java/com/pili/rnpili/support/Rotatable.java
type Rotatable (line 6) | public interface Rotatable {
method setOrientation (line 8) | void setOrientation(int orientation, boolean animation);
FILE: android/src/main/java/com/pili/rnpili/support/RotateLayout.java
class RotateLayout (line 12) | public class RotateLayout extends ViewGroup implements Rotatable {
method RotateLayout (line 17) | public RotateLayout(Context context, AttributeSet attrs) {
method onFinishInflate (line 26) | @TargetApi(Build.VERSION_CODES.HONEYCOMB)
method setChild (line 35) | public void setChild(View mChild) {
method onLayout (line 39) | @Override
method onMeasure (line 56) | @TargetApi(Build.VERSION_CODES.HONEYCOMB)
method shouldDelayChildPressedState (line 96) | @Override
method setOrientation (line 102) | @Override
FILE: android/src/test/java/com/pili/rnpili/ExampleUnitTest.java
class ExampleUnitTest (line 10) | public class ExampleUnitTest {
method addition_isCorrect (line 11) | @Test
FILE: ios/RCTPili/RCTPili/Reachability.h
type NetworkStatus (line 53) | typedef enum : NSInteger {
type sockaddr_in (line 73) | struct sockaddr_in
Condensed preview — 51 files, each showing its path, character count, and a content snippet. Download the .json file, or copy the output, to get the full structured content (174K chars).
[
{
"path": ".gitignore",
"chars": 361,
"preview": "\n\n\n\n# OSX\n#\n.DS_Store\n\n.vscode\n\n# Xcode\n#\nbuild/\n*.pbxuser\n!default.pbxuser\n*.mode1v3\n!default.mode1v3\n*.mode2v3\n!defaul"
},
{
"path": ".npmignore",
"chars": 99,
"preview": ".*.swp\n._*\n.DS_Store\n.git\n.hg\n.npmrc\n.lock-wscript\n.svn\n.wafpickle-*\nconfig.gypi\nCVS\nnpm-debug.log\n"
},
{
"path": "AudioStreaming.js",
"chars": 2283,
"preview": "/**\n * Created by buhe on 16/4/29.\n */\nimport React, {\n Component,\n PropTypes\n} from 'react';\nimport {\n require"
},
{
"path": "LICENSE",
"chars": 1071,
"preview": "The MIT License (MIT)\n\nCopyright (c) 2016 buhe\n\nPermission is hereby granted, free of charge, to any person obtaining a "
},
{
"path": "Player.js",
"chars": 2073,
"preview": "/**\n * Created by buhe on 16/5/4.\n */\nimport React, {\n Component,\n PropTypes\n} from 'react';\nimport {\n requireN"
},
{
"path": "README.md",
"chars": 3821,
"preview": "# Deprecated\n# Pili Streaming Cloud React Native SDK\n\n## Introduction\n\n### Warning\n\nThis lib dependency PLMediaStreamin"
},
{
"path": "Streaming.js",
"chars": 2821,
"preview": "/**\n * Created by buhe on 16/4/29.\n */\nimport React, {\n Component,\n PropTypes\n} from 'react';\nimport {\n require"
},
{
"path": "StreamingConst.js",
"chars": 285,
"preview": "/**\n * Created by buhe on 16/7/14.\n */\nconst video_encoding = {\n get _240(){\n return 0;\n },\n get _480(){\n retur"
},
{
"path": "android/build.gradle",
"chars": 906,
"preview": "apply plugin: 'com.android.library'\n\n\nandroid {\n compileSdkVersion 23\n buildToolsVersion \"23.0.2\"\n\n defaultConf"
},
{
"path": "android/gradle/wrapper/gradle-wrapper.properties",
"chars": 231,
"preview": "#Mon Dec 28 10:00:20 PST 2015\ndistributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\nzipStoreBase=GRADLE_USER_"
},
{
"path": "android/gradlew",
"chars": 4971,
"preview": "#!/usr/bin/env bash\n\n##############################################################################\n##\n## Gradle start "
},
{
"path": "android/gradlew.bat",
"chars": 2404,
"preview": "@if \"%DEBUG%\" == \"\" @echo off\r\n@rem ##########################################################################\r\n@rem\r\n@r"
},
{
"path": "android/proguard-rules.pro",
"chars": 652,
"preview": "# Add project specific ProGuard rules here.\n# By default, the flags in this file are appended to flags specified\n# in /U"
},
{
"path": "android/src/androidTest/java/com/pili/rnpili/ApplicationTest.java",
"chars": 346,
"preview": "package com.pili.rnpili;\n\nimport android.app.Application;\nimport android.test.ApplicationTestCase;\n\n/**\n * <a href=\"http"
},
{
"path": "android/src/main/AndroidManifest.xml",
"chars": 113,
"preview": "<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n package=\"com.pili.rnpili\">\n\n</manifest>\n"
},
{
"path": "android/src/main/java/com/pili/rnpili/CameraPreviewFrameView.java",
"chars": 2944,
"preview": "package com.pili.rnpili;\n\nimport android.content.Context;\nimport android.opengl.GLSurfaceView;\nimport android.util.Attri"
},
{
"path": "android/src/main/java/com/pili/rnpili/PiliAudioStreamingViewManager.java",
"chars": 13146,
"preview": "package com.pili.rnpili;\n\nimport android.hardware.Camera;\nimport android.os.Handler;\nimport android.os.Looper;\nimport an"
},
{
"path": "android/src/main/java/com/pili/rnpili/PiliPackage.java",
"chars": 1151,
"preview": "package com.pili.rnpili;\n\nimport android.app.Activity;\n\nimport com.facebook.react.ReactPackage;\nimport com.facebook.reac"
},
{
"path": "android/src/main/java/com/pili/rnpili/PiliPlayerViewManager.java",
"chars": 9588,
"preview": "package com.pili.rnpili;\n\nimport android.util.Log;\n\nimport com.facebook.react.bridge.Arguments;\nimport com.facebook.reac"
},
{
"path": "android/src/main/java/com/pili/rnpili/PiliStreamingViewManager.java",
"chars": 19465,
"preview": "package com.pili.rnpili;\n\nimport android.app.Activity;\nimport android.hardware.Camera;\nimport android.os.Handler;\nimport"
},
{
"path": "android/src/main/java/com/pili/rnpili/support/Config.java",
"chars": 744,
"preview": "package com.pili.rnpili.support;\n\nimport android.content.pm.ActivityInfo;\n\nimport com.qiniu.pili.droid.streaming.Streami"
},
{
"path": "android/src/main/java/com/pili/rnpili/support/FocusIndicatorRotateLayout.java",
"chars": 3894,
"preview": "//package com.pili.rnpili.support;\n//\n//import android.annotation.TargetApi;\n//import android.content.Context;\n//import "
},
{
"path": "android/src/main/java/com/pili/rnpili/support/Jsons.java",
"chars": 1940,
"preview": "package com.pili.rnpili.support;\n\nimport com.facebook.react.bridge.ReadableMap;\nimport com.facebook.react.bridge.Readabl"
},
{
"path": "android/src/main/java/com/pili/rnpili/support/MediaController.java",
"chars": 20158,
"preview": "//package com.pili.rnpili.support;\n//\n//import android.annotation.SuppressLint;\n//import android.content.Context;\n//impo"
},
{
"path": "android/src/main/java/com/pili/rnpili/support/Rotatable.java",
"chars": 240,
"preview": "package com.pili.rnpili.support;\n\n/**\n * Created by jerikc on 16/2/5.\n */\npublic interface Rotatable {\n // Set parame"
},
{
"path": "android/src/main/java/com/pili/rnpili/support/RotateLayout.java",
"chars": 3387,
"preview": "package com.pili.rnpili.support;\n\nimport android.annotation.TargetApi;\nimport android.content.Context;\nimport android.os"
},
{
"path": "android/src/test/java/com/pili/rnpili/ExampleUnitTest.java",
"chars": 308,
"preview": "package com.pili.rnpili;\n\nimport org.junit.Test;\n\nimport static org.junit.Assert.*;\n\n/**\n * To work on unit tests, switc"
},
{
"path": "index.js",
"chars": 222,
"preview": "/**\n * Created by buhe on 16/4/28.\n */\nmodule.exports = {\n Streaming: require('./Streaming'),\n AudioStreaming: require"
},
{
"path": "ios/RCTPili/RCTPili/Info.plist",
"chars": 806,
"preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/P"
},
{
"path": "ios/RCTPili/RCTPili/RCTAudioStreaming.h",
"chars": 789,
"preview": "//\n// RCTStreaming.h\n// RCTPili\n//\n// Created by guguyanhua on 16/5/26.\n// Copyright © 2016年 pili. All rights reserv"
},
{
"path": "ios/RCTPili/RCTPili/RCTAudioStreaming.m",
"chars": 8912,
"preview": "//\n// RCTStreaming.m\n// RCTPili\n//\n// Created by guguyanhua on 16/5/26.\n// Copyright © 2016年 pili. All rights reserv"
},
{
"path": "ios/RCTPili/RCTPili/RCTAudioStreamingManager.h",
"chars": 222,
"preview": "//\n// RCTStreamingManager.h\n// RCTPili\n//\n// Created by guguyanhua on 16/5/26.\n// Copyright © 2016年 pili. All rights"
},
{
"path": "ios/RCTPili/RCTPili/RCTAudioStreamingManager.m",
"chars": 909,
"preview": "//\n// RCTStreamingManager.m\n// RCTPili\n//\n// Created by guguyanhua on 16/5/26.\n// Copyright © 2016年 pili. All rights"
},
{
"path": "ios/RCTPili/RCTPili/RCTPili.h",
"chars": 188,
"preview": "//\n// RCTPili.h\n// RCTPili\n//\n// Created by buhe on 16/5/11.\n// Copyright © 2016年 pili. All rights reserved.\n//\n\n#im"
},
{
"path": "ios/RCTPili/RCTPili/RCTPili.m",
"chars": 168,
"preview": "//\n// RCTPili.m\n// RCTPili\n//\n// Created by buhe on 16/5/11.\n// Copyright © 2016年 pili. All rights reserved.\n//\n\n#im"
},
{
"path": "ios/RCTPili/RCTPili/RCTPlayer.h",
"chars": 426,
"preview": "//\n// RCTPlayer.h\n// RCTPili\n//\n// Created by buhe on 16/5/12.\n// Copyright © 2016年 pili. All rights reserved.\n//\n\n#"
},
{
"path": "ios/RCTPili/RCTPili/RCTPlayer.m",
"chars": 5584,
"preview": "//\n// RCTPlayer.m\n// RCTPili\n//\n// Created by buhe on 16/5/12.\n// Copyright © 2016年 pili. All rights reserved.\n//\n\n#"
},
{
"path": "ios/RCTPili/RCTPili/RCTPlayerManager.h",
"chars": 228,
"preview": "//\n// RCTPlayerManger.h\n// RCTPili\n//\n// Created by buhe on 16/5/12.\n// Copyright © 2016年 pili. All rights reserved."
},
{
"path": "ios/RCTPili/RCTPili/RCTPlayerManager.m",
"chars": 782,
"preview": "//\n// RCTPlayerManger.m\n// RCTPili\n//\n// Created by buhe on 16/5/12.\n// Copyright © 2016年 pili. All rights reserved."
},
{
"path": "ios/RCTPili/RCTPili/RCTStreaming.h",
"chars": 784,
"preview": "//\n// RCTStreaming.h\n// RCTPili\n//\n// Created by guguyanhua on 16/5/26.\n// Copyright © 2016年 pili. All rights reserv"
},
{
"path": "ios/RCTPili/RCTPili/RCTStreaming.m",
"chars": 12525,
"preview": "//\n// RCTStreaming.m\n// RCTPili\n//\n// Created by guguyanhua on 16/5/26.\n// Copyright © 2016年 pili. All rights reserv"
},
{
"path": "ios/RCTPili/RCTPili/RCTStreamingManager.h",
"chars": 217,
"preview": "//\n// RCTStreamingManager.h\n// RCTPili\n//\n// Created by guguyanhua on 16/5/26.\n// Copyright © 2016年 pili. All rights"
},
{
"path": "ios/RCTPili/RCTPili/RCTStreamingManager.m",
"chars": 1014,
"preview": "//\n// RCTStreamingManager.m\n// RCTPili\n//\n// Created by guguyanhua on 16/5/26.\n// Copyright © 2016年 pili. All rights"
},
{
"path": "ios/RCTPili/RCTPili/Reachability.h",
"chars": 3722,
"preview": "/*\n File: Reachability.h\n Abstract: Basic demonstration of how to use the SystemConfiguration Reachablity APIs.\n Ve"
},
{
"path": "ios/RCTPili/RCTPili/Reachability.m",
"chars": 9617,
"preview": "/*\n File: Reachability.m\n Abstract: Basic demonstration of how to use the SystemConfiguration Reachablity APIs.\n Ve"
},
{
"path": "ios/RCTPili/RCTPili.podspec",
"chars": 1489,
"preview": "#\n# Be sure to run `pod spec lint RCTPili.podspec' to ensure this is a\n# valid spec and to remove all comments includi"
},
{
"path": "ios/RCTPili/RCTPili.xcodeproj/project.pbxproj",
"chars": 14318,
"preview": "// !$*UTF8*$!\n{\n\tarchiveVersion = 1;\n\tclasses = {\n\t};\n\tobjectVersion = 46;\n\tobjects = {\n\n/* Begin PBXBuildFile section *"
},
{
"path": "package.json",
"chars": 661,
"preview": "{\n \"name\": \"react-native-pili\",\n \"version\": \"2.3.0\",\n \"description\": \"Pili Streaming Cloud React Native SDK\",\n \"main"
}
]
// ... and 3 more files (download for full content)
About this extraction
This page contains the full source code of the buhe/react-native-pili GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 51 files (159.2 KB), approximately 39.8k tokens, and a symbol index with 144 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.
Extracted by GitExtract — free GitHub repo to text converter for AI. Built by Nikandr Surkov.