Repository: zerochl/MagicShow
Branch: master
Commit: 52438cced13e
Files: 248
Total size: 707.4 KB
Directory structure:
gitextract_f9toojvk/
├── .gitignore
├── MagicShow/
│ ├── .gitignore
│ ├── CMakeLists.txt
│ ├── build.gradle
│ ├── proguard-rules.pro
│ └── src/
│ ├── androidTest/
│ │ └── java/
│ │ └── com/
│ │ └── zero/
│ │ └── magicshow/
│ │ └── core/
│ │ └── ApplicationTest.java
│ ├── main/
│ │ ├── AndroidManifest.xml
│ │ ├── cpp/
│ │ │ ├── MagicJni.cpp
│ │ │ ├── beautify/
│ │ │ │ ├── MagicBeautify.cpp
│ │ │ │ └── MagicBeautify.h
│ │ │ └── bitmap/
│ │ │ ├── BitmapOperation.cpp
│ │ │ ├── BitmapOperation.h
│ │ │ ├── Conversion.cpp
│ │ │ ├── Conversion.h
│ │ │ └── JniBitmap.h
│ │ ├── java/
│ │ │ └── com/
│ │ │ └── zero/
│ │ │ └── magicshow/
│ │ │ ├── MagicShowManager.java
│ │ │ ├── activity/
│ │ │ │ ├── AlbumActivity.java
│ │ │ │ └── CameraActivity.java
│ │ │ ├── adapter/
│ │ │ │ └── FilterAdapter.java
│ │ │ ├── common/
│ │ │ │ ├── base/
│ │ │ │ │ ├── BaseActivity.java
│ │ │ │ │ └── MagicBaseView.java
│ │ │ │ ├── config/
│ │ │ │ │ ├── CameraConfig.java
│ │ │ │ │ └── PathConfig.java
│ │ │ │ ├── entity/
│ │ │ │ │ └── MagicShowResultEntity.java
│ │ │ │ ├── iface/
│ │ │ │ │ ├── CameraShootCallBack.java
│ │ │ │ │ ├── DialogYesOrNoCallBack.java
│ │ │ │ │ ├── GravityCallBack.java
│ │ │ │ │ ├── ImageEditCallBack.java
│ │ │ │ │ └── MagicListener.java
│ │ │ │ └── utils/
│ │ │ │ ├── BaseUtil.java
│ │ │ │ ├── CameraBitmapUtil.java
│ │ │ │ ├── CameraParamUtil.java
│ │ │ │ ├── Constants.java
│ │ │ │ ├── DateUtils.java
│ │ │ │ ├── FilterTypeHelper.java
│ │ │ │ ├── GravityUtil.java
│ │ │ │ ├── MagicParams.java
│ │ │ │ ├── OpenGlUtils.java
│ │ │ │ ├── Rotation.java
│ │ │ │ ├── RxBus.java
│ │ │ │ ├── SavePictureTask.java
│ │ │ │ ├── TextureRotationUtil.java
│ │ │ │ └── ZeroSubject.java
│ │ │ ├── core/
│ │ │ │ ├── MagicEngine.java
│ │ │ │ ├── beautify/
│ │ │ │ │ └── MagicJni.java
│ │ │ │ ├── camera/
│ │ │ │ │ ├── CameraEngine.java
│ │ │ │ │ └── utils/
│ │ │ │ │ ├── CameraInfo.java
│ │ │ │ │ └── CameraUtils.java
│ │ │ │ ├── encoder/
│ │ │ │ │ ├── gles/
│ │ │ │ │ │ ├── EglCore.java
│ │ │ │ │ │ └── EglSurfaceBase.java
│ │ │ │ │ └── video/
│ │ │ │ │ ├── TextureMovieEncoder.java
│ │ │ │ │ ├── VideoEncoderCore.java
│ │ │ │ │ └── WindowSurface.java
│ │ │ │ ├── filter/
│ │ │ │ │ ├── advanced/
│ │ │ │ │ │ ├── MagicAmaroFilter.java
│ │ │ │ │ │ ├── MagicAntiqueFilter.java
│ │ │ │ │ │ ├── MagicBeautyFilter.java
│ │ │ │ │ │ ├── MagicBlackCatFilter.java
│ │ │ │ │ │ ├── MagicBrannanFilter.java
│ │ │ │ │ │ ├── MagicBrooklynFilter.java
│ │ │ │ │ │ ├── MagicCalmFilter.java
│ │ │ │ │ │ ├── MagicCoolFilter.java
│ │ │ │ │ │ ├── MagicCrayonFilter.java
│ │ │ │ │ │ ├── MagicEarlyBirdFilter.java
│ │ │ │ │ │ ├── MagicEmeraldFilter.java
│ │ │ │ │ │ ├── MagicEvergreenFilter.java
│ │ │ │ │ │ ├── MagicFairytaleFilter.java
│ │ │ │ │ │ ├── MagicFreudFilter.java
│ │ │ │ │ │ ├── MagicHealthyFilter.java
│ │ │ │ │ │ ├── MagicHefeFilter.java
│ │ │ │ │ │ ├── MagicHudsonFilter.java
│ │ │ │ │ │ ├── MagicImageAdjustFilter.java
│ │ │ │ │ │ ├── MagicInkwellFilter.java
│ │ │ │ │ │ ├── MagicKevinFilter.java
│ │ │ │ │ │ ├── MagicLatteFilter.java
│ │ │ │ │ │ ├── MagicLomoFilter.java
│ │ │ │ │ │ ├── MagicN1977Filter.java
│ │ │ │ │ │ ├── MagicNashvilleFilter.java
│ │ │ │ │ │ ├── MagicNostalgiaFilter.java
│ │ │ │ │ │ ├── MagicPixarFilter.java
│ │ │ │ │ │ ├── MagicRiseFilter.java
│ │ │ │ │ │ ├── MagicRomanceFilter.java
│ │ │ │ │ │ ├── MagicSakuraFilter.java
│ │ │ │ │ │ ├── MagicSierraFilter.java
│ │ │ │ │ │ ├── MagicSketchFilter.java
│ │ │ │ │ │ ├── MagicSkinWhitenFilter.java
│ │ │ │ │ │ ├── MagicSunriseFilter.java
│ │ │ │ │ │ ├── MagicSunsetFilter.java
│ │ │ │ │ │ ├── MagicSutroFilter.java
│ │ │ │ │ │ ├── MagicSweetsFilter.java
│ │ │ │ │ │ ├── MagicTenderFilter.java
│ │ │ │ │ │ ├── MagicToasterFilter.java
│ │ │ │ │ │ ├── MagicValenciaFilter.java
│ │ │ │ │ │ ├── MagicWaldenFilter.java
│ │ │ │ │ │ ├── MagicWarmFilter.java
│ │ │ │ │ │ ├── MagicWhiteCatFilter.java
│ │ │ │ │ │ └── MagicXproIIFilter.java
│ │ │ │ │ ├── base/
│ │ │ │ │ │ ├── MagicBaseGroupFilter.java
│ │ │ │ │ │ ├── MagicCameraInputFilter.java
│ │ │ │ │ │ ├── MagicLookupFilter.java
│ │ │ │ │ │ └── gpuimage/
│ │ │ │ │ │ ├── GPUImageBrightnessFilter.java
│ │ │ │ │ │ ├── GPUImageContrastFilter.java
│ │ │ │ │ │ ├── GPUImageExposureFilter.java
│ │ │ │ │ │ ├── GPUImageFilter.java
│ │ │ │ │ │ ├── GPUImageHueFilter.java
│ │ │ │ │ │ ├── GPUImageSaturationFilter.java
│ │ │ │ │ │ └── GPUImageSharpenFilter.java
│ │ │ │ │ └── utils/
│ │ │ │ │ ├── MagicFilterFactory.java
│ │ │ │ │ └── MagicFilterType.java
│ │ │ │ └── widget/
│ │ │ │ ├── BubbleSeekBar.java
│ │ │ │ ├── MagicCameraView.java
│ │ │ │ ├── MagicImageView.java
│ │ │ │ └── TwoLineSeekBar.java
│ │ │ ├── view/
│ │ │ │ └── edit/
│ │ │ │ ├── ImageEditFragment.java
│ │ │ │ ├── ImageEditManager.java
│ │ │ │ ├── adjust/
│ │ │ │ │ └── ImageEditAdjustView.java
│ │ │ │ ├── beauty/
│ │ │ │ │ └── ImageEditBeautyView.java
│ │ │ │ ├── filter/
│ │ │ │ │ └── ImageEditFilterView.java
│ │ │ │ ├── iface/
│ │ │ │ │ └── ImageEditNavListener.java
│ │ │ │ └── navigation/
│ │ │ │ └── ImageEditNavigationView.java
│ │ │ └── viewmanager/
│ │ │ └── CameraManager.java
│ │ ├── libs/
│ │ │ └── zero.jar
│ │ └── res/
│ │ ├── anim/
│ │ │ ├── bottom_sliding_in.xml
│ │ │ └── bottom_sliding_out.xml
│ │ ├── color/
│ │ │ ├── selector_image_back.xml
│ │ │ ├── selector_image_edit.xml
│ │ │ └── selector_image_edit_yellow.xml
│ │ ├── drawable/
│ │ │ ├── btn_camera_beauty.xml
│ │ │ ├── btn_camera_filter.xml
│ │ │ ├── btn_camera_shutter.xml
│ │ │ ├── btn_layout_filters_bar_close.xml
│ │ │ ├── corners_yuanjiao_dialog.xml
│ │ │ ├── seekbar_progress.xml
│ │ │ ├── seekbar_progress_light.xml
│ │ │ ├── seekbar_thumb.xml
│ │ │ ├── seekbar_thumb_light.xml
│ │ │ ├── selector_filter_favorite_btn.xml
│ │ │ ├── selector_filter_selected.xml
│ │ │ ├── selector_image_adds.xml
│ │ │ ├── selector_image_back.xml
│ │ │ ├── selector_image_beauty.xml
│ │ │ ├── selector_image_edit.xml
│ │ │ ├── selector_image_edit_adjust_bright.xml
│ │ │ ├── selector_image_edit_adjust_contrast.xml
│ │ │ ├── selector_image_edit_adjust_exposure.xml
│ │ │ ├── selector_image_edit_adjust_hue.xml
│ │ │ ├── selector_image_edit_adjust_saturation.xml
│ │ │ ├── selector_image_edit_adjust_sharpness.xml
│ │ │ ├── selector_image_edit_adjust_vibrance.xml
│ │ │ ├── selector_image_filter.xml
│ │ │ ├── selector_image_fragment_skin.xml
│ │ │ ├── selector_image_frame.xml
│ │ │ ├── selector_image_save.xml
│ │ │ ├── selector_rewardcamera.xml
│ │ │ └── take_filter_favorite_btn.xml
│ │ ├── layout/
│ │ │ ├── activity_camera.xml
│ │ │ ├── activity_image.xml
│ │ │ ├── activity_main.xml
│ │ │ ├── dialog_yes_or_no.xml
│ │ │ ├── filter_item_layout.xml
│ │ │ ├── filter_layout.xml
│ │ │ ├── fragment_image_edit_adjust.xml
│ │ │ ├── fragment_image_edit_beauty.xml
│ │ │ └── fragment_image_edit_filter.xml
│ │ ├── raw/
│ │ │ ├── amaro.glsl
│ │ │ ├── antique.glsl
│ │ │ ├── beauty.glsl
│ │ │ ├── blackcat.glsl
│ │ │ ├── brannan.glsl
│ │ │ ├── brooklyn.glsl
│ │ │ ├── calm.glsl
│ │ │ ├── cool.glsl
│ │ │ ├── crayon.glsl
│ │ │ ├── default_fragment.glsl
│ │ │ ├── default_vertex.glsl
│ │ │ ├── earlybird.glsl
│ │ │ ├── emerald.glsl
│ │ │ ├── evergreen.glsl
│ │ │ ├── freud.glsl
│ │ │ ├── healthy.glsl
│ │ │ ├── hefe.glsl
│ │ │ ├── hudson.glsl
│ │ │ ├── inkwell.glsl
│ │ │ ├── kevin_new.glsl
│ │ │ ├── latte.glsl
│ │ │ ├── lomo.glsl
│ │ │ ├── n1977.glsl
│ │ │ ├── nashville.glsl
│ │ │ ├── nostalgia.glsl
│ │ │ ├── pixar.glsl
│ │ │ ├── rise.glsl
│ │ │ ├── romance.glsl
│ │ │ ├── sakura.glsl
│ │ │ ├── sierra.glsl
│ │ │ ├── sketch.glsl
│ │ │ ├── skinwhiten.glsl
│ │ │ ├── suger_tablets.glsl
│ │ │ ├── sunrise.glsl
│ │ │ ├── sunset.glsl
│ │ │ ├── sutro.glsl
│ │ │ ├── sweets.glsl
│ │ │ ├── tender.glsl
│ │ │ ├── toaster2_filter_shader.glsl
│ │ │ ├── valencia.glsl
│ │ │ ├── walden.glsl
│ │ │ ├── warm.glsl
│ │ │ ├── whitecat.glsl
│ │ │ └── xproii_filter_shader.glsl
│ │ ├── values/
│ │ │ ├── colors.xml
│ │ │ ├── dimens.xml
│ │ │ ├── strings.xml
│ │ │ └── styles.xml
│ │ ├── values-v11/
│ │ │ └── styles.xml
│ │ ├── values-v14/
│ │ │ └── styles.xml
│ │ ├── values-v21/
│ │ │ └── styles.xml
│ │ ├── values-w820dp/
│ │ │ └── dimens.xml
│ │ └── values-zh-rCN/
│ │ └── strings.xml
│ └── test/
│ └── java/
│ └── com/
│ └── zero/
│ └── magicshow/
│ └── core/
│ └── ExampleUnitTest.java
├── README.md
├── app/
│ ├── .gitignore
│ ├── build.gradle
│ ├── proguard-rules.pro
│ └── src/
│ ├── androidTest/
│ │ └── java/
│ │ └── com/
│ │ └── zero/
│ │ └── magicshow/
│ │ └── ApplicationTest.java
│ ├── main/
│ │ ├── AndroidManifest.xml
│ │ ├── java/
│ │ │ └── com/
│ │ │ └── zero/
│ │ │ └── magicshowsim/
│ │ │ └── MainActivity.java
│ │ └── res/
│ │ ├── anim/
│ │ │ ├── bottom_sliding_in.xml
│ │ │ └── bottom_sliding_out.xml
│ │ ├── color/
│ │ │ ├── selector_image_back.xml
│ │ │ ├── selector_image_edit.xml
│ │ │ └── selector_image_edit_yellow.xml
│ │ ├── layout/
│ │ │ └── activity_main.xml
│ │ ├── values/
│ │ │ ├── colors.xml
│ │ │ ├── dimens.xml
│ │ │ ├── strings.xml
│ │ │ └── styles.xml
│ │ ├── values-v11/
│ │ │ └── styles.xml
│ │ ├── values-v14/
│ │ │ └── styles.xml
│ │ ├── values-v21/
│ │ │ └── styles.xml
│ │ ├── values-w820dp/
│ │ │ └── dimens.xml
│ │ └── values-zh-rCN/
│ │ └── strings.xml
│ └── test/
│ └── java/
│ └── com/
│ └── zero/
│ └── magicshow/
│ └── ExampleUnitTest.java
├── build.gradle
├── gradle/
│ └── wrapper/
│ ├── gradle-wrapper.jar
│ └── gradle-wrapper.properties
├── gradle.properties
├── gradlew
├── gradlew.bat
├── settings.gradle
└── tools/
└── versions_configuration.gradle
================================================
FILE CONTENTS
================================================
================================================
FILE: .gitignore
================================================
*.iml
.gradle
/local.properties
/.idea/workspace.xml
/.idea/libraries
.DS_Store
/build
/captures
MagicShow/.externalNativeBuild
.idea/*
================================================
FILE: MagicShow/.gitignore
================================================
/build
================================================
FILE: MagicShow/CMakeLists.txt
================================================
# For more information about using CMake with Android Studio, read the
# documentation: https://d.android.com/studio/projects/add-native-code.html
# Sets the minimum version of CMake required to build the native library.
cmake_minimum_required(VERSION 3.4.1)
# Optional extra compile flags:
# SET(CMAKE_CXX_FLAGS "-Wno-error=format-security -Wno-error=pointer-sign")
# Builds libMagicJni.so (a shared library packaged into the APK by Gradle)
# from the beautify/bitmap native sources.
add_library( # Sets the name of the library.
             MagicJni
             # Sets the library as a shared library.
             SHARED
             # Provides a relative path to your source file(s).
             src/main/cpp/beautify/MagicBeautify.cpp
             src/main/cpp/bitmap/BitmapOperation.cpp
             src/main/cpp/bitmap/Conversion.cpp
             src/main/cpp/MagicJni.cpp )
# Locate the NDK log library and store its path in ${log-lib}.  This must
# run BEFORE the library is linked against it.
find_library( # Sets the name of the path variable.
              log-lib
              # Specifies the name of the NDK library that
              # you want CMake to locate.
              log )
# Emit the built .so into src/main/jniLibs/<abi> so the Gradle
# sourceSets/jniLibs configuration picks it up.
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/../jniLibs/${ANDROID_ABI})
# Fixed: the original invoked target_link_libraries twice — once linking
# "log" by bare name before find_library had even run — leaving a
# duplicate, out-of-order link step.  A single call with the resolved
# ${log-lib} plus jnigraphics (needed for the AndroidBitmap_* API used in
# BitmapOperation.cpp) is sufficient.
target_link_libraries( # Specifies the target library.
                       MagicJni
                       # AndroidBitmap_* pixel access API.
                       jnigraphics
                       # Android logcat logging, resolved above.
                       ${log-lib} )
================================================
FILE: MagicShow/build.gradle
================================================
apply plugin: 'com.android.library'
//apply from: "$project.rootDir/tools/versions_configuration.gradle"
// JitPack Maven
apply plugin: 'com.github.dcendents.android-maven'
// Your Group
group='com.github.zerochl'
android {
    compileSdkVersion 28
    buildToolsVersion "28.0.0"
    defaultConfig {
        minSdkVersion 18
        targetSdkVersion 28
        versionCode 1
        versionName "1.0"
        externalNativeBuild {
            cmake {
                cppFlags "-std=c++11"
                // cppFlags "-DANDROID_NDK","-D_DEBUG"
            }
        }
        ndk {
            moduleName "MagicJni"
            abiFilters "armeabi-v7a", "arm64-v8a", "x86", "x86_64"
            // stl = "stlport_static"
        }
    }
    lintOptions {
        abortOnError false
    }
    externalNativeBuild {
        cmake {
            path "CMakeLists.txt"
        }
    }
    sourceSets {
        main {
            jniLibs.srcDirs = ['src/main/jniLibs']
        }
    }
    buildTypes {
        release {
            minifyEnabled = false
            // Fixed: this previously pointed at 'proguard-rules.txt', but the
            // file shipped with this module is proguard-rules.pro.
            proguardFiles.add(file('proguard-rules.pro'))
        }
    }
}
dependencies {
    api fileTree(include: ['*.jar'], dir: 'libs')
    testImplementation 'junit:junit:4.12'
    api files('src/main/libs/zero.jar')
    api "com.android.support:design:27.1.1"
    api "com.android.support:appcompat-v7:27.1.1"
    // Pinned: 'latest.integration' is a dynamic version and makes builds
    // non-reproducible.  These are the final RxJava 1.x line releases.
    api 'io.reactivex:rxjava:1.3.8'
    api 'io.reactivex:rxandroid:1.2.1'
}
================================================
FILE: MagicShow/proguard-rules.pro
================================================
# Add project specific ProGuard rules here.
# By default, the flags in this file are appended to flags specified
# in D:\android-sdk-windows/tools/proguard/proguard-android.txt
# You can edit the include path and order by changing the proguardFiles
# directive in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# Add any project specific keep options here:
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
================================================
FILE: MagicShow/src/androidTest/java/com/zero/magicshow/core/ApplicationTest.java
================================================
package com.zero.magicshow.core;
import android.app.Application;
import android.test.ApplicationTestCase;
/**
 * Testing Fundamentals
 *
 * <p>Minimal instrumentation-test scaffold that boots the library module
 * against a plain {@link Application}.  Fixed: the class previously extended
 * the raw {@code ApplicationTestCase} type; it is a generic class and should
 * be parameterized with the application class it constructs.
 */
public class ApplicationTest extends ApplicationTestCase<Application> {
    public ApplicationTest() {
        super(Application.class);
    }
}
================================================
FILE: MagicShow/src/main/AndroidManifest.xml
================================================
================================================
FILE: MagicShow/src/main/cpp/MagicJni.cpp
================================================
#include
#include
#include
#include "bitmap/BitmapOperation.h"
#include "beautify/MagicBeautify.h"
#define LOG_TAG "MagicJni"
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
#ifdef __cplusplus
extern "C" {
#endif
// JNI bridge for com.zero.magicshow.core.beautify.MagicJni.
// Function names follow the JNI mangling scheme
// Java_<package>_<class>_<method> and must not be renamed.
// The "handler"/"handle" arguments are DirectByteBuffers produced by
// jniStoreBitmapData below; they wrap a heap-allocated JniBitmap whose
// address is recovered with GetDirectBufferAddress.
// Initializes the MagicBeautify singleton from a stored bitmap handle.
// Returns silently (logging an error) if the handle holds no pixel data.
JNIEXPORT void JNICALL
Java_com_zero_magicshow_core_beautify_MagicJni_jniInitMagicBeautify(JNIEnv *env, jobject instance,
jobject handler) {
//LOGE("in jniInitMagicBeautify");
JniBitmap* jniBitmap = (JniBitmap*) env->GetDirectBufferAddress(handler);
if (jniBitmap->_storedBitmapPixels == NULL){
LOGE("no bitmap data was stored. returning null...");
return;
}
MagicBeautify::getInstance()->initMagicBeautify(jniBitmap);
}
// Applies skin whitening; MagicBeautify::_startBeauty only acts on
// levels in [1.0, 5.0].
JNIEXPORT void JNICALL
Java_com_zero_magicshow_core_beautify_MagicJni_jniStartWhiteSkin(JNIEnv *env, jobject instance,
jfloat whiteLevel){
MagicBeautify::getInstance()->startWhiteSkin(whiteLevel);
}
// Applies skin smoothing.  DenoiseLevel is mapped quadratically into the
// "sigema" strength consumed by MagicBeautify (which acts on [10, 510]).
// NOTE(review): the extra jobject obj parameter implies an extra argument
// in the Java-side native declaration — confirm against MagicJni.java.
JNIEXPORT void JNICALL
Java_com_zero_magicshow_core_beautify_MagicJni_jniStartSkinSmooth(JNIEnv *env, jobject instance,
jobject obj, jfloat DenoiseLevel){
float sigema = 10 + DenoiseLevel * DenoiseLevel * 5;
MagicBeautify::getInstance()->startSkinSmooth(sigema);
}
// Destroys the MagicBeautify singleton, releasing its image buffers.
JNIEXPORT void JNICALL
Java_com_zero_magicshow_core_beautify_MagicJni_jniUnInitMagicBeautify(JNIEnv *env, jobject instance){
MagicBeautify::getInstance()->unInitMagicBeautify();
}
// Copies the Java bitmap's pixels into native memory and returns the
// DirectByteBuffer handle used by the other entry points.
JNIEXPORT jobject JNICALL
Java_com_zero_magicshow_core_beautify_MagicJni_jniStoreBitmapData(JNIEnv *env, jobject instance,
jobject bitmap){
//LOGE("in jniStoreBitmapData");
return BitmapOperation::jniStoreBitmapData(env, instance, bitmap);
}
// Frees the native pixel copy referenced by the handle.
JNIEXPORT void JNICALL
Java_com_zero_magicshow_core_beautify_MagicJni_jniFreeBitmapData(JNIEnv *env, jobject instance,
jobject handle){
BitmapOperation::jniFreeBitmapData(env, instance, handle);
}
// Builds and returns a new Java Bitmap from the stored native pixels.
JNIEXPORT jobject JNICALL
Java_com_zero_magicshow_core_beautify_MagicJni_jniGetBitmapFromStoredBitmapData(JNIEnv *env, jobject instance,
jobject handle){
return BitmapOperation::jniGetBitmapFromStoredBitmapData(env, instance, handle);
}
#ifdef __cplusplus
}
#endif
================================================
FILE: MagicShow/src/main/cpp/beautify/MagicBeautify.cpp
================================================
#include "MagicBeautify.h"
#include "Math.h"
#include "../bitmap/BitmapOperation.h"
#include "../bitmap/Conversion.h"
#include //多线程相关操作头文件,可移植众多平台
#define LOG_TAG "MagicBeautify"
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
#define div255(x) (x * 0.003921F)
#define abs(x) (x>=0 ? x:(-x))
// Lazily-created process-wide singleton instance (zero-initialized static).
MagicBeautify* MagicBeautify::instance;
// Returns the shared MagicBeautify, creating it on first use.
// NOTE(review): not thread-safe — two threads racing here could each
// allocate an instance; confirm all callers use it from a single thread.
MagicBeautify* MagicBeautify::getInstance()
{
if (instance == NULL)
instance = new MagicBeautify();
return instance;
}
// Zero-initializes the cached buffers and effect levels.  All buffers are
// allocated lazily in initMagicBeautify()/initSkinMatrix()/initIntegral().
// NOTE(review): mImageWidth/mImageHeight are NOT initialized here; they are
// only meaningful after initMagicBeautify() has run.
MagicBeautify::MagicBeautify()
{
LOGE("MagicBeautify");
mIntegralMatrix = NULL;
mIntegralMatrixSqr = NULL;
mImageData_yuv = NULL;
mSkinMatrix = NULL;
mImageData_rgb = NULL;
mSmoothLevel = 0.0;
mWhitenLevel = 0.0;
}
// Releases every lazily-allocated image buffer.  delete[] on a null
// pointer is a guaranteed no-op, so no per-pointer null checks are needed.
MagicBeautify::~MagicBeautify()
{
    LOGE("~MagicBeautify");
    delete[] mIntegralMatrix;
    delete[] mIntegralMatrixSqr;
    delete[] mImageData_yuv;
    delete[] mSkinMatrix;
    delete[] mImageData_rgb;
}
// Captures the bitmap: keeps a pointer to its pixel array (written back to
// by the effects), snapshots an untouched RGB copy, derives the YCbCr
// representation, and precomputes the skin mask and integral images.
//
// Fixed: the caches were only allocated while NULL, so re-initializing
// with a bitmap of different dimensions silently reused undersized
// buffers (heap overflow).  Release all caches when the dimensions change
// so every buffer is re-allocated at the correct size below / in
// initSkinMatrix()/initIntegral().  mImageWidth/mImageHeight hold the
// previous image's dimensions only once mImageData_rgb is non-NULL.
void MagicBeautify::initMagicBeautify(JniBitmap* jniBitmap){
LOGE("initMagicBeautify");
if (mImageData_rgb != NULL &&
        (mImageWidth != (int) jniBitmap->_bitmapInfo.width ||
         mImageHeight != (int) jniBitmap->_bitmapInfo.height)) {
    delete[] mImageData_rgb;     mImageData_rgb = NULL;
    delete[] mImageData_yuv;     mImageData_yuv = NULL;
    delete[] mSkinMatrix;        mSkinMatrix = NULL;
    delete[] mIntegralMatrix;    mIntegralMatrix = NULL;
    delete[] mIntegralMatrixSqr; mIntegralMatrixSqr = NULL;
}
storedBitmapPixels = jniBitmap->_storedBitmapPixels;
mImageWidth = jniBitmap->_bitmapInfo.width;
mImageHeight = jniBitmap->_bitmapInfo.height;
if(mImageData_rgb == NULL)
mImageData_rgb = new uint32_t[mImageWidth*mImageHeight];
// Pristine copy of the source pixels: effects always read from this and
// write into storedBitmapPixels, so re-applying never compounds.
memcpy(mImageData_rgb, jniBitmap->_storedBitmapPixels, sizeof(uint32_t) * mImageWidth * mImageHeight);
if(mImageData_yuv == NULL)
mImageData_yuv = new uint8_t[mImageWidth * mImageHeight * 3];
Conversion::RGBToYCbCr((uint8_t*)mImageData_rgb, mImageData_yuv, mImageWidth * mImageHeight);
initSkinMatrix();
initIntegral();
}
// Destroys the singleton (effectively "delete this" when invoked through
// the instance — the destructor frees all buffers) and clears the static
// pointer so the next getInstance() re-creates a fresh object.
void MagicBeautify::unInitMagicBeautify(){
if(instance != NULL)
delete instance;
instance = NULL;
}
// Re-runs the combined pipeline with a new smoothing strength, keeping the
// last-applied whitening level.
void MagicBeautify::startSkinSmooth(float smoothlevel){
_startBeauty(smoothlevel,mWhitenLevel);
}
// Re-runs the combined pipeline with a new whitening level, keeping the
// last-applied smoothing strength.
void MagicBeautify::startWhiteSkin(float whitenlevel){
_startBeauty(mSmoothLevel,whitenlevel);
}
// Applies whichever of the two effects received a new, in-range level.
// Smoothing accepts [10, 510] (the "sigema" computed in the JNI layer);
// whitening accepts [1, 5].  A level equal to the last-applied one is
// skipped, so repeated calls with unchanged values do no work.
void MagicBeautify::_startBeauty(float smoothlevel, float whitenlevel){
LOGE("smoothlevel=%f---whitenlevel=%f",smoothlevel,whitenlevel);
if(smoothlevel >= 10.0 && smoothlevel <= 510.0 && mSmoothLevel != smoothlevel){
mSmoothLevel = smoothlevel;
_startSkinSmooth(mSmoothLevel);
}
if(whitenlevel >= 1.0 && whitenlevel <= 5.0 && whitenlevel != mWhitenLevel){
mWhitenLevel = whitenlevel;
_startWhiteSkin(mWhitenLevel);
}
}
// Whitens the image using four worker threads, each handling one
// horizontal quarter (1-based partition ids decoded in
// _startWhiteSkinAsync).  Blocks until all workers have finished.
//
// Fixed: removed an unused local (`float a = log(whitenlevel);` — the
// worker computes its own), and collapsed the four copy-pasted
// create/join pairs into loops.  The parameter arrays live on this stack
// frame, which is safe because every thread is joined before returning.
void MagicBeautify::_startWhiteSkin(float whitenlevel){
pthread_t tids[4]; // worker thread ids
float params[4][2]; // per-worker {whiten level, partition id 1..4}
for (int t = 0; t < 4; t++) {
    params[t][0] = whitenlevel;
    params[t][1] = (float)(t + 1);
    pthread_create(&tids[t], NULL, _startWhiteSkinAsync, (void*)params[t]);
}
for (int t = 0; t < 4; t++) {
    pthread_join(tids[t], NULL);
}
}
// Worker body for _startWhiteSkin.  args points at two floats:
// {whiten level, partition id in 1..4}.  The partition id selects one
// horizontal quarter of the image; the quarters are disjoint, so the four
// workers never write the same pixel.  Each channel gets a logarithmic
// brightening curve: out = 255 * log(in/255 * (level-1) + 1) / log(level).
// Reads the pristine mImageData_rgb copy and writes the result into
// storedBitmapPixels (the buffer handed back to Java).
void* MagicBeautify::_startWhiteSkinAsync( void* args )
{
float *p=(float *)args;
float whitenlevel= p[0];
int partSecion = (int)p[1];
// Nested ternaries map partition id -> [startHeight, endHeight):
// 1 -> [0, h/4), 2 -> [h/4, h/2), 3 -> [h/2, 3h/4), 4 -> [3h/4, h).
// Integer division is consistent across the bounds, so the four ranges
// tile the full height with no gaps or overlaps.
int startHeight = partSecion == 1 ? 0 : partSecion == 2 ?
getInstance()->mImageHeight / 4 : partSecion == 3 ?
getInstance()->mImageHeight / 2 : partSecion == 4 ?
getInstance()->mImageHeight / 4 * 3 : 0;
int endHeight = partSecion == 1 ? getInstance()->mImageHeight / 4 :
partSecion == 2 ? getInstance()->mImageHeight / 2 :
partSecion == 3 ? getInstance()->mImageHeight / 4 * 3 :
partSecion == 4 ? getInstance()->mImageHeight : 0;
// float whitenlevel = *( (float*)args );
// log(level) normalizes the curve; level == 1 gives a == 0 and the
// pixel is passed through unchanged (guard below avoids divide-by-zero).
float a = log(whitenlevel);
for(int i = startHeight; i < endHeight; i++){
for(int j = 0; j < getInstance()->mImageWidth; j++){
//calculate the point location
int offset = i*getInstance()->mImageWidth+j;
ARGB RGB;
BitmapOperation::convertIntToArgb(getInstance()->mImageData_rgb[offset],&RGB);
if(a != 0){
RGB.red = 255 * (log(div255(RGB.red) * (whitenlevel - 1) + 1) / a);
RGB.green = 255 * (log(div255(RGB.green) * (whitenlevel - 1) + 1) / a);
RGB.blue = 255 * (log(div255(RGB.blue) * (whitenlevel - 1) + 1) / a);
}
getInstance()->storedBitmapPixels[offset] = BitmapOperation::convertArgbToInt(RGB);
}
}
pthread_exit(nullptr);
} // Thread start routine exits via pthread_exit; its address is passed to pthread_create.
// Local-statistics smoothing ("surface blur" style) on the luma channel.
// For every pixel flagged 255 in mSkinMatrix, computes the mean m and
// variance v of its (~2*radius+1)-wide window in O(1) via the integral
// images of Y and Y^2, then blends y' = m + k*(y - m) with
// k = v / (v + smoothlevel): flat areas (small v) are pulled strongly to
// the mean, edges (large v) are mostly preserved.  Larger smoothlevel =>
// stronger smoothing.  Finally converts the adjusted YCbCr data back to
// RGB into storedBitmapPixels.  Row 0 and column 0 are skipped because
// the rectangle-sum lookups index [i-1]/[j-1].
void MagicBeautify::_startSkinSmooth(float smoothlevel){
if(mIntegralMatrix == NULL || mIntegralMatrixSqr == NULL || mSkinMatrix == NULL){
LOGE("not init correctly");
return;
}
// Rebuild YCbCr from the pristine RGB copy so repeated calls with
// different levels never compound the smoothing.
Conversion::RGBToYCbCr((uint8_t*)mImageData_rgb, mImageData_yuv, mImageWidth * mImageHeight);
// Window radius = 2% of the longer image side.
int radius = mImageWidth > mImageHeight ? mImageWidth * 0.02 : mImageHeight * 0.02;
for(int i = 1; i < mImageHeight; i++){
for(int j = 1; j < mImageWidth; j++){
int offset = i * mImageWidth + j;
if(mSkinMatrix[offset] == 255){
// Clamp the window to [1, dim-1] so the [iMin-1]/[jMin-1] lookups stay in bounds.
int iMax = i + radius >= mImageHeight-1 ? mImageHeight-1 : i + radius;
int jMax = j + radius >= mImageWidth-1 ? mImageWidth-1 :j + radius;
int iMin = i - radius <= 1 ? 1 : i - radius;
int jMin = j - radius <= 1 ? 1 : j - radius;
int squar = (iMax - iMin + 1)*(jMax - jMin + 1);
// Standard integral-image rectangle sum: S = I(BR) + I(TL-1) - I(BL-1) - I(TR-1).
int i4 = iMax*mImageWidth+jMax;
int i3 = (iMin-1)*mImageWidth+(jMin-1);
int i2 = iMax*mImageWidth+(jMin-1);
int i1 = (iMin-1)*mImageWidth+jMax;
float m = (mIntegralMatrix[i4]
+ mIntegralMatrix[i3]
- mIntegralMatrix[i2]
- mIntegralMatrix[i1]) / squar;
float v = (mIntegralMatrixSqr[i4]
+ mIntegralMatrixSqr[i3]
- mIntegralMatrixSqr[i2]
- mIntegralMatrixSqr[i1]) / squar - m*m;
float k = v / (v + smoothlevel);
// Blend only the Y plane (every 3rd byte); Cb/Cr are untouched.
mImageData_yuv[offset * 3] = ceil(m - k * m + k * mImageData_yuv[offset * 3]);
}
}
}
Conversion::YCbCrToRGB(mImageData_yuv, (uint8_t*)storedBitmapPixels,
mImageWidth * mImageHeight);
}
// Builds a per-pixel skin mask: 255 = treat as skin (eligible for
// smoothing), 0 = leave untouched.  Two alternative rules are OR-ed:
// a "normal light" threshold rule and a "bright/flash" rule.
// NOTE(review): relative to the classic RGB skin rule (R>95, G>40, B>20,
// R-G>15, R-B>15) the struct's red/blue fields appear swapped here —
// presumably because convertIntToArgb's "red" field holds the opposite
// byte of the RGBA_8888 pixel on little-endian devices.  Confirm against
// JniBitmap.h / the bitmap byte order before changing anything.
void MagicBeautify::initSkinMatrix(){
LOGE("initSkinMatrix");
if(mSkinMatrix == NULL)
mSkinMatrix = new uint8_t[mImageWidth * mImageHeight];
for(int i = 0; i < mImageHeight; i++){
for(int j = 0; j < mImageWidth; j++){
int offset = i*mImageWidth+j;
ARGB RGB;
BitmapOperation::convertIntToArgb(mImageData_rgb[offset],&RGB);
if ((RGB.blue>95 && RGB.green>40 && RGB.red>20 &&
RGB.blue-RGB.red>15 && RGB.blue-RGB.green>15)||
(RGB.blue>200 && RGB.green>210 && RGB.red>170 &&
abs(RGB.blue-RGB.red)<=15 && RGB.blue>RGB.red&& RGB.green>RGB.red))
mSkinMatrix[offset] = 255;
else
mSkinMatrix[offset] = 0;
}
}
}
// Precomputes the integral images of the luma channel: mIntegralMatrix
// holds the running sum of Y, mIntegralMatrixSqr the running sum of Y^2,
// so _startSkinSmooth can read any rectangle's sum in O(1).  The Y value
// of pixel p is mImageData_yuv[3*p] (3 bytes per YCbCr pixel).
// columnSum[j] accumulates the vertical sum of column j down to the
// current row; each integral entry is then the entry to its left plus
// that column sum.
void MagicBeautify::initIntegral(){
LOGE("initIntegral");
if(mIntegralMatrix == NULL)
mIntegralMatrix = new uint64_t[mImageWidth * mImageHeight];
if(mIntegralMatrixSqr == NULL)
mIntegralMatrixSqr = new uint64_t[mImageWidth * mImageHeight];
uint64_t *columnSum = new uint64_t[mImageWidth];
uint64_t *columnSumSqr = new uint64_t[mImageWidth];
// First row: seed the column sums and accumulate left-to-right.
columnSum[0] = mImageData_yuv[0];
columnSumSqr[0] = mImageData_yuv[0] * mImageData_yuv[0];
mIntegralMatrix[0] = columnSum[0];
mIntegralMatrixSqr[0] = columnSumSqr[0];
for(int i = 1;i < mImageWidth;i++){
columnSum[i] = mImageData_yuv[3*i];
columnSumSqr[i] = mImageData_yuv[3*i] * mImageData_yuv[3*i];
mIntegralMatrix[i] = columnSum[i];
mIntegralMatrix[i] += mIntegralMatrix[i-1];
mIntegralMatrixSqr[i] = columnSumSqr[i];
mIntegralMatrixSqr[i] += mIntegralMatrixSqr[i-1];
}
// Remaining rows: extend each column sum downward, then sweep across.
for (int i = 1;i < mImageHeight; i++){
int offset = i * mImageWidth;
columnSum[0] += mImageData_yuv[3*offset];
columnSumSqr[0] += mImageData_yuv[3*offset] * mImageData_yuv[3*offset];
mIntegralMatrix[offset] = columnSum[0];
mIntegralMatrixSqr[offset] = columnSumSqr[0];
for(int j = 1; j < mImageWidth; j++){
columnSum[j] += mImageData_yuv[3*(offset+j)];
columnSumSqr[j] += mImageData_yuv[3*(offset+j)] * mImageData_yuv[3*(offset+j)];
mIntegralMatrix[offset+j] = mIntegralMatrix[offset+j-1] + columnSum[j];
mIntegralMatrixSqr[offset+j] = mIntegralMatrixSqr[offset+j-1] + columnSumSqr[j];
}
}
delete[] columnSum;
delete[] columnSumSqr;
LOGE("initIntegral~end");
}
================================================
FILE: MagicShow/src/main/cpp/beautify/MagicBeautify.h
================================================
#ifndef _MAGIC_BEAUTIFY_H_
#define _MAGIC_BEAUTIFY_H_
#include "../bitmap/JniBitmap.h"
// Singleton that applies skin-whitening and skin-smoothing effects to a
// bitmap captured in native memory (see jniStoreBitmapData).  Not
// thread-safe at the API level; the whitening pass internally fans out to
// four pthreads over disjoint image regions.
class MagicBeautify
{
public:
// Capture the bitmap's pixels and precompute the skin mask / integral images.
void initMagicBeautify(JniBitmap* jniBitmap);
// Destroy the singleton and release all buffers.
void unInitMagicBeautify();
// Apply smoothing (effective range [10, 510]) keeping the current whitening.
void startSkinSmooth(float smoothlevel);
// Apply whitening (effective range [1, 5]) keeping the current smoothing.
void startWhiteSkin(float whitenlevel);
static MagicBeautify* getInstance();
~MagicBeautify();
// pthread start routine; args = two floats {level, partition id 1..4}.
static void* _startWhiteSkinAsync(void* args);
private:
static MagicBeautify * instance;
MagicBeautify();
uint64_t *mIntegralMatrix;     // integral image of the Y channel
uint64_t *mIntegralMatrixSqr;  // integral image of Y^2
uint32_t *storedBitmapPixels;  // Java-visible pixel buffer (effect output)
uint32_t *mImageData_rgb;      // pristine copy of the source pixels
uint8_t *mImageData_yuv;       // 3 bytes per pixel: Y, Cb, Cr
uint8_t *mSkinMatrix;          // 255 = skin pixel, 0 = skip
int mImageWidth;
int mImageHeight;
float mSmoothLevel;            // last-applied smoothing strength
float mWhitenLevel;            // last-applied whitening level
void initIntegral();
void initSkinMatrix();
void _startBeauty(float smoothlevel, float whitenlevel);
void _startSkinSmooth(float smoothlevel);
void _startWhiteSkin(float whitenlevel);
};
#endif
================================================
FILE: MagicShow/src/main/cpp/bitmap/BitmapOperation.cpp
================================================
#include "BitmapOperation.h"
#include "Conversion.h"
#define LOG_TAG "BitmapOperation"
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
// Packs an ARGB struct into one 32-bit value, field order
// alpha|red|green|blue from the high byte down (inverse of convertIntToArgb).
int32_t BitmapOperation::convertArgbToInt(ARGB argb)
{
    int32_t packed = argb.alpha;
    packed = (packed << 8) | argb.red;
    packed = (packed << 8) | argb.green;
    packed = (packed << 8) | argb.blue;
    return packed;
}
// Unpacks a 32-bit pixel into the ARGB struct, high byte -> alpha,
// then red, green, blue (inverse of convertArgbToInt).
void BitmapOperation::convertIntToArgb(uint32_t pixel, ARGB* argb)
{
    argb->alpha = (pixel >> 24);
    argb->red = ((pixel >> 16) & 0xff);
    argb->green = ((pixel >> 8) & 0xff);
    argb->blue = (pixel & 0xff);
}
/** Store a Java bitmap's pixels in native memory.
 *  Returns a zero-capacity DirectByteBuffer that merely wraps the address
 *  of a heap-allocated JniBitmap (an opaque handle for the other JNI
 *  entry points), or NULL on failure.  Only RGBA_8888 bitmaps are
 *  accepted. */
jobject BitmapOperation::jniStoreBitmapData(
JNIEnv * env, jobject obj, jobject bitmap)
{
//LOGE("reading bitmap info...");
AndroidBitmapInfo bitmapInfo;
uint32_t* storedBitmapPixels = NULL;
int ret;
if ((ret = AndroidBitmap_getInfo(env, bitmap, &bitmapInfo)) < 0)
{
LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
return NULL;
}
if (bitmapInfo.format != ANDROID_BITMAP_FORMAT_RGBA_8888)
{
LOGE("Bitmap format is not RGBA_8888!");
return NULL;
}
void* bitmapPixels;
if ((ret = AndroidBitmap_lockPixels(env, bitmap, &bitmapPixels)) < 0)
{
LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
return NULL;
}
storedBitmapPixels = new uint32_t[bitmapInfo.height * bitmapInfo.width];
// Fixed: copy row by row honoring bitmapInfo.stride.  A locked bitmap may
// pad each row beyond width*4 bytes, so the previous single flat memcpy of
// width*height pixels read the wrong bytes on padded bitmaps.
const uint8_t* src = (const uint8_t*) bitmapPixels;
for (uint32_t row = 0; row < bitmapInfo.height; row++)
{
memcpy(storedBitmapPixels + row * bitmapInfo.width,
       src + row * bitmapInfo.stride,
       sizeof(uint32_t) * bitmapInfo.width);
}
AndroidBitmap_unlockPixels(env, bitmap);
JniBitmap *jniBitmap = new JniBitmap();
jniBitmap->_bitmapInfo = bitmapInfo;
jniBitmap->_storedBitmapPixels = storedBitmapPixels;
// Capacity 0 on purpose: the buffer is never read through, it only
// carries the JniBitmap pointer back to Java.
return env->NewDirectByteBuffer(jniBitmap, 0);
}
/** Free the native pixel copy and the JniBitmap struct behind a handle. */
// NOTE(review): when _storedBitmapPixels is already NULL this returns
// early and the JniBitmap struct itself is never freed (a small leak).
// The guard may be an intentional best-effort defence against a
// double-free on a stale handle — confirm the Java caller's lifecycle
// before removing it.
void BitmapOperation::jniFreeBitmapData(
JNIEnv * env, jobject obj, jobject handle)
{
JniBitmap* jniBitmap = (JniBitmap*) env->GetDirectBufferAddress(handle);
if (jniBitmap->_storedBitmapPixels == NULL)
return;
delete[] jniBitmap->_storedBitmapPixels;
jniBitmap->_storedBitmapPixels = NULL;
delete jniBitmap;
}
/** Rebuild a Java Bitmap from the native pixels stored behind a handle.
 *  Creates a new ARGB_8888 bitmap of the stored dimensions via reflection
 *  on Bitmap.createBitmap, copies the stored pixels into it, and returns
 *  it (NULL if no pixels are stored or locking fails). */
jobject BitmapOperation::jniGetBitmapFromStoredBitmapData(
JNIEnv * env, jobject obj, jobject handle)
{
JniBitmap* jniBitmap = (JniBitmap*) env->GetDirectBufferAddress(handle);
if (jniBitmap->_storedBitmapPixels == NULL)
{
LOGD("no bitmap data was stored. returning null...");
return NULL;
}
//
// Resolve Bitmap.createBitmap(int, int, Bitmap.Config) and the
// Bitmap.Config.valueOf("ARGB_8888") constant through JNI.
//
jclass bitmapCls = env->FindClass("android/graphics/Bitmap");
jmethodID createBitmapFunction = env->GetStaticMethodID(bitmapCls,
"createBitmap",
"(IILandroid/graphics/Bitmap$Config;)Landroid/graphics/Bitmap;");
jstring configName = env->NewStringUTF("ARGB_8888");
jclass bitmapConfigClass = env->FindClass("android/graphics/Bitmap$Config");
jmethodID valueOfBitmapConfigFunction = env->GetStaticMethodID(
bitmapConfigClass, "valueOf",
"(Ljava/lang/String;)Landroid/graphics/Bitmap$Config;");
jobject bitmapConfig = env->CallStaticObjectMethod(bitmapConfigClass,
valueOfBitmapConfigFunction, configName);
jobject newBitmap = env->CallStaticObjectMethod(bitmapCls,
createBitmapFunction, jniBitmap->_bitmapInfo.width,
jniBitmap->_bitmapInfo.height, bitmapConfig);
//
// Copy the stored pixels into the freshly created bitmap.
// NOTE(review): this flat memcpy assumes the new bitmap's stride equals
// width*4; verify via AndroidBitmap_getInfo if padded strides are
// possible here.  Local refs (classes/strings) are not explicitly
// deleted — acceptable for a single JNI call, but worth noting.
//
int ret;
void* bitmapPixels;
if ((ret = AndroidBitmap_lockPixels(env, newBitmap, &bitmapPixels)) < 0)
{
LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
return NULL;
}
uint32_t* newBitmapPixels = (uint32_t*) bitmapPixels;
int pixelsCount = jniBitmap->_bitmapInfo.height
* jniBitmap->_bitmapInfo.width;
memcpy(newBitmapPixels, jniBitmap->_storedBitmapPixels,
sizeof(uint32_t) * pixelsCount);
AndroidBitmap_unlockPixels(env, newBitmap);
//LOGD("returning the new bitmap");
return newBitmap;
}
================================================
FILE: MagicShow/src/main/cpp/bitmap/BitmapOperation.h
================================================
#ifndef _BITMAP_OPERATION_H_
#define _BITMAP_OPERATION_H_
// NOTE(review): the #include targets below were lost in this source
// extract; based on the .cpp they are expected to cover jni.h,
// android/bitmap.h, android/log.h and the C standard headers
// (stdint/string/stdlib) — restore them from the original file.
#include
#include
#include
#include
#include
#include
#include "JniBitmap.h"
// Stateless helpers for moving bitmap pixel data between Java and native
// memory, plus ARGB packing/unpacking used by MagicBeautify.
class BitmapOperation
{
public:
// Pack an ARGB struct into a 32-bit value (alpha in the high byte).
static int32_t convertArgbToInt(ARGB argb);
// Unpack a 32-bit pixel into an ARGB struct.
static void convertIntToArgb(uint32_t pixel, ARGB* argb);
// Copy a Java bitmap's pixels into a heap JniBitmap; returns a
// zero-capacity DirectByteBuffer used as an opaque handle.
static jobject jniStoreBitmapData(
JNIEnv * env, jobject obj, jobject bitmap);
// Release the native pixels (and struct) behind a handle.
static void jniFreeBitmapData(
JNIEnv * env, jobject obj, jobject handle);
// Build a new Java Bitmap from the stored native pixels.
static jobject jniGetBitmapFromStoredBitmapData(
JNIEnv * env, jobject obj, jobject handle);
};
#endif
================================================
FILE: MagicShow/src/main/cpp/bitmap/Conversion.cpp
================================================
#include "Conversion.h"
// Converts length pixels of packed 3-byte Y/Cb/Cr data back into packed
// 4-byte pixels, using the fixed-point coefficients from Conversion.h
// (Shift bits of precision, rounded via HalfShiftValue).  Byte order of
// the output matches RGBToYCbCr's input: the first byte is the channel
// this codebase labels "blue", the fourth is forced to 0xff (opaque).
void Conversion::YCbCrToRGB(uint8_t* From, uint8_t* To, int length)
{
    if (length < 1) return;
    for (int i = 0; i < length; i++)
    {
        int src = (i << 1) + i;            // 3 bytes per YCbCr pixel
        int y = From[src];
        int cb = From[src + 1] - 128;
        int cr = From[src + 2] - 128;
        int red = y + ((RGBRCrI * cr + HalfShiftValue) >> Shift);
        int green = y + ((RGBGCbI * cb + RGBGCrI * cr + HalfShiftValue) >> Shift);
        int blue = y + ((RGBBCbI * cb + HalfShiftValue) >> Shift);
        // Clamp each channel to the valid [0, 255] byte range.
        red = red < 0 ? 0 : (red > 255 ? 255 : red);
        green = green < 0 ? 0 : (green > 255 ? 255 : green);
        blue = blue < 0 ? 0 : (blue > 255 ? 255 : blue);
        int dst = i << 2;                  // 4 bytes per output pixel
        To[dst] = (uint8_t)blue;
        To[dst + 1] = (uint8_t)green;
        To[dst + 2] = (uint8_t)red;
        To[dst + 3] = 0xff;
    }
}
// Converts length pixels of packed 4-byte data into packed 3-byte
// Y/Cb/Cr triples using the fixed-point coefficients from Conversion.h.
// The first input byte is the channel this codebase labels "blue", the
// third "red"; the fourth (alpha) byte is ignored.  Cb/Cr are biased by
// +128 into unsigned byte range.
void Conversion::RGBToYCbCr(uint8_t* From, uint8_t* To, int length)
{
    if (length < 1) return;
    for (int i = 0; i < length; i++)
    {
        int src = i << 2;                  // 4 bytes per input pixel
        int blue = From[src];
        int green = From[src + 1];
        int red = From[src + 2];
        int dst = (i << 1) + i;            // 3 bytes per output pixel
        To[dst] = (uint8_t)((YCbCrYRI * red + YCbCrYGI * green + YCbCrYBI * blue + HalfShiftValue) >> Shift);
        To[dst + 1] = (uint8_t)(128 + ((YCbCrCbRI * red + YCbCrCbGI * green + YCbCrCbBI * blue + HalfShiftValue) >> Shift));
        To[dst + 2] = (uint8_t)(128 + ((YCbCrCrRI * red + YCbCrCrGI * green + YCbCrCrBI * blue + HalfShiftValue) >> Shift));
    }
}
================================================
FILE: MagicShow/src/main/cpp/bitmap/Conversion.h
================================================
#ifndef _CONVERSION_H_
#define _CONVERSION_H_
// NOTE(review): the original #include targets were lost during extraction.
// <stdint.h> is required for the uint8_t parameters below; the second header
// is reconstructed as <stdio.h> — confirm against the upstream repository.
#include <stdio.h>
#include <stdint.h>
// BT.601 YCbCr <-> RGB matrix coefficients, floating-point form.
// Row naming: YCbCr<out><in>F maps RGB -> YCbCr, RGB<out><in>F maps back.
const float YCbCrYRF = 0.299F;
const float YCbCrYGF = 0.587F;
const float YCbCrYBF = 0.114F;
const float YCbCrCbRF = -0.168736F;
const float YCbCrCbGF = -0.331264F;
const float YCbCrCbBF = 0.500000F;
const float YCbCrCrRF = 0.500000F;
const float YCbCrCrGF = -0.418688F;
const float YCbCrCrBF = -0.081312F;
const float RGBRYF = 1.00000F;
const float RGBRCbF = 0.0000F;
const float RGBRCrF = 1.40200F;
const float RGBGYF = 1.00000F;
const float RGBGCbF = -0.34414F;
const float RGBGCrF = -0.71414F;
const float RGBBYF = 1.00000F;
const float RGBBCbF = 1.77200F;
const float RGBBCrF = 0.00000F;
// Fixed-point scheme: each coefficient is pre-multiplied by 2^Shift and
// rounded to the nearest integer (+0.5). Adding HalfShiftValue before the
// final `>> Shift` makes the truncation behave like rounding.
const int Shift = 20;
const int HalfShiftValue = 1 << (Shift - 1);
// Integer (Q20 fixed-point) versions of the float coefficients above.
const int YCbCrYRI = (int)(YCbCrYRF * (1 << Shift) + 0.5);
const int YCbCrYGI = (int)(YCbCrYGF * (1 << Shift) + 0.5);
const int YCbCrYBI = (int)(YCbCrYBF * (1 << Shift) + 0.5);
const int YCbCrCbRI = (int)(YCbCrCbRF * (1 << Shift) + 0.5);
const int YCbCrCbGI = (int)(YCbCrCbGF * (1 << Shift) + 0.5);
const int YCbCrCbBI = (int)(YCbCrCbBF * (1 << Shift) + 0.5);
const int YCbCrCrRI = (int)(YCbCrCrRF * (1 << Shift) + 0.5);
const int YCbCrCrGI = (int)(YCbCrCrGF * (1 << Shift) + 0.5);
const int YCbCrCrBI = (int)(YCbCrCrBF * (1 << Shift) + 0.5);
const int RGBRYI = (int)(RGBRYF * (1 << Shift) + 0.5);
const int RGBRCbI = (int)(RGBRCbF * (1 << Shift) + 0.5);
const int RGBRCrI = (int)(RGBRCrF * (1 << Shift) + 0.5);
const int RGBGYI = (int)(RGBGYF * (1 << Shift) + 0.5);
const int RGBGCbI = (int)(RGBGCbF * (1 << Shift) + 0.5);
const int RGBGCrI = (int)(RGBGCrF * (1 << Shift) + 0.5);
const int RGBBYI = (int)(RGBBYF * (1 << Shift) + 0.5);
const int RGBBCbI = (int)(RGBBCbF * (1 << Shift) + 0.5);
const int RGBBCrI = (int)(RGBBCrF * (1 << Shift) + 0.5);
// Stateless converters between packed 3-byte YCbCr buffers and 4-byte
// BGRA buffers. `Length` is a pixel count, not a byte count.
class Conversion
{
public:
	static void YCbCrToRGB(uint8_t* From, uint8_t* To, int Length);
	static void RGBToYCbCr(uint8_t* From, uint8_t* To, int Length);
private:
};
#endif
================================================
FILE: MagicShow/src/main/cpp/bitmap/JniBitmap.h
================================================
#ifndef _JNI_BITMAP_H_
#define _JNI_BITMAP_H_
#include
typedef struct
{
uint8_t alpha, red, green, blue;
} ARGB;
class JniBitmap
{
public:
uint32_t* _storedBitmapPixels;
AndroidBitmapInfo _bitmapInfo;
JniBitmap()
{
_storedBitmapPixels = NULL;
}
};
#endif
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/MagicShowManager.java
================================================
package com.zero.magicshow;
import android.Manifest;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.PermissionChecker;
import android.text.TextUtils;
import android.util.Log;
import com.zero.magicshow.activity.AlbumActivity;
import com.zero.magicshow.activity.CameraActivity;
import com.zero.magicshow.common.entity.MagicShowResultEntity;
import com.zero.magicshow.common.iface.CameraShootCallBack;
import com.zero.magicshow.common.iface.ImageEditCallBack;
import com.zero.magicshow.common.utils.Constants;
import com.zero.magicshow.common.utils.RxBus;
import com.zero.magicshow.common.config.PathConfig;
import com.zero.zerolib.util.FileUtil;
import rx.functions.Action1;
/**
* Created by hongli on 2017/8/31.
*/
public class MagicShowManager {
private static final String TAG = "MagicShowManager";
private static final MagicShowManager magicShowManager = new MagicShowManager();
private MagicShowManager(){}
public static MagicShowManager getInstance(){
return magicShowManager;
}
public void setCachePath(String cachePath){
PathConfig.setTempCache(cachePath);
}
/**
* 执行照片编辑
* @param context
* @param imagePath
* @param imageEditCallBack
*/
public void openEdit(Context context,String imagePath, final ImageEditCallBack imageEditCallBack){
if(null == context || TextUtils.isEmpty(imagePath) || !FileUtil.isExist(imagePath)){
Log.e(TAG,"in open edit data error.");
return;
}
RxBus.getInstance().unregisterMain(Constants.RX_JAVA_TYPE_IMAGE_EDIT);
RxBus.getInstance().registerMain(Constants.RX_JAVA_TYPE_IMAGE_EDIT, new Action1() {
@Override
public void call(MagicShowResultEntity magicShowResultEntity) {
imageEditCallBack.onCompentFinished(magicShowResultEntity);
RxBus.getInstance().unregisterMain(Constants.RX_JAVA_TYPE_IMAGE_EDIT);
}
});
Intent intent = new Intent(context, AlbumActivity.class);
intent.putExtra(Constants.TRANSMIT_IMAGE_URL,imagePath);
context.startActivity(intent);
}
/**
* 执行拍照
* @param context
* @param cameraShootCallBack
*/
public void openCamera(Activity context,final CameraShootCallBack cameraShootCallBack){
if(null == context){
Log.e(TAG,"in open edit data error.");
return;
}
if (PermissionChecker.checkSelfPermission(context, Manifest.permission.CAMERA)
== PackageManager.PERMISSION_DENIED) {
ActivityCompat.requestPermissions(context, new String[] { Manifest.permission.CAMERA },1);
return;
}
RxBus.getInstance().unregisterMain(Constants.RX_JAVA_TYPE_CAMERA_SHOOT);
RxBus.getInstance().registerMain(Constants.RX_JAVA_TYPE_CAMERA_SHOOT, new Action1() {
@Override
public void call(MagicShowResultEntity magicShowResultEntity) {
cameraShootCallBack.onCompentFinished(magicShowResultEntity);
RxBus.getInstance().unregisterMain(Constants.RX_JAVA_TYPE_CAMERA_SHOOT);
}
});
Intent intent = new Intent(context, CameraActivity.class);
context.startActivity(intent);
}
/**
* 打开拍照,然后直接对拍照图片进行编辑
* @param context
* @param imageEditCallBack
*/
public void openCameraAndEdit(final Activity context, final ImageEditCallBack imageEditCallBack){
openCamera(context, new CameraShootCallBack() {
@Override
public void onCompentFinished(MagicShowResultEntity magicShowResultEntity) {
openEdit(context,magicShowResultEntity.getFilePath(),imageEditCallBack);
}
});
}
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/activity/AlbumActivity.java
================================================
package com.zero.magicshow.activity;
import android.content.Context;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentTransaction;
import android.util.Log;
import android.view.View;
import android.view.animation.Animation;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.zero.magicshow.R;
import com.zero.magicshow.common.base.BaseActivity;
import com.zero.magicshow.common.base.MagicBaseView;
import com.zero.magicshow.common.entity.MagicShowResultEntity;
import com.zero.magicshow.common.utils.BaseUtil;
import com.zero.magicshow.common.utils.Constants;
import com.zero.magicshow.common.utils.RxBus;
import com.zero.magicshow.common.utils.SavePictureTask;
import com.zero.magicshow.core.MagicEngine;
import com.zero.magicshow.core.widget.MagicImageView;
import com.zero.magicshow.view.edit.ImageEditFragment;
import com.zero.magicshow.view.edit.ImageEditManager;
import com.zero.magicshow.view.edit.iface.ImageEditNavListener;
import com.zero.zerolib.util.AnimationUtils;
import com.zero.zerolib.util.StringUtil;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
/**
 * Photo-editing screen: shows the image in a {@link MagicImageView} and hosts
 * three editor fragments (beauty / adjust / filter) that slide in from the
 * bottom. Saving posts the result on RxBus under
 * {@link Constants#RX_JAVA_TYPE_IMAGE_EDIT} (consumed by MagicShowManager).
 *
 * Created by zero on 2016/3/18.
 */
public class AlbumActivity extends BaseActivity{
    private ImageView btnBack, btnNext, btnModifyClose, btnModifySave;
    private TextView tvTitle;
    private MagicImageView magicImageView;
    private LinearLayout blockNavigation;
    private View blockModifyController, blockTopBar;
    // Edit-type key (IMAGE_EDIT_TYPE_*) -> the fragment implementing that editor.
    // Generic type arguments restored: they were stripped in extraction and the
    // raw `Iterator>` form below did not compile.
    private HashMap<String, Fragment> fragmentHashMap = new HashMap<>();
    public static final String IMAGE_EDIT_TYPE_BEAUTY = "beauty"; // skin beautify
    public static final String IMAGE_EDIT_TYPE_ADJUST = "adjust"; // image adjustment
    public static final String IMAGE_EDIT_TYPE_FILTER = "filter"; // color filters
    private Context context;
    private MagicEngine magicEngine;
    private String imageUrl;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        context = this;
        setContentView(R.layout.activity_image);
        init();
    }
    // View lookup, data setup and listener wiring, in that order.
    private void init(){
        initView();
        initData();
        initListener();
    }
    private void initView(){
        btnBack = (ImageView) findViewById(R.id.image_edit_back);
        btnNext = (ImageView) findViewById(R.id.image_edit_save);
        tvTitle = (TextView) findViewById(R.id.image_edit_title);
        magicImageView = (MagicImageView) findViewById(R.id.image_edit_magicimageview);
        blockNavigation = (LinearLayout) findViewById(R.id.image_edit_navigation);
        blockModifyController = findViewById(R.id.image_edit_modify_controller_block);
        blockTopBar = findViewById(R.id.image_edit_topbar);
        btnModifyClose = (ImageView) findViewById(R.id.image_edit_modify_controller_close);
        btnModifySave = (ImageView) findViewById(R.id.image_edit_modify_controller_save);
    }
    private void initData(){
        imageUrl = getIntent().getStringExtra(Constants.TRANSMIT_IMAGE_URL);
        magicEngine = new MagicEngine.Builder().build(magicImageView);
        initFragments();
        magicImageView.setGLScaleType(MagicBaseView.ScaleType.CENTER_INSIDE);
        if(StringUtil.isEmpty(imageUrl)){
            // No image supplied: fall back to the bundled sample asset.
            magicImageView.setImageBitmap(BaseUtil.getImageFromAssetsFile(context, "dark.jpg"));
        }else{
            // Down-sample to a bounded size to avoid loading a huge bitmap.
            magicImageView.setImageBitmap(BaseUtil.getBitmapBySimpleSize(imageUrl, BaseUtil.dipToPix(context, 640), BaseUtil.dipToPix(context, 640)));
        }
    }
    private void initListener(){
        btnBack.setOnClickListener(onClickListener);
        btnNext.setOnClickListener(onClickListener);
        btnModifyClose.setOnClickListener(onClickListener);
        btnModifySave.setOnClickListener(onClickListener);
    }
    // Registers the three editor fragments into fragmentHashMap.
    private void initFragments(){
        ImageEditManager.initAdjustView(context, fragmentHashMap, blockNavigation, onHideListener, imageEditNavListener);
        ImageEditManager.initBeautyView(context, fragmentHashMap, blockNavigation, onHideListener, imageEditNavListener);
        ImageEditManager.initFilterView(context, fragmentHashMap, blockNavigation, onHideListener, imageEditNavListener);
    }
    @Override
    protected void doFinishAction() {
        // Back first closes an open editor fragment; only then finishes the activity.
        Fragment showFragment = getShowFragment();
        if(null != showFragment){
            ((ImageEditFragment) showFragment).doFinishAction();
            return;
        }
        super.doFinishAction();
    }
    /**
     * @return the currently visible editor fragment, or null when none is shown
     */
    private Fragment getShowFragment(){
        for (Map.Entry<String, Fragment> item : fragmentHashMap.entrySet()) {
            if (item.getValue().isVisible()) {
                return item.getValue();
            }
        }
        return null;
    }
    private void doClickModifyCloseAction(){
        Fragment showFragment = getShowFragment();
        if(null != showFragment){
            ((ImageEditFragment) showFragment).doFinishAction();
        }
    }
    private void doClickModifySaveAction(){
        Fragment showFragment = getShowFragment();
        if(null != showFragment){
            ((ImageEditFragment) showFragment).doSaveConfigeAction();
        }
    }
    private void doBackAction(){
        doFinishAction();
    }
    // Saves the edited picture, publishes the result, then closes the screen.
    private void doNextAction(){
        MagicEngine.getInstance().savePicture(BaseUtil.getRandomTempImageFile(), new SavePictureTask.OnPictureSaveListener() {
            @Override
            public void onSaved(MagicShowResultEntity resultEntity) {
                RxBus.getInstance().post(resultEntity, Constants.RX_JAVA_TYPE_IMAGE_EDIT);
                doFinishAction();
            }
        });
    }
    private View.OnClickListener onClickListener = new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            if(v == btnModifyClose){
                doClickModifyCloseAction();
            }else if(v == btnModifySave){
                doClickModifySaveAction();
            }else if(v == btnBack){
                doBackAction();
            }else if(v == btnNext){
                doNextAction();
            }
        }
    };
    // Called after an editor fragment finishes its hide animation.
    private ImageEditFragment.onHideListener onHideListener = new ImageEditFragment.onHideListener() {
        @Override
        public void onAfterHide() {
            Log.e("HongLi","in onAfterHide");
            Fragment showFragment = getShowFragment();
            if(null != showFragment){
                hiddenFragment(showFragment);
            }
        }
    };
    // Bottom-navigation taps open the corresponding editor fragment.
    private ImageEditNavListener imageEditNavListener = new ImageEditNavListener() {
        @Override
        public void onClick(View view, String type) {
            showFragment(fragmentHashMap.get(type));
        }
    };
    // Slides the editor fragment in and swaps the top bar for the close/save bar.
    private void showFragment(Fragment fragment){
        FragmentTransaction fragmentTransaction = getSupportFragmentManager().beginTransaction();
        fragmentTransaction.setCustomAnimations(R.anim.bottom_sliding_in, R.anim.bottom_sliding_out);
        if(!fragment.isAdded()){
            fragmentTransaction.add(R.id.image_edit_fragment_container, fragment).show(fragment).commit();
        }else{
            fragmentTransaction.show(fragment).commit();
        }
        blockModifyController.setVisibility(View.VISIBLE);
        AnimationUtils.doSlidingInFromBottom(blockModifyController, blockModifyController.getHeight(), false);
        com.zero.magicshow.common.utils.BaseUtil.fadeOutView(blockTopBar);
    }
    // Reverse of showFragment: hides the fragment and restores the top bar.
    private void hiddenFragment(Fragment fragment){
        FragmentTransaction fragmentTransaction = getSupportFragmentManager().beginTransaction();
        fragmentTransaction.setCustomAnimations(R.anim.bottom_sliding_in, R.anim.bottom_sliding_out);
        fragmentTransaction.hide(fragment).commit();
        AnimationUtils.doSlidingOutFromBottom(blockModifyController, blockModifyController.getHeight(), false, new Animation.AnimationListener() {
            @Override
            public void onAnimationStart(Animation animation) {}
            @Override
            public void onAnimationEnd(Animation animation) {
                blockModifyController.setVisibility(View.GONE);
            }
            @Override
            public void onAnimationRepeat(Animation animation) {}
        });
        com.zero.magicshow.common.utils.BaseUtil.fadeInView(blockTopBar);
    }
    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Defensive cleanup in case the result subscription is still live.
        RxBus.getInstance().unregisterMain(Constants.RX_JAVA_TYPE_IMAGE_EDIT);
    }
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/activity/CameraActivity.java
================================================
package com.zero.magicshow.activity;
import android.Manifest;
import android.animation.ObjectAnimator;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.PermissionChecker;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.View;
import android.view.animation.Animation;
import android.widget.ImageView;
import android.widget.LinearLayout;
import com.zero.magicshow.R;
import com.zero.magicshow.adapter.FilterAdapter;
import com.zero.magicshow.common.base.BaseActivity;
import com.zero.magicshow.common.entity.MagicShowResultEntity;
import com.zero.magicshow.common.utils.BaseUtil;
import com.zero.magicshow.common.utils.Constants;
import com.zero.magicshow.common.utils.GravityUtil;
import com.zero.magicshow.common.utils.MagicParams;
import com.zero.magicshow.common.utils.RxBus;
import com.zero.magicshow.common.utils.SavePictureTask;
import com.zero.magicshow.core.MagicEngine;
import com.zero.magicshow.core.camera.CameraEngine;
import com.zero.magicshow.core.filter.utils.MagicFilterType;
import com.zero.magicshow.core.widget.MagicCameraView;
import com.zero.magicshow.viewmanager.CameraManager;
import com.zero.zerolib.util.AnimationUtils;
/**
 * Camera capture screen: previews the camera through a MagicCameraView, lets
 * the user pick a realtime GPU filter, and captures either a photo or a video
 * depending on the current mode. A captured photo is posted on RxBus under
 * Constants.RX_JAVA_TYPE_CAMERA_SHOOT (consumed by MagicShowManager).
 *
 * Created by zero on 2016/3/17.
 */
public class CameraActivity extends BaseActivity{
    private LinearLayout filterLayout;        // sliding panel holding the filter list
    private RecyclerView filterListView;      // horizontal list of filter thumbnails
    private MagicCameraView magicCameraView;  // GL camera preview surface
    private FilterAdapter filterAdapter;
    private MagicEngine magicEngine;          // facade over camera/filter/record operations
    private boolean isRecording = false;      // toggled by takeVideo()
    private final int MODE_PIC = 1;
    private final int MODE_VIDEO = 2;
    private int mode = MODE_PIC;              // what the shutter button does
    private ImageView btnShutter,btnMode,btnFilter,btnFilterClose,btnCameraSwitch;//,btnBeauty;
    private ObjectAnimator animator;          // shutter pulse animation while recording
    private final static int PERMISSION_REQUEST_WRITE = 1001;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_camera);
        init();
    }
    // View lookup, data setup and listener wiring, in that order.
    private void init(){
        initView();
        initData();
        initListener();
    }
    private void initView(){
        filterLayout = (LinearLayout)findViewById(R.id.layout_filter);
        filterListView = (RecyclerView) findViewById(R.id.filter_listView);
        btnShutter = (ImageView)findViewById(R.id.camera_shutter);
        btnMode = (ImageView)findViewById(R.id.camera_mode);
        btnFilter = (ImageView)findViewById(R.id.camera_filter);
        btnFilterClose = (ImageView)findViewById(R.id.camera_closefilter);
        btnCameraSwitch = (ImageView)findViewById(R.id.camera_switch);
        // btnBeauty = (ImageView)findViewById(R.id.camera_beauty);
        magicCameraView = (MagicCameraView)findViewById(R.id.camera_camera_view);
    }
    private void initData(){
        magicEngine = new MagicEngine.Builder().build(magicCameraView);
        initFilterView();
        animator = CameraManager.getShutterAnim(btnShutter);
    }
    private void initListener(){
        btnFilter.setOnClickListener(btn_listener);
        btnFilterClose.setOnClickListener(btn_listener);
        btnShutter.setOnClickListener(btn_listener);
        btnCameraSwitch.setOnClickListener(btn_listener);
        btnMode.setOnClickListener(btn_listener);
        // btnBeauty.setOnClickListener(btn_listener);
    }
    // Builds the horizontal filter thumbnail list.
    private void initFilterView(){
        LinearLayoutManager linearLayoutManager = new LinearLayoutManager(this);
        linearLayoutManager.setOrientation(LinearLayoutManager.HORIZONTAL);
        filterListView.setLayoutManager(linearLayoutManager);
        filterAdapter = new FilterAdapter(this, Constants.FILTER_TYPES);
        filterListView.setAdapter(filterAdapter);
        filterAdapter.setOnFilterChangeListener(onFilterChangeListener);
    }
    // Applies the tapped filter to the live preview.
    private FilterAdapter.onFilterChangeListener onFilterChangeListener = new FilterAdapter.onFilterChangeListener(){
        @Override
        public void onFilterChanged(MagicFilterType filterType) {
            magicEngine.setFilter(filterType);
        }
    };
    // @Override
    // public void onRequestPermissionsResult(int requestCode, String[] permissions,int[] grantResults) {
    //     if (grantResults.length != 1 || grantResults[0] == PackageManager.PERMISSION_GRANTED) {
    //         if(mode == MODE_PIC){
    //             takePhoto();
    //         }else{
    //             takeVideo();
    //         }
    //     } else {
    //         super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    //     }
    // }
    // Shutter tap: ensure storage permission, then capture per current mode.
    // NOTE(review): when the permission has to be requested here, nothing retries
    // the capture after the grant (the onRequestPermissionsResult override above
    // is commented out) — confirm the user is expected to tap the shutter again.
    private void doClickShutterAction(View view){
        if (PermissionChecker.checkSelfPermission(CameraActivity.this, Manifest.permission.WRITE_EXTERNAL_STORAGE) == PackageManager.PERMISSION_DENIED) {
            ActivityCompat.requestPermissions(CameraActivity.this,
                    new String[] { Manifest.permission.WRITE_EXTERNAL_STORAGE },
                    PERMISSION_REQUEST_WRITE);
        } else {
            if(mode == MODE_PIC){
                takePhoto();
            }else{
                takeVideo();
            }
        }
    }
    // Shows a 0-5 beauty-level chooser and applies the selection.
    private void doClickBeautyAction(){
        new AlertDialog.Builder(CameraActivity.this)
                .setSingleChoiceItems(new String[] { "关闭", "1", "2", "3", "4", "5"}, MagicParams.beautyLevel,
                        new DialogInterface.OnClickListener() {
                            public void onClick(DialogInterface dialog, int which) {
                                magicEngine.setBeautyLevel(which);
                                dialog.dismiss();
                            }
                        })
                .setNegativeButton("取消", null)
                .show();
    }
    // Single dispatcher for all toolbar buttons.
    private View.OnClickListener btn_listener = new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            if(v == btnMode){
                switchMode();
            }else if(v == btnShutter){
                doClickShutterAction(v);
            }else if(v == btnFilter){
                showFilters();
            }else if(v == btnCameraSwitch){
                magicEngine.switchCamera();
            }
            // else if(v == btnBeauty){
            //     doClickBeautyAction();
            // }
            else if(v == btnFilterClose){
                hideFilters();
            }
        }
    };
    // Toggles between photo and video capture; the mode icon shows the *other* mode.
    private void switchMode(){
        if(mode == MODE_PIC){
            mode = MODE_VIDEO;
            btnMode.setImageResource(R.drawable.icon_camera);
        }else{
            mode = MODE_PIC;
            btnMode.setImageResource(R.drawable.icon_video);
        }
    }
    // Captures a still, publishes the result on RxBus, then closes the screen.
    private void takePhoto(){
        // final long startTime = System.nanoTime() / 1000000;
        magicEngine.savePicture(BaseUtil.getRandomTempImageFile(), new SavePictureTask.OnPictureSaveListener() {
            @Override
            public void onSaved(MagicShowResultEntity resultEntity) {
                // Log.e("HongLi","保存成功:" + (System.nanoTime() / 1000000 - startTime));
                RxBus.getInstance().post(resultEntity,Constants.RX_JAVA_TYPE_CAMERA_SHOOT);
                doFinishAction();
            }
        });
    }
    // Starts or stops recording, keeping the shutter animation in sync.
    private void takeVideo(){
        if(isRecording) {
            animator.end();
            magicEngine.stopRecord();
        }else {
            animator.start();
            magicEngine.startRecord();
        }
        isRecording = !isRecording;
    }
    @Override
    protected void doFinishAction() {
        // Back first closes the filter panel if it is open; only then finishes.
        if(filterLayout.getVisibility() == View.VISIBLE){
            hideFilters();
            return;
        }
        super.doFinishAction();
    }
    // Slides the filter panel in; the shutter is disabled while it is visible.
    private void showFilters(){
        btnShutter.setClickable(false);
        filterLayout.setVisibility(View.VISIBLE);
        AnimationUtils.doSlidingInFromBottom(filterLayout, filterLayout.getHeight(),false);
    }
    // Slides the filter panel out and re-enables the shutter when done.
    private void hideFilters(){
        AnimationUtils.doSlidingOutFromBottom(filterLayout, filterLayout.getHeight(), false, new Animation.AnimationListener() {
            @Override
            public void onAnimationStart(Animation animation) {}
            @Override
            public void onAnimationEnd(Animation animation) {
                filterLayout.setVisibility(View.INVISIBLE);
                btnShutter.setClickable(true);
            }
            @Override
            public void onAnimationRepeat(Animation animation) {}
        });
    }
    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Release camera, gravity sensor and any pending result subscription.
        CameraEngine.releaseCamera(true);
        GravityUtil.getInstance().stop();
        RxBus.getInstance().unregisterMain(Constants.RX_JAVA_TYPE_CAMERA_SHOOT);
    }
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/adapter/FilterAdapter.java
================================================
package com.zero.magicshow.adapter;
import android.content.Context;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.TextView;
import com.zero.magicshow.R;
import com.zero.magicshow.common.utils.FilterTypeHelper;
import com.zero.magicshow.core.filter.utils.MagicFilterType;
/**
 * RecyclerView adapter listing the available GPU filters as selectable
 * thumbnails. Exactly one item is "selected" at a time; tapping an item moves
 * the selection and notifies the registered onFilterChangeListener.
 *
 * Created by why8222 on 2016/3/17.
 */
public class FilterAdapter extends RecyclerView.Adapter<FilterAdapter.FilterHolder>{
    private MagicFilterType[] filters;
    private Context context;
    private int selected = 0; // adapter position of the currently selected filter
    public FilterAdapter(Context context, MagicFilterType[] filters) {
        this.filters = filters;
        this.context = context;
    }
    @Override
    public FilterHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        View view = LayoutInflater.from(context).inflate(R.layout.filter_item_layout,
                parent, false);
        FilterHolder viewHolder = new FilterHolder(view);
        viewHolder.thumbImage = (ImageView) view
                .findViewById(R.id.filter_thumb_image);
        viewHolder.filterName = (TextView) view
                .findViewById(R.id.filter_thumb_name);
        viewHolder.filterRoot = (FrameLayout) view
                .findViewById(R.id.filter_root);
        viewHolder.thumbSelected = (FrameLayout) view
                .findViewById(R.id.filter_thumb_selected);
        viewHolder.thumbSelected_bg = view.
                findViewById(R.id.filter_thumb_selected_bg);
        return viewHolder;
    }
    @Override
    public void onBindViewHolder(final FilterHolder holder, final int position) {
        holder.thumbImage.setImageResource(FilterTypeHelper.FilterType2Thumb(filters[position]));
        holder.filterName.setText(FilterTypeHelper.FilterType2Name(filters[position]));
        holder.filterName.setBackgroundColor(context.getResources().getColor(
                FilterTypeHelper.FilterType2Color(filters[position])));
        if(position == selected){
            // Highlight overlay tinted with the filter's own color.
            holder.thumbSelected.setVisibility(View.VISIBLE);
            holder.thumbSelected_bg.setBackgroundColor(context.getResources().getColor(
                    FilterTypeHelper.FilterType2Color(filters[position])));
            holder.thumbSelected_bg.setAlpha(0.7f);
        }else {
            holder.thumbSelected.setVisibility(View.GONE);
        }
        holder.filterRoot.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Use the live adapter position rather than the bind-time
                // `position`, which can go stale after notifyItemChanged().
                int current = holder.getAdapterPosition();
                if (current == RecyclerView.NO_POSITION || current == selected) {
                    return;
                }
                int lastSelected = selected;
                selected = current;
                // Rebind only the two affected items.
                notifyItemChanged(lastSelected);
                notifyItemChanged(current);
                if (onFilterChangeListener != null) {
                    onFilterChangeListener.onFilterChanged(filters[current]);
                }
            }
        });
    }
    /** Resets the selection to the first filter and redraws the list. */
    public void refreshList(){
        selected = 0;
        notifyDataSetChanged();
    }
    @Override
    public int getItemCount() {
        return filters == null ? 0 : filters.length;
    }
    /** View cache for one filter cell. */
    class FilterHolder extends RecyclerView.ViewHolder {
        ImageView thumbImage;       // filter preview thumbnail
        TextView filterName;
        FrameLayout thumbSelected;  // selection overlay
        FrameLayout filterRoot;     // whole-cell click target
        View thumbSelected_bg;
        public FilterHolder(View itemView) {
            super(itemView);
        }
    }
    /** Callback fired when the user selects a different filter. */
    public interface onFilterChangeListener{
        void onFilterChanged(MagicFilterType filterType);
    }
    private onFilterChangeListener onFilterChangeListener;
    public void setOnFilterChangeListener(onFilterChangeListener onFilterChangeListener){
        this.onFilterChangeListener = onFilterChangeListener;
    }
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/common/base/BaseActivity.java
================================================
package com.zero.magicshow.common.base;
import android.support.v4.app.FragmentActivity;
import android.view.KeyEvent;
/**
 * Common base for MagicShow activities. Routes the first press of the BACK
 * key through {@link #doFinishAction()} so subclasses can intercept it
 * (e.g. to close an overlay panel before actually finishing).
 *
 * Created by hongli on 2017/8/22.
 */
public abstract class BaseActivity extends FragmentActivity{
    /** Default behavior is simply to finish; subclasses may override. */
    protected void doFinishAction(){
        finish();
    }
    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        boolean initialBackPress =
                keyCode == KeyEvent.KEYCODE_BACK && event.getRepeatCount() == 0;
        if (!initialBackPress) {
            // Anything else (or a held-down repeat) keeps the default handling.
            return super.onKeyDown(keyCode, event);
        }
        doFinishAction();
        return true;
    }
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/common/base/MagicBaseView.java
================================================
package com.zero.magicshow.common.base;
import android.content.Context;
import android.graphics.Bitmap;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.util.Log;
import com.zero.magicshow.common.iface.MagicListener;
import com.zero.magicshow.common.utils.OpenGlUtils;
import com.zero.magicshow.common.utils.Rotation;
import com.zero.magicshow.common.utils.SavePictureTask;
import com.zero.magicshow.common.utils.TextureRotationUtil;
import com.zero.magicshow.core.filter.base.gpuimage.GPUImageFilter;
import com.zero.magicshow.core.filter.utils.MagicFilterFactory;
import com.zero.magicshow.core.filter.utils.MagicFilterType;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
/**
* Created by why8222 on 2016/2/25.
*/
public abstract class MagicBaseView extends GLSurfaceView implements GLSurfaceView.Renderer{
/**
* 所选择的滤镜,类型为MagicBaseGroupFilter
* 1.mCameraInputFilter将SurfaceTexture中YUV数据绘制到FrameBuffer
* 2.filter将FrameBuffer中的纹理绘制到屏幕中
*/
protected GPUImageFilter filter;
/**
* SurfaceTexure纹理id
*/
protected int textureId = OpenGlUtils.NO_TEXTURE;
/**
* 顶点坐标
*/
protected final FloatBuffer gLCubeBuffer;
/**
* 纹理坐标
*/
protected final FloatBuffer gLTextureBuffer;
/**
* GLSurfaceView的宽高
*/
protected int surfaceWidth, surfaceHeight;
/**
* 图像宽高
*/
protected int imageWidth, imageHeight;
protected ScaleType scaleType = ScaleType.FIT_XY;
protected MagicListener magicListener;
public MagicBaseView(Context context) {
this(context, null);
}
public MagicBaseView(Context context, AttributeSet attrs) {
super(context, attrs);
gLCubeBuffer = ByteBuffer.allocateDirect(TextureRotationUtil.CUBE.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
gLCubeBuffer.put(TextureRotationUtil.CUBE).position(0);
gLTextureBuffer = ByteBuffer.allocateDirect(TextureRotationUtil.TEXTURE_NO_ROTATION.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
gLTextureBuffer.put(TextureRotationUtil.TEXTURE_NO_ROTATION).position(0);
setEGLContextClientVersion(2);
setRenderer(this);
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
GLES20.glDisable(GL10.GL_DITHER);
GLES20.glClearColor(0,0, 0, 0);
GLES20.glEnable(GL10.GL_CULL_FACE);
GLES20.glEnable(GL10.GL_DEPTH_TEST);
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
GLES20.glViewport(0,0,width, height);
surfaceWidth = width;
surfaceHeight = height;
Log.e("HongLi","surfaceWidth:" + surfaceWidth + ";surfaceHeight:" + surfaceHeight);
onFilterChanged();
}
@Override
public void onDrawFrame(GL10 gl) {
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
}
protected void setMagicListener(MagicListener magicListener){
this.magicListener = magicListener;
}
protected void onFilterChanged(){
if(filter != null) {
filter.onDisplaySizeChanged(surfaceWidth, surfaceHeight);
filter.onInputSizeChanged(imageWidth, imageHeight);
}
}
public void setGLScaleType(ScaleType scaleType){
this.scaleType = scaleType;
}
public void setFilter(final MagicFilterType type){
queueEvent(new Runnable() {
@Override
public void run() {
if (filter != null)
filter.destroy();
filter = null;
filter = MagicFilterFactory.initFilters(type);
if (filter != null)
filter.init();
onFilterChanged();
}
});
requestRender();
}
public MagicFilterType getFilterType(){
if(null == filter){
return MagicFilterType.NONE;
}
return MagicFilterFactory.getCurrentFilterType();
}
public void reftreshDisplay(){
deleteTextures();
requestRender();
}
protected void deleteTextures() {
if(textureId != OpenGlUtils.NO_TEXTURE){
queueEvent(new Runnable() {
@Override
public void run() {
GLES20.glDeleteTextures(1, new int[]{
textureId
}, 0);
textureId = OpenGlUtils.NO_TEXTURE;
}
});
}
}
public abstract void savePicture(SavePictureTask savePictureTask);
protected void adjustSize(int rotation, boolean flipHorizontal, boolean flipVertical){
float[] textureCords = TextureRotationUtil.getRotation(Rotation.fromInt(rotation),
flipHorizontal, flipVertical);
float[] cube = TextureRotationUtil.CUBE;
float ratio1 = (float)surfaceWidth / imageWidth;
float ratio2 = (float)surfaceHeight / imageHeight;
float ratioMax = Math.max(ratio1, ratio2);
int imageWidthNew = Math.round(imageWidth * ratioMax);
int imageHeightNew = Math.round(imageHeight * ratioMax);
float ratioWidth = imageWidthNew / (float)surfaceWidth;
float ratioHeight = imageHeightNew / (float)surfaceHeight;
if(scaleType == ScaleType.CENTER_INSIDE){
cube = new float[]{
TextureRotationUtil.CUBE[0] / ratioHeight, TextureRotationUtil.CUBE[1] / ratioWidth,
TextureRotationUtil.CUBE[2] / ratioHeight, TextureRotationUtil.CUBE[3] / ratioWidth,
TextureRotationUtil.CUBE[4] / ratioHeight, TextureRotationUtil.CUBE[5] / ratioWidth,
TextureRotationUtil.CUBE[6] / ratioHeight, TextureRotationUtil.CUBE[7] / ratioWidth,
};
}else if(scaleType == ScaleType.FIT_XY){
}else if(scaleType == ScaleType.CENTER_CROP){
float distHorizontal = (1 - 1 / ratioWidth) / 2;
float distVertical = (1 - 1 / ratioHeight) / 2;
textureCords = new float[]{
addDistance(textureCords[0], distVertical), addDistance(textureCords[1], distHorizontal),
addDistance(textureCords[2], distVertical), addDistance(textureCords[3], distHorizontal),
addDistance(textureCords[4], distVertical), addDistance(textureCords[5], distHorizontal),
addDistance(textureCords[6], distVertical), addDistance(textureCords[7], distHorizontal),
};
}
gLCubeBuffer.clear();
gLCubeBuffer.put(cube).position(0);
gLTextureBuffer.clear();
gLTextureBuffer.put(textureCords).position(0);
}
/**
 * Renders {@code bitmap} through the current {@code filter} into an offscreen
 * framebuffer sized to the bitmap, reads the pixels back into a new
 * ARGB_8888 bitmap, and delivers the result via
 * {@link #onGetBitmapFromGL(Bitmap)}. All GL work is queued onto the GL
 * thread, so this method returns immediately.
 *
 * @param bitmap     source image; not recycled by this method
 * @param newTexture true to upload {@code bitmap} as a temporary texture
 *                   (deleted after rendering); false to reuse the view's
 *                   current {@code textureId}
 */
protected void getBitmapFromGL(final Bitmap bitmap, final boolean newTexture){
    queueEvent(new Runnable() {
        @Override
        public void run() {
            int width = bitmap.getWidth();
            int height = bitmap.getHeight();
            // Create an FBO with a color texture attachment matching the
            // bitmap's dimensions.
            int[] mFrameBuffers = new int[1];
            int[] mFrameBufferTextures = new int[1];
            GLES20.glGenFramebuffers(1, mFrameBuffers, 0);
            GLES20.glGenTextures(1, mFrameBufferTextures, 0);
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mFrameBufferTextures[0]);
            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0,
                    GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                    GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                    GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                    GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                    GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFrameBuffers[0]);
            GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
                    GLES20.GL_TEXTURE_2D, mFrameBufferTextures[0], 0);
            GLES20.glViewport(0, 0, width, height);
            // Point the filter at the offscreen target's size for this pass.
            filter.onInputSizeChanged(width, height);
//                filter.onOutputSizeChanged(width,height);
            filter.onDisplaySizeChanged(imageWidth, imageHeight);
//                filter.onDisplaySizeChanged(surfaceWidth, surfaceHeight);
            int textureId;
            if(newTexture){
                textureId = OpenGlUtils.loadTexture(bitmap, OpenGlUtils.NO_TEXTURE, true);
            }else{
                textureId = MagicBaseView.this.textureId;
            }
            GLES20.glViewport(0, 0, width, height);
            long startTime = System.nanoTime() / 1000000;
            filter.onDrawFrame(textureId);
//                filter.onDrawFrameNormal(textureId,width, height);
            // Read the rendered pixels back into a bitmap.
            // NOTE(review): glReadPixels delivers RGBA; confirm channel order
            // against Bitmap.copyPixelsFromBuffer if output colors ever look
            // swapped — not verifiable from this file alone.
            IntBuffer ib = IntBuffer.allocate(width * height);
            GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, ib);
            Bitmap mBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
            mBitmap.copyPixelsFromBuffer(IntBuffer.wrap(ib.array()));
            Log.e("HongLi","消耗的时间:" + (System.nanoTime() / 1000000 - startTime));
//                BaseUtil.saveBitmap(mBitmap,"/sdcard/DCIM/test2.jpg");
            // Release the temporary texture (if created) and the FBO.
            if(newTexture)
                GLES20.glDeleteTextures(1, new int[]{textureId}, 0);
            GLES20.glDeleteFramebuffers(1, mFrameBuffers, 0);
            GLES20.glDeleteTextures(1, mFrameBufferTextures, 0);
            // Restore the on-screen viewport and re-initialize the filter for
            // normal rendering.
            GLES20.glViewport(0, 0, surfaceWidth, surfaceHeight);
            filter.destroy();
            filter.init();
            filter.onInputSizeChanged(imageWidth, imageHeight);
            onGetBitmapFromGL(mBitmap);
        }
    });
}
/**
 * Subclass hook invoked on the GL thread with the bitmap produced by
 * {@link #getBitmapFromGL}. Default implementation does nothing.
 */
protected void onGetBitmapFromGL(Bitmap bitmap){
}
/**
 * Shifts a normalized texture coordinate inwards by {@code distance}:
 * a 0.0 edge becomes {@code distance}, a 1.0 edge becomes {@code 1 - distance}.
 */
private float addDistance(float coordinate, float distance) {
    if (coordinate == 0.0f) {
        return distance;
    }
    return 1 - distance;
}
/**
 * How the rendered content is fitted into the view's surface.
 * (Order must not change: values may be persisted/compared by ordinal.)
 */
public enum ScaleType{
    CENTER_INSIDE,
    CENTER_CROP,
    FIT_XY;
}
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/common/config/CameraConfig.java
================================================
package com.zero.magicshow.common.config;
/**
* Created by hongli on 2017/9/1.
*/
/**
 * Static holder for the camera capture configuration.
 *
 * All state is global/static; this class is not meant to be instantiated.
 */
public class CameraConfig {
    // NOTE(review): written by camera code elsewhere; exact semantics
    // (display rotation vs. sensor orientation) are not visible in this file.
    public static int orientation;
    /** Capture width in pixels; defaults to 1280. */
    public static int pictureWidth = 1280;
    /** Capture height in pixels; defaults to 720. */
    public static int pictureHeight = 720;

    // Utility holder — prevent instantiation.
    private CameraConfig() {
    }

    /**
     * Updates the capture resolution.
     *
     * @param pictureWidth  new width in pixels
     * @param pictureHeight new height in pixels
     */
    public static void setCameraConfig(int pictureWidth, int pictureHeight) {
        CameraConfig.pictureWidth = pictureWidth;
        CameraConfig.pictureHeight = pictureHeight;
    }
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/common/config/PathConfig.java
================================================
package com.zero.magicshow.common.config;
import android.os.Environment;
/**
* Created by hongli on 2017/8/30.
*/
/**
 * Holds the temp-cache directory path used for intermediate image files.
 *
 * Defaults to {@code <external storage>/Android/data/com.zero.lib/cache};
 * hosting apps may override it via {@link #setTempCache(String)}.
 */
public class PathConfig {
    // Made final: this default suffix is never reassigned.
    private static final String SD_DIRECTORY = "/Android/data/com.zero.lib/cache";
    private static String tempCache = Environment.getExternalStorageDirectory().getAbsolutePath() + SD_DIRECTORY;

    // Utility holder — prevent instantiation.
    private PathConfig() {
    }

    /**
     * Overrides the temp-cache directory.
     *
     * @param path absolute directory path
     */
    public static void setTempCache(String path) {
        tempCache = path;
    }

    /** @return the current temp-cache directory path. */
    public static String getTempPath() {
        return tempCache;
    }
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/common/entity/MagicShowResultEntity.java
================================================
package com.zero.magicshow.common.entity;
/**
* Created by hongli on 2017/8/31.
*/
/**
 * Value object describing the outcome of a shoot/edit operation:
 * a status code and message, plus the produced image's file path,
 * dimensions and rotation angle.
 */
public class MagicShowResultEntity {
    private int resultCode;
    private String resultMsg;
    private String filePath;
    private int imageWidth;
    private int imageHeight;
    private int angle;

    /** @return status code of the operation */
    public int getResultCode() {
        return resultCode;
    }

    public void setResultCode(int resultCode) {
        this.resultCode = resultCode;
    }

    /** @return human-readable status message */
    public String getResultMsg() {
        return resultMsg;
    }

    public void setResultMsg(String resultMsg) {
        this.resultMsg = resultMsg;
    }

    /** @return absolute path of the produced image file */
    public String getFilePath() {
        return filePath;
    }

    public void setFilePath(String filePath) {
        this.filePath = filePath;
    }

    /** @return image width in pixels */
    public int getImageWidth() {
        return imageWidth;
    }

    public void setImageWidth(int imageWidth) {
        this.imageWidth = imageWidth;
    }

    /** @return image height in pixels */
    public int getImageHeight() {
        return imageHeight;
    }

    public void setImageHeight(int imageHeight) {
        this.imageHeight = imageHeight;
    }

    /** @return rotation angle of the image, in degrees */
    public int getAngle() {
        return angle;
    }

    public void setAngle(int angle) {
        this.angle = angle;
    }
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/common/iface/CameraShootCallBack.java
================================================
package com.zero.magicshow.common.iface;
import com.zero.magicshow.common.entity.MagicShowResultEntity;
/**
* Created by hongli on 2017/9/1.
*/
/**
 * Callback fired when a camera shot has been fully processed.
 */
public interface CameraShootCallBack {
    /**
     * Invoked once the capture pipeline has finished.
     *
     * @param magicShowResultEntity descriptor of the produced image
     *                              (status, file path, size, angle)
     */
    void onCompentFinished(MagicShowResultEntity magicShowResultEntity);
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/common/iface/DialogYesOrNoCallBack.java
================================================
package com.zero.magicshow.common.iface;
/**
* Created by hongli on 2017/8/30.
*/
/**
 * Callback for a yes/no confirmation dialog.
 */
public interface DialogYesOrNoCallBack {
    /** Invoked when the user confirms. */
    void onYesClick();

    /** Invoked when the user declines (also fired when the dialog is cancelled). */
    void onNoClick();
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/common/iface/GravityCallBack.java
================================================
package com.zero.magicshow.common.iface;
/**
* Created by hongli on 2017/9/1.
*/
/**
 * Receives debounced device-orientation changes detected by GravityUtil.
 */
public interface GravityCallBack {
    /**
     * @param direction one of the {@code GravityUtil.DIRECTION_*} constants
     */
    void onGravityChange(int direction);
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/common/iface/ImageEditCallBack.java
================================================
package com.zero.magicshow.common.iface;
import com.zero.magicshow.common.entity.MagicShowResultEntity;
/**
* Created by hongli on 2017/8/31.
*/
/**
 * Callback fired when an image-editing operation has completed.
 */
public interface ImageEditCallBack {
    /**
     * Invoked once editing has finished.
     *
     * @param magicShowResultEntity descriptor of the edited image
     *                              (status, file path, size, angle)
     */
    void onCompentFinished(MagicShowResultEntity magicShowResultEntity);
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/common/iface/MagicListener.java
================================================
package com.zero.magicshow.common.iface;
/**
* Created by hongli on 2017/8/23.
*/
/**
 * Minimal completion callback.
 */
public interface MagicListener {
    /** Invoked when the observed operation has finished. */
    void onEnd();
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/common/utils/BaseUtil.java
================================================
package com.zero.magicshow.common.utils;
import android.app.Activity;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.media.ExifInterface;
import android.text.TextUtils;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Animation;
import android.widget.ImageView;
import android.widget.TextView;
import com.zero.magicshow.R;
import com.zero.magicshow.common.config.PathConfig;
import com.zero.magicshow.common.iface.DialogYesOrNoCallBack;
import com.zero.zerolib.util.AnimationUtils;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
/**
* Created by hongli on 2017/8/30.
*/
/**
 * Image, file and small-UI helper utilities used across the library.
 * Extends the zerolib BaseUtil so callers can use both sets of helpers
 * through one class.
 */
public class BaseUtil extends com.zero.zerolib.util.BaseUtil {
//    public static void scanFile(String filePath){
//        MediaScannerConnection.scanFile(MagicParams.context,
//                new String[] {filePath}, null,
//                new MediaScannerConnection.OnScanCompletedListener() {
//                    @Override
//                    public void onScanCompleted(final String path, final Uri uri) {
//
//                    }
//                });
//    }

    /**
     * Rotates a bitmap by {@code degree} degrees.
     * Recycles the input bitmap when a new one is produced; on OOM the
     * original (unrotated) bitmap is returned unchanged.
     *
     * @param bm     source bitmap
     * @param degree clockwise rotation in degrees
     * @return the rotated bitmap, or {@code bm} itself if rotation failed
     */
    public static Bitmap rotateBitmapByDegree(Bitmap bm, int degree) {
        Bitmap returnBm = null;
        // Build a rotation matrix for the requested angle.
        Matrix matrix = new Matrix();
        matrix.postRotate(degree);
        try {
            // Render the original through the matrix into a new bitmap.
            returnBm = Bitmap.createBitmap(bm, 0, 0, bm.getWidth(), bm.getHeight(), matrix, true);
        } catch (OutOfMemoryError e) {
            // Deliberately swallowed: fall back to the unrotated original below.
        }
        if (returnBm == null) {
            returnBm = bm;
        }
        if (bm != returnBm) {
            bm.recycle();
        }
        return returnBm;
    }

    /**
     * @return true when the current display is taller than it is wide.
     */
    public static boolean isPortrait(Activity activity) {
        DisplayMetrics dm = new DisplayMetrics();
        activity.getWindowManager().getDefaultDisplay().getMetrics(dm);
        return dm.widthPixels < dm.heightPixels;
    }

    /**
     * Reads the rotation angle stored in a photo's EXIF data.
     *
     * @param path photo file path
     * @return rotation in degrees (0, 90, 180 or 270); 0 on read failure
     */
    public static int readPictureDegree(String path) {
        int degree = 0;
        try {
            ExifInterface exifInterface = new ExifInterface(path);
            int orientation =
                    exifInterface.getAttributeInt(ExifInterface.TAG_ORIENTATION,
                            ExifInterface.ORIENTATION_NORMAL);
            switch (orientation) {
                case ExifInterface.ORIENTATION_ROTATE_90:
                    degree = 90;
                    break;
                case ExifInterface.ORIENTATION_ROTATE_180:
                    degree = 180;
                    break;
                case ExifInterface.ORIENTATION_ROTATE_270:
                    degree = 270;
                    break;
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        return degree;
    }

    /**
     * Fades a visible view out and then sets it to GONE.
     * No-op if the view is not currently visible.
     */
    public static void fadeOutView(final View view) {
        if (view.getVisibility() != View.VISIBLE) {
            return;
        }
        AnimationUtils.doFadeOut(view, new Animation.AnimationListener() {
            @Override
            public void onAnimationStart(Animation animation) {
            }
            @Override
            public void onAnimationEnd(Animation animation) {
                view.setVisibility(View.GONE);
            }
            @Override
            public void onAnimationRepeat(Animation animation) {
            }
        });
    }

    /**
     * Makes a hidden view VISIBLE with a fade-in animation.
     * No-op if the view is already visible.
     */
    public static void fadeInView(View view) {
        if (view.getVisibility() == View.VISIBLE) {
            return;
        }
        view.setVisibility(View.VISIBLE);
        AnimationUtils.doFadeIn(view);
    }

    /**
     * Shows a modal yes/no dialog.
     * Empty title/content hide the corresponding labels; empty button texts
     * keep the layout defaults. Cancelling the dialog is reported as "no".
     *
     * @param activity        host activity
     * @param title           dialog title, may be empty
     * @param content         dialog body text, may be empty
     * @param yesText         confirm-button label, may be empty for default
     * @param noText          decline-button label, may be empty for default
     * @param yesOrNoCallBack receiver for the user's choice
     */
    public static void openYesOrNoDialog(final Activity activity, String title, String content, String yesText, String noText, final DialogYesOrNoCallBack yesOrNoCallBack) {
        final Dialog shopTipDialog = new Dialog(activity, R.style.loading_dialog);
        View view = LayoutInflater.from(activity).inflate(R.layout.dialog_yes_or_no, null);
        final ImageView btnClose = (ImageView) view.findViewById(R.id.gift_balance_lack_close);
        final TextView btnYes = (TextView) view.findViewById(R.id.dialog_yes_or_no_btnyes);
        final TextView tvTitle = (TextView) view.findViewById(R.id.dialog_yes_or_no_title);
        final TextView tvCon = (TextView) view.findViewById(R.id.dialog_yes_or_no_con);
        final TextView btnNo = (TextView) view.findViewById(R.id.dialog_yes_or_no_btnno);
        tvTitle.setText(title);
        if (TextUtils.isEmpty(title)) {
            tvTitle.setVisibility(View.GONE);
        }
        tvCon.setText(content);
        if (TextUtils.isEmpty(content)) {
            tvCon.setVisibility(View.GONE);
        }
        if (!TextUtils.isEmpty(yesText)) {
            btnYes.setText(yesText);
        }
        if (!TextUtils.isEmpty(noText)) {
            btnNo.setText(noText);
        }
        View.OnClickListener onClickListener = new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (v == btnClose) {
                    shopTipDialog.dismiss();
                } else if (v == btnYes) {
                    yesOrNoCallBack.onYesClick();
                    shopTipDialog.dismiss();
                } else if (v == btnNo) {
                    yesOrNoCallBack.onNoClick();
                    shopTipDialog.dismiss();
                }
            }
        };
        shopTipDialog.setOnCancelListener(new DialogInterface.OnCancelListener() {
            @Override
            public void onCancel(DialogInterface dialog) {
                yesOrNoCallBack.onNoClick();
            }
        });
        btnClose.setOnClickListener(onClickListener);
        btnYes.setOnClickListener(onClickListener);
        btnNo.setOnClickListener(onClickListener);
//        shopTipDialog.setCanceledOnTouchOutside(true);
        shopTipDialog.setContentView(view, new ViewGroup.LayoutParams(dipToPix(activity, 267), dipToPix(activity, 100)));
        // try/catch guards against the activity having been destroyed already.
        try {
            shopTipDialog.show();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** @return a File with a fresh random name in the temp-cache directory. */
    public static File getRandomTempImageFile() {
        return new File(getRandomTempImagePath());
    }

    /** @return a fresh random .jpg path inside the temp-cache directory. */
    public static String getRandomTempImagePath() {
        return PathConfig.getTempPath() + "/" + getRandomStr() + ".jpg";
    }

    /**
     * @return a quasi-unique string: timestamp plus a 5-digit random suffix
     *         (not cryptographically random)
     */
    public static String getRandomStr() {
        int randInt = (int) (Math.random() * 100000);
        if (randInt < 10000) {
            randInt = randInt + 10000;
        }
        return DateUtils.getTimeStamp() + randInt;
    }

    /**
     * Saves a bitmap to a local file as JPEG (quality 75), replacing any
     * existing file. No-op (with a log) on null/recycled bitmap or empty path.
     *
     * @param bitmap       source bitmap
     * @param distancePath destination file path
     */
    public static void saveBitmap(Bitmap bitmap, String distancePath) {
        if (TextUtils.isEmpty(distancePath) || null == bitmap || bitmap.isRecycled()) {
            Log.e("BaseUtil", "in saveBitmap bitmap is null or recycles or distancePath is empty.");
            return;
        }
        File file = new File(distancePath);
        if (file.exists()) {
            file.delete();
        }
        try {
            FileOutputStream out = new FileOutputStream(file);
            // Compress to quality 75 to keep file sizes reasonable.
            bitmap.compress(Bitmap.CompressFormat.JPEG, 75, out);
            out.flush();
            out.close();
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Decodes an image bundled in the app's assets.
     *
     * @param context  context for asset access
     * @param fileName asset-relative file name
     * @return the decoded bitmap, or null on I/O failure
     */
    public static Bitmap getImageFromAssetsFile(Context context, String fileName) {
        Bitmap image = null;
        AssetManager am = context.getResources().getAssets();
        try {
            InputStream is = am.open(fileName);
            image = BitmapFactory.decodeStream(is);
            is.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return image;
    }

    /**
     * Decodes a file into a bitmap, downsampling so the result roughly fits
     * within {@code width} x {@code height} (see setBitmapNormalOptions).
     *
     * @param path   image file path
     * @param width  target width bound
     * @param height target height bound
     * @return the decoded bitmap, or null for an empty path
     */
    public static Bitmap getBitmapBySimpleSize(
            String path,
            float width,
            float height) {
        if (TextUtils.isEmpty(path)) {
            return null;
        }
        BitmapFactory.Options options = new BitmapFactory.Options();
        options.inJustDecodeBounds = true;
        // First pass only reads the image's intrinsic width/height.
        Bitmap bitmap = BitmapFactory.decodeFile(path, options);
        setBitmapNormalOptions(options, width, height);
        // Second pass (inJustDecodeBounds now false) actually decodes pixels.
//        MyLogUtil.e("HongLi", "path:" + path);
        bitmap = BitmapFactory.decodeFile(path, options);
//        MyLogUtil.e("HongLi", "image width:" + bitmap.getWidth() + ";height:" + bitmap.getHeight());
        return bitmap;
    }

    /**
     * Computes the decode sample size for the given target bounds and writes
     * it into {@code options}, additionally capping both dimensions at 3500px
     * and forcing ARGB_8888 decoding.
     *
     * @param options bounds-populated decode options (mutated in place)
     * @param width   target width bound
     * @param height  target height bound
     */
    private static void setBitmapNormalOptions(
            BitmapFactory.Options options, float width, float height) {
        if (null == options) {
            return;
        }
        if (options.outWidth > width) {
            float scale = (options.outWidth / width);
            // If scaling by width would undershoot the target height, scale by
            // height instead.
            if (options.outHeight / scale < height) {
                scale = (options.outHeight / height);
            }
            int realScale = scale - (int) scale >= 0.2 ? (int) scale + 1 : (int) scale;
            realScale = realScale <= 0 ? 1 : realScale;
            options.inSampleSize = realScale;
            options.outWidth = options.outWidth / options.inSampleSize;
            options.outHeight = options.outHeight / options.inSampleSize;
        } else {
            options.inSampleSize = 1;
        }
        if (options.outHeight > 3500) {
            int realScale = options.outHeight / 3500;
            options.inSampleSize = options.inSampleSize < realScale ? realScale : options.inSampleSize;
        }
        if (options.outWidth > 3500) {
            int realScale = options.outWidth / 3500;
            options.inSampleSize = options.inSampleSize < realScale ? realScale : options.inSampleSize;
        }
        options.inJustDecodeBounds = false;
        options.inPurgeable = true;
        options.inInputShareable = true;
        // RGB_565 would render transparent PNG areas as black, so use ARGB_8888.
        options.inPreferredConfig = Bitmap.Config.ARGB_8888;
    }
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/common/utils/CameraBitmapUtil.java
================================================
package com.zero.magicshow.common.utils;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.hardware.Camera;
import android.view.Surface;
/**
* Created by hongli on 2017/12/14.
*/
/**
 * Post-processing for raw camera capture bitmaps: rotates to the display
 * orientation and mirrors the front-camera image.
 */
public class CameraBitmapUtil {
    /**
     * Rotates {@code takeBitmap} to match the display orientation and, for
     * {@code cameraId == 1} (front camera), flips it along the y axis to undo
     * the preview mirroring. Recycles the input when a new bitmap is produced.
     *
     * @param activity   used to read the current display rotation
     * @param takeBitmap raw bitmap from the camera
     * @param cameraId   camera id; 1 is treated as the front camera
     * @return the corrected bitmap (may be the input if no transform applied)
     */
    public static Bitmap handlerCameraBitmap(Activity activity, Bitmap takeBitmap,int cameraId) {
        Matrix matrix = new Matrix();
        matrix.postRotate(getCameraDisplayOrientation(activity, cameraId));
        matrix.postScale(1, cameraId == 1 ? -1 : 1);
        Bitmap cropRotateScaled = Bitmap.createBitmap(takeBitmap, 0, 0, takeBitmap.getWidth(), takeBitmap.getHeight(), matrix, true);
        if (cropRotateScaled != takeBitmap) {
            takeBitmap.recycle();
        }
        return cropRotateScaled;
    }

    /**
     * Computes the clockwise rotation needed to show this camera's output
     * upright on the current display (the standard
     * Camera.setDisplayOrientation compensation: sensor orientation combined
     * with display rotation, mirrored for front-facing cameras).
     *
     * @param activity used to read the display rotation
     * @param cameraId camera to query
     * @return rotation in degrees; falls back to 90 if the lookup throws
     */
    public static int getCameraDisplayOrientation(Activity activity,int cameraId) {
        int result = 90;
        try {
            android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
            android.hardware.Camera.getCameraInfo(cameraId, info);
            int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            int degrees = 0;
            switch (rotation) {
                case Surface.ROTATION_0:
                    degrees = 0;
                    break;
                case Surface.ROTATION_90:
                    degrees = 90;
                    break;
                case Surface.ROTATION_180:
                    degrees = 180;
                    break;
                case Surface.ROTATION_270:
                    degrees = 270;
                    break;
            }
            if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                result = (info.orientation + degrees) % 360;
                result = (360 - result) % 360; // compensate the mirror
            } else { // back-facing
                result = (info.orientation - degrees + 360) % 360;
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return result;
    }
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/common/utils/CameraParamUtil.java
================================================
package com.zero.magicshow.common.utils;
import android.hardware.Camera;
import android.util.Log;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
* Created by hongli on 2017/9/1.
*/
/**
 * Helpers for choosing camera preview/picture sizes and checking supported
 * camera parameters. Singleton; obtain via {@link #getInstance()}.
 *
 * Not thread-safe; intended for use from the camera (main) thread.
 */
public class CameraParamUtil {
    private static final String TAG = "JCameraView";
    private CameraSizeComparator sizeComparator = new CameraSizeComparator();
    private static CameraParamUtil cameraParamUtil = null;

    private CameraParamUtil() {
    }

    public static CameraParamUtil getInstance() {
        if (cameraParamUtil == null) {
            cameraParamUtil = new CameraParamUtil();
        }
        return cameraParamUtil;
    }

    /**
     * Picks a preview size: the smallest size wider than {@code th} whose
     * aspect ratio is within 0.2 of {@code rate}; falls back to the size with
     * the closest ratio when none qualifies. Sorts {@code list} in place.
     */
    public Camera.Size getPreviewSize(List<Camera.Size> list, int th, float rate) {
        return findSize(list, th, rate, "Preview");
    }

    /** Same selection rule as {@link #getPreviewSize} but for picture sizes. */
    public Camera.Size getPictureSize(List<Camera.Size> list, int th, float rate) {
        return findSize(list, th, rate, "Picture");
    }

    // Shared implementation of the two selectors above (their bodies were
    // duplicated verbatim apart from the log label).
    private Camera.Size findSize(List<Camera.Size> list, int th, float rate, String label) {
        Collections.sort(list, sizeComparator);
        int i = 0;
        for (Camera.Size s : list) {
            if ((s.width > th) && equalRate(s, rate)) {
                Log.i(TAG, "MakeSure " + label + " :w = " + s.width + " h = " + s.height);
                break;
            }
            i++;
        }
        if (i == list.size()) {
            return getBestSize(list, rate);
        }
        return list.get(i);
    }

    /** @return the size whose width/height ratio is closest to {@code rate}. */
    public Camera.Size getBestSize(List<Camera.Size> list, float rate) {
        float previewDisparity = 100;
        int index = 0;
        for (int i = 0; i < list.size(); i++) {
            Camera.Size cur = list.get(i);
            float prop = (float) cur.width / (float) cur.height;
            if (Math.abs(rate - prop) < previewDisparity) {
                previewDisparity = Math.abs(rate - prop);
                index = i;
            }
        }
        return list.get(index);
    }

    /** @return true when the size's aspect ratio is within 0.2 of {@code rate}. */
    public boolean equalRate(Camera.Size s, float rate) {
        float r = (float) (s.width) / (float) (s.height);
        return Math.abs(r - rate) <= 0.2;
    }

    /** @return true if {@code focusMode} appears in the supported list. */
    public boolean isSupportedFocusMode(List<String> focusList, String focusMode) {
        for (int i = 0; i < focusList.size(); i++) {
            if (focusMode.equals(focusList.get(i))) {
                Log.i(TAG, "FocusMode supported " + focusMode);
                return true;
            }
        }
        Log.i(TAG, "FocusMode not supported " + focusMode);
        return false;
    }

    /** @return true if picture format {@code jpeg} appears in the supported list. */
    public boolean isSupportedPictureFormats(List<Integer> supportedPictureFormats, int jpeg) {
        for (int i = 0; i < supportedPictureFormats.size(); i++) {
            // List<Integer> (was raw List): the element now unboxes for a
            // correct int comparison.
            if (jpeg == supportedPictureFormats.get(i)) {
                Log.i(TAG, "Formats supported " + jpeg);
                return true;
            }
        }
        Log.i(TAG, "Formats not supported " + jpeg);
        return false;
    }

    /**
     * Orders sizes by ascending width. Fixed: now implements
     * Comparator&lt;Camera.Size&gt; — the raw-typed original's
     * compare(Camera.Size, Camera.Size) did not override
     * compare(Object, Object).
     */
    public class CameraSizeComparator implements Comparator<Camera.Size> {
        public int compare(Camera.Size lhs, Camera.Size rhs) {
            // Integer.compare avoids the subtraction-overflow idiom.
            return Integer.compare(lhs.width, rhs.width);
        }
    }
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/common/utils/Constants.java
================================================
package com.zero.magicshow.common.utils;
import android.os.Environment;
import android.util.Log;
import com.zero.magicshow.core.filter.utils.MagicFilterType;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
/**
* Created by hongli on 2017/8/22.
*/
/**
 * Shared constants: the list of selectable filters, screen metrics,
 * RxBus event types and the default output-file factory.
 */
public class Constants {
    /** Intent/bundle key used to pass an image url between activities. */
    public static final String TRANSMIT_IMAGE_URL = "imageUrl";
    /** Filters offered in the chooser, in display order. */
    public static final MagicFilterType[] FILTER_TYPES = new MagicFilterType[]{
            MagicFilterType.NONE,
            MagicFilterType.FAIRYTALE,
            MagicFilterType.SUNRISE,
            MagicFilterType.SUNSET,
            MagicFilterType.WHITECAT,
            MagicFilterType.BLACKCAT,
            MagicFilterType.SKINWHITEN,
            MagicFilterType.HEALTHY,
            MagicFilterType.SWEETS,
            MagicFilterType.ROMANCE,
            MagicFilterType.SAKURA,
            MagicFilterType.WARM,
            MagicFilterType.ANTIQUE,
            MagicFilterType.NOSTALGIA,
            MagicFilterType.CALM,
            MagicFilterType.LATTE,
            MagicFilterType.TENDER,
            MagicFilterType.COOL,
            MagicFilterType.EMERALD,
            MagicFilterType.EVERGREEN,
            MagicFilterType.CRAYON,
            MagicFilterType.SKETCH,
            MagicFilterType.AMARO,
            MagicFilterType.BRANNAN,
            MagicFilterType.BROOKLYN,
            MagicFilterType.EARLYBIRD,
            MagicFilterType.FREUD,
            MagicFilterType.HEFE,
            MagicFilterType.HUDSON,
            MagicFilterType.INKWELL,
            MagicFilterType.KEVIN,
//            MagicFilterType.LOMO,
            MagicFilterType.N1977,
            MagicFilterType.NASHVILLE,
            MagicFilterType.PIXAR,
            MagicFilterType.RISE,
            MagicFilterType.SIERRA,
            MagicFilterType.SUTRO,
            MagicFilterType.TOASTER2,
            MagicFilterType.VALENCIA,
            MagicFilterType.WALDEN,
            MagicFilterType.XPROII
    };
    // Screen size in pixels. NOTE(review): written by code outside this file —
    // presumably at startup; confirm before relying on them being set.
    public static int mScreenWidth;
    public static int mScreenHeight;
    /**
     * Creates a timestamped JPEG file handle under the public
     * Pictures/MagicCamera directory (does not create the file itself).
     *
     * @return the target file, or null when the directory cannot be created
     */
    public static File getOutputMediaFile() {
        // To be safe, you should check that the SDCard is mounted
        // using Environment.getExternalStorageState() before doing this.
        File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
                Environment.DIRECTORY_PICTURES), "MagicCamera");
        // This location works best if you want the created images to be shared
        // between applications and persist after your app has been uninstalled.
        // Create the storage directory if it does not exist
        if (!mediaStorageDir.exists()) {
            if (!mediaStorageDir.mkdirs()) {
                Log.d("MyCameraApp", "failed to create directory");
                return null;
            }
        }
        // Create a media file name
        String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss",Locale.CHINESE).format(new Date());
        File mediaFile = new File(mediaStorageDir.getPath() + File.separator +
                "IMG_" + timeStamp + ".jpg");
        return mediaFile;
    }
    public static final int RX_JAVA_TYPE_IMAGE_EDIT = 10001;//image-edit event
    public static final int RX_JAVA_TYPE_CAMERA_SHOOT = 10002;//camera-shoot event
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/common/utils/DateUtils.java
================================================
package com.zero.magicshow.common.utils;
import com.zero.zerolib.util.DateUtil;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
/**
* Created by hongli on 2017/8/30.
*/
/**
 * Date helpers on top of zerolib's DateUtil.
 */
public class DateUtils extends DateUtil{
    /**
     * Returns the current time as a compact timestamp.
     *
     * @return e.g. {@code 20180521172011} (yyyyMMddHHmmss), or "" if
     *         formatting unexpectedly fails
     */
    public static String getTimeStamp(){
        // Explicit Locale.US keeps the digits ASCII regardless of the
        // device locale (the original used the locale-dependent constructor).
        SimpleDateFormat sim = new SimpleDateFormat("yyyyMMddHHmmss", Locale.US);
        try {
            return sim.format(new Date());
        }catch(Exception e){
            e.printStackTrace();
        }
        return "";
    }
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/common/utils/FilterTypeHelper.java
================================================
package com.zero.magicshow.common.utils;
import com.zero.magicshow.R;
import com.zero.magicshow.core.filter.utils.MagicFilterType;
/**
 * Static lookup tables mapping a MagicFilterType to its UI resources:
 * label background color, thumbnail drawable and display-name string.
 * Unknown types fall back to the NONE resources.
 */
public class FilterTypeHelper {
    /**
     * @return the color resource used as the label background for this filter
     */
    public static int FilterType2Color(MagicFilterType filterType){
        switch (filterType) {
            case NONE:
                return R.color.filter_color_grey_light;
            case WHITECAT:
            case BLACKCAT:
            case SUNRISE:
            case SUNSET:
                return R.color.filter_color_brown_light;
            case COOL:
                return R.color.filter_color_blue_dark;
            case EMERALD:
            case EVERGREEN:
                return R.color.filter_color_blue_dark_dark;
            case FAIRYTALE:
                return R.color.filter_color_blue;
            case ROMANCE:
            case SAKURA:
            case WARM:
                return R.color.filter_color_pink;
            case AMARO:
            case BRANNAN:
            case BROOKLYN:
            case EARLYBIRD:
            case FREUD:
            case HEFE:
            case HUDSON:
            case INKWELL:
            case KEVIN:
            case LOMO:
            case N1977:
            case NASHVILLE:
            case PIXAR:
            case RISE:
            case SIERRA:
            case SUTRO:
            case TOASTER2:
            case VALENCIA:
            case WALDEN:
            case XPROII:
                return R.color.filter_color_brown_dark;
            case ANTIQUE:
            case NOSTALGIA:
                return R.color.filter_color_green_dark;
            case SKINWHITEN:
            case HEALTHY:
                return R.color.filter_color_red;
            case SWEETS:
                return R.color.filter_color_red_dark;
            case CALM:
            case LATTE:
            case TENDER:
                return R.color.filter_color_brown;
            // CRAYON and SKETCH have no explicit case and use the default.
            default:
                return R.color.filter_color_grey_light;
        }
    }

    /**
     * @return the thumbnail drawable resource shown for this filter
     */
    public static int FilterType2Thumb(MagicFilterType filterType){
        switch (filterType) {
            case NONE:
                return R.drawable.filter_thumb_original;
            case WHITECAT:
                return R.drawable.filter_thumb_whitecat;
            case BLACKCAT:
                return R.drawable.filter_thumb_blackcat;
            case ROMANCE:
                return R.drawable.filter_thumb_romance;
            case SAKURA:
                return R.drawable.filter_thumb_sakura;
            case AMARO:
                return R.drawable.filter_thumb_amoro;
            case BRANNAN:
                return R.drawable.filter_thumb_brannan;
            case BROOKLYN:
                return R.drawable.filter_thumb_brooklyn;
            case EARLYBIRD:
                return R.drawable.filter_thumb_earlybird;
            case FREUD:
                return R.drawable.filter_thumb_freud;
            case HEFE:
                return R.drawable.filter_thumb_hefe;
            case HUDSON:
                return R.drawable.filter_thumb_hudson;
            case INKWELL:
                return R.drawable.filter_thumb_inkwell;
            case KEVIN:
                return R.drawable.filter_thumb_kevin;
            case LOMO:
                return R.drawable.filter_thumb_lomo;
            case N1977:
                return R.drawable.filter_thumb_1977;
            case NASHVILLE:
                return R.drawable.filter_thumb_nashville;
            case PIXAR:
                return R.drawable.filter_thumb_piaxr;
            case RISE:
                return R.drawable.filter_thumb_rise;
            case SIERRA:
                return R.drawable.filter_thumb_sierra;
            case SUTRO:
                return R.drawable.filter_thumb_sutro;
            case TOASTER2:
                return R.drawable.filter_thumb_toastero;
            case VALENCIA:
                return R.drawable.filter_thumb_valencia;
            case WALDEN:
                return R.drawable.filter_thumb_walden;
            case XPROII:
                return R.drawable.filter_thumb_xpro;
            case ANTIQUE:
                return R.drawable.filter_thumb_antique;
            case SKINWHITEN:
                return R.drawable.filter_thumb_beauty;
            case CALM:
                return R.drawable.filter_thumb_calm;
            case COOL:
                return R.drawable.filter_thumb_cool;
            case EMERALD:
                return R.drawable.filter_thumb_emerald;
            case EVERGREEN:
                return R.drawable.filter_thumb_evergreen;
            case FAIRYTALE:
                return R.drawable.filter_thumb_fairytale;
            case HEALTHY:
                return R.drawable.filter_thumb_healthy;
            case NOSTALGIA:
                return R.drawable.filter_thumb_nostalgia;
            case TENDER:
                return R.drawable.filter_thumb_tender;
            case SWEETS:
                return R.drawable.filter_thumb_sweets;
            case LATTE:
                return R.drawable.filter_thumb_latte;
            case WARM:
                return R.drawable.filter_thumb_warm;
            case SUNRISE:
                return R.drawable.filter_thumb_sunrise;
            case SUNSET:
                return R.drawable.filter_thumb_sunset;
            case CRAYON:
                return R.drawable.filter_thumb_crayon;
            case SKETCH:
                return R.drawable.filter_thumb_sketch;
            default:
                return R.drawable.filter_thumb_original;
        }
    }

    /**
     * @return the string resource holding this filter's display name
     */
    public static int FilterType2Name(MagicFilterType filterType){
        switch (filterType) {
            case NONE:
                return R.string.filter_none;
            case WHITECAT:
                return R.string.filter_whitecat;
            case BLACKCAT:
                return R.string.filter_blackcat;
            case ROMANCE:
                return R.string.filter_romance;
            case SAKURA:
                return R.string.filter_sakura;
            case AMARO:
                return R.string.filter_amaro;
            case BRANNAN:
                return R.string.filter_brannan;
            case BROOKLYN:
                return R.string.filter_brooklyn;
            case EARLYBIRD:
                return R.string.filter_Earlybird;
            case FREUD:
                return R.string.filter_freud;
            case HEFE:
                return R.string.filter_hefe;
            case HUDSON:
                return R.string.filter_hudson;
            case INKWELL:
                return R.string.filter_inkwell;
            case KEVIN:
                return R.string.filter_kevin;
            case LOMO:
                return R.string.filter_lomo;
            case N1977:
                return R.string.filter_n1977;
            case NASHVILLE:
                return R.string.filter_nashville;
            case PIXAR:
                return R.string.filter_pixar;
            case RISE:
                return R.string.filter_rise;
            case SIERRA:
                return R.string.filter_sierra;
            case SUTRO:
                return R.string.filter_sutro;
            case TOASTER2:
                return R.string.filter_toastero;
            case VALENCIA:
                return R.string.filter_valencia;
            case WALDEN:
                return R.string.filter_walden;
            case XPROII:
                return R.string.filter_xproii;
            case ANTIQUE:
                return R.string.filter_antique;
            case CALM:
                return R.string.filter_calm;
            case COOL:
                return R.string.filter_cool;
            case EMERALD:
                return R.string.filter_emerald;
            case EVERGREEN:
                return R.string.filter_evergreen;
            case FAIRYTALE:
                return R.string.filter_fairytale;
            case HEALTHY:
                return R.string.filter_healthy;
            case NOSTALGIA:
                return R.string.filter_nostalgia;
            case TENDER:
                return R.string.filter_tender;
            case SWEETS:
                return R.string.filter_sweets;
            case LATTE:
                return R.string.filter_latte;
            case WARM:
                return R.string.filter_warm;
            case SUNRISE:
                return R.string.filter_sunrise;
            case SUNSET:
                return R.string.filter_sunset;
            case SKINWHITEN:
                return R.string.filter_skinwhiten;
            case CRAYON:
                return R.string.filter_crayon;
            case SKETCH:
                return R.string.filter_sketch;
            default:
                return R.string.filter_none;
        }
    }
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/common/utils/GravityUtil.java
================================================
package com.zero.magicshow.common.utils;
import android.app.Activity;
import android.content.Context;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import com.zero.magicshow.common.iface.GravityCallBack;
/**
* Created by hongli on 2017/9/1.
*/
public class GravityUtil {
private static final String TAG = GravityUtil.class.getSimpleName();
// Reported direction values (see GravityCallBack.onGravityChange).
public static final int DIRECTION_LAND_LEFT = 1;
public static final int DIRECTION_PORTRAIT_POSITIVE = 2;
public static final int DIRECTION_LAND_RIGHT = 3;
public static final int DIRECTION_PORTRAIT_NEGATIVE = 4;
public static final int DIRECTION_LAND = 5;//landscape chosen manually
public static final int DIRECTION_PORTRAIT = 6;//portrait chosen manually
// Eagerly-created singleton instance.
private volatile static GravityUtil gravityUtil = new GravityUtil();
private GravityCallBack gravityCallBack;
private Activity mActivity;
// Whether the device is currently treated as portrait.
private boolean isPortrait = true;
// In landscape: tilted left (true) or right (false).
private boolean isLeftCrossScreen = true;
private SensorManager sm;
private GravityUtil.OrientationSensorListener listener;
private Sensor sensor;
// Second sensor/listener pair (see toggleScreen / init).
private SensorManager sm1;
private Sensor sensor1;
private GravityUtil.OrientationSensorListener1 listener1;
private final int MAX_RETRY_COUNT = 10;
// A direction change is only reported after MAX_RETRY_COUNT consecutive
// consistent readings; these counters track the current streak per direction.
private int retryLandLeftCount = 0;
private int retryLandRightCount = 0;
private int retryPorPositiveCount = 0;
private int retryPorNegtiveCount = 0;
/** @return the process-wide GravityUtil singleton (created eagerly). */
public static GravityUtil getInstance(){
    return gravityUtil;
}
// Private: use getInstance().
private GravityUtil(){
}
/**
 * Initializes the accelerometer listeners used for orientation detection.
 * Sensors are looked up only on the first call, but the callback is
 * replaced on every call.
 * (The original javadoc said "returns the ScreenSwitchUtils singleton",
 * which described a different class.)
 *
 * @param context         any context; used to obtain SENSOR_SERVICE
 * @param gravityCallBack receiver for direction-change events
 */
public void init(Context context, GravityCallBack gravityCallBack) {
    Log.d(TAG, "init orientation listener.");
    if(null == sm){
        // Register the accelerometer used to watch screen rotation.
        sm = (SensorManager) context.getSystemService(Context.SENSOR_SERVICE);
        sensor = sm.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
        listener = new GravityUtil.OrientationSensorListener(mHandler);
        // Second sensor/listener pair, intended for the manual toggle path
        // (activated when rotation and the fullscreen toggle agree).
        sm1 = (SensorManager) context.getSystemService(Context.SENSOR_SERVICE);
        sensor1 = sm1.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
        listener1 = new GravityUtil.OrientationSensorListener1();
    }
    this.gravityCallBack = gravityCallBack;
}
/**
 * Receives orientation angles (0-359, msg.what == 888) from the sensor
 * listener and debounces them: a direction change is reported through
 * gravityCallBack only after MAX_RETRY_COUNT consecutive readings agree.
 * Each branch resets the other three streak counters while counting up.
 */
private Handler mHandler = new Handler() {
    public void handleMessage(Message msg) {
        switch (msg.what) {
            case 888:
                int orientation = msg.arg1;
                if (orientation > 45 && orientation < 135) {
                    // Right-landscape range.
                    if (isPortrait && retryLandRightCount >= MAX_RETRY_COUNT) {
                        Log.e("test", "切换成右横屏");
                        isLeftCrossScreen = false;
                        gravityCallBack.onGravityChange(DIRECTION_LAND_RIGHT);
                        isPortrait = false;
                        retryLandRightCount = 0;
                    } else {
                        retryLandRightCount++;
                        retryLandLeftCount = 0;
                        retryPorNegtiveCount = 0;
                        retryPorPositiveCount = 0;
                    }
                } else if (orientation > 135 && orientation < 225) {
                    // Upside-down portrait range.
                    if (!isPortrait && retryPorNegtiveCount >= MAX_RETRY_COUNT) {
                        Log.e("test", "切换成反竖屏");
                        gravityCallBack.onGravityChange(DIRECTION_PORTRAIT_NEGATIVE);
                        isPortrait = true;
                        retryPorNegtiveCount = 0;
                    } else {
                        retryPorNegtiveCount++;
                        retryLandRightCount = 0;
                        retryLandLeftCount = 0;
                        retryPorPositiveCount = 0;
                    }
                } else if (orientation > 225 && orientation < 315) {
                    // Left-landscape range.
                    if (isPortrait && retryLandLeftCount >= MAX_RETRY_COUNT) {
                        Log.e("test", "切换成左横屏");
                        isLeftCrossScreen = true;
                        gravityCallBack.onGravityChange(DIRECTION_LAND_LEFT);
                        isPortrait = false;
                        retryLandLeftCount = 0;
                    } else {
                        retryLandLeftCount++;
                        retryPorNegtiveCount = 0;
                        retryLandRightCount = 0;
                        retryPorPositiveCount = 0;
                    }
                } else if ((orientation > 315 && orientation < 360) || (orientation > 0 && orientation < 45)) {
                    // Upright-portrait range.
                    // BUG FIX: was "retryPorPositiveCount >+MAX_RETRY_COUNT"
                    // (parses as "> +MAX", i.e. strictly greater), one reading
                    // more than the ">=" used by the other three branches.
                    if (!isPortrait && retryPorPositiveCount >= MAX_RETRY_COUNT) {
                        Log.e("test", "切换成竖屏");
                        gravityCallBack.onGravityChange(DIRECTION_PORTRAIT_POSITIVE);
                        isPortrait = true;
                        retryPorPositiveCount = 0;
                    } else {
                        retryPorPositiveCount++;
                        retryPorNegtiveCount = 0;
                        retryLandRightCount = 0;
                        retryLandLeftCount = 0;
                    }
                }
                break;
            default:
                break;
        }
    }
};
/**
 * Starts orientation tracking: registers the accelerometer listener that
 * feeds orientation angles into the internal handler.
 *
 * @param activity the activity requesting orientation tracking
 */
public void start(Activity activity) {
    Log.d(TAG, "start orientation listener.");
    this.mActivity = activity;
    this.sm.registerListener(this.listener, this.sensor, SensorManager.SENSOR_DELAY_UI);
}
/**
 * Stops orientation tracking: unregisters both accelerometer listeners.
 */
public void stop() {
    Log.d(TAG, "stop orientation listener.");
    this.sm.unregisterListener(this.listener);
    this.sm1.unregisterListener(this.listener1);
}
/**
 * Manually toggles between portrait and landscape.
 * NOTE(review): the entire implementation is commented out, so calling this
 * method is currently a no-op — confirm whether manual toggling is still
 * wanted before removing it.
 */
public void toggleScreen() {
//        sm.unregisterListener(listener);
//        sm1.registerListener(listener1, sensor1,SensorManager.SENSOR_DELAY_UI);
//        if (isPortrait) {
//            isPortrait = false;
//            // switch to landscape
//            mActivity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
//        } else {
//            isPortrait = true;
//            // switch to portrait
//            mActivity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
//        }
}
/** @return true while the debounced orientation is one of the portrait states. */
public boolean isPortrait() {
    return isPortrait;
}
/** @return true when the last reported landscape direction was "left". */
public boolean isLeftCrossScreen() {
    return isLeftCrossScreen;
}
/**
 * Accelerometer listener that converts the raw gravity vector into a screen
 * orientation angle (0-359, or ORIENTATION_UNKNOWN when the device lies
 * nearly flat) and forwards it to the supplied handler as message 888.
 */
public class OrientationSensorListener implements SensorEventListener {
    private static final int _DATA_X = 0;
    private static final int _DATA_Y = 1;
    private static final int _DATA_Z = 2;
    public static final int ORIENTATION_UNKNOWN = -1;
    private Handler rotateHandler;

    public OrientationSensorListener(Handler handler) {
        rotateHandler = handler;
    }

    public void onAccuracyChanged(Sensor arg0, int arg1) {
    }

    public void onSensorChanged(SensorEvent event) {
        final float[] v = event.values;
        final float x = -v[_DATA_X];
        final float y = -v[_DATA_Y];
        final float z = -v[_DATA_Z];
        int orientation = ORIENTATION_UNKNOWN;
        // Only trust the X/Y plane when its magnitude dominates Z,
        // i.e. the device is not lying (nearly) flat on its back.
        final float magnitude = x * x + y * y;
        if (magnitude * 4 >= z * z) {
            final float oneEightyOverPi = 57.29577957855f; // radians -> degrees
            final float angle = (float) Math.atan2(-y, x) * oneEightyOverPi;
            // Normalize to the 0..359 range.
            orientation = ((90 - Math.round(angle)) % 360 + 360) % 360;
        }
        if (rotateHandler != null) {
            rotateHandler.obtainMessage(888, orientation, 0).sendToTarget();
        }
    }
}
/**
 * Secondary accelerometer listener: computes the same orientation angle as
 * {@link OrientationSensorListener}, but instead of reporting it, it waits
 * until the physical orientation agrees with the logical one again and then
 * re-activates the primary listener while retiring itself.
 */
public class OrientationSensorListener1 implements SensorEventListener {
    private static final int _DATA_X = 0;
    private static final int _DATA_Y = 1;
    private static final int _DATA_Z = 2;
    public static final int ORIENTATION_UNKNOWN = -1;

    public OrientationSensorListener1() {
    }

    public void onAccuracyChanged(Sensor arg0, int arg1) {
    }

    public void onSensorChanged(SensorEvent event) {
        final float[] v = event.values;
        final float x = -v[_DATA_X];
        final float y = -v[_DATA_Y];
        final float z = -v[_DATA_Z];
        int orientation = ORIENTATION_UNKNOWN;
        // Ignore the reading when the device is close to flat.
        final float magnitude = x * x + y * y;
        if (magnitude * 4 >= z * z) {
            final float oneEightyOverPi = 57.29577957855f; // radians -> degrees
            final float angle = (float) Math.atan2(-y, x) * oneEightyOverPi;
            // Normalize to the 0..359 range.
            orientation = ((90 - Math.round(angle)) % 360 + 360) % 360;
        }
        final boolean physicallyLandscape = orientation > 225 && orientation < 315;
        final boolean physicallyPortrait =
                (orientation > 315 && orientation < 360) || (orientation > 0 && orientation < 45);
        // Hand control back to the primary listener once the measured
        // orientation matches the logical one.
        if ((physicallyLandscape && !isPortrait) || (physicallyPortrait && isPortrait)) {
            sm.registerListener(listener, sensor, SensorManager.SENSOR_DELAY_UI);
            sm1.unregisterListener(listener1);
        }
    }
}
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/common/utils/MagicParams.java
================================================
package com.zero.magicshow.common.utils;
import android.content.Context;
import android.os.Environment;
import com.zero.magicshow.common.base.MagicBaseView;
/**
* Created by why8222 on 2016/2/26.
*/
public class MagicParams {
    // Application context, set by MagicEngine.Builder#build from the attached view.
    public static Context context;
    // The GL view currently driven by the engine (camera preview or still image).
    public static MagicBaseView magicBaseView;
    // Directory recorded videos are written to; defaults to the external storage root.
    public static String videoPath = Environment.getExternalStorageDirectory().getPath();
    // File name used for the recorded video.
    public static String videoName = "MagicCamera_test.mp4";
    // Current beauty level; MagicEngine#setBeautyLevel updates it (0 appears to mean off — confirm).
    public static int beautyLevel = 0;
    public MagicParams() {
    }
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/common/utils/OpenGlUtils.java
================================================
package com.zero.magicshow.common.utils;
import android.content.Context;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import android.util.Log;
import com.zero.magicshow.core.filter.base.gpuimage.GPUImageFilter;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import javax.microedition.khronos.opengles.GL10;
public class OpenGlUtils {
public static final int NO_TEXTURE = -1;
public static final int NOT_INIT = -1;
public static final int ON_DRAWN = 1;
public static int loadTexture(final Bitmap img, final int usedTexId) {
return loadTexture(img, usedTexId, false);
}
public static int loadTexture(final Bitmap img, final int usedTexId, boolean recyled) {
if(img == null)
return NO_TEXTURE;
int textures[] = new int[1];
if (usedTexId == NO_TEXTURE) {
GLES20.glGenTextures(1, textures, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, img, 0);
} else {
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, usedTexId);
GLUtils.texSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, img);
textures[0] = usedTexId;
}
if(recyled)
img.recycle();
return textures[0];
}
public static int loadTexture(final Buffer data, final int width,final int height, final int usedTexId) {
if(data == null)
return NO_TEXTURE;
int textures[] = new int[1];
if (usedTexId == NO_TEXTURE) {
GLES20.glGenTextures(1, textures, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height,
0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, data);
} else {
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, usedTexId);
GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, width,
height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, data);
textures[0] = usedTexId;
}
return textures[0];
}
public static int loadTexture(final Buffer data, final int width,final int height, final int usedTexId,final int type) {
if(data == null)
return NO_TEXTURE;
int textures[] = new int[1];
if (usedTexId == NO_TEXTURE) {
GLES20.glGenTextures(1, textures, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height,
0, GLES20.GL_RGBA, type, data);
} else {
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, usedTexId);
GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, width,
height, GLES20.GL_RGBA, type, data);
textures[0] = usedTexId;
}
return textures[0];
}
public static int loadTexture(final Context context, final String name){
final int[] textureHandle = new int[1];
GLES20.glGenTextures(1, textureHandle, 0);
if (textureHandle[0] != 0){
// Read in the resource
final Bitmap bitmap = getImageFromAssetsFile(context,name);
// Bind to the texture in OpenGL
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);
// Set filtering
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
// Load the bitmap into the bound texture.
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
// Recycle the bitmap, since its data has been loaded into OpenGL.
bitmap.recycle();
}
if (textureHandle[0] == 0){
throw new RuntimeException("Error loading texture.");
}
return textureHandle[0];
}
private static Bitmap getImageFromAssetsFile(Context context,String fileName){
Bitmap image = null;
AssetManager am = context.getResources().getAssets();
try{
InputStream is = am.open(fileName);
image = BitmapFactory.decodeStream(is);
is.close();
}catch (IOException e){
e.printStackTrace();
}
return image;
}
public static int loadProgram(final String strVSource, final String strFSource) {
int iVShader;
int iFShader;
int iProgId;
int[] link = new int[1];
iVShader = loadShader(strVSource, GLES20.GL_VERTEX_SHADER);
if (iVShader == 0) {
Log.d("Load Program", "Vertex Shader Failed");
return 0;
}
iFShader = loadShader(strFSource, GLES20.GL_FRAGMENT_SHADER);
if (iFShader == 0) {
Log.d("Load Program", "Fragment Shader Failed");
return 0;
}
iProgId = GLES20.glCreateProgram();
GLES20.glAttachShader(iProgId, iVShader);
GLES20.glAttachShader(iProgId, iFShader);
GLES20.glLinkProgram(iProgId);
GLES20.glGetProgramiv(iProgId, GLES20.GL_LINK_STATUS, link, 0);
if (link[0] <= 0) {
Log.d("Load Program", "Linking Failed");
return 0;
}
GLES20.glDeleteShader(iVShader);
GLES20.glDeleteShader(iFShader);
return iProgId;
}
private static int loadShader(final String strSource, final int iType) {
int[] compiled = new int[1];
int iShader = GLES20.glCreateShader(iType);
GLES20.glShaderSource(iShader, strSource);
GLES20.glCompileShader(iShader);
GLES20.glGetShaderiv(iShader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
Log.e("Load Shader Failed", "Compilation\n" + GLES20.glGetShaderInfoLog(iShader));
return 0;
}
return iShader;
}
public static int getExternalOESTextureID(){
int[] texture = new int[1];
GLES20.glGenTextures(1, texture, 0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture[0]);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_MIN_FILTER,GL10.GL_LINEAR);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
return texture[0];
}
public static String readShaderFromRawResource(final int resourceId){
final InputStream inputStream = MagicParams.context.getResources().openRawResource(
resourceId);
final InputStreamReader inputStreamReader = new InputStreamReader(
inputStream);
final BufferedReader bufferedReader = new BufferedReader(
inputStreamReader);
String nextLine;
final StringBuilder body = new StringBuilder();
try{
while ((nextLine = bufferedReader.readLine()) != null){
body.append(nextLine);
body.append('\n');
}
}
catch (IOException e){
return null;
}
return body.toString();
}
public static Bitmap drawToBitmapByFilter(Bitmap bitmap, GPUImageFilter filter,
int displayWidth, int displayHeight, boolean rotate){
if(filter == null)
return null;
int width = bitmap.getWidth();
int height = bitmap.getHeight();
int[] mFrameBuffers = new int[1];
int[] mFrameBufferTextures = new int[1];
GLES20.glGenFramebuffers(1, mFrameBuffers, 0);
GLES20.glGenTextures(1, mFrameBufferTextures, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mFrameBufferTextures[0]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0,
GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFrameBuffers[0]);
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
GLES20.GL_TEXTURE_2D, mFrameBufferTextures[0], 0);
GLES20.glViewport(0, 0, width, height);
filter.onInputSizeChanged(width, height);
filter.onDisplaySizeChanged(displayWidth, displayHeight);
int textureId = OpenGlUtils.loadTexture(bitmap, OpenGlUtils.NO_TEXTURE, true);
if(rotate){
FloatBuffer gLCubeBuffer = ByteBuffer.allocateDirect(TextureRotationUtil.CUBE.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
gLCubeBuffer.put(TextureRotationUtil.CUBE).position(0);
FloatBuffer gLTextureBuffer = ByteBuffer.allocateDirect(TextureRotationUtil.TEXTURE_NO_ROTATION.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
gLTextureBuffer.put(TextureRotationUtil.getRotation(Rotation.ROTATION_90, true, false)).position(0);
filter.onDrawFrame(textureId, gLCubeBuffer, gLTextureBuffer);
}else {
filter.onDrawFrame(textureId);
}
IntBuffer ib = IntBuffer.allocate(width * height);
GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, ib);
Bitmap result = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
result.copyPixelsFromBuffer(IntBuffer.wrap(ib.array()));
GLES20.glDeleteTextures(1, new int[]{textureId}, 0);
GLES20.glDeleteFramebuffers(1, mFrameBuffers, 0);
GLES20.glDeleteTextures(1, mFrameBufferTextures, 0);
filter.onInputSizeChanged(displayWidth, displayHeight);
return result;
}
/**
* Checks to see if a GLES error has been raised.
*/
public static void checkGlError(String op) {
int error = GLES20.glGetError();
if (error != GLES20.GL_NO_ERROR) {
String msg = op + ": glError 0x" + Integer.toHexString(error);
Log.e("OpenGlUtils", msg);
throw new RuntimeException(msg);
}
}
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/common/utils/Rotation.java
================================================
/*
* Copyright (C) 2012 CyberAgent
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.zero.magicshow.common.utils;
/**
 * The four quarter-turn rotations, convertible to and from degrees.
 */
public enum Rotation {
    NORMAL, ROTATION_90, ROTATION_180, ROTATION_270;

    /**
     * Retrieves the int representation of the Rotation.
     *
     * @return 0, 90, 180 or 270
     */
    public int asInt() {
        if (this == NORMAL) {
            return 0;
        } else if (this == ROTATION_90) {
            return 90;
        } else if (this == ROTATION_180) {
            return 180;
        } else if (this == ROTATION_270) {
            return 270;
        }
        throw new IllegalStateException("Unknown Rotation!");
    }

    /**
     * Create a Rotation from an integer. Needs to be either 0, 90, 180, 270
     * (or 360, which is treated as a full turn back to 0).
     *
     * @param rotation 0, 90, 180, 270 or 360
     * @return Rotation object
     */
    public static Rotation fromInt(int rotation) {
        switch (rotation) {
            case 0:
            case 360:
                return NORMAL;
            case 90:
                return ROTATION_90;
            case 180:
                return ROTATION_180;
            case 270:
                return ROTATION_270;
            default:
                throw new IllegalStateException(
                        rotation + " is an unknown rotation. Needs to be either 0, 90, 180 or 270!");
        }
    }
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/common/utils/RxBus.java
================================================
package com.zero.magicshow.common.utils;
import android.support.annotation.NonNull;
import android.util.Log;
import com.zero.zerolib.manager.PostManager;
import java.util.Vector;
import rx.android.schedulers.AndroidSchedulers;
import rx.functions.Action1;
import rx.functions.Actions;
import rx.internal.util.ActionSubscriber;
import rx.subjects.PublishSubject;
import rx.subjects.Subject;
/**
* Created by hongli on 2017/8/31.
*/
/**
 * Lightweight event bus built on RxJava 1 subjects: subscribers register an
 * {@link Action1} callback for an integer action type and receive events on
 * the Android main thread; publishers deliver events via {@link #post}.
 */
public class RxBus {
    private static RxBus instance;
    // One entry per registration.
    // FIX: was a raw Vector, which cannot be iterated as ZeroSubject below
    // (generics likely lost in a bad merge).
    private Vector<ZeroSubject> subjectList = new Vector<>();

    private RxBus() {
    }

    /** @return the lazily-created process-wide singleton. */
    public static synchronized RxBus getInstance() {
        if (null == instance) {
            instance = new RxBus();
        }
        return instance;
    }

    /**
     * Subscribes {@code action} to events posted with {@code actionType};
     * the callback runs on the Android main thread.
     */
    public synchronized void registerMain(int actionType, Action1 action) {
        Subject subject = PublishSubject.create();
        // FIX: the anonymous error handlers must be Action1<Throwable> for
        // call(Throwable) to implement the interface method.
        subject.observeOn(AndroidSchedulers.mainThread()).doOnError(new Action1<Throwable>() {
            @Override
            public void call(Throwable throwable) {
                throwable.printStackTrace();
                Log.e("RxBus","post error");
            }
        }).subscribe(new ActionSubscriber(action, new Action1<Throwable>() {
            @Override
            public void call(Throwable throwable) {
                throwable.printStackTrace();
                Log.e("RxBus","post error");
            }
        }, Actions.empty()));
        ZeroSubject zeroSubject = new ZeroSubject(subject, actionType, action);
        subjectList.add(zeroSubject);
    }

    /** Removes the first registration whose callback is {@code action}. */
    public synchronized void unregisterMain(Action1 action) {
        for (ZeroSubject zeroSubject : subjectList) {
            if (zeroSubject.getAction() == action) {
                // Safe despite iterating: we break immediately after removal.
                subjectList.remove(zeroSubject);
                break;
            }
        }
    }

    /** Removes the first registration for {@code actionType}. */
    public synchronized void unregisterMain(int actionType) {
        for (ZeroSubject zeroSubject : subjectList) {
            if (zeroSubject.getActionType() == actionType) {
                subjectList.remove(zeroSubject);
                break;
            }
        }
    }

    /** Removes every registration for {@code actionType}. */
    public synchronized void unregisterAllMain(int actionType) {
        Vector<ZeroSubject> subjectRemoveList = new Vector<>();
        for (ZeroSubject zeroSubject : subjectList) {
            if (zeroSubject.getActionType() == actionType) {
                subjectRemoveList.add(zeroSubject);
            }
        }
        subjectList.removeAll(subjectRemoveList);
    }

    /** Removes the first registration for {@code actionType}. */
    public synchronized void unregisterCategory(int actionType) {
        for (ZeroSubject zeroSubject : subjectList) {
            if (zeroSubject.getActionType() == actionType) {
                subjectList.remove(zeroSubject);
                break;
            }
        }
    }

    /**
     * Delivers {@code content} to every subscriber registered for
     * {@code actionType}, dispatched on the main thread.
     */
    public void post(@NonNull final Object content, final int actionType) {
        PostManager.getInstance().postMain(new Runnable() {
            @Override
            public void run() {
                // FIX: lock the bus itself — "synchronized (this)" inside the
                // anonymous Runnable locked the Runnable instance and did not
                // exclude the synchronized register/unregister methods.
                synchronized (RxBus.this) {
                    try {
                        for (ZeroSubject subject : subjectList) {
                            if (subject != null && subject.getActionType() == actionType) {
                                try {
                                    subject.getSubject().onNext(content);
                                } catch (Exception e) {
                                    e.printStackTrace();
                                }
                            }
                        }
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
        }, 0);
    }
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/common/utils/SavePictureTask.java
================================================
package com.zero.magicshow.common.utils;
import android.graphics.Bitmap;
import android.os.AsyncTask;
import com.zero.magicshow.common.entity.MagicShowResultEntity;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
/**
 * Background task that writes a bitmap to a file as JPEG and reports the
 * saved path (plus a display angle) to an {@link OnPictureSaveListener}.
 *
 * FIX: restored the AsyncTask type arguments — with the raw supertype the
 * {@code @Override} of doInBackground(Bitmap...) does not compile.
 */
public class SavePictureTask extends AsyncTask<Bitmap, Void, MagicShowResultEntity> {
    private OnPictureSaveListener onPictureSaveListener;
    private File file;

    /**
     * @param file     destination file; when null the task produces no result
     * @param listener notified on the UI thread once the picture is saved
     */
    public SavePictureTask(File file, OnPictureSaveListener listener) {
        this.onPictureSaveListener = listener;
        this.file = file;
    }

    @Override
    protected void onPreExecute() {
        super.onPreExecute();
    }

    @Override
    protected void onPostExecute(final MagicShowResultEntity result) {
        // Saving may fail (result == null); only notify on success.
        if (result != null && onPictureSaveListener != null) {
            onPictureSaveListener.onSaved(result);
        }
    }

    @Override
    protected MagicShowResultEntity doInBackground(Bitmap... params) {
        // Guard against a missing file or missing bitmap argument.
        if (file == null || params.length == 0 || params[0] == null) {
            return null;
        }
        MagicShowResultEntity resultEntity = new MagicShowResultEntity();
        // Portrait-shaped bitmaps are marked with a 90-degree display angle.
        resultEntity.setAngle(params[0].getHeight() > params[0].getWidth() ? 90 : 0);
        resultEntity.setFilePath(saveBitmap(params[0]));
        return resultEntity;
    }

    /**
     * Writes {@code bitmap} to {@link #file} as JPEG (quality 100) and
     * recycles the bitmap on success.
     *
     * @return the saved file path, or null on I/O failure
     */
    private String saveBitmap(Bitmap bitmap) {
        if (file.exists()) {
            file.delete();
        }
        // FIX: try-with-resources — the stream was leaked when compress or
        // flush threw before close().
        try (FileOutputStream out = new FileOutputStream(file)) {
            bitmap.compress(Bitmap.CompressFormat.JPEG, 100, out);
            out.flush();
            bitmap.recycle();
            return file.toString();
        } catch (IOException e) {
            // Covers FileNotFoundException as well.
            e.printStackTrace();
        }
        return null;
    }

    public interface OnPictureSaveListener {
        void onSaved(MagicShowResultEntity resultEntity);
    }
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/common/utils/TextureRotationUtil.java
================================================
/*
* Copyright (C) 2012 CyberAgent
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.zero.magicshow.common.utils;
/**
 * Texture-coordinate tables for the four screen rotations, plus helpers to
 * apply horizontal/vertical flips. Coordinates are (s, t) pairs for the quad
 * described by {@link #CUBE}.
 */
public class TextureRotationUtil {
    public static final float TEXTURE_NO_ROTATION[] = {
            0.0f, 1.0f,
            1.0f, 1.0f,
            0.0f, 0.0f,
            1.0f, 0.0f,
    };
    public static final float TEXTURE_ROTATED_90[] = {
            1.0f, 1.0f,
            1.0f, 0.0f,
            0.0f, 1.0f,
            0.0f, 0.0f,
    };
    public static final float TEXTURE_ROTATED_180[] = {
            1.0f, 0.0f,
            0.0f, 0.0f,
            1.0f, 1.0f,
            0.0f, 1.0f,
    };
    public static final float TEXTURE_ROTATED_270[] = {
            0.0f, 0.0f,
            0.0f, 1.0f,
            1.0f, 0.0f,
            1.0f, 1.0f,
    };
    public static final float CUBE[] = {
            -1.0f, -1.0f,
            1.0f, -1.0f,
            -1.0f, 1.0f,
            1.0f, 1.0f,
    };

    private TextureRotationUtil() {}

    /**
     * Returns texture coordinates for the given rotation, optionally flipped
     * horizontally and/or vertically. The shared constant tables are never
     * modified; flips always produce fresh arrays.
     */
    public static float[] getRotation(final Rotation rotation, final boolean flipHorizontal,
                                      final boolean flipVertical) {
        float[] coords;
        switch (rotation) {
            case ROTATION_90:
                coords = TEXTURE_ROTATED_90;
                break;
            case ROTATION_180:
                coords = TEXTURE_ROTATED_180;
                break;
            case ROTATION_270:
                coords = TEXTURE_ROTATED_270;
                break;
            case NORMAL:
            default:
                coords = TEXTURE_NO_ROTATION;
                break;
        }
        if (flipHorizontal) {
            coords = flipped(coords, 0); // mirror every s coordinate
        }
        if (flipVertical) {
            coords = flipped(coords, 1); // mirror every t coordinate
        }
        return coords;
    }

    // Returns a copy of tex with every second value, starting at offset,
    // mirrored between 0 and 1.
    private static float[] flipped(float[] tex, int offset) {
        float[] out = tex.clone();
        for (int i = offset; i < out.length; i += 2) {
            out[i] = flip(out[i]);
        }
        return out;
    }

    private static float flip(final float i) {
        return i == 0.0f ? 1.0f : 0.0f;
    }
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/common/utils/ZeroSubject.java
================================================
package com.zero.magicshow.common.utils;
import rx.functions.Action1;
import rx.subjects.Subject;
/**
* Created by hongli on 2017/8/31.
*/
/**
 * Registration record used by RxBus: pairs an Rx {@link Subject} with the
 * integer action type it serves and the {@link Action1} callback that
 * identifies the subscriber for unregistration.
 */
public class ZeroSubject {
    // FIX: fields made final — a registration never changes after
    // construction, so make immutability explicit.
    private final int actionType;
    private final Subject subject;
    private final Action1 action;

    public ZeroSubject(Subject subject, int actionType, Action1 action) {
        this.actionType = actionType;
        this.subject = subject;
        this.action = action;
    }

    public int getActionType() {
        return actionType;
    }

    public Subject getSubject() {
        return subject;
    }

    public Action1 getAction() {
        return action;
    }
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/core/MagicEngine.java
================================================
package com.zero.magicshow.core;
import com.zero.magicshow.core.camera.CameraEngine;
import com.zero.magicshow.core.filter.utils.MagicFilterType;
import com.zero.magicshow.common.utils.SavePictureTask;
import com.zero.magicshow.common.utils.MagicParams;
import com.zero.magicshow.core.widget.MagicCameraView;
import com.zero.magicshow.core.widget.MagicImageView;
import com.zero.magicshow.common.base.MagicBaseView;
import com.zero.zerolib.manager.PostManager;
import java.io.File;
/**
* Created by why8222 on 2016/2/25.
*/
/**
 * Facade that drives the currently attached {@link MagicBaseView}: filter
 * selection, beauty/skin adjustments, picture saving, video recording and
 * camera switching. Must be created once via {@link Builder#build} before
 * {@link #getInstance()} is usable.
 */
public class MagicEngine {
    private static MagicEngine magicEngine;
    private float skinSmoothLevel;
    private float whiteSkinLevel;

    /** @return the singleton; the Builder must have been run first. */
    public static MagicEngine getInstance() {
        if (magicEngine == null) {
            throw new NullPointerException("MagicEngine must be built first");
        }
        return magicEngine;
    }

    private MagicEngine(Builder builder) {
    }

    /** Applies the given filter type to the attached view. */
    public void setFilter(MagicFilterType type) {
        MagicParams.magicBaseView.setFilter(type);
    }

    public MagicFilterType getFilterType() {
        return MagicParams.magicBaseView.getFilterType();
    }

    /** Adjusts the strength of the given filter; only meaningful for image views. */
    public void adjustFilter(float rang, MagicFilterType type) {
        MagicBaseView view = MagicParams.magicBaseView;
        if (view instanceof MagicImageView) {
            ((MagicImageView) view).adjustFilter(rang, type);
        }
    }

    /** Saves the current frame to {@code file}, notifying {@code listener} when done. */
    public void savePicture(File file, SavePictureTask.OnPictureSaveListener listener) {
        SavePictureTask savePictureTask = new SavePictureTask(file, listener);
        MagicParams.magicBaseView.savePicture(savePictureTask);
    }

    /** Starts video recording; no-op unless a camera view is attached. */
    public void startRecord() {
        MagicBaseView view = MagicParams.magicBaseView;
        if (view instanceof MagicCameraView) {
            ((MagicCameraView) view).changeRecordingState(true);
        }
    }

    /** Stops video recording; no-op unless a camera view is attached. */
    public void stopRecord() {
        MagicBaseView view = MagicParams.magicBaseView;
        if (view instanceof MagicCameraView) {
            ((MagicCameraView) view).changeRecordingState(false);
        }
    }

    /** Updates the camera beauty level; skipped when the level is unchanged. */
    public void setBeautyLevel(int level) {
        MagicBaseView view = MagicParams.magicBaseView;
        if (view instanceof MagicCameraView && MagicParams.beautyLevel != level) {
            MagicParams.beautyLevel = level;
            ((MagicCameraView) view).onBeautyLevelChanged();
        }
    }

    /** Initializes native beautify for image views on the slow worker queue. */
    public void initBeauty() {
        if (MagicParams.magicBaseView instanceof MagicImageView) {
            PostManager.getInstance().postSlow(new Runnable() {
                @Override
                public void run() {
                    ((MagicImageView) MagicParams.magicBaseView).initMagicBeautify();
                }
            }, 0);
        }
    }

    /** Releases native beautify for image views on the slow worker queue. */
    public void uninitBeauty() {
        if (MagicParams.magicBaseView instanceof MagicImageView) {
            PostManager.getInstance().postSlow(new Runnable() {
                @Override
                public void run() {
                    ((MagicImageView) MagicParams.magicBaseView).uninitMagicBeautify();
                }
            }, 0);
        }
    }

    /** Debounced skin-smooth adjustment: any pending request is replaced. */
    public void setSkinSmooth(float level) {
        skinSmoothLevel = level;
        PostManager.getInstance().removeSlow(setSkinSmoothRun);
        PostManager.getInstance().postSlow(setSkinSmoothRun, 0);
    }

    /** Debounced skin-whitening adjustment: any pending request is replaced. */
    public void setWhiteSkin(float level) {
        whiteSkinLevel = level;
        PostManager.getInstance().removeSlow(setWhiteSkinRun);
        PostManager.getInstance().postSlow(setWhiteSkinRun, 0);
    }

    /** Commits the current image edits; no-op unless an image view is attached. */
    public void commitImage() {
        MagicBaseView view = MagicParams.magicBaseView;
        if (view instanceof MagicImageView) {
            ((MagicImageView) view).commit();
        }
    }

    /** Restores the original image; no-op unless an image view is attached. */
    public void restoreImage() {
        MagicBaseView view = MagicParams.magicBaseView;
        if (view instanceof MagicImageView) {
            ((MagicImageView) view).restore();
        }
    }

    private Runnable setSkinSmoothRun = new Runnable() {
        @Override
        public void run() {
            if (MagicParams.magicBaseView instanceof MagicImageView) {
                ((MagicImageView) MagicParams.magicBaseView).setSkinSmooth(skinSmoothLevel);
            }
        }
    };

    private Runnable setWhiteSkinRun = new Runnable() {
        @Override
        public void run() {
            if (MagicParams.magicBaseView instanceof MagicImageView) {
                ((MagicImageView) MagicParams.magicBaseView).setWhiteSkin(whiteSkinLevel);
            }
        }
    };

    public void switchCamera() {
        CameraEngine.switchCamera();
    }

    /** Wires the engine to a view and records the global parameters. */
    public static class Builder {
        public MagicEngine build(MagicBaseView magicBaseView) {
            MagicParams.context = magicBaseView.getContext();
            MagicParams.magicBaseView = magicBaseView;
            magicEngine = new MagicEngine(this);
            return magicEngine;
        }

        public Builder setVideoPath(String path) {
            MagicParams.videoPath = path;
            return this;
        }

        public Builder setVideoName(String name) {
            MagicParams.videoName = name;
            return this;
        }
    }
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/core/beautify/MagicJni.java
================================================
package com.zero.magicshow.core.beautify;
import android.graphics.Bitmap;
import java.nio.ByteBuffer;
/**
* Created by why8222 on 2016/2/29.
*/
public class MagicJni {
    static{
        System.loadLibrary("MagicJni");
    }
    // Initialize / release the native beautify engine with the pixel buffer
    // obtained from jniStoreBitmapData.
    public static native void jniInitMagicBeautify(ByteBuffer handler);
    public static native void jniUnInitMagicBeautify();
    // Skin smoothing via local mean/variance denoising.
    public static native void jniStartSkinSmooth(float denoiseLevel);
    // Skin whitening via a log curve.
    public static native void jniStartWhiteSkin(float whitenLevel);
    // Bitmap operations: copy a bitmap's pixels into native memory, free that
    // memory, and rebuild a Bitmap from the stored pixel data.
    public static native ByteBuffer jniStoreBitmapData(Bitmap bitmap);
    public static native void jniFreeBitmapData(ByteBuffer handler);
    public static native Bitmap jniGetBitmapFromStoredBitmapData(ByteBuffer handler);
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/core/camera/CameraEngine.java
================================================
package com.zero.magicshow.core.camera;
import android.app.Activity;
import android.graphics.PixelFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.Size;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceView;
import com.zero.magicshow.common.config.CameraConfig;
import com.zero.magicshow.common.utils.CameraParamUtil;
import com.zero.magicshow.core.camera.utils.CameraUtils;
import java.io.IOException;
import java.util.List;
import static android.hardware.Camera.Parameters.WHITE_BALANCE_AUTO;
public class CameraEngine {
private static Camera camera = null; // shared camera instance; null while closed
public static int cameraID = 0; // id passed to Camera.open; switchCamera toggles it between 0 and 1
private static SurfaceTexture surfaceTexture; // preview target reused by switchCamera — presumably set by startPreview; confirm
private static SurfaceView surfaceView;
/** @return the currently opened camera, or null when none is open. */
public static Camera getCamera() {
    return camera;
}
/**
 * Opens the camera identified by {@link #cameraID} if none is open yet.
 *
 * @return true when a camera was opened by this call; false when one was
 *         already open or opening failed
 */
public static boolean openCamera() {
    if (camera != null) {
        return false;
    }
    try {
        camera = Camera.open(cameraID);
        setDefaultParameters();
        return true;
    } catch (RuntimeException e) {
        return false;
    }
}
/**
 * Opens the camera with the given id if none is open yet and remembers the
 * id on success.
 *
 * @return true when a camera was opened by this call; false when one was
 *         already open or opening failed
 */
public static boolean openCamera(int id) {
    if (camera != null) {
        return false;
    }
    try {
        camera = Camera.open(id);
        cameraID = id;
        setDefaultParameters();
        return true;
    } catch (RuntimeException e) {
        return false;
    }
}
/**
 * Stops the preview and releases the camera.
 *
 * @param isDestory when true, also resets the remembered camera id to 0
 *                  (back camera) for the next open
 */
public static void releaseCamera(boolean isDestory) {
    if (camera == null) {
        return;
    }
    camera.setPreviewCallback(null);
    camera.stopPreview();
    camera.release();
    camera = null;
    if (isDestory) {
        cameraID = 0;
    }
}
/** Re-opens the camera using the current {@link #cameraID}. */
public void resumeCamera() {
    openCamera();
}
/** Pushes the given parameters to the open camera (camera must be open). */
public void setParameters(Parameters parameters) {
    camera.setParameters(parameters);
}
/**
 * Returns the parameters of the currently opened camera.
 * FIX: the result of camera.getParameters() was discarded and the method
 * always returned null.
 *
 * @return the current camera parameters, or null when no camera is open
 */
public Parameters getParameters() {
    if (camera != null) {
        return camera.getParameters();
    }
    return null;
}
/** Releases the current camera, opens the opposite one and resumes preview. */
public static void switchCamera() {
    releaseCamera(false);
    cameraID = (cameraID == 0) ? 1 : 0;
    openCamera(cameraID);
    startPreview(surfaceTexture);
}
/**
 * Applies the project's default capture configuration to the open camera:
 * a supported focus mode, largest preview size, configured picture size,
 * JPEG output, auto white balance, +1 exposure compensation, auto
 * scene/antibanding modes, auto-focus, and the display orientation.
 */
private static void setDefaultParameters(){
    Parameters parameters = camera.getParameters();
//        if (parameters.getSupportedFocusModes().contains(
//                Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
//            parameters.setFocusMode(Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
//        }
    // Pick a focus mode the hardware actually supports: prefer auto focus,
    // fall back to continuous video focus.
    List focusModesList = parameters.getSupportedFocusModes();
    if (CameraParamUtil.getInstance().isSupportedFocusMode(focusModesList, Camera.Parameters.FOCUS_MODE_AUTO)) {
        parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
    }else if(CameraParamUtil.getInstance().isSupportedFocusMode(focusModesList, Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)){
        parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
    }
    Size previewSize = CameraUtils.getLargePreviewSize(camera);
    parameters.setPreviewSize(previewSize.width, previewSize.height);
//        Size pictureSize = CameraUtils.getLargePictureSize(camera);
    parameters.setPictureSize(CameraConfig.pictureWidth, CameraConfig.pictureHeight);
//        parameters.setRotation(cameraID == 0 ? 90 : 270);
    parameters.setPictureFormat(PixelFormat.JPEG);// store captured stills as JPEG
    parameters.setWhiteBalance(WHITE_BALANCE_AUTO);
    // +1 exposure compensation — original author's note: target venues (bars)
    // are dark, so brighten the exposure.
    parameters.setExposureCompensation(1);
    parameters.setSceneMode(Camera.Parameters.SCENE_MODE_AUTO);
    parameters.setAntibanding(Camera.Parameters.ANTIBANDING_AUTO);
    camera.setParameters(parameters);
    camera.autoFocus(autoFocusCallback);
    // Compute the display orientation; degrees is hard-coded to 90, which
    // assumes a portrait-locked activity (see the commented call below).
    Camera.CameraInfo info = new Camera.CameraInfo();
    Camera.getCameraInfo(cameraID, info);
    int degrees = 90;//getDisplayRotation(activity);
    int result;
    if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
        result = (info.orientation + degrees) % 360;
        result = (360 - result) % 360; // compensate the mirror
    } else { // back-facing
        result = (info.orientation - degrees + 360) % 360;
    }
    camera.setDisplayOrientation(result);
}
public static int getDisplayRotation(Activity activity) {
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
switch (rotation) {
case Surface.ROTATION_0: return 0;
case Surface.ROTATION_90: return 90;
case Surface.ROTATION_180: return 180;
case Surface.ROTATION_270: return 270;
}
return 0;
}
private static Camera.AutoFocusCallback autoFocusCallback = new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success, Camera camera) {
if (success) {
camera.cancelAutoFocus();
onFocusEnd();
}
}
};
public static void onFocusEnd() {
// mFoucsView.setVisibility(INVISIBLE);
}
private static Size getPreviewSize(){
return camera.getParameters().getPreviewSize();
}
private static Size getPictureSize(){
return camera.getParameters().getPictureSize();
}
public static void startPreview(SurfaceTexture surfaceTexture){
if(camera != null)
try {
camera.setPreviewTexture(surfaceTexture);
CameraEngine.surfaceTexture = surfaceTexture;
camera.startPreview();
} catch (IOException e) {
e.printStackTrace();
}
}
public static void startPreview(){
if(camera != null)
camera.startPreview();
}
public static void stopPreview(){
camera.stopPreview();
}
public static void setRotation(int rotation){
Camera.Parameters params = camera.getParameters();
params.setRotation(rotation);
camera.setParameters(params);
}
public static void takePicture(Camera.ShutterCallback shutterCallback, Camera.PictureCallback rawCallback,
Camera.PictureCallback jpegCallback){
camera.takePicture(shutterCallback, rawCallback, jpegCallback);
}
public static com.zero.magicshow.core.camera.utils.CameraInfo getCameraInfo(){
if(null == camera){
return null;
}
com.zero.magicshow.core.camera.utils.CameraInfo info = new com.zero.magicshow.core.camera.utils.CameraInfo();
Size size = getPreviewSize();
CameraInfo cameraInfo = new CameraInfo();
Camera.getCameraInfo(cameraID, cameraInfo);
info.previewWidth = size.width;
info.previewHeight = size.height;
info.orientation = cameraInfo.orientation;
info.isFront = cameraID == 1 ? true : false;
size = getPictureSize();
info.pictureWidth = CameraConfig.pictureWidth;
info.pictureHeight = CameraConfig.pictureHeight;
Log.e("HongLi","size.width:" + size.width + ";size.height:" + size.height +
";info.previewWidth:" + info.previewWidth + ";info.previewHeight:" + info.previewHeight + ";isFront:" + info.isFront);
return info;
}
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/core/camera/utils/CameraInfo.java
================================================
package com.zero.magicshow.core.camera.utils;
/**
 * Plain value holder describing the active camera's configuration, filled in
 * by {@code CameraEngine.getCameraInfo()}. All fields are public and mutable.
 *
 * Created by why8222 on 2016/2/25.
 */
public class CameraInfo {
    // Preview frame dimensions in pixels, from Camera.Parameters.getPreviewSize().
    public int previewWidth;
    public int previewHeight;
    // Sensor orientation in degrees, copied from android.hardware.Camera.CameraInfo.
    public int orientation;
    // True when the front-facing camera (camera id 1) is active.
    public boolean isFront;
    // Capture dimensions in pixels, taken from CameraConfig.
    public int pictureWidth;
    public int pictureHeight;
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/core/camera/utils/CameraUtils.java
================================================
package com.zero.magicshow.core.camera.utils;
import android.hardware.Camera;
import java.util.List;
/**
* Created by why8222 on 2016/2/25.
*/
/**
 * Helpers for choosing picture/preview sizes from a camera's supported lists.
 */
public class CameraUtils {
    /**
     * Returns the supported picture size with the largest width whose aspect
     * ratio (height / width) lies strictly between 0.5 and 0.6 — i.e. roughly
     * 16:9 frames. Falls back to the first reported size when no later entry
     * qualifies.
     *
     * @param camera an open camera to query; may be null
     * @return the selected size, or null when {@code camera} is null
     */
    public static Camera.Size getLargePictureSize(Camera camera){
        if(camera == null){
            return null;
        }
        // Typed generics replace the raw List used previously.
        List<Camera.Size> sizes = camera.getParameters().getSupportedPictureSizes();
        Camera.Size best = sizes.get(0);
        for(int i = 1; i < sizes.size(); i++){
            Camera.Size candidate = sizes.get(i);
            float scale = (float) candidate.height / candidate.width;
            // Prefer wider frames, but only within the ~16:9 aspect window.
            if(best.width < candidate.width && scale < 0.6f && scale > 0.5f){
                best = candidate;
            }
        }
        return best;
    }

    /**
     * Returns the supported preview size with the largest width.
     *
     * @param camera an open camera to query; may be null
     * @return the widest preview size, or null when {@code camera} is null
     */
    public static Camera.Size getLargePreviewSize(Camera camera){
        if(camera == null){
            return null;
        }
        List<Camera.Size> sizes = camera.getParameters().getSupportedPreviewSizes();
        Camera.Size best = sizes.get(0);
        for(int i = 1; i < sizes.size(); i++){
            if(best.width < sizes.get(i).width){
                best = sizes.get(i);
            }
        }
        return best;
    }
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/core/encoder/gles/EglCore.java
================================================
/*
* Copyright 2013 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.zero.magicshow.core.encoder.gles;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.util.Log;
import android.view.Surface;
/**
 * Core EGL state (display, context, config).
 *
 * The EGLContext must only be attached to one thread at a time. This class is not thread-safe.
 * (Origin: Google Grafika reference code, Apache 2.0.)
 */
public final class EglCore {
    private static final String TAG = "EglCore";
    /**
     * Constructor flag: surface must be recordable. This discourages EGL from using a
     * pixel format that cannot be converted efficiently to something usable by the video
     * encoder.
     */
    public static final int FLAG_RECORDABLE = 0x01;
    /**
     * Constructor flag: ask for GLES3, fall back to GLES2 if not available. Without this
     * flag, GLES2 is used.
     */
    public static final int FLAG_TRY_GLES3 = 0x02;
    // Android-specific extension (EGL_RECORDABLE_ANDROID attribute token).
    private static final int EGL_RECORDABLE_ANDROID = 0x3142;
    private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
    private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
    private EGLConfig mEGLConfig = null;
    // GLES version actually obtained (2 or 3); -1 until a context exists.
    private int mGlVersion = -1;
    /**
     * Prepares EGL display and context.
     *
     * Equivalent to EglCore(null, 0).
     */
    public EglCore() {
        this(null, 0);
    }
    /**
     * Prepares EGL display and context.
     *
     * @param sharedContext The context to share, or null if sharing is not desired.
     * @param flags Configuration bit flags, e.g. FLAG_RECORDABLE.
     */
    public EglCore(EGLContext sharedContext, int flags) {
        if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
            throw new RuntimeException("EGL already set up");
        }
        if (sharedContext == null) {
            sharedContext = EGL14.EGL_NO_CONTEXT;
        }
        mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
        if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
            throw new RuntimeException("unable to get EGL14 display");
        }
        int[] version = new int[2];
        if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
            mEGLDisplay = null;
            throw new RuntimeException("unable to initialize EGL14");
        }
        // Try to get a GLES3 context, if requested.
        if ((flags & FLAG_TRY_GLES3) != 0) {
            //Log.d(TAG, "Trying GLES 3");
            EGLConfig config = getConfig(flags, 3);
            if (config != null) {
                int[] attrib3_list = {
                        EGL14.EGL_CONTEXT_CLIENT_VERSION, 3,
                        EGL14.EGL_NONE
                };
                EGLContext context = EGL14.eglCreateContext(mEGLDisplay, config, sharedContext,
                        attrib3_list, 0);
                // No exception on failure here: if GLES3 context creation fails we
                // deliberately fall through to the GLES2 path below.
                if (EGL14.eglGetError() == EGL14.EGL_SUCCESS) {
                    //Log.d(TAG, "Got GLES 3 config");
                    mEGLConfig = config;
                    mEGLContext = context;
                    mGlVersion = 3;
                }
            }
        }
        if (mEGLContext == EGL14.EGL_NO_CONTEXT) { // GLES 2 only, or GLES 3 attempt failed
            //Log.d(TAG, "Trying GLES 2");
            EGLConfig config = getConfig(flags, 2);
            if (config == null) {
                throw new RuntimeException("Unable to find a suitable EGLConfig");
            }
            int[] attrib2_list = {
                    EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                    EGL14.EGL_NONE
            };
            EGLContext context = EGL14.eglCreateContext(mEGLDisplay, config, sharedContext,
                    attrib2_list, 0);
            checkEglError("eglCreateContext");
            mEGLConfig = config;
            mEGLContext = context;
            mGlVersion = 2;
        }
        // Confirm with query.
        int[] values = new int[1];
        EGL14.eglQueryContext(mEGLDisplay, mEGLContext, EGL14.EGL_CONTEXT_CLIENT_VERSION,
                values, 0);
        Log.d(TAG, "EGLContext created, client version " + values[0]);
    }
    /**
     * Finds a suitable EGLConfig.
     *
     * @param flags Bit flags from constructor.
     * @param version Must be 2 or 3.
     * @return a matching config, or null if none was found.
     */
    private EGLConfig getConfig(int flags, int version) {
        int renderableType = EGL14.EGL_OPENGL_ES2_BIT;
        if (version >= 3) {
            renderableType |= EGLExt.EGL_OPENGL_ES3_BIT_KHR;
        }
        // The actual surface is generally RGBA or RGBX, so situationally omitting alpha
        // doesn't really help. It can also lead to a huge performance hit on glReadPixels()
        // when reading into a GL_RGBA buffer.
        int[] attribList = {
                EGL14.EGL_RED_SIZE, 8,
                EGL14.EGL_GREEN_SIZE, 8,
                EGL14.EGL_BLUE_SIZE, 8,
                EGL14.EGL_ALPHA_SIZE, 8,
                //EGL14.EGL_DEPTH_SIZE, 16,
                //EGL14.EGL_STENCIL_SIZE, 8,
                EGL14.EGL_RENDERABLE_TYPE, renderableType,
                EGL14.EGL_NONE, 0, // placeholder for recordable [@-3]
                EGL14.EGL_NONE
        };
        if ((flags & FLAG_RECORDABLE) != 0) {
            // Overwrite the placeholder pair with EGL_RECORDABLE_ANDROID = 1.
            attribList[attribList.length - 3] = EGL_RECORDABLE_ANDROID;
            attribList[attribList.length - 2] = 1;
        }
        EGLConfig[] configs = new EGLConfig[1];
        int[] numConfigs = new int[1];
        if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
                numConfigs, 0)) {
            Log.w(TAG, "unable to find RGB8888 / " + version + " EGLConfig");
            return null;
        }
        // NOTE(review): if eglChooseConfig succeeds but matches zero configs,
        // configs[0] stays null; callers null-check the result, so this is benign.
        return configs[0];
    }
    /**
     * Discards all resources held by this class, notably the EGL context. This must be
     * called from the thread where the context was created.
     *
     * On completion, no context will be current.
     */
    public void release() {
        if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
            // Android is unusual in that it uses a reference-counted EGLDisplay. So for
            // every eglInitialize() we need an eglTerminate().
            EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
                    EGL14.EGL_NO_CONTEXT);
            EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
            EGL14.eglReleaseThread();
            EGL14.eglTerminate(mEGLDisplay);
        }
        mEGLDisplay = EGL14.EGL_NO_DISPLAY;
        mEGLContext = EGL14.EGL_NO_CONTEXT;
        mEGLConfig = null;
    }
    @Override
    protected void finalize() throws Throwable {
        try {
            if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
                // We're limited here -- finalizers don't run on the thread that holds
                // the EGL state, so if a surface or context is still current on another
                // thread we can't fully release it here. Exceptions thrown from here
                // are quietly discarded. Complain in the log file.
                Log.w(TAG, "WARNING: EglCore was not explicitly released -- state may be leaked");
                release();
            }
        } finally {
            super.finalize();
        }
    }
    /**
     * Destroys the specified surface. Note the EGLSurface won't actually be destroyed if it's
     * still current in a context.
     */
    public void releaseSurface(EGLSurface eglSurface) {
        EGL14.eglDestroySurface(mEGLDisplay, eglSurface);
    }
    /**
     * Creates an EGL surface associated with a Surface.
     *
     * If this is destined for MediaCodec, the EGLConfig should have the "recordable" attribute.
     *
     * @param surface a Surface or SurfaceTexture; anything else is rejected.
     */
    public EGLSurface createWindowSurface(Object surface) {
        if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
            throw new RuntimeException("invalid surface: " + surface);
        }
        // Create a window surface, and attach it to the Surface we received.
        int[] surfaceAttribs = {
                EGL14.EGL_NONE
        };
        EGLSurface eglSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, mEGLConfig, surface,
                surfaceAttribs, 0);
        checkEglError("eglCreateWindowSurface");
        if (eglSurface == null) {
            throw new RuntimeException("surface was null");
        }
        return eglSurface;
    }
    /**
     * Creates an EGL surface associated with an offscreen buffer.
     */
    public EGLSurface createOffscreenSurface(int width, int height) {
        int[] surfaceAttribs = {
                EGL14.EGL_WIDTH, width,
                EGL14.EGL_HEIGHT, height,
                EGL14.EGL_NONE
        };
        EGLSurface eglSurface = EGL14.eglCreatePbufferSurface(mEGLDisplay, mEGLConfig,
                surfaceAttribs, 0);
        checkEglError("eglCreatePbufferSurface");
        if (eglSurface == null) {
            throw new RuntimeException("surface was null");
        }
        return eglSurface;
    }
    /**
     * Makes our EGL context current, using the supplied surface for both "draw" and "read".
     */
    public void makeCurrent(EGLSurface eglSurface) {
        if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
            // called makeCurrent() before create?
            Log.d(TAG, "NOTE: makeCurrent w/o display");
        }
        if (!EGL14.eglMakeCurrent(mEGLDisplay, eglSurface, eglSurface, mEGLContext)) {
            throw new RuntimeException("eglMakeCurrent failed");
        }
    }
    /**
     * Makes our EGL context current, using the supplied "draw" and "read" surfaces.
     */
    public void makeCurrent(EGLSurface drawSurface, EGLSurface readSurface) {
        if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
            // called makeCurrent() before create?
            Log.d(TAG, "NOTE: makeCurrent w/o display");
        }
        if (!EGL14.eglMakeCurrent(mEGLDisplay, drawSurface, readSurface, mEGLContext)) {
            throw new RuntimeException("eglMakeCurrent(draw,read) failed");
        }
    }
    /**
     * Makes no context current.
     */
    public void makeNothingCurrent() {
        if (!EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
                EGL14.EGL_NO_CONTEXT)) {
            throw new RuntimeException("eglMakeCurrent failed");
        }
    }
    /**
     * Calls eglSwapBuffers. Use this to "publish" the current frame.
     *
     * @return false on failure
     */
    public boolean swapBuffers(EGLSurface eglSurface) {
        return EGL14.eglSwapBuffers(mEGLDisplay, eglSurface);
    }
    /**
     * Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
     */
    public void setPresentationTime(EGLSurface eglSurface, long nsecs) {
        EGLExt.eglPresentationTimeANDROID(mEGLDisplay, eglSurface, nsecs);
    }
    /**
     * Returns true if our context and the specified surface are current.
     */
    public boolean isCurrent(EGLSurface eglSurface) {
        return mEGLContext.equals(EGL14.eglGetCurrentContext()) &&
                eglSurface.equals(EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW));
    }
    /**
     * Performs a simple surface query.
     */
    public int querySurface(EGLSurface eglSurface, int what) {
        int[] value = new int[1];
        EGL14.eglQuerySurface(mEGLDisplay, eglSurface, what, value, 0);
        return value[0];
    }
    /**
     * Queries a string value.
     */
    public String queryString(int what) {
        return EGL14.eglQueryString(mEGLDisplay, what);
    }
    /**
     * Returns the GLES version this context is configured for (currently 2 or 3).
     */
    public int getGlVersion() {
        return mGlVersion;
    }
    /**
     * Writes the current display, context, and surface to the log.
     */
    public static void logCurrent(String msg) {
        EGLDisplay display;
        EGLContext context;
        EGLSurface surface;
        display = EGL14.eglGetCurrentDisplay();
        context = EGL14.eglGetCurrentContext();
        surface = EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW);
        Log.i(TAG, "Current EGL (" + msg + "): display=" + display + ", context=" + context +
                ", surface=" + surface);
    }
    /**
     * Checks for EGL errors. Throws an exception if an error has been raised.
     */
    private void checkEglError(String msg) {
        int error;
        if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
            throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
        }
    }
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/core/encoder/gles/EglSurfaceBase.java
================================================
/*
* Copyright 2013 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.zero.magicshow.core.encoder.gles;
import android.graphics.Bitmap;
import android.opengl.EGL14;
import android.opengl.EGLSurface;
import android.opengl.GLES20;
import android.util.Log;
import com.zero.magicshow.common.utils.OpenGlUtils;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.IntBuffer;
/**
 * Common base class for EGL surfaces.
 *
 * There can be multiple surfaces associated with a single context.
 * (Origin: Google Grafika reference code, Apache 2.0.)
 */
public class EglSurfaceBase {
    protected static final String TAG = "EglSurfaceBase";
    // EglCore object we're associated with. It may be associated with multiple surfaces.
    protected EglCore mEglCore;
    private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE;
    // Cached dimensions; -1 means "query the surface" (window surfaces can resize).
    protected int mWidth = -1;
    protected int mHeight = -1;

    protected EglSurfaceBase(EglCore eglCore) {
        mEglCore = eglCore;
    }

    /**
     * Creates a window surface.
     *
     * @param surface May be a Surface or SurfaceTexture.
     * @throws IllegalStateException if a surface was already created.
     */
    public void createWindowSurface(Object surface) {
        if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
            throw new IllegalStateException("surface already created");
        }
        mEGLSurface = mEglCore.createWindowSurface(surface);
        // Don't cache width/height here, because the size of the underlying surface can change
        // out from under us (see e.g. HardwareScalerActivity).
        //mWidth = mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH);
        //mHeight = mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT);
    }

    /**
     * Creates an off-screen surface.
     *
     * @throws IllegalStateException if a surface was already created.
     */
    public void createOffscreenSurface(int width, int height) {
        if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
            throw new IllegalStateException("surface already created");
        }
        mEGLSurface = mEglCore.createOffscreenSurface(width, height);
        mWidth = width;
        mHeight = height;
    }

    /**
     * Returns the surface's width, in pixels.
     *
     * If this is called on a window surface, and the underlying surface is in the process
     * of changing size, we may not see the new size right away (e.g. in the "surfaceChanged"
     * callback). The size should match after the next buffer swap.
     */
    public int getWidth() {
        if (mWidth < 0) {
            return mEglCore.querySurface(mEGLSurface, EGL14.EGL_WIDTH);
        } else {
            return mWidth;
        }
    }

    /**
     * Returns the surface's height, in pixels.
     */
    public int getHeight() {
        if (mHeight < 0) {
            return mEglCore.querySurface(mEGLSurface, EGL14.EGL_HEIGHT);
        } else {
            return mHeight;
        }
    }

    /**
     * Release the EGL surface and reset the cached dimensions.
     */
    public void releaseEglSurface() {
        mEglCore.releaseSurface(mEGLSurface);
        mEGLSurface = EGL14.EGL_NO_SURFACE;
        mWidth = mHeight = -1;
    }

    /**
     * Makes our EGL context and surface current.
     */
    public void makeCurrent() {
        mEglCore.makeCurrent(mEGLSurface);
    }

    /**
     * Makes our EGL context and surface current for drawing, using the supplied surface
     * for reading.
     */
    public void makeCurrentReadFrom(EglSurfaceBase readSurface) {
        mEglCore.makeCurrent(mEGLSurface, readSurface.mEGLSurface);
    }

    /**
     * Calls eglSwapBuffers. Use this to "publish" the current frame.
     *
     * @return false on failure
     */
    public boolean swapBuffers() {
        boolean result = mEglCore.swapBuffers(mEGLSurface);
        if (!result) {
            Log.d(TAG, "WARNING: swapBuffers() failed");
        }
        return result;
    }

    /**
     * Sends the presentation time stamp to EGL.
     *
     * @param nsecs Timestamp, in nanoseconds.
     */
    public void setPresentationTime(long nsecs) {
        mEglCore.setPresentationTime(mEGLSurface, nsecs);
    }

    /**
     * Saves the EGL surface to a PNG file.
     *
     * Expects that this object's EGL surface is current.
     *
     * @throws IOException on file I/O failure.
     * @throws RuntimeException if this surface is not current.
     */
    public void saveFrame(File file) throws IOException {
        if (!mEglCore.isCurrent(mEGLSurface)) {
            throw new RuntimeException("Expected EGL context/surface is not current");
        }
        // glReadPixels fills in a "direct" ByteBuffer with what is essentially big-endian RGBA
        // data (i.e. a byte of red, followed by a byte of green...). While the Bitmap
        // constructor that takes an int[] wants little-endian ARGB (blue/red swapped), the
        // Bitmap "copy pixels" method wants the same format GL provides.
        //
        // Ideally we'd have some way to re-use the ByteBuffer, especially if we're calling
        // here often.
        //
        // Making this even more interesting is the upside-down nature of GL, which means
        // our output will look upside down relative to what appears on screen if the
        // typical GL conventions are used.
        String filename = file.toString();
        int width = getWidth();
        int height = getHeight();
        IntBuffer ib = IntBuffer.allocate(width * height);
        GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, ib);
        OpenGlUtils.checkGlError("glReadPixels");
        // try-with-resources replaces the manual null-check/finally-close of the
        // original and guarantees the stream is closed even if compression throws.
        try (BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(filename))) {
            Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
            bmp.copyPixelsFromBuffer(IntBuffer.wrap(ib.array()));
            bmp.compress(Bitmap.CompressFormat.PNG, 90, bos);
            bmp.recycle();
        }
        Log.d(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'");
    }
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/core/encoder/video/TextureMovieEncoder.java
================================================
/*
* Copyright 2013 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.zero.magicshow.core.encoder.video;
import java.io.File;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.FloatBuffer;
import android.graphics.SurfaceTexture;
import android.opengl.EGLContext;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.util.Log;
import com.zero.magicshow.core.camera.utils.CameraInfo;
import com.zero.magicshow.core.filter.base.MagicCameraInputFilter;
import com.zero.magicshow.core.filter.base.gpuimage.GPUImageFilter;
import com.zero.magicshow.core.filter.utils.MagicFilterFactory;
import com.zero.magicshow.core.filter.utils.MagicFilterType;
import com.zero.magicshow.core.encoder.gles.EglCore;
/**
* Encode a movie from frames rendered from an external texture image.
*
* The object wraps an encoder running on a dedicated thread. The various control messages
* may be sent from arbitrary threads (typically the app UI thread). The encoder thread
* manages both sides of the encoder (feeding and draining); the only external input is
* the GL texture.
*
* The design is complicated slightly by the need to create an EGL context that shares state
* with a view that gets restarted if (say) the device orientation changes. When the view
* in question is a GLSurfaceView, we don't have full control over the EGL context creation
* on that side, so we have to bend a bit backwards here.
*
* To use:
*
*
create TextureMovieEncoder object
*
create an EncoderConfig
*
call TextureMovieEncoder#startRecording() with the config
*
call TextureMovieEncoder#setTextureId() with the texture object that receives frames
*
for each frame, after latching it with SurfaceTexture#updateTexImage(),
* call TextureMovieEncoder#frameAvailable().
*
*
* TODO: tweak the API (esp. textureId) so it's less awkward for simple use cases.
*/
public class TextureMovieEncoder implements Runnable {
private static final String TAG = "";
private static final boolean VERBOSE = false;
private static final int MSG_START_RECORDING = 0;
private static final int MSG_STOP_RECORDING = 1;
private static final int MSG_FRAME_AVAILABLE = 2;
private static final int MSG_SET_TEXTURE_ID = 3;
private static final int MSG_UPDATE_SHARED_CONTEXT = 4;
private static final int MSG_QUIT = 5;
// ----- accessed exclusively by encoder thread -----
private WindowSurface mInputWindowSurface;
private EglCore mEglCore;
private MagicCameraInputFilter mInput;
private int mTextureId;
private VideoEncoderCore mVideoEncoder;
// ----- accessed by multiple threads -----
private volatile EncoderHandler mHandler;
private Object mReadyFence = new Object(); // guards ready/running
private boolean mReady;
private boolean mRunning;
private GPUImageFilter filter;
private FloatBuffer gLCubeBuffer;
private FloatBuffer gLTextureBuffer;
public TextureMovieEncoder() {
}
/**
* Encoder configuration.
*
* Object is immutable, which means we can safely pass it between threads without
* explicit synchronization (and don't need to worry about it getting tweaked out from
* under us).
*
* TODO: make frame rate and iframe interval configurable? Maybe use builder pattern
* with reasonable defaults for those and bit rate.
*/
public static class EncoderConfig {
final File mOutputFile;
final int mWidth;
final int mHeight;
final int mBitRate;
final EGLContext mEglContext;
public EncoderConfig(File outputFile, int width, int height, int bitRate,
EGLContext sharedEglContext, CameraInfo info) {
mOutputFile = outputFile;
mWidth = width;
mHeight = height;
mBitRate = bitRate;
mEglContext = sharedEglContext;
}
@Override
public String toString() {
return "EncoderConfig: " + mWidth + "x" + mHeight + " @" + mBitRate +
" to '" + mOutputFile.toString() + "' ctxt=" + mEglContext;
}
}
/**
* Tells the video recorder to start recording. (Call from non-encoder thread.)
*
* Creates a new thread, which will create an encoder using the provided configuration.
*
* Returns after the recorder thread has started and is ready to accept Messages. The
* encoder may not yet be fully configured.
*/
public void startRecording(EncoderConfig config) {
Log.d(TAG, "Encoder: startRecording()");
synchronized (mReadyFence) {
if (mRunning) {
Log.w(TAG, "Encoder thread already running");
return;
}
mRunning = true;
new Thread(this, "TextureMovieEncoder").start();
while (!mReady) {
try {
mReadyFence.wait();
} catch (InterruptedException ie) {
// ignore
}
}
}
mHandler.sendMessage(mHandler.obtainMessage(MSG_START_RECORDING, config));
}
/**
* Tells the video recorder to stop recording. (Call from non-encoder thread.)
*
* Returns immediately; the encoder/muxer may not yet be finished creating the movie.
*
* TODO: have the encoder thread invoke a callback on the UI thread just before it shuts down
* so we can provide reasonable status UI (and let the caller know that movie encoding
* has completed).
*/
public void stopRecording() {
mHandler.sendMessage(mHandler.obtainMessage(MSG_STOP_RECORDING));
mHandler.sendMessage(mHandler.obtainMessage(MSG_QUIT));
// We don't know when these will actually finish (or even start). We don't want to
// delay the UI thread though, so we return immediately.
}
/**
* Returns true if recording has been started.
*/
public boolean isRecording() {
synchronized (mReadyFence) {
return mRunning;
}
}
/**
* Tells the video recorder to refresh its EGL surface. (Call from non-encoder thread.)
*/
public void updateSharedContext(EGLContext sharedContext) {
mHandler.sendMessage(mHandler.obtainMessage(MSG_UPDATE_SHARED_CONTEXT, sharedContext));
}
/**
* Tells the video recorder that a new frame is available. (Call from non-encoder thread.)
*
* This function sends a message and returns immediately. This isn't sufficient -- we
* don't want the caller to latch a new frame until we're done with this one -- but we
* can get away with it so long as the input frame rate is reasonable and the encoder
* thread doesn't stall.
*
* TODO: either block here until the texture has been rendered onto the encoder surface,
* or have a separate "block if still busy" method that the caller can execute immediately
* before it calls updateTexImage(). The latter is preferred because we don't want to
* stall the caller while this thread does work.
*/
public void frameAvailable(SurfaceTexture st) {
synchronized (mReadyFence) {
if (!mReady) {
return;
}
}
float[] transform = new float[16]; // TODO - avoid alloc every frame
st.getTransformMatrix(transform);
long timestamp = st.getTimestamp();
if (timestamp == 0) {
// Seeing this after device is toggled off/on with power button. The
// first frame back has a zero timestamp.
//
// MPEG4Writer thinks this is cause to abort() in native code, so it's very
// important that we just ignore the frame.
Log.w(TAG, "HEY: got SurfaceTexture with timestamp of zero");
return;
}
mHandler.sendMessage(mHandler.obtainMessage(MSG_FRAME_AVAILABLE,
(int) (timestamp >> 32), (int) timestamp, transform));
}
/**
* Tells the video recorder what texture name to use. This is the external texture that
* we're receiving camera previews in. (Call from non-encoder thread.)
*
* TODO: do something less clumsy
*/
public void setTextureId(int id) {
synchronized (mReadyFence) {
if (!mReady) {
return;
}
}
mHandler.sendMessage(mHandler.obtainMessage(MSG_SET_TEXTURE_ID, id, 0, null));
}
/**
* Encoder thread entry point. Establishes Looper/Handler and waits for messages.
*
* @see Thread#run()
*/
@Override
public void run() {
// Establish a Looper for this thread, and define a Handler for it.
Looper.prepare();
synchronized (mReadyFence) {
mHandler = new EncoderHandler(this);
mReady = true;
mReadyFence.notify();
}
Looper.loop();
Log.d(TAG, "Encoder thread exiting");
synchronized (mReadyFence) {
mReady = mRunning = false;
mHandler = null;
}
}
/**
* Handles encoder state change requests. The handler is created on the encoder thread.
*/
private static class EncoderHandler extends Handler {
private WeakReference mWeakEncoder;
public EncoderHandler(TextureMovieEncoder encoder) {
mWeakEncoder = new WeakReference(encoder);
}
@Override // runs on encoder thread
public void handleMessage(Message inputMessage) {
int what = inputMessage.what;
Object obj = inputMessage.obj;
TextureMovieEncoder encoder = mWeakEncoder.get();
if (encoder == null) {
Log.w(TAG, "EncoderHandler.handleMessage: encoder is null");
return;
}
switch (what) {
case MSG_START_RECORDING:
encoder.handleStartRecording((EncoderConfig) obj);
break;
case MSG_STOP_RECORDING:
encoder.handleStopRecording();
break;
case MSG_FRAME_AVAILABLE:
long timestamp = (((long) inputMessage.arg1) << 32) |
(((long) inputMessage.arg2) & 0xffffffffL);
encoder.handleFrameAvailable((float[]) obj, timestamp);
break;
case MSG_SET_TEXTURE_ID:
encoder.handleSetTexture(inputMessage.arg1);
break;
case MSG_UPDATE_SHARED_CONTEXT:
encoder.handleUpdateSharedContext((EGLContext) inputMessage.obj);
break;
case MSG_QUIT:
Looper.myLooper().quit();
break;
default:
throw new RuntimeException("Unhandled msg what=" + what);
}
}
}
/**
* Starts recording.
*/
private void handleStartRecording(EncoderConfig config) {
Log.d(TAG, "handleStartRecording " + config);
prepareEncoder(config.mEglContext, config.mWidth, config.mHeight, config.mBitRate,
config.mOutputFile);
}
/**
 * Handles notification of an available frame: renders the camera texture into the
 * encoder's input surface and submits it with the given timestamp. Runs on the
 * encoder thread in response to MSG_FRAME_AVAILABLE.
 *
 * @param transform The texture transform, from SurfaceTexture.
 * @param timestampNanos The frame's timestamp, from SurfaceTexture.
 */
private void handleFrameAvailable(float[] transform, long timestampNanos) {
    if (VERBOSE) Log.d(TAG, "handleFrameAvailable tr=" + transform);
    // Pull any pending output from the codec before feeding another frame,
    // so the producer side does not back up.
    mVideoEncoder.drainEncoder(false);
    mInput.setTextureTransformMatrix(transform);
    // Draw through the active filter when one is set, otherwise through the
    // plain camera-input filter.
    if (filter != null) {
        filter.onDrawFrame(mTextureId, gLCubeBuffer, gLTextureBuffer);
    } else {
        mInput.onDrawFrame(mTextureId, gLCubeBuffer, gLTextureBuffer);
    }
    // Timestamp must be set before swapBuffers() hands the frame to the codec.
    mInputWindowSurface.setPresentationTime(timestampNanos);
    mInputWindowSurface.swapBuffers();
}
/**
 * Handles a request to stop encoding: drains the codec with an end-of-stream
 * marker, then tears down the codec, muxer and EGL state. Runs on the encoder
 * thread in response to MSG_STOP_RECORDING.
 */
private void handleStopRecording() {
    Log.d(TAG, "handleStopRecording");
    // true = signal EOS and loop until the codec reports end of stream,
    // so the muxer receives every remaining sample before release.
    mVideoEncoder.drainEncoder(true);
    releaseEncoder();
}
/**
 * Sets the texture name that SurfaceTexture will use when frames are received.
 * Called per-frame on the encoder thread; the id is consumed by the next
 * handleFrameAvailable() draw.
 */
private void handleSetTexture(int id) {
    //Log.d(TAG, "handleSetTexture " + id);
    mTextureId = id;
}
/**
 * Tears down the EGL surface and context we've been using to feed the MediaCodec input
 * surface, and replaces it with a new one that shares with the new context.
 *
 * This is useful if the old context we were sharing with went away (maybe a GLSurfaceView
 * that got torn down) and we need to hook up with the new one.
 *
 * NOTE: the teardown/recreate order below is deliberate -- surface before context,
 * and makeCurrent() before any GL program (filter) is created.
 *
 * @param newSharedContext the display-side EGL context to share textures with
 */
private void handleUpdateSharedContext(EGLContext newSharedContext) {
    Log.d(TAG, "handleUpdatedSharedContext " + newSharedContext);
    // Release the EGLSurface and EGLContext.
    mInputWindowSurface.releaseEglSurface();
    mInput.destroy();
    mEglCore.release();
    // Create a new EGLContext and recreate the window surface.
    mEglCore = new EglCore(newSharedContext, EglCore.FLAG_RECORDABLE);
    mInputWindowSurface.recreate(mEglCore);
    mInputWindowSurface.makeCurrent();
    // Create new programs and such for the new context. GL objects from the old
    // context are invalid here, so the filters are rebuilt from scratch.
    mInput = new MagicCameraInputFilter();
    mInput.init();
    filter = MagicFilterFactory.initFilters(type);
    if(filter != null){
        filter.init();
        filter.onInputSizeChanged(mPreviewWidth, mPreviewHeight);
        filter.onDisplaySizeChanged(mVideoWidth, mVideoHeight);
    }
}
/**
 * Creates the video codec core, the recordable EGL context/surface bound to the
 * codec's input surface, and the GL filter chain. Runs on the encoder thread as
 * part of handleStartRecording().
 *
 * @param sharedContext display-side EGL context to share textures with
 * @param width         encoded video width in pixels
 * @param height        encoded video height in pixels
 * @param bitRate       target bit rate in bits per second
 * @param outputFile    destination .mp4 file
 * @throws RuntimeException if the MediaCodec/MediaMuxer setup fails
 */
private void prepareEncoder(EGLContext sharedContext, int width, int height, int bitRate,
                            File outputFile) {
    try {
        mVideoEncoder = new VideoEncoderCore(width, height, bitRate, outputFile);
    } catch (IOException ioe) {
        // Add context (which file failed) while preserving the original cause.
        throw new RuntimeException("Unable to create video encoder for " + outputFile, ioe);
    }
    mVideoWidth = width;
    mVideoHeight = height;
    // EGL setup must precede filter creation: GL programs need a current context.
    mEglCore = new EglCore(sharedContext, EglCore.FLAG_RECORDABLE);
    mInputWindowSurface = new WindowSurface(mEglCore, mVideoEncoder.getInputSurface(), true);
    mInputWindowSurface.makeCurrent();
    mInput = new MagicCameraInputFilter();
    mInput.init();
    filter = MagicFilterFactory.initFilters(type);
    if(filter != null){
        filter.init();
        filter.onInputSizeChanged(mPreviewWidth, mPreviewHeight);
        filter.onDisplaySizeChanged(mVideoWidth, mVideoHeight);
    }
}
/**
 * Releases the codec core, EGL window surface, filters and EGL context.
 * Null-guards and nulls out every resource so the method is idempotent and
 * safe to call after a partially failed prepareEncoder().
 */
private void releaseEncoder() {
    // Fix: the original called mVideoEncoder.release() unconditionally, which
    // NPEs on a double release or when prepareEncoder() threw before assignment.
    if (mVideoEncoder != null) {
        mVideoEncoder.release();
        mVideoEncoder = null;
    }
    if (mInputWindowSurface != null) {
        mInputWindowSurface.release();
        mInputWindowSurface = null;
    }
    if (mInput != null) {
        mInput.destroy();
        mInput = null;
    }
    if (mEglCore != null) {
        mEglCore.release();
        mEglCore = null;
    }
    if(filter != null){
        filter.destroy();
        filter = null;
        // Reset so the next recording starts unfiltered unless setFilter() is called again.
        type = MagicFilterType.NONE;
    }
}
// Filter applied to recorded frames; NONE means the plain camera-input filter is used.
private MagicFilterType type = MagicFilterType.NONE;
/**
 * Selects the filter type for subsequent frames. The GL filter object itself is
 * (re)built on the encoder thread in prepareEncoder()/handleUpdateSharedContext().
 */
public void setFilter(MagicFilterType type) {
    this.type = type;
}
// Camera preview size (filter input) and encoded video size (filter output);
// -1 until set by setPreviewSize()/prepareEncoder().
private int mPreviewWidth = -1;
private int mPreviewHeight = -1;
private int mVideoWidth = -1;
private int mVideoHeight = -1;
/**
 * Records the camera preview dimensions, forwarded to the filter via
 * onInputSizeChanged() when the filter chain is (re)built.
 */
public void setPreviewSize(int width, int height){
    mPreviewWidth = width;
    mPreviewHeight = height;
}
/**
 * Sets the texture-coordinate buffer used when drawing frames into the
 * encoder's input surface.
 */
public void setTextureBuffer(FloatBuffer gLTextureBuffer) {
    this.gLTextureBuffer = gLTextureBuffer;
}
/**
 * Sets the vertex-position buffer used when drawing frames into the
 * encoder's input surface.
 */
public void setCubeBuffer(FloatBuffer gLCubeBuffer) {
    this.gLCubeBuffer = gLCubeBuffer;
}
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/core/encoder/video/VideoEncoderCore.java
================================================
/*
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.zero.magicshow.core.encoder.video;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.util.Log;
import android.view.Surface;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
 * This class wraps up the core components used for surface-input video encoding.
 *
 * Once created, frames are fed to the input surface. Remember to provide the presentation
 * time stamp, and always call drainEncoder() before swapBuffers() to ensure that the
 * producer side doesn't get backed up.
 *
 * This class is not thread-safe, with one exception: it is valid to use the input surface
 * on one thread, and drain the output on a different thread.
 */
public class VideoEncoderCore {
    private static final String TAG = "VideoEncoderCore";
    private static final boolean VERBOSE = false;
    // TODO: these ought to be configurable as well
    private static final String MIME_TYPE = "video/avc";    // H.264 Advanced Video Coding
    private static final int FRAME_RATE = 30;               // 30fps
    private static final int IFRAME_INTERVAL = 5;           // 5 seconds between I-frames
    private Surface mInputSurface;
    private MediaMuxer mMuxer;
    private MediaCodec mEncoder;
    private MediaCodec.BufferInfo mBufferInfo;
    private int mTrackIndex;
    // True once the output format arrived and mMuxer.start() was called; also
    // gates mMuxer.stop() in release() (stop() throws if never started).
    private boolean mMuxerStarted;

    /**
     * Configures encoder and muxer state, and prepares the input Surface.
     *
     * @param width      video width in pixels
     * @param height     video height in pixels
     * @param bitRate    target bit rate in bits per second
     * @param outputFile destination .mp4 file
     * @throws IOException if the codec or muxer cannot be created
     */
    public VideoEncoderCore(int width, int height, int bitRate, File outputFile)
            throws IOException {
        mBufferInfo = new MediaCodec.BufferInfo();
        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
        // Set some properties. Failing to specify some of these can cause the MediaCodec
        // configure() call to throw an unhelpful exception.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        if (VERBOSE) Log.d(TAG, "format: " + format);
        // Create a MediaCodec encoder, and configure it with our format. Get a Surface
        // we can use for input and wrap it with a class that handles the EGL work.
        mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
        mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mInputSurface = mEncoder.createInputSurface();
        mEncoder.start();
        // Create a MediaMuxer. We can't add the video track and start() the muxer here,
        // because our MediaFormat doesn't have the Magic Goodies. These can only be
        // obtained from the encoder after it has started processing data.
        //
        // We're not actually interested in multiplexing audio. We just want to convert
        // the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
        mMuxer = new MediaMuxer(outputFile.toString(),
                MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        mTrackIndex = -1;
        mMuxerStarted = false;
    }

    /**
     * Returns the encoder's input surface.
     */
    public Surface getInputSurface() {
        return mInputSurface;
    }

    /**
     * Releases encoder resources. Safe to call even if no frames were ever encoded.
     */
    public void release() {
        if (VERBOSE) Log.d(TAG, "releasing encoder objects");
        if (mEncoder != null) {
            mEncoder.stop();
            mEncoder.release();
            mEncoder = null;
        }
        if (mMuxer != null) {
            // Fix for the old TODO: MediaMuxer.stop() throws IllegalStateException
            // if start() was never called (i.e. the encoder never delivered an
            // output format, so no data was fed). Only stop a started muxer.
            if (mMuxerStarted) {
                mMuxer.stop();
                mMuxerStarted = false;
            }
            mMuxer.release();
            mMuxer = null;
        }
    }

    /**
     * Extracts all pending data from the encoder and forwards it to the muxer.
     *
     * If endOfStream is not set, this returns when there is no more data to drain. If it
     * is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
     * Calling this with endOfStream set should be done once, right before stopping the muxer.
     *
     * We're just using the muxer to get a .mp4 file (instead of a raw H.264 stream). We're
     * not recording audio.
     *
     * @param endOfStream true to signal end-of-stream and drain until the codec confirms it
     */
    public void drainEncoder(boolean endOfStream) {
        final int TIMEOUT_USEC = 10000;
        if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")");
        if (endOfStream) {
            if (VERBOSE) Log.d(TAG, "sending EOS to encoder");
            mEncoder.signalEndOfInputStream();
        }
        ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
        while (true) {
            int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (!endOfStream) {
                    break;      // out of while
                } else {
                    if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
                }
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // not expected for an encoder
                encoderOutputBuffers = mEncoder.getOutputBuffers();
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // should happen before receiving buffers, and should only happen once
                if (mMuxerStarted) {
                    throw new RuntimeException("format changed twice");
                }
                MediaFormat newFormat = mEncoder.getOutputFormat();
                Log.d(TAG, "encoder output format changed: " + newFormat);
                // now that we have the Magic Goodies, start the muxer
                mTrackIndex = mMuxer.addTrack(newFormat);
                mMuxer.start();
                mMuxerStarted = true;
            } else if (encoderStatus < 0) {
                Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
                        encoderStatus);
                // let's ignore it
            } else {
                ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                if (encodedData == null) {
                    throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
                            " was null");
                }
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    // The codec config data was pulled out and fed to the muxer when we got
                    // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
                    if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                    mBufferInfo.size = 0;
                }
                if (mBufferInfo.size != 0) {
                    if (!mMuxerStarted) {
                        throw new RuntimeException("muxer hasn't started");
                    }
                    // adjust the ByteBuffer values to match BufferInfo (not needed?)
                    encodedData.position(mBufferInfo.offset);
                    encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
                    mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                    if (VERBOSE) {
                        Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer, ts=" +
                                mBufferInfo.presentationTimeUs);
                    }
                }
                mEncoder.releaseOutputBuffer(encoderStatus, false);
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    if (!endOfStream) {
                        Log.w(TAG, "reached end of stream unexpectedly");
                    } else {
                        if (VERBOSE) Log.d(TAG, "end of stream reached");
                    }
                    break;      // out of while
                }
            }
        }
    }
}
================================================
FILE: MagicShow/src/main/java/com/zero/magicshow/core/encoder/video/WindowSurface.java
================================================
/*
* Copyright 2013 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.zero.magicshow.core.encoder.video;
import android.graphics.SurfaceTexture;
import android.view.Surface;
import com.zero.magicshow.core.encoder.gles.EglCore;
import com.zero.magicshow.core.encoder.gles.EglSurfaceBase;
/**
* Recordable EGL window surface.
*
* It's good practice to explicitly release() the surface, preferably from a "finally" block.
*/
public class WindowSurface extends EglSurfaceBase {
private Surface mSurface;
private boolean mReleaseSurface;
/**
 * Associates an EGL surface with the native window surface.
 *
 * Set releaseSurface to true if you want the Surface to be released when release() is
 * called. This is convenient, but can interfere with framework classes that expect to
 * manage the Surface themselves (e.g. if you release a SurfaceView's Surface, the
 * surfaceDestroyed() callback won't fire).
 *
 * @param eglCore        EGL state wrapper providing display/context/config
 * @param surface        native window surface to render into
 * @param releaseSurface whether release() should also release the Surface itself
 */
public WindowSurface(EglCore eglCore, Surface surface, boolean releaseSurface) {
    super(eglCore);
    createWindowSurface(surface);
    // Keep the Surface so recreate() can re-bind it and release() can dispose of it.
    mSurface = surface;
    mReleaseSurface = releaseSurface;
}
/**
 * Associates an EGL surface with the SurfaceTexture.
 *
 * mSurface stays null in this variant, so release() only tears down EGL state;
 * the SurfaceTexture's lifetime is managed by the caller.
 *
 * @param eglCore        EGL state wrapper providing display/context/config
 * @param surfaceTexture texture stream to render into
 */
public WindowSurface(EglCore eglCore, SurfaceTexture surfaceTexture) {
    super(eglCore);
    createWindowSurface(surfaceTexture);
}
/**
 * Releases any resources associated with the EGL surface (and, if configured to do so,
 * with the Surface as well).
 *
 * Does not require that the surface's EGL context be current.
 */
public void release() {
    releaseEglSurface();
    // Detach the Surface first, then dispose of it only when we own its lifetime
    // (mReleaseSurface was set by the constructor).
    Surface surface = mSurface;
    mSurface = null;
    if (surface != null && mReleaseSurface) {
        surface.release();
    }
}
/**
* Recreate the EGLSurface, using the new EglBase. The caller should have already
* freed the old EGLSurface with releaseEglSurface().
*
* This is useful when we want to update the EGLSurface associated with a Surface.
* For example, if we want to share with a different EGLContext, which can only
* be done by tearing down and recreating the context. (That's handled by the caller;
* this just creates a new EGLSurface for the Surface we were handed earlier.)
*