Repository: ksvc/ks265codec
Branch: master
Commit: cd09626e643a
Files: 172
Total size: 178.8 MB
Directory structure:
gitextract_saaj9mjn/
├── 2017.03.12decSpeed_ksc265InffmpegVSopenhevcInffmpeg.xlsx
├── 2017.09.07qy265VSx264(sdk)_ios.xlsm
├── 2017.09.11qy265VSx264(sdk)_android.xlsm
├── 2017.09.13ksc265VSx264(sdk)-x265-vp9-intel265-classA-E&Game_v2613.xlsm
├── Android_demo/
│ ├── .gitignore
│ ├── KSY265CodecDemo/
│ │ ├── .gitignore
│ │ ├── app/
│ │ │ ├── .gitignore
│ │ │ ├── CMakeLists.txt
│ │ │ ├── build.gradle
│ │ │ ├── proguard-rules.pro
│ │ │ └── src/
│ │ │ └── main/
│ │ │ ├── AndroidManifest.xml
│ │ │ ├── java/
│ │ │ │ └── com/
│ │ │ │ ├── ianhanniballake/
│ │ │ │ │ └── localstorage/
│ │ │ │ │ └── LocalStorageProvider.java
│ │ │ │ ├── ipaulpro/
│ │ │ │ │ └── afilechooser/
│ │ │ │ │ ├── FileChooserActivity.java
│ │ │ │ │ ├── FileListAdapter.java
│ │ │ │ │ ├── FileListFragment.java
│ │ │ │ │ ├── FileLoader.java
│ │ │ │ │ └── utils/
│ │ │ │ │ └── FileUtils.java
│ │ │ │ └── ksyun/
│ │ │ │ └── media/
│ │ │ │ └── ksy265codec/
│ │ │ │ └── demo/
│ │ │ │ ├── decoder/
│ │ │ │ │ └── hevdecoder/
│ │ │ │ │ ├── GLPlayView.java
│ │ │ │ │ ├── GLRenderer.java
│ │ │ │ │ └── NativeMediaPlayer.java
│ │ │ │ ├── encoder/
│ │ │ │ │ ├── Encoder.java
│ │ │ │ │ └── EncoderWrapper.java
│ │ │ │ └── ui/
│ │ │ │ ├── BaseFragment.java
│ │ │ │ ├── DecoderFragment.java
│ │ │ │ ├── DecoderSettings.java
│ │ │ │ ├── DecoderSettingsFragment.java
│ │ │ │ ├── EncoderFragment.java
│ │ │ │ ├── EncoderSettings.java
│ │ │ │ ├── EncoderSettingsFragment.java
│ │ │ │ ├── HelpFragment.java
│ │ │ │ ├── KSY265CodecDemoApp.java
│ │ │ │ ├── MainActivity.java
│ │ │ │ ├── MyFragmentAdapter.java
│ │ │ │ ├── ProgressDialogFragment.java
│ │ │ │ └── Settings.java
│ │ │ ├── jni/
│ │ │ │ ├── Android.mk
│ │ │ │ ├── Application.mk
│ │ │ │ ├── decoder/
│ │ │ │ │ ├── Android.mk
│ │ │ │ │ └── jniplayer/
│ │ │ │ │ ├── Android.mk
│ │ │ │ │ ├── Chromium_LICENSE.txt
│ │ │ │ │ ├── gl_renderer.cpp
│ │ │ │ │ ├── gl_renderer.h
│ │ │ │ │ ├── jni_utils.cpp
│ │ │ │ │ ├── jni_utils.h
│ │ │ │ │ ├── jniplayer.cpp
│ │ │ │ │ ├── jniplayer.h
│ │ │ │ │ ├── jniplayer_new.cpp
│ │ │ │ │ ├── yuv2rgb565.cpp
│ │ │ │ │ └── yuv2rgb565.h
│ │ │ │ └── encoder/
│ │ │ │ ├── Android.mk
│ │ │ │ ├── encoderwrapper.c
│ │ │ │ ├── encoderwrapper.h
│ │ │ │ └── log.h
│ │ │ └── res/
│ │ │ ├── drawable/
│ │ │ │ ├── selector_tab_background.xml
│ │ │ │ ├── tab_home_btn.xml
│ │ │ │ └── tab_view_btn.xml
│ │ │ ├── layout/
│ │ │ │ ├── activity_main.xml
│ │ │ │ ├── decoder_settings.xml
│ │ │ │ ├── encoder_settings.xml
│ │ │ │ ├── file.xml
│ │ │ │ ├── fragment_item.xml
│ │ │ │ ├── help.xml
│ │ │ │ ├── main_tab_layout.xml
│ │ │ │ └── tab_content.xml
│ │ │ ├── values/
│ │ │ │ ├── bool.xml
│ │ │ │ ├── colors.xml
│ │ │ │ ├── dimens.xml
│ │ │ │ ├── strings.xml
│ │ │ │ ├── styles.xml
│ │ │ │ └── themes.xml
│ │ │ └── xml/
│ │ │ └── mimetypes.xml
│ │ ├── build.gradle
│ │ ├── gradle/
│ │ │ └── wrapper/
│ │ │ ├── gradle-wrapper.jar
│ │ │ └── gradle-wrapper.properties
│ │ ├── gradle.properties
│ │ ├── gradlew
│ │ ├── gradlew.bat
│ │ └── settings.gradle
│ ├── README.md
│ └── prebuilt/
│ ├── arm64-v8a/
│ │ ├── libcpufeatures.a
│ │ ├── libqydecoder.a
│ │ ├── libqyencoder.a
│ │ └── libx264.a
│ ├── armeabi-v7a/
│ │ ├── libcpufeatures.a
│ │ ├── libqydecoder.a
│ │ ├── libqyencoder.a
│ │ └── libx264.a
│ └── include/
│ ├── lenthevcdec.h
│ ├── qy265dec.h
│ ├── qy265def.h
│ ├── qy265enc.h
│ ├── qyauth_env.h
│ ├── x264.h
│ └── x264_config.h
├── README.md
├── android_arm64/
│ ├── appdecoder
│ └── appencoder
├── centos_x64/
│ ├── READme.txt
│ ├── appdecoder
│ └── appencoder
├── iOS_demo/
│ ├── KSY265CodecDemo_iOS/
│ │ ├── AppDelegate.h
│ │ ├── AppDelegate.m
│ │ ├── Assets.xcassets/
│ │ │ ├── AppIcon.appiconset/
│ │ │ │ └── Contents.json
│ │ │ ├── Contents.json
│ │ │ ├── first.imageset/
│ │ │ │ └── Contents.json
│ │ │ ├── recycle-bin.imageset/
│ │ │ │ └── Contents.json
│ │ │ └── second.imageset/
│ │ │ └── Contents.json
│ │ ├── Base.lproj/
│ │ │ ├── LaunchScreen.storyboard
│ │ │ └── Main.storyboard
│ │ ├── BaseViewController.h
│ │ ├── BaseViewController.m
│ │ ├── DecoderHelperViewController.h
│ │ ├── DecoderHelperViewController.m
│ │ ├── EncoderHelperViewController.h
│ │ ├── EncoderHelperViewController.m
│ │ ├── FirstViewController.h
│ │ ├── FirstViewController.m
│ │ ├── GLRenderer.h
│ │ ├── GLRenderer.m
│ │ ├── GLView.h
│ │ ├── GLView.m
│ │ ├── Info.plist
│ │ ├── KSYMovieEncoder.h
│ │ ├── KSYMovieEncoder.m
│ │ ├── KSYMoviePlayer.h
│ │ ├── KSYMoviePlayer.m
│ │ ├── MovieEncoder.h
│ │ ├── MovieEncoder.m
│ │ ├── MoviePlayer.h
│ │ ├── MoviePlayer.m
│ │ ├── MoviesViewController.h
│ │ ├── MoviesViewController.m
│ │ ├── PlayViewController.h
│ │ ├── PlayViewController.m
│ │ ├── PlayViewController.xib
│ │ ├── SecondViewController.h
│ │ ├── SecondViewController.m
│ │ ├── SettingsDecoderViewController.h
│ │ ├── SettingsDecoderViewController.m
│ │ ├── SettingsEncoderViewController.h
│ │ ├── SettingsEncoderViewController.m
│ │ └── main.m
│ ├── KSY265CodecDemo_iOS.xcodeproj/
│ │ ├── project.pbxproj
│ │ ├── project.xcworkspace/
│ │ │ ├── contents.xcworkspacedata
│ │ │ └── xcuserdata/
│ │ │ ├── jiangdong.xcuserdatad/
│ │ │ │ └── UserInterfaceState.xcuserstate
│ │ │ └── ksyun.xcuserdatad/
│ │ │ └── UserInterfaceState.xcuserstate
│ │ └── xcuserdata/
│ │ ├── jiangdong.xcuserdatad/
│ │ │ ├── xcdebugger/
│ │ │ │ └── Breakpoints_v2.xcbkptlist
│ │ │ └── xcschemes/
│ │ │ ├── KSY265CodecDemo_iOS.xcscheme
│ │ │ └── xcschememanagement.plist
│ │ └── ksyun.xcuserdatad/
│ │ ├── xcdebugger/
│ │ │ └── Breakpoints_v2.xcbkptlist
│ │ └── xcschemes/
│ │ ├── KSY265CodecDemo_iOS.xcscheme
│ │ └── xcschememanagement.plist
│ ├── combox/
│ │ ├── AYHCustomComboBox.h
│ │ ├── AYHCustomComboBox.m
│ │ └── AYHCustomComboBoxDelegate.h
│ ├── ksy265codec/
│ │ ├── libqycommon.a
│ │ ├── libqydecoder.a
│ │ ├── libqyencoder.a
│ │ ├── qy265dec.h
│ │ ├── qy265def.h
│ │ └── qy265enc.h
│ ├── lenthevcdec/
│ │ ├── lenthevcdec.h
│ │ └── liblenthevcdec.a
│ ├── resource/
│ │ ├── 1280x720_15.yuv
│ │ ├── 640x480_15.yuv
│ │ └── 960x540_15.yuv
│ └── x264/
│ ├── libx264.a
│ ├── x264.h
│ └── x264_config.h
└── ubuntu_x64/
├── READme.txt
├── appdecoder
└── appencoder
================================================
FILE CONTENTS
================================================
================================================
FILE: Android_demo/.gitignore
================================================
KSY265CodecDemo/.idea
KSY265CodecDemo/app/src/main/libs
KSY265CodecDemo/app/src/main/obj
================================================
FILE: Android_demo/KSY265CodecDemo/.gitignore
================================================
*.iml
.gradle
/local.properties
/.idea/workspace.xml
/.idea/libraries
.DS_Store
/build
/captures
.externalNativeBuild
================================================
FILE: Android_demo/KSY265CodecDemo/app/.gitignore
================================================
/build
================================================
FILE: Android_demo/KSY265CodecDemo/app/CMakeLists.txt
================================================
# For more information about using CMake with Android Studio, read the
# documentation: https://d.android.com/studio/projects/add-native-code.html
# Sets the minimum version of CMake required to build the native library.
# NOTE(review): app/build.gradle points externalNativeBuild at
# src/main/jni/Android.mk (ndkBuild), so this CMakeLists.txt appears to be
# unused by the Gradle build — confirm before relying on it.
cmake_minimum_required(VERSION 3.4.1)
# Creates and names a library, sets it as either STATIC
# or SHARED, and provides the relative paths to its source code.
# You can define multiple libraries, and CMake builds them for you.
# Gradle automatically packages shared libraries with your APK.
add_library( # Sets the name of the library.
native-lib
# Sets the library as a shared library.
SHARED
# Provides a relative path to your source file(s).
# NOTE(review): the jni tree shows encoder sources under jni/encoder/
# (encoderwrapper.c), not directly under jni/ — verify this relative path.
src/main/jni/encoderwrapper.c )
# Specifies a path to native header files.
include_directories(../../prebuilt/include )
# Searches for a specified prebuilt library and stores the path as a
# variable. Because CMake includes system libraries in the search path by
# default, you only need to specify the name of the public NDK library
# you want to add. CMake verifies that the library exists before
# completing its build.
find_library( # Sets the name of the path variable.
log-lib
# Specifies the name of the NDK library that
# you want CMake to locate.
log )
# Specifies libraries CMake should link to your target library. You
# can link multiple libraries, such as libraries you define in this
# build script, prebuilt third-party libraries, or system libraries.
target_link_libraries( # Specifies the target library.
native-lib
# Links the target library to the log library
# included in the NDK.
${log-lib}
#x264
${CMAKE_CURRENT_SOURCE_DIR}/../../prebuilt/${ANDROID_ABI}/libx264.a
#ksy265
# NOTE(review): the prebuilt/<abi>/ directories in this repo contain
# libqydecoder.a and libqyencoder.a but no libqy265.a — this link input
# looks stale and the link step would fail; verify the intended library.
${CMAKE_CURRENT_SOURCE_DIR}/../../prebuilt/${ANDROID_ABI}/libqy265.a
${CMAKE_CURRENT_SOURCE_DIR}/../../prebuilt/${ANDROID_ABI}/libcpufeatures.a)
================================================
FILE: Android_demo/KSY265CodecDemo/app/build.gradle
================================================
apply plugin: 'com.android.application'
android {
compileSdkVersion 25
buildToolsVersion "25.0.2"
defaultConfig {
applicationId "com.ksyun.media.ksy265codec.demo"
minSdkVersion 14
// NOTE(review): targetSdkVersion 17 is well below compileSdkVersion 25;
// intentional legacy-behavior opt-in? Confirm, otherwise raise it.
targetSdkVersion 17
versionCode 1
versionName "1.0.0"
externalNativeBuild {
ndkBuild {
//abiFilters 'x86', 'armeabi', 'armeabi-v7a', 'arm64-v8a'
// NOTE(review): arm64-v8a prebuilt libraries exist in ../prebuilt/ but
// are excluded here — only 32-bit ARM is packaged.
abiFilters "armeabi-v7a"
}
}
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
debug {
jniDebuggable true
}
}
sourceSets{
main{
// Pre-built .so files are picked up from src/main/libs; jni.srcDirs is
// emptied so the default Android.mk auto-build does not run twice.
jniLibs.srcDirs 'src/main/libs'
jni.srcDirs = []
}
}
externalNativeBuild {
ndkBuild {
path "src/main/jni/Android.mk"
}
}
}
dependencies {
// NOTE(review): 'compile' is deprecated in newer Gradle plugin versions
// (replaced by 'implementation') — fine for the plugin version in use here.
compile fileTree(include: ['*.jar'], dir: 'libs')
compile 'com.android.support:appcompat-v7:25.3.0'
compile 'com.android.support:percent:25.3.0'
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/proguard-rules.pro
================================================
# Add project specific ProGuard rules here.
# By default, the flags in this file are appended to flags specified
# in /Users/sujia/Library/Android/sdk/tools/proguard/proguard-android.txt
# You can edit the include path and order by changing the proguardFiles
# directive in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# Add any project specific keep options here:
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/AndroidManifest.xml
================================================
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ianhanniballake/localstorage/LocalStorageProvider.java
================================================
package com.ianhanniballake.localstorage;
import android.content.res.AssetFileDescriptor;
import android.database.Cursor;
import android.database.MatrixCursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Point;
import android.os.CancellationSignal;
import android.os.Environment;
import android.os.ParcelFileDescriptor;
import android.provider.DocumentsContract.Document;
import android.provider.DocumentsContract.Root;
import android.provider.DocumentsProvider;
import android.util.Log;
import android.webkit.MimeTypeMap;
import com.ksyun.media.ksy265codec.demo.R;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
public class LocalStorageProvider extends DocumentsProvider {

    /** Authority for this provider, as declared in the manifest. */
    public static final String AUTHORITY = "com.ianhanniballake.localstorage.documents";

    /**
     * Default root projection: everything but Root.COLUMN_MIME_TYPES
     */
    private final static String[] DEFAULT_ROOT_PROJECTION = new String[] {
            Root.COLUMN_ROOT_ID,
            Root.COLUMN_FLAGS, Root.COLUMN_TITLE, Root.COLUMN_DOCUMENT_ID, Root.COLUMN_ICON,
            Root.COLUMN_AVAILABLE_BYTES
    };

    /**
     * Default document projection: everything but Document.COLUMN_ICON and
     * Document.COLUMN_SUMMARY
     */
    private final static String[] DEFAULT_DOCUMENT_PROJECTION = new String[] {
            Document.COLUMN_DOCUMENT_ID,
            Document.COLUMN_DISPLAY_NAME, Document.COLUMN_FLAGS, Document.COLUMN_MIME_TYPE,
            Document.COLUMN_SIZE,
            Document.COLUMN_LAST_MODIFIED
    };

    /**
     * Exposes a single root: the primary external storage directory.
     */
    @Override
    public Cursor queryRoots(final String[] projection) throws FileNotFoundException {
        // Create a cursor with either the requested fields, or the default
        // projection if "projection" is null.
        final MatrixCursor result = new MatrixCursor(projection != null ? projection
                : DEFAULT_ROOT_PROJECTION);
        // Add Home directory
        File homeDir = Environment.getExternalStorageDirectory();
        final MatrixCursor.RowBuilder row = result.newRow();
        // These columns are required
        row.add(Root.COLUMN_ROOT_ID, homeDir.getAbsolutePath());
        row.add(Root.COLUMN_DOCUMENT_ID, homeDir.getAbsolutePath());
        row.add(Root.COLUMN_TITLE, getContext().getString(R.string.internal_storage));
        row.add(Root.COLUMN_FLAGS, Root.FLAG_LOCAL_ONLY | Root.FLAG_SUPPORTS_CREATE);
        row.add(Root.COLUMN_ICON, R.drawable.ic_provider);
        // These columns are optional
        row.add(Root.COLUMN_AVAILABLE_BYTES, homeDir.getFreeSpace());
        // Root.COLUMN_MIME_TYPE is another optional column and useful if you
        // have multiple roots with different types of mime types (roots that
        // don't match the requested mime type are automatically hidden)
        return result;
    }

    /**
     * Creates an empty file named {@code displayName} under the directory
     * given by {@code parentDocumentId}. Returns the new document id (the
     * absolute path), or null if creation failed.
     */
    @Override
    public String createDocument(final String parentDocumentId, final String mimeType,
            final String displayName) throws FileNotFoundException {
        File newFile = new File(parentDocumentId, displayName);
        try {
            newFile.createNewFile();
            return newFile.getAbsolutePath();
        } catch (IOException e) {
            Log.e(LocalStorageProvider.class.getSimpleName(), "Error creating new file " + newFile);
        }
        return null;
    }

    /**
     * Builds a downsampled thumbnail for an image document, no larger than
     * twice {@code sizeHint} in either dimension, and returns it via a
     * temporary PNG in the cache directory.
     */
    @Override
    public AssetFileDescriptor openDocumentThumbnail(final String documentId, final Point sizeHint,
            final CancellationSignal signal) throws FileNotFoundException {
        // First pass: decode bounds only, to pick a power-of-two sample size.
        BitmapFactory.Options options = new BitmapFactory.Options();
        options.inJustDecodeBounds = true;
        BitmapFactory.decodeFile(documentId, options);
        final int targetHeight = 2 * sizeHint.y;
        final int targetWidth = 2 * sizeHint.x;
        final int height = options.outHeight;
        final int width = options.outWidth;
        options.inSampleSize = 1;
        if (height > targetHeight || width > targetWidth) {
            final int halfHeight = height / 2;
            final int halfWidth = width / 2;
            // Calculate the largest inSampleSize value that is a power of 2 and
            // keeps both height and width larger than the requested height and
            // width.
            while ((halfHeight / options.inSampleSize) > targetHeight
                    || (halfWidth / options.inSampleSize) > targetWidth) {
                options.inSampleSize *= 2;
            }
        }
        options.inJustDecodeBounds = false;
        Bitmap bitmap = BitmapFactory.decodeFile(documentId, options);
        // FIX: decodeFile returns null for non-image or unreadable files; the
        // original code would NPE on bitmap.compress() below.
        if (bitmap == null) {
            throw new FileNotFoundException("Unable to decode image at " + documentId);
        }
        // Write out the thumbnail to a temporary file
        File tempFile = null;
        FileOutputStream out = null;
        try {
            tempFile = File.createTempFile("thumbnail", null, getContext().getCacheDir());
            out = new FileOutputStream(tempFile);
            bitmap.compress(Bitmap.CompressFormat.PNG, 90, out);
        } catch (IOException e) {
            Log.e(LocalStorageProvider.class.getSimpleName(), "Error writing thumbnail", e);
            return null;
        } finally {
            if (out != null)
                try {
                    out.close();
                } catch (IOException e) {
                    Log.e(LocalStorageProvider.class.getSimpleName(), "Error closing thumbnail", e);
                }
        }
        // It appears the Storage Framework UI caches these results quite
        // aggressively so there is little reason to write your own caching
        // layer beyond what you need to return a single AssetFileDescriptor
        return new AssetFileDescriptor(ParcelFileDescriptor.open(tempFile,
                ParcelFileDescriptor.MODE_READ_ONLY), 0,
                AssetFileDescriptor.UNKNOWN_LENGTH);
    }

    /**
     * Lists the non-hidden children of the directory given by
     * {@code parentDocumentId}.
     */
    @Override
    public Cursor queryChildDocuments(final String parentDocumentId, final String[] projection,
            final String sortOrder) throws FileNotFoundException {
        // Create a cursor with either the requested fields, or the default
        // projection if "projection" is null.
        final MatrixCursor result = new MatrixCursor(projection != null ? projection
                : DEFAULT_DOCUMENT_PROJECTION);
        final File parent = new File(parentDocumentId);
        final File[] children = parent.listFiles();
        // FIX: listFiles() returns null when the path is not a readable
        // directory; the original for-each would throw a NullPointerException.
        if (children == null) {
            throw new FileNotFoundException("Unable to list children of " + parentDocumentId);
        }
        for (File file : children) {
            // Don't show hidden files/folders
            if (!file.getName().startsWith(".")) {
                // Adds the file's display name, MIME type, size, and so on.
                includeFile(result, file);
            }
        }
        return result;
    }

    /**
     * Returns a one-row cursor describing the document at {@code documentId}.
     */
    @Override
    public Cursor queryDocument(final String documentId, final String[] projection)
            throws FileNotFoundException {
        // Create a cursor with either the requested fields, or the default
        // projection if "projection" is null.
        final MatrixCursor result = new MatrixCursor(projection != null ? projection
                : DEFAULT_DOCUMENT_PROJECTION);
        includeFile(result, new File(documentId));
        return result;
    }

    /** Appends one row describing {@code file} to {@code result}. */
    private void includeFile(final MatrixCursor result, final File file)
            throws FileNotFoundException {
        final MatrixCursor.RowBuilder row = result.newRow();
        // These columns are required
        row.add(Document.COLUMN_DOCUMENT_ID, file.getAbsolutePath());
        row.add(Document.COLUMN_DISPLAY_NAME, file.getName());
        String mimeType = getDocumentType(file.getAbsolutePath());
        row.add(Document.COLUMN_MIME_TYPE, mimeType);
        int flags = file.canWrite() ? Document.FLAG_SUPPORTS_DELETE | Document.FLAG_SUPPORTS_WRITE
                : 0;
        // We only show thumbnails for image files - expect a call to
        // openDocumentThumbnail for each file that has this flag set
        if (mimeType.startsWith("image/"))
            flags |= Document.FLAG_SUPPORTS_THUMBNAIL;
        row.add(Document.COLUMN_FLAGS, flags);
        // COLUMN_SIZE is required, but can be null
        row.add(Document.COLUMN_SIZE, file.length());
        // These columns are optional
        row.add(Document.COLUMN_LAST_MODIFIED, file.lastModified());
        // Document.COLUMN_ICON can be a resource id identifying a custom icon.
        // The system provides default icons based on mime type.
        // Document.COLUMN_SUMMARY is optional additional information about the
        // file
    }

    /**
     * Resolves a MIME type from the document's file extension, falling back
     * to application/octet-stream; directories map to MIME_TYPE_DIR.
     */
    @Override
    public String getDocumentType(final String documentId) throws FileNotFoundException {
        File file = new File(documentId);
        if (file.isDirectory())
            return Document.MIME_TYPE_DIR;
        // From FileProvider.getType(Uri)
        final int lastDot = file.getName().lastIndexOf('.');
        if (lastDot >= 0) {
            final String extension = file.getName().substring(lastDot + 1);
            final String mime = MimeTypeMap.getSingleton().getMimeTypeFromExtension(extension);
            if (mime != null) {
                return mime;
            }
        }
        return "application/octet-stream";
    }

    /**
     * Deletes the document at {@code documentId}.
     *
     * @throws FileNotFoundException if the file could not be deleted
     */
    @Override
    public void deleteDocument(final String documentId) throws FileNotFoundException {
        // FIX: the original ignored the boolean result of delete(), silently
        // reporting success even when nothing was removed.
        if (!new File(documentId).delete()) {
            throw new FileNotFoundException("Unable to delete document " + documentId);
        }
    }

    /**
     * Opens the document read-only, or read-write when the requested mode
     * contains 'w'.
     */
    @Override
    public ParcelFileDescriptor openDocument(final String documentId, final String mode,
            final CancellationSignal signal) throws FileNotFoundException {
        File file = new File(documentId);
        final boolean isWrite = (mode.indexOf('w') != -1);
        if (isWrite) {
            return ParcelFileDescriptor.open(file, ParcelFileDescriptor.MODE_READ_WRITE);
        } else {
            return ParcelFileDescriptor.open(file, ParcelFileDescriptor.MODE_READ_ONLY);
        }
    }

    @Override
    public boolean onCreate() {
        return true;
    }
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ipaulpro/afilechooser/FileChooserActivity.java
================================================
/*
* Copyright (C) 2013 Paul Burke
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ipaulpro.afilechooser;
import android.app.ActionBar;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentManager.BackStackEntry;
import android.support.v4.app.FragmentManager.OnBackStackChangedListener;
import android.support.v4.app.FragmentTransaction;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.Toast;
import com.ksyun.media.ksy265codec.demo.R;
import java.io.File;
/**
* Main Activity that handles the FileListFragments
*
* @version 2013-06-25
* @author paulburke (ipaulpro)
*/
public class FileChooserActivity extends FragmentActivity implements
OnBackStackChangedListener, FileListFragment.Callbacks {
public static final String PATH = "path";
public static final String EXTERNAL_BASE_PATH = Environment
.getExternalStorageDirectory().getAbsolutePath();
private static final boolean HAS_ACTIONBAR = Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB;
private FragmentManager mFragmentManager;
private BroadcastReceiver mStorageListener = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
Toast.makeText(context, R.string.storage_removed, Toast.LENGTH_LONG).show();
finishWithResult(null);
}
};
private String mPath;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mFragmentManager = getSupportFragmentManager();
mFragmentManager.addOnBackStackChangedListener(this);
if (savedInstanceState == null) {
mPath = EXTERNAL_BASE_PATH;
addFragment();
} else {
mPath = savedInstanceState.getString(PATH);
}
setTitle(mPath);
}
@Override
protected void onPause() {
super.onPause();
unregisterStorageListener();
}
@Override
protected void onResume() {
super.onResume();
registerStorageListener();
}
@Override
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
outState.putString(PATH, mPath);
}
@Override
public void onBackStackChanged() {
int count = mFragmentManager.getBackStackEntryCount();
if (count > 0) {
BackStackEntry fragment = mFragmentManager.getBackStackEntryAt(count - 1);
mPath = fragment.getName();
} else {
mPath = EXTERNAL_BASE_PATH;
}
setTitle(mPath);
if (HAS_ACTIONBAR)
invalidateOptionsMenu();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
if (HAS_ACTIONBAR) {
boolean hasBackStack = mFragmentManager.getBackStackEntryCount() > 0;
ActionBar actionBar = getActionBar();
actionBar.setDisplayHomeAsUpEnabled(hasBackStack);
actionBar.setHomeButtonEnabled(hasBackStack);
}
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case android.R.id.home:
mFragmentManager.popBackStack();
return true;
}
return super.onOptionsItemSelected(item);
}
/**
* Add the initial Fragment with given path.
*/
private void addFragment() {
FileListFragment fragment = FileListFragment.newInstance(mPath);
mFragmentManager.beginTransaction()
.add(android.R.id.content, fragment).commit();
}
/**
* "Replace" the existing Fragment with a new one using given path. We're
* really adding a Fragment to the back stack.
*
* @param file The file (directory) to display.
*/
private void replaceFragment(File file) {
mPath = file.getAbsolutePath();
FileListFragment fragment = FileListFragment.newInstance(mPath);
mFragmentManager.beginTransaction()
.replace(android.R.id.content, fragment)
.setTransition(FragmentTransaction.TRANSIT_FRAGMENT_OPEN)
.addToBackStack(mPath).commit();
}
/**
* Finish this Activity with a result code and URI of the selected file.
*
* @param file The file selected.
*/
private void finishWithResult(File file) {
if (file != null) {
Uri uri = Uri.fromFile(file);
setResult(RESULT_OK, new Intent().setData(uri));
finish();
} else {
setResult(RESULT_CANCELED);
finish();
}
}
/**
* Called when the user selects a File
*
* @param file The file that was selected
*/
@Override
public void onFileSelected(File file) {
if (file != null) {
if (file.isDirectory()) {
replaceFragment(file);
} else {
finishWithResult(file);
}
} else {
Toast.makeText(FileChooserActivity.this, R.string.error_selecting_file,
Toast.LENGTH_SHORT).show();
}
}
/**
* Register the external storage BroadcastReceiver.
*/
private void registerStorageListener() {
IntentFilter filter = new IntentFilter();
filter.addAction(Intent.ACTION_MEDIA_REMOVED);
registerReceiver(mStorageListener, filter);
}
/**
* Unregister the external storage BroadcastReceiver.
*/
private void unregisterStorageListener() {
unregisterReceiver(mStorageListener);
}
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ipaulpro/afilechooser/FileListAdapter.java
================================================
/*
* Copyright (C) 2012 Paul Burke
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ipaulpro.afilechooser;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.TextView;
import com.ksyun.media.ksy265codec.demo.R;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
/**
* List adapter for Files.
*
* @version 2013-12-11
* @author paulburke (ipaulpro)
*/
public class FileListAdapter extends BaseAdapter {

    private final static int ICON_FOLDER = R.drawable.ic_folder;
    private final static int ICON_FILE = R.drawable.ic_file;

    private final LayoutInflater mInflater;
    // FIX: use the parameterized List<File> — with the raw List, getItem()
    // returning File would not even compile (get() yields Object), and
    // callers lose type safety. (The generics look stripped by extraction;
    // restoring them here.)
    private List<File> mData = new ArrayList<File>();

    public FileListAdapter(Context context) {
        mInflater = LayoutInflater.from(context);
    }

    /** Appends a file and refreshes the list. */
    public void add(File file) {
        mData.add(file);
        notifyDataSetChanged();
    }

    /** Removes a file and refreshes the list. */
    public void remove(File file) {
        mData.remove(file);
        notifyDataSetChanged();
    }

    /** Inserts a file at the given index and refreshes the list. */
    public void insert(File file, int index) {
        mData.add(index, file);
        notifyDataSetChanged();
    }

    /** Clears all entries and refreshes the list. */
    public void clear() {
        mData.clear();
        notifyDataSetChanged();
    }

    @Override
    public File getItem(int position) {
        return mData.get(position);
    }

    @Override
    public long getItemId(int position) {
        return position;
    }

    @Override
    public int getCount() {
        return mData.size();
    }

    public List<File> getListItems() {
        return mData;
    }

    /**
     * Set the list items without notifying on the clear. This prevents loss of
     * scroll position.
     *
     * @param data the new backing list (adopted, not copied)
     */
    public void setListItems(List<File> data) {
        mData = data;
        notifyDataSetChanged();
    }

    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        View row = convertView;
        if (row == null)
            row = mInflater.inflate(R.layout.file, parent, false);
        TextView view = (TextView) row;
        // Get the file at the current position
        final File file = getItem(position);
        // Set the TextView as the file name
        view.setText(file.getName());
        // If the item is not a directory, use the file icon
        int icon = file.isDirectory() ? ICON_FOLDER : ICON_FILE;
        view.setCompoundDrawablesWithIntrinsicBounds(icon, 0, 0, 0);
        return row;
    }
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ipaulpro/afilechooser/FileListFragment.java
================================================
/*
* Copyright (C) 2013 Paul Burke
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ipaulpro.afilechooser;
import android.app.Activity;
import android.os.Bundle;
import android.os.Environment;
import android.support.v4.app.ListFragment;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.Loader;
import android.view.View;
import android.widget.ListView;
import com.ksyun.media.ksy265codec.demo.R;
import java.io.File;
import java.util.List;
/**
* Fragment that displays a list of Files in a given path.
*
* @version 2013-12-11
* @author paulburke (ipaulpro)
*/
public class FileListFragment extends ListFragment implements
LoaderManager.LoaderCallbacks> {
/**
* Interface to listen for events.
*/
public interface Callbacks {
/**
* Called when a file is selected from the list.
*
* @param file The file selected
*/
public void onFileSelected(File file);
}
private static final int LOADER_ID = 0;
private FileListAdapter mAdapter;
private String mPath;
private Callbacks mListener;
/**
* Create a new instance with the given file path.
*
* @param path The absolute path of the file (directory) to display.
* @return A new Fragment with the given file path.
*/
public static FileListFragment newInstance(String path) {
FileListFragment fragment = new FileListFragment();
Bundle args = new Bundle();
args.putString(FileChooserActivity.PATH, path);
fragment.setArguments(args);
return fragment;
}
@Override
public void onAttach(Activity activity) {
super.onAttach(activity);
try {
mListener = (Callbacks) activity;
} catch (ClassCastException e) {
throw new ClassCastException(activity.toString()
+ " must implement FileListFragment.Callbacks");
}
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mAdapter = new FileListAdapter(getActivity());
mPath = getArguments() != null ? getArguments().getString(
FileChooserActivity.PATH) : Environment
.getExternalStorageDirectory().getAbsolutePath();
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
setEmptyText(getString(R.string.empty_directory));
setListAdapter(mAdapter);
setListShown(false);
getLoaderManager().initLoader(LOADER_ID, null, this);
super.onActivityCreated(savedInstanceState);
}
@Override
public void onListItemClick(ListView l, View v, int position, long id) {
FileListAdapter adapter = (FileListAdapter) l.getAdapter();
if (adapter != null) {
File file = (File) adapter.getItem(position);
mPath = file.getAbsolutePath();
mListener.onFileSelected(file);
}
}
@Override
public Loader> onCreateLoader(int id, Bundle args) {
return new FileLoader(getActivity(), mPath);
}
@Override
public void onLoadFinished(Loader> loader, List data) {
mAdapter.setListItems(data);
if (isResumed())
setListShown(true);
else
setListShownNoAnimation(true);
}
@Override
public void onLoaderReset(Loader> loader) {
mAdapter.clear();
}
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ipaulpro/afilechooser/FileLoader.java
================================================
/*
* Copyright (C) 2013 Paul Burke
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ipaulpro.afilechooser;
import android.content.Context;
import android.os.FileObserver;
import android.support.v4.content.AsyncTaskLoader;
import com.ipaulpro.afilechooser.utils.FileUtils;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* Loader that returns a list of Files in a given file path.
*
* @version 2013-12-11
* @author paulburke (ipaulpro)
*/
public class FileLoader extends AsyncTaskLoader> {
private static final int FILE_OBSERVER_MASK = FileObserver.CREATE
| FileObserver.DELETE | FileObserver.DELETE_SELF
| FileObserver.MOVED_FROM | FileObserver.MOVED_TO
| FileObserver.MODIFY | FileObserver.MOVE_SELF;
private FileObserver mFileObserver;
private List mData;
private String mPath;
public FileLoader(Context context, String path) {
super(context);
this.mPath = path;
}
@Override
public List loadInBackground() {
ArrayList list = new ArrayList();
// Current directory File instance
final File pathDir = new File(mPath);
// List file in this directory with the directory filter
final File[] dirs = pathDir.listFiles(FileUtils.sDirFilter);
if (dirs != null) {
// Sort the folders alphabetically
Arrays.sort(dirs, FileUtils.sComparator);
// Add each folder to the File list for the list adapter
for (File dir : dirs)
list.add(dir);
}
// List file in this directory with the file filter
final File[] files = pathDir.listFiles(FileUtils.sFileFilter);
if (files != null) {
// Sort the files alphabetically
Arrays.sort(files, FileUtils.sComparator);
// Add each file to the File list for the list adapter
for (File file : files)
list.add(file);
}
return list;
}
@Override
public void deliverResult(List data) {
if (isReset()) {
onReleaseResources(data);
return;
}
List oldData = mData;
mData = data;
if (isStarted())
super.deliverResult(data);
if (oldData != null && oldData != data)
onReleaseResources(oldData);
}
@Override
protected void onStartLoading() {
if (mData != null)
deliverResult(mData);
if (mFileObserver == null) {
mFileObserver = new FileObserver(mPath, FILE_OBSERVER_MASK) {
@Override
public void onEvent(int event, String path) {
onContentChanged();
}
};
}
mFileObserver.startWatching();
if (takeContentChanged() || mData == null)
forceLoad();
}
@Override
protected void onStopLoading() {
cancelLoad();
}
@Override
protected void onReset() {
onStopLoading();
if (mData != null) {
onReleaseResources(mData);
mData = null;
}
}
@Override
public void onCanceled(List data) {
super.onCanceled(data);
onReleaseResources(data);
}
protected void onReleaseResources(List data) {
if (mFileObserver != null) {
mFileObserver.stopWatching();
mFileObserver = null;
}
}
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ipaulpro/afilechooser/utils/FileUtils.java
================================================
/*
* Copyright (C) 2007-2008 OpenIntents.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ipaulpro.afilechooser.utils;
import android.content.ContentResolver;
import android.content.ContentUris;
import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
import android.database.DatabaseUtils;
import android.graphics.Bitmap;
import android.net.Uri;
import android.os.Build;
import android.os.Environment;
import android.provider.DocumentsContract;
import android.provider.MediaStore;
import android.util.Log;
import android.webkit.MimeTypeMap;
import com.ianhanniballake.localstorage.LocalStorageProvider;
import java.io.File;
import java.io.FileFilter;
import java.text.DecimalFormat;
import java.util.Comparator;
/**
* @version 2009-07-03
* @author Peli
* @version 2013-12-11
* @author paulburke (ipaulpro)
*/
/**
 * Helper methods for resolving file paths, MIME types and thumbnails from
 * files and content Uris (including Storage Access Framework document Uris).
 *
 * @version 2009-07-03
 * @author Peli
 * @version 2013-12-11
 * @author paulburke (ipaulpro)
 */
public class FileUtils {
    private FileUtils() {} // private constructor: static utility class, not instantiable

    /** TAG for log messages. */
    static final String TAG = "FileUtils";
    private static final boolean DEBUG = false; // Set to true to enable logging

    public static final String MIME_TYPE_AUDIO = "audio/*";
    public static final String MIME_TYPE_TEXT = "text/*";
    public static final String MIME_TYPE_IMAGE = "image/*";
    public static final String MIME_TYPE_VIDEO = "video/*";
    public static final String MIME_TYPE_APP = "application/*";

    /** File names starting with this prefix are treated as hidden. */
    public static final String HIDDEN_PREFIX = ".";

    /**
     * Gets the extension of a file name, like ".png" or ".jpg".
     *
     * @param uri file name or path
     * @return Extension including the dot("."); "" if there is no extension;
     *         null if uri was null.
     */
    public static String getExtension(String uri) {
        if (uri == null) {
            return null;
        }
        int dot = uri.lastIndexOf(".");
        if (dot >= 0) {
            return uri.substring(dot);
        } else {
            // No extension.
            return "";
        }
    }

    /**
     * @return Whether the URI is a local one (not http/https).
     */
    public static boolean isLocal(String url) {
        if (url != null && !url.startsWith("http://") && !url.startsWith("https://")) {
            return true;
        }
        return false;
    }

    /**
     * @return True if Uri is a MediaStore Uri.
     * @author paulburke
     */
    public static boolean isMediaUri(Uri uri) {
        return "media".equalsIgnoreCase(uri.getAuthority());
    }

    /**
     * Convert File into Uri.
     *
     * @param file the file, may be null
     * @return file:// uri, or null if file was null
     */
    public static Uri getUri(File file) {
        if (file != null) {
            return Uri.fromFile(file);
        }
        return null;
    }

    /**
     * Returns the path only (without file name).
     *
     * @param file file or directory, may be null
     * @return the containing directory (the file itself if it is a directory),
     *         or null if file was null
     */
    public static File getPathWithoutFilename(File file) {
        if (file != null) {
            if (file.isDirectory()) {
                // no file to be split off. Return everything
                return file;
            } else {
                String filename = file.getName();
                String filepath = file.getAbsolutePath();
                // Construct path without file name.
                String pathwithoutname = filepath.substring(0,
                        filepath.length() - filename.length());
                if (pathwithoutname.endsWith("/")) {
                    pathwithoutname = pathwithoutname.substring(0, pathwithoutname.length() - 1);
                }
                return new File(pathwithoutname);
            }
        }
        return null;
    }

    /**
     * @return The MIME type for the given file, or "application/octet-stream"
     *         when the file has no extension.
     */
    public static String getMimeType(File file) {
        String extension = getExtension(file.getName());
        if (extension.length() > 0)
            return MimeTypeMap.getSingleton().getMimeTypeFromExtension(extension.substring(1));
        return "application/octet-stream";
    }

    /**
     * @return The MIME type for the given Uri.
     */
    public static String getMimeType(Context context, Uri uri) {
        File file = new File(getPath(context, uri));
        return getMimeType(file);
    }

    /**
     * @param uri The Uri to check.
     * @return Whether the Uri authority is {@link LocalStorageProvider}.
     * @author paulburke
     */
    public static boolean isLocalStorageDocument(Uri uri) {
        return LocalStorageProvider.AUTHORITY.equals(uri.getAuthority());
    }

    /**
     * @param uri The Uri to check.
     * @return Whether the Uri authority is ExternalStorageProvider.
     * @author paulburke
     */
    public static boolean isExternalStorageDocument(Uri uri) {
        return "com.android.externalstorage.documents".equals(uri.getAuthority());
    }

    /**
     * @param uri The Uri to check.
     * @return Whether the Uri authority is DownloadsProvider.
     * @author paulburke
     */
    public static boolean isDownloadsDocument(Uri uri) {
        return "com.android.providers.downloads.documents".equals(uri.getAuthority());
    }

    /**
     * @param uri The Uri to check.
     * @return Whether the Uri authority is MediaProvider.
     * @author paulburke
     */
    public static boolean isMediaDocument(Uri uri) {
        return "com.android.providers.media.documents".equals(uri.getAuthority());
    }

    /**
     * @param uri The Uri to check.
     * @return Whether the Uri authority is Google Photos.
     */
    public static boolean isGooglePhotosUri(Uri uri) {
        return "com.google.android.apps.photos.content".equals(uri.getAuthority());
    }

    /**
     * Get the value of the data column for this Uri. This is useful for
     * MediaStore Uris, and other file-based ContentProviders.
     *
     * @param context The context.
     * @param uri The Uri to query.
     * @param selection (Optional) Filter used in the query.
     * @param selectionArgs (Optional) Selection arguments used in the query.
     * @return The value of the _data column, which is typically a file path,
     *         or null when the query returns no row.
     * @author paulburke
     */
    public static String getDataColumn(Context context, Uri uri, String selection,
            String[] selectionArgs) {
        Cursor cursor = null;
        final String column = "_data";
        final String[] projection = {
                column
        };
        try {
            cursor = context.getContentResolver().query(uri, projection, selection, selectionArgs,
                    null);
            if (cursor != null && cursor.moveToFirst()) {
                if (DEBUG)
                    DatabaseUtils.dumpCursor(cursor);
                final int column_index = cursor.getColumnIndexOrThrow(column);
                return cursor.getString(column_index);
            }
        } finally {
            if (cursor != null)
                cursor.close();
        }
        return null;
    }

    /**
     * Get a file path from a Uri. This will get the path for Storage Access
     * Framework Documents, as well as the _data field for the MediaStore and
     * other file-based ContentProviders.
     *
     * Callers should check whether the path is local before assuming it
     * represents a local file.
     *
     * @param context The context.
     * @param uri The Uri to query.
     * @see #isLocal(String)
     * @see #getFile(Context, Uri)
     * @author paulburke
     */
    public static String getPath(final Context context, final Uri uri) {
        if (DEBUG)
            Log.d(TAG + " File -",
                    "Authority: " + uri.getAuthority() +
                            ", Fragment: " + uri.getFragment() +
                            ", Port: " + uri.getPort() +
                            ", Query: " + uri.getQuery() +
                            ", Scheme: " + uri.getScheme() +
                            ", Host: " + uri.getHost() +
                            ", Segments: " + uri.getPathSegments().toString()
            );
        final boolean isKitKat = Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT;
        // DocumentProvider
        if (isKitKat && DocumentsContract.isDocumentUri(context, uri)) {
            // LocalStorageProvider
            if (isLocalStorageDocument(uri)) {
                // The path is the id
                return DocumentsContract.getDocumentId(uri);
            }
            // ExternalStorageProvider
            else if (isExternalStorageDocument(uri)) {
                final String docId = DocumentsContract.getDocumentId(uri);
                final String[] split = docId.split(":");
                final String type = split[0];
                if ("primary".equalsIgnoreCase(type)) {
                    return Environment.getExternalStorageDirectory() + "/" + split[1];
                }
                // TODO handle non-primary volumes
            }
            // DownloadsProvider
            else if (isDownloadsDocument(uri)) {
                // NOTE(review): Long.valueOf throws for non-numeric ids (e.g.
                // "raw:/..." on newer Android versions) — verify against the
                // minimum supported API level.
                final String id = DocumentsContract.getDocumentId(uri);
                final Uri contentUri = ContentUris.withAppendedId(
                        Uri.parse("content://downloads/public_downloads"), Long.valueOf(id));
                return getDataColumn(context, contentUri, null, null);
            }
            // MediaProvider
            else if (isMediaDocument(uri)) {
                final String docId = DocumentsContract.getDocumentId(uri);
                final String[] split = docId.split(":");
                final String type = split[0];
                Uri contentUri = null;
                if ("image".equals(type)) {
                    contentUri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI;
                } else if ("video".equals(type)) {
                    contentUri = MediaStore.Video.Media.EXTERNAL_CONTENT_URI;
                } else if ("audio".equals(type)) {
                    contentUri = MediaStore.Audio.Media.EXTERNAL_CONTENT_URI;
                }
                if (contentUri == null) {
                    // FIX: unknown media type; previously a null Uri was passed
                    // to getDataColumn(), crashing inside ContentResolver.query().
                    return null;
                }
                final String selection = "_id=?";
                final String[] selectionArgs = new String[] {
                        split[1]
                };
                return getDataColumn(context, contentUri, selection, selectionArgs);
            }
        }
        // MediaStore (and general)
        else if ("content".equalsIgnoreCase(uri.getScheme())) {
            // Return the remote address
            if (isGooglePhotosUri(uri))
                return uri.getLastPathSegment();
            return getDataColumn(context, uri, null, null);
        }
        // File
        else if ("file".equalsIgnoreCase(uri.getScheme())) {
            return uri.getPath();
        }
        return null;
    }

    /**
     * Convert Uri into File, if possible.
     *
     * @return file A local file that the Uri was pointing to, or null if the
     *         Uri is unsupported or pointed to a remote resource.
     * @see #getPath(Context, Uri)
     * @author paulburke
     */
    public static File getFile(Context context, Uri uri) {
        if (uri != null) {
            String path = getPath(context, uri);
            if (path != null && isLocal(path)) {
                return new File(path);
            }
        }
        return null;
    }

    /**
     * Get the file size in a human-readable string (KB/MB/GB).
     *
     * @param size size in bytes
     * @return formatted size string, e.g. "1.5 MB"
     * @author paulburke
     */
    public static String getReadableFileSize(int size) {
        final int BYTES_IN_KILOBYTES = 1024;
        final DecimalFormat dec = new DecimalFormat("###.#");
        final String KILOBYTES = " KB";
        final String MEGABYTES = " MB";
        final String GIGABYTES = " GB";
        float fileSize = 0;
        String suffix = KILOBYTES;
        if (size > BYTES_IN_KILOBYTES) {
            // FIX: use float division; the original integer division discarded
            // the fractional part the "###.#" format is meant to display.
            fileSize = (float) size / BYTES_IN_KILOBYTES;
            if (fileSize > BYTES_IN_KILOBYTES) {
                fileSize = fileSize / BYTES_IN_KILOBYTES;
                if (fileSize > BYTES_IN_KILOBYTES) {
                    fileSize = fileSize / BYTES_IN_KILOBYTES;
                    suffix = GIGABYTES;
                } else {
                    suffix = MEGABYTES;
                }
            }
        }
        return dec.format(fileSize) + suffix;
    }

    /**
     * Attempt to retrieve the thumbnail of given File from the MediaStore. This
     * should not be called on the UI thread.
     *
     * @param context
     * @param file
     * @return thumbnail bitmap, or null
     * @author paulburke
     */
    public static Bitmap getThumbnail(Context context, File file) {
        return getThumbnail(context, getUri(file), getMimeType(file));
    }

    /**
     * Attempt to retrieve the thumbnail of given Uri from the MediaStore. This
     * should not be called on the UI thread.
     *
     * @param context
     * @param uri
     * @return thumbnail bitmap, or null
     * @author paulburke
     */
    public static Bitmap getThumbnail(Context context, Uri uri) {
        return getThumbnail(context, uri, getMimeType(context, uri));
    }

    /**
     * Attempt to retrieve the thumbnail of given Uri from the MediaStore. This
     * should not be called on the UI thread.
     *
     * @param context
     * @param uri
     * @param mimeType concrete MIME type, e.g. "image/jpeg" or "video/mp4"
     * @return thumbnail bitmap, or null
     * @author paulburke
     */
    public static Bitmap getThumbnail(Context context, Uri uri, String mimeType) {
        if (DEBUG)
            Log.d(TAG, "Attempting to get thumbnail");
        // FIX: check for null before isMediaUri(), which dereferences the uri.
        if (uri == null || !isMediaUri(uri)) {
            Log.e(TAG, "You can only retrieve thumbnails for images and videos.");
            return null;
        }
        Bitmap bm = null;
        final ContentResolver resolver = context.getContentResolver();
        Cursor cursor = null;
        try {
            cursor = resolver.query(uri, null, null, null, null);
            if (cursor != null && cursor.moveToFirst()) {
                final int id = cursor.getInt(0);
                if (DEBUG)
                    Log.d(TAG, "Got thumb ID: " + id);
                if (mimeType.contains("video")) {
                    bm = MediaStore.Video.Thumbnails.getThumbnail(
                            resolver,
                            id,
                            MediaStore.Video.Thumbnails.MINI_KIND,
                            null);
                }
                else if (mimeType.contains("image")) {
                    // FIX: match on "image"; the old check compared against the
                    // wildcard "image/*", which never occurs inside a concrete
                    // MIME type such as "image/jpeg", so image thumbnails were
                    // never returned.
                    bm = MediaStore.Images.Thumbnails.getThumbnail(
                            resolver,
                            id,
                            MediaStore.Images.Thumbnails.MINI_KIND,
                            null);
                }
            }
        } catch (Exception e) {
            if (DEBUG)
                Log.e(TAG, "getThumbnail", e);
        } finally {
            if (cursor != null)
                cursor.close();
        }
        return bm;
    }

    /**
     * File and folder comparator. TODO Expose sorting option method
     *
     * @author paulburke
     */
    public static Comparator<File> sComparator = new Comparator<File>() {
        @Override
        public int compare(File f1, File f2) {
            // Sort alphabetically by lower case, which is much cleaner
            return f1.getName().toLowerCase().compareTo(
                    f2.getName().toLowerCase());
        }
    };

    /**
     * File (not directories) filter.
     *
     * @author paulburke
     */
    public static FileFilter sFileFilter = new FileFilter() {
        @Override
        public boolean accept(File file) {
            final String fileName = file.getName();
            // Return files only (not directories) and skip hidden files
            return file.isFile() && !fileName.startsWith(HIDDEN_PREFIX);
        }
    };

    /**
     * Folder (directories) filter.
     *
     * @author paulburke
     */
    public static FileFilter sDirFilter = new FileFilter() {
        @Override
        public boolean accept(File file) {
            final String fileName = file.getName();
            // Return directories only and skip hidden directories
            return file.isDirectory() && !fileName.startsWith(HIDDEN_PREFIX);
        }
    };

    /**
     * Get the Intent for selecting content to be used in an Intent Chooser.
     *
     * @return The intent for opening a file with Intent.createChooser()
     * @author paulburke
     */
    public static Intent createGetContentIntent() {
        // Implicitly allow the user to select a particular kind of data
        final Intent intent = new Intent(Intent.ACTION_GET_CONTENT);
        // The MIME data type filter
        intent.setType("*/*");
        // Only return URIs that can be opened with ContentResolver
        intent.addCategory(Intent.CATEGORY_OPENABLE);
        return intent;
    }

    /**
     * FileFilter that accepts regular (non-hidden) files whose name ends with
     * one of the given suffixes.
     */
    public static class FileFilterBySuffixs implements FileFilter {
        private String suffixs;

        /**
         * Accepts one or more suffixes. When null, all non-hidden regular
         * files are accepted.
         *
         * @param suffixs suffix alternation pattern, e.g. "3gp|mp3|mp4"
         */
        public FileFilterBySuffixs(String suffixs) {
            this.suffixs = suffixs;
        }

        @Override
        public boolean accept(File file) {
            final String fileName = file.getName();
            if (null == suffixs) {
                // Return files only (not directories) and skip hidden files
                return file.isFile() && !fileName.startsWith(HIDDEN_PREFIX);
            } else {
                return file.isFile() && !fileName.startsWith(HIDDEN_PREFIX) && fileName.matches("^.*?\\.(" + suffixs + ")$");
            }
        }
    }

    /** Replaces the global file filter used when listing directories. */
    public static void setFileFilter(FileFilterBySuffixs filter) {
        if (filter != null) {
            sFileFilter = filter;
        }
    }
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/decoder/hevdecoder/GLPlayView.java
================================================
package com.ksyun.media.ksy265codec.demo.decoder.hevdecoder;
import android.content.Context;
import android.graphics.PixelFormat;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.util.Log;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
/**
 * GLSurfaceView that sets up an OpenGL ES 2.0 EGL context and drives a
 * {@link GLRenderer} (which delegates rendering to native code). Supports an
 * optional translucent (RGBA 8888) surface and configurable depth/stencil
 * buffer sizes.
 */
public class GLPlayView extends GLSurfaceView {
    private static String TAG = "GLPlayView";
    private static final boolean DEBUG = true; // dump all matching EGL configs at startup

    public GLPlayView(Context context) {
        super(context);
        init(false, 0, 0);
    }

    public GLPlayView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(false, 0, 0);
    }

    public GLPlayView(Context context, boolean translucent, int depth, int stencil) {
        super(context);
        init(translucent, depth, stencil);
    }

    private void init(boolean translucent, int depth, int stencil) {
        /* By default, GLSurfaceView() creates a RGB_565 opaque surface.
         * If we want a translucent one, we should change the surface's
         * format here, using PixelFormat.TRANSLUCENT for GL Surfaces
         * is interpreted as any 32-bit surface with alpha by SurfaceFlinger.
         */
        if (translucent) {
            this.getHolder().setFormat(PixelFormat.TRANSLUCENT);
        }
        /* Setup the context factory for 2.0 rendering.
         * See ContextFactory class definition below
         */
        setEGLContextFactory(new ContextFactory());
        /* We need to choose an EGLConfig that matches the format of
         * our surface exactly. This is going to be done in our
         * custom config chooser. See ConfigChooser class definition
         * below.
         */
        setEGLConfigChooser( translucent ?
                new ConfigChooser(8, 8, 8, 8, depth, stencil) :
                new ConfigChooser(5, 6, 5, 0, depth, stencil) );
        /* Set the renderer responsible for frame rendering */
        setRenderer(new GLRenderer());
        // Only redraw when requestRender() is called (frames are pushed by the player).
        setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
    }

    /** Logs (and thereby drains) all pending EGL errors. */
    private static void checkEglError(String prompt, EGL10 egl) {
        int error;
        while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
            Log.e(TAG, String.format("%s: EGL error: 0x%x", prompt, error));
        }
    }

    /** EGLContextFactory that creates an OpenGL ES 2.0 context. */
    private static class ContextFactory implements GLSurfaceView.EGLContextFactory {
        // EGL_CONTEXT_CLIENT_VERSION attribute key (not exposed by the EGL10 API).
        private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098;

        public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) {
            Log.i(TAG, "creating OpenGL ES 2.0 context");
            checkEglError("Before eglCreateContext", egl);
            // Request a client API version 2 context.
            int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
            EGLContext context = egl.eglCreateContext(display, eglConfig, EGL10.EGL_NO_CONTEXT, attrib_list);
            checkEglError("After eglCreateContext", egl);
            return context;
        }

        @Override
        public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context) {
            egl.eglDestroyContext(display, context);
        }
    }

    /**
     * EGLConfigChooser that picks a config with at least the requested
     * depth/stencil sizes and exactly the requested R/G/B/A channel sizes.
     */
    private static class ConfigChooser implements GLSurfaceView.EGLConfigChooser {
        public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) {
            mRedSize = r;
            mGreenSize = g;
            mBlueSize = b;
            mAlphaSize = a;
            mDepthSize = depth;
            mStencilSize = stencil;
        }

        /* This EGL config specification is used to specify 2.0 rendering.
         * We use a minimum size of 4 bits for red/green/blue, but will
         * perform actual matching in chooseConfig() below.
         */
        private static int EGL_OPENGL_ES2_BIT = 4;
        private static int[] s_configAttribs2 =
                {
                        EGL10.EGL_RED_SIZE, 4,
                        EGL10.EGL_GREEN_SIZE, 4,
                        EGL10.EGL_BLUE_SIZE, 4,
                        EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
                        EGL10.EGL_NONE
                };

        @Override
        public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
            /* Get the number of minimally matching EGL configurations
             */
            int[] num_config = new int[1];
            egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config);
            int numConfigs = num_config[0];
            if (numConfigs <= 0) {
                throw new IllegalArgumentException("No configs match configSpec");
            }
            /* Allocate then read the array of minimally matching EGL configs
             */
            EGLConfig[] configs = new EGLConfig[numConfigs];
            egl.eglChooseConfig(display, s_configAttribs2, configs, numConfigs, num_config);
            if (DEBUG) {
                printConfigs(egl, display, configs);
            }
            /* Now return the "best" one
             */
            return chooseConfig(egl, display, configs);
        }

        // Returns the first config with depth/stencil >= requested and an exact
        // R/G/B/A match, or null when no candidate qualifies.
        public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
                EGLConfig[] configs) {
            for(EGLConfig config : configs) {
                int d = findConfigAttrib(egl, display, config,
                        EGL10.EGL_DEPTH_SIZE, 0);
                int s = findConfigAttrib(egl, display, config,
                        EGL10.EGL_STENCIL_SIZE, 0);
                // We need at least mDepthSize and mStencilSize bits
                if (d < mDepthSize || s < mStencilSize)
                    continue;
                // We want an *exact* match for red/green/blue/alpha
                int r = findConfigAttrib(egl, display, config,
                        EGL10.EGL_RED_SIZE, 0);
                int g = findConfigAttrib(egl, display, config,
                        EGL10.EGL_GREEN_SIZE, 0);
                int b = findConfigAttrib(egl, display, config,
                        EGL10.EGL_BLUE_SIZE, 0);
                int a = findConfigAttrib(egl, display, config,
                        EGL10.EGL_ALPHA_SIZE, 0);
                if (r == mRedSize && g == mGreenSize && b == mBlueSize && a == mAlphaSize) {
                    Log.i(TAG, "choosed config: (r,g,b,a) = (" + r + ", " + g + ", " + b + ", " + a + ")");
                    return config;
                }
            }
            return null;
        }

        // Reads a single attribute of the config, falling back to defaultValue
        // when the query fails.
        private int findConfigAttrib(EGL10 egl, EGLDisplay display,
                EGLConfig config, int attribute, int defaultValue) {
            if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) {
                return mValue[0];
            }
            return defaultValue;
        }

        // Debug helper: logs every candidate config.
        private void printConfigs(EGL10 egl, EGLDisplay display,
                EGLConfig[] configs) {
            int numConfigs = configs.length;
            Log.i(TAG, String.format("%d configurations", numConfigs));
            for (int i = 0; i < numConfigs; i++) {
                Log.i(TAG, String.format("Configuration %d:\n", i));
                printConfig(egl, display, configs[i]);
            }
        }

        // Debug helper: logs all attributes of one config. The two arrays below
        // are parallel (attribute key <-> printable name).
        private void printConfig(EGL10 egl, EGLDisplay display,
                EGLConfig config) {
            int[] attributes = {
                    EGL10.EGL_BUFFER_SIZE,
                    EGL10.EGL_ALPHA_SIZE,
                    EGL10.EGL_BLUE_SIZE,
                    EGL10.EGL_GREEN_SIZE,
                    EGL10.EGL_RED_SIZE,
                    EGL10.EGL_DEPTH_SIZE,
                    EGL10.EGL_STENCIL_SIZE,
                    EGL10.EGL_CONFIG_CAVEAT,
                    EGL10.EGL_CONFIG_ID,
                    EGL10.EGL_LEVEL,
                    EGL10.EGL_MAX_PBUFFER_HEIGHT,
                    EGL10.EGL_MAX_PBUFFER_PIXELS,
                    EGL10.EGL_MAX_PBUFFER_WIDTH,
                    EGL10.EGL_NATIVE_RENDERABLE,
                    EGL10.EGL_NATIVE_VISUAL_ID,
                    EGL10.EGL_NATIVE_VISUAL_TYPE,
                    0x3030, // EGL10.EGL_PRESERVED_RESOURCES,
                    EGL10.EGL_SAMPLES,
                    EGL10.EGL_SAMPLE_BUFFERS,
                    EGL10.EGL_SURFACE_TYPE,
                    EGL10.EGL_TRANSPARENT_TYPE,
                    EGL10.EGL_TRANSPARENT_RED_VALUE,
                    EGL10.EGL_TRANSPARENT_GREEN_VALUE,
                    EGL10.EGL_TRANSPARENT_BLUE_VALUE,
                    0x3039, // EGL10.EGL_BIND_TO_TEXTURE_RGB,
                    0x303A, // EGL10.EGL_BIND_TO_TEXTURE_RGBA,
                    0x303B, // EGL10.EGL_MIN_SWAP_INTERVAL,
                    0x303C, // EGL10.EGL_MAX_SWAP_INTERVAL,
                    EGL10.EGL_LUMINANCE_SIZE,
                    EGL10.EGL_ALPHA_MASK_SIZE,
                    EGL10.EGL_COLOR_BUFFER_TYPE,
                    EGL10.EGL_RENDERABLE_TYPE,
                    0x3042 // EGL10.EGL_CONFORMANT
            };
            String[] names = {
                    "EGL_BUFFER_SIZE",
                    "EGL_ALPHA_SIZE",
                    "EGL_BLUE_SIZE",
                    "EGL_GREEN_SIZE",
                    "EGL_RED_SIZE",
                    "EGL_DEPTH_SIZE",
                    "EGL_STENCIL_SIZE",
                    "EGL_CONFIG_CAVEAT",
                    "EGL_CONFIG_ID",
                    "EGL_LEVEL",
                    "EGL_MAX_PBUFFER_HEIGHT",
                    "EGL_MAX_PBUFFER_PIXELS",
                    "EGL_MAX_PBUFFER_WIDTH",
                    "EGL_NATIVE_RENDERABLE",
                    "EGL_NATIVE_VISUAL_ID",
                    "EGL_NATIVE_VISUAL_TYPE",
                    "EGL_PRESERVED_RESOURCES",
                    "EGL_SAMPLES",
                    "EGL_SAMPLE_BUFFERS",
                    "EGL_SURFACE_TYPE",
                    "EGL_TRANSPARENT_TYPE",
                    "EGL_TRANSPARENT_RED_VALUE",
                    "EGL_TRANSPARENT_GREEN_VALUE",
                    "EGL_TRANSPARENT_BLUE_VALUE",
                    "EGL_BIND_TO_TEXTURE_RGB",
                    "EGL_BIND_TO_TEXTURE_RGBA",
                    "EGL_MIN_SWAP_INTERVAL",
                    "EGL_MAX_SWAP_INTERVAL",
                    "EGL_LUMINANCE_SIZE",
                    "EGL_ALPHA_MASK_SIZE",
                    "EGL_COLOR_BUFFER_TYPE",
                    "EGL_RENDERABLE_TYPE",
                    "EGL_CONFORMANT"
            };
            int[] value = new int[1];
            for (int i = 0; i < attributes.length; i++) {
                int attribute = attributes[i];
                String name = names[i];
                if ( egl.eglGetConfigAttrib(display, config, attribute, value)) {
                    Log.i(TAG, String.format("  %s: %d\n", name, value[0]));
                } else {
                    // Log.w(TAG, String.format("  %s: failed\n", name));
                    // Drain the error queue for attributes this EGL version lacks.
                    while (egl.eglGetError() != EGL10.EGL_SUCCESS);
                }
            }
        }

        // Subclasses can adjust these values:
        protected int mRedSize;
        protected int mGreenSize;
        protected int mBlueSize;
        protected int mAlphaSize;
        protected int mDepthSize;
        protected int mStencilSize;
        private int[] mValue = new int[1]; // scratch buffer for attribute queries
    }
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/decoder/hevdecoder/GLRenderer.java
================================================
package com.ksyun.media.ksy265codec.demo.decoder.hevdecoder;
import android.opengl.GLSurfaceView;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
/**
* @author shengbin
*
*/
/**
 * GLSurfaceView.Renderer that forwards all rendering callbacks to native
 * code in the "jniplayer" library.
 *
 * @author shengbin
 */
public class GLRenderer implements GLSurfaceView.Renderer {
    // Native hooks implemented in libjniplayer.so.
    private native int nativeInit();
    private native int nativeSetup(int w, int h);
    private native void nativeDrawFrame();

    @Override
    public void onDrawFrame(GL10 arg0) {
        nativeDrawFrame();
    }

    @Override
    public void onSurfaceChanged(GL10 arg0, int w, int h) {
        // Pass the new surface dimensions down to native code.
        nativeSetup(w, h);
    }

    @Override
    public void onSurfaceCreated(GL10 arg0, EGLConfig arg1) {
        nativeInit();
    }

    static {
        // Load the library providing the native methods above.
        System.loadLibrary("jniplayer");
    }
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/decoder/hevdecoder/NativeMediaPlayer.java
================================================
package com.ksyun.media.ksy265codec.demo.decoder.hevdecoder;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.opengl.GLSurfaceView;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import android.view.Surface;
import android.view.Surface.OutOfResourcesException;
import android.view.SurfaceHolder;
import android.widget.TextView;
import android.content.Context;
import java.io.File;
import java.io.FileFilter;
import java.util.regex.Pattern;
import com.ksyun.media.ksy265codec.demo.ui.Settings;
/**
 * Java-side wrapper around the native HEVC media player. Rendering is done
 * either via a GLSurfaceView or by drawing decoded frames onto a Surface
 * canvas (see drawFrame).
 */
public class NativeMediaPlayer {
    // Event codes used by postEventFromNative().
    public static final int MEDIA_INFO_FRAMERATE_VIDEO = 900;
    public static final int MEDIA_INFO_END_OF_FILE = 909;
    private int mNativeContext; // accessed by native methods
    private Surface mSurface;             // target surface for canvas rendering
    private GLSurfaceView mGLSurfaceView; // target view for GL rendering
    private TextView mInfoTextView;       // overlay used to show playback stats
    private Bitmap mFrameBitmap = null;   // reusable bitmap holding the decoded frame
    private int mDisplayWidth = 0;
    private int mDisplayHeight = 0;
    // Playback statistics; -1 means "not available yet".
    // NOTE(review): presumably updated from native code — confirm against the JNI layer.
    private int mDisplayFPS = -1;
    private int mDisplayAvgFPS = -1; // fixed-point, divided by 4096.0 when displayed
    private int mDecodeFPS = -1;
    private int mBitrateVideo = -1;
    private int mBitrateAudio = -1;
    private boolean mShowInfo = true;   // draw the stats overlay on the canvas
    private boolean mShowInfoGL = true; // one-shot stats update in the GL path
    private String mInfo = "";
    private OnCompletionListener mListener = null;
    private final Handler mMainHandler;
    private boolean mNeedSetup = true;  // recompute display geometry before next frame

    /** Callback invoked when playback completes. */
    public interface OnCompletionListener {
        public void onCompletion(int frame_count);
    }

    public void setCompletionListener(OnCompletionListener listener) {
        this.mListener = listener;
    }

    public NativeMediaPlayer() {
        mMainHandler = new Handler(Looper.getMainLooper());
    }
    /** Initializes the native player context (forwards to native_init()). */
    public void init() {
        native_init();
    }
public void setDisplay(SurfaceHolder sh) {
if (sh != null) {
mSurface = sh.getSurface();
} else
mSurface = null;
}
    /** Sets the views used by the OpenGL rendering path (frame view + stats overlay). */
    public void setGLDisplay(GLSurfaceView glView, TextView tv) {
        mGLSurfaceView = glView;
        mInfoTextView = tv;
    }

    /** Sets the on-screen size; display geometry is recomputed on the next frame. */
    public void setDisplaySize(int w, int h) {
        mDisplayHeight = h;
        mDisplayWidth = w;
        mNeedSetup = true;
    }
/**
* Gets the number of cores available in this device, across all processors.
* Requires: Ability to peruse the filesystem at "/sys/devices/system/cpu"
*
* @return The number of cores, or 1 if failed to get result
*/
private int getNumCores() {
// Private Class to display only CPU devices in the directory listing
class CpuFilter implements FileFilter {
@Override
public boolean accept(File pathname) {
// Check if filename is "cpu", followed by a single digit number
if (Pattern.matches("cpu[0-9]+", pathname.getName())) {
return true;
}
return false;
}
}
try {
// Get directory containing CPU info
File dir = new File("/sys/devices/system/cpu/");
// Filter to only list the devices we care about
File[] files = dir.listFiles(new CpuFilter());
// Return the number of cores (virtual CPU devices)
return files.length;
} catch (Exception e) {
// Default to return 1 core
return 1;
}
}
public int prepare(Context context,int type, int disableRender) {
// android maintains the preferences for us, so use directly
int num = Settings.getInstance().getDecoderSettings().getThreads();
if (0 == num) {
int cores = getNumCores();// Runtime.getRuntime().availableProcessors();
if (cores <= 1)
num = 1;
else
num = (cores < 5) ? ((cores * 3 + 1) / 2) : 8;
Log.d("NativeMediaPlayer", cores + " cores detected! use " + num
+ " threads.\n");
}
float fps = Settings.getInstance().getDecoderSettings().getFPS();
return native_prepare(context,type, disableRender, num, fps);
}
    /** Variant of prepare() with an explicit decoder thread count and target fps. */
    public int prepare(Context context, int type, int disableRender,
            int threadNum, float fps) {
        return native_prepare(context, type, disableRender, threadNum, fps);
    }
    /**
     * Pre-allocates the frame bitmap when the video size is already known,
     * then starts native playback.
     */
    public int start() {
        int w = getVideoWidth(), h = getVideoHeight();
        if (w > 0 && h > 0)
            mFrameBitmap = Bitmap.createBitmap(w, h, Config.RGB_565);
        return native_start();
    }
    /** Stops native playback and releases the frame bitmap. */
    public void stop() {
        native_stop();
        if (mFrameBitmap != null) {
            mFrameBitmap.recycle();
            mFrameBitmap = null;
        }
    }
    /** Pauses native playback. */
    public void pause() {
        native_pause();
    }

    /** Resumes native playback after pause(). */
    public void go() {
        native_go();
    }

    /** Seeking is not implemented. */
    public void seekTo(int msec) {
    }
public void setShowInfo(boolean show) {
mShowInfo = show;
if (mShowInfo == false && mInfoTextView != null) {
mInfoTextView.setText("");
}
}
    /**
     * Computes the letterboxed display size: the video is scaled down (never
     * up) to fit inside the screen while keeping its aspect ratio, with each
     * scaled dimension rounded down to a multiple of 4.
     */
    private void setupDisplay() {
        int videoWidth = getVideoWidth(), videoHeight = getVideoHeight();
        int screenWidth, screenHeight, displayWidth = 0, displayHeight = 0;
        screenHeight = mDisplayHeight;
        screenWidth = mDisplayWidth;
        displayWidth = videoWidth;
        displayHeight = videoHeight;
        if (displayHeight > screenHeight) {
            // Too tall: clamp the height and derive the width from the aspect ratio.
            displayHeight = screenHeight;
            displayWidth = displayHeight * videoWidth / videoHeight;
            displayWidth -= displayWidth % 4;
        }
        if (displayWidth > screenWidth) {
            // Still too wide: clamp the width and derive the height.
            displayWidth = screenWidth;
            displayHeight = displayWidth * videoHeight / videoWidth;
            displayHeight -= displayHeight % 4;
        }
        setDisplaySize(displayWidth, displayHeight);
    }
/**
* Called from native code
*/
/**
 * Called from native code after a frame has been decoded. Renders the
 * frame into the surface (software path) and optionally draws a text
 * overlay with video size, FPS and bitrate statistics.
 *
 * @param width  decoded frame width in pixels
 * @param height decoded frame height in pixels
 * @return always 0; the return value is not used by the native side
 */
public int drawFrame(int width, int height) {
    // Software rendering is used; the GL branch is kept for reference.
    boolean useGL = false;
    if (useGL) {
        mGLSurfaceView.requestRender();
        if (mShowInfoGL) {
            mInfo = "";
            Paint paint = new Paint();
            paint.setColor(Color.WHITE);
            paint.setTextSize(40);
            if (width > 0) {
                mInfo += ("Video Size:" + width + "x" + height);
            }
            if (mDisplayFPS > 0) {
                mInfo += (" Display FPS:" + mDisplayFPS);
            }
            if (mDisplayAvgFPS > 0) {
                // mDisplayAvgFPS arrives from native in 12.12 fixed point.
                mInfo += String.format(" Average FPS:%.2f",
                        mDisplayAvgFPS / 4096.0);
            }
            // TextView must be updated on the UI thread.
            mInfoTextView.post(new Runnable() {
                @Override
                public void run() {
                    mInfoTextView.setText(mInfo);
                }
            });
            mShowInfoGL = false;
        }
        return 0;
    }
    if (mSurface == null) {
        return 0;
    }
    if (mNeedSetup) {
        setupDisplay();
        mNeedSetup = false;
    }
    // Draw without OpenGL.
    Canvas canvas = null;
    try {
        canvas = mSurface.lockCanvas(null);
    } catch (IllegalArgumentException e) {
        e.printStackTrace();
    } catch (OutOfResourcesException e) {
        e.printStackTrace();
    }
    if (canvas == null) {
        // BUGFIX: lockCanvas() can fail (exception above) leaving canvas
        // null; the original code dereferenced it and crashed with an NPE.
        return 0;
    }
    canvas.drawColor(Color.BLACK);
    // BUGFIX: compare the height as well, so a resolution change that keeps
    // the width (e.g. 640x360 -> 640x480) also recreates the frame bitmap.
    if (mFrameBitmap == null || mFrameBitmap.getWidth() != width
            || mFrameBitmap.getHeight() != height) {
        // Video size has changed; create a new frame bitmap accordingly.
        mFrameBitmap = Bitmap.createBitmap(width, height, Config.RGB_565);
    }
    // Native side fills the bitmap with the decoded frame (RGB_565).
    renderBitmap(mFrameBitmap);
    if (mDisplayWidth != mFrameBitmap.getWidth()) {
        // Scale the frame to the display size and center it on the canvas.
        Matrix matrix = new Matrix();
        float scaleWidth = ((float) mDisplayWidth) / width;
        float scaleHeight = ((float) mDisplayHeight) / height;
        matrix.postScale(scaleWidth, scaleHeight);
        matrix.postTranslate((canvas.getWidth() - mDisplayWidth) / 2,
                (canvas.getHeight() - mDisplayHeight) / 2);
        if (mFrameBitmap.getWidth() < 640) {
            // Small bitmap: bilinear filtering is affordable here.
            Paint paint = new Paint();
            paint.setFilterBitmap(true);
            canvas.drawBitmap(mFrameBitmap, matrix, paint);
        } else {
            canvas.drawBitmap(mFrameBitmap, matrix, null);
        }
    } else {
        // No scaling needed; just center the bitmap.
        canvas.drawBitmap(mFrameBitmap,
                (canvas.getWidth() - mDisplayWidth) / 2,
                (canvas.getHeight() - mDisplayHeight) / 2, null);
    }
    if (mShowInfo) {
        Paint paint = new Paint();
        paint.setColor(Color.WHITE);
        paint.setTextSize(40);
        String info = "";
        if (width > 0) {
            info += ("Video Size:" + width + "x" + height);
        }
        if (mDisplayFPS > 0) {
            info += (" Display FPS:" + mDisplayFPS);
        }
        if (mDisplayAvgFPS > 0) {
            // 12.12 fixed-point average FPS from native.
            info += String.format(" Average FPS:%.2f",
                    mDisplayAvgFPS / 4096.0);
        }
        if (mDecodeFPS > 0) {
            info += (" Decode FPS:" + mDecodeFPS);
        }
        canvas.drawText(info, 20, 60, paint);
        info = "";
        if (mBitrateVideo > 0) {
            info += "Bitrate: video " + Integer.toString(mBitrateVideo);
        }
        if (mBitrateAudio > 0) {
            info += ", audio " + Integer.toString(mBitrateAudio);
        }
        if (mBitrateVideo > 0 || mBitrateAudio > 0) {
            info += ", total "
                    + Integer.toString(mBitrateVideo + mBitrateAudio)
                    + " kbit/s";
        }
        canvas.drawText(info, 20, 100, paint);
    }
    mSurface.unlockCanvasAndPost(canvas);
    return 0;
}
/**
* Called from native code when an interesting event happens.
*/
/**
 * Called from native code when an interesting event happens.
 *
 * @param what event id (one of the MEDIA_INFO_* constants)
 * @param arg1 event-specific payload (FPS, or decoded frame count)
 * @param arg2 event-specific payload (fixed-point average FPS)
 */
public void postEventFromNative(int what, int arg1, int arg2) {
    if (what == MEDIA_INFO_FRAMERATE_VIDEO) {
        mDisplayFPS = arg1;
        mDisplayAvgFPS = arg2;
        if (mShowInfo) {
            // Ask the GL path to refresh its overlay on the next frame.
            mShowInfoGL = true;
        }
    } else if (what == MEDIA_INFO_END_OF_FILE) {
        final int decodedFrames = arg1;
        // Deliver the completion callback on the main thread.
        mMainHandler.post(new Runnable() {
            @Override
            public void run() {
                if (mListener != null) {
                    mListener.onCompletion(decodedFrames);
                }
            }
        });
    }
}
/** Sets the file path where the decoder writes its YUV output (native side). */
public void setOutput(String outputFileName) {
native_set_output(outputFileName);
}
// ---- Native interface, implemented in the jniplayer library ----
private native void native_init();
// disableRender: 1 disables on-screen rendering (decode-only benchmark mode).
private native int native_prepare(Context context,int decoderType, int disableRender, int threadNum, float renderFPS);
private native int native_start();
private native int native_stop();
private native int native_pause();
private native int native_go();
private native int native_seekTo(int msec);
private native static int hasNeon();
public native int setDataSource(String path);
public native int getVideoWidth();
public native int getVideoHeight();
public native boolean isPlaying();
public native int getCurrentPosition();
public native float getDuration();
public native float getDecodeTime();
public native float getDecodeFPS();
// Fills the bitmap with the most recently decoded frame.
private native static void renderBitmap(Bitmap bitmap);
public native void native_set_output(String output);
public native String getVersion();
// Load the decoder and JNI player libraries when the class is first used.
static {
System.loadLibrary("lenthevcdec");
System.loadLibrary("jniplayer");
}
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/encoder/Encoder.java
================================================
package com.ksyun.media.ksy265codec.demo.encoder;
import android.content.Context;
import com.ksyun.media.ksy265codec.demo.ui.EncoderSettings;
/**
* Created by sujia on 2017/3/29.
*/
/**
 * Thin facade over {@link EncoderWrapper}: forwards every call to the
 * wrapper when it exists and returns a harmless default otherwise.
 */
public class Encoder {
    private EncoderWrapper mWrapper;

    public Encoder(EncoderSettings settings) {
        mWrapper = new EncoderWrapper(settings);
    }

    /** Opens the input file; returns -1 on failure. */
    public int open(String path) {
        return mWrapper == null ? -1 : mWrapper.open(path);
    }

    /** Runs the encode; returns -1 on failure. */
    public int encode(Context context) {
        return mWrapper == null ? -1 : mWrapper.encode(context);
    }

    /** Number of frames encoded so far (0 when no wrapper). */
    public int getEncodedFrameNum() {
        return mWrapper == null ? 0 : mWrapper.getEncodedFrameNum();
    }

    /** Measured encoding speed in frames per second (0 when no wrapper). */
    public float getEncodeFPS() {
        return mWrapper == null ? 0 : mWrapper.getEncodeFPS();
    }

    /** Input-size / output-size ratio (1 when no wrapper). */
    public float getCompressRatio() {
        return mWrapper == null ? 1 : mWrapper.getCompressRatio();
    }

    /** Wall-clock encoding time in seconds (0 when no wrapper). */
    public float getEncodeTime() {
        return mWrapper == null ? 0 : mWrapper.getEncodeTime();
    }

    /** PSNR reported by the native encoder (0 when no wrapper). */
    public double getPSNR() {
        return mWrapper == null ? 0 : mWrapper.getPSNR();
    }

    /** Encoder version string ("0.1" when no wrapper). */
    public String getVersion() {
        return mWrapper == null ? "0.1" : mWrapper.getVersion();
    }

    /** Output bitrate in kbit/s (0 when no wrapper). */
    public float getEncodeBitrate() {
        return mWrapper == null ? 0 : mWrapper.getEncodeBitrate();
    }

    /** Encoded clip duration in seconds (0 when no wrapper). */
    public float getDuration() {
        return mWrapper == null ? 0 : mWrapper.getDuration();
    }

    /** Path of the input YUV file, or null when no wrapper. */
    public String getInputFilePath() {
        return mWrapper == null ? null : mWrapper.getInputFilePath();
    }

    /** Path of the encoded output file, or null when no wrapper. */
    public String getOutputFilePath() {
        return mWrapper == null ? null : mWrapper.getOutputFilePath();
    }
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/encoder/EncoderWrapper.java
================================================
package com.ksyun.media.ksy265codec.demo.encoder;
import android.content.Context;
import com.ksyun.media.ksy265codec.demo.ui.EncoderSettings;
import java.io.File;
/**
* Created by sujia on 2017/3/29.
*/
/**
 * JNI bridge to the native encoders (KSC265 and x264). Owns the native
 * encoder instance handle and derives the output file path from the
 * input path.
 */
public class EncoderWrapper {
    private String mInputFilePath;
    private String mOutputFilePath;
    private EncoderSettings mSettings;
    // Opaque handle to the native encoder instance.
    private long mInstance = 0;

    public EncoderWrapper(EncoderSettings settings) {
        this.mSettings = settings;
        mInstance = native_init();
    }

    /**
     * Opens a raw YUV input file.
     *
     * @param path input file path; must end with ".yuv"
     * @return native result code, or -1 if the path is null or not a .yuv file
     */
    public int open(String path) {
        if (path != null && path.endsWith(".yuv")) {
            mInputFilePath = path;
            return native_open(mInstance, mInputFilePath);
        }
        return -1;
    }

    /** Derives the output path from the input path by swapping the extension. */
    private String buildOutputPath(String extension) {
        int dotIndex = mInputFilePath.lastIndexOf(".");
        return mInputFilePath.substring(0, dotIndex) + extension;
    }

    /**
     * Encodes the previously opened input with the encoder selected in the
     * settings.
     *
     * @return native result code, or -1 for an unknown encoder name
     */
    public int encode(Context context) {
        String encoderName = mSettings.getEncoderName();
        if (encoderName.equals(EncoderSettings.Encoders[0])) { // KSC265
            mOutputFilePath = buildOutputPath(".265");
            return native_ksy265_encoder(context, mInstance, mOutputFilePath,
                    mSettings.getProfile(), mSettings.getDelay(),
                    mSettings.getWidth(), mSettings.getHeight(),
                    mSettings.getFps(), mSettings.getBitrate(),
                    mSettings.getThreads());
        } else if (encoderName.equals(EncoderSettings.Encoders[1])) { // x264
            mOutputFilePath = buildOutputPath(".264");
            return native_x264_encode(mInstance, mOutputFilePath,
                    mSettings.getProfile(), mSettings.getDelay(),
                    mSettings.getWidth(), mSettings.getHeight(),
                    mSettings.getFps(), mSettings.getBitrate(),
                    mSettings.getThreads());
        }
        return -1;
    }

    public String getInputFilePath() {
        return mInputFilePath;
    }

    public String getOutputFilePath() {
        return mOutputFilePath;
    }

    /** Measured encoding speed in frames per second. */
    public float getEncodeFPS() {
        return native_get_real_fps(mInstance);
    }

    /** Number of frames encoded so far. */
    public int getEncodedFrameNum() {
        return native_get_encoded_frame_num(mInstance);
    }

    /**
     * Ratio of input file size to output file size, or 0 when either file
     * is missing/empty.
     */
    public float getCompressRatio() {
        if (mInputFilePath == null || mOutputFilePath == null) {
            return 0;
        }
        long inFileLength = new File(mInputFilePath).length();
        long outFileLength = new File(mOutputFilePath).length();
        if (outFileLength == 0) {
            return 0;
        }
        // BUGFIX: the original performed long/long integer division, which
        // truncated the ratio to a whole number before widening to float.
        return inFileLength / (float) outFileLength;
    }

    /** Wall-clock encoding time in seconds. */
    public float getEncodeTime() {
        return native_get_real_time(mInstance);
    }

    /** PSNR reported by the native encoder. */
    public double getPSNR() {
        return native_get_psnr(mInstance);
    }

    /** Duration of the encoded clip in seconds (frames / fps), 0 when fps is invalid. */
    public float getDuration() {
        float fps = mSettings.getFps();
        // Guard against a zero/negative frame rate (division by zero -> NaN/Inf).
        return fps > 0 ? getEncodedFrameNum() / fps : 0;
    }

    /** Output bitrate in kbit/s computed from output file size and duration. */
    public float getEncodeBitrate() {
        float encodeTime = getDuration();
        if (mOutputFilePath != null && encodeTime != 0) {
            long outFileLength = new File(mOutputFilePath).length();
            return (outFileLength * 8) / encodeTime / 1000;
        }
        return 0;
    }

    /** Version string of the currently selected encoder library. */
    public String getVersion() {
        if (mSettings.getEncoderName().equals(EncoderSettings.Encoders[0])) { // KSC265
            return native_get_ksy265_version();
        } else if (mSettings.getEncoderName().equals(EncoderSettings.Encoders[1])) { // x264
            return native_get_x264_version();
        }
        return "0.1";
    }

    // ---- Native interface, implemented in native-lib ----
    public native long native_init();
    public native int native_open(long ptr, String path);
    public native int native_x264_encode(long ptr, String path,
                                         String profile, String delay,
                                         int width, int height,
                                         Float fps, int bitrate, int threads);
    public native int native_ksy265_encoder(Context context, long ptr, String outputFilePath,
                                            String profile, String delay,
                                            int width, int height,
                                            Float fps, int bitrate, int threads);
    public native float native_get_real_fps(long ptr);
    public native int native_get_encoded_frame_num(long ptr);
    public native String native_get_x264_version();
    public native String native_get_ksy265_version();
    public native float native_get_real_time(long ptr);
    public native float native_get_psnr(long ptr);

    static {
        System.loadLibrary("native-lib");
    }
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/BaseFragment.java
================================================
package com.ksyun.media.ksy265codec.demo.ui;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.text.method.ScrollingMovementMethod;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import com.ipaulpro.afilechooser.FileChooserActivity;
import com.ipaulpro.afilechooser.utils.FileUtils;
import com.ksyun.media.ksy265codec.demo.R;
import static android.app.Activity.RESULT_OK;
import static android.content.ContentValues.TAG;
/**
* Created by sujia on 2017/3/27.
*/
/**
 * Common UI scaffolding for the encoder and decoder fragments: wires up
 * the settings/help/browse/start buttons and the file-chooser round trip.
 * Subclasses override the on*Clicked() hooks.
 */
public class BaseFragment extends Fragment {
    private static final int REQUEST_CODE = 6384; // onActivityResult request code

    protected Button mSettingButton;
    protected Button mHelpButton;
    protected Button mNavButton;
    protected Button mStartButton;
    private ButtonObserver mButtonObserver;
    protected EditText mFilePathEditTxt;
    protected String mInputFilePath;
    protected String mOutputFilePath;
    protected TextView mTitleText;
    protected TextView mInfoText;
    protected SurfaceView mSurfaceView;

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        // BUGFIX: pass the container (with attachToRoot=false) so the inflated
        // root honors the layout_* params defined in XML; inflating with a
        // null parent silently drops them.
        View view = inflater.inflate(R.layout.fragment_item, container, false);
        mTitleText = (TextView) view.findViewById(R.id.title_txt);
        mInfoText = (TextView) view.findViewById(R.id.info_txt);
        mInfoText.setMovementMethod(ScrollingMovementMethod.getInstance());
        mButtonObserver = new ButtonObserver();
        mSettingButton = (Button) view.findViewById(R.id.settings);
        mSettingButton.setOnClickListener(mButtonObserver);
        mHelpButton = (Button) view.findViewById(R.id.help);
        mHelpButton.setOnClickListener(mButtonObserver);
        mNavButton = (Button) view.findViewById(R.id.nav);
        mNavButton.setOnClickListener(mButtonObserver);
        mStartButton = (Button) view.findViewById(R.id.start);
        mStartButton.setOnClickListener(mButtonObserver);
        mFilePathEditTxt = (EditText) view.findViewById(R.id.filepath);
        mSurfaceView = (SurfaceView) view.findViewById(R.id.surface_view);
        mSurfaceView.setVisibility(View.GONE);
        return view;
    }

    /** Routes the four toolbar buttons to the overridable hooks. */
    private class ButtonObserver implements View.OnClickListener {
        @Override
        public void onClick(View view) {
            switch (view.getId()) {
                case R.id.settings:
                    onSettingsClicked();
                    break;
                case R.id.help:
                    onHelpClicked();
                    break;
                case R.id.nav:
                    onNavClicked();
                    break;
                case R.id.start:
                    onStartClicked();
                    break;
                default:
                    break;
            }
        }
    }

    /** Hook: open the settings dialog. Default does nothing. */
    protected void onSettingsClicked() {
    }

    /** Hook: open the help dialog. Default does nothing. */
    protected void onHelpClicked() {
    }

    /** Hook: browse for an input file. Default opens the file chooser. */
    protected void onNavClicked() {
        showChooser();
    }

    /** Launches the file chooser, filtered to YUV and raw bitstream suffixes. */
    private void showChooser() {
        // Set file filter.
        FileUtils.setFileFilter(new FileUtils.FileFilterBySuffixs("yuv|264|h264|avc|265|hevc|h265|hm91|hm10|bit|hvc"));
        Intent intent = new Intent(getContext(), FileChooserActivity.class);
        startActivityForResult(intent, REQUEST_CODE);
    }

    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        switch (requestCode) {
            case REQUEST_CODE:
                // If the file selection was successful.
                if (resultCode == RESULT_OK && data != null) {
                    // Get the URI of the selected file.
                    final Uri uri = data.getData();
                    // BUGFIX: the chooser may return an intent without data;
                    // the original dereferenced uri unconditionally (NPE).
                    if (uri != null) {
                        Log.i(TAG, "Uri = " + uri.toString());
                        try {
                            // Get the file path from the URI.
                            mInputFilePath = FileUtils.getPath(getContext(), uri);
                            mFilePathEditTxt.setText(mInputFilePath);
                        } catch (Exception e) {
                            Log.e(TAG, "File select error: " + e);
                        }
                    }
                }
                break;
        }
        super.onActivityResult(requestCode, resultCode, data);
    }

    /** Hook: start the encode/decode. Default does nothing. */
    protected void onStartClicked() {
    }

    /** Enables or disables all four toolbar buttons at once. */
    protected void toggleView(boolean enable) {
        mSettingButton.setEnabled(enable);
        mNavButton.setEnabled(enable);
        mStartButton.setEnabled(enable);
        mHelpButton.setEnabled(enable);
    }
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/DecoderFragment.java
================================================
package com.ksyun.media.ksy265codec.demo.ui;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;
import com.ksyun.media.ksy265codec.demo.decoder.hevdecoder.NativeMediaPlayer;
import java.io.File;
import java.io.FileFilter;
import java.util.regex.Pattern;
/**
* Created by sujia on 2017/3/27.
*/
/**
 * Decode screen: configures and drives a NativeMediaPlayer using the
 * current DecoderSettings, renders into the BaseFragment surface, and
 * prints a per-run summary when playback completes.
 */
public class DecoderFragment extends BaseFragment implements DecoderSettingsFragment.OnSettingsChangeListener,
SurfaceHolder.Callback, NativeMediaPlayer.OnCompletionListener {
// Current decoder configuration (decoder, threads, render FPS, YUV output).
private DecoderSettings mSettings = null;
private static final String TAG = "DecoderFragment";
// Native player that performs decoding and rendering.
private NativeMediaPlayer mPlayer;
// True once prepare() succeeded for the current input file.
private boolean mPrepared = false;
// Last known surface dimensions, forwarded to the player as display size.
private int mWidth;
private int mHeight;
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View view = super.onCreateView(inflater, container, savedInstanceState);
mSettings = Settings.getInstance().getDecoderSettings();
updateUI();
mSurfaceView.getHolder().addCallback(this);
mPlayer = new NativeMediaPlayer();
mPlayer.setCompletionListener(this);
return view;
}
@Override
protected void onSettingsClicked() {
// Create an instance of the dialog fragment and show it.
DecoderSettingsFragment settingFragment = new DecoderSettingsFragment();
settingFragment.setListener(this);
settingFragment.show(this.getFragmentManager(), "setting dialog");
}
@Override
public void onSettingsChanged(DecoderSettings settings) {
this.mSettings = settings;
updateUI();
}
@Override
protected void onHelpClicked() {
// Create an instance of the dialog fragment and show it.
HelpFragment settingFragment = new HelpFragment();
settingFragment.setType(1);
settingFragment.show(getFragmentManager(), "decode help dialog");
}
// Refreshes the title and hides the surface when rendering is off (FPS == -1).
private void updateUI() {
mTitleText.setText( mSettings.getDecoderName() + "解码器");
if (mSettings.getFPS() != -1) {
mSurfaceView.setVisibility(View.VISIBLE);
} else {
mSurfaceView.setVisibility(View.GONE);
}
}
/**
 * Gets the number of cores available in this device, across all processors.
 * Requires: Ability to peruse the filesystem at "/sys/devices/system/cpu"
 *
 * @return The number of cores, or 1 if failed to get result
 */
private int getNumCores() {
// Private Class to display only CPU devices in the directory listing
class CpuFilter implements FileFilter {
@Override
public boolean accept(File pathname) {
// Check if filename is "cpu", followed by a single digit number
if (Pattern.matches("cpu[0-9]+", pathname.getName())) {
return true;
}
return false;
}
}
try {
// Get directory containing CPU info
File dir = new File("/sys/devices/system/cpu/");
// Filter to only list the devices we care about
File[] files = dir.listFiles(new CpuFilter());
// Return the number of cores (virtual CPU devices)
return files.length;
} catch (Exception e) {
// Default to return 1 core
return 1;
}
}
// Validates settings/input, prepares the native player and starts playback.
@Override
protected void onStartClicked() {
if (mSettings == null) {
Toast.makeText(getContext(), "解码参数未配置",
Toast.LENGTH_SHORT).show();
return;
}
if (mInputFilePath == null) {
Toast.makeText(getContext(), "请选择输入文件",
Toast.LENGTH_SHORT).show();
return;
}
mPrepared = false;
mPlayer.init();
int ret = mPlayer.setDataSource(mInputFilePath);
if (ret != 0) {
Toast.makeText(getContext(),
"请检查输入文件格式",
Toast.LENGTH_SHORT).show();
return;
}
mPlayer.setDisplay(mSurfaceView.getHolder());
mPlayer.setDisplaySize(mWidth, mHeight);
int num = mSettings.getThreads();
if (0 == num) {
int cores = getNumCores();// Runtime.getRuntime().availableProcessors();
if (cores <= 1)
num = 1;
else if(mSettings.decoderIndex == 1) { // lenthevcdec
num = (cores < 5) ? ((cores * 3 + 1) / 2) : 8;
}
// NOTE(review): for the KSC265 decoder (decoderIndex == 0) with more
// than one core, num stays 0 here — presumably the native side treats
// 0 as "auto"; TODO confirm against the JNI implementation.
Log.d(TAG, cores + " cores detected! use " + num
+ " threads.\n");
}
//0: ksc265
//1: lenthevcdec
int decoderType = mSettings.decoderIndex == 0 ? 0 : 1;
// FPS == -1 means "render off": pass disableRender=1 to the player.
ret = mPlayer.prepare(getContext(),decoderType, mSettings.getFPS() == -1 ? 1 : 0,
num, mSettings.getFPS());
if ( ret < 0 ) {
Toast.makeText(getContext(),
"打开文件" + mInputFilePath + "失败,返回值: " + ret,
Toast.LENGTH_SHORT).show();
return;
} else {
mPrepared = true;
}
if (mSettings.enableYUVOutput) {
// Derive the YUV dump path from the input name, tagged by decoder.
int dotIndex = mInputFilePath.lastIndexOf(".");
String inputFileName = mInputFilePath.substring(0, dotIndex);
mOutputFilePath = inputFileName + (mSettings.decoderIndex == 0 ? ".ksc" : ".lent" ) +".yuv";
mPlayer.setOutput(mOutputFilePath);
}
toggleView(false);
if (mPrepared) {
mPlayer.start();
}
}
//////////////////////////////////////////
//implements SurfaceHolder.Callback
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i1, int i2) {
// i1/i2 are the new surface width/height.
mWidth = i1;
mHeight = i2;
if (mPlayer != null) {
mPlayer.setDisplaySize(mWidth, mHeight);
}
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
mPlayer.stop();
}
// end of: implements SurfaceHolder.Callback
/////////////////////////////////////////////
// Playback finished: print the run summary and re-enable the UI.
@Override
public void onCompletion(int frame_count) {
updateInfo(frame_count);
mPlayer.stop();
toggleView(true);
}
// Prepends a formatted summary of the finished run to the info view.
// Two variants: with and without the "-o <yuv>" output argument.
private void updateInfo(int frame_num) {
String last_info = mInfoText.getText().toString();
String info;
if (mSettings.enableYUVOutput) {
info = String.format("解码器版本: %s \n" +
"\n" +
"\n" +
"解码参数: %s -b %s -o %s -threads %d \n" +
"\n" +
"\n" +
"分辨率: %d * %d \n" +
"线程数: %s \n" +
"解码时间: %.2f s\n" +
"解码帧数 %d \n" +
"解码速度 %.2f f/s\n" +
"渲染帧率 %s \n",
mPlayer.getVersion(), mSettings.getDecoderName(),
mInputFilePath, mOutputFilePath, mSettings.getThreads(),
mPlayer.getVideoWidth(), mPlayer.getVideoHeight(),
mSettings.getThreadsStr(), mPlayer.getDecodeTime(),
frame_num, mPlayer.getDecodeFPS(), mSettings.getFPSStr());
} else {
info = String.format("解码器版本: %s \n" +
"\n" +
"\n" +
"解码参数: %s -b %s -threads %d \n" +
"\n" +
"\n" +
"分辨率: %d * %d \n" +
"线程数: %s \n" +
"解码时间: %.2f s\n" +
"解码帧数 %d \n" +
"解码速度 %.2f f/s\n" +
"渲染帧率 %s \n",
mPlayer.getVersion(), mSettings.getDecoderName(),
mInputFilePath, mSettings.getThreads(),
mPlayer.getVideoWidth(), mPlayer.getVideoHeight(),
mSettings.getThreadsStr(), mPlayer.getDecodeTime(),
frame_num, mPlayer.getDecodeFPS(), mSettings.getFPSStr());
}
mInfoText.setText(info +
"\n" +
"\n" +
last_info);
}
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/DecoderSettings.java
================================================
package com.ksyun.media.ksy265codec.demo.ui;
import android.content.SharedPreferences;
/**
* Created by sujia on 2017/3/28.
*/
/**
 * Decoder configuration backed by SharedPreferences: decoder choice,
 * thread count, render frame rate and YUV dump flag. The *Index fields
 * index into the corresponding option arrays.
 */
public class DecoderSettings {
    public final static String DECODER_SETTINGS_DECODER = "decoder_settings_decoder";
    public final static String DECODER_SETTINGS_THREADS = "decoder_settings_threads";
    public final static String DECODER_SETTINGS_FPS = "decoder_settings_fps";
    public final static String DECODER_SETTINGS_RENDER = "decoder_settings_render";
    public final static String DECODER_SETTINGS_OUTPUT = "decoder_settings_output";

    public final static String[] Decoders = new String[] {"KSC265", "lenthevcdec"};
    public final static String[] Threads = new String[] {"0 (auto)", "1", "2",
            "3", "4", "5", "6", "7", "8", "9", "10"};
    // Keep getFPS() in sync with this array.
    public final static String[] FPS = new String[] {"0 (fullspeed)", "24",
            "-1 (off)"};

    public int decoderIndex;
    public int threadsIndex;
    public int fpsIndex; // render frame rate selection (index into FPS)
    public boolean enableYUVOutput;

    /** Default settings: KSC265, auto threads, full-speed render, no YUV dump. */
    public DecoderSettings() {
        this.decoderIndex = 0;
        this.threadsIndex = 0;
        this.fpsIndex = 0;
        this.enableYUVOutput = false;
    }

    /** Restores previously saved settings, falling back to the defaults. */
    public DecoderSettings(SharedPreferences sharedPreferences) {
        this.decoderIndex = sharedPreferences.getInt(DECODER_SETTINGS_DECODER, 0);
        this.threadsIndex = sharedPreferences.getInt(DECODER_SETTINGS_THREADS, 0);
        this.fpsIndex = sharedPreferences.getInt(DECODER_SETTINGS_FPS, 0);
        this.enableYUVOutput = sharedPreferences.getBoolean(DECODER_SETTINGS_OUTPUT, false);
    }

    /** Display name of the selected decoder, or "unknown" for a bad index. */
    public String getDecoderName() {
        // BUGFIX: also reject negative indices; fixed the "unknow" typo.
        if (decoderIndex >= 0 && decoderIndex < Decoders.length) {
            return Decoders[decoderIndex];
        }
        return "unknown";
    }

    /** Thread count; index 0 means "auto" (resolved by the caller). */
    public int getThreads() {
        return threadsIndex;
    }

    /** Human-readable thread setting, or "" for a bad index. */
    public String getThreadsStr() {
        if (threadsIndex >= 0 && threadsIndex < Threads.length) {
            return Threads[threadsIndex];
        }
        return "";
    }

    /**
     * Render frame rate: 0 = full speed, 24 = capped, -1 = rendering off.
     * Must stay consistent with the FPS option array.
     */
    public int getFPS() {
        switch (fpsIndex) {
            case 0:
                return 0;
            case 1:
                return 24;
            case 2:
                return -1;
            default:
                return 0;
        }
    }

    /** Human-readable frame-rate setting, or "" for a bad index. */
    public String getFPSStr() {
        if (fpsIndex >= 0 && fpsIndex < FPS.length) {
            return FPS[fpsIndex];
        }
        return "";
    }
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/DecoderSettingsFragment.java
================================================
package com.ksyun.media.ksy265codec.demo.ui;
import android.app.Dialog;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v4.app.DialogFragment;
import android.support.v7.app.AlertDialog;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.RadioButton;
import android.widget.Spinner;
import com.ksyun.media.ksy265codec.demo.R;
/**
* Created by sujia on 2017/3/28.
*/
/**
 * Dialog that edits the DecoderSettings (decoder, threads, render FPS,
 * YUV output). Persists via Settings and notifies the registered
 * OnSettingsChangeListener on confirmation.
 */
public class DecoderSettingsFragment extends DialogFragment {
    private Spinner mDecoderSpinner;
    private Spinner mThreadSpinner;
    private Spinner mFpsSpinner;
    private Button mButton;
    private RadioButton mEnableOutputButton;
    private RadioButton mDisableOutputButton;
    private DecoderSettings mSettings;

    /** Callback fired when the user confirms new settings. */
    public interface OnSettingsChangeListener {
        public void onSettingsChanged(DecoderSettings settings);
    }

    // Use this instance of the interface to deliver action events.
    OnSettingsChangeListener mListener;

    public DecoderSettingsFragment() {
        mSettings = Settings.getInstance().getDecoderSettings();
    }

    public void setListener(OnSettingsChangeListener listener) {
        mListener = listener;
    }

    @NonNull
    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
        // Get the layout inflater.
        LayoutInflater inflater = getActivity().getLayoutInflater();
        // Inflate and set the layout for the dialog; pass null as the parent
        // view because it is going into the dialog layout.
        View view = inflater.inflate(R.layout.decoder_settings, null);
        mSettings = Settings.getInstance().getDecoderSettings();
        initView(view);
        builder.setView(view);
        return builder.create();
    }

    /** Populates the spinners/radio buttons and wires the confirm button. */
    private void initView(View view) {
        ArrayAdapter<String> decodersAdapter = new ArrayAdapter<>(getContext(),
                android.R.layout.simple_spinner_item, DecoderSettings.Decoders);
        decodersAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
        mDecoderSpinner = (Spinner) view.findViewById(R.id.decoder_settings_decoder_spinner);
        mDecoderSpinner.setAdapter(decodersAdapter);
        // BUGFIX: the original used "<= length", admitting the invalid index
        // equal to the array length; also reject negative indices.
        if (mSettings.decoderIndex >= 0
                && mSettings.decoderIndex < DecoderSettings.Decoders.length) {
            mDecoderSpinner.setSelection(mSettings.decoderIndex);
        }
        ArrayAdapter<String> threadsAdapter = new ArrayAdapter<>(getContext(),
                android.R.layout.simple_spinner_item, DecoderSettings.Threads);
        threadsAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
        mThreadSpinner = (Spinner) view.findViewById(R.id.decoder_settings_threads_spinner);
        mThreadSpinner.setAdapter(threadsAdapter);
        if (mSettings.threadsIndex >= 0
                && mSettings.threadsIndex < DecoderSettings.Threads.length) {
            mThreadSpinner.setSelection(mSettings.threadsIndex);
        }
        ArrayAdapter<String> fpsAdapter = new ArrayAdapter<>(getContext(),
                android.R.layout.simple_spinner_item, DecoderSettings.FPS);
        fpsAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
        mFpsSpinner = (Spinner) view.findViewById(R.id.decoder_settings_fps_spinner);
        mFpsSpinner.setAdapter(fpsAdapter);
        if (mSettings.fpsIndex >= 0
                && mSettings.fpsIndex < DecoderSettings.FPS.length) {
            mFpsSpinner.setSelection(mSettings.fpsIndex);
        }
        mEnableOutputButton = (RadioButton) view.findViewById(R.id.decoder_settings_enable_yuv_output);
        mDisableOutputButton = (RadioButton) view.findViewById(R.id.decoder_settings_disable_yuv_output);
        if (mSettings.enableYUVOutput) {
            mEnableOutputButton.setChecked(true);
            mDisableOutputButton.setChecked(false);
        } else {
            mEnableOutputButton.setChecked(false);
            mDisableOutputButton.setChecked(true);
        }
        mButton = (Button) view.findViewById(R.id.decoder_settings_sure);
        mButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Read the UI state back into the settings, persist, notify.
                mSettings.decoderIndex = mDecoderSpinner.getSelectedItemPosition();
                mSettings.threadsIndex = mThreadSpinner.getSelectedItemPosition();
                mSettings.fpsIndex = mFpsSpinner.getSelectedItemPosition();
                mSettings.enableYUVOutput = mEnableOutputButton.isChecked();
                Settings.getInstance().saveDecoderSettings(mSettings);
                if (mListener != null) {
                    mListener.onSettingsChanged(mSettings);
                }
                dismiss();
            }
        });
    }
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/EncoderFragment.java
================================================
package com.ksyun.media.ksy265codec.demo.ui;
import android.os.AsyncTask;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;
import com.ksyun.media.ksy265codec.demo.encoder.Encoder;
/**
* Created by sujia on 2017/3/27.
*/
/**
 * Encode screen: runs the selected encoder (KSC265 or x264) over a YUV
 * input file on a background AsyncTask and prints a per-run summary.
 */
public class EncoderFragment extends BaseFragment implements EncoderSettingsFragment.OnSettingsChangeListener {
    private EncoderSettings mSettings = null;

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View view = super.onCreateView(inflater, container, savedInstanceState);
        mSettings = Settings.getInstance().getEncoderSettings();
        mTitleText.setText( mSettings.getEncoderName() + "编码器");
        return view;
    }

    @Override
    protected void onSettingsClicked() {
        // Create an instance of the dialog fragment and show it.
        EncoderSettingsFragment settingFragment = new EncoderSettingsFragment();
        settingFragment.setListener(this);
        settingFragment.show(this.getFragmentManager(), "encoder setting dialog");
    }

    @Override
    public void onSettingsChanged(EncoderSettings settings) {
        mSettings = settings;
        mTitleText.setText( mSettings.getEncoderName() + "编码器");
    }

    @Override
    protected void onHelpClicked() {
        // Create an instance of the dialog fragment and show it.
        HelpFragment settingFragment = new HelpFragment();
        settingFragment.setType(0);
        settingFragment.show(getFragmentManager(), "encode help dialog");
    }

    @Override
    protected void onStartClicked() {
        if (mInputFilePath == null) {
            Toast.makeText(getContext(), "请选择yuv文件",
                    Toast.LENGTH_SHORT).show();
            return;
        }
        EncodeTask task = new EncodeTask();
        task.execute();
    }

    /** Runs the encode off the UI thread with a modal progress dialog. */
    // BUGFIX: the original extended the raw AsyncTask type, so the typed
    // doInBackground(Void...) was an overload rather than an override of the
    // erased abstract doInBackground(Object...). Parameterize explicitly.
    private class EncodeTask extends AsyncTask<Void, Void, Void> {
        private ProgressDialogFragment mProgressDialog;
        private Encoder mEncoder;

        @Override
        protected void onPreExecute() {
            mEncoder = new Encoder(mSettings);
            // Create the progress dialog here and show it.
            mProgressDialog = new ProgressDialogFragment();
            mProgressDialog.show(getFragmentManager(), "show progress dialog");
            toggleView(false);
        }

        @Override
        protected Void doInBackground(Void... params) {
            encodeYUV(mEncoder);
            return null;
        }

        @Override
        protected void onPostExecute(Void result) {
            super.onPostExecute(result);
            // Dismiss the dialog and re-enable the UI.
            toggleView(true);
            mProgressDialog.dismissAllowingStateLoss();
            updateInfo(mEncoder);
        }
    }

    /**
     * Parses a user-entered positive integer.
     *
     * @return the parsed value, or -1 when the string is not a positive int
     */
    private static int parsePositiveInt(String value) {
        try {
            int parsed = Integer.parseInt(value);
            return parsed > 0 ? parsed : -1;
        } catch (NumberFormatException e) {
            // BUGFIX: user-entered settings could crash the background
            // thread with an uncaught NumberFormatException.
            return -1;
        }
    }

    /** Opens, validates and encodes the selected YUV file (background thread). */
    private void encodeYUV(Encoder encoder) {
        if(encoder.open(mInputFilePath) < 0) {
            getActivity().runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    Toast.makeText(getContext(),
                            "打开yuv文件错误",
                            Toast.LENGTH_SHORT).show();
                }
            });
            return;
        }
        if(mSettings.getHeight() == 0 ||
                mSettings.getWidth() == 0 ||
                parsePositiveInt(mSettings.bitrate) <= 0 ||
                parsePositiveInt(mSettings.fps) <= 0) {
            getActivity().runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    Toast.makeText(getContext(),
                            "请检查编码参数设置",
                            Toast.LENGTH_SHORT).show();
                }
            });
            return;
        }
        if(encoder.encode(getContext()) < 0) {
            getActivity().runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    Toast.makeText(getContext(),
                            "编码失败,请检查输入文件格式",
                            Toast.LENGTH_SHORT).show();
                }
            });
            return;
        }
    }

    /** Prepends a formatted summary of the finished run to the info view. */
    private void updateInfo(Encoder encoder) {
        String last_info = mInfoText.getText().toString();
        String info;
        if (mSettings.getEncoderName().equals(EncoderSettings.Encoders[0])) {//KSC265
            info = String.format("编码器版本: %s \n " +
                            " \n" +
                            "编码参数: %s -i %s -preset %s -latency %s" +
                            " -wdt %d -hgt %d -fr %.2f -threads %d -br %d -b %s \n" +
                            " \n" +
                            "编码时间: %.2f s \n" +
                            "编码帧数: %d \n" +
                            "编码速度: %.2f f/s \n" +
                            "压缩比: %.2f \n" +
                            "PSNR: %.2f \n" +
                            "\n " +
                            "视频信息 \n " +
                            "码率: %.2f kbps \n" +
                            "分辨率: %s \n" +
                            "帧率: %.2f f/s\n" +
                            "文件总时长: %.2f s\n",
                    encoder.getVersion(), mSettings.getEncoderName(),
                    encoder.getInputFilePath(), mSettings.getProfile(), mSettings.getDelay(),
                    mSettings.getWidth(), mSettings.getHeight(), mSettings.getFps(),
                    mSettings.getThreads(), mSettings.getBitrate(), encoder.getOutputFilePath(),
                    encoder.getEncodeTime(), encoder.getEncodedFrameNum(),
                    encoder.getEncodeFPS(), encoder.getCompressRatio(),
                    encoder.getPSNR(),
                    encoder.getEncodeBitrate(), mSettings.getResolution(),
                    mSettings.getFps(), encoder.getDuration());
        } else {//x264
            // Map the latency preset to the equivalent x264 CLI flags.
            String delayShow;
            if (mSettings.getDelay().equals(EncoderSettings.Delays[0])) {//zerolatency
                delayShow = "--bframes 0 --tune zerolatency";
            } else if(mSettings.getDelay().equals(EncoderSettings.Delays[1])) {//livestreaming
                delayShow = "--bframes 3";
            } else {//offline
                delayShow = "--bframes 7";
            }
            info = String.format("编码器版本: %s \n " +
                            " \n" +
                            "编码参数: %s -i %s --preset %s %s " +
                            "--input-res %dx%d --fps %.2f --threads %d --bitrate %d " +
                            "-o %s \n" +
                            " \n" +
                            "编码时间: %.2f s \n" +
                            "编码帧数: %d \n" +
                            "编码速度: %.2f f/s \n" +
                            "压缩比: %.2f \n" +
                            "PSNR: %.2f \n" +
                            "\n " +
                            "视频信息 \n" +
                            "码率: %.2f kbps \n" +
                            "分辨率: %s \n" +
                            "帧率: %.2f f/s\n" +
                            "文件总时长: %.2f s\n",
                    encoder.getVersion(), mSettings.getEncoderName(),
                    encoder.getInputFilePath(), mSettings.getProfile(), delayShow,
                    mSettings.getWidth(), mSettings.getHeight(), mSettings.getFps(),
                    mSettings.getThreads(), mSettings.getBitrate(), encoder.getOutputFilePath(),
                    encoder.getEncodeTime(), encoder.getEncodedFrameNum(),
                    encoder.getEncodeFPS(), encoder.getCompressRatio(),
                    encoder.getPSNR(),
                    encoder.getEncodeBitrate(), mSettings.getResolution(),
                    mSettings.getFps(), encoder.getDuration());
        }
        mInfoText.setText( info +
                "\n" +
                "\n" +
                "\n" +
                last_info);
    }
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/EncoderSettings.java
================================================
package com.ksyun.media.ksy265codec.demo.ui;
import android.content.SharedPreferences;
import android.util.Log;
/**
 * Encoder settings model: which encoder, preset, delay mode, resolution,
 * frame rate, thread count and bitrate to use. Values can be loaded from
 * and saved to SharedPreferences (see {@link Settings}).
 *
 * Created by sujia on 2017/3/28.
 */
public class EncoderSettings {
    public final static String TAG = "EncoderSettings";

    // SharedPreferences keys.
    public final static String ENCODER_SETTINGS_ENCODER = "encoder_settings_encoder";
    public final static String ENCODER_SETTINGS_PROFILE = "encoder_settings_profile";
    public final static String ENCODER_SETTINGS_DELAY = "encoder_settings_delay";
    public final static String ENCODER_SETTINGS_RESOLUTION = "encoder_settings_resolution";
    public final static String ENCODER_SETTINGS_RESOLUTION_IDX = "encoder_settings_resolution_idx";
    public final static String ENCODER_SETTINGS_FPS = "encoder_settings_fps";
    public final static String ENCODER_SETTINGS_THREADS = "encoder_settings_threads";
    public final static String ENCODER_SETTINGS_BITRATE = "encoder_settings_bitrate";

    // Choice lists shown in the settings dialog spinners.
    public final static String[] Encoders = new String[] {"KSC265", "x264"};
    public final static String[] Profiles = new String[] {"superfast", "veryfast", "fast",
            "medium", "slow", "veryslow", "placebo"};
    public final static String[] Delays = new String[] {"zerolatency", "livestreaming",
            "offline"};
    // The last entry ("自定义") selects a user-typed custom resolution.
    public final static String[] Resolutions = new String [] {"1280*720", "960*540", "640*360",
            "640*480", "360*640", "368*640", "自定义"};

    // Spinner indices into the arrays above.
    public int encoderIndex;
    public int profileIndex;
    public int delayIndex;
    public int resIndex;
    // Numeric values are kept as the raw user-entered strings.
    public String bitrate;      // kbps
    public String resolution;   // "width*height"
    public String fps;
    public String threads;

    /** Defaults: KSC265, veryfast, offline, 1280*720 @ 15 fps, 1 thread, 500 kbps. */
    public EncoderSettings() {
        this.encoderIndex = 0;   // ksc265
        this.profileIndex = 1;   // veryfast
        this.delayIndex = 2;     // offline
        this.resolution = Resolutions[0];
        this.resIndex = 0;       // 1280*720
        this.fps = "15";
        this.threads = "1";
        this.bitrate = "500";
    }

    /** Restores settings previously stored by {@link Settings#saveEncoderSettings}. */
    public EncoderSettings(SharedPreferences sharedPreferences) {
        this.encoderIndex = sharedPreferences.getInt(ENCODER_SETTINGS_ENCODER, 0);
        this.profileIndex = sharedPreferences.getInt(ENCODER_SETTINGS_PROFILE, 0);
        this.delayIndex = sharedPreferences.getInt(ENCODER_SETTINGS_DELAY, 0);
        this.resIndex = sharedPreferences.getInt(ENCODER_SETTINGS_RESOLUTION_IDX, 0);
        this.resolution = sharedPreferences.getString(ENCODER_SETTINGS_RESOLUTION, Resolutions[0]);
        this.fps = sharedPreferences.getString(ENCODER_SETTINGS_FPS, "15");
        this.threads = sharedPreferences.getString(ENCODER_SETTINGS_THREADS, "1");
        this.bitrate = sharedPreferences.getString(ENCODER_SETTINGS_BITRATE, "500");
    }

    // True when idx is a valid index into arr (fix: also rejects negatives).
    private static boolean inRange(int idx, String[] arr) {
        return idx >= 0 && idx < arr.length;
    }

    // Parses s as an int, falling back to def on malformed user input
    // (fix: a bad string previously crashed with NumberFormatException).
    private static int parseIntSafe(String s, int def) {
        try {
            return Integer.parseInt(s.trim());
        } catch (Exception e) {
            Log.e(TAG, "invalid number: " + s);
            return def;
        }
    }

    /** Display name of the selected encoder. */
    public String getEncoderName() {
        if (inRange(encoderIndex, Encoders)) {
            return Encoders[encoderIndex];
        } else {
            return "unknown"; // fix: was misspelled "unknow"
        }
    }

    /** Selected preset name, or "" when the index is out of range. */
    public String getProfile() {
        if (inRange(profileIndex, Profiles)) {
            return Profiles[profileIndex];
        } else {
            return "";
        }
    }

    /** Selected delay-mode name, or "" when the index is out of range. */
    public String getDelay() {
        if (inRange(delayIndex, Delays)) {
            return Delays[delayIndex];
        } else {
            return "";
        }
    }

    /** Target bitrate in kbps; 500 when the stored string is malformed. */
    public int getBitrate() {
        return parseIntSafe(bitrate, 500);
    }

    /**
     * Returns the effective "width*height" string: a preset when one is
     * selected, otherwise the user-entered custom resolution.
     */
    public String getResolution() {
        if (resIndex >= 0 && resIndex < Resolutions.length - 1) {
            return Resolutions[resIndex];
        } else {
            return resolution;
        }
    }

    /** Width parsed from {@link #getResolution()}; 0 (with an error log) when malformed. */
    public int getWidth() {
        return parseDimension(0);
    }

    /** Height parsed from {@link #getResolution()}; 0 (with an error log) when malformed. */
    public int getHeight() {
        return parseDimension(1);
    }

    // Parses component `part` (0 = width, 1 = height) out of "width*height".
    private int parseDimension(int part) {
        String[] res = getResolution().split("\\*");
        if (res.length == 2) {
            try {
                return Integer.parseInt(res[part].trim());
            } catch (NumberFormatException e) {
                // fall through to the error log below
            }
        }
        Log.e(TAG, "分辨率解析错误,格式必须为 宽*高");
        return 0;
    }

    /** Frame rate; 15 when the stored string is malformed. */
    public Float getFps() {
        try {
            return Float.parseFloat(fps.trim());
        } catch (Exception e) {
            Log.e(TAG, "invalid fps: " + fps);
            return 15f;
        }
    }

    /** Encoder thread count; 1 when the stored string is malformed. */
    public int getThreads() {
        return parseIntSafe(threads, 1);
    }
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/EncoderSettingsFragment.java
================================================
package com.ksyun.media.ksy265codec.demo.ui;
import android.app.Dialog;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v4.app.DialogFragment;
import android.support.v7.app.AlertDialog;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Spinner;
import com.ksyun.media.ksy265codec.demo.R;
/**
* Created by sujia on 2017/3/28.
*/
public class EncoderSettingsFragment extends DialogFragment {
private Spinner mEncoderSpinner;
private Spinner mProfileSpinner;
private Spinner mDelaySpinner;
private EditText mResulutionEditTxt;
private Spinner mResSpinner;
private EditText mFpsEditTxt;
private EditText mThreadsEditTxt;
private EditText mBitrateEditTxt;
private Button mButton;
private EncoderSettings mSettings;
public interface OnSettingsChangeListener {
public void onSettingsChanged(EncoderSettings settings);
}
// Use this instance of the interface to deliver action events
OnSettingsChangeListener mListener;
public EncoderSettingsFragment() {
mSettings = Settings.getInstance().getEncoderSettings();
}
public void setListener(OnSettingsChangeListener listener) {
mListener = listener;
}
@NonNull
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
// Get the layout inflater
LayoutInflater inflater = getActivity().getLayoutInflater();
// Inflate and set the layout for the dialog
// Pass null as the parent view because its going in the dialog layout
View view = inflater.inflate(R.layout.encoder_settings, null);
mSettings = Settings.getInstance().getEncoderSettings();
initView(view);
builder.setView(view);
return builder.create();
}
private void initView(View view) {
ArrayAdapter encodersAdapter = new ArrayAdapter<>(getContext(),
android.R.layout.simple_spinner_item, EncoderSettings.Encoders);
encodersAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
mEncoderSpinner = (Spinner) view.findViewById(R.id.encoder_settings_encoder_spinner);
mEncoderSpinner.setAdapter(encodersAdapter);
if (mSettings.encoderIndex <= EncoderSettings.Encoders.length) {
mEncoderSpinner.setSelection(mSettings.encoderIndex);
}
ArrayAdapter profilesAdapter = new ArrayAdapter<>(getContext(),
android.R.layout.simple_spinner_item, EncoderSettings.Profiles);
profilesAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
mProfileSpinner = (Spinner) view.findViewById(R.id.encoder_settings_profile_spinner);
mProfileSpinner.setAdapter(profilesAdapter);
if (mSettings.profileIndex <= EncoderSettings.Profiles.length) {
mProfileSpinner.setSelection(mSettings.profileIndex);
}
ArrayAdapter delayAdapter = new ArrayAdapter<>(getContext(),
android.R.layout.simple_spinner_item, EncoderSettings.Delays);
delayAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
mDelaySpinner = (Spinner) view.findViewById(R.id.encoder_settings_delay_spinner);
mDelaySpinner.setAdapter(delayAdapter);
if (mSettings.delayIndex <= EncoderSettings.Delays.length) {
mDelaySpinner.setSelection(mSettings.delayIndex);
}
mResulutionEditTxt = (EditText) view.findViewById(R.id.encoder_settings_resolution);
mResulutionEditTxt.setText(mSettings.resolution);
mResulutionEditTxt.setVisibility(View.VISIBLE);
if (mSettings.resIndex == EncoderSettings.Resolutions.length -1) {
mResulutionEditTxt.setVisibility(View.VISIBLE);
mResulutionEditTxt.requestFocus();
} else {
mResulutionEditTxt.setVisibility(View.GONE);
}
ArrayAdapter resAdapter = new ArrayAdapter<>(getContext(),
android.R.layout.simple_spinner_item, EncoderSettings.Resolutions);
resAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
mResSpinner = (Spinner) view.findViewById(R.id.encoder_settings_resolution_spinner);
mResSpinner.setAdapter(resAdapter);
if (mSettings.resIndex <= EncoderSettings.Resolutions.length -1) {
mResSpinner.setSelection(mSettings.resIndex);
}
mResSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView> parent, View view, int position, long id) {
if (mResulutionEditTxt == null) {
return;
}
if (position == EncoderSettings.Resolutions.length -1) {
mResulutionEditTxt.setVisibility(View.VISIBLE);
mResulutionEditTxt.requestFocus();
} else {
mResulutionEditTxt.setVisibility(View.GONE);
}
}
@Override
public void onNothingSelected(AdapterView> parent) {
}
});
mFpsEditTxt = (EditText) view.findViewById(R.id.encoder_settings_fps);
mFpsEditTxt.setText(mSettings.fps);
mThreadsEditTxt = (EditText) view.findViewById(R.id.encoder_settings_threads);
mThreadsEditTxt.setText(mSettings.threads);
mBitrateEditTxt = (EditText) view.findViewById(R.id.encoder_settings_bitrate);
mBitrateEditTxt.setText(mSettings.bitrate);
mButton = (Button) view.findViewById(R.id.encoder_settings_sure);
mButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
mSettings.encoderIndex = mEncoderSpinner.getSelectedItemPosition();
mSettings.profileIndex = mProfileSpinner.getSelectedItemPosition();
mSettings.delayIndex = mDelaySpinner.getSelectedItemPosition();
mSettings.resolution = mResulutionEditTxt.getText().toString();
mSettings.resIndex = mResSpinner.getSelectedItemPosition();
mSettings.fps = mFpsEditTxt.getText().toString();
mSettings.threads = mThreadsEditTxt.getText().toString();
mSettings.bitrate = mBitrateEditTxt.getText().toString();
Settings.getInstance().saveEncoderSettings(mSettings);
if (mListener != null) {
mListener.onSettingsChanged(mSettings);
}
dismiss();
}
});
}
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/HelpFragment.java
================================================
package com.ksyun.media.ksy265codec.demo.ui;
import android.app.Dialog;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v4.app.DialogFragment;
import android.support.v7.app.AlertDialog;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.TextView;
import com.ksyun.media.ksy265codec.demo.R;
/**
 * Dialog showing usage help for either the encoder page or the decoder page.
 *
 * Created by sujia on 2017/3/28.
 */
public class HelpFragment extends DialogFragment {
    // Which help text to display: 0 = encode, 1 = decode.
    private int type;

    public HelpFragment() {
    }

    public void setType(int type) {
        this.type = type;
    }

    @NonNull
    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
        // Parent is null because the dialog supplies the layout root itself.
        View content = getActivity().getLayoutInflater().inflate(R.layout.help, null);
        TextView helpText = (TextView) content.findViewById(R.id.help_info);
        if (type == 0) {
            helpText.setText(R.string.encode_help_info);
        } else if (type == 1) {
            helpText.setText(R.string.decode_help_info);
        }
        builder.setView(content);
        return builder.create();
    }
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/KSY265CodecDemoApp.java
================================================
package com.ksyun.media.ksy265codec.demo.ui;
import android.app.Application;
/**
 * Application subclass: initializes the global Settings singleton once at
 * process start so fragments can read persisted encoder/decoder settings.
 *
 * Created by sujia on 2017/3/28.
 */
public class KSY265CodecDemoApp extends Application {
@Override
public void onCreate() {
super.onCreate();
// Hand the app context to the Settings singleton so it can open SharedPreferences.
Settings.getInstance().init(this);
}
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/MainActivity.java
================================================
package com.ksyun.media.ksy265codec.demo.ui;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentTabHost;
import android.support.v4.view.ViewPager;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TabHost;
import android.widget.TabWidget;
import android.widget.TextView;
import com.ksyun.media.ksy265codec.demo.R;
import java.util.ArrayList;
import java.util.List;
public class MainActivity extends FragmentActivity implements
ViewPager.OnPageChangeListener, TabHost.OnTabChangeListener {
private FragmentTabHost mTabHost;
private LayoutInflater mLayoutInflater;
private Class fragmentArray[] = { EncoderFragment.class, DecoderFragment.class };
private int tab_imageViewArray[] = { R.drawable.tab_home_btn, R.drawable.tab_home_btn };
private String tab_textViewArray[] = { "编码", "解码"};
private List list = new ArrayList();
private ViewPager mViewPager;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
initView();//初始化控件
initPage();//初始化页面
}
// 控件初始化控件
private void initView() {
mViewPager = (ViewPager) findViewById(R.id.pager);
/*实现OnPageChangeListener接口,目的是监听Tab选项卡的变化,然后通知ViewPager适配器切换界面*/
/*简单来说,是为了让ViewPager滑动的时候能够带着底部菜单联动*/
mViewPager.addOnPageChangeListener(this);//设置页面切换时的监听器
mLayoutInflater = LayoutInflater.from(this);//加载布局管理器
/*实例化FragmentTabHost对象并进行绑定*/
mTabHost = (FragmentTabHost) findViewById(android.R.id.tabhost);//绑定tahost
mTabHost.setup(this, getSupportFragmentManager(), R.id.pager);//绑定viewpager
/*实现setOnTabChangedListener接口,目的是为监听界面切换),然后实现TabHost里面图片文字的选中状态切换*/
/*简单来说,是为了当点击下面菜单时,上面的ViewPager能滑动到对应的Fragment*/
mTabHost.setOnTabChangedListener(this);
int count = tab_textViewArray.length;
/*新建Tabspec选项卡并设置Tab菜单栏的内容和绑定对应的Fragment*/
for (int i = 0; i < count; i++) {
// 给每个Tab按钮设置标签、图标和文字
TabHost.TabSpec tabSpec = mTabHost.newTabSpec(tab_textViewArray[i])
.setIndicator(getTabItemView(i));
// 将Tab按钮添加进Tab选项卡中,并绑定Fragment
mTabHost.addTab(tabSpec, fragmentArray[i], null);
mTabHost.setTag(i);
mTabHost.getTabWidget().getChildAt(i)
.setBackgroundResource(R.drawable.selector_tab_background);//设置Tab被选中的时候颜色改变
}
}
/*初始化Fragment*/
private void initPage() {
EncoderFragment fragment1 = new EncoderFragment();
DecoderFragment fragment2 = new DecoderFragment();
list.add(fragment1);
list.add(fragment2);
//绑定Fragment适配器
mViewPager.setAdapter(new MyFragmentAdapter(getSupportFragmentManager(), list));
mTabHost.getTabWidget().setDividerDrawable(null);
}
private View getTabItemView(int i) {
//将xml布局转换为view对象
View view = mLayoutInflater.inflate(R.layout.tab_content, null);
//利用view对象,找到布局中的组件,并设置内容,然后返回视图
ImageView mTab_ImageView = (ImageView) view
.findViewById(R.id.tab_imageview);
TextView mTab_TextView = (TextView) view.findViewById(R.id.tab_textview);
mTab_ImageView.setBackgroundResource(tab_imageViewArray[i]);
mTab_TextView.setText(tab_textViewArray[i]);
return view;
}
@Override
public void onPageScrollStateChanged(int arg0) {
}//arg0 ==1的时候表示正在滑动,arg0==2的时候表示滑动完毕了,arg0==0的时候表示什么都没做,就是停在那。
@Override
public void onPageScrolled(int arg0, float arg1, int arg2) {
}//表示在前一个页面滑动到后一个页面的时候,在前一个页面滑动前调用的方法
@Override
public void onPageSelected(int arg0) {//arg0是表示你当前选中的页面位置Postion,这事件是在你页面跳转完毕的时候调用的。
TabWidget widget = mTabHost.getTabWidget();
int oldFocusability = widget.getDescendantFocusability();
widget.setDescendantFocusability(ViewGroup.FOCUS_BLOCK_DESCENDANTS);//设置View覆盖子类控件而直接获得焦点
mTabHost.setCurrentTab(arg0);//根据位置Postion设置当前的Tab
widget.setDescendantFocusability(oldFocusability);//设置取消分割线
}
@Override
public void onTabChanged(String tabId) {//Tab改变的时候调用
int position = mTabHost.getCurrentTab();
mViewPager.setCurrentItem(position);//把选中的Tab的位置赋给适配器,让它控制页面切换
}
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/MyFragmentAdapter.java
================================================
package com.ksyun.media.ksy265codec.demo.ui;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;
import java.util.List;
/**
 * Pager adapter that maps each page position to a Fragment from a fixed list.
 *
 * Created by Carson_Ho on 16/5/23.
 */
public class MyFragmentAdapter extends FragmentPagerAdapter {
    // Pages backing the ViewPager, in display order (fix: was a raw List).
    private final List<Fragment> list;

    public MyFragmentAdapter(FragmentManager fm, List<Fragment> list) {
        super(fm);
        this.list = list;
    }

    /** Returns the Fragment shown at the given position. */
    @Override
    public Fragment getItem(int position) {
        return list.get(position);
    }

    /** Number of pages. */
    @Override
    public int getCount() {
        return list.size();
    }
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/ProgressDialogFragment.java
================================================
package com.ksyun.media.ksy265codec.demo.ui;
import android.app.Dialog;
import android.app.ProgressDialog;
import android.os.Bundle;
import android.support.v4.app.DialogFragment;
/**
 * Non-cancelable indeterminate spinner dialog shown while encoding runs.
 *
 * Created by sujia on 2017/4/7.
 */
public class ProgressDialogFragment extends DialogFragment {

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // The user must wait for encoding to finish; disallow dismissal.
        setCancelable(false);
    }

    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        ProgressDialog progress = new ProgressDialog(getActivity());
        progress.setTitle("请等待");
        progress.setMessage("编码中...");
        progress.setIndeterminate(true);
        progress.setProgressStyle(ProgressDialog.STYLE_SPINNER);
        return progress;
    }
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/Settings.java
================================================
package com.ksyun.media.ksy265codec.demo.ui;
import android.content.Context;
import android.content.SharedPreferences;
import android.util.Log;
/**
 * Process-wide settings store backed by SharedPreferences. Call init()
 * once (done in KSY265CodecDemoApp.onCreate) before reading settings;
 * otherwise in-memory defaults are returned and nothing is persisted.
 *
 * Created by sujia on 2017/3/28.
 */
public class Settings {
    private static final String TAG = "settings";
    private static final boolean TRACE = true;
    private final String FILE_NAME = "ksy265codecdemo_settings";
    private SharedPreferences mSharedPreferences;
    private SharedPreferences.Editor mEditor;
    private static Settings sInstance;
    private EncoderSettings mEncoderSettings;
    private DecoderSettings mDecoderSettings;

    /** Lazily-created singleton (double-checked locking). */
    public static Settings getInstance() {
        if (sInstance == null) {
            synchronized (Settings.class) {
                if (sInstance == null) {
                    sInstance = new Settings();
                }
            }
        }
        return sInstance;
    }

    /**
     * Opens the backing SharedPreferences file. Safe to call more than once.
     *
     * @throws IllegalArgumentException when context is null
     */
    public void init(Context context) throws IllegalArgumentException {
        if (context == null) {
            throw new IllegalArgumentException("the context must not null");
        }
        if (mSharedPreferences == null) {
            // Fix: MODE_PRIVATE is a static constant; access it via the class,
            // not through the instance.
            mSharedPreferences = context.getSharedPreferences(FILE_NAME,
                    Context.MODE_PRIVATE);
            mEditor = mSharedPreferences.edit();
        }
    }

    /** Cached encoder settings; loaded from disk when available, else defaults. */
    public EncoderSettings getEncoderSettings() {
        if (mEncoderSettings == null) {
            if (mSharedPreferences != null) {
                mEncoderSettings = new EncoderSettings(mSharedPreferences);
            } else {
                if (TRACE) {
                    Log.w(TAG, "please call init before call this function");
                }
                mEncoderSettings = new EncoderSettings();
            }
        }
        return mEncoderSettings;
    }

    /** Cached decoder settings; loaded from disk when available, else defaults. */
    public DecoderSettings getDecoderSettings() {
        if (mDecoderSettings == null) {
            if (mSharedPreferences != null) {
                mDecoderSettings = new DecoderSettings(mSharedPreferences);
            } else {
                if (TRACE) {
                    Log.w(TAG, "please call init before call this function");
                }
                mDecoderSettings = new DecoderSettings();
            }
        }
        return mDecoderSettings;
    }

    /** Persists the encoder settings; no-op when init() has not been called. */
    public void saveEncoderSettings(EncoderSettings settings) {
        if (mEditor == null) {
            return;
        }
        mEditor.putInt(EncoderSettings.ENCODER_SETTINGS_ENCODER, settings.encoderIndex);
        mEditor.putInt(EncoderSettings.ENCODER_SETTINGS_PROFILE, settings.profileIndex);
        mEditor.putInt(EncoderSettings.ENCODER_SETTINGS_DELAY, settings.delayIndex);
        mEditor.putString(EncoderSettings.ENCODER_SETTINGS_RESOLUTION, settings.resolution);
        mEditor.putInt(EncoderSettings.ENCODER_SETTINGS_RESOLUTION_IDX, settings.resIndex);
        mEditor.putString(EncoderSettings.ENCODER_SETTINGS_THREADS, settings.threads);
        mEditor.putString(EncoderSettings.ENCODER_SETTINGS_FPS, settings.fps);
        mEditor.putString(EncoderSettings.ENCODER_SETTINGS_BITRATE, settings.bitrate);
        mEditor.commit(); // synchronous on purpose: settings are read right after saving
    }

    /** Persists the decoder settings; no-op when init() has not been called. */
    public void saveDecoderSettings(DecoderSettings settings) {
        if (mEditor == null) {
            return;
        }
        mEditor.putInt(DecoderSettings.DECODER_SETTINGS_DECODER, settings.decoderIndex);
        mEditor.putInt(DecoderSettings.DECODER_SETTINGS_THREADS, settings.threadsIndex);
        mEditor.putInt(DecoderSettings.DECODER_SETTINGS_FPS, settings.fpsIndex);
        mEditor.putBoolean(DecoderSettings.DECODER_SETTINGS_OUTPUT, settings.enableYUVOutput);
        mEditor.commit(); // synchronous on purpose
    }
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/Android.mk
================================================
# Top-level jni makefile: recurse into every subdirectory with an Android.mk.
LOCAL_PATH := $(call my-dir)
include $(call all-subdir-makefiles)
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/Application.mk
================================================
# Build for 32-bit ARM only; extend (e.g. "armeabi-v7a x86") for more targets.
APP_ABI := armeabi-v7a

# Normalize NDK_DEBUG (unset / "true" / numeric) into a 0/1 DEBUG flag.
DEBUG := $(NDK_DEBUG)
ifndef NDK_DEBUG
DEBUG := 0
endif
ifeq ($(DEBUG),true)
DEBUG := 1
endif

ifeq ($(DEBUG),1)
APP_CFLAGS += -O0 -g
APP_OPTIM := debug
else
APP_CFLAGS += -O2
APP_OPTIM := release
endif

# GNU STL, statically linked (declared once; the duplicate assignment was removed).
APP_STL := gnustl_static
APP_PLATFORM := android-9
#NDK_TOOLCHAIN_VERSION := 4.9
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/Android.mk
================================================
# Decoder makefile: recurse into jniplayer/ (and any future subdirectories).
LOCAL_PATH := $(call my-dir)
include $(call all-subdir-makefiles)
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/Android.mk
================================================
LOCAL_PATH := $(call my-dir)
ARCH_ABI := $(TARGET_ARCH_ABI)
# Prebuilt decoder libraries live outside the jni tree, per-ABI.
PREBUILT_PATH := $(LOCAL_PATH)/../../../../../../../prebuilt
#
# Prebuilt shared library: the Lentoid HEVC decoder.
#
include $(CLEAR_VARS)
LOCAL_MODULE := lenthevcdec
LOCAL_SRC_FILES := $(PREBUILT_PATH)/$(TARGET_ARCH_ABI)/liblenthevcdec.so
include $(PREBUILT_SHARED_LIBRARY)
# Prebuilt static library: the QY decoder.
include $(CLEAR_VARS)
LOCAL_MODULE := qydecoder
LOCAL_SRC_FILES := $(PREBUILT_PATH)/$(TARGET_ARCH_ABI)/libqydecoder.a
include $(PREBUILT_STATIC_LIBRARY)
#
# jniplayer.so -- the JNI player + GL renderer built from this directory.
#
include $(CLEAR_VARS)
# Per-ABI codec flags consumed by the decoder sources.
ifeq ($(TARGET_ARCH_ABI), armeabi-v7a)
LENT_CFLAGS := -DARCH_ARM=1 -DHAVE_NEON=1
endif
ifeq ($(TARGET_ARCH_ABI), x86)
LENT_CFLAGS := -DARCH_X86_32=1
endif
LOCAL_C_INCLUDES += $(PREBUILT_PATH)/include
LOCAL_SRC_FILES := jniplayer.cpp jni_utils.cpp yuv2rgb565.cpp gl_renderer.cpp
# log/zlib/bitmap/GLES2 system libraries used by the renderer and player.
LOCAL_LDLIBS := -llog -lz -ljnigraphics -lGLESv2
LOCAL_CFLAGS += $(LENT_CFLAGS)
LOCAL_SHARED_LIBRARIES := lenthevcdec
LOCAL_STATIC_LIBRARIES += qydecoder gnustl_static cpufeatures
LOCAL_MODULE := jniplayer
include $(BUILD_SHARED_LIBRARY)
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/Chromium_LICENSE.txt
================================================
Copyright (c) 2010 The Chromium Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/gl_renderer.cpp
================================================
// gl_renderer.cpp : render YUV data directly using GPU with OpenGL ES 2.0
//
// Copyright (c) 2013 Strongene Ltd. All Right Reserved.
// http://www.strongene.com
//
// Contributors:
// Shengbin Meng
// James Deng
//
// You are free to re-use this as the basis for your own application
// in source and binary forms, with or without modification, provided
// that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright
// notice and this list of conditions.
// * Redistributions in binary form must reproduce the above
// copyright notice and this list of conditions in the documentation
// and/or other materials provided with the distribution.
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <pthread.h>
#include <jni.h>
#include <android/log.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include "jniplayer.h"
#include "gl_renderer.h"
#include "jni_utils.h"
extern VideoFrame gVF;
extern pthread_mutex_t gVFMutex;
#define LOG_TAG "gl_renderer"
#define ENABLE_LOGD 0
#if ENABLE_LOGD
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__)
#else
#define LOGD(...)
#endif
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
// GL handles created once in init() and reused for every frame.
static GLuint gProgram;         // linked YUV->RGB shader program
static GLuint gTexIds[3];       // one texture per Y/U/V plane
static GLuint gAttribPosition;  // vertex position attribute location
static GLuint gAttribTexCoord;  // texture coordinate attribute location
static GLuint gUniformTexY;     // sampler uniforms, bound to texture units 0..2
static GLuint gUniformTexU;
static GLuint gUniformTexV;
// Size of the drawing surface, set via nativeSetup().
static int backingWidth, backingHeight;
// When non-zero, glDrawFrame() recomputes quad geometry before drawing.
static int needSetup = 0;
// Pass-through vertex shader: forwards position and texture coordinate.
static const char gVertexShader[] =
"attribute vec4 a_position;\n"
"attribute vec2 a_texCoord;\n"
"varying vec2 v_tc;\n"
"void main()\n"
"{\n"
" gl_Position = a_position;\n"
" v_tc = a_texCoord;\n"
"}\n";
// Fragment shader: samples the three Y/U/V planes and converts to RGB
// with a BT.601-style matrix (coefficients hard-coded below).
static const char gFragmentShader[] =
"varying lowp vec2 v_tc;\n"
"uniform sampler2D u_texY;\n"
"uniform sampler2D u_texU;\n"
"uniform sampler2D u_texV;\n"
"void main(void)\n"
"{\n"
"mediump vec3 yuv;\n"
"lowp vec3 rgb;\n"
"yuv.x = texture2D(u_texY, v_tc).r;\n"
"yuv.y = texture2D(u_texU, v_tc).r - 0.5;\n"
"yuv.z = texture2D(u_texV, v_tc).r - 0.5;\n"
"rgb = mat3( 1, 1, 1,\n"
"0, -0.39465, 2.03211,\n"
"1.13983, -0.58060, 0) * yuv;\n"
"gl_FragColor = vec4(rgb, 1);\n"
"}\n";
// Query one GL string (version, vendor, ...) and write it to the info log.
static void printGLString(const char *name, GLenum s) {
    const char *value = (const char *) glGetString(s);
    LOGI("GL %s = %s\n", name, value);
}
// Compile a single shader of the given type from source.
// Returns the shader handle, or 0 when creation/compilation failed
// (the compile log, if any, is printed and the shader deleted).
static GLuint loadShader(GLenum shaderType, const char* pSource) {
    GLuint shader = glCreateShader(shaderType);
    if (!shader) {
        return 0;
    }
    glShaderSource(shader, 1, &pSource, NULL);
    glCompileShader(shader);
    GLint compileStatus = 0;
    glGetShaderiv(shader, GL_COMPILE_STATUS, &compileStatus);
    if (compileStatus) {
        return shader;
    }
    // Compilation failed: report the driver's log when one is available.
    GLint logLength = 0;
    glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &logLength);
    if (logLength) {
        char* log = (char*) malloc(logLength);
        if (log) {
            glGetShaderInfoLog(shader, logLength, NULL, log);
            LOGE("Could not compile shader %d:\n%s\n", shaderType, log);
            free(log);
        }
        glDeleteShader(shader);
        shader = 0;
    }
    return shader;
}
// Compile both shaders and link them into a program.
// Returns the program handle, or 0 on failure (the link log is printed).
static GLuint createProgram(const char* pVertexSource,
        const char* pFragmentSource) {
    GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource);
    if (!vertexShader) {
        return 0;
    }
    GLuint fragmentShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource);
    if (!fragmentShader) {
        // Fix: the vertex shader used to leak on this path.
        glDeleteShader(vertexShader);
        return 0;
    }
    GLuint program = glCreateProgram();
    if (program) {
        glAttachShader(program, vertexShader);
        glAttachShader(program, fragmentShader);
        glLinkProgram(program);
        GLint linkStatus = GL_FALSE;
        glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
        if (linkStatus != GL_TRUE) {
            GLint bufLength = 0;
            glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
            if (bufLength) {
                char* buf = (char*) malloc(bufLength);
                if (buf) {
                    glGetProgramInfoLog(program, bufLength, NULL, buf);
                    LOGE("Could not link program:\n%s\n", buf);
                    free(buf);
                }
            }
            glDeleteProgram(program);
            program = 0;
        }
    }
    // Fix: flag the shaders for deletion. Attached shaders are kept alive by
    // the program object, so this frees them as soon as the program goes away
    // (or immediately when program creation/linking failed).
    glDeleteShader(vertexShader);
    glDeleteShader(fragmentShader);
    return program;
}
// Fullscreen quad (triangle strip), x/y/z per vertex. glDrawFrame() rescales
// the x or y extent to preserve the video frame's aspect ratio.
static GLfloat vertexPositions[] = {
-1.0, -1.0, 0.0,
1.0, -1.0, 0.0,
-1.0, 1.0, 0.0,
1.0, 1.0, 0.0
};
// Texture coordinates per vertex. The horizontal extent is reduced in
// glDrawFrame() to width/linesize so row-stride padding is not displayed.
static GLfloat textureCoords[] = {
0.0, 1.0,
1.0, 1.0,
0.0, 0.0,
1.0, 0.0
};
// One-time GL setup: compile/link the YUV->RGB program, cache attribute and
// uniform locations, and create the three plane textures.
// Returns 0 on success, -1 when the program could not be created.
static int init() {
    printGLString("Version", GL_VERSION);
    printGLString("Vendor", GL_VENDOR);
    printGLString("Renderer", GL_RENDERER);
    printGLString("Extensions", GL_EXTENSIONS);
    // create and use our program
    gProgram = createProgram(gVertexShader, gFragmentShader);
    if (!gProgram) {
        LOGE("Could not create program.");
        return -1;
    }
    glUseProgram(gProgram);
    // get the location of attributes in our shader
    gAttribPosition = glGetAttribLocation(gProgram, "a_position");
    gAttribTexCoord = glGetAttribLocation(gProgram, "a_texCoord");
    // get the location of uniforms in our shader
    gUniformTexY = glGetUniformLocation(gProgram, "u_texY");
    gUniformTexU = glGetUniformLocation(gProgram, "u_texU");
    gUniformTexV = glGetUniformLocation(gProgram, "u_texV");
    // can enable only once
    glEnableVertexAttribArray(gAttribPosition);
    glEnableVertexAttribArray(gAttribTexCoord);
    // bind sampler i to texture unit i (constant for the program's lifetime)
    glUniform1i(gUniformTexY, 0);
    glUniform1i(gUniformTexU, 1);
    glUniform1i(gUniformTexV, 2);
    // Fix: the glEnable(GL_TEXTURE_2D) call that used to be here was removed.
    // GL_TEXTURE_2D is not a valid glEnable capability in OpenGL ES 2.0
    // (fixed-function texturing does not exist); it only raised GL_INVALID_ENUM.
    glGenTextures(3, gTexIds);
    for (int i = 0; i < 3; i++) {
        glActiveTexture(GL_TEXTURE0 + i);
        glBindTexture(GL_TEXTURE_2D, gTexIds[i]);
        // Linear filtering + clamp-to-edge: required for non-power-of-two
        // textures in ES 2.0.
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    }
    return 0;
}
// Record the surface size and request a deferred geometry re-setup; the
// actual work happens in glDrawFrame() on the GL thread. Always returns 0.
static int setupGraphics(int w, int h) {
LOGI("setupGraphics(%d, %d)", w, h);
backingWidth = w;
backingHeight = h;
needSetup = 1;
return 0;
}
// Upload the current decoded frame (gVF) as three luminance textures and
// draw a fullscreen quad; the fragment shader converts YUV to RGB.
// gVF is shared with the decoder thread, so it is read under gVFMutex.
// Must run on the thread that owns the GL context.
void glDrawFrame() {
pthread_mutex_lock(&gVFMutex);
if (gVF.yuv_data[0] == NULL) {
// No frame has been decoded yet -- nothing to draw.
LOGI("gVF.yuv_data[0] == NULL");
pthread_mutex_unlock(&gVFMutex);
return;
}
double pts = gVF.pts;
if (needSetup) {
// Deferred geometry setup (requested by setupGraphics()): letterbox or
// pillarbox the quad so the frame keeps its aspect ratio on screen.
LOGI("Will setup ... \n");
GLuint width = gVF.width;
GLuint height = gVF.height;
float aspect = (float) width / (float) height;
if (aspect >= (float) backingWidth / (float) backingHeight) {
// fill screen in width, and leave space in Y
float scale = (float) backingWidth / (float) width;
float maxY = ((float) height * scale) / (float) backingHeight;
vertexPositions[1] = vertexPositions[4] = -maxY;
vertexPositions[7] = vertexPositions[10] = maxY;
} else {
// fill screen in height, and leave space in X
float scale = (float) backingHeight / (float) height;
float maxX = ((float) width * scale) / (float) backingWidth;
vertexPositions[0] = vertexPositions[6] = -maxX;
vertexPositions[3] = vertexPositions[9] = maxX;
}
// modify the texture coordinates: crop to width/linesize so the
// decoder's row-stride padding is not displayed
float texCoord = ((float) width) / gVF.linesize_y;
textureCoords[2] = textureCoords[6] = texCoord;
// set the value of attributes
glVertexAttribPointer(gAttribPosition, 3, GL_FLOAT, 0, 0,
vertexPositions);
glVertexAttribPointer(gAttribTexCoord, 2, GL_FLOAT, 0, 0,
textureCoords);
glViewport(0, 0, backingWidth, backingHeight);
LOGI("setup finished\n");
needSetup = 0;
}
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear (GL_COLOR_BUFFER_BIT);
LOGD("before upload: %u (%f)", getms(), pts);
// Upload the three planes; the chroma planes are half-height (4:2:0 layout
// per the yuv_data[1]/[2] usage here -- widths use the decoder's linesize).
glActiveTexture(GL_TEXTURE0 + 0);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, gVF.linesize_y, gVF.height, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, gVF.yuv_data[0]);
glActiveTexture(GL_TEXTURE0 + 1);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, gVF.linesize_uv, gVF.height / 2, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, gVF.yuv_data[1]);
glActiveTexture(GL_TEXTURE0 + 2);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, gVF.linesize_uv, gVF.height / 2, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, gVF.yuv_data[2]);
// The frame data has been copied into GL; the decoder may reuse gVF now.
pthread_mutex_unlock(&gVFMutex);
LOGD("after upload: %u (%f)", getms(), pts);
LOGD("before glDrawArrays: %u (%f)", getms(), pts);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
LOGD("after glDrawArrays: %u (%f)", getms(), pts);
}
// JNI bridge: run one-time GL initialization; logs and returns the
// (negative) error code on failure, 0 on success.
jint nativeInit(JNIEnv * env, jobject obj) {
    int result = init();
    if (result < 0) {
        LOGE("initialize failed!");
    }
    return result;
}
// JNI bridge for GLRenderer.nativeSetup(): configure the GL viewport /
// geometry for a surface of the given size.  Returns setupGraphics()'s
// result (negative on failure) to the Java caller.
jint nativeSetup(JNIEnv * env, jobject obj, jint width, jint height) {
    const int status = setupGraphics(width, height);
    if (status < 0) {
        LOGE("setup failed!");
    }
    return status;
}
// JNI bridge for GLRenderer.nativeDrawFrame(): render the current
// contents of the shared gVF frame buffer (see glDrawFrame above).
void nativeDrawFrame(JNIEnv * env, jobject obj) {
    glDrawFrame();
}
// JNI method table for the Java GLRenderer class; signatures must match
// the Java-side native declarations exactly.
static JNINativeMethod methods[] = {
    { "nativeInit", "()I", (void *) nativeInit },
    { "nativeSetup", "(II)I", (void *) nativeSetup },
    { "nativeDrawFrame", "()V", (void *) nativeDrawFrame },
};
// Register the GLRenderer native methods with the VM.
// Returns 0 on success, -1 on failure (see jniRegisterNativeMethods).
int register_renderer(JNIEnv *env) {
    return jniRegisterNativeMethods(env, "com/ksyun/media/ksy265codec/demo/decoder/hevdecoder/GLRenderer",
            methods, sizeof(methods) / sizeof(methods[0]));
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/gl_renderer.h
================================================
// gl_renderer.h - public entry point of the GLES YUV renderer.
#ifndef __GL_RENDERER_H__
#define __GL_RENDERER_H__
// Draw the latest decoded frame stored in the shared gVF buffer
// (uploads the Y/U/V planes as luminance textures and renders a quad).
void glDrawFrame();
#endif
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/jni_utils.cpp
================================================
#include
#include
#include "jni_utils.h"
#define LOG_TAG "jni_utils"
static JavaVM *gVM;
extern int register_player(JNIEnv *env);
extern int register_renderer(JNIEnv *env);
/*
 * Throw a Java exception of the given class with an optional message.
 * Returns -1 when the exception class cannot be found; 0 otherwise
 * (including the case where ThrowNew itself failed, which is only
 * logged).
 */
int jniThrowException(JNIEnv* env, const char* className, const char* msg) {
    jclass exClass = env->FindClass(className);
    if (exClass == NULL) {
        LOGE("Unable to find exception class %s", className);
        return -1;
    }
    if (env->ThrowNew(exClass, msg) != JNI_OK) {
        LOGE("Failed throwing '%s' '%s'", className, msg);
    }
    return 0;
}
// Obtain a JNIEnv valid for the calling thread, attaching the thread to
// the VM when it is not yet attached.  Returns NULL when the environment
// cannot be obtained or the attach fails.
JNIEnv* getJNIEnv() {
    JNIEnv* env = NULL;
    const int status = gVM->GetEnv((void**) &env, JNI_VERSION_1_4);
    switch (status) {
    case JNI_OK:
        return env;
    case JNI_EDETACHED:
        // native thread not yet known to the VM: attach it now
        if (gVM->AttachCurrentThread(&env, NULL) != 0) {
            LOGE("attach current thread failed \n");
            return NULL;
        }
        break;
    default:
        LOGE("obtain JNIEnv failed, return: %d \n", status);
        break;
    }
    return env;
}
// Detach the calling thread from the JVM (must be called before a
// natively-attached thread exits), logging the outcome.
void detachJVM() {
    const int rc = gVM->DetachCurrentThread();
    if (rc == JNI_OK) {
        LOGI("detach return OK: %d", rc);
    } else {
        LOGE("detach return NOT OK: %d", rc);
    }
}
/*
 * Register native JNI-callable methods.
 *
 * "className" looks like "java/lang/String".
 * Returns 0 on success; -1 when the class is missing or registration
 * is rejected by the VM.
 */
int jniRegisterNativeMethods(JNIEnv* env, const char* className,
        const JNINativeMethod* gMethods, int numMethods) {
    LOGI("Registering %s natives\n", className);
    jclass clazz = env->FindClass(className);
    if (clazz == NULL) {
        LOGE("Native registration unable to find class '%s'\n", className);
        return -1;
    }
    if (env->RegisterNatives(clazz, gMethods, numMethods) < 0) {
        LOGE("RegisterNatives failed for '%s'\n", className);
        return -1;
    }
    return 0;
}
// Library entry point, called by the VM when the .so is loaded.
// Caches the JavaVM pointer (used by getJNIEnv) and registers the
// player and renderer native method tables.
// Returns JNI_VERSION_1_4 on success, JNI_ERR on any failure.
// (Removed the unused local 'result' from the original.)
jint JNI_OnLoad(JavaVM* vm, void* reserved) {
    JNIEnv* env = NULL;
    gVM = vm;
    if (vm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
        LOGE("GetEnv failed!");
        return JNI_ERR;
    }
    LOGI("loading . . .");
    if (register_player(env) != JNI_OK) {
        LOGE("can't register player");
        return JNI_ERR;
    }
    if (register_renderer(env) != JNI_OK) {
        LOGE("can't register renderer");
        return JNI_ERR;
    }
    LOGI("loaded");
    return JNI_VERSION_1_4;
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/jni_utils.h
================================================
// jni_utils.h - logging macros and small JNI helpers shared by the
// native player and the GL renderer.
#ifndef __JNI_UTILS_H__
#define __JNI_UTILS_H__
// NOTE(review): the targets of the #include lines below were lost when
// this file was extracted (likely <jni.h>, <android/log.h>, <stdint.h>);
// restore them from version control before building.
#include
#include
#include
#ifdef __cplusplus
#define __STDC_CONSTANT_MACROS
#define __STDC_LIMIT_MACROS
#ifdef _STDINT_H
#undef _STDINT_H
#endif
#include
#define __STDC_FORMAT_MACROS
#endif
// Compile-time switch for verbose debug logging; LOGD compiles away when 0.
#define ENABLE_LOGD 0
#if ENABLE_LOGD
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__)
#else
#define LOGD(...)
#endif
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
// Throw a Java exception of class 'className' with message 'msg';
// returns -1 if the class cannot be found, 0 otherwise.
int jniThrowException(JNIEnv* env, const char* className, const char* msg);
// Register 'numMethods' entries of 'gMethods' on 'className';
// returns 0 on success, -1 on failure.
int jniRegisterNativeMethods(JNIEnv* env, const char* className, const JNINativeMethod* gMethods, int numMethods);
// JNIEnv for the calling thread, attaching it to the VM if needed;
// NULL on failure.
JNIEnv* getJNIEnv();
// Detach the calling thread from the VM (logs the result).
void detachJVM();
#endif /* __JNI_UTILS_H__ */
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/jniplayer.cpp
================================================
// jniplayer.cpp : decode H.265/HEVC video data in separate native thread
//
// Copyright (c) 2013 Strongene Ltd. All Right Reserved.
// http://www.strongene.com
//
// Contributors:
// Shengbin Meng
// James Deng
//
// You are free to re-use this as the basis for your own application
// in source and binary forms, with or without modification, provided
// that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright
// notice and this list of conditions.
// * Redistributions in binary form must reproduce the above
// copyright notice and this list of conditions in the documentation
// and/or other materials provided with the distribution.
#include
#include
#include
#include
#include
#include
#include "jniplayer.h"
#include "jni_utils.h"
#include "yuv2rgb565.h"
#include "gl_renderer.h"
#ifdef __cplusplus
#define __STDC_CONSTANT_MACROS
#define __STDC_LIMIT_MACROS
#ifdef _STDINT_H
#undef _STDINT_H
#endif
#include
#define __STDC_FORMAT_MACROS
#endif
extern "C" {
#include "lenthevcdec.h"
#include "qy265dec.h"
#include "qyauth_env.h"
}
#define LOG_TAG "jniplayer"
#define ENABLE_LOGD 0
#if ENABLE_LOGD
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__)
#else
#define LOGD(...)
#endif
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
#ifndef _countof
#define _countof(a) (sizeof(a) / sizeof((a)[0]))
#endif
#define LOOP_PLAY 0
#if ARCH_ARM
#define USE_SWSCALE 0
#else
#define USE_SWSCALE 0
#endif
// Static description of the currently loaded media source.
struct MediaInfo
{
    int width;            // video width in pixels (0 until known)
    int height;           // video height in pixels (0 until known)
    char data_src[1024];  // path of the input file
    int raw_bs;           // non-zero when the file is a raw HEVC bitstream
};
// Shared frame buffer handed from the decoder thread to the GL renderer;
// guarded by gVFMutex.
VideoFrame gVF = {0, 0, 0, 0, 0, {NULL, NULL, NULL}};
pthread_mutex_t gVFMutex = PTHREAD_MUTEX_INITIALIZER;
static MediaInfo media;                 // current media source description
static pthread_t decode_thread;         // handle of the decoder thread
static struct SwsContext *p_sws_ctx;    // optional swscale context (USE_SWSCALE)
static const char* const kClassPathName = "com/ksyun/media/ksy265codec/demo/decoder/hevdecoder/NativeMediaPlayer";
// for lenthevcdec
static const uint32_t AU_COUNT_MAX = 1024 * 1024;          // max indexed access units
static const uint32_t AU_BUF_SIZE_MAX = 1024 * 1024 * 80;  // max bitstream bytes loaded
static uint32_t au_pos[AU_COUNT_MAX]; // too big array, use static to save stack space
static uint32_t au_count, au_buf_size; // number of AUs found / bytes loaded
static uint8_t *au_buf = NULL;         // whole bitstream loaded into memory
static lenthevcdec_ctx lent_ctx = NULL; // lentoid decoder instance
static volatile int exit_decode_thread = 0; // set by stop() to end the decode loop
static volatile int is_playing = 0;
// display-FPS statistics (updated by drawFrame)
static int frames_sum = 0;
static double tstart = 0;
static int frames = 0;
static double tlast = 0;
static float renderFPS = 0;        // target render rate requested by Java
static double avg_fps = 0;
static uint64_t renderInterval = 0; // microseconds between frames
static struct timeval timeStart;    // wall-clock time of the first frame
static int frame_count = 0;         // frames decoded in the current run
static double real_time = 0;        // wall-clock decoding time (seconds)
static float real_fps = 0;          // frame_count / real_time
static int use_ksy = 0;             // 1: QY265 decoder, 0: lentoid decoder
static void* ksydec_ctx = NULL;     // QY265 decoder instance
static QY265Frame decframe;         // last frame fetched from QY265
static int disable_render = 0;      // non-zero: decode only, skip display
static bool enable_output_yuv = 0;  // write decoded YUV to output_path
static char output_path[1024];
// Cached references into the Java NativeMediaPlayer instance used for
// callbacks from the decoder thread.
typedef struct Context {
    jobject obj;          // global ref to the Java player object
    jmethodID drawFrame;  // int drawFrame(int width, int height)
    jmethodID postEvent;  // void postEventFromNative(int, int, int)
} Context;
static Context *ctx;
// Release the Java callback context: drop the global reference to the
// player object (when a JNIEnv is available) and free the struct.
static void freeContext(void* opaque) {
    Context* ctx = (Context*) opaque;
    if (ctx && ctx->obj) {
        JNIEnv *env = getJNIEnv();
        // BUGFIX: getJNIEnv() can return NULL (attach failure); calling
        // DeleteGlobalRef through a NULL env crashed the process.
        if (env != NULL) {
            env->DeleteGlobalRef(ctx->obj);
        }
    }
    free(ctx);
}
// Forward an event (msg, ext1, ext2) to the Java layer through the
// cached postEventFromNative method.  Silently no-ops when the context
// or a JNIEnv for this thread is unavailable.
// (Removed the unused local 'isAttached' from the original.)
static void postEventToJava(void *opaque, int msg, int ext1, int ext2)
{
    Context* ctx = (Context*) opaque;
    if (ctx == NULL) {
        return;
    }
    JNIEnv *env = getJNIEnv();
    if (env == NULL) {
        return;
    }
    env->CallVoidMethod(ctx->obj, ctx->postEvent, msg, ext1, ext2);
    if (env->ExceptionCheck()) {
        // log and clear-path: a pending Java exception must not leak into
        // further JNI calls from this native thread
        env->ExceptionDescribe();
    }
}
// Ask the Java layer to render the current frame via the cached
// drawFrame(int, int) method.  Returns the Java method's result, or 0
// when the context or JNIEnv is unavailable.
// (Removed the unused local 'isAttached' from the original.)
static int callJavaDrawFrame(void* opaque, int width, int height)
{
    Context* ctx = (Context*) opaque;
    if (ctx == NULL) {
        return 0;
    }
    JNIEnv *env = getJNIEnv();
    if (env == NULL) {
        return 0;
    }
    int ret = env->CallIntMethod(ctx->obj, ctx->drawFrame, width, height);
    if (env->ExceptionCheck()) {
        env->ExceptionDescribe();
    }
    return ret;
}
// Wall-clock timestamp in milliseconds (truncated to 32 bits, so it
// wraps roughly every 49 days; callers only use it for deltas).
uint32_t getms()
{
    struct timeval now;
    gettimeofday(&now, NULL);
    return (uint32_t)(now.tv_sec * 1000 + now.tv_usec / 1000);
}
// Copy a decoded frame into the shared gVF buffer, pace output to the
// frame's pts (microseconds since playback start), update FPS stats and
// request display from Java.
// Returns the Java drawFrame() result, 0 when rendering is disabled,
// or -1 on allocation failure.
int drawFrame(VideoFrame * vf)
{
    int64_t timePassed, delay;
    LOGD("enter drawFrame:%u (%f)", getms(), vf->pts);
    if(disable_render)
        goto show_display;
    // copy decode frame to global buffer
    pthread_mutex_lock(&gVFMutex);
    if ( gVF.linesize_y != vf->linesize_y || gVF.linesize_uv != vf->linesize_uv || gVF.height != vf->height ) {
        // geometry changed: reallocate.  The three planes live in ONE
        // contiguous block based at yuv_data[0] (Y, then U, then V).
        if ( NULL != gVF.yuv_data[0] )
            free(gVF.yuv_data[0]);
        gVF.yuv_data[0] = gVF.yuv_data[1] = gVF.yuv_data[2] = NULL;
        gVF.yuv_data[0] = (uint8_t*)malloc(vf->linesize_y * vf->height + vf->linesize_uv * vf->height );
        if ( NULL == gVF.yuv_data[0] ) {
            LOGE("malloc failed!\n");
            // BUGFIX: the original returned here with gVFMutex still
            // held, deadlocking every subsequent caller and the renderer.
            pthread_mutex_unlock(&gVFMutex);
            return -1;
        }
        gVF.yuv_data[1] = gVF.yuv_data[0] + vf->linesize_y*vf->height;
        gVF.yuv_data[2] = gVF.yuv_data[1] + vf->linesize_uv*vf->height/2;
    }
    gVF.width = vf->width;
    gVF.height = vf->height;
    gVF.linesize_y = vf->linesize_y;
    gVF.linesize_uv = vf->linesize_uv;
    gVF.pts = vf->pts;
    memcpy(gVF.yuv_data[0], vf->yuv_data[0], vf->linesize_y*vf->height);
    memcpy(gVF.yuv_data[1], vf->yuv_data[1], vf->linesize_uv*vf->height/2);
    memcpy(gVF.yuv_data[2], vf->yuv_data[2], vf->linesize_uv*vf->height/2);
    pthread_mutex_unlock(&gVFMutex);
    // wait for display: sleep until this frame's pts is due
    struct timeval timeNow;
    gettimeofday(&timeNow, NULL);
    timePassed = ((int64_t)(timeNow.tv_sec - timeStart.tv_sec))*1000000 + (timeNow.tv_usec - timeStart.tv_usec);
    delay = vf->pts - timePassed;
    if (delay > 0) {
        usleep(delay);
    }
show_display:
    // update information (display FPS once per second)
    gettimeofday(&timeNow, NULL);
    double tnow = timeNow.tv_sec + (timeNow.tv_usec / 1000000.0);
    if (tlast == 0) tlast = tnow;
    if (tstart == 0) tstart = tnow;
    if (tnow > tlast + 1) {
        LOGI("Video Display FPS:%i", (int)frames);
        frames_sum += frames;
        avg_fps = frames_sum / (tnow - tstart);
        LOGI("Video AVG FPS:%.2lf", avg_fps);
        // event 900: fps report; avg is fixed-point scaled by 4096
        postEventToJava(ctx, 900, int(frames), int(avg_fps * 4096));
        tlast = tlast + 1;
        frames = 0;
    }
    frames++;
    // request display
    LOGD("before request draw:%u (%f)", getms(), vf->pts);
    if (disable_render) {
        return 0;
    } else {
        return callJavaDrawFrame(ctx, vf->width, vf->height);
    }
}
// Locate the first SPS NAL unit (type 33) in an Annex-B HEVC buffer.
// On success, *sps_ptr points at the SPS start code and the return value
// is the byte length up to the next non-SPS NAL (or the end of the
// buffer).  Returns 0 when no SPS is found.
int lent_hevc_get_sps(uint8_t* buf, int size, uint8_t** sps_ptr)
{
    int sps_start = -1;
    int pos;
    for (pos = 0; pos < size - 4; pos++) {
        // look for the 3-byte Annex-B start code 00 00 01
        if (buf[pos] != 0 || buf[pos + 1] != 0 || buf[pos + 2] != 1)
            continue;
        int nal = (buf[pos + 3] & 0x7E) >> 1;
        if (nal != 33 && sps_start >= 0)
            break;              // SPS found earlier and a different NAL follows
        if (nal == 33)          // sps
            sps_start = pos;
        pos += 2;               // skip the remainder of the start code
    }
    if (sps_start < 0)
        return 0;
    if (pos == size - 4)
        pos = size;             // SPS extends to the end of the buffer
    *sps_ptr = buf + sps_start;
    return pos - sps_start;
}
// Scan an Annex-B HEVC buffer for the start of the next access unit.
// Returns the byte offset where the next AU begins ('size' when no
// boundary is found); when is_idr is non-NULL, *is_idr is set to 1 if
// the AU starts with a parameter-set run (VPS/SPS/PPS, NAL 32-34).
// NOTE(review): 'seq_hdr' is function-local static state carried across
// calls, so this function is neither reentrant nor thread-safe.
int lent_hevc_get_frame(uint8_t* buf, int size, int *is_idr)
{
    static int seq_hdr = 0;
    int i, nal_type, idr = 0;
    for ( i = 0; i < (size - 6); i++ ) {
        // 3-byte start code 00 00 01
        if ( 0 == buf[i] && 0 == buf[i+1] && 1 == buf[i+2] ) {
            nal_type = (buf[i+3] & 0x7E) >> 1;
            if ( nal_type <= 21 ) { // VCL NAL (slice)
                if ( buf[i+5] & 0x80 ) { /* first slice in pic */
                    if ( !seq_hdr )
                        break;          // new picture => AU boundary here
                    else
                        seq_hdr = 0;    // picture belongs to the pending header run
                }
            }
            if ( nal_type >= 32 && nal_type <= 34 ) { // VPS/SPS/PPS
                if ( !seq_hdr ) {
                    seq_hdr = 1;
                    idr = 1;
                    break;              // header run starts a new (IDR) AU
                }
                seq_hdr = 1;
            }
            i += 2; // skip the rest of the start code
        }
    }
    if ( i == (size - 6) )
        i = size; // no boundary: the AU fills the remaining buffer
    if ( NULL != is_idr )
        *is_idr = idr;
    return i;
}
// Write one 4:2:0 picture (Y, then the two chroma planes) to fp,
// cropping each row to the visible width and dropping stride padding.
// Returns 0 on success, -1 on a short write.
static int write_pic_yv12(int w, int h, uint8_t* buf[3], short stride[3], FILE *fp)
{
    uint8_t *line;
    int line_len, line_count, i, j, pitch;
    for ( i = 0; i < 3; i++ ) {
        line = buf[i];
        pitch = stride[i];
        // chroma planes are subsampled 2x2 relative to luma
        line_len = (0 == i) ? w : (w / 2);
        line_count = (0 == i) ? h : (h / 2);
        for ( j = 0; j < line_count; j++ ) {
            // cast fixes the signed/unsigned comparison against fwrite's
            // size_t return value
            if ( fwrite(line, 1, (size_t)line_len, fp) != (size_t)line_len )
                return -1;
            line += pitch;
        }
    }
    return 0;
}
// Decoder thread entry point (pthread).  Decodes every access unit
// previously indexed by rawbs_prepare() with either the KSY (QY265) or
// the lentoid decoder, optionally dumps YUV to output_path, renders
// each frame via drawFrame(), flushes the decoder, then releases all
// decoding resources and posts an end-of-stream event (909) to Java.
void* rawbs_runDecoder(void *p)
{
    int32_t got_frame, width, height, stride[3];
    uint8_t* pixels[3];
    int64_t pts, got_pts;
    int ret, i;
    struct timeval tv_start, tv_end;
    // prepare() must have created the decoder and loaded the bitstream
    if ( NULL == lent_ctx || NULL == au_buf )
        return NULL;
    FILE* out_file = NULL;
    if (enable_output_yuv) {
        out_file = fopen(output_path, "wb");
        if (out_file == NULL) {
            LOGE("open outout file %s faile", output_path);
            goto exit;
        }
    }
decode:
    // decode all AUs
    frame_count = 0;
    real_time = 0;
    gettimeofday(&tv_start, NULL);
    LOGD("dec %d\n", au_count);
    for ( i = 0; i < au_count && !exit_decode_thread; i++ ) {
        // synthetic pts: 40 ms per AU (25 fps assumed — TODO confirm)
        pts = i * 40;
        got_frame = 0;
        uint32_t start_time = getms();
        LOGD("before decode: %u", start_time);
        if(use_ksy) {
            // push one AU; decoded output is pulled separately below
            QY265DecodeFrame(ksydec_ctx, au_buf + au_pos[i], au_pos[i + 1] - au_pos[i], &ret, 0);
            if ( ret < 0 ) {
                LOGE("call QY265DecodeFrame failed! ret = %d i: %d /%d\n", ret, i, au_count);
            }
            QY265DecoderGetDecodedFrame(ksydec_ctx, &decframe, &ret, 0);
            if ( ret == 0 && decframe.bValid ) {
                got_frame = 1;
                width = decframe.frameinfo.nWidth;
                height = decframe.frameinfo.nHeight;
                stride[0] = decframe.iStride[0];
                stride[1] = decframe.iStride[1];
                pixels[0] = decframe.pData[0];
                pixels[1] = decframe.pData[1];
                pixels[2] = decframe.pData[2];
            }
        } else {
            ret = lenthevcdec_decode_frame(lent_ctx, au_buf + au_pos[i], au_pos[i + 1] - au_pos[i], pts,
                    &got_frame, &width, &height, stride, (void**)pixels, &got_pts);
            if ( ret < 0 ) {
                LOGE("call lenthevcdec_decode_frame failed! ret = %d\n", ret);
                goto exit;
            }
        }
        uint32_t end_time = getms();
        LOGD("after decode: %u", end_time);
        uint32_t dec_time = end_time - start_time;
        if ( got_frame > 0 ) {
            LOGD("decoding time: %u - %u = %u\n", end_time, start_time, dec_time);
            LOGD("decode frame: pts = %" PRId64 ", linesize = {%d,%d,%d}\n", got_pts, stride[0], stride[1], stride[2]);
            if ( media.width != width || media.height != height ) {
                LOGD("Video dimensions change! %dx%d -> %dx%d\n", media.width, media.height, width, height);
                media.width = width;
                media.height = height;
            }
            // draw frame to screen (pts paced by the configured render interval)
            VideoFrame vf;
            vf.width = width;
            vf.height = height;
            vf.linesize_y = stride[0];
            vf.linesize_uv = stride[1];
            vf.pts = renderInterval * frame_count;
            vf.yuv_data[0] = pixels[0];
            vf.yuv_data[1] = pixels[1];
            vf.yuv_data[2] = pixels[2];
            // NOTE(review): the YUV dump always reads 'decframe', which is
            // only filled on the QY265 path — confirm for the lentoid path.
            if (enable_output_yuv &&
                    out_file != NULL) {
                write_pic_yv12(decframe.frameinfo.nWidth, decframe.frameinfo.nHeight,
                        (uint8_t**)decframe.pData, decframe.iStride, out_file);
            }
            if (frame_count == 0) {
                // first displayed frame anchors the playback clock
                gettimeofday(&timeStart, NULL);
            }
            drawFrame(&vf);
            if(use_ksy)
                QY265DecoderReturnDecodedFrame(ksydec_ctx, &decframe);
            frame_count++;
        }
    }
#if LOOP_PLAY
    if (!exit_decode_thread) {
        LOGI("automatically play again\n");
        goto decode;
    }
#endif
    LOGE("flush %d + %d /%d \n", i,frame_count, au_count);
    // flush decoder: drain frames still buffered inside the decoder
    while ( !exit_decode_thread ) {
        got_frame = 0;
        if(use_ksy) {
            QY265DecoderGetDecodedFrame(ksydec_ctx, &decframe, &ret, 0);
            if ( ret == 0 && frame_count < au_count){
                if (decframe.bValid) {
                    got_frame = 1;
                    width = decframe.frameinfo.nWidth;
                    height = decframe.frameinfo.nHeight;
                    stride[0] = decframe.iStride[0];
                    stride[1] = decframe.iStride[1];
                    pixels[0] = decframe.pData[0];
                    pixels[1] = decframe.pData[1];
                    pixels[2] = decframe.pData[2];
                }
            }
            else {
                break;
            }
        } else {
            // NULL input signals end-of-stream; 'pts' is the last loop value
            ret = lenthevcdec_decode_frame(lent_ctx, NULL, 0, pts,
                    &got_frame, &width, &height, stride, (void**)pixels, &got_pts);
            if ( ret < 0 || got_frame <= 0)
                break;
        }
        if ( got_frame > 0 ) {
            if ( media.width != width || media.height != height ) {
                LOGD("Video dimensions change! %dx%d -> %dx%d\n", media.width, media.height, width, height);
                media.width = width;
                media.height = height;
            }
            // draw frame to screen
            VideoFrame vf;
            vf.width = width;
            vf.height = height;
            vf.linesize_y = stride[0];
            vf.linesize_uv = stride[1];
            vf.pts = renderInterval * frame_count;
            vf.yuv_data[0] = pixels[0];
            vf.yuv_data[1] = pixels[1];
            vf.yuv_data[2] = pixels[2];
            if (enable_output_yuv &&
                    out_file != NULL) {
                write_pic_yv12(decframe.frameinfo.nWidth, decframe.frameinfo.nHeight,
                        (uint8_t**)decframe.pData, decframe.iStride, out_file);
            }
            drawFrame(&vf);
            if(use_ksy)
                QY265DecoderReturnDecodedFrame(ksydec_ctx, &decframe);
            frame_count++;
        }
    }
exit:
    // release bitstream buffer and both decoder instances
    if ( NULL != au_buf )
        free(au_buf);
    au_buf = NULL;
    au_buf_size = 0;
    if ( NULL != lent_ctx )
        lenthevcdec_destroy(lent_ctx);
    lent_ctx = NULL;
    if ( NULL != ksydec_ctx )
        QY265DecoderDestroy(ksydec_ctx);
    ksydec_ctx = NULL;
    gettimeofday(&tv_end, NULL);
    real_time = (tv_end.tv_sec + (tv_end.tv_usec / 1000000.0)) - (tv_start.tv_sec + (tv_start.tv_usec / 1000000.0));
    real_fps = frame_count / real_time;
    postEventToJava(ctx, 909, int(frame_count), 0);// end of file
    detachJVM();
    is_playing = 0;
    LOGI("decode thread exit\n");
    exit_decode_thread = 0;
    if (out_file != NULL) {
        fclose(out_file);
    }
    return NULL;
}
// Record the input path and detect (by extension) whether it is a raw
// HEVC bitstream file.  Always returns 0.
static int
MediaPlayer_setDataSource(JNIEnv *env, jobject thiz, jstring path)
{
    const char *pathStr = env->GetStringUTFChars(path, NULL);
    memset(&media, 0, sizeof(media));
    // BUGFIX: bound the copy — a path longer than data_src (1024 bytes)
    // overflowed the buffer with the original strcpy
    snprintf(media.data_src, sizeof(media.data_src), "%s", pathStr);
    // Make sure that local ref is released before a potential exception
    env->ReleaseStringUTFChars(path, pathStr);
    // is raw HEVC bitstream file ?
    static const char * hevc_raw_bs_ext[] = {".hevc", ".hm91", ".hm10", ".bit", ".hvc", ".h265", ".265"};
    char * ext = strrchr(media.data_src, '.');
    if ( NULL != ext ) {
        int i;
        for ( i = 0; i < _countof(hevc_raw_bs_ext); i++ ) {
            if ( strcasecmp(hevc_raw_bs_ext[i], ext) == 0 )
                break;
        }
        if ( i < _countof(hevc_raw_bs_ext) )
            media.raw_bs = 1;
    }
    return 0;
}
// Load the raw HEVC bitstream into memory, index its access units, and
// create the decoder instance(s) selected by use_ksy.  Also decodes the
// SPS with a throwaway one-thread decoder to learn the video size.
// Returns 0 on success, -1 on failure (all partially acquired resources
// are released on the error path).
static int rawbs_prepare(JNIEnv *env, jobject context, int threads)
{
    FILE *in_file;
    int32_t got_frame, width, height, stride[3];
    uint8_t* pixels[3];
    int64_t pts;
    uint8_t *sps;
    lenthevcdec_ctx one_thread_ctx;
    int compatibility, sps_len, ret, i;
    in_file = NULL;
    au_buf = NULL;
    lent_ctx = NULL;
    one_thread_ctx = NULL;
    // get compatibility version from the extension; guard against paths
    // shorter than the 5-character extension (the original indexed
    // data_src out of bounds for such paths)
    compatibility = 0x7fffffff;
    size_t src_len = strlen(media.data_src);
    if ( src_len >= 5 && strncasecmp(".hm91", media.data_src + (src_len - 5), 5) == 0 )
        compatibility = 91;
    else if ( src_len >= 5 && strncasecmp(".hm10", media.data_src + (src_len - 5), 5) == 0 )
        compatibility = 100;
    // read whole file into au_buf
    in_file = fopen(media.data_src, "rb");
    if ( NULL == in_file ) {
        LOGE("Can not open input file '%s'\n", media.data_src);
        goto error_exit;
    }
    fseek(in_file, 0, SEEK_END);
    au_buf_size = ftell(in_file);
    fseek(in_file, 0, SEEK_SET);
    LOGD("file size is %d bytes\n", au_buf_size);
    if ( au_buf_size > AU_BUF_SIZE_MAX )
        au_buf_size = AU_BUF_SIZE_MAX; // clamp: only the head of huge files is decoded
    au_buf = (uint8_t*)malloc(au_buf_size);
    if ( NULL == au_buf ) {
        LOGE("call malloc failed! size is %d\n", au_buf_size);
        goto error_exit;
    }
    if ( fread(au_buf, 1, au_buf_size, in_file) != au_buf_size ) {
        LOGE("call fread failed!\n");
        goto error_exit;
    }
    fclose(in_file);
    in_file = NULL;
    LOGD("%d bytes read to address %p\n", au_buf_size, au_buf);
    // find all AU boundaries; au_pos[k] is the start offset of AU k+1
    au_count = 0;
    for ( i = 0; i < au_buf_size && au_count < (AU_COUNT_MAX - 1); i+=3 ) {
        i += lent_hevc_get_frame(au_buf + i, au_buf_size - i, NULL);
        if (i < au_buf_size) {
            au_pos[au_count++] = i;
        }
        LOGD("AU[%d] = %d\n", au_count - 1, au_pos[au_count - 1]);
    }
    au_pos[au_count] = au_buf_size; // include last AU
    LOGD("found %d AUs\n", au_count);
    if(use_ksy) {
        int hr = QY_OK;
        QY265DecConfig config = {0};
        config.threads = threads;
        config.bEnableOutputRecToFile = 0;
        config.strRecYuvFileName = NULL;
        TCounterEnv* tCounterEnv = (TCounterEnv*) malloc(sizeof(TCounterEnv));
        // BUGFIX: the original dereferenced this malloc without checking it
        if ( NULL == tCounterEnv ) {
            LOGE("call malloc failed for TCounterEnv\n");
            goto error_exit;
        }
        tCounterEnv->context = context;
        env->GetJavaVM(&tCounterEnv->jvm);
        config.pAuth = tCounterEnv;
        ksydec_ctx = QY265DecoderCreate(&config, &hr);
        if(ksydec_ctx == NULL) {
            LOGE("call QY265DecoderCreate fail..");
            goto error_exit;
        }
        LOGD("call QY265DecoderCreate Succeed..");
    }
    // open lentoid HEVC decoder
    LOGI("create lentoid decoder: compatibility = %d, threads = %d\n", compatibility, threads);
    lent_ctx = lenthevcdec_create(threads, compatibility, NULL);
    if ( NULL == lent_ctx ) {
        LOGE("call lenthevcdec_create failed!\n");
        goto error_exit;
    }
    LOGD("get decoder %p\n", lent_ctx);
    // find sps, decode it and get video resolution
    sps_len = lent_hevc_get_sps(au_buf, au_buf_size, &sps);
    if ( sps_len > 0 ) {
        // get a one-thread decoder to decode SPS
        one_thread_ctx = lenthevcdec_create(1, compatibility, NULL);
        // BUGFIX: the original tested lent_ctx here, which is already
        // known to be non-NULL — the new context was never checked
        if ( NULL == one_thread_ctx )
            goto error_exit;
        width = 0;
        height = 0;
        ret = lenthevcdec_decode_frame(one_thread_ctx, sps, sps_len, 0, &got_frame, &width, &height, stride, (void**)pixels, &pts);
        if ( 0 != width && 0 != height ) {
            media.width = width;
            media.height = height;
            LOGD("Video dimensions is %dx%d\n", width, height);
        }
        lenthevcdec_destroy(one_thread_ctx);
        one_thread_ctx = NULL;
    }
    return 0;
error_exit:
    if ( NULL != in_file )
        fclose(in_file);
    in_file = NULL;
    if ( NULL != au_buf )
        free(au_buf);
    au_buf = NULL;
    au_buf_size = 0;
    if ( NULL != lent_ctx )
        lenthevcdec_destroy(lent_ctx);
    lent_ctx = NULL;
    if ( NULL != one_thread_ctx )
        lenthevcdec_destroy(one_thread_ctx);
    one_thread_ctx = NULL;
    if ( NULL != ksydec_ctx )
        QY265DecoderDestroy(ksydec_ctx);
    ksydec_ctx = NULL;
    return -1;
}
// Configure playback parameters and delegate to rawbs_prepare().
// decoderType 0 selects the KSY (QY265) decoder, anything else the
// lentoid decoder; non-zero 'render' disables on-screen rendering;
// fps determines the inter-frame pacing interval in microseconds.
static int
MediaPlayer_prepare(JNIEnv *env, jobject thiz, jobject context, jint decoderType, jint render, jint threadNumber, jfloat fps) {
    LOGD("MediaPlayer_prepare: %d threads, fps %f\n", threadNumber, fps);
    renderFPS = fps;
    renderInterval = (fps == 0) ? 1 : (uint64_t)(1.0 / fps * 1000000); // us
    use_ksy = (decoderType == 0) ? 1 : 0;
    disable_render = render;
    return rawbs_prepare(env, context, threadNumber);
}
// Spawn the decoder thread (rawbs_runDecoder).
// Returns 0 on success, -1 when the thread cannot be created.
static int
MediaPlayer_start(JNIEnv *env, jobject thiz)
{
    LOGI("start decoding thread");
    // BUGFIX: pthread_create can fail; the original ignored its result
    // and reported success unconditionally
    int ret = pthread_create(&decode_thread, NULL, rawbs_runDecoder, NULL);
    if (ret != 0) {
        LOGE("pthread_create failed: %d", ret);
        return -1;
    }
    return 0;
}
// Pause playback.  Not implemented; always returns 0.
static int
MediaPlayer_pause(JNIEnv *env, jobject thiz)
{
    return 0;
}
// Resume playback.  Not implemented; always returns 0.
static int
MediaPlayer_go(JNIEnv *env, jobject thiz)
{
    return 0;
}
// Stop playback: signal the decoder thread to exit, join it, then free
// the shared frame buffer and reset gVF.  Always returns 0.
// NOTE(review): joins decode_thread even if start() was never called —
// verify callers guarantee start-before-stop.
static int
MediaPlayer_stop(JNIEnv *env, jobject thiz)
{
    void* result;
    exit_decode_thread = 1; // polled by the decoder loop
    pthread_join(decode_thread, &result);
    exit_decode_thread = 0;
    if (p_sws_ctx != NULL) {
        // sws_freeContext(p_sws_ctx);
        p_sws_ctx = NULL;
    }
    // planes 1 and 2 point into this same allocation, so freeing the
    // base pointer releases all three
    if ( NULL != gVF.yuv_data[0] )
        free(gVF.yuv_data[0]);
    memset(&gVF, 0, sizeof(gVF));
    LOGI("media player stopped\n");
    return 0;
}
// Whether the decoder thread is currently running (is_playing is
// cleared when the thread exits).
static bool
MediaPlayer_isPlaying(JNIEnv *env, jobject thiz)
{
    return is_playing;
}
// Seek to a position in milliseconds.  Not implemented; always returns 0.
static int
MediaPlayer_seekTo(JNIEnv *env, jobject thiz, jint msec)
{
    return 0;
}
// Current video width in pixels (0 until the SPS or a frame has been
// decoded).
static int
MediaPlayer_getVideoWidth(JNIEnv *env, jobject thiz)
{
    return media.width;
}
// Current video height in pixels (0 until the SPS or a frame has been
// decoded).
static int
MediaPlayer_getVideoHeight(JNIEnv *env, jobject thiz)
{
    return media.height;
}
// Current playback position in milliseconds.  Not implemented; always 0.
static int
MediaPlayer_getCurrentPosition(JNIEnv *env, jobject thiz)
{
    int msec = 0;
    return msec;
}
// Nominal duration in seconds, derived from the decoded frame count and
// the render FPS configured in prepare().
// TODO: compute from the stream's actual frame rate instead.
// NOTE(review): yields inf/NaN when renderFPS is 0 (fps==0 in prepare).
static jfloat
MediaPlayer_getDuration(JNIEnv *env, jobject thiz)
{
    return frame_count / renderFPS;
}
// Wall-clock time (seconds) spent by the last decoding run, as measured
// in rawbs_runDecoder.
static jfloat
MediaPlayer_getDecodeTime(JNIEnv *env, jobject thiz)
{
    return real_time;
}
// Average decode rate (frames per second) of the last decoding run.
static jfloat
MediaPlayer_getDecodeFPS(JNIEnv *env, jobject thiz)
{
    return real_fps;
}
// ----------------------------------------------------------------------------
// One-time native initialization: reset playback state and cache the
// Java callback references (player object, postEventFromNative,
// drawFrame) in the global Context.  Throws a RuntimeException on
// lookup failures.
static void MediaPlayer_native_init(JNIEnv *env, jobject thiz)
{
    jclass clazz;
    clazz = env->FindClass("com/ksyun/media/ksy265codec/demo/decoder/hevdecoder/NativeMediaPlayer");
    if (clazz == NULL) {
        jniThrowException(env, "java/lang/RuntimeException", "Can't find MediaPlayer");
        return;
    }
    // reset playback / statistics state
    p_sws_ctx = NULL;
    frames_sum = 0;
    tstart = 0;
    frames = 0;
    tlast = 0;
    renderFPS = 0;
    renderInterval = 0;
    disable_render = 0;
    use_ksy = 0;
    if (thiz) {
        ctx = (Context *) calloc(1, sizeof(Context));
        if (ctx == NULL) {
            return;
        }
        // BUGFIX: the original re-declared 'clazz' here, shadowing the
        // outer variable
        jclass objClazz = env->GetObjectClass(thiz);
        ctx->obj = env->NewGlobalRef(thiz);
        ctx->postEvent = env->GetMethodID(objClazz, "postEventFromNative", "(III)V");
        if (ctx->postEvent == NULL) {
            // BUGFIX: release the half-initialized context (and its global
            // ref) instead of leaking it and leaving callbacks half-wired
            env->DeleteGlobalRef(ctx->obj);
            free(ctx);
            ctx = NULL;
            jniThrowException(env, "java/lang/RuntimeException", "Can't find MediaPlayer.postEventFromNative");
            return;
        }
        ctx->drawFrame = env->GetMethodID(objClazz, "drawFrame", "(II)I");
        if (ctx->drawFrame == NULL) {
            env->DeleteGlobalRef(ctx->obj);
            free(ctx);
            ctx = NULL;
            jniThrowException(env, "java/lang/RuntimeException", "Can't find MediaPlayer.drawFrame");
            return;
        }
    }
}
// Convert the shared gVF YUV frame to RGB565 directly into the pixel
// buffer of the given Android Bitmap.
static void
MediaPlayer_renderBitmap(JNIEnv *env, jobject obj, jobject bitmap)
{
    void* pixels;
    int ret;
    if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
        LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
        // BUGFIX: 'pixels' is uninitialized when the lock fails; the
        // original fell through and wrote through it (undefined behavior)
        return;
    }
    // Convert the image from its native format to RGB565
    uint32_t start_time = getms();
    LOGD("before scale: %d", getms());
#if USE_SWSCALE
    // use swscale, which may be optimized with SSE for x86 arch
    if (p_sws_ctx == NULL) {
        p_sws_ctx = sws_getContext( gVF.width,
                gVF.height,
                PIX_FMT_YUV420P,
                gVF.width,
                gVF.height,
                PIX_FMT_RGB565, SWS_BICUBIC|SWS_CPU_CAPS_MMX|SWS_CPU_CAPS_MMX2|SWS_CPU_CAPS_SSE2, NULL, NULL, NULL);
    }
    if (p_sws_ctx != NULL) {
        unsigned char *src[4];
        int src_stride[4];
        unsigned char *dst[4];
        int dst_stride[4];
        src_stride[0] = gVF.linesize_y;
        src_stride[1] = src_stride[2] = gVF.linesize_uv;
        dst[0] = (unsigned char*)pixels;
        dst_stride[0] = gVF.width * 2;
        sws_scale(p_sws_ctx, (const uint8_t * const *)gVF.yuv_data, src_stride, 0, gVF.height, dst, dst_stride);
    }
#else
    ConvertYCbCrToRGB565( gVF.yuv_data[0],
            gVF.yuv_data[1],
            gVF.yuv_data[2],
            (uint8_t*)pixels,
            gVF.width,
            gVF.height,
            gVF.linesize_y,
            gVF.linesize_uv,
            gVF.width * 2,   // RGB565: 2 bytes per pixel
            420 );
#endif
    uint32_t end_time = getms();
    LOGD("after scale: %d", getms());
    LOGD("scale time: %dms", end_time - start_time);
    AndroidBitmap_unlockPixels(env, bitmap);
}
// Enable dumping of decoded YUV frames and record the output file path.
static void MediaPlayer_set_output(JNIEnv *env, jobject thiz, jstring path) {
    const char *pathStr = env->GetStringUTFChars(path, NULL);
    enable_output_yuv = true;
    // BUGFIX: bound the copy — a path longer than output_path (1024
    // bytes) overflowed the buffer with the original strcpy
    snprintf(output_path, sizeof(output_path), "%s", pathStr);
    // Make sure that local ref is released before a potential exception
    env->ReleaseStringUTFChars(path, pathStr);
}
// Return the active decoder's version string: the QY265 library version
// when use_ksy is set, otherwise the lentoid decoder's numeric version.
static jstring MediaPlayer_getVersion(JNIEnv *env, jobject thiz) {
    if (use_ksy) {
        return env->NewStringUTF(strLibQy265Version);
    } else {
        char version[20];
        // bounded formatting instead of sprintf (defensive; an int fits
        // in 20 bytes but snprintf makes that guarantee explicit)
        snprintf(version, sizeof(version), "%d", lenthevcdec_version());
        return env->NewStringUTF(version);
    }
}
// ----------------------------------------------------------------------------
// JNI method table for NativeMediaPlayer; signatures must match the
// Java-side native declarations exactly.
static JNINativeMethod gMethods[] = {
    { "setDataSource", "(Ljava/lang/String;)I", (void *) MediaPlayer_setDataSource },
    { "native_prepare", "(Landroid/content/Context;IIIF)I", (void *) MediaPlayer_prepare },
    { "native_start", "()I", (void *) MediaPlayer_start },
    { "native_stop", "()I", (void *) MediaPlayer_stop },
    { "getVideoWidth", "()I", (void *) MediaPlayer_getVideoWidth },
    { "getVideoHeight", "()I", (void *) MediaPlayer_getVideoHeight },
    { "native_seekTo", "(I)I", (void *) MediaPlayer_seekTo },
    { "native_pause", "()I", (void *) MediaPlayer_pause },
    { "native_go", "()I", (void *) MediaPlayer_go },
    { "isPlaying", "()Z", (void *) MediaPlayer_isPlaying },
    { "getCurrentPosition", "()I", (void *) MediaPlayer_getCurrentPosition },
    { "getDuration", "()F", (void *) MediaPlayer_getDuration },
    { "getDecodeTime", "()F", (void *) MediaPlayer_getDecodeTime },
    { "getDecodeFPS", "()F", (void *) MediaPlayer_getDecodeFPS },
    { "native_init", "()V", (void *) MediaPlayer_native_init },
    { "renderBitmap", "(Landroid/graphics/Bitmap;)V", (void *) MediaPlayer_renderBitmap },
    { "native_set_output", "(Ljava/lang/String;)V", (void *) MediaPlayer_set_output },
    // fnPtr is declared void*; the original's (jstring*) cast was
    // inconsistent with every other entry
    { "getVersion", "()Ljava/lang/String;", (void *) MediaPlayer_getVersion },
};
// Register the NativeMediaPlayer native methods with the VM.
// Returns 0 on success, -1 on failure (see jniRegisterNativeMethods).
int register_player(JNIEnv *env) {
    return jniRegisterNativeMethods(env, kClassPathName, gMethods, sizeof(gMethods) / sizeof(gMethods[0]));
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/jniplayer.h
================================================
// jniplayer.h - shared declarations between the player and the renderer.
#ifndef __JNIPLAYER_H__
#define __JNIPLAYER_H__
// One decoded 4:2:0 YUV picture.
// NOTE(review): relies on uint8_t (stdint) being included by the .cpp
// before this header — the extract's #include targets are garbled.
struct VideoFrame
{
    int width;         // visible width in pixels
    int height;        // visible height in pixels
    int linesize_y;    // byte stride of the luma plane
    int linesize_uv;   // byte stride of each chroma plane
    double pts;        // presentation time (microseconds in this player)
    uint8_t *yuv_data[3]; // Y, U, V plane pointers
};
// Millisecond wall-clock timestamp (32-bit, wraps ~49 days).
uint32_t getms();
#endif /* __JNIPLAYER_H__ */
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/jniplayer_new.cpp
================================================
// jniplayer.cpp : decode H.265/HEVC video data in separate native thread
//
// Copyright (c) 2013 Strongene Ltd. All Right Reserved.
// http://www.strongene.com
//
// Contributors:
// Shengbin Meng
// James Deng
//
// You are free to re-use this as the basis for your own application
// in source and binary forms, with or without modification, provided
// that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright
// notice and this list of conditions.
// * Redistributions in binary form must reproduce the above
// copyright notice and this list of conditions in the documentation
// and/or other materials provided with the distribution.
#include
#include
#include
#include
#include
#include
#include "jniplayer.h"
#include "jni_utils.h"
#include "yuv2rgb565.h"
#include "gl_renderer.h"
#ifdef __cplusplus
#define __STDC_CONSTANT_MACROS
#define __STDC_LIMIT_MACROS
#ifdef _STDINT_H
#undef _STDINT_H
#endif
#include
#define __STDC_FORMAT_MACROS
#endif
extern "C" {
#include "lenthevcdec.h"
#include "qy265dec.h"
}
#define LOG_TAG "jniplayer"
#define ENABLE_LOGD 0
#if ENABLE_LOGD
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__)
#else
#define LOGD(...)
#endif
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
#ifndef _countof
#define _countof(a) (sizeof(a) / sizeof((a)[0]))
#endif
#define LOOP_PLAY 0
#if ARCH_ARM
#define USE_SWSCALE 0
#else
#define USE_SWSCALE 0
#endif
// Cached Java method IDs for callbacks into NativeMediaPlayer.
struct fields_t {
    jmethodID drawFrame; // static int drawFrame(int width, int height)
    jmethodID postEvent; // static event callback (see postEvent below)
};
// Static description of the currently loaded media source.
struct MediaInfo
{
    int width;            // video width in pixels (0 until known)
    int height;           // video height in pixels (0 until known)
    char data_src[1024];  // path of the input file
    int raw_bs;           // non-zero when the file is a raw HEVC bitstream
};
// Shared frame buffer handed from the decoder thread to the renderer;
// guarded by gVFMutex.
VideoFrame gVF = {0, 0, 0, 0, 0, {NULL, NULL, NULL}};
pthread_mutex_t gVFMutex = PTHREAD_MUTEX_INITIALIZER;
static fields_t fields;          // cached Java callback method IDs
static JNIEnv *gEnv = NULL;      // JNIEnv of the loading thread
static JNIEnv *gEnvLocal = NULL; // JNIEnv cached for the decoder thread
static jclass gClass = NULL;     // global ref to the player class
static MediaInfo media;
static pthread_t decode_thread;
static struct SwsContext *p_sws_ctx; // optional swscale context (USE_SWSCALE)
static const char* const kClassPathName = "com/ksyun/media/ksy265codec/demo/decoder/hevdecoder/NativeMediaPlayer";
// for lenthevcdec
static const uint32_t AU_COUNT_MAX = 1024 * 1024;          // max indexed access units
static const uint32_t AU_BUF_SIZE_MAX = 1024 * 1024 * 50;  // max bitstream bytes loaded
static uint32_t au_pos[AU_COUNT_MAX]; // too big array, use static to save stack space
static uint32_t au_count, au_buf_size;
static uint8_t *au_buf = NULL;        // whole bitstream loaded into memory
static lenthevcdec_ctx lent_ctx = NULL;
static volatile int exit_decode_thread = 0; // set by stop() to end the decode loop
static volatile int is_playing = 0;
// display-FPS statistics (updated by drawFrame)
static int frames_sum = 0;
static double tstart = 0;
static int frames = 0;
static double tlast = 0;
static float renderFPS = 0;         // target render rate requested by Java
static uint64_t renderInterval = 0; // microseconds between frames
static struct timeval timeStart;    // wall-clock time of the first frame
static int use_ksy = 0;             // 1: QY265 decoder, 0: lentoid decoder
static void* ksydec_ctx = NULL;     // QY265 decoder instance
static QY265Frame decframe;         // last frame fetched from QY265
static int disable_render = 0;      // non-zero: decode only, skip display
// NOTE(review): this region is corrupted in the extract — the body of
// next_p2() (presumably "while (rval < a) rval <<= 1; return rval;")
// and the start of a postEvent() helper that calls
// CallStaticVoidMethod(gClass, fields.postEvent, ...) appear fused onto
// one line.  Recover the original text from version control before
// editing; the code is preserved byte-for-byte below.
static inline int next_p2(int a) {
int rval=1;
while(rvalCallStaticVoidMethod(gClass, fields.postEvent, msg, ext1, ext2, 0);
}
// Copy a decoded frame into the shared gVF buffer (stride-compacting
// QY265 frames), pace output to the frame's pts, update FPS stats and
// request display from Java.
// Returns the Java drawFrame() result, 0 when rendering is disabled,
// or -1 on allocation failure.
int drawFrame(VideoFrame * vf)
{
    LOGD("enter drawFrame:%u (%f)", getms(), vf->pts);
    if(disable_render)
        return 0;
    // copy decode frame to global buffer
    pthread_mutex_lock(&gVFMutex);
    if ( gVF.linesize_y != vf->linesize_y || gVF.linesize_uv != vf->linesize_uv || gVF.height != vf->height ) {
        // BUGFIX: all three planes live in ONE malloc'd block based at
        // yuv_data[0] (see the allocation below); the original also
        // free()d yuv_data[1] and yuv_data[2], which point into that
        // block — freeing interior pointers is undefined behavior.
        if ( NULL != gVF.yuv_data[0] )
            free(gVF.yuv_data[0]);
        gVF.yuv_data[0] = gVF.yuv_data[1] = gVF.yuv_data[2] = NULL;
        gVF.yuv_data[0] = (uint8_t*)malloc(vf->linesize_y * vf->height + vf->linesize_uv * vf->height );
        if ( NULL == gVF.yuv_data[0] ) {
            LOGE("malloc failed!\n");
            // BUGFIX: the original returned here with gVFMutex still
            // held, deadlocking every subsequent caller and the renderer.
            pthread_mutex_unlock(&gVFMutex);
            return -1;
        }
        gVF.yuv_data[1] = gVF.yuv_data[0] + vf->linesize_y*vf->height;
        gVF.yuv_data[2] = gVF.yuv_data[1] + vf->linesize_uv*vf->height/2;
    }
    gVF.width = vf->width;
    gVF.height = vf->height;
    gVF.linesize_y = vf->linesize_y;
    gVF.linesize_uv = vf->linesize_uv;
    gVF.pts = vf->pts;
    if(use_ksy) {
        // QY265 frames keep their decoder strides; compact them row by
        // row into gVF's strides.  Two luma rows plus one chroma row of
        // each plane are copied per iteration (assumes even height —
        // TODO confirm for odd-height streams).
        uint8_t *dst[3] = {gVF.yuv_data[0], gVF.yuv_data[1], gVF.yuv_data[2]};
        uint8_t *src[3] = {decframe.pData[0], decframe.pData[1], decframe.pData[2]};
        for (int j = 0; j < gVF.height/2; ++j) {
            memcpy(dst[0], src[0], gVF.linesize_y);
            dst[0] += gVF.linesize_y;
            src[0] += decframe.iStride[0];
            memcpy(dst[0], src[0], gVF.linesize_y);
            dst[0] += gVF.linesize_y;
            src[0] += decframe.iStride[0];
            memcpy(dst[1], src[1], gVF.linesize_uv);
            dst[1] += gVF.linesize_uv;
            src[1] += decframe.iStride[1];
            memcpy(dst[2], src[2], gVF.linesize_uv);
            dst[2] += gVF.linesize_uv;
            src[2] += decframe.iStride[2];
        }
    } else {
        memcpy(gVF.yuv_data[0], vf->yuv_data[0], vf->linesize_y*vf->height);
        memcpy(gVF.yuv_data[1], vf->yuv_data[1], vf->linesize_uv*vf->height/2);
        memcpy(gVF.yuv_data[2], vf->yuv_data[2], vf->linesize_uv*vf->height/2);
    }
    pthread_mutex_unlock(&gVFMutex);
    // wait for display: sleep until this frame's pts is due
    struct timeval timeNow;
    gettimeofday(&timeNow, NULL);
    int64_t timePassed = ((int64_t)(timeNow.tv_sec - timeStart.tv_sec))*1000000 + (timeNow.tv_usec - timeStart.tv_usec);
    int64_t delay = vf->pts - timePassed;
    if (delay > 0) {
        usleep(delay);
    }
    // update information (display FPS once per second)
    gettimeofday(&timeNow, NULL);
    double tnow = timeNow.tv_sec + (timeNow.tv_usec / 1000000.0);
    if (tlast == 0) tlast = tnow;
    if (tstart == 0) tstart = tnow;
    if (tnow > tlast + 1) {
        double avg_fps;
        LOGI("Video Display FPS:%i", (int)frames);
        frames_sum += frames;
        avg_fps = frames_sum / (tnow - tstart);
        LOGI("Video AVG FPS:%.2lf", avg_fps);
        // event 900: fps report; avg is fixed-point scaled by 4096
        postEvent(900, int(frames), int(avg_fps * 4096));
        tlast = tlast + 1;
        frames = 0;
    }
    frames++;
    // request display
    if (gEnvLocal == NULL) gEnvLocal = getJNIEnv();
    LOGD("before request draw:%u (%f)", getms(), vf->pts);
    return gEnvLocal->CallStaticIntMethod(gClass, fields.drawFrame, vf->width, vf->height);
}
/* Locate the first SPS NAL (HEVC type 33) in an Annex-B stream.
 * On success *sps_ptr points at the SPS's 00 00 01 start code and the byte
 * length of the SPS span (up to the next non-SPS NAL, or end of buffer) is
 * returned.  Returns 0 when no SPS is present. */
int lent_hevc_get_sps(uint8_t* buf, int size, uint8_t** sps_ptr)
{
    int pos;
    int sps_start = -1;
    for (pos = 0; pos < size - 4; pos++) {
        if (buf[pos] != 0 || buf[pos + 1] != 0 || buf[pos + 2] != 1)
            continue;                       /* not a start code */
        int nal = (buf[pos + 3] >> 1) & 0x3F;
        if (sps_start >= 0 && nal != 33)
            break;                          /* SPS ends at the next non-SPS NAL */
        if (nal == 33)
            sps_start = pos;                /* remember where the SPS begins */
        pos += 2;                           /* skip the rest of the start code */
    }
    if (sps_start < 0)
        return 0;
    if (pos == size - 4)
        pos = size;                         /* SPS runs to the end of the buffer */
    *sps_ptr = buf + sps_start;
    return pos - sps_start;
}
/* Find the boundary of the next access unit in an Annex-B HEVC stream.
 * Returns the byte offset where the next AU starts (or 'size' if none is
 * found before the end).  *is_idr is set to 1 when the boundary is the
 * start of a parameter-set run (VPS/SPS/PPS, NAL types 32..34).
 * NOTE: keeps static state ('seq_hdr') across calls to track whether a
 * parameter-set run is currently in progress. */
int lent_hevc_get_frame(uint8_t* buf, int size, int *is_idr)
{
    static int seq_hdr = 0;
    int pos, idr = 0;
    for (pos = 0; pos < size - 6; pos++) {
        if (!(buf[pos] == 0 && buf[pos + 1] == 0 && buf[pos + 2] == 1))
            continue;                       /* not a start code */
        int nal = (buf[pos + 3] & 0x7E) >> 1;
        if (nal <= 21 && (buf[pos + 5] & 0x80)) {
            /* first slice of a picture */
            if (!seq_hdr)
                break;                      /* AU boundary */
            seq_hdr = 0;                    /* picture following its headers */
        }
        if (nal >= 32 && nal <= 34) {       /* VPS/SPS/PPS */
            if (!seq_hdr) {
                seq_hdr = 1;
                idr = 1;
                break;                      /* new parameter-set run: AU boundary */
            }
            seq_hdr = 1;
        }
        pos += 2;                           /* skip the rest of the start code */
    }
    if (pos == size - 6)
        pos = size;                         /* no boundary before end of buffer */
    if (NULL != is_idr)
        *is_idr = idr;
    return pos;
}
/* Decode-thread entry point (started by MediaPlayer_start).  Decodes every
 * access unit prepared by rawbs_prepare(), hands each decoded picture to
 * drawFrame(), then flushes the decoder.  Destroys the decoder contexts and
 * frees the AU buffer on exit.  The 'p' argument is unused. */
void* rawbs_runDecoder(void *p)
{
int32_t got_frame, width, height, stride[3];
uint8_t* pixels[3];
int64_t pts, got_pts;
int frame_count, ret, i;
/* Require a decoder context (either backend) and the AU buffer from rawbs_prepare(). */
if ( (NULL == lent_ctx && ksydec_ctx == NULL) || NULL == au_buf )
return NULL;
decode:
// decode all AUs
frame_count = 0;
for ( i = 0; i < au_count && !exit_decode_thread; i++ ) {
/* Synthetic input timestamp: 40 units per AU (presumably ms, i.e. 25 fps)
 * -- TODO confirm; only passed through to the lentoid decoder. */
pts = i * 40;
got_frame = 0;
uint32_t start_time = getms();
LOGD("before decode: %u", start_time);
if(use_ksy) {
/* KSY backend: feed one AU, then poll for a decoded picture. */
QY265DecodeFrame(ksydec_ctx, au_buf + au_pos[i], au_pos[i + 1] - au_pos[i], &ret, 0);
if ( ret < 0 ) {
LOGE("call QY265DecodeFrame failed! ret = %d\n", ret);
goto exit;
}
QY265DecoderGetDecodedFrame(ksydec_ctx, &decframe, &ret, 0);
if ( ret == 0 && decframe.bValid ) {
got_frame = 1;
width = decframe.frameinfo.nWidth;
height = decframe.frameinfo.nHeight;
stride[0] = decframe.iStride[0];
stride[1] = decframe.iStride[1];
pixels[0] = decframe.pData[0];
pixels[1] = decframe.pData[1];
pixels[2] = decframe.pData[2];
}
else
got_frame = 0;
} else {
/* Lentoid backend: decode one AU; outputs arrive via the out params. */
ret = lenthevcdec_decode_frame(lent_ctx, au_buf + au_pos[i], au_pos[i + 1] - au_pos[i], pts,
&got_frame, &width, &height, stride, (void**)pixels, &got_pts);
if ( ret < 0 ) {
LOGE("call lenthevcdec_decode_frame failed! ret = %d\n", ret);
goto exit;
}
}
uint32_t end_time = getms();
LOGD("after decode: %u", end_time);
uint32_t dec_time = end_time - start_time;
if ( got_frame > 0 ) {
LOGD("decoding time: %u - %u = %u\n", end_time, start_time, dec_time);
/* NOTE(review): on the KSY path got_pts is never assigned, so this log
 * prints an indeterminate value there. */
LOGD("decode frame: pts = %" PRId64 ", linesize = {%d,%d,%d}\n", got_pts, stride[0], stride[1], stride[2]);
if ( media.width != width || media.height != height ) {
LOGD("Video dimensions change! %dx%d -> %dx%d\n", media.width, media.height, width, height);
media.width = width;
media.height = height;
}
// draw frame to screen
VideoFrame vf;
vf.width = width;
vf.height = height;
vf.linesize_y = stride[0];
vf.linesize_uv = stride[1];
/* Presentation time in microseconds: fixed render cadence, not the
 * decoder's timestamps. */
vf.pts = renderInterval * frame_count;
vf.yuv_data[0] = pixels[0];
vf.yuv_data[1] = pixels[1];
vf.yuv_data[2] = pixels[2];
if (frame_count == 0) {
/* First frame anchors the playback clock used by drawFrame(). */
gettimeofday(&timeStart, NULL);
}
drawFrame(&vf);
if(use_ksy)
QY265DecoderReturnDecodedFrame(ksydec_ctx, &decframe);
frame_count++;
}
}
#if LOOP_PLAY
if (!exit_decode_thread) {
LOGI("automatically play again\n");
goto decode;
}
#endif
// flush decoder
while ( !exit_decode_thread ) {
got_frame = 0;
if(use_ksy) {
QY265DecoderGetDecodedFrame(ksydec_ctx, &decframe, &ret, 0);
if ( ret == 0 && decframe.bValid ) {
got_frame = 1;
width = decframe.frameinfo.nWidth;
height = decframe.frameinfo.nHeight;
stride[0] = decframe.iStride[0];
stride[1] = decframe.iStride[1];
pixels[0] = decframe.pData[0];
pixels[1] = decframe.pData[1];
pixels[2] = decframe.pData[2];
} else
break;
} else {
/* NULL/0 input drains buffered pictures.  NOTE(review): 'pts' here
 * holds its value from the last loop iteration (or is indeterminate if
 * au_count was 0) -- confirm the decoder ignores it while flushing. */
ret = lenthevcdec_decode_frame(lent_ctx, NULL, 0, pts,
&got_frame, &width, &height, stride, (void**)pixels, &got_pts);
if ( ret < 0 || got_frame <= 0)
break;
}
if ( got_frame > 0 ) {
if ( media.width != width || media.height != height ) {
LOGD("Video dimensions change! %dx%d -> %dx%d\n", media.width, media.height, width, height);
media.width = width;
media.height = height;
}
// draw frame to screen
VideoFrame vf;
vf.width = width;
vf.height = height;
vf.linesize_y = stride[0];
vf.linesize_uv = stride[1];
vf.pts = renderInterval * frame_count;
vf.yuv_data[0] = pixels[0];
vf.yuv_data[1] = pixels[1];
vf.yuv_data[2] = pixels[2];
drawFrame(&vf);
if(use_ksy)
QY265DecoderReturnDecodedFrame(ksydec_ctx, &decframe);
frame_count++;
}
}
exit:
/* Tear down everything this thread owns. */
if ( NULL != au_buf )
free(au_buf);
au_buf = 0;
if ( NULL != lent_ctx )
lenthevcdec_destroy(lent_ctx);
lent_ctx = NULL;
if ( ksydec_ctx != NULL )
QY265DecoderDestroy(ksydec_ctx);
ksydec_ctx = NULL;
postEvent(909, int(frame_count), 0); // end of file
detachJVM();
is_playing = 0;
LOGI("decode thread exit\n");
exit_decode_thread = 0;
return NULL;
}
/* Store the media path supplied from Java and flag raw HEVC bitstream files
 * by extension.  Returns 0 on success, -1 if the path string could not be
 * obtained from the JVM. */
static int
MediaPlayer_setDataSource(JNIEnv *env, jobject thiz, jstring path)
{
    const char *pathStr = env->GetStringUTFChars(path, NULL);
    /* BUGFIX: GetStringUTFChars may return NULL (OOM, exception pending). */
    if (NULL == pathStr)
        return -1;
    memset(&media, 0, sizeof(media));
    /* BUGFIX: bound the copy -- the Java-supplied path is untrusted and the
     * original strcpy could overflow media.data_src. */
    snprintf(media.data_src, sizeof(media.data_src), "%s", pathStr);
    // Make sure that local ref is released before a potential exception
    env->ReleaseStringUTFChars(path, pathStr);
    // is raw HEVC bitstream file ?
    static const char * hevc_raw_bs_ext[] = {".hevc", ".hm91", ".hm10", ".bit", ".hvc", ".h265", ".265"};
    char * ext = strrchr(media.data_src, '.');
    if (NULL != ext) {
        int i;
        for (i = 0; i < _countof(hevc_raw_bs_ext); i++) {
            if (strcasecmp(hevc_raw_bs_ext[i], ext) == 0)
                break;
        }
        if (i < _countof(hevc_raw_bs_ext))
            media.raw_bs = 1;   /* extension matched: treat as raw Annex-B HEVC */
    }
    return 0;
}
static int rawbs_prepare(int threads)
{
FILE *in_file;
int32_t got_frame, width, height, stride[3];
uint8_t* pixels[3];
int64_t pts, got_pts;
uint8_t *sps;
lenthevcdec_ctx one_thread_ctx;
int compatibility, frame_count, sps_len, ret, i;
in_file = NULL;
au_buf = NULL;
lent_ctx = NULL;
one_thread_ctx = NULL;
ksydec_ctx = NULL;
// get compatibility version
compatibility = 0x7fffffff;
if ( strncasecmp(".hm91", media.data_src + (strlen(media.data_src) - 5), 5) == 0 )
compatibility = 91;
else if ( strncasecmp(".hm10", media.data_src + (strlen(media.data_src) - 5), 5) == 0 )
compatibility = 100;
// read file
in_file = fopen(media.data_src, "rb");
if ( NULL == in_file ) {
LOGE("Can not open input file '%s'\n", media.data_src);
goto error_exit;
}
fseek(in_file, 0, SEEK_END);
au_buf_size = ftell(in_file);
fseek(in_file, 0, SEEK_SET);
LOGE("file size is %d bytes\n", au_buf_size);
if ( au_buf_size > AU_BUF_SIZE_MAX )
au_buf_size = AU_BUF_SIZE_MAX;
au_buf = (uint8_t*)malloc(au_buf_size);
if ( NULL == au_buf ) {
LOGE("call malloc failed! size is %d\n", au_buf_size);
goto error_exit;
}
if ( fread(au_buf, 1, au_buf_size, in_file) != au_buf_size ) {
LOGE("call fread failed!\n");
goto error_exit;
}
fclose(in_file);
in_file = NULL;
LOGE("%d bytes read to address %p\n", au_buf_size, au_buf);
// find all AU
au_count = 0;
for ( i = 0; i < au_buf_size && au_count < (AU_COUNT_MAX - 1); i+=3 ) {
i += lent_hevc_get_frame(au_buf + i, au_buf_size - i, NULL);
if (i < au_buf_size) {
au_pos[au_count++] = i;
}
LOGD("AU[%d] = %d\n", au_count - 1, au_pos[au_count - 1]);
}
au_pos[au_count] = au_buf_size; // include last AU
LOGE("found %d AUs\n", au_count);
// open lentoid HEVC decoder
if(use_ksy) {
int hr = QY_OK;
QY265DecConfig config;
config.threads = threads;
config.bEnableOutputRecToFile = 0;
config.strRecYuvFileName = NULL;
ksydec_ctx = QY265DecoderCreate(&config, &hr);
if(ksydec_ctx == NULL) {
LOGE("call QY265DecoderCreate fail..");
goto error_exit;
}
LOGE("call QY265DecoderCreate Succeed..");
}
LOGI("create lentoid decoder: compatibility = %d, threads = %d\n", compatibility, threads);
lent_ctx = lenthevcdec_create(threads, compatibility, NULL);
if ( NULL == lent_ctx ) {
LOGE("call lenthevcdec_create failed!\n");
goto error_exit;
}
LOGD("get decoder %p\n", lent_ctx);
// find sps, decode it and get video resolution
sps_len = lent_hevc_get_sps(au_buf, au_buf_size, &sps);
if ( sps_len > 0 ) {
// get a one-thread decoder to decode SPS
one_thread_ctx = lenthevcdec_create(1, compatibility, NULL);
if ( NULL == lent_ctx ) {
LOGE("call lenthevcdec_create fail..");
goto error_exit;
}
width = 0;
height = 0;
ret = lenthevcdec_decode_frame(one_thread_ctx, sps, sps_len, 0, &got_frame, &width, &height, stride, (void**)pixels, &pts);
if ( 0 != width && 0 != height ) {
media.width = width;
media.height = height;
LOGE("Video dimensions is %dx%d\n", width, height);
}
lenthevcdec_destroy(one_thread_ctx);
one_thread_ctx = NULL;
if(use_ksy) {
gVF.linesize_y = next_p2(width);
gVF.linesize_uv = next_p2(width/2);
gVF.yuv_data[0] = (uint8_t*)malloc( gVF.linesize_y * height);
gVF.yuv_data[1] = (uint8_t*)malloc( gVF.linesize_uv * height/2);
gVF.yuv_data[2] = (uint8_t*)malloc( gVF.linesize_uv * height/2);
LOGE("linesize:%d, %d", gVF.linesize_y, gVF.linesize_uv);
}
}
return 0;
error_exit:
if ( NULL != in_file )
fclose(in_file);
in_file = NULL;
if ( NULL != au_buf )
free(au_buf);
au_buf = NULL;
if ( NULL != lent_ctx )
lenthevcdec_destroy(lent_ctx);
lent_ctx = NULL;
if ( NULL != one_thread_ctx )
lenthevcdec_destroy(one_thread_ctx);
one_thread_ctx = NULL;
if ( NULL != ksydec_ctx)
QY265DecoderDestroy(ksydec_ctx);
ksydec_ctx = NULL;
return -1;
}
/* Configure decoder selection, render pacing and rendering flag, then index
 * the bitstream via rawbs_prepare().  decoderType 0 selects the KSY decoder;
 * fps 0 means "render as fast as possible". */
static int
MediaPlayer_prepare(JNIEnv *env, jobject thiz, jint decoderType, jint render, jint threadNumber, jfloat fps)
{
    LOGE("MediaPlayer_prepare: decoderType:%d, %d threads, fps %f\n", decoderType, threadNumber, fps);
    renderFPS = fps;
    /* Inter-frame interval in microseconds (1 when fps is unspecified). */
    renderInterval = (fps == 0) ? 1 : (1.0 / fps * 1000000);
    if (decoderType == 0)
        use_ksy = 1;
    disable_render = render;
    return rawbs_prepare(threadNumber);
}
/* Spawn the decode thread running rawbs_runDecoder().
 * Returns 0 on success, -1 if the thread could not be created. */
static int
MediaPlayer_start(JNIEnv *env, jobject thiz)
{
    LOGI("start decoding thread");
    /* BUGFIX: report a failed pthread_create instead of silently returning
     * success with no thread running. */
    int ret = pthread_create(&decode_thread, NULL, rawbs_runDecoder, NULL);
    if (ret != 0) {
        LOGE("pthread_create failed! ret = %d", ret);
        return -1;
    }
    return 0;
}
/* Pause is not implemented for the raw-bitstream player; always succeeds. */
static int
MediaPlayer_pause(JNIEnv *env, jobject thiz)
{
return 0;
}
/* Resume ("go") is not implemented for the raw-bitstream player; always succeeds. */
static int
MediaPlayer_go(JNIEnv *env, jobject thiz)
{
return 0;
}
/* Stop playback: signal the decode thread to exit, join it, then release
 * the shared frame buffer and scaler context. */
static int
MediaPlayer_stop(JNIEnv *env, jobject thiz)
{
void* result;
exit_decode_thread = 1;
pthread_join(decode_thread, &result);
exit_decode_thread = 0;
if (p_sws_ctx != NULL) {
// sws_freeContext(p_sws_ctx);
p_sws_ctx = NULL;
}
/* Only yuv_data[0] is freed because drawFrame() makes [1]/[2] interior
 * pointers into the same allocation.  NOTE(review): rawbs_prepare()
 * allocates the three planes separately, so if no frame was ever drawn
 * this path leaks [1] and [2] -- confirm buffer ownership. */
if ( NULL != gVF.yuv_data[0] )
free(gVF.yuv_data[0]);
memset(&gVF, 0, sizeof(gVF));
LOGI("media player stopped\n");
return 0;
}
/* Report whether the decode thread is still running (is_playing is cleared
 * by rawbs_runDecoder on exit). */
static bool
MediaPlayer_isPlaying(JNIEnv *env, jobject thiz)
{
    return is_playing != 0;
}
/* Seeking is not implemented for the raw-bitstream player; always succeeds. */
static int
MediaPlayer_seekTo(JNIEnv *env, jobject thiz, jint msec)
{
return 0;
}
/* Width of the currently loaded video (0 until the SPS has been decoded). */
static int
MediaPlayer_getVideoWidth(JNIEnv *env, jobject thiz)
{
    return media.width;
}
/* Height of the currently loaded video (0 until the SPS has been decoded). */
static int
MediaPlayer_getVideoHeight(JNIEnv *env, jobject thiz)
{
    return media.height;
}
/* Position reporting is not implemented; always returns 0 ms. */
static int
MediaPlayer_getCurrentPosition(JNIEnv *env, jobject thiz)
{
    return 0;
}
/* Duration reporting is not implemented; always returns 0 ms. */
static int
MediaPlayer_getDuration(JNIEnv *env, jobject thiz)
{
    return 0;
}
// ----------------------------------------------------------------------------
/* One-time initialization: cache the method IDs of the Java callbacks
 * (postEventFromNative, drawFrame) and reset all player globals.  Throws a
 * RuntimeException to Java and returns early when lookups fail. */
static void MediaPlayer_native_init(JNIEnv *env)
{
jclass clazz;
clazz = env->FindClass("com/ksyun/media/ksy265codec/demo/decoder/hevdecoder/NativeMediaPlayer");
if (clazz == NULL) {
jniThrowException(env, "java/lang/RuntimeException", "Can't find MediaPlayer");
return;
}
fields.postEvent = env->GetStaticMethodID(clazz, "postEventFromNative", "(III)V");
if (fields.postEvent == NULL) {
jniThrowException(env, "java/lang/RuntimeException", "Can't find MediaPlayer.postEventFromNative");
return;
}
fields.drawFrame = env->GetStaticMethodID(clazz, "drawFrame","(II)I");
if (fields.drawFrame == NULL) {
jniThrowException(env, "java/lang/RuntimeException", "Can't find MediaPlayer.drawFrame");
return;
}
/* Reset global state; gClass/gEnv are (re)assigned in native_setup. */
gClass = NULL;
gEnv = NULL;
gEnvLocal = NULL;
p_sws_ctx = NULL;
frames_sum = 0;
tstart = 0;
frames = 0;
tlast = 0;
renderFPS = 0;
renderInterval = 0;
disable_render = 0;
}
/* Per-instance setup: take a global reference to the player's Java class so
 * the decode thread can call its static methods later, and remember the
 * setup-time JNIEnv.  NOTE(review): gEnv is only valid on this thread --
 * other threads must use getJNIEnv()/gEnvLocal instead. */
static void
MediaPlayer_native_setup(JNIEnv *env, jobject thiz, jobject weak_this)
{
// Hold onto the MediaPlayer class for use in calling the static method
// that posts events to the application thread.
jclass clazz = env->GetObjectClass(thiz);
if (clazz == NULL) {
jniThrowException(env, "java/lang/Exception", kClassPathName);
return;
}
gClass = (jclass)env->NewGlobalRef(clazz);
gEnv = env;
}
/* Convert the current shared frame (gVF) to RGB565 directly into the pixels
 * of the supplied Android bitmap.  Called from the Java render thread. */
static void
MediaPlayer_renderBitmap(JNIEnv *env, jobject obj, jobject bitmap)
{
    void* pixels;
    int ret;
    if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
        LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
        /* BUGFIX: bail out -- 'pixels' is uninitialized here and the bitmap
         * was never locked, so converting/unlocking would be undefined. */
        return;
    }
    /* BUGFIX: hold gVFMutex so the decode thread (drawFrame) cannot free or
     * reallocate gVF's planes while we are reading them. */
    pthread_mutex_lock(&gVFMutex);
    // Convert the image from its native format to RGB565
    uint32_t start_time = getms();
    LOGD("before scale: %d", getms());
#if USE_SWSCALE
    // use swscale, which may be optimized with SSE for x86 arch
    if (p_sws_ctx == NULL) {
        p_sws_ctx = sws_getContext( gVF.width,
                                    gVF.height,
                                    PIX_FMT_YUV420P,
                                    gVF.width,
                                    gVF.height,
                                    PIX_FMT_RGB565, SWS_BICUBIC|SWS_CPU_CAPS_MMX|SWS_CPU_CAPS_MMX2|SWS_CPU_CAPS_SSE2, NULL, NULL, NULL);
    }
    if (p_sws_ctx != NULL) {
        unsigned char *src[4];
        int src_stride[4];
        unsigned char *dst[4];
        int dst_stride[4];
        src_stride[0] = gVF.linesize_y;
        src_stride[1] = src_stride[2] = gVF.linesize_uv;
        dst[0] = (unsigned char*)pixels;
        dst_stride[0] = gVF.width * 2;  /* RGB565: 2 bytes per pixel */
        sws_scale(p_sws_ctx, (const uint8_t * const *)gVF.yuv_data, src_stride, 0, gVF.height, dst, dst_stride);
    }
#else
    ConvertYCbCrToRGB565( gVF.yuv_data[0],
                          gVF.yuv_data[1],
                          gVF.yuv_data[2],
                          (uint8_t*)pixels,
                          gVF.width,
                          gVF.height,
                          gVF.linesize_y,
                          gVF.linesize_uv,
                          gVF.width * 2,
                          420 );
#endif
    uint32_t end_time = getms();
    LOGD("after scale: %d", getms());
    LOGD("scale time: %dms", end_time - start_time);
    pthread_mutex_unlock(&gVFMutex);
    AndroidBitmap_unlockPixels(env, bitmap);
}
// ----------------------------------------------------------------------------
/* JNI registration table: maps the Java native methods of NativeMediaPlayer
 * (name, signature) to the C++ implementations above. */
static JNINativeMethod gMethods[] = {
{ "setDataSource", "(Ljava/lang/String;)I", (void *) MediaPlayer_setDataSource },
{ "native_prepare", "(IIIF)I", (void *) MediaPlayer_prepare },
{ "native_start", "()I", (void *) MediaPlayer_start },
{ "native_stop", "()I", (void *) MediaPlayer_stop },
{ "getVideoWidth", "()I", (void *) MediaPlayer_getVideoWidth },
{ "getVideoHeight", "()I", (void *) MediaPlayer_getVideoHeight },
{ "native_seekTo", "(I)I", (void *) MediaPlayer_seekTo },
{ "native_pause", "()I", (void *) MediaPlayer_pause },
{ "native_go", "()I", (void *) MediaPlayer_go },
{ "isPlaying", "()Z", (void *) MediaPlayer_isPlaying },
{ "getCurrentPosition", "()I", (void *) MediaPlayer_getCurrentPosition },
{ "getDuration", "()I", (void *) MediaPlayer_getDuration },
{ "native_init", "()V", (void *) MediaPlayer_native_init },
{ "native_setup", "(Ljava/lang/Object;)V", (void *) MediaPlayer_native_setup },
{ "renderBitmap", "(Landroid/graphics/Bitmap;)V", (void *) MediaPlayer_renderBitmap },
};
/* Register the gMethods table against kClassPathName; returns the JNI
 * registration status. */
int register_player(JNIEnv *env) {
return jniRegisterNativeMethods(env, kClassPathName, gMethods, sizeof(gMethods) / sizeof(gMethods[0]));
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/yuv2rgb565.cpp
================================================
// Copyright (c) 2010 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// contributor Siarhei Siamashka
// This file is modified based on:
// http://dxr.mozilla.org/mozilla-central/source/gfx/ycbcr/yuv_convert_arm.cpp
#include "yuv2rgb565.h"
#if ARCH_ARM && HAVE_NEON
/***************************************
* convert in neon:
*/
/* Convert one scanline of planar Y'CbCr to RGB565 using NEON inline asm.
 * dst receives n RGB565 pixels; y has n luma samples, u/v have n/2 chroma
 * samples (horizontally subsampled).  'oddflag' non-zero converts one
 * leading pixel scalar-style to restore even alignment.
 * NOTE(review): the asm's clip-constant comments (d8..d11) look inconsistent
 * with the loaded values -- verify against the upstream Mozilla/Chromium
 * yuv_convert_arm.cpp before modifying. */
void __attribute((noinline,optimize("-fomit-frame-pointer")))
yuv42x_to_rgb565_row_neon(uint16_t *dst,
const uint8_t *y,
const uint8_t *u,
const uint8_t *v,
int n,
int oddflag)
{
/* Per-channel fixed-point accumulator seeds for the R/G/B conversions,
 * 16-byte aligned for the :128 NEON loads below. */
static __attribute__((aligned(16))) uint16_t acc_r[8] = {
22840, 22840, 22840, 22840, 22840, 22840, 22840, 22840,
};
static __attribute__((aligned(16))) uint16_t acc_g[8] = {
17312, 17312, 17312, 17312, 17312, 17312, 17312, 17312,
};
static __attribute__((aligned(16))) uint16_t acc_b[8] = {
28832, 28832, 28832, 28832, 28832, 28832, 28832, 28832,
};
/*
 * Registers:
 * q0, q1 : d0, d1, d2, d3 - are used for initial loading of YUV data
 * q2 : d4, d5 - are used for storing converted RGB data
 * q3 : d6, d7 - are used for temporary storage
 *
 * q4-q7 - reserved
 *
 * q8, q9 : d16, d17, d18, d19 - are used for expanded Y data
 * q10 : d20, d21
 * q11 : d22, d23
 * q12 : d24, d25
 * q13 : d26, d27
 * q13, q14, q15 - various constants (#16, #149, #204, #50, #104, #154)
 */
/* The macro 'convert_macroblock' handles 16/8/4/2/1-pixel groups; the
 * driver code after it processes the row as 16-pixel blocks plus a
 * binary-decomposed tail. */
asm volatile (
".fpu neon\n"
/* Allow to build on targets not supporting neon, and force the object file
 * target to avoid bumping the final binary target */
".arch armv7-a\n"
".object_arch armv4t\n"
".macro convert_macroblock size\n"
/* load up to 16 source pixels */
".if \\size == 16\n"
"pld [%[y], #64]\n"
"pld [%[u], #64]\n"
"pld [%[v], #64]\n"
"vld1.8 {d1}, [%[y]]!\n"
"vld1.8 {d3}, [%[y]]!\n"
"vld1.8 {d0}, [%[u]]!\n"
"vld1.8 {d2}, [%[v]]!\n"
".elseif \\size == 8\n"
"vld1.8 {d1}, [%[y]]!\n"
"vld1.8 {d0[0]}, [%[u]]!\n"
"vld1.8 {d0[1]}, [%[u]]!\n"
"vld1.8 {d0[2]}, [%[u]]!\n"
"vld1.8 {d0[3]}, [%[u]]!\n"
"vld1.8 {d2[0]}, [%[v]]!\n"
"vld1.8 {d2[1]}, [%[v]]!\n"
"vld1.8 {d2[2]}, [%[v]]!\n"
"vld1.8 {d2[3]}, [%[v]]!\n"
".elseif \\size == 4\n"
"vld1.8 {d1[0]}, [%[y]]!\n"
"vld1.8 {d1[1]}, [%[y]]!\n"
"vld1.8 {d1[2]}, [%[y]]!\n"
"vld1.8 {d1[3]}, [%[y]]!\n"
"vld1.8 {d0[0]}, [%[u]]!\n"
"vld1.8 {d0[1]}, [%[u]]!\n"
"vld1.8 {d2[0]}, [%[v]]!\n"
"vld1.8 {d2[1]}, [%[v]]!\n"
".elseif \\size == 2\n"
"vld1.8 {d1[0]}, [%[y]]!\n"
"vld1.8 {d1[1]}, [%[y]]!\n"
"vld1.8 {d0[0]}, [%[u]]!\n"
"vld1.8 {d2[0]}, [%[v]]!\n"
".elseif \\size == 1\n"
"vld1.8 {d1[0]}, [%[y]]!\n"
"vld1.8 {d0[0]}, [%[u]]!\n"
"vld1.8 {d2[0]}, [%[v]]!\n"
".else\n"
".error \"unsupported macroblock size\"\n"
".endif\n"
/* d1 - Y data (first 8 bytes) */
/* d3 - Y data (next 8 bytes) */
/* d0 - U data, d2 - V data */
/* split even and odd Y color components */
"vuzp.8 d1, d3\n" /* d1 - evenY, d3 - oddY */
/* clip upper and lower boundaries */
"vqadd.u8 q0, q0, q4\n"
"vqadd.u8 q1, q1, q4\n"
"vqsub.u8 q0, q0, q5\n"
"vqsub.u8 q1, q1, q5\n"
"vshr.u8 d4, d2, #1\n" /* d4 = V >> 1 */
"vmull.u8 q8, d1, d27\n" /* q8 = evenY * 149 */
"vmull.u8 q9, d3, d27\n" /* q9 = oddY * 149 */
"vld1.16 {d20, d21}, [%[acc_r], :128]\n" /* q10 - initialize accumulator for red */
"vsubw.u8 q10, q10, d4\n" /* red acc -= (V >> 1) */
"vmlsl.u8 q10, d2, d28\n" /* red acc -= V * 204 */
"vld1.16 {d22, d23}, [%[acc_g], :128]\n" /* q11 - initialize accumulator for green */
"vmlsl.u8 q11, d2, d30\n" /* green acc -= V * 104 */
"vmlsl.u8 q11, d0, d29\n" /* green acc -= U * 50 */
"vld1.16 {d24, d25}, [%[acc_b], :128]\n" /* q12 - initialize accumulator for blue */
"vmlsl.u8 q12, d0, d30\n" /* blue acc -= U * 104 */
"vmlsl.u8 q12, d0, d31\n" /* blue acc -= U * 154 */
"vhsub.s16 q3, q8, q10\n" /* calculate even red components */
"vhsub.s16 q10, q9, q10\n" /* calculate odd red components */
"vqshrun.s16 d0, q3, #6\n" /* right shift, narrow and saturate even red components */
"vqshrun.s16 d3, q10, #6\n" /* right shift, narrow and saturate odd red components */
"vhadd.s16 q3, q8, q11\n" /* calculate even green components */
"vhadd.s16 q11, q9, q11\n" /* calculate odd green components */
"vqshrun.s16 d1, q3, #6\n" /* right shift, narrow and saturate even green components */
"vqshrun.s16 d4, q11, #6\n" /* right shift, narrow and saturate odd green components */
"vhsub.s16 q3, q8, q12\n" /* calculate even blue components */
"vhsub.s16 q12, q9, q12\n" /* calculate odd blue components */
"vqshrun.s16 d2, q3, #6\n" /* right shift, narrow and saturate even blue components */
"vqshrun.s16 d5, q12, #6\n" /* right shift, narrow and saturate odd blue components */
"vzip.8 d0, d3\n" /* join even and odd red components */
"vzip.8 d1, d4\n" /* join even and odd green components */
"vzip.8 d2, d5\n" /* join even and odd blue components */
"vshll.u8 q3, d0, #8\n\t"
"vshll.u8 q8, d1, #8\n\t"
"vshll.u8 q9, d2, #8\n\t"
"vsri.u16 q3, q8, #5\t\n"
"vsri.u16 q3, q9, #11\t\n"
/* store pixel data to memory */
".if \\size == 16\n"
" vst1.16 {d6, d7}, [%[dst]]!\n"
" vshll.u8 q3, d3, #8\n\t"
" vshll.u8 q8, d4, #8\n\t"
" vshll.u8 q9, d5, #8\n\t"
" vsri.u16 q3, q8, #5\t\n"
" vsri.u16 q3, q9, #11\t\n"
" vst1.16 {d6, d7}, [%[dst]]!\n"
".elseif \\size == 8\n"
" vst1.16 {d6, d7}, [%[dst]]!\n"
".elseif \\size == 4\n"
" vst1.16 {d6}, [%[dst]]!\n"
".elseif \\size == 2\n"
" vst1.16 {d6[0]}, [%[dst]]!\n"
" vst1.16 {d6[1]}, [%[dst]]!\n"
".elseif \\size == 1\n"
" vst1.16 {d6[0]}, [%[dst]]!\n"
".endif\n"
".endm\n"
"vmov.u8 d8, #15\n" /* add this to U/V to saturate upper boundary */
"vmov.u8 d9, #20\n" /* add this to Y to saturate upper boundary */
"vmov.u8 d10, #31\n" /* sub this from U/V to saturate lower boundary */
"vmov.u8 d11, #36\n" /* sub this from Y to saturate lower boundary */
"vmov.u8 d26, #16\n"
"vmov.u8 d27, #149\n"
"vmov.u8 d28, #204\n"
"vmov.u8 d29, #50\n"
"vmov.u8 d30, #104\n"
"vmov.u8 d31, #154\n"
"cmp %[oddflag], #0\n"
"beq 1f\n"
"convert_macroblock 1\n"
"sub %[n], %[n], #1\n"
"1:\n"
"subs %[n], %[n], #16\n"
"blt 2f\n"
"1:\n"
"convert_macroblock 16\n"
"subs %[n], %[n], #16\n"
"bge 1b\n"
"2:\n"
"tst %[n], #8\n"
"beq 3f\n"
"convert_macroblock 8\n"
"3:\n"
"tst %[n], #4\n"
"beq 4f\n"
"convert_macroblock 4\n"
"4:\n"
"tst %[n], #2\n"
"beq 5f\n"
"convert_macroblock 2\n"
"5:\n"
"tst %[n], #1\n"
"beq 6f\n"
"convert_macroblock 1\n"
"6:\n"
".purgem convert_macroblock\n"
: [y] "+&r" (y), [u] "+&r" (u), [v] "+&r" (v), [dst] "+&r" (dst), [n] "+&r" (n)
: [acc_r] "r" (&acc_r[0]), [acc_g] "r" (&acc_g[0]), [acc_b] "r" (&acc_b[0]),
[oddflag] "r" (oddflag)
: "cc", "memory",
"d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
"d8", "d9", "d10", "d11", /* "d12", "d13", "d14", "d15", */
"d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
"d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31"
);
}
/* Convert a whole planar Y'CbCr picture to RGB565 using the NEON row kernel.
 * yuv_type selects chroma subsampling: 420 shares one chroma row between two
 * luma rows (y_shift=1); other values use one chroma row per luma row.
 * The NEON row kernel handles the horizontal subsampling itself, so the
 * x_shift the original computed here was unused and has been removed. */
void ConvertYCbCrToRGB565_neon( const uint8_t* y_buf,
                                const uint8_t* u_buf,
                                const uint8_t* v_buf,
                                uint8_t* rgb_buf,
                                int pic_width,
                                int pic_height,
                                int y_stride,
                                int uv_stride,
                                int rgb_stride,
                                int yuv_type)
{
    int y_shift = (yuv_type == 420); //YUV 4:2:0
    /*
    From Wiki: The Y'V12 format is essentially the same as Y'UV420p,
    but it has the U and V data reversed: the Y' values are followed by the V values, with the U values last.
    */
    for (int i = 0; i < pic_height; i++) {
        int yoffs = y_stride * i;
        int uvoffs = uv_stride * (i >> y_shift);
        yuv42x_to_rgb565_row_neon((uint16_t*)(rgb_buf + rgb_stride * i),
                                  y_buf + yoffs,
                                  u_buf + uvoffs,
                                  v_buf + uvoffs,
                                  pic_width,
                                  0);
    }
}
#endif //ARCH_ARM && HAVE_NEON
/*************************************
* convert in c:
*/
/*
 * Use NS_CLAMP to force a value (such as a preference) into a range.
 */
#define NS_CLAMP(x, low, high) (((x) > (high)) ? (high) : (((x) < (low)) ? (low) : (x)))
/* Convert a single Y'CbCr pixel to a packed RGB565 value.
 * Uses the exact same fixed-point formulas as the NEON asm path, even though
 * wider registers would allow more accurate constants.  'dither' (0..3)
 * selects a row of DITHER_BIAS, which folds the Y'CbCr constant offset
 * together with a dither-dependent rounding term. */
static uint16_t yu2rgb565(int y, int u, int v, int dither) {
    static const int DITHER_BIAS[4][3] = {
        {-14240,     8704,     -17696},
        {-14240+128, 8704+64,  -17696+128},
        {-14240+256, 8704+128, -17696+256},
        {-14240+384, 8704+192, -17696+384}
    };
    const int red   = NS_CLAMP((74*y + 102*v          + DITHER_BIAS[dither][0]) >> 9, 0, 31);
    const int green = NS_CLAMP((74*y - 25*u - 52*v    + DITHER_BIAS[dither][1]) >> 8, 0, 63);
    const int blue  = NS_CLAMP((74*y + 129*u          + DITHER_BIAS[dither][2]) >> 9, 0, 31);
    return (uint16_t)((red << 11) | (green << 5) | blue);
}
/* Convert one scanline of Y'CbCr to RGB565 in plain C.
 * x_shift is 1 when chroma is horizontally subsampled (4:2:x), 0 for 4:4:4;
 * the chroma arrays are indexed at (x >> x_shift). */
void yuv_to_rgb565_row_c(uint16_t *dst,
                         const uint8_t *y,
                         const uint8_t *u,
                         const uint8_t *v,
                         int x_shift,
                         int pic_width)
{
    int col;
    for (col = 0; col < pic_width; col++) {
        const int cidx = col >> x_shift;
        dst[col] = yu2rgb565(y[col], u[cidx], v[cidx],
                             2); // Disable dithering for now.
    }
}
/* Portable C conversion of a whole planar Y'CbCr picture to RGB565.
 * yuv_type selects chroma subsampling: 444 (none), 422 (horizontal only),
 * 420 (horizontal and vertical). */
void ConvertYCbCrToRGB565_c( const uint8_t* y_buf,
                             const uint8_t* u_buf,
                             const uint8_t* v_buf,
                             uint8_t* rgb_buf,
                             int pic_width,
                             int pic_height,
                             int y_stride,
                             int uv_stride,
                             int rgb_stride,
                             int yuv_type)
{
    const int x_shift = (yuv_type != 444); /* horizontal chroma subsampling */
    const int y_shift = (yuv_type == 420); /* vertical chroma subsampling */
    /*
    From Wiki: The Y'V12 format is essentially the same as Y'UV420p,
    but it has the U and V data reversed: the Y' values are followed by the V values, with the U values last.
    */
    for (int row = 0; row < pic_height; row++) {
        const uint8_t *y_row = y_buf + y_stride * row;
        const uint8_t *u_row = u_buf + uv_stride * (row >> y_shift);
        const uint8_t *v_row = v_buf + uv_stride * (row >> y_shift);
        yuv_to_rgb565_row_c((uint16_t*)(rgb_buf + rgb_stride * row),
                            y_row,
                            u_row,
                            v_row,
                            x_shift,
                            pic_width);
    }
}
/* Public entry point: dispatch to the NEON kernel when compiled with NEON
 * support, otherwise fall back to the portable C implementation.  Both take
 * identical arguments; see ConvertYCbCrToRGB565_c for parameter semantics. */
void ConvertYCbCrToRGB565( const uint8_t* y_buf,
const uint8_t* u_buf,
const uint8_t* v_buf,
uint8_t* rgb_buf,
int pic_width,
int pic_height,
int y_stride,
int uv_stride,
int rgb_stride,
int yuv_type)
{
#if HAVE_NEON
ConvertYCbCrToRGB565_neon(y_buf, u_buf, v_buf, rgb_buf, pic_width, pic_height, y_stride, uv_stride, rgb_stride, yuv_type);
#else
ConvertYCbCrToRGB565_c(y_buf, u_buf, v_buf, rgb_buf, pic_width, pic_height, y_stride, uv_stride, rgb_stride, yuv_type);
#endif
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/yuv2rgb565.h
================================================
#include <stdint.h>
void ConvertYCbCrToRGB565_neon( const uint8_t* y_buf,
const uint8_t* u_buf,
const uint8_t* v_buf,
uint8_t* rgb_buf,
int pic_width,
int pic_height,
int y_stride,
int uv_stride,
int rgb_stride,
int yuv_type);
void ConvertYCbCrToRGB565_c( const uint8_t* y_buf,
const uint8_t* u_buf,
const uint8_t* v_buf,
uint8_t* rgb_buf,
int pic_width,
int pic_height,
int y_stride,
int uv_stride,
int rgb_stride,
int yuv_type);
void ConvertYCbCrToRGB565( const uint8_t* y_buf,
const uint8_t* u_buf,
const uint8_t* v_buf,
uint8_t* rgb_buf,
int pic_width,
int pic_height,
int y_stride,
int uv_stride,
int rgb_stride,
int yuv_type);
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/encoder/Android.mk
================================================
LOCAL_PATH := $(call my-dir)
PREBUILT_PATH := $(LOCAL_PATH)/../../../../../../prebuilt
include $(CLEAR_VARS)
LOCAL_MODULE := x264
LOCAL_SRC_FILES := $(PREBUILT_PATH)/$(TARGET_ARCH_ABI)/libx264.a
include $(PREBUILT_STATIC_LIBRARY)
ifneq ($(TARGET_ARCH_ABI),x86)
ifneq ($(TARGET_ARCH_ABI),x86_64)
include $(CLEAR_VARS)
LOCAL_MODULE := qy265
LOCAL_SRC_FILES := $(PREBUILT_PATH)/$(TARGET_ARCH_ABI)/libqyencoder.a
include $(PREBUILT_STATIC_LIBRARY)
endif
endif
include $(CLEAR_VARS)
LOCAL_LDLIBS += -llog
LOCAL_MODULE := native-lib
LOCAL_CONLYFLAGS += -std=c99
LOCAL_C_INCLUDES += $(PREBUILT_PATH)/include
LOCAL_SRC_FILES += encoderwrapper.c
LOCAL_STATIC_LIBRARIES += x264 qy265 gnustl_static cpufeatures
LOCAL_DISABLE_FATAL_LINKER_WARNINGS := true
include $(BUILD_SHARED_LIBRARY)
$(call import-module,android/cpufeatures)
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/encoder/encoderwrapper.c
================================================
#include <jni.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <errno.h>
#include <time.h>
#include <sys/time.h>
#include "x264.h"
#include "qy265enc.h"
#include "qy265def.h"
#include "qyauth_env.h"
#include "encoderwrapper.h"
#include "log.h"
#define LOG_TAG "encoder"
/* Per-instance native encoder state, handed to Java as an opaque jlong. */
typedef struct Encoder {
FILE* in_file;    /* input YUV file opened by native_open */
float real_fps;   /* achieved encode speed of the last run (frames/s) */
float real_time;  /* wall-clock duration of the last run (seconds) */
double avg_psnr;  /* weighted average PSNR of the last run: (6*Y+U+V)/8 */
int frame_num;    /* number of frames encoded in the last run */
} Encoder;
/* Recover the native Encoder* stored in a Java long handle (see native_init). */
static inline Encoder* getInstance(jlong ptr)
{
return (Encoder*)(intptr_t) ptr;
}
/* Allocate a zero-initialized Encoder instance and return it to Java as an
 * opaque jlong handle; returns 0 on allocation failure.
 * calloc() already zeroes every field, so the explicit member resets the
 * original performed were redundant and have been dropped. */
jlong Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1init
        (JNIEnv *env,
         jobject instance) {
    Encoder* thiz = (Encoder*)calloc(1, sizeof(Encoder));
    /* BUGFIX: report allocation failure instead of letting Java hold a
     * handle that would be dereferenced as NULL later. */
    if (NULL == thiz) {
        return (jlong)0;
    }
    return (jlong)(intptr_t)thiz;
}
/* Open the input YUV file whose path is supplied from Java and reset the
 * per-run statistics.  Returns 0 on success, -1 on failure. */
jint Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1open
        (JNIEnv *env,
         jobject instance,
         jlong ptr,
         jstring path_) {
    const char *path = (*env)->GetStringUTFChars(env, path_, 0);
    Encoder* thiz = getInstance(ptr);
    /* BUGFIX: GetStringUTFChars may return NULL (OOM, exception pending). */
    if (NULL == path) {
        return -1;
    }
    /* BUGFIX: open in binary mode ("rb") -- the input is raw YUV, and text
     * mode would corrupt it on platforms that translate line endings. */
    thiz->in_file = fopen(path, "rb");
    if (NULL == thiz->in_file) {
        LOGD("open file failed with %d", errno);
        (*env)->ReleaseStringUTFChars(env, path_, path);
        return -1;
    }
    /* Reset statistics from any previous run. */
    thiz->real_fps = 0;
    thiz->frame_num = 0;
    thiz->avg_psnr = 0;
    thiz->real_time = 0;
    (*env)->ReleaseStringUTFChars(env, path_, path);
    return 0;
}
jint Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1x264_1encode
(JNIEnv *env,
jobject instance,
jlong ptr,
jstring path_,
jstring profile_,
jstring delay_,
jint width,
jint height,
jobject fps,
jint bitrate,
jint threads) {
const char *path = (*env)->GetStringUTFChars(env, path_, 0);
const char *profile = (*env)->GetStringUTFChars(env, profile_, 0);
const char *delay = (*env)->GetStringUTFChars(env, delay_, 0);
Encoder* thiz = getInstance(ptr);
x264_param_t param;
x264_picture_t pic;
x264_picture_t pic_out;
x264_t *h;
int i_frame = 0;
int i_frame_size;
x264_nal_t *nal;
int i_nal;
clock_t clock_start, clock_end, clock_used;
struct timeval tv_start, tv_end;
double real_time;
int64_t ms_used;
FILE *out_file;
double sum_psnr_y = 0.0;
double sum_psnr_u = 0.0;
double sum_psnr_v = 0.0;
if ( NULL != path ) {
out_file = fopen(path, "wb");
if ( NULL == out_file ) {
LOGE("open output file failed with %d", errno);
fclose(thiz->in_file);
(*env)->ReleaseStringUTFChars(env, path_, path);
(*env)->ReleaseStringUTFChars(env, profile_, profile);
(*env)->ReleaseStringUTFChars(env, delay_, delay);
return -1;
}
}
LOGD("profile %s", profile);
/* Get default params for preset/tuning */
if (strlen(delay) == 11 && strncmp(delay, "zerolatency", 11)) {
if( x264_param_default_preset( ¶m, profile, "zerolatency" ) < 0 )
goto fail;
} else {
if( x264_param_default_preset( ¶m, profile, NULL ) < 0 )
goto fail;
}
/* Configure non-default params */
param.i_csp = X264_CSP_I420;
param.i_width = width;
param.i_height = height;
param.b_vfr_input = 0;
param.b_repeat_headers = 1;
param.b_annexb = 1;
if (strlen(delay) == 11 && strncmp(delay, "zerolatency", 11)) {
param.i_bframe = 0;
} else if (strlen(delay) == 13 && strncmp(delay, "livestreaming", 13)) {
param.i_bframe = 3;
} else if (strlen(delay) == 7 && strncmp(delay, "offline", 7)) {
param.i_bframe = 7;
}
param.i_threads = threads;
jclass floatClass = (*env)->FindClass(env, "java/lang/Float");
jmethodID floatMethod = (*env)->GetMethodID(env, floatClass, "floatValue", "()F");
jfloat val = (*env)->CallFloatMethod(env, fps, floatMethod);
LOGD("x264 fps %.6f", val);
param.i_fps_num = val;
param.i_fps_den = 1;
param.rc.i_bitrate = bitrate;
param.rc.i_rc_method = X264_RC_ABR;
param.analyse.b_psnr = 1;
/* Apply profile restrictions. */
if( x264_param_apply_profile( ¶m, "high" ) < 0 )
goto fail;
if( x264_picture_alloc( &pic, param.i_csp, param.i_width, param.i_height ) < 0 )
goto fail;
h = x264_encoder_open( ¶m );
if( !h )
goto fail;
int luma_size = param.i_width * param.i_height;
int chroma_size = luma_size / 4;
gettimeofday(&tv_start, NULL);
clock_start = clock();
/* Encode frames */
for( ;; i_frame++ )
{
/* Read input frame */
if( fread( pic.img.plane[0], 1, luma_size, thiz->in_file ) != luma_size )
break;
if( fread( pic.img.plane[1], 1, chroma_size, thiz->in_file ) != chroma_size )
break;
if( fread( pic.img.plane[2], 1, chroma_size, thiz->in_file ) != chroma_size )
break;
pic.i_pts = i_frame;
i_frame_size = x264_encoder_encode( h, &nal, &i_nal, &pic, &pic_out );
if( i_frame_size < 0 )
goto fail;
else if( i_frame_size )
{
if (param.analyse.b_psnr){
sum_psnr_y += pic_out.prop.f_psnr[0];
sum_psnr_u += pic_out.prop.f_psnr[1];
sum_psnr_v += pic_out.prop.f_psnr[2];
}
if( !fwrite( nal->p_payload, i_frame_size, 1, out_file ) )
goto fail;
}
}
/* Flush delayed frames */
while( x264_encoder_delayed_frames( h ) )
{
i_frame_size = x264_encoder_encode( h, &nal, &i_nal, NULL, &pic_out );
if( i_frame_size < 0 )
goto fail;
else if( i_frame_size )
{
if (param.analyse.b_psnr){
sum_psnr_y += pic_out.prop.f_psnr[0];
sum_psnr_u += pic_out.prop.f_psnr[1];
sum_psnr_v += pic_out.prop.f_psnr[2];
}
if( !fwrite( nal->p_payload, i_frame_size, 1, out_file ) )
goto fail;
}
}
clock_end = clock();
gettimeofday(&tv_end, NULL);
clock_used = clock_end - clock_start;
ms_used = (int64_t)(clock_used * 1000.0 / CLOCKS_PER_SEC);
real_time = (tv_end.tv_sec + (tv_end.tv_usec / 1000000.0)) - (tv_start.tv_sec + (tv_start.tv_usec / 1000000.0));
float realFPS = i_frame / real_time;
double avg_psnr = (6*sum_psnr_y+sum_psnr_u+sum_psnr_v)/(8*i_frame);
thiz->frame_num = i_frame;
thiz->real_fps = realFPS;
thiz->real_time = real_time;
thiz->avg_psnr = avg_psnr;
x264_encoder_close( h );
x264_picture_clean( &pic );
fclose(thiz->in_file);
fclose(out_file);
(*env)->ReleaseStringUTFChars(env, path_, path);
(*env)->ReleaseStringUTFChars(env, profile_, profile);
(*env)->ReleaseStringUTFChars(env, delay_, delay);
return 0;
fail:
fclose(thiz->in_file);
fclose(out_file);
(*env)->ReleaseStringUTFChars(env, path_, path);
(*env)->ReleaseStringUTFChars(env, profile_, profile);
(*env)->ReleaseStringUTFChars(env, delay_, delay);
return -1;
}
/* Most recent average PSNR reported by the ksy265 encoder through its log
 * callback; read back by the encoder entry point after encoding finishes. */
static double ksy265_psnr = 0;

/*
 * Log callback installed via QY265SetLogPrintf().
 *
 * The PSNR appears in the encoder's log output as
 *   "bitrate, psnr: 503.1069 40.4723 47.0057 45.9163"
 * i.e. the bitrate value followed by the Y, U and V PSNR values.
 *
 * Fix: the previous implementation ran strtok() on the buffer reached
 * through the `const char *msg` parameter. strtok() writes NUL bytes into
 * its argument, which is undefined behavior if the encoder passes a
 * read-only string (and silently corrupts the message otherwise). This
 * version parses with strtod() and never modifies the input.
 */
void ksy265log(const char* msg) {
    LOGD("ksy265 log: %s", msg);
    const char *cursor = strstr(msg, "psnr");
    if (cursor == NULL)
        return;
    cursor += 4; /* skip the literal "psnr" */
    /* vals[0] is the bitrate (skipped), then Y, U, V PSNR. Missing trailing
     * numbers stay 0.0, matching the old strtok-based behavior. */
    double vals[4] = {0.0, 0.0, 0.0, 0.0};
    int count = 0;
    while (count < 4) {
        /* Skip the delimiters the old strtok(" :\t") call accepted. */
        while (*cursor == ' ' || *cursor == ':' || *cursor == '\t')
            cursor++;
        char *end;
        double v = strtod(cursor, &end);
        if (end == cursor)
            break; /* no further number in the message */
        vals[count++] = v;
        cursor = end;
    }
    double y = vals[1], u = vals[2], v = vals[3];
    /* Weighted average: luma dominates perceived quality (6:1:1). */
    ksy265_psnr = (y*6 + u + v) / 8;
    LOGD("psnr %f, y %f , u %f, v %f \n", ksy265_psnr, y, u, v);
}
/*
 * JNI entry point: encode the raw I420 stream previously opened into
 * thiz->in_file (by native_open; TODO confirm callers always open it first)
 * with the QY265 HEVC encoder, writing the elementary stream to |path_|.
 * Encoding statistics (frame count, real fps, wall time, average PSNR from
 * the log callback) are stored back into the Encoder identified by |ptr|.
 *
 * Returns 0 on success, -1 on failure.
 *
 * Fixes vs. previous version:
 *  - out_file was used uninitialized when path == NULL;
 *  - the fail path leaked the encoder handle and the YUV buffer;
 *  - both malloc() results were unchecked;
 *  - output opened in text mode "w" (the x264 path uses "wb");
 *  - possible division by zero when ms_used == 0.
 */
jint Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1ksy265_1encoder
        (JNIEnv *env,
         jobject instance,
         jobject context,
         jlong ptr,
         jstring path_,
         jstring profile_,
         jstring delay_,
         jint width,
         jint height,
         jobject fps,
         jint bitrate,
         jint threads) {
    const char *path = (*env)->GetStringUTFChars(env, path_, 0);
    const char *profile = (*env)->GetStringUTFChars(env, profile_, 0);
    const char *delay = (*env)->GetStringUTFChars(env, delay_, 0);
    QY265EncConfig param;
    QY265YUV yuv;
    QY265Picture pic;
    QY265Picture pic_out;
    QY265Nal *nal;
    void *h = NULL;               /* NULL so the fail path can test it */
    int i_frame = 0;
    int i_frame_size;
    int i_nal;
    clock_t clock_start, clock_end, clock_used;
    struct timeval tv_start, tv_end;
    double real_time;
    int64_t ms_used;
    FILE *out_file = NULL;        /* NULL so the fail path can test it */
    int errorCode;
    Encoder *thiz = getInstance(ptr);
    yuv.pData[0] = NULL;          /* single allocation backing all 3 planes */
    if (NULL != path) {
        /* "wb": the output is a binary bitstream (matches the x264 path). */
        out_file = fopen(path, "wb");
        if (NULL == out_file) {
            perror("open output file");
            fclose(thiz->in_file);
            (*env)->ReleaseStringUTFChars(env, path_, path);
            (*env)->ReleaseStringUTFChars(env, profile_, profile);
            (*env)->ReleaseStringUTFChars(env, delay_, delay);
            return -1;
        }
    }
    /* Get default params for preset/tuning. */
    if (QY265ConfigDefaultPreset(&param, profile, NULL, delay) < 0)
        goto fail;
    param.picWidth = width;
    param.picHeight = height;
    param.threads = threads;
    /* Unbox the java.lang.Float fps argument. */
    jclass floatClass = (*env)->FindClass(env, "java/lang/Float");
    jmethodID floatMethod = (*env)->GetMethodID(env, floatClass, "floatValue", "()F");
    jfloat val = (*env)->CallFloatMethod(env, fps, floatMethod);
    LOGD("265 fps %.6f", val);
    param.frameRate = val;
    param.bitrateInkbps = bitrate;
    TCounterEnv *tCounterEnv = (TCounterEnv *) malloc(sizeof(TCounterEnv));
    if (NULL == tCounterEnv)
        goto fail;
    /* NOTE(review): tCounterEnv is handed to the encoder via pAuth and never
     * freed here; confirm against the QY265 SDK whether the encoder takes
     * ownership of this pointer. */
    tCounterEnv->context = context;
    (*env)->GetJavaVM(env, &tCounterEnv->jvm);
    param.pAuth = tCounterEnv;
    param.calcPsnr = 1;           /* PSNR is reported through the log callback */
    QY265SetLogPrintf(ksy265log);
    /* One contiguous I420 buffer: full Y plane, then U and V quarter planes. */
    yuv.pData[0] = (unsigned char *) malloc(param.picWidth * param.picHeight * 3 / 2);
    if (NULL == yuv.pData[0])
        goto fail;
    yuv.pData[1] = yuv.pData[0] + param.picWidth * param.picHeight;
    yuv.pData[2] = yuv.pData[0] + param.picWidth * param.picHeight * 5 / 4;
    yuv.iWidth = param.picWidth;
    yuv.iHeight = param.picHeight;
    yuv.iStride[0] = yuv.iWidth;
    yuv.iStride[1] = yuv.iStride[2] = yuv.iWidth / 2;
    h = QY265EncoderOpen(&param, &errorCode);
    if (!h)
        goto fail;
    pic.yuv = &yuv;
    memset(&pic_out, 0, sizeof(pic_out));
    int luma_size = param.picWidth * param.picHeight;
    int chroma_size = luma_size / 4;
    gettimeofday(&tv_start, NULL);
    clock_start = clock();
    /* Encode frames until the input file is exhausted. */
    for (;; i_frame++) {
        /* Read one raw I420 frame: Y plane, then U, then V. */
        if (fread(pic.yuv->pData[0], 1, luma_size, thiz->in_file) != luma_size)
            break;
        if (fread(pic.yuv->pData[1], 1, chroma_size, thiz->in_file) != chroma_size)
            break;
        if (fread(pic.yuv->pData[2], 1, chroma_size, thiz->in_file) != chroma_size)
            break;
        pic.pts = i_frame;
        i_frame_size = QY265EncoderEncodeFrame(h, &nal, &i_nal, &pic, &pic_out, 0);
        if (i_frame_size < 0)
            goto fail;
        for (int i = 0; i < i_nal; i++) {
            if (!fwrite(nal[i].pPayload, nal[i].iSize, 1, out_file))
                goto fail;
        }
    }
    /* Flush delayed (lookahead/reordered) frames. */
    while (QY265EncoderDelayedFrames(h)) {
        i_frame_size = QY265EncoderEncodeFrame(h, &nal, &i_nal, NULL, &pic_out, 0);
        if (i_frame_size < 0)
            goto fail;
        for (int i = 0; i < i_nal; i++) {
            if (!fwrite(nal[i].pPayload, nal[i].iSize, 1, out_file))
                goto fail;
        }
    }
    clock_end = clock();
    gettimeofday(&tv_end, NULL);
    clock_used = clock_end - clock_start;
    ms_used = (int64_t)(clock_used * 1000.0 / CLOCKS_PER_SEC);
    real_time = (tv_end.tv_sec + (tv_end.tv_usec / 1000000.0)) -
                (tv_start.tv_sec + (tv_start.tv_usec / 1000000.0));
    float realFPS = i_frame / real_time;
    printf("%d frame encoded\n"
           "\ttime\tfps\n"
           "CPU\t%lldms\t%.2f\n"
           "Real\t%.3fs\t%.2f.\n",
           i_frame,
           ms_used, ms_used ? i_frame * 1000.0 / ms_used : 0.0, /* guard /0 on empty input */
           real_time, realFPS);
    QY265EncoderClose(h);
    thiz->frame_num = i_frame;
    thiz->real_fps = realFPS;
    thiz->real_time = real_time;
    thiz->avg_psnr = ksy265_psnr;   /* captured by the ksy265log callback */
    free(yuv.pData[0]);
    fclose(thiz->in_file);
    if (out_file)
        fclose(out_file);
    (*env)->ReleaseStringUTFChars(env, path_, path);
    (*env)->ReleaseStringUTFChars(env, profile_, profile);
    (*env)->ReleaseStringUTFChars(env, delay_, delay);
    return 0;
fail:
    /* Unified cleanup: every resource below is either NULL or valid here. */
    if (h)
        QY265EncoderClose(h);
    free(yuv.pData[0]);             /* free(NULL) is a no-op */
    fclose(thiz->in_file);
    if (out_file)
        fclose(out_file);
    (*env)->ReleaseStringUTFChars(env, path_, path);
    (*env)->ReleaseStringUTFChars(env, profile_, profile);
    (*env)->ReleaseStringUTFChars(env, delay_, delay);
    return -1;
}
/* Returns the wall-clock encoding fps measured by the last encode run. */
jfloat Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1get_1real_1fps
        (JNIEnv *env,
         jobject instance,
         jlong ptr) {
    return getInstance(ptr)->real_fps;
}
/* Returns the number of frames encoded by the last encode run. */
jint Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1get_1encoded_1frame_1num
        (JNIEnv *env,
         jobject instance, jlong ptr) {
    return getInstance(ptr)->frame_num;
}
/* Returns the linked x264 library version string to Java. */
JNIEXPORT jstring JNICALL
Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1get_1x264_1version(
        JNIEnv *env, jobject instance) {
    const char *version = X264_POINTVER;
    return (*env)->NewStringUTF(env, version);
}
/* Returns the linked ksy265 library version string to Java. */
JNIEXPORT jstring JNICALL
Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1get_1ksy265_1version(
        JNIEnv *env, jobject instance) {
    const char *version = strLibQy265Version;
    return (*env)->NewStringUTF(env, version);
}
/* Returns the wall-clock duration (seconds) of the last encode run. */
JNIEXPORT jfloat JNICALL
Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1get_1real_1time(JNIEnv *env,
                                                                                    jobject instance,
                                                                                    jlong ptr) {
    Encoder *enc = getInstance(ptr);
    return enc->real_time;
}
/* Returns the weighted-average PSNR recorded for the last encode run. */
JNIEXPORT jfloat JNICALL
Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1get_1psnr(JNIEnv *env,
                                                                              jobject instance,
                                                                              jlong ptr) {
    Encoder *enc = getInstance(ptr);
    return enc->avg_psnr;
}
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/encoder/encoderwrapper.h
================================================
/* DO NOT EDIT THIS FILE - it is machine generated */
#include
/* Header for class com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper */
#ifndef _Included_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper
#define _Included_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper
#ifdef __cplusplus
extern "C" {
#endif
/*
* Class: com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper
* Method: native_init
* Signature: ()V
*/
JNIEXPORT jlong JNICALL
Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1init(JNIEnv *, jobject);
/*
* Class: com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper
* Method: native_open
* Signature: (JLjava/lang/String;)I
*/
JNIEXPORT jint JNICALL Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1open
(JNIEnv *, jobject, jlong, jstring);
/*
* Class: com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper
* Method: native_x264_encode
* Signature: (JLjava/lang/String;Ljava/lang/String;Ljava/lang/String;IILjava/lang/Float;II)I
*/
JNIEXPORT jint JNICALL Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1x264_1encode
(JNIEnv *, jobject, jlong, jstring, jstring, jstring, jint, jint, jobject, jint, jint);
/*
* Class: com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper
* Method: native_ksy265_encoder
* Signature: (JLjava/lang/String;Ljava/lang/String;Ljava/lang/String;IILjava/lang/Float;II)I
*/
JNIEXPORT jint JNICALL Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1ksy265_1encoder
(JNIEnv *, jobject, jobject , jlong, jstring, jstring, jstring, jint, jint, jobject, jint, jint);
/*
* Class: com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper
* Method: native_get_real_fps
* Signature: (J)F
*/
JNIEXPORT jfloat JNICALL
Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1get_1real_1fps(JNIEnv *,
jobject, jlong);
/*
* Class: com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper
* Method: native_get_encoded_frame_num
* Signature: (J)I
*/
JNIEXPORT jint JNICALL
Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1get_1encoded_1frame_1num(
JNIEnv *, jobject, jlong);
#ifdef __cplusplus
}
#endif
#endif
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/encoder/log.h
================================================
//
// Created by sujia on 2017/3/29.
//
#ifndef KSY265CODECDEMO_LOG_H
#define KSY265CODECDEMO_LOG_H
#include
#define LOGD(fmt, args...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, fmt, ##args)
#define LOGI(fmt, args...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, fmt, ##args)
#define LOGW(fmt, args...) __android_log_print(ANDROID_LOG_WARN, LOG_TAG, fmt, ##args)
#define LOGE(fmt, args...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, fmt, ##args)
#endif //KSY265CODECDEMO_LOG_H
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/res/drawable/selector_tab_background.xml
================================================
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/res/drawable/tab_home_btn.xml
================================================
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/res/drawable/tab_view_btn.xml
================================================
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/res/layout/activity_main.xml
================================================
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/res/layout/decoder_settings.xml
================================================
>
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/res/layout/encoder_settings.xml
================================================
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/res/layout/file.xml
================================================
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/res/layout/fragment_item.xml
================================================
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/res/layout/help.xml
================================================
android:text="github地址:"/>
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/res/layout/main_tab_layout.xml
================================================
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/res/layout/tab_content.xml
================================================
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/res/values/bool.xml
================================================
true
false
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/res/values/colors.xml
================================================
#3F51B5
#303F9F
#FF4081
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/res/values/dimens.xml
================================================
0dp
16dp
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/res/values/strings.xml
================================================
KSY265CodecDemo
设置
帮助
浏览文件
确认
编码器使用说明: 先设置编码器参数,然后选择文件,最好确定即开始编码
解码器使用说明: 先设置解码器参数(其中选择渲染帧率为-1(off)时,是关闭渲染功能),然后选择文件,最好确定即开始解码
Empty Directory
Storage was removed or unmounted.
Select a file
Error selecting File
Internal storage
KSY Yun
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/res/values/styles.xml
================================================
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/res/values/themes.xml
================================================
================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/res/xml/mimetypes.xml
================================================
================================================
FILE: Android_demo/KSY265CodecDemo/build.gradle
================================================
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
repositories {
jcenter()
// Google's Maven repository (hosts the Android Gradle plugin artifacts).
maven {
url 'https://maven.google.com/'
name 'Google'
}
}
dependencies {
// Android Gradle plugin used to build the app module.
classpath 'com.android.tools.build:gradle:2.3.0'
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
}
}
// Repositories used to resolve dependencies of every module in this project.
allprojects {
repositories {
jcenter()
maven {
url 'https://maven.google.com/'
name 'Google'
}
}
}
// `gradlew clean` deletes the root build directory.
task clean(type: Delete) {
delete rootProject.buildDir
}
================================================
FILE: Android_demo/KSY265CodecDemo/gradle/wrapper/gradle-wrapper.properties
================================================
#Mon Mar 27 15:56:29 CST 2017
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-3.3-all.zip
================================================
FILE: Android_demo/KSY265CodecDemo/gradle.properties
================================================
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx1536m
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
systemProp.http.proxyHost=120.92.42.188
systemProp.http.proxyPort=52179
================================================
FILE: Android_demo/KSY265CodecDemo/gradlew
================================================
#!/usr/bin/env bash
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
# Print a warning message.
warn ( ) {
echo "$*"
}
# Print an error message and abort the wrapper with exit status 1.
die ( ) {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
esac
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
# Collect the (already shell-split) arguments into the JVM_OPTS array.
function splitJvmOpts() {
JVM_OPTS=("$@")
}
eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
================================================
FILE: Android_demo/KSY265CodecDemo/gradlew.bat
================================================
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windowz variants
if not "%OS%" == "Windows_NT" goto win9xME_args
if "%@eval[2+2]" == "4" goto 4NT_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
goto execute
:4NT_args
@rem Get arguments from the 4NT Shell from JP Software
set CMD_LINE_ARGS=%$
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega
================================================
FILE: Android_demo/KSY265CodecDemo/settings.gradle
================================================
include ':app'
================================================
FILE: Android_demo/README.md
================================================
KSY 265 Demo
================================================
FILE: Android_demo/prebuilt/arm64-v8a/libqyencoder.a
================================================
[File too large to display: 12.8 MB]
================================================
FILE: Android_demo/prebuilt/include/lenthevcdec.h
================================================
#ifndef __LENTHEVCDEC_H__
#define __LENTHEVCDEC_H__
#ifdef __cplusplus
extern "C" {
#endif
/* Needed for the int64_t pts parameters below. */
#include <stdint.h>
#if defined(_WIN32) || defined(WIN32)
#define LENTAPI __stdcall
#else
#define LENTAPI
#endif
/* Opaque decoder handle. */
typedef void* lenthevcdec_ctx;
int LENTAPI lenthevcdec_version(void);
lenthevcdec_ctx LENTAPI lenthevcdec_create(int threads, int compatibility, void* reserved);
void LENTAPI lenthevcdec_destroy(lenthevcdec_ctx ctx);
void LENTAPI lenthevcdec_flush(lenthevcdec_ctx ctx);
/* bs & bs_len: input bitstream
 * pts: input play timestamp
 * got_frame: return 1 if we got frame, then the pixels & line_stride & got_pts is valid
 * width & height: picture size
 * line_stride & pixels: output picture pixel data
 * got_pts: pts of output frame
 * return: byte count used by decoder, or negative number for error
 */
int LENTAPI lenthevcdec_decode_frame(lenthevcdec_ctx ctx,
                                     const void* bs, int bs_len,
                                     int64_t pts,
                                     int* got_frame,
                                     int* width, int* height,
                                     int line_stride[3],
                                     void* pixels[3],
                                     int64_t* got_pts);
#ifdef __cplusplus
}
#endif
#endif/*__LENTHEVCDEC_H__*/
================================================
FILE: Android_demo/prebuilt/include/qy265dec.h
================================================
///////////////////////////////////////////////////
//
// KingSoft H265 Codec Library
//
// Copyright(c) 2013-2014 KingSoft, Inc.
// www.KingSoft.cn
//
///////////////////////////////////////////////////
/************************************************************************************
* decInf.h: interface of decoder for user
*
* \date 2013-09-28: first version
*
************************************************************************************/
#ifndef _QY265_DECODER_INTERFACE_H_
#define _QY265_DECODER_INTERFACE_H_
#include "qy265def.h"
// config parameters for Decoder
typedef struct QY265DecConfig {
void* pAuth; //QYAuth, invalid if don't need aksk auth
int threads; // number of threads used in decoding (0: auto)
int bEnableOutputRecToFile; // For debug: write reconstruct YUV to File
char* strRecYuvFileName; // For debug: file name of YUV
// when bEnableOutputRecToFile = 1
int logLevel; //For debug: log level
}QY265DecConfig;
// information of decoded frame
typedef struct QY265FrameInfo {
int nWidth; // frame width
int nHeight; // frame height
long long pts; // time stamp
int bIllegalStream; // input bit stream is illegal
int poc; // picture order count of the decoded frame
}QY265FrameInfo;
// decoded frame with data and information
typedef struct QY265Frame {
int bValid; //if == 0, no more valid output frame
unsigned char* pData[3]; // Y U V
short iStride[3]; // stride for each component
QY265FrameInfo frameinfo;
#ifdef EMSCRIPTEN//TEST_YUVPLANE
unsigned char* pYUVPlane; //linear buffer for yuv 420p
#endif
}QY265Frame;
#if defined(__cplusplus)
extern "C" {
#endif//__cplusplus
/************************************************************************************
* I/F for all users
************************************************************************************/
// create decoder, return handle of decoder (pStat receives a QY_* status code)
_h_dll_export void* QY265DecoderCreate(QY265DecConfig* pDecConfig, int * pStat);
// destroy decoder with specific handle
_h_dll_export void QY265DecoderDestroy(void* pDecoder);
// set config to specific decoder
_h_dll_export void QY265DecoderSetDecConfig(void *pDecoder, QY265DecConfig* pDecConfig, int * pStat);
//the input of this function should be one or more NALs;
//if only one NAL, with or without start bytes are both OK
_h_dll_export void QY265DecodeFrame(void *pDecoder, unsigned char* pData, int iLen, int * pStat, const long long pts);
// bSkip = false : same as QY265DecodeFrame
// bSkip = true : only decode slice headers in pData, slice data skipped
_h_dll_export void QY265DecodeFrameEnSkip(void *pDecoder, unsigned char* pData, int iLen, int * pStat, const long long pts, int bSkip);
//flush decoding, called at end
_h_dll_export void QY265DecodeFlush(void *pDecoder, int bClearCachedPics, int * pStat);
// retrieve the output, the function are used for synchronized output, this function need to call several time until get NULL
// if bForceLogo == true, only one frame buffer inside, need return before get next output
_h_dll_export void QY265DecoderGetDecodedFrame(void *pDecoder, QY265Frame* pFrame, int * pStat, int bForceLogo);
// return the frame buffer which QY265DecoderGetOutput get from decoder, each valid QY265DecoderGetOutput should match with a ReturnFrame
_h_dll_export void QY265DecoderReturnDecodedFrame( void *pDecoder, QY265Frame* pFrame);
/**
* dump latest decoded VUI parameters
* @param_input pDecoder: decoder instance
* @param_output vui: fill with decoded vui parameters
* @param_output bValid: =0 if no valid vui parameters decoded,
*                otherwise =1
*/
_h_dll_export void QY265DumpVUIParameters(void* pDecoder, vui_parameters* vui, int* bValid);
#if defined(__cplusplus)
}
#endif//__cplusplus
#endif//header
================================================
FILE: Android_demo/prebuilt/include/qy265def.h
================================================
#ifndef _QY265_DEF_H_
#define _QY265_DEF_H_
// ****************************************
// error type
// ****************************************
/* Status / error codes returned through the int* pStat out-parameters of the
 * QY265 API. Codes with the high bit set (0x8xxxxxxx) are hard failures;
 * small positive codes are informational and can usually be ignored.
 * NOTE(review): values such as 0x80000001 do not fit in a signed int, so
 * these enumerators rely on compiler-specific enum widening — confirm the
 * SDK is always built with a compiler that accepts this. */
enum
{
QY_OK = (0x00000000), // Success codes
QY_FAIL = (0x80000001), // Unspecified error
QY_OUTOFMEMORY = (0x80000002), // Ran out of memory
QY_POINTER = (0x80000003), // Invalid pointer
QY_NOTSUPPORTED = (0x80000004),// NOT support feature encoutnered
QY_AUTH_INVALID = (0x80000005), // authentication invalid
QY_SEARCHING_ACCESS_POINT = (0x00000001), // in process of searching first access point
QY_REF_PIC_NOT_FOUND = (0x00000007), // reference picture not found, can be ignored
#if defined(EMSCRIPTEN)||defined(_TEST_FOR_EMSCRIPTEN)
QY_NEED_MORE_DATA = (0x00000008), //need push more data
#endif
QY_BITSTREAM_ERROR = (0x00000009), // detecting bitstream error, can be ignored
QY_CTU_REENCODE = (0x81000000), //ctu reencode
};
/* HEVC NAL unit type codes, per ITU-T H.265 Table 7-1.
 * Fix: RSV_VCL_N14 was mistakenly assigned 13, colliding with RSV_VCL_R13;
 * the specification assigns it 14. */
enum NAL_UNIT_TYPE{
    /* VCL: trailing / sub-layer pictures (0-9). */
    NAL_UNIT_TYPE_TRAIL_N = 0,
    NAL_UNIT_TYPE_TRAIL_R = 1,
    NAL_UNIT_TYPE_TSA_N = 2,
    NAL_UNIT_TYPE_TSA_R = 3,
    NAL_UNIT_TYPE_STSA_N = 4,
    NAL_UNIT_TYPE_STSA_R = 5,
    NAL_UNIT_TYPE_RADL_N = 6,
    NAL_UNIT_TYPE_RADL_R = 7,
    NAL_UNIT_TYPE_RASL_N = 8,
    NAL_UNIT_TYPE_RASL_R = 9,
    /* Reserved VCL types (10-15). */
    NAL_UNIT_TYPE_RSV_VCL_N10 = 10,
    NAL_UNIT_TYPE_RSV_VCL_N12 = 12,
    NAL_UNIT_TYPE_RSV_VCL_N14 = 14, /* was 13 (duplicate of RSV_VCL_R13) */
    NAL_UNIT_TYPE_RSV_VCL_R11 = 11,
    NAL_UNIT_TYPE_RSV_VCL_R13 = 13,
    NAL_UNIT_TYPE_RSV_VCL_R15 = 15,
    /* IRAP pictures (16-23). */
    NAL_UNIT_TYPE_BLA_W_LP = 16,
    NAL_UNIT_TYPE_BLA_W_RADL = 17,
    NAL_UNIT_TYPE_BLA_N_LP = 18,
    NAL_UNIT_TYPE_IDR_W_RADL = 19,
    NAL_UNIT_TYPE_IDR_N_LP = 20,
    NAL_UNIT_TYPE_CRA_NUT = 21,
    NAL_UNIT_TYPE_RSV_IRAP_VCL22 = 22,
    NAL_UNIT_TYPE_RSV_IRAP_VCL23 = 23,
    /* Reserved VCL (24-31). */
    NAL_UNIT_TYPE_RSV_VCL24 = 24,
    NAL_UNIT_TYPE_RSV_VCL25 = 25,
    NAL_UNIT_TYPE_RSV_VCL26 = 26,
    NAL_UNIT_TYPE_RSV_VCL27 = 27,
    NAL_UNIT_TYPE_RSV_VCL28 = 28,
    NAL_UNIT_TYPE_RSV_VCL29 = 29,
    NAL_UNIT_TYPE_RSV_VCL30 = 30,
    NAL_UNIT_TYPE_RSV_VCL31 = 31,
    /* Non-VCL: parameter sets, delimiters, SEI (32-40). */
    NAL_UNIT_TYPE_VPS_NUT = 32,
    NAL_UNIT_TYPE_SPS_NUT = 33,
    NAL_UNIT_TYPE_PPS_NUT = 34,
    NAL_UNIT_TYPE_AUD_NUT = 35,
    NAL_UNIT_TYPE_EOS_NUT = 36,
    NAL_UNIT_TYPE_EOB_NUT = 37,
    NAL_UNIT_TYPE_FD_NUT = 38,
    NAL_UNIT_TYPE_PREFIX_SEI_NUT = 39,
    NAL_UNIT_TYPE_SUFFIX_SEI_NUT = 40,
    /* Reserved non-VCL (41-47). */
    NAL_UNIT_TYPE_RSV_NVCL41 = 41,
    NAL_UNIT_TYPE_RSV_NVCL42 = 42,
    NAL_UNIT_TYPE_RSV_NVCL43 = 43,
    NAL_UNIT_TYPE_RSV_NVCL44 = 44,
    NAL_UNIT_TYPE_RSV_NVCL45 = 45,
    NAL_UNIT_TYPE_RSV_NVCL46 = 46,
    NAL_UNIT_TYPE_RSV_NVCL47 = 47,
    /* Unspecified (48-63). */
    NAL_UNIT_TYPE_UNSPEC48 = 48,
    NAL_UNIT_TYPE_UNSPEC49 = 49,
    NAL_UNIT_TYPE_UNSPEC50 = 50,
    NAL_UNIT_TYPE_UNSPEC51 = 51,
    NAL_UNIT_TYPE_UNSPEC52 = 52,
    NAL_UNIT_TYPE_UNSPEC53 = 53,
    NAL_UNIT_TYPE_UNSPEC54 = 54,
    NAL_UNIT_TYPE_UNSPEC55 = 55,
    NAL_UNIT_TYPE_UNSPEC56 = 56,
    NAL_UNIT_TYPE_UNSPEC57 = 57,
    NAL_UNIT_TYPE_UNSPEC58 = 58,
    NAL_UNIT_TYPE_UNSPEC59 = 59,
    NAL_UNIT_TYPE_UNSPEC60 = 60,
    NAL_UNIT_TYPE_UNSPEC61 = 61,
    NAL_UNIT_TYPE_UNSPEC62 = 62,
    NAL_UNIT_TYPE_UNSPEC63 = 63,
};
// ****************************************
// VUI
// ****************************************
/*
 * Decoded VUI (Video Usability Information) fields.
 * Field names mirror the vui_parameters() syntax elements of the H.265 spec;
 * flags are 0/1 "present" indicators for the group that follows them.
 */
typedef struct vui_parameters{
    // --- sample aspect ratio (SAR) ---
    unsigned char aspect_ratio_info_present_flag;
    unsigned short sar_width; // sar_width and sar_height are zero if unspecified
    unsigned short sar_height;
    // --- overscan ---
    unsigned char overscan_info_present_flag;
    unsigned char overscan_appropriate_flag;
    // --- video signal type ---
    unsigned char video_signal_type_present_flag;
    unsigned char video_format;
    unsigned char video_full_range_flag;
    unsigned char colour_description_present_flag;
    unsigned char colour_primaries;          // code points per H.265 Annex E
    unsigned char transfer_characteristics;  // code points per H.265 Annex E
    unsigned char matrix_coeffs;             // code points per H.265 Annex E
    // --- chroma / interlaced ---
    unsigned char chroma_loc_info_present_flag;
    unsigned char chroma_sample_loc_type_top_field;
    unsigned char chroma_sample_loc_type_bottom_field;
    unsigned char neutral_chroma_indication_flag;
    unsigned char field_seq_flag;
    unsigned char frame_field_info_present_flag;
    // --- default display window (offsets in samples from the picture edges) ---
    unsigned char default_display_window_flag;
    unsigned int def_disp_win_left_offset;
    unsigned int def_disp_win_right_offset;
    unsigned int def_disp_win_top_offset;
    unsigned int def_disp_win_bottom_offset;
    // --- timing ---
    unsigned char vui_timing_info_present_flag;
    unsigned int vui_num_units_in_tick;
    unsigned int vui_time_scale;
    unsigned char vui_poc_proportional_to_timing_flag;
    unsigned int vui_num_ticks_poc_diff_one;
    // --- hrd parameters ---
    unsigned char vui_hrd_parameters_present_flag;
    //hrd_parameters vui_hrd_parameters; // HRD sub-structure not exposed through this header
    // --- bitstream restriction ---
    unsigned char bitstream_restriction_flag;
    unsigned char tiles_fixed_structure_flag;
    unsigned char motion_vectors_over_pic_boundaries_flag;
    unsigned char restricted_ref_pic_lists_flag;
    unsigned short min_spatial_segmentation_idc;
    unsigned char max_bytes_per_pic_denom;
    unsigned char max_bits_per_min_cu_denom;
    unsigned char log2_max_mv_length_horizontal;
    unsigned char log2_max_mv_length_vertical;
}vui_parameters;
// Export decoration for public libqy265 symbols:
// no-op under SWIG / Flash (AVM2) builds, __declspec(dllexport) on Windows,
// default ELF symbol visibility elsewhere (GCC/Clang).
#if defined(SWIG) || defined(__AVM2__)
#define _h_dll_export
#else
#ifdef WIN32
#define _h_dll_export __declspec(dllexport)
#else // for GCC
#define _h_dll_export __attribute__ ((visibility("default")))
#endif
#endif //SWIG
// Callback type invoked with each formatted log message.
typedef void (*QYLogPrintf)(const char* msg);
// Callback type invoked when license/auth trouble is detected.
typedef void (*QYAuthWarning)();
#if defined(__cplusplus)
extern "C" {
#endif//__cplusplus
// log output callback func pointer
// if pFuncCB == NULL, use the default printf
_h_dll_export void QY265SetLogPrintf ( QYLogPrintf pFuncCB);
// auth trouble warning callback func pointer
_h_dll_export void QY265SetAuthWarning ( QYAuthWarning pFuncCB);
#if defined(__cplusplus)
}
#endif//__cplusplus
//libqy265 version number string
_h_dll_export extern const char strLibQy265Version[];
#endif
================================================
FILE: Android_demo/prebuilt/include/qy265enc.h
================================================
///////////////////////////////////////////////////
//
// Kingsoft H265 Codec Library
//
// Copyright(c) Kingsoft cloud Inc.
// http://www.ksyun.com/
//
///////////////////////////////////////////////////
/************************************************************************************
* encInf.h: interface of encoder for user
*
* \date 2013-09-28: first version
*
************************************************************************************/
#ifndef _QY265_ENCODER_INTERFACE_H_
#define _QY265_ENCODER_INTERFACE_H_
#include "qy265def.h"
// ****************************************
// base configuration
// ****************************************
//app type
// Content-type tuning selector; string names in qy265_tunes_names[]
// ("default", "selfshow", "game", "movie", "screen") map to these values.
typedef enum QY265Tune_tag{
    QY265TUNE_DEFAULT = 0,
    QY265TUNE_SELFSHOW = 1,
    QY265TUNE_GAME = 2,
    QY265TUNE_MOVIE = 3,
    QY265TUNE_SCREEN = 4
}QY265Tune;
// Speed/quality preset, fastest (ULTRAFAST) to slowest (PLACEBO);
// string names in qy265_preset_names[] map to these values.
typedef enum QY265Preset_tag{
    QY265PRESET_ULTRAFAST = 0,
    QY265PRESET_SUPERFAST = 1,
    QY265PRESET_VERYFAST = 2,
    QY265PRESET_FAST = 3,
    QY265PRESET_MEDIUM = 4,
    QY265PRESET_SLOW = 5,
    QY265PRESET_SLOWER = 6,
    QY265PRESET_VERYSLOW = 7,
    QY265PRESET_PLACEBO = 8,
}QY265Preset;
// Latency preset; string names in qy265_latency_names[]
// ("zerolatency", "lowdelay", "livestreaming", "default") map to these values.
typedef enum QY265Latency_tag{
    QY265LATENCY_ZERO = 0,
    QY265LATENCY_LOWDELAY = 1,
    QY265LATENCY_LIVESTREMING = 2,   // historical misspelling, kept for source compatibility
    QY265LATENCY_LIVESTREAMING = 2,  // correctly spelled alias of QY265LATENCY_LIVESTREMING
    QY265LATENCY_DEFAULT = 3,
}QY265Latency;
//base configuration
/*
 * Top-level encoder configuration. Fill via QY265ConfigDefault() /
 * QY265ConfigDefaultPreset() (or field-by-field with QY265ConfigParse()),
 * then pass to QY265EncoderOpen(). Comments give defaults and valid ranges
 * where the original header stated them.
 */
typedef struct QY265EncConfig{
    void* pAuth; //QYAuth, invalid if don't need aksk auth
    QY265Tune tune; // content-type tuning, see QY265Tune
    QY265Preset preset;
    QY265Latency latency;
    int profileId; //currently, support 1 and 3 separately for main and main still profile
    int bHeaderBeforeKeyframe; //whether output vps,sps,pps before key frame, default 1. dis/enable 0/1
    int picWidth; // input frame width
    int picHeight; // input frame height
    double frameRate; // input frame rate
    int bframes; // num of bi-pred frames, -1: using default
    int temporalLayer; // works with QY265LATENCY_ZERO, separate P frames into temporal layers, 0 or 1
    // --- video pre-processing (vpp) ---
    int vpp_denoise; // vpp denoise 0 disable, 1 gentle, 2 medium, 3 aggressive
    int vpp_edge; // vpp edge enhance 0 disable, 1 gentle, 2 medium, 3 aggressive
    int vpp_color; // vpp color enhance 0 disable, 1 gentle, 2 medium, 3 aggressive
    int vpp_hdr; // vpp HDR enhance 0 disable, 1 enable
    double vpp_hdr_strength; // vpp HDR strength, 0~5
    int vpp_hdr_iter; // vpp HDR iteration, 2 or 3
    double vpp_hdr_sigma_s; // vpp HDR parameter 0~100
    double vpp_hdr_sigma_r; // vpp HDR parameter 0~100
    double vpp_recur_filter; // vpp Recursive Filter 0~30
    // --- rate control ---
    int rc; // rc type 0 disable,1 cbr,2 abr,3 crf,4 cvbr,5 cvq, default 2
    int bitrateInkbps; // target bit rate in kbps, valid when rc type is cbr and abr
    int vbv_buffer_size; // buf size of vbv
    int vbv_max_rate; // max rate of vbv
    int vbv_min_rate; // min rate of vbv (original comment said "max" -- copy/paste error)
    int qp; // valid when rctype is disable, default 26
    int crf; // valid when rctype is crf,default 24
    int visual_quality; // valid when rctype is cvq,[70-100], default 95
    int iIntraPeriod; // I-Frame period, -1 = only first
    int qpmin; //minimal qp, valid when rc != 0, 0~51
    int qpmax; //maximal qp, valid when rc != 0, 1~51, qpmax = 0 means 51
    int enFrameSkip; //1: enable frame skip for ratecontrol, default 0
    //* Execute Properties
    int enWavefront; //enable wave front parallel
    int enFrameParallel; //enable frame parallel
    int threads; // number of threads used in encoding ( for wavefront, frame parallel, or enable both )
    //* vui_parameters
    //vui_parameters_present_flag equal to 1 specifies that the vui_parameters() syntax in struct vui should set by usr
    int vui_parameters_present_flag;
    struct{
        /* video_signal_type_present_flag. If this is set then
         * video_format, video_full_range_flag and colour_description_present_flag
         * will be added to the VUI. The default is false */
        int video_signal_type_present_flag;
        /* Video format of the source video. 0 = component, 1 = PAL, 2 = NTSC,
         * 3 = SECAM, 4 = MAC, 5 = unspecified video format is the default */
        int video_format;
        /* video_full_range_flag indicates the black level and range of the luma
         * and chroma signals as derived from E'Y, E'PB, and E'PR or E'R, E'G,
         * and E'B real-valued component signals. The default is false */
        int video_full_range_flag;
        /* colour_description_present_flag in the VUI. If this is set then
         * color_primaries, transfer_characteristics and matrix_coeffs are to be
         * added to the VUI. The default is false */
        int colour_description_present_flag;
        /* colour_primaries holds the chromacity coordinates of the source
         * primaries. The default is 2 */
        int colour_primaries;
        /* transfer_characteristics indicates the opto-electronic transfer
         * characteristic of the source picture. The default is 2 */
        int transfer_characteristics;
        /* matrix_coeffs used to derive the luma and chroma signals from
         * the red, blue and green primaries. The default is 2 */
        int matrix_coeffs;
    }vui;
    //* tool list
    int logLevel; //log level (-1: dbg; 0: info; 1:warn; 2:err; 3:fatal)
    int lookahead; // rc lookahead settings
    int calcPsnr; //0:not calc psnr; 1: print total psnr; 2: print each frame
    int calcSsim; //0:not calc ssim; 1: print total ssim; 2: print each frame
    int shortLoadingForPlayer; //reduce b frames after I frame, for shorting the loading time of VOD for some players
    //ZEL_2PASS:parameters for 2pass
    int iPass; //Multi pass rate control,0,disable 2pass encode method; 1: first pass; 2: second pass;
    char statFileName[256]; //log file produced from first pass, set by user
    double fRateTolerance;//default 2.0f,0.5 is suitable to reduce the largest bitrate, and 0.1 is to make the bitrate stable
    int rdoq;//1:enabling rdoq
    int me;//0: DIA, 1: HEX, 2: UMH, 3:EPZS,
    int part;//enabling 2nxn, nx2n pu
    int do64;//1:enabling 64x64 cu
    int tuInter;//inter RQT tu depth, 0~3, -1 means auto
    int tuIntra;//intra RQT tu depth, 0~3, -1 means auto
    int smooth;//1: enabling strong intra smoothing
    int transskip;//1: enabling transform skip
    int subme;// 0 : disable 1 : fast, 2 : square full
    int satdInter;//1:enabling hardmad sad
    int satdIntra;//1:enabling hardmad sad
    int searchrange;//search range
    int refnum;// reference number
    int ref0;//add interface for ref0
    int sao;//sao enabling, 0: disable; 1:faster; 2: faster; 3: usual; 4:complex (levels 1 and 2 both read "faster" upstream -- presumably 1 is the fastest; confirm)
    int longTermRef;//0:disabling longterm reference 1:enable;
    int iAqMode;// adaptive quantization 0~3, 0: disable
    double fAqStrength;//strength of adaptive quantizaiton, 0~3.0, default 1.0
    int rasl; // enable RASL NAL for CRA,default 1, if not enable RASL, then CRA is act like IDR
}QY265EncConfig;
// ****************************************
// callback functions
// ****************************************
//the encoder works in asynchronous mode (for supports of B frames)
//once calling on EncodeFrame not corresponds to one Frame's bitstream output
//thus, use callback function on Frame Encoded
//also, buffer of srcYUV should be reserved for encoder, until it's done
// CALLBACK method to feed the encoded bit stream
// input frame data and info
// Planar YUV frame buffer handed to the encoder.
// The buffer must remain valid until the encoder has finished with the frame
// (the encoder works asynchronously -- see the comment block above).
typedef struct QY265YUV{
    int iWidth; // input frame width
    int iHeight; // input frame height
    unsigned char* pData[3]; // input frame Y U V plane pointers
    int iStride[3]; // stride (bytes per row) for Y U V planes
}QY265YUV;
// input frame data and info
// Picture wrapper used both for input frames and returned output pictures.
typedef struct QY265Picture{
    int iSliceType; // specified by output pictures
    int poc; // picture order count; ignored on input
    long long pts; // presentation timestamp
    long long dts; // decode timestamp
    QY265YUV* yuv; // frame data, see QY265YUV
}QY265Picture;
// One encoded NAL unit as returned by the encode calls.
typedef struct QY265Nal
{
    int naltype; // NAL unit type (see enum NAL_UNIT_TYPE in qy265def.h)
    int tid;     // temporal layer id
    int iSize;   // payload size in bytes
    long long pts;
    unsigned char* pPayload; // encoded bitstream bytes (ownership stays with the encoder -- confirm lifetime against QY265EncoderEncodeFrame docs)
}QY265Nal;
#if defined(__cplusplus)
extern "C" {
#endif//__cplusplus
/**
* create encoder
* @param pCfg : base config of encoder
* @param errorCode: error code
* @return encoder handle
*/
_h_dll_export void* QY265EncoderOpen(QY265EncConfig* pCfg, int *errorCode);
// destroy encoder
_h_dll_export void QY265EncoderClose(void* pEncoder);
// reconfig encoder with a new QY265EncConfig
_h_dll_export void QY265EncoderReconfig(void* pEncoder,QY265EncConfig* pCfg);
// return the VPS, SPS and PPS that will be used for the whole stream.
_h_dll_export int QY265EncoderEncodeHeaders(void* pEncoder,QY265Nal** pNals,int* iNalCount);
/**
* Encode one frame add logo or not
*
* @param pEncoder handle of encoder
* @param pNals pointer array of output NAL units
* @param iNalCount output NAL unit count
* @param pInPic input frame
* @param pOutPic output frame
* @param bForceLogo add logo on the input frame ( when auth failed)
* @return if succeed, return 0; if failed, return the error code
*/
_h_dll_export int QY265EncoderEncodeFrame(void* pEncoder, QY265Nal** pNals, int* iNalCount, QY265Picture* pInpic, QY265Picture* pOutpic, int bForceLogo);
// Request encoder to encode a Key Frame
_h_dll_export void QY265EncoderKeyFrameRequest(void* pEncoder);
// current buffered frames
_h_dll_export int QY265EncoderDelayedFrames(void* pEncoder);
// NULL-terminated name tables whose indices match the QY265Preset /
// QY265Tune / QY265Latency enum values; used by the string-based config API.
static const char* const qy265_preset_names[] = { "ultrafast", "superfast", "veryfast", "fast", "medium", "slow", "slower", "veryslow", "placebo", 0 };
static const char* const qy265_tunes_names[] = { "default", "selfshow", "game", "movie", "screen", 0 };
static const char* const qy265_latency_names[] = { "zerolatency", "lowdelay", "livestreaming", "default", 0 };
// get default config values by preset, tune and latency. enum format
_h_dll_export int QY265ConfigDefault(QY265EncConfig* pConfig, QY265Preset preset, QY265Tune tune, QY265Latency latency);
// get default config values by preset, tune and latency. string format
_h_dll_export int QY265ConfigDefaultPreset(QY265EncConfig* pConfig, char* preset, char* tune, char* latency);
// error codes returned by QY265ConfigParse
#define QY265_PARAM_BAD_NAME (-1)
#define QY265_PARAM_BAD_VALUE (-2)
// set a single named option ("name=value" style); returns 0 or one of the codes above
_h_dll_export int QY265ConfigParse(QY265EncConfig *p, const char *name, const char *value);
#if defined(__cplusplus)
}
#endif//__cplusplus
#endif
================================================
FILE: Android_demo/prebuilt/include/qyauth_env.h
================================================
#ifndef _KS_AUTH_ENV_H_
#define _KS_AUTH_ENV_H_
#if !defined(WIN32)
/* NOTE(review): the original include target was lost during extraction
 * (the line read just "#include"); <stdint.h> restores a compiling header
 * -- confirm against the shipped source. */
#include <stdint.h>
#endif
#define MAX_URL_LEN 2048 // max length of an auth server URL
#define MAX_LEN 512
#define AUTH_ADDR_NUM 3 // number of auth server addresses
//***********************************
//* KSPrivateAuthEnv used as AUTH struct
//* when private server auth method is adopted
//***********************************
#ifndef KSAUTH_PRIVATE_AUTH
#define KSAUTH_PRIVATE_AUTH 0
#endif
//***********************************
//for Android, TCounterEnv used as AUTH struct
//when adopt Count auth method
#ifndef __PLATFORM_COUNTER_ENV__
#define __PLATFORM_COUNTER_ENV__
#ifdef ANDROID
#include <jni.h> /* JavaVM / jobject below require the JNI header (original bare #include restored) */
typedef struct _TCounterEnv
{
    JavaVM *jvm;     // Java VM handle for the hosting app
    jobject context; // Android Context object -- presumably a global ref; confirm with callers
}TCounterEnv;
#endif
#endif
// Export decoration: dllexport on Windows, default ELF visibility for GCC/Clang.
#ifdef WIN32
#define _ks_dll_export __declspec(dllexport)
#else // for GCC
#define _ks_dll_export __attribute__ ((visibility("default")))
#endif
// Version string of the ksc265 auth library.
_ks_dll_export extern const char strKsc265AuthVersion[];
#endif //header
================================================
FILE: Android_demo/prebuilt/include/x264.h
================================================
/*****************************************************************************
* x264.h: x264 public header
*****************************************************************************
* Copyright (C) 2003-2016 x264 project
*
* Authors: Laurent Aimar
* Loren Merritt
* Fiona Glaser
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02111, USA.
*
* This program is also available under a commercial proprietary license.
* For more information, contact us at licensing@x264.com.
*****************************************************************************/
#ifndef X264_X264_H
#define X264_X264_H
#ifdef __cplusplus
extern "C" {
#endif
/* x264.h requires the caller to have fixed-width integer types in scope. */
#if !defined(_STDINT_H) && !defined(_STDINT_H_) && !defined(_STDINT_H_INCLUDED) && !defined(_STDINT) &&\
    !defined(_SYS_STDINT_H_) && !defined(_INTTYPES_H) && !defined(_INTTYPES_H_) && !defined(_INTTYPES)
# ifdef _MSC_VER
#  pragma message("You must include stdint.h or inttypes.h before x264.h")
# else
#  warning You must include stdint.h or inttypes.h before x264.h
# endif
#endif
/* <stdarg.h> provides va_list for x264_param_t.pf_log; the include target
 * was lost in extraction (bare "#include") and is restored here to match
 * upstream x264.h. */
#include <stdarg.h>
#include "x264_config.h"
#define X264_BUILD 148
/* Application developers planning to link against a shared library version of
 * libx264 from a Microsoft Visual Studio or similar development environment
 * will need to define X264_API_IMPORTS before including this header.
 * This clause does not apply to MinGW, similar development environments, or non
 * Windows platforms. */
#ifdef X264_API_IMPORTS
#define X264_API __declspec(dllimport)
#else
#define X264_API
#endif
/* x264_t:
 * opaque handler for encoder
 * (forward declaration only; the definition is internal to libx264) */
typedef struct x264_t x264_t;
/****************************************************************************
* NAL structure and functions
****************************************************************************/
/* H.264 NAL unit type codes emitted by x264 (subset of H.264 Table 7-1;
 * types x264 never emits are omitted). */
enum nal_unit_type_e
{
    NAL_UNKNOWN = 0,
    NAL_SLICE = 1,
    NAL_SLICE_DPA = 2,
    NAL_SLICE_DPB = 3,
    NAL_SLICE_DPC = 4,
    NAL_SLICE_IDR = 5, /* ref_idc != 0 */
    NAL_SEI = 6, /* ref_idc == 0 */
    NAL_SPS = 7,
    NAL_PPS = 8,
    NAL_AUD = 9,
    NAL_FILLER = 12,
    /* ref_idc == 0 for 6,9,10,11,12 */
};
/* NAL ref_idc values: importance of the NAL unit for reconstruction. */
enum nal_priority_e
{
    NAL_PRIORITY_DISPOSABLE = 0,
    NAL_PRIORITY_LOW = 1,
    NAL_PRIORITY_HIGH = 2,
    NAL_PRIORITY_HIGHEST = 3,
};
/* The data within the payload is already NAL-encapsulated; the ref_idc and type
* are merely in the struct for easy access by the calling application.
* All data returned in an x264_nal_t, including the data in p_payload, is no longer
* valid after the next call to x264_encoder_encode. Thus it must be used or copied
* before calling x264_encoder_encode or x264_encoder_headers again. */
/* One encapsulated NAL unit; see the lifetime caveat in the comment above
 * (payload is invalidated by the next x264_encoder_encode call). */
typedef struct x264_nal_t
{
    int i_ref_idc; /* nal_priority_e */
    int i_type; /* nal_unit_type_e */
    int b_long_startcode; /* nonzero if a 4-byte (rather than 3-byte) startcode is used */
    int i_first_mb; /* If this NAL is a slice, the index of the first MB in the slice. */
    int i_last_mb; /* If this NAL is a slice, the index of the last MB in the slice. */
    /* Size of payload (including any padding) in bytes. */
    int i_payload;
    /* If param->b_annexb is set, Annex-B bytestream with startcode.
     * Otherwise, startcode is replaced with a 4-byte size.
     * This size is the size used in mp4/similar muxing; it is equal to i_payload-4 */
    uint8_t *p_payload;
    /* Size of padding in bytes. */
    int i_padding;
} x264_nal_t;
/****************************************************************************
* Encoder parameters
****************************************************************************/
/* CPU flags */
/* NOTE: flag namespaces are per-architecture and overlap by design
 * (e.g. 0x0000001 is CMOV on x86 but ALTIVEC on PowerPC); interpret
 * against the architecture actually detected. */
/* x86 */
#define X264_CPU_CMOV 0x0000001
#define X264_CPU_MMX 0x0000002
#define X264_CPU_MMX2 0x0000004 /* MMX2 aka MMXEXT aka ISSE */
#define X264_CPU_MMXEXT X264_CPU_MMX2
#define X264_CPU_SSE 0x0000008
#define X264_CPU_SSE2 0x0000010
#define X264_CPU_SSE3 0x0000020
#define X264_CPU_SSSE3 0x0000040
#define X264_CPU_SSE4 0x0000080 /* SSE4.1 */
#define X264_CPU_SSE42 0x0000100 /* SSE4.2 */
#define X264_CPU_LZCNT 0x0000200 /* Phenom support for "leading zero count" instruction. */
#define X264_CPU_AVX 0x0000400 /* AVX support: requires OS support even if YMM registers aren't used. */
#define X264_CPU_XOP 0x0000800 /* AMD XOP */
#define X264_CPU_FMA4 0x0001000 /* AMD FMA4 */
#define X264_CPU_FMA3 0x0002000 /* FMA3 */
#define X264_CPU_AVX2 0x0004000 /* AVX2 */
#define X264_CPU_BMI1 0x0008000 /* BMI1 */
#define X264_CPU_BMI2 0x0010000 /* BMI2 */
/* x86 modifiers */
#define X264_CPU_CACHELINE_32 0x0020000 /* avoid memory loads that span the border between two cachelines */
#define X264_CPU_CACHELINE_64 0x0040000 /* 32/64 is the size of a cacheline in bytes */
#define X264_CPU_SSE2_IS_SLOW 0x0080000 /* avoid most SSE2 functions on Athlon64 */
#define X264_CPU_SSE2_IS_FAST 0x0100000 /* a few functions are only faster on Core2 and Phenom */
#define X264_CPU_SLOW_SHUFFLE 0x0200000 /* The Conroe has a slow shuffle unit (relative to overall SSE performance) */
#define X264_CPU_STACK_MOD4 0x0400000 /* if stack is only mod4 and not mod16 */
#define X264_CPU_SLOW_CTZ 0x0800000 /* BSR/BSF x86 instructions are really slow on some CPUs */
#define X264_CPU_SLOW_ATOM 0x1000000 /* The Atom is terrible: slow SSE unaligned loads, slow
                                      * SIMD multiplies, slow SIMD variable shifts, slow pshufb,
                                      * cacheline split penalties -- gather everything here that
                                      * isn't shared by other CPUs to avoid making half a dozen
                                      * new SLOW flags. */
#define X264_CPU_SLOW_PSHUFB 0x2000000 /* such as on the Intel Atom */
#define X264_CPU_SLOW_PALIGNR 0x4000000 /* such as on the AMD Bobcat */
/* PowerPC */
#define X264_CPU_ALTIVEC 0x0000001
/* ARM and AArch64 */
#define X264_CPU_ARMV6 0x0000001
#define X264_CPU_NEON 0x0000002 /* ARM NEON */
#define X264_CPU_FAST_NEON_MRC 0x0000004 /* Transfer from NEON to ARM register is fast (Cortex-A9) */
#define X264_CPU_ARMV8 0x0000008
/* MIPS */
#define X264_CPU_MSA 0x0000001 /* MIPS MSA */
/* Analyse flags: partition types the encoder is allowed to consider */
#define X264_ANALYSE_I4x4 0x0001 /* Analyse i4x4 */
#define X264_ANALYSE_I8x8 0x0002 /* Analyse i8x8 (requires 8x8 transform) */
#define X264_ANALYSE_PSUB16x16 0x0010 /* Analyse p16x8, p8x16 and p8x8 */
#define X264_ANALYSE_PSUB8x8 0x0020 /* Analyse p8x4, p4x8, p4x4 */
#define X264_ANALYSE_BSUB16x16 0x0100 /* Analyse b16x8, b8x16 and b8x8 */
/* B-frame direct MV prediction modes */
#define X264_DIRECT_PRED_NONE 0
#define X264_DIRECT_PRED_SPATIAL 1
#define X264_DIRECT_PRED_TEMPORAL 2
#define X264_DIRECT_PRED_AUTO 3
/* motion estimation methods (i_me_method) */
#define X264_ME_DIA 0
#define X264_ME_HEX 1
#define X264_ME_UMH 2
#define X264_ME_ESA 3
#define X264_ME_TESA 4
/* custom quant matrix presets (i_cqm_preset) */
#define X264_CQM_FLAT 0
#define X264_CQM_JVT 1
#define X264_CQM_CUSTOM 2
/* rate control methods (i_rc_method) */
#define X264_RC_CQP 0
#define X264_RC_CRF 1
#define X264_RC_ABR 2
#define X264_QP_AUTO 0
/* adaptive quantization modes (i_aq_mode) */
#define X264_AQ_NONE 0
#define X264_AQ_VARIANCE 1
#define X264_AQ_AUTOVARIANCE 2
#define X264_AQ_AUTOVARIANCE_BIASED 3
/* adaptive B-frame decision (i_bframe_adaptive) */
#define X264_B_ADAPT_NONE 0
#define X264_B_ADAPT_FAST 1
#define X264_B_ADAPT_TRELLIS 2
/* weighted prediction for P-frames (i_weighted_pred) */
#define X264_WEIGHTP_NONE 0
#define X264_WEIGHTP_SIMPLE 1
#define X264_WEIGHTP_SMART 2
/* B-frame pyramid modes (i_bframe_pyramid) */
#define X264_B_PYRAMID_NONE 0
#define X264_B_PYRAMID_STRICT 1
#define X264_B_PYRAMID_NORMAL 2
#define X264_KEYINT_MIN_AUTO 0
#define X264_KEYINT_MAX_INFINITE (1<<30)
/* NULL-terminated name tables indexed by the constants above / Annex E codes */
static const char * const x264_direct_pred_names[] = { "none", "spatial", "temporal", "auto", 0 };
static const char * const x264_motion_est_names[] = { "dia", "hex", "umh", "esa", "tesa", 0 };
static const char * const x264_b_pyramid_names[] = { "none", "strict", "normal", 0 };
static const char * const x264_overscan_names[] = { "undef", "show", "crop", 0 };
static const char * const x264_vidformat_names[] = { "component", "pal", "ntsc", "secam", "mac", "undef", 0 };
static const char * const x264_fullrange_names[] = { "off", "on", 0 };
static const char * const x264_colorprim_names[] = { "", "bt709", "undef", "", "bt470m", "bt470bg", "smpte170m", "smpte240m", "film", "bt2020", "smpte428",
                                                     "smpte431", "smpte432", 0 };
static const char * const x264_transfer_names[] = { "", "bt709", "undef", "", "bt470m", "bt470bg", "smpte170m", "smpte240m", "linear", "log100", "log316",
                                                    "iec61966-2-4", "bt1361e", "iec61966-2-1", "bt2020-10", "bt2020-12", "smpte2084", "smpte428", 0 };
static const char * const x264_colmatrix_names[] = { "GBR", "bt709", "undef", "", "fcc", "bt470bg", "smpte170m", "smpte240m", "YCgCo", "bt2020nc", "bt2020c",
                                                     "smpte2085", 0 };
static const char * const x264_nal_hrd_names[] = { "none", "vbr", "cbr", 0 };
/* Colorspace type (i_csp): low byte selects the format, high bits are modifiers */
#define X264_CSP_MASK 0x00ff /* */
#define X264_CSP_NONE 0x0000 /* Invalid mode */
#define X264_CSP_I420 0x0001 /* yuv 4:2:0 planar */
#define X264_CSP_YV12 0x0002 /* yvu 4:2:0 planar */
#define X264_CSP_NV12 0x0003 /* yuv 4:2:0, with one y plane and one packed u+v */
#define X264_CSP_NV21 0x0004 /* yuv 4:2:0, with one y plane and one packed v+u */
#define X264_CSP_I422 0x0005 /* yuv 4:2:2 planar */
#define X264_CSP_YV16 0x0006 /* yvu 4:2:2 planar */
#define X264_CSP_NV16 0x0007 /* yuv 4:2:2, with one y plane and one packed u+v */
#define X264_CSP_V210 0x0008 /* 10-bit yuv 4:2:2 packed in 32 */
#define X264_CSP_I444 0x0009 /* yuv 4:4:4 planar */
#define X264_CSP_YV24 0x000a /* yvu 4:4:4 planar */
#define X264_CSP_BGR 0x000b /* packed bgr 24bits */
#define X264_CSP_BGRA 0x000c /* packed bgr 32bits */
#define X264_CSP_RGB 0x000d /* packed rgb 24bits */
#define X264_CSP_MAX 0x000e /* end of list */
#define X264_CSP_VFLIP 0x1000 /* the csp is vertically flipped */
#define X264_CSP_HIGH_DEPTH 0x2000 /* the csp has a depth of 16 bits per pixel component */
/* Slice type (forced per input picture) */
#define X264_TYPE_AUTO 0x0000 /* Let x264 choose the right type */
#define X264_TYPE_IDR 0x0001
#define X264_TYPE_I 0x0002
#define X264_TYPE_P 0x0003
#define X264_TYPE_BREF 0x0004 /* Non-disposable B-frame */
#define X264_TYPE_B 0x0005
#define X264_TYPE_KEYFRAME 0x0006 /* IDR or I depending on b_open_gop option */
#define IS_X264_TYPE_I(x) ((x)==X264_TYPE_I || (x)==X264_TYPE_IDR || (x)==X264_TYPE_KEYFRAME)
#define IS_X264_TYPE_B(x) ((x)==X264_TYPE_B || (x)==X264_TYPE_BREF)
/* Log level (i_log_level) */
#define X264_LOG_NONE (-1)
#define X264_LOG_ERROR 0
#define X264_LOG_WARNING 1
#define X264_LOG_INFO 2
#define X264_LOG_DEBUG 3
/* Threading */
#define X264_THREADS_AUTO 0 /* Automatically select optimal number of threads */
#define X264_SYNC_LOOKAHEAD_AUTO (-1) /* Automatically select optimal lookahead thread buffer size */
/* HRD signalling modes (i_nal_hrd) */
#define X264_NAL_HRD_NONE 0
#define X264_NAL_HRD_VBR 1
#define X264_NAL_HRD_CBR 2
/* Zones: override ratecontrol or other options for specific sections of the video.
 * See x264_encoder_reconfig() for which options can be changed.
 * If zones overlap, whichever comes later in the list takes precedence. */
typedef struct x264_zone_t
{
    int i_start, i_end; /* range of frame numbers (inclusive) */
    int b_force_qp; /* whether to use qp vs bitrate factor */
    int i_qp; /* QP forced when b_force_qp is set */
    float f_bitrate_factor; /* bitrate multiplier used when b_force_qp is 0 */
    struct x264_param_t *param; /* optional full parameter override for this zone */
} x264_zone_t;
typedef struct x264_param_t
{
/* CPU flags */
unsigned int cpu;
int i_threads; /* encode multiple frames in parallel */
int i_lookahead_threads; /* multiple threads for lookahead analysis */
int b_sliced_threads; /* Whether to use slice-based threading. */
int b_deterministic; /* whether to allow non-deterministic optimizations when threaded */
int b_cpu_independent; /* force canonical behavior rather than cpu-dependent optimal algorithms */
int i_sync_lookahead; /* threaded lookahead buffer */
/* Video Properties */
int i_width;
int i_height;
int i_csp; /* CSP of encoded bitstream */
int i_level_idc;
int i_frame_total; /* number of frames to encode if known, else 0 */
/* NAL HRD
* Uses Buffering and Picture Timing SEIs to signal HRD
* The HRD in H.264 was not designed with VFR in mind.
* It is therefore not recommendeded to use NAL HRD with VFR.
* Furthermore, reconfiguring the VBV (via x264_encoder_reconfig)
* will currently generate invalid HRD. */
int i_nal_hrd;
struct
{
/* they will be reduced to be 0 < x <= 65535 and prime */
int i_sar_height;
int i_sar_width;
int i_overscan; /* 0=undef, 1=no overscan, 2=overscan */
/* see h264 annex E for the values of the following */
int i_vidformat;
int b_fullrange;
int i_colorprim;
int i_transfer;
int i_colmatrix;
int i_chroma_loc; /* both top & bottom */
} vui;
/* Bitstream parameters */
int i_frame_reference; /* Maximum number of reference frames */
int i_dpb_size; /* Force a DPB size larger than that implied by B-frames and reference frames.
* Useful in combination with interactive error resilience. */
int i_keyint_max; /* Force an IDR keyframe at this interval */
int i_keyint_min; /* Scenecuts closer together than this are coded as I, not IDR. */
int i_scenecut_threshold; /* how aggressively to insert extra I frames */
int b_intra_refresh; /* Whether or not to use periodic intra refresh instead of IDR frames. */
int i_bframe; /* how many b-frame between 2 references pictures */
int i_bframe_adaptive;
int i_bframe_bias;
int i_bframe_pyramid; /* Keep some B-frames as references: 0=off, 1=strict hierarchical, 2=normal */
int b_open_gop;
int b_bluray_compat;
int i_avcintra_class;
int b_deblocking_filter;
int i_deblocking_filter_alphac0; /* [-6, 6] -6 light filter, 6 strong */
int i_deblocking_filter_beta; /* [-6, 6] idem */
int b_cabac;
int i_cabac_init_idc;
int b_interlaced;
int b_constrained_intra;
int i_cqm_preset;
char *psz_cqm_file; /* filename (in UTF-8) of CQM file, JM format */
uint8_t cqm_4iy[16]; /* used only if i_cqm_preset == X264_CQM_CUSTOM */
uint8_t cqm_4py[16];
uint8_t cqm_4ic[16];
uint8_t cqm_4pc[16];
uint8_t cqm_8iy[64];
uint8_t cqm_8py[64];
uint8_t cqm_8ic[64];
uint8_t cqm_8pc[64];
/* Log */
void (*pf_log)( void *, int i_level, const char *psz, va_list );
void *p_log_private;
int i_log_level;
int b_full_recon; /* fully reconstruct frames, even when not necessary for encoding. Implied by psz_dump_yuv */
char *psz_dump_yuv; /* filename (in UTF-8) for reconstructed frames */
/* Encoder analyser parameters */
struct
{
unsigned int intra; /* intra partitions */
unsigned int inter; /* inter partitions */
int b_transform_8x8;
int i_weighted_pred; /* weighting for P-frames */
int b_weighted_bipred; /* implicit weighting for B-frames */
int i_direct_mv_pred; /* spatial vs temporal mv prediction */
int i_chroma_qp_offset;
int i_me_method; /* motion estimation algorithm to use (X264_ME_*) */
int i_me_range; /* integer pixel motion estimation search range (from predicted mv) */
int i_mv_range; /* maximum length of a mv (in pixels). -1 = auto, based on level */
int i_mv_range_thread; /* minimum space between threads. -1 = auto, based on number of threads. */
int i_subpel_refine; /* subpixel motion estimation quality */
int b_chroma_me; /* chroma ME for subpel and mode decision in P-frames */
int b_mixed_references; /* allow each mb partition to have its own reference number */
int i_trellis; /* trellis RD quantization */
int b_fast_pskip; /* early SKIP detection on P-frames */
int b_dct_decimate; /* transform coefficient thresholding on P-frames */
int i_noise_reduction; /* adaptive pseudo-deadzone */
float f_psy_rd; /* Psy RD strength */
float f_psy_trellis; /* Psy trellis strength */
int b_psy; /* Toggle all psy optimizations */
int b_mb_info; /* Use input mb_info data in x264_picture_t */
int b_mb_info_update; /* Update the values in mb_info according to the results of encoding. */
/* the deadzone size that will be used in luma quantization */
int i_luma_deadzone[2]; /* {inter, intra} */
int b_psnr; /* compute and print PSNR stats */
int b_ssim; /* compute and print SSIM stats */
} analyse;
/* Rate control parameters */
struct
{
int i_rc_method; /* X264_RC_* */
int i_qp_constant; /* 0 to (51 + 6*(x264_bit_depth-8)). 0=lossless */
int i_qp_min; /* min allowed QP value */
int i_qp_max; /* max allowed QP value */
int i_qp_step; /* max QP step between frames */
int i_bitrate;
float f_rf_constant; /* 1pass VBR, nominal QP */
float f_rf_constant_max; /* In CRF mode, maximum CRF as caused by VBV */
float f_rate_tolerance;
int i_vbv_max_bitrate;
int i_vbv_buffer_size;
float f_vbv_buffer_init; /* <=1: fraction of buffer_size. >1: kbit */
float f_ip_factor;
float f_pb_factor;
/* VBV filler: force CBR VBV and use filler bytes to ensure hard-CBR.
* Implied by NAL-HRD CBR. */
int b_filler;
int i_aq_mode; /* psy adaptive QP. (X264_AQ_*) */
float f_aq_strength;
int b_mb_tree; /* Macroblock-tree ratecontrol. */
int i_lookahead;
/* 2pass */
int b_stat_write; /* Enable stat writing in psz_stat_out */
char *psz_stat_out; /* output filename (in UTF-8) of the 2pass stats file */
int b_stat_read; /* Read stat from psz_stat_in and use it */
char *psz_stat_in; /* input filename (in UTF-8) of the 2pass stats file */
/* 2pass params (same as ffmpeg ones) */
float f_qcompress; /* 0.0 => cbr, 1.0 => constant qp */
float f_qblur; /* temporally blur quants */
float f_complexity_blur; /* temporally blur complexity */
x264_zone_t *zones; /* ratecontrol overrides */
int i_zones; /* number of zone_t's */
char *psz_zones; /* alternate method of specifying zones */
} rc;
/* Cropping Rectangle parameters: added to those implicitly defined by
non-mod16 video resolutions. */
struct
{
unsigned int i_left;
unsigned int i_top;
unsigned int i_right;
unsigned int i_bottom;
} crop_rect;
/* frame packing arrangement flag */
int i_frame_packing;
/* Muxing parameters */
int b_aud; /* generate access unit delimiters */
int b_repeat_headers; /* put SPS/PPS before each keyframe */
int b_annexb; /* if set, place start codes (4 bytes) before NAL units,
* otherwise place size (4 bytes) before NAL units. */
int i_sps_id; /* SPS and PPS id number */
int b_vfr_input; /* VFR input. If 1, use timebase and timestamps for ratecontrol purposes.
* If 0, use fps only. */
int b_pulldown; /* use explicity set timebase for CFR */
uint32_t i_fps_num;
uint32_t i_fps_den;
uint32_t i_timebase_num; /* Timebase numerator */
uint32_t i_timebase_den; /* Timebase denominator */
int b_tff;
/* Pulldown:
* The correct pic_struct must be passed with each input frame.
* The input timebase should be the timebase corresponding to the output framerate. This should be constant.
* e.g. for 3:2 pulldown timebase should be 1001/30000
* The PTS passed with each frame must be the PTS of the frame after pulldown is applied.
* Frame doubling and tripling require b_vfr_input set to zero (see H.264 Table D-1)
*
* Pulldown changes are not clearly defined in H.264. Therefore, it is the calling app's responsibility to manage this.
*/
int b_pic_struct;
/* Fake Interlaced.
*
* Used only when b_interlaced=0. Setting this flag makes it possible to flag the stream as PAFF interlaced yet
* encode all frames progessively. It is useful for encoding 25p and 30p Blu-Ray streams.
*/
int b_fake_interlaced;
/* Don't optimize header parameters based on video content, e.g. ensure that splitting an input video, compressing
* each part, and stitching them back together will result in identical SPS/PPS. This is necessary for stitching
* with container formats that don't allow multiple SPS/PPS. */
int b_stitchable;
int b_opencl; /* use OpenCL when available */
int i_opencl_device; /* specify count of GPU devices to skip, for CLI users */
void *opencl_device_id; /* pass explicit cl_device_id as void*, for API users */
char *psz_clbin_file; /* filename (in UTF-8) of the compiled OpenCL kernel cache file */
/* Slicing parameters */
int i_slice_max_size; /* Max size per slice in bytes; includes estimated NAL overhead. */
int i_slice_max_mbs; /* Max number of MBs per slice; overrides i_slice_count. */
int i_slice_min_mbs; /* Min number of MBs per slice */
int i_slice_count; /* Number of slices per frame: forces rectangular slices. */
int i_slice_count_max; /* Absolute cap on slices per frame; stops applying slice-max-size
* and slice-max-mbs if this is reached. */
/* Optional callback for freeing this x264_param_t when it is done being used.
* Only used when the x264_param_t sits in memory for an indefinite period of time,
* i.e. when an x264_param_t is passed to x264_t in an x264_picture_t or in zones.
* Not used when x264_encoder_reconfig is called directly. */
void (*param_free)( void* );
/* Optional low-level callback for low-latency encoding. Called for each output NAL unit
* immediately after the NAL unit is finished encoding. This allows the calling application
* to begin processing video data (e.g. by sending packets over a network) before the frame
* is done encoding.
*
* This callback MUST do the following in order to work correctly:
* 1) Have available an output buffer of at least size nal->i_payload*3/2 + 5 + 64.
* 2) Call x264_nal_encode( h, dst, nal ), where dst is the output buffer.
* After these steps, the content of nal is valid and can be used in the same way as if
* the NAL unit were output by x264_encoder_encode.
*
* This does not need to be synchronous with the encoding process: the data pointed to
* by nal (both before and after x264_nal_encode) will remain valid until the next
* x264_encoder_encode call. The callback must be re-entrant.
*
* This callback does not work with frame-based threads; threads must be disabled
* or sliced-threads enabled. This callback also does not work as one would expect
* with HRD -- since the buffering period SEI cannot be calculated until the frame
* is finished encoding, it will not be sent via this callback.
*
* Note also that the NALs are not necessarily returned in order when sliced threads is
* enabled. Accordingly, the variable i_first_mb and i_last_mb are available in
* x264_nal_t to help the calling application reorder the slices if necessary.
*
* When this callback is enabled, x264_encoder_encode does not return valid NALs;
* the calling application is expected to acquire all output NALs through the callback.
*
* It is generally sensible to combine this callback with a use of slice-max-mbs or
* slice-max-size.
*
* The opaque pointer is the opaque pointer from the input frame associated with this
* NAL unit. This helps distinguish between nalu_process calls from different sources,
* e.g. if doing multiple encodes in one process.
*/
void (*nalu_process)( x264_t *h, x264_nal_t *nal, void *opaque );
} x264_param_t;
void x264_nal_encode( x264_t *h, uint8_t *dst, x264_nal_t *nal );
/****************************************************************************
* H.264 level restriction information
****************************************************************************/
typedef struct x264_level_t
{
int level_idc;
int mbps; /* max macroblock processing rate (macroblocks/sec) */
int frame_size; /* max frame size (macroblocks) */
int dpb; /* max decoded picture buffer (mbs) */
int bitrate; /* max bitrate (kbit/sec) */
int cpb; /* max vbv buffer (kbit) */
int mv_range; /* max vertical mv component range (pixels) */
int mvs_per_2mb; /* max mvs per 2 consecutive mbs. */
int slice_rate; /* ?? */
int mincr; /* min compression ratio */
int bipred8x8; /* limit bipred to >=8x8 */
int direct8x8; /* limit b_direct to >=8x8 */
int frame_only; /* forbid interlacing */
} x264_level_t;
/* all of the levels defined in the standard, terminated by .level_idc=0 */
X264_API extern const x264_level_t x264_levels[];
/****************************************************************************
* Basic parameter handling functions
****************************************************************************/
/* x264_param_default:
* fill x264_param_t with default values and do CPU detection */
void x264_param_default( x264_param_t * );
/* x264_param_parse:
* set one parameter by name.
* returns 0 on success, or returns one of the following errors.
* note: BAD_VALUE occurs only if it can't even parse the value,
* numerical range is not checked until x264_encoder_open() or
* x264_encoder_reconfig().
* value=NULL means "true" for boolean options, but is a BAD_VALUE for non-booleans. */
#define X264_PARAM_BAD_NAME (-1)
#define X264_PARAM_BAD_VALUE (-2)
int x264_param_parse( x264_param_t *, const char *name, const char *value );
/****************************************************************************
* Advanced parameter handling functions
****************************************************************************/
/* These functions expose the full power of x264's preset-tune-profile system for
* easy adjustment of large numbers of internal parameters.
*
* In order to replicate x264CLI's option handling, these functions MUST be called
* in the following order:
* 1) x264_param_default_preset
* 2) Custom user options (via param_parse or directly assigned variables)
* 3) x264_param_apply_fastfirstpass
* 4) x264_param_apply_profile
*
* Additionally, x264CLI does not apply step 3 if the preset chosen is "placebo"
* or --slow-firstpass is set. */
/* x264_param_default_preset:
* The same as x264_param_default, but also use the passed preset and tune
* to modify the default settings.
* (either can be NULL, which implies no preset or no tune, respectively)
*
* Currently available presets are, ordered from fastest to slowest: */
/* Preset identifiers accepted by x264_param_default_preset, ordered from
 * fastest ("ultrafast") to slowest ("placebo"); NULL-terminated. */
static const char * const x264_preset_names[] = {
    "ultrafast", "superfast", "veryfast", "faster", "fast",
    "medium", "slow", "slower", "veryslow", "placebo",
    0
};
/* The presets can also be indexed numerically, as in:
* x264_param_default_preset( ¶m, "3", ... )
* with ultrafast mapping to "0" and placebo mapping to "9". This mapping may
* of course change if new presets are added in between, but will always be
* ordered from fastest to slowest.
*
* Warning: the speed of these presets scales dramatically. Ultrafast is a full
* 100 times faster than placebo!
*
* Currently available tunings are: */
/* Tuning identifiers accepted by x264_param_default_preset; NULL-terminated.
 * See the notes below on combining tunings and on which are psy tunings. */
static const char * const x264_tune_names[] = {
    "film", "animation", "grain", "stillimage",
    "psnr", "ssim", "fastdecode", "zerolatency",
    0
};
/* Multiple tunings can be used if separated by a delimiter in ",./-+",
* however multiple psy tunings cannot be used.
* film, animation, grain, stillimage, psnr, and ssim are psy tunings.
*
* returns 0 on success, negative on failure (e.g. invalid preset/tune name). */
int x264_param_default_preset( x264_param_t *, const char *preset, const char *tune );
/* x264_param_apply_fastfirstpass:
* If first-pass mode is set (rc.b_stat_read == 0, rc.b_stat_write == 1),
* modify the encoder settings to disable options generally not useful on
* the first pass. */
void x264_param_apply_fastfirstpass( x264_param_t * );
/* x264_param_apply_profile:
* Applies the restrictions of the given profile.
* Currently available profiles are, from most to least restrictive: */
/* Profile identifiers accepted by x264_param_apply_profile, ordered from
 * most to least restrictive; NULL-terminated. */
static const char * const x264_profile_names[] = {
    "baseline", "main", "high",
    "high10", "high422", "high444",
    0
};
/* (can be NULL, in which case the function will do nothing)
*
* Does NOT guarantee that the given profile will be used: if the restrictions
* of "High" are applied to settings that are already Baseline-compatible, the
* stream will remain baseline. In short, it does not increase settings, only
* decrease them.
*
* returns 0 on success, negative on failure (e.g. invalid profile name). */
int x264_param_apply_profile( x264_param_t *, const char *profile );
/****************************************************************************
* Picture structures and functions
****************************************************************************/
/* x264_bit_depth:
* Specifies the number of bits per pixel that x264 uses. This is also the
* bit depth that x264 encodes in. If this value is > 8, x264 will read
* two bytes of input data for each pixel sample, and expect the upper
* (16-x264_bit_depth) bits to be zero.
* Note: The flag X264_CSP_HIGH_DEPTH must be used to specify the
* colorspace depth as well. */
X264_API extern const int x264_bit_depth;
/* x264_chroma_format:
* Specifies the chroma formats that x264 supports encoding. When this
* value is non-zero, then it represents a X264_CSP_* that is the only
* chroma format that x264 supports encoding. If the value is 0 then
* there are no restrictions. */
X264_API extern const int x264_chroma_format;
/* pic_struct values for x264_picture_t.i_pic_struct, used only when
 * x264_param_t.b_pic_struct is set; see the pulldown notes above, which
 * reference H.264 Table D-1 for frame doubling/tripling. */
enum pic_struct_e
{
PIC_STRUCT_AUTO = 0, // automatically decide (default)
PIC_STRUCT_PROGRESSIVE = 1, // progressive frame
// "TOP" and "BOTTOM" are not supported in x264 (PAFF only)
PIC_STRUCT_TOP_BOTTOM = 4, // top field followed by bottom
PIC_STRUCT_BOTTOM_TOP = 5, // bottom field followed by top
PIC_STRUCT_TOP_BOTTOM_TOP = 6, // top field, bottom field, top field repeated
PIC_STRUCT_BOTTOM_TOP_BOTTOM = 7, // bottom field, top field, bottom field repeated
PIC_STRUCT_DOUBLE = 8, // double frame
PIC_STRUCT_TRIPLE = 9, // triple frame
};
/* Out: HRD (hypothetical reference decoder) timing for one encoded picture.
 * Filled into x264_picture_t.hrd_timing, output only when i_nal_hrd is set. */
typedef struct x264_hrd_t
{
double cpb_initial_arrival_time; /* coded picture buffer: initial arrival */
double cpb_final_arrival_time; /* coded picture buffer: final arrival */
double cpb_removal_time; /* coded picture buffer: removal time */
double dpb_output_time; /* decoded picture buffer: output time */
} x264_hrd_t;
/* Arbitrary user SEI:
* Payload size is in bytes and the payload pointer must be valid.
* Payload types and syntax can be found in Annex D of the H.264 Specification.
* SEI payload alignment bits as described in Annex D must be included at the
* end of the payload if needed.
* The payload should not be NAL-encapsulated.
* Payloads are written first in order of input, apart from in the case when HRD
* is enabled where payloads are written after the Buffering Period SEI. */
/* A single raw (non-NAL-encapsulated) SEI payload; see the "Arbitrary user
 * SEI" notes above for syntax and alignment requirements. */
typedef struct x264_sei_payload_t
{
int payload_size; /* payload size in bytes */
int payload_type; /* SEI payload type (see Annex D of the H.264 spec) */
uint8_t *payload; /* payload bytes; the pointer must be valid (see above) */
} x264_sei_payload_t;
/* A set of user SEI payloads attached to one input picture via
 * x264_picture_t.extra_sei. */
typedef struct x264_sei_t
{
int num_payloads; /* number of entries in payloads[] */
x264_sei_payload_t *payloads; /* array of num_payloads SEI payloads */
/* In: optional callback to free each payload AND x264_sei_payload_t when used. */
void (*sei_free)( void* );
} x264_sei_t;
/* Planar raw image description: the input picture for encoding and the
 * reconstructed output (x264_picture_t.img). */
typedef struct x264_image_t
{
int i_csp; /* Colorspace */
int i_plane; /* Number of image planes */
int i_stride[4]; /* Strides for each plane */
uint8_t *plane[4]; /* Pointers to each plane */
} x264_image_t;
/* Per-picture side data (x264_picture_t.prop): optional encoder hints on
 * input, quality metrics on output. */
typedef struct x264_image_properties_t
{
/* All arrays of data here are ordered as follows:
* each array contains one offset per macroblock, in raster scan order. In interlaced
* mode, top-field MBs and bottom-field MBs are interleaved at the row level.
* Macroblocks are 16x16 blocks of pixels (with respect to the luma plane). For the
* purposes of calculating the number of macroblocks, width and height are rounded up to
* the nearest 16. If in interlaced mode, height is rounded up to the nearest 32 instead. */
/* In: an array of quantizer offsets to be applied to this image during encoding.
* These are added on top of the decisions made by x264.
* Offsets can be fractional; they are added before QPs are rounded to integer.
* Adaptive quantization must be enabled to use this feature. Behavior if quant
* offsets differ between encoding passes is undefined. */
float *quant_offsets;
/* In: optional callback to free quant_offsets when used.
* Useful if one wants to use a different quant_offset array for each frame. */
void (*quant_offsets_free)( void* );
/* In: optional array of flags for each macroblock.
* Allows specifying additional information for the encoder such as which macroblocks
* remain unchanged. Usable flags are listed below.
* x264_param_t.analyse.b_mb_info must be set to use this, since x264 needs to track
* extra data internally to make full use of this information.
*
* Out: if b_mb_info_update is set, x264 will update this array as a result of encoding.
*
* For "MBINFO_CONSTANT", it will remove this flag on any macroblock whose decoded
* pixels have changed. This can be useful for e.g. noting which areas of the
* frame need to actually be blitted. Note: this intentionally ignores the effects
* of deblocking for the current frame, which should be fine unless one needs exact
* pixel-perfect accuracy.
*
* Results for MBINFO_CONSTANT are currently only set for P-frames, and are not
* guaranteed to enumerate all blocks which haven't changed. (There may be false
* negatives, but no false positives.)
*/
uint8_t *mb_info;
/* In: optional callback to free mb_info when used. */
void (*mb_info_free)( void* );
/* The macroblock is constant and remains unchanged from the previous frame. */
#define X264_MBINFO_CONSTANT (1<<0)
/* More flags may be added in the future. */
/* Out: SSIM of the frame luma (if x264_param_t.b_ssim is set) */
double f_ssim;
/* Out: Average PSNR of the frame (if x264_param_t.b_psnr is set) */
double f_psnr_avg;
/* Out: PSNR of Y, U, and V (if x264_param_t.b_psnr is set) */
double f_psnr[3];
/* Out: Average effective CRF of the encoded frame */
double f_crf_avg;
} x264_image_properties_t;
/* One picture passed to / returned from x264_encoder_encode. Initialize with
 * x264_picture_init (caller-allocated) or x264_picture_alloc (see below). */
typedef struct x264_picture_t
{
/* In: force picture type (if not auto)
* If x264 encoding parameters are violated in the forcing of picture types,
* x264 will correct the input picture type and log a warning.
* Out: type of the picture encoded */
int i_type;
/* In: force quantizer for != X264_QP_AUTO */
int i_qpplus1;
/* In: pic_struct, for pulldown/doubling/etc...used only if b_pic_struct=1.
* use pic_struct_e for pic_struct inputs
* Out: pic_struct element associated with frame */
int i_pic_struct;
/* Out: whether this frame is a keyframe. Important when using modes that result in
* SEI recovery points being used instead of IDR frames. */
int b_keyframe;
/* In: user pts, Out: pts of encoded picture (user)*/
int64_t i_pts;
/* Out: frame dts. When the pts of the first frame is close to zero,
* initial frames may have a negative dts which must be dealt with by any muxer */
int64_t i_dts;
/* In: custom encoding parameters to be set from this frame forwards
(in coded order, not display order). If NULL, continue using
parameters from the previous frame. Some parameters, such as
aspect ratio, can only be changed per-GOP due to the limitations
of H.264 itself; in this case, the caller must force an IDR frame
if it needs the changed parameter to apply immediately. */
x264_param_t *param;
/* In: raw image data */
/* Out: reconstructed image data. x264 may skip part of the reconstruction process,
e.g. deblocking, in frames where it isn't necessary. To force complete
reconstruction, at a small speed cost, set b_full_recon. */
x264_image_t img;
/* In: optional information to modify encoder decisions for this frame
* Out: information about the encoded frame */
x264_image_properties_t prop;
/* Out: HRD timing information. Output only when i_nal_hrd is set. */
x264_hrd_t hrd_timing;
/* In: arbitrary user SEI (e.g subtitles, AFDs) */
x264_sei_t extra_sei;
/* private user data. copied from input to output frames. */
void *opaque;
} x264_picture_t;
/* x264_picture_init:
* initialize an x264_picture_t. Needs to be done if the calling application
* allocates its own x264_picture_t as opposed to using x264_picture_alloc. */
void x264_picture_init( x264_picture_t *pic );
/* x264_picture_alloc:
* alloc data for a picture. You must call x264_picture_clean on it.
* returns 0 on success, or -1 on malloc failure or invalid colorspace. */
int x264_picture_alloc( x264_picture_t *pic, int i_csp, int i_width, int i_height );
/* x264_picture_clean:
* free associated resource for a x264_picture_t allocated with
* x264_picture_alloc ONLY */
void x264_picture_clean( x264_picture_t *pic );
/****************************************************************************
* Encoder functions
****************************************************************************/
/* Force a link error in the case of linking against an incompatible API version.
* Glue #defines exist to force correct macro expansion; the final output of the macro
* is x264_encoder_open_##X264_BUILD (for purposes of dlopen). */
#define x264_encoder_glue1(x,y) x##y
#define x264_encoder_glue2(x,y) x264_encoder_glue1(x,y)
#define x264_encoder_open x264_encoder_glue2(x264_encoder_open_,X264_BUILD)
/* x264_encoder_open:
* create a new encoder handler, all parameters from x264_param_t are copied */
x264_t *x264_encoder_open( x264_param_t * );
/* x264_encoder_reconfig:
* various parameters from x264_param_t are copied.
* this takes effect immediately, on whichever frame is encoded next;
* due to delay, this may not be the next frame passed to encoder_encode.
* if the change should apply to some particular frame, use x264_picture_t->param instead.
* returns 0 on success, negative on parameter validation error.
* not all parameters can be changed; see the actual function for a detailed breakdown.
*
* since not all parameters can be changed, moving from preset to preset may not always
* fully copy all relevant parameters, but should still work usably in practice. however,
* more so than for other presets, many of the speed shortcuts used in ultrafast cannot be
* switched out of; using reconfig to switch between ultrafast and other presets is not
* recommended without a more fine-grained breakdown of parameters to take this into account. */
int x264_encoder_reconfig( x264_t *, x264_param_t * );
/* x264_encoder_parameters:
* copies the current internal set of parameters to the pointer provided
* by the caller. useful when the calling application needs to know
* how x264_encoder_open has changed the parameters, or the current state
* of the encoder after multiple x264_encoder_reconfig calls.
* note that the data accessible through pointers in the returned param struct
* (e.g. filenames) should not be modified by the calling application. */
void x264_encoder_parameters( x264_t *, x264_param_t * );
/* x264_encoder_headers:
* return the SPS and PPS that will be used for the whole stream.
* *pi_nal is the number of NAL units outputted in pp_nal.
* returns the number of bytes in the returned NALs.
* returns negative on error.
* the payloads of all output NALs are guaranteed to be sequential in memory. */
int x264_encoder_headers( x264_t *, x264_nal_t **pp_nal, int *pi_nal );
/* x264_encoder_encode:
* encode one picture.
* *pi_nal is the number of NAL units outputted in pp_nal.
* returns the number of bytes in the returned NALs.
* returns negative on error and zero if no NAL units returned.
* the payloads of all output NALs are guaranteed to be sequential in memory. */
int x264_encoder_encode( x264_t *, x264_nal_t **pp_nal, int *pi_nal, x264_picture_t *pic_in, x264_picture_t *pic_out );
/* x264_encoder_close:
* close an encoder handler */
void x264_encoder_close( x264_t * );
/* x264_encoder_delayed_frames:
* return the number of currently delayed (buffered) frames
* this should be used at the end of the stream, to know when you have all the encoded frames. */
int x264_encoder_delayed_frames( x264_t * );
/* x264_encoder_maximum_delayed_frames( x264_t *h ):
* return the maximum number of delayed (buffered) frames that can occur with the current
* parameters. */
int x264_encoder_maximum_delayed_frames( x264_t *h );
/* x264_encoder_intra_refresh:
* If an intra refresh is not in progress, begin one with the next P-frame.
* If an intra refresh is in progress, begin one as soon as the current one finishes.
* Requires that b_intra_refresh be set.
*
* Useful for interactive streaming where the client can tell the server that packet loss has
* occurred. In this case, keyint can be set to an extremely high value so that intra refreshes
* only occur when calling x264_encoder_intra_refresh.
*
* In multi-pass encoding, if x264_encoder_intra_refresh is called differently in each pass,
* behavior is undefined.
*
* Should not be called during an x264_encoder_encode. */
void x264_encoder_intra_refresh( x264_t * );
/* x264_encoder_invalidate_reference:
* An interactive error resilience tool, designed for use in a low-latency one-encoder-few-clients
* system. When the client has packet loss or otherwise incorrectly decodes a frame, the encoder
* can be told with this command to "forget" the frame and all frames that depend on it, referencing
* only frames that occurred before the loss. This will force a keyframe if no frames are left to
* reference after the aforementioned "forgetting".
*
* It is strongly recommended to use a large i_dpb_size in this case, which allows the encoder to
* keep around extra, older frames to fall back on in case more recent frames are all invalidated.
* Unlike increasing i_frame_reference, this does not increase the number of frames used for motion
* estimation and thus has no speed impact. It is also recommended to set a very large keyframe
* interval, so that keyframes are not used except as necessary for error recovery.
*
* x264_encoder_invalidate_reference is not currently compatible with the use of B-frames or intra
* refresh.
*
* In multi-pass encoding, if x264_encoder_invalidate_reference is called differently in each pass,
* behavior is undefined.
*
* Should not be called during an x264_encoder_encode, but multiple calls can be made simultaneously.
*
* Returns 0 on success, negative on failure. */
int x264_encoder_invalidate_reference( x264_t *, int64_t pts );
#ifdef __cplusplus
}
#endif
#endif
================================================
FILE: Android_demo/prebuilt/include/x264_config.h
================================================
/* Build-time configuration constants for this prebuilt x264 library. */
#define X264_BIT_DEPTH 8 /* internal/encoded bit depth (see x264_bit_depth in x264.h) */
#define X264_GPL 1 /* presumably GPL-only features compiled in -- TODO confirm */
#define X264_INTERLACED 1 /* presumably interlaced support compiled in -- TODO confirm */
#define X264_CHROMA_FORMAT 0 /* 0 = no chroma-format restriction (see x264_chroma_format) */
#define X264_REV 2744 /* upstream revision this build is based on */
#define X264_REV_DIFF 4 /* presumably commits on top of X264_REV -- TODO confirm */
#define X264_VERSION " r2744+4M e192189"
#define X264_POINTVER "0.148.2744+4M e192189"
================================================
FILE: README.md
================================================
# *KSC265codec(v2.6.1.3)*
Note that the libraries used in our iOS/Android demos have an expiration time.
## ENCODER
Usage: command line examples
```
AppEncoder_x64 -i p_3840x2160_50.yuv -preset ultrafast/veryfast/slow/veryslow -latency offline -wdt 3840 -hgt 2160 -fr 50 -rc 1 -br 20000 -iper 128 -b test.265
AppEncoder_x64 -i p_3840x2160_50.yuv -preset ultrafast/veryfast/slow/veryslow -latency offline -wdt 3840 -hgt 2160 -fr 50 -rc 0 -qp 27 -iper 128 -b test.265
AppEncoder_x64 -i p_3840x2160_50.yuv -preset ultrafast/veryfast/slow/veryslow -latency offline -wdt 3840 -hgt 2160 -fr 50 -rc 3 -crf 24 -iper 128 -b test.265
```
### Basic parameters:
-preset [preset_value],
which specifies the encoding speed by the character string [preset_value], among strings of "superfast", "veryfast", "fast", "medium", "slow", "veryslow" and "placebo".
-latency [latency_value],
which specifies the encoding latency by the character string [latency_value], among strings of "zerolatency", "livestreaming", "offline". Note that, in the presets of ultrafast~veryfast, the latency under "livestreaming" and "offline" is the same.
-i [input_filename],
which specifies the address of the input YUV file in 4:2:0 sampling format by a character string [input_filename].
-wdt [width],
which specifies the image width of the input video by a positive integer value [width].
-hgt [height],
which specifies the image height of the input video by a positive integer value [height].
-fr [framerate],
which specifies the frame rate of the input video by a positive integer value [framerate].
-iper [intraperiod],
which specifies the maximum distances between consecutive I pictures by a positive integer value [intraperiod].
-rc [rctype],
which specifies the rate control type by the positive integer value [rctype] valuing among values 0(fixed qp), 1(cbr), 2(abr) and 3(crf). There are four cases:
* -br [bitrate] should be followed. If [rctype] equals to 1 or 2, a parameter -br [bitrate] should be followed and specifies the target encoding bit-rate by the positive value [bitrate] (kbps,kilo bit rate per second).
* -qp [qp_value] should be followed. If [rctype] equals to 0, a parameter -qp [qp_value] should be followed and specifies the target encoding quantization parameter by the positive value [qp_value] ranging from 0 to 51.
* -crf [crf_value] should be followed. If [rctype] equals to 3, a parameter -crf [crf_value] should be followed and specifies the target crf parameter by the positive value [crf_value] ranging from 0 to 51.
-b [stream_filename],
which specifies the address of the output stream file in HEVC/H.265 format by a character string [stream_filename]. Default: no stream is output.
### Optional parameters:
-v or -V [version],
which is utilized to print the version and copyright of the encoder.
-psnr [psnrcalc],
which specifies psnr calculation method by a non-negative value [psnrcalc], and
* 0 (as a default value) means disabling psnr calculation,
* 1 means enabling psnr calculation and outputting the overall psnr result.
* 2 means enabling psnr calculation and outputting psnr info for each frame.
-o [reconstructYUV],
which specifies the address of the reconstructed yuv file in 4:2:0 format by a character string [reconstructYUV]. Default: no reconstructed YUV file is output.
-frms [frame_no],
which specifies the number of frames to be encoded for the input video by a positive integer value [frame_no]. Default: [frame_no] = -1, when all input frames are encoded.
-threads [thread_no],
which specifies the number of threads used to encode the input video by a non-negative value [thread_no]. Default: [thread_no] = 0, when all available threads can be utilized.
-bframes[value1], -vbv-maxrate [value2] , -vbv-bufsize[value3],
which specifies similar meanings as similar values defined in x264
## DECODER
Usage: command line examples
```
AppDecoder_x64.exe -b test.265 -o test.yuv -threads 2
```
### Basic parameters:
-v or -V [version]
which specifies the decoder version and copyright.
-b [bitstream],
which specifies input bit-stream file by a character string [bitstream].
### Optional parameters:
-o [output],
which specifies the decoded yuv file name by a character string [output].
-threads [threadnum],
which specifies the number of threads used for decoding process by a non-negative value [threadnum]. Default: [threadnum] = 0, when all available threads can be utilized.
## Performance of decoder
KSC265 decoder is compared with openHEVC in ffmpeg on ARM64@Android, ARM64@iOS and x86 platforms.
| decoding speed
( ksc265inFFmpeg / openHEVCInFFmpeg) | iOS
(iPad mini2) | Android
(VIVOxplay5a) | PC
(E5-2690 v3 @ 2.60GHz) |
| ---------------------------------------- | ------------------- | ------------------------ | ----------------------------- |
| 1 thread | 2.90 | 2.85 | 2.11 |
| full threads | 2.69 | 2.99 | 3.89 |
On average, as the above table shows, the KSC265 decoder can achieve more than 2/2.5 times the speed of openHEVC in ffmpeg on x86/ARM; details can be found in the Excel documents on decoding performance. Moreover, as the following table shows, the decoding speed of KSC265 can now comfortably support 1080p@25fps applications.
| decoding speed of ksc265inFFmpeg
(in frames per second) | iOS
(iPad mini2) | Android
(VIVOxplay5a) | PC
(E5-2690 v3 @ 2.60GHz) |
| ---------------------------------------- | ------------------- | ------------------------ | ----------------------------- |
| 1920x1080 @ 1thread | 32.06 | 32.94 | 177.19 |
| 1280x720 @ 1thread | 77.88 | 89.60 | 346.24 |
| 1920x1080 @ full threads | 51.13 | 90.44 | 939.25 |
| 1280x720 @ full threads | 120.20 | 187.16 | 1976.24 |
## Performance of encoder
KSC265 encoder is firstly compared with X265-v2.4, X264 and vp9 on Win7@i7-4790@4threads using following parameters:
```
x264.exe -o out.264 BQSquare_416x240_60.yuv --input-res 416x240 --preset [superfast|veryfast|slow|placebo] --fps [framerate] --profile high --aq-mode 0 --no-psy --psnr --bitrate [btrNumber] --threads 1/0 --keyint [framerate * 10] --frames 1000000
AppEncoder_x64.exe -b out.265 -i BQSquare_416x240_60.yuv -preset [veryfast|slow|veryslow] -threads 1/0 -psnr 2 -rc 1 -br [btrNumber] -frms 1000000 -iper [framerate * 10]
x265.exe -o out.265 --input BQSquare_416x240_60.yuv --input-res 416x240 --preset [ultrafast|ultrafast|slow|veryslow] --fps [framerate] --aq-mode 0 --no-psy-rd --no-psy-rdoq --psnr --bitrate [btrNumber] --frame-threads [1|0] --no-wpp/--wpp --keyint [framerate * 10] --frames 1000000
vpxenc.exe --codec=vp9 --passes=1 --[rt|goog|best] --fps=[framerate]/1 --i420 --end-usage=vbr --target-bitrate=[btrNumber] --kf-max-dist=[framerate * 10] --cpu-used=8 --threads=[1|4] --psnr -w 416 -h 240 -o out.vp9 BQSquare_416x240_60.yuv --frame-parallel=0
```
Then on test sequences of JCTVC CLASS-A ~ CLASS-E, and one class of game videos@30fps, compared to x264(20161020), x265-v2.4 and vp9 in the speed form of encoded frames per second (fps), the average performance of KS265 can be summarized by the follows.
| | KSC265 vs. X264 | KSC265 vs. X264 | KSC265 vs. X265 | KSC265 vs. X265 | KSC265 vs. vp9 | KSC265 vs. vp9 |
| ------------------------ | ----------------------- | ---------------------- | ----------------------- | ---------------------- | ----------------------- | --------------------- |
| full-thread comparisons | Bitsaving @same quality | Speedup @same quality | Bitsaving @same quality | Speedup @same bitrate | Bitsaving @same quality | Speedup @same bitrate |
| RealTime+ | 43.7% | -5.4% | 26.0% | 212.6% | 30.5% | 147.9% |
| RealTime | 41.2% | 8.9% | 34.5% | 123.2% | 38.0% | 75.5% |
| Transcode | 36.2% | -5.1% | 23.7% | 199.1% | 32.6% | 179.0% |
| Best Ratio | 35.3% | 83.3% | 10.9% | 84.1% | 23.1% | 778.0% |
Secondly, on test sequences of JCTVC CLASS-A ~ CLASS-E, and one class of showself videos@15fps, compared to x264(20161020), x265-v2.4 and vp9 in terms of encoded frames per second (fps), the average performance of KSC265 on OPPOR9s@1thread and iPad mini2@1thread can be summarized as follows.
| | Android&iOS | Android | iOS |
| ------------------------ | ----------------------- | ---------------------- | ---------------------- |
| full-thread comparisons | Bitsaving @same quality | Speedup @same quality | Speedup @same quality |
| superfast | 43.9% | -4.1% | 6.3% |
| veryfast | 43.4% | -2.2% | 1.2% |
| fast | 38.0% | 7.3% | 4.7% |
| medium | 36.2% | 4.8% | 7.1% |
The details are described in the excel document.
================================================
FILE: centos_x64/READme.txt
================================================
CentOS release 6.5
gcc 4.8.2-15
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/AppDelegate.h
================================================
//
// AppDelegate.h
// KSY265CodecDemo_iOS
//
// Created by 江东 on 17/3/17.
// Copyright © 2017年 江东. All rights reserved.
//
#import <UIKit/UIKit.h>

// Application delegate for the demo app (standard Xcode template).
// NOTE(review): the extraction dropped the angle-bracket target of the
// #import and the protocol conformance list; restored to the stock
// template form — confirm against the original project.
@interface AppDelegate : UIResponder <UIApplicationDelegate>

// Main application window.
@property (strong, nonatomic) UIWindow *window;

@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/AppDelegate.m
================================================
//
// AppDelegate.m
// KSY265CodecDemo_iOS
//
// Created by 江东 on 17/3/17.
// Copyright © 2017年 江东. All rights reserved.
//
#import "AppDelegate.h"
@interface AppDelegate ()
@end
// Application delegate — unmodified Xcode template; every lifecycle hook
// below is a default no-op.
@implementation AppDelegate

- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
    // Override point for customization after application launch.
    return YES;
}

- (void)applicationWillResignActive:(UIApplication *)application {
    // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
    // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game.
}

- (void)applicationDidEnterBackground:(UIApplication *)application {
    // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
    // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
}

- (void)applicationWillEnterForeground:(UIApplication *)application {
    // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background.
}

- (void)applicationDidBecomeActive:(UIApplication *)application {
    // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
}

- (void)applicationWillTerminate:(UIApplication *)application {
    // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
}

@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/Assets.xcassets/AppIcon.appiconset/Contents.json
================================================
{
"images" : [
{
"idiom" : "iphone",
"size" : "20x20",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "20x20",
"scale" : "3x"
},
{
"idiom" : "iphone",
"size" : "29x29",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "29x29",
"scale" : "3x"
},
{
"idiom" : "iphone",
"size" : "40x40",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "40x40",
"scale" : "3x"
},
{
"idiom" : "iphone",
"size" : "60x60",
"scale" : "2x"
},
{
"idiom" : "iphone",
"size" : "60x60",
"scale" : "3x"
},
{
"idiom" : "ipad",
"size" : "20x20",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "20x20",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "29x29",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "29x29",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "40x40",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "40x40",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "76x76",
"scale" : "1x"
},
{
"idiom" : "ipad",
"size" : "76x76",
"scale" : "2x"
},
{
"idiom" : "ipad",
"size" : "83.5x83.5",
"scale" : "2x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/Assets.xcassets/Contents.json
================================================
{
"info" : {
"version" : 1,
"author" : "xcode"
}
}
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/Assets.xcassets/first.imageset/Contents.json
================================================
{
"images" : [
{
"idiom" : "universal",
"filename" : "first.pdf"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/Assets.xcassets/recycle-bin.imageset/Contents.json
================================================
{
"images" : [
{
"idiom" : "universal",
"filename" : "recycle-bin.png",
"scale" : "1x"
},
{
"idiom" : "universal",
"scale" : "2x"
},
{
"idiom" : "universal",
"scale" : "3x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/Assets.xcassets/second.imageset/Contents.json
================================================
{
"images" : [
{
"idiom" : "universal",
"filename" : "second.pdf"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/Base.lproj/LaunchScreen.storyboard
================================================
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/Base.lproj/Main.storyboard
================================================
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/BaseViewController.h
================================================
//
// BaseViewController.h
// KSYVideoClipsDemo
//
// Created by iVermisseDich on 2017/2/15.
// Copyright © 2017年 com.ksyun. All rights reserved.
//
#import <UIKit/UIKit.h>

// Common UI-building helpers shared by the demo's view controllers.
// NOTE(review): the extraction dropped the target of the #import above;
// restored to UIKit, which every declaration below requires.
@interface BaseViewController : UIViewController

// Segmented control wired to -didSegCtrlValueChanged: (implemented by
// subclasses); NOT added to the view hierarchy — caller places it.
- (UISegmentedControl *)segmentedControlWithItems:(NSArray<__kindof NSString *> *) items;

// Rounded grey button wired to `action`; NOT added to the view hierarchy.
- (UIButton *)buttonWithTitle:(NSString *)title
                       action:(SEL)action;

// Bordered grey button wired to `action`, added to self.view.
- (UIButton *)addButtonWithTitle:(NSString *)title action:(SEL)action;

// Row layouts inside `frame` with 5 pt gaps:
//   addViews:  — equal widths for any number of views
//   addViews2: — 2/3 + 1/3 split (two views)
//   addViews3: — 1/3 + 2/3 split (two views)
//   addViews4: — 1/3 + 1/2 + 1/6 split (three views)
- (void)addViews:(NSArray<__kindof UIView *> *)btns
       withFrame:(CGRect)frame;
- (void)addViews2:(NSArray<__kindof UIView *> *)btns
        withFrame:(CGRect)frame;
- (void)addViews3:(NSArray<__kindof UIView *> *)btns
        withFrame:(CGRect)frame;
- (void)addViews4:(NSArray<__kindof UIView *> *)btns
        withFrame:(CGRect)frame;

// Centered label with translucent grey background, added to self.view.
- (UILabel *)addLable:(NSString*)title;

// Rounded-rect text field pre-filled with `text`, added to self.view.
- (UITextField *)addTextField: (NSString*)text;

// Shows `message` briefly (0.5 s), toast-style.
- (void) toast:(NSString*)message;

@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/BaseViewController.m
================================================
//
// BaseViewController.m
// KSYVideoClipsDemo
//
// Created by iVermisseDich on 2017/2/15.
// Copyright © 2017年 com.ksyun. All rights reserved.
//
#import "BaseViewController.h"
#define kSpace 20
@interface BaseViewController ()
@end
@implementation BaseViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    // No extra setup; subclasses build their own UI.
}

#pragma mark - tool funcs

/// Bordered grey button that is wired to `action` and inserted into the
/// view hierarchy before being returned.
- (UIButton *)addButtonWithTitle:(NSString *)title action:(SEL)action{
    UIButton *btn = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    [btn setTitle:title forState:UIControlStateNormal];
    [btn addTarget:self action:action forControlEvents:UIControlEventTouchUpInside];
    btn.backgroundColor = [UIColor lightGrayColor];
    btn.layer.borderColor = [UIColor blackColor].CGColor;
    btn.layer.borderWidth = 1;
    btn.layer.cornerRadius = 5;
    btn.layer.masksToBounds = YES;
    [self.view addSubview:btn];
    return btn;
}

/// Segmented control whose value changes are routed to
/// -didSegCtrlValueChanged: (provided by subclasses). Not added as a subview.
- (UISegmentedControl *)segmentedControlWithItems: (NSArray<__kindof NSString *> *) items {
    UISegmentedControl *control = [[UISegmentedControl alloc] initWithItems:items];
    control.selectedSegmentIndex = 0;
    control.backgroundColor = [UIColor lightGrayColor];
    control.layer.cornerRadius = 5;
    [control addTarget:self
                action:@selector(didSegCtrlValueChanged:)
      forControlEvents:UIControlEventValueChanged];
    return control;
}

/// Lays the views out in one row inside `frame`: equal widths, 5 pt gap
/// between neighbours and at both ends.
- (void)addViews:(NSArray<__kindof UIView *> *)btns withFrame:(CGRect)frame{
    CGFloat slot = (frame.size.width - (btns.count + 1) * 5) / btns.count;
    CGFloat x = frame.origin.x + 5;
    for (NSUInteger i = 0; i < btns.count; i++) {
        UIView *item = btns[i];
        item.frame = CGRectMake(x, frame.origin.y, slot, frame.size.height);
        [self.view addSubview:item];
        x += slot + 5;
    }
}

/// Two views in a row: first takes 2/3 of the width, second 1/3.
- (void)addViews2:(NSArray<__kindof UIView *> *)btns withFrame:(CGRect)frame{
    CGFloat w = frame.size.width;
    CGFloat h = frame.size.height;
    CGFloat x = frame.origin.x + 5;
    CGFloat y = frame.origin.y;
    btns[0].frame = CGRectMake(x, y, w * 2 / 3 - 10, h);
    [self.view addSubview:btns[0]];
    btns[1].frame = CGRectMake(x + w * 2 / 3, y, w / 3 - 10, h);
    [self.view addSubview:btns[1]];
}

/// Two views in a row: first takes 1/3 of the width, second 2/3.
- (void)addViews3:(NSArray<__kindof UIView *> *)btns withFrame:(CGRect)frame{
    CGFloat w = frame.size.width;
    CGFloat h = frame.size.height;
    CGFloat x = frame.origin.x + 5;
    CGFloat y = frame.origin.y;
    btns[0].frame = CGRectMake(x, y, w * 1 / 3 - 10, h);
    [self.view addSubview:btns[0]];
    btns[1].frame = CGRectMake(x + w * 1 / 3, y, w * 2 / 3 - 10, h);
    [self.view addSubview:btns[1]];
}

/// Three views in a row split 1/3 + 1/2 + 1/6.
- (void)addViews4:(NSArray<__kindof UIView *> *)btns withFrame:(CGRect)frame{
    CGFloat w = frame.size.width;
    CGFloat h = frame.size.height;
    CGFloat x = frame.origin.x + 5;
    CGFloat y = frame.origin.y;
    btns[0].frame = CGRectMake(x, y, w * 1 / 3 - 10, h);
    [self.view addSubview:btns[0]];
    x += w * 1 / 3;
    btns[1].frame = CGRectMake(x, y, w * 1 / 2 - 10, h);
    [self.view addSubview:btns[1]];
    x += w * 1 / 2;
    btns[2].frame = CGRectMake(x, y, w * 1 / 6 - 10, h);
    [self.view addSubview:btns[2]];
}

/// Centered label on a translucent grey background, added to self.view.
- (UILabel *)addLable:(NSString*)title{
    UILabel *label = [[UILabel alloc] init];
    label.text = title;
    label.textAlignment = NSTextAlignmentCenter;
    label.backgroundColor = [UIColor colorWithWhite:0.8 alpha:0.3];
    [self.view addSubview:label];
    return label;
}

/// Rounded-rect text field pre-filled with `text`, added to self.view.
- (UITextField *)addTextField: (NSString*)text{
    UITextField *field = [[UITextField alloc] init];
    field.text = text;
    field.borderStyle = UITextBorderStyleRoundedRect;
    [self.view addSubview:field];
    return field;
}

/// Rounded grey button wired to `action`; caller is responsible for
/// adding it to a view.
- (UIButton *)buttonWithTitle:(NSString *)title action:(SEL)action{
    UIButton *btn = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    [btn setTitle:title forState:UIControlStateNormal];
    [btn addTarget:self
            action:action
  forControlEvents:UIControlEventTouchUpInside];
    btn.backgroundColor = [UIColor lightGrayColor];
    btn.alpha = 0.9;
    btn.layer.cornerRadius = 10;
    btn.clipsToBounds = YES;
    return btn;
}

/// Shows `message` as a button-less alert that auto-dismisses after 0.5 s.
- (void) toast:(NSString*)message{
    UIAlertView *alert = [[UIAlertView alloc] initWithTitle:nil
                                                    message:message
                                                   delegate:nil
                                          cancelButtonTitle:nil
                                          otherButtonTitles:nil, nil];
    [alert show];
    double delay = 0.5; // duration in seconds
    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(delay * NSEC_PER_SEC)),
                   dispatch_get_main_queue(), ^{
        [alert dismissWithClickedButtonIndex:0 animated:YES];
    });
}

@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/DecoderHelperViewController.h
================================================
//
// SettingsViewController.h
// IPGateway
//
// Created by Meng Shengbin on 2/1/12.
// Copyright (c) 2012 Peking University. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "BaseViewController.h"

// Static help screen for the decoder (usage text + GitHub link).
// NOTE(review): the extraction dropped the target of the first #import;
// restored to UIKit per the stock template — confirm.
@interface DecoderHelperViewController : BaseViewController
@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/DecoderHelperViewController.m
================================================
//
// SettingsViewController.m
// IPGateway
//
// Created by Meng Shengbin on 2/1/12.
// Copyright (c) 2012 Peking University. All rights reserved.
//
#import "DecoderHelperViewController.h"
@implementation DecoderHelperViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    [self setupUI];
}

// Builds the static help page: title, decoder usage instructions,
// GitHub URL, and a back button.
- (void)setupUI{
    self.view.backgroundColor = [UIColor whiteColor];
    CGFloat screenW = self.view.frame.size.width;

    UILabel *titleLbl = [self addLable:@"帮助"];
    [self addViews:@[titleLbl] withFrame:CGRectMake(screenW/3, 40, screenW/3, 40)];

    UILabel *usageLbl = [self addLable:@"解码器使用说明:先设置解码器参数(其中选择渲染频率为-1(off)时,是关闭渲染功能),然后选择文件,最后确定即开始解码"];
    usageLbl.numberOfLines = 0;
    usageLbl.textAlignment = NSTextAlignmentLeft;
    [self addViews:@[usageLbl] withFrame:CGRectMake(0, 100, screenW, 40*4)];

    UILabel *repoLbl = [self addLable:@"github: https://github.com/ksvc/ks265codec"];
    repoLbl.numberOfLines = 0;
    repoLbl.textAlignment = NSTextAlignmentLeft;
    [self addViews:@[repoLbl] withFrame:CGRectMake(0, 300, screenW, 40*2)];

    UIButton *backBtn = [self addButtonWithTitle:@"返回" action:@selector(onDone:)];
    [self addViews:@[backBtn] withFrame:CGRectMake(screenW*2/3, 450, screenW/3, 40)];
}

#pragma mark - actions

- (void)onDone:(UIButton *)btn {
    [self dismissViewControllerAnimated:FALSE completion:nil];
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/EncoderHelperViewController.h
================================================
//
// SettingsViewController.h
// IPGateway
//
// Created by Meng Shengbin on 2/1/12.
// Copyright (c) 2012 Peking University. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "BaseViewController.h"

// Static help screen for the encoder (usage text + GitHub link).
// NOTE(review): the extraction dropped the target of the first #import;
// restored to UIKit per the stock template — confirm.
@interface EncoderHelperViewController : BaseViewController
@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/EncoderHelperViewController.m
================================================
//
// SettingsViewController.m
// IPGateway
//
// Created by Meng Shengbin on 2/1/12.
// Copyright (c) 2012 Peking University. All rights reserved.
//
#import "EncoderHelperViewController.h"
@implementation EncoderHelperViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    [self setupUI];
}

// Builds the static help page: title, encoder usage instructions,
// GitHub URL, and a back button.
- (void)setupUI{
    self.view.backgroundColor = [UIColor whiteColor];
    CGFloat screenW = self.view.frame.size.width;

    UILabel *titleLbl = [self addLable:@"帮助"];
    [self addViews:@[titleLbl] withFrame:CGRectMake(screenW/3, 40, screenW/3, 40)];

    UILabel *usageLbl = [self addLable:@"编码器使用说明:先设置编码器参数,然后选择文件,最后确定即开始编码"];
    usageLbl.numberOfLines = 0;
    usageLbl.textAlignment = NSTextAlignmentLeft;
    [self addViews:@[usageLbl] withFrame:CGRectMake(0, 100, screenW, 40*4)];

    UILabel *repoLbl = [self addLable:@"github: https://github.com/ksvc/ks265codec"];
    repoLbl.numberOfLines = 0;
    repoLbl.textAlignment = NSTextAlignmentLeft;
    [self addViews:@[repoLbl] withFrame:CGRectMake(0, 300, screenW, 40*2)];

    UIButton *backBtn = [self addButtonWithTitle:@"返回" action:@selector(onDone:)];
    [self addViews:@[backBtn] withFrame:CGRectMake(screenW*2/3, 450, screenW/3, 40)];
}

#pragma mark - actions

- (void)onDone:(UIButton *)btn {
    [self dismissViewControllerAnimated:FALSE completion:nil];
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/FirstViewController.h
================================================
//
// FirstViewController.h
// KSY265CodecDemo_iOS
//
// Created by 江东 on 17/3/17.
// Copyright © 2017年 江东. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "BaseViewController.h"

// First tab: drives a single encode run (x264 or KSC265) over a selected
// .yuv file and displays the results.
// NOTE(review): the extraction dropped the target of the first #import;
// restored to UIKit per the stock template — confirm.
@interface FirstViewController : BaseViewController
@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/FirstViewController.m
================================================
//
// FirstViewController.m
// KSY265CodecDemo_iOS
//
// Created by 江东 on 17/3/17.
// Copyright © 2017年 江东. All rights reserved.
//
#import "FirstViewController.h"
#import "SettingsEncoderViewController.h"
#import "EncoderHelperViewController.h"
#import "MoviesViewController.h"
#import "MovieEncoder.h"
#import "KSYMovieEncoder.h"
#import "qy265enc.h"
#import "x264.h"
@interface FirstViewController (){
    UITextField *encoderFile;   // file name of the input .yuv (relative to Documents)
    UITextView *infoView;       // scrolling log of encode parameters and results
    SettingsEncoderViewController *setEncoderVC;  // encoder-settings screen
    MoviesViewController *listVC;                 // .yuv file picker
}
// Active encoder for the current run (MovieEncoder for x264, KSYMovieEncoder
// otherwise); reset to nil after each run completes.
@property (nonatomic, retain) MovieEncoder *enc;
@end
@implementation FirstViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    [self setupUI];
    self.enc = nil;
    setEncoderVC = [[SettingsEncoderViewController alloc] initDefaultCfg];
    listVC = [[MoviesViewController alloc] initWithSuffix:@".yuv"];
    // When a file is picked, show its path in the input text field.
    listVC.tableBlock = ^(NSString* filePath){
        NSLog(@"%@", filePath);
        encoderFile.text = filePath;
    };
}

// Copy the bundled sample clips into Documents so they appear in the picker.
-(void)viewDidAppear:(BOOL)animated
{
    [super viewDidAppear:animated];
    [self copyFile2Documents:@"960x540_15"];
    [self copyFile2Documents:@"1280x720_15"];
    [self copyFile2Documents:@"640x480_15"];
}

// Builds the static encoder screen: settings/help buttons, title,
// file-picker row, and a read-only results view.
- (void)setupUI{
    self.view.backgroundColor = [UIColor whiteColor];
    //add set button
    UIButton *btnSet = [self addButtonWithTitle:@"设置" action:@selector(onSetEncoder:)];
    //add help button
    UIButton *btnHelp = [self addButtonWithTitle:@"帮助" action:@selector(onHelp:)];
    //add encoder text
    UILabel *lblEncoder = [self addLable:@"KSC265编码器"];
    [self addViews:@[btnSet, lblEncoder, btnHelp] withFrame:CGRectMake(0, 40, self.view.frame.size.width, 40)];
    //add browse file button
    UIButton *selectBtn = [self addButtonWithTitle:@"浏览(.yuv)文件" action:@selector(didClickSelectBtn:)];
    [self addViews:@[selectBtn] withFrame:CGRectMake(0, 120, self.view.frame.size.width/3, 40)];
    //input encoder file
    encoderFile = [self addTextField:NULL ];
    UIButton *doneBtn = [self addButtonWithTitle:@"确定" action:@selector(onDone:)];
    [self addViews2:@[encoderFile,doneBtn] withFrame:CGRectMake(0, 180, self.view.frame.size.width, 40)];
    // info
    infoView = [[UITextView alloc] init];
    infoView.editable = NO;
    infoView.textAlignment = NSTextAlignmentLeft;
    infoView.backgroundColor = [UIColor colorWithWhite:0.8 alpha:0.3];
    infoView.font = [UIFont systemFontOfSize:13];
    infoView.layer.cornerRadius = 2;
    infoView.clipsToBounds = YES;
    infoView.layoutManager.allowsNonContiguousLayout = NO;
    [self addViews:@[infoView] withFrame:CGRectMake(0, 260, self.view.frame.size.width, self.view.frame.size.height- 260 - 20)];
}

// Runs one encode of `filePath` with x264 or KSC265 (chosen via the
// "encoder" user default), then appends a summary of the encode
// parameters and results to infoView.
- (void)startEncoder:(NSString *) filePath
{
    NSString *encoder = [[NSUserDefaults standardUserDefaults] valueForKey:@"encoder"];
    if (self.enc == nil) {
        if ([encoder isEqualToString:@"x264"]) {
            self.enc = [[MovieEncoder alloc] init];
            NSString* string = [NSString stringWithFormat:@"%s" ,X264_POINTVER];
            [[NSUserDefaults standardUserDefaults] setValue:string forKey:@"version"];
        }
        else {
            // NOTE(review): KSYMovieEncoder is assigned to a MovieEncoder
            // property — presumably a subclass; confirm.
            self.enc = [[KSYMovieEncoder alloc] init];
            NSString* string = [NSString stringWithFormat:@"%s" , strLibQy265Version];
            [[NSUserDefaults standardUserDefaults] setValue:string forKey:@"version"];
        }
    }
    int ret = [self.enc openMovie:filePath];
    if(ret != 0) {
        UIAlertView * alert = [[UIAlertView alloc] initWithTitle:@"Message" message:@"Get movie data failed! Please check your source or try again." delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
        [alert show];
        return ;
    } else {
        int ret = [self.enc encoder];
        if(ret != 0) {
            UIAlertView * alert = [[UIAlertView alloc] initWithTitle:@"Message" message:@"Can't encode this yuv! Please check its format." delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
            [alert show];
            return ;
        }
        // Pull the run's settings back out of user defaults for display.
        NSString *encoder = [[NSUserDefaults standardUserDefaults] valueForKey:@"encoder"];
        NSString *fps = [[NSUserDefaults standardUserDefaults] valueForKey:@"fps"];
        NSString *bitRate = [[NSUserDefaults standardUserDefaults] valueForKey:@"bitRate"];
        NSString *threads = [[NSUserDefaults standardUserDefaults] valueForKey:@"threads"];
        NSString *profile = [[NSUserDefaults standardUserDefaults] valueForKey:@"profile"];
        NSString *delayed = [[NSUserDefaults standardUserDefaults] valueForKey:@"delayed"];
        NSString *version = [[NSUserDefaults standardUserDefaults] valueForKey:@"version"];
        NSString *psnr = [[NSUserDefaults standardUserDefaults] valueForKey:@"psnr"];
        NSFileManager *manager = [NSFileManager defaultManager];
        NSDictionary *outDic = [manager attributesOfItemAtPath:self.enc.out_file_string error:nil];
        NSDictionary *inDic = [manager attributesOfItemAtPath:filePath error:nil];
        unsigned long long outLength = outDic.fileSize;
        unsigned long long inLength = inDic.fileSize;
        // NOTE(review): inLength/outLength below is an integer ratio and
        // traps if the output file is empty (outLength == 0); the
        // frameNum/[fps floatValue] terms assume "fps" parses to a
        // non-zero float — confirm upstream guarantees.
        if ([encoder isEqualToString:@"x264"]) {
            NSString *delayShow;
            if ([delayed isEqualToString:@"zerolatency"]) {
                delayShow = @"--bframes 0 --tune zerolatency";
            }
            else if([delayed isEqualToString:@"livestreaming"]){
                delayShow = @"--bframes 3";
            }
            else{
                delayShow = @"--bframes 7";
            }
            infoView.text = [NSString stringWithFormat:@"%@\n编码器版本:%@\n编码参数:%@ --preset %@ %@ --input-res %ldx%ld --fps %@ --threads %@ --bitrate %@ -o %@ %@\n\n编码时间:%.2lf s\n编码帧数:%ld\n编码速度:%.2lf f/s\n压缩比:%llu\nPSNR:%.2lf\n\n视频信息\n码率:%.2lf kbps\n分辨率:%@\n帧率:%@\n文件总时长:%.2lf s\n\n\n",
                             infoView.text,
                             version,
                             encoder,
                             profile,
                             delayShow,
                             self.enc.width,
                             self.enc.height,
                             fps,
                             threads,
                             bitRate,
                             [self.enc.out_file_string lastPathComponent],
                             encoderFile.text,
                             self.enc.real_time,
                             self.enc.frameNum,
                             self.enc.realFPS,
                             inLength/outLength,
                             self.enc.avg_psnr,
                             outLength*8.0/(1000.0*(self.enc.frameNum/[fps floatValue])),
                             NSStringFromCGSize(CGSizeMake(self.enc.width, self.enc.height)),
                             fps,
                             self.enc.frameNum/[fps floatValue]];
        }
        else{
            infoView.text = [NSString stringWithFormat:@"%@\n编码器版本:%@\n编码参数:%@ -i %@ -preset %@ -latency %@ -wdt %ld -hgt %ld -fr %@ -threads %@ -br %@ -b %@\n\n编码时间:%.2lf s\n编码帧数:%ld\n编码速度:%.2lf f/s\n压缩比:%llu\nPSNR:%@\n\n视频信息\n码率:%.2lf kbps\n分辨率:%@\n帧率:%@\n文件总时长:%.2lf s\n\n\n",
                             infoView.text,
                             version,
                             encoder,
                             encoderFile.text,
                             profile,
                             delayed,
                             self.enc.width,
                             self.enc.height,
                             fps,
                             threads,
                             bitRate,
                             [self.enc.out_file_string lastPathComponent],
                             self.enc.real_time,
                             self.enc.frameNum,
                             self.enc.realFPS,
                             inLength/outLength,
                             psnr,
                             outLength*8.0/(1000.0*(self.enc.frameNum/[fps floatValue])),
                             NSStringFromCGSize(CGSizeMake(self.enc.width, self.enc.height)),
                             fps,
                             self.enc.frameNum/[fps floatValue]];
        }
        // Keep the latest results visible.
        [infoView scrollRangeToVisible:NSMakeRange(infoView.text.length, 1)];
    }
    self.enc = nil;
}

#pragma mark - actions

- (void)onSetEncoder:(UIButton *)btn {
    [self presentViewController:setEncoderVC animated:true completion:nil];
}

- (void)onHelp:(UIButton *)btn {
    EncoderHelperViewController *encoderHelperVC = [[EncoderHelperViewController alloc] init];
    [self presentViewController:encoderHelperVC animated:true completion:nil];
}

- (void)didClickSelectBtn:(UIButton *)btn{
    UINavigationController *naVC = [[UINavigationController alloc]initWithRootViewController: listVC];
    [self presentViewController:naVC animated:YES completion:nil];
}

// Resolve the typed file name against Documents and kick off the encode.
- (void)onDone:(UIButton *)btn {
    [encoderFile resignFirstResponder];
    NSString *dir = [NSHomeDirectory() stringByAppendingString:@"/Documents/"];
    NSString *encFile = [dir stringByAppendingPathComponent:encoderFile.text];
    [self startEncoder:encFile];
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

// Copies the named bundled .yuv resource into Documents and returns the
// destination path.
-(NSString*) copyFile2Documents:(NSString*)fileName
{
    NSFileManager*fileManager =[NSFileManager defaultManager];
    NSError*error;
    NSArray*paths =NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,NSUserDomainMask, YES);
    NSString*documentsDirectory =[paths objectAtIndex:0];
    NSString*destPath =[documentsDirectory stringByAppendingPathComponent:fileName];
    destPath = [destPath stringByAppendingString:@".yuv"];
    // Copy only when the file is not already present in Documents.
    if(![fileManager fileExistsAtPath:destPath]){
        NSString* sourcePath =[[NSBundle mainBundle] pathForResource:fileName ofType:@"yuv"];
        [fileManager copyItemAtPath:sourcePath toPath:destPath error:&error];
    }
    return destPath;
}

@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/GLRenderer.h
================================================
//
// GLRenderer.h
// HEVDecoder
//
// Created by Shengbin Meng on 11/21/13.
// Copyright (c) 2013 Peking University. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <QuartzCore/QuartzCore.h>

// Callback fired once a frame's planes have been uploaded to GL textures,
// so the producer may reuse the frame buffer.
@protocol RenderStateListener
- (void) bufferDone;
@end

// OpenGL ES 2 renderer: draws YUV420 frames onto a CAEAGLLayer-backed view.
// NOTE(review): the extraction dropped the targets of both #imports and
// the protocol qualifier on the listener parameter; restored from the
// types used in this header — confirm against the original project.
@interface GLRenderer : NSObject
- (void) setRenderStateListener:(id<RenderStateListener>) lis;
- (int) resizeFromLayer:(CAEAGLLayer *)layer;
- (void) render: (void*) data;
@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/GLRenderer.m
================================================
//
// GLRenderer.m
// HEVDecoder
//
// Created by Shengbin Meng on 11/21/13.
// Copyright (c) 2013 Peking University. All rights reserved.
//
#import "GLRenderer.h"
#import "MoviePlayer.h"
// NOTE(review): the extraction dropped the angle-bracket target of this
// import; the ES2 API used throughout this file (glCreateShader, VBO-less
// glVertexAttribPointer, ...) lives in this header — confirm.
#import <OpenGLES/ES2/gl.h>
// Compile-time switch for verbose per-frame logging.
#define ENABLE_LOGD 0
#if ENABLE_LOGD
#define LOGD(...) printf(__VA_ARGS__)
#else
#define LOGD(...)
#endif
#define LOGI LOGD
#define LOGE LOGD
// Pass-through vertex shader: forwards the quad position and hands the
// texture coordinate to the fragment stage.
static const char gVertexShader[] =
    "attribute vec4 a_position;\n"
    "attribute vec2 a_texCoord;\n"
    "varying vec2 v_tc;\n"
    "void main()\n"
    "{\n"
    " gl_Position = a_position;\n"
    " v_tc = a_texCoord;\n"
    "}\n";

// Fragment shader: samples the Y/U/V planes as three luminance textures
// and converts to RGB with full-range YUV->RGB coefficients.
static const char gFragmentShader[] =
    "varying lowp vec2 v_tc;\n"
    "uniform sampler2D u_texY;\n"
    "uniform sampler2D u_texU;\n"
    "uniform sampler2D u_texV;\n"
    "void main(void)\n"
    "{\n"
    "mediump vec3 yuv;\n"
    "lowp vec3 rgb;\n"
    "yuv.x = texture2D(u_texY, v_tc).r;\n"
    "yuv.y = texture2D(u_texU, v_tc).r - 0.5;\n"
    "yuv.z = texture2D(u_texV, v_tc).r - 0.5;\n"
    "rgb = mat3( 1, 1, 1,\n"
    "0, -0.39465, 2.03211,\n"
    "1.13983, -0.58060, 0) * yuv;\n"
    "gl_FragColor = vec4(rgb, 1);\n"
    "}\n";
// Logs one OpenGL string attribute (version, vendor, renderer, ...).
// Fixed: a stray ';' inside the LOGI argument list made this fail to
// compile whenever ENABLE_LOGD was set to 1 (it was only harmless while
// the macro expanded to nothing).
static void printGLString(const char *name, GLenum s) {
    LOGI("GL %s = %s\n", name, (const char *) glGetString(s));
}
// Compiles a single shader of the given type from source.
// Returns the shader id, or 0 on failure. The failed shader object is
// always deleted — even when the driver provides no info log.
static GLuint loadShader(GLenum shaderType, const char* pSource) {
    GLuint shader = glCreateShader(shaderType);
    if (shader) {
        glShaderSource(shader, 1, &pSource, NULL);
        glCompileShader(shader);
        GLint compiled = 0;
        glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
        if (!compiled) {
            GLint infoLen = 0;
            glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
            if (infoLen) {
                char* buf = (char*) malloc(infoLen);
                if (buf) {
                    glGetShaderInfoLog(shader, infoLen, NULL, buf);
                    LOGE("Could not compile shader %d:\n%s\n",
                         shaderType, buf);
                    free(buf);
                }
            }
            // Fixed: deletion used to happen only inside the `if (infoLen)`
            // branch, so a failed compile with an empty info log returned a
            // broken shader id as if compilation had succeeded.
            glDeleteShader(shader);
            shader = 0;
        }
    }
    return shader;
}
// Compiles both shaders and links them into a program.
// Returns the program id, or 0 on failure.
// Fixed two leaks: the vertex shader leaked when the fragment shader
// failed to compile, and both shader objects leaked for the lifetime of a
// successfully linked program.
static GLuint createProgram(const char* pVertexSource, const char* pFragmentSource) {
    GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource);
    if (!vertexShader) {
        return 0;
    }
    GLuint fragmentShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource);
    if (!fragmentShader) {
        glDeleteShader(vertexShader);
        return 0;
    }
    GLuint program = glCreateProgram();
    if (program) {
        glAttachShader(program, vertexShader);
        glAttachShader(program, fragmentShader);
        glLinkProgram(program);
        GLint linkStatus = GL_FALSE;
        glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
        if (linkStatus != GL_TRUE) {
            GLint bufLength = 0;
            glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
            if (bufLength) {
                char* buf = (char*) malloc(bufLength);
                if (buf) {
                    glGetProgramInfoLog(program, bufLength, NULL, buf);
                    LOGE("Could not link program:\n%s\n", buf);
                    free(buf);
                }
            }
            glDeleteProgram(program);
            program = 0;
        }
    }
    // Shaders attached to a program are reference-counted; flagging them
    // for deletion here releases them once the program itself goes away
    // (or immediately, when linking failed).
    glDeleteShader(vertexShader);
    glDeleteShader(fragmentShader);
    return program;
}
// Full-screen quad (triangle strip, x/y/z per vertex). render: rescales
// these values in place to preserve the video's aspect ratio.
static GLfloat vertexPositions[] = {
    -1.0, -1.0, 0.0,
    1.0, -1.0, 0.0,
    -1.0, 1.0, 0.0,
    1.0, 1.0, 0.0
};
// Texture coordinates for the quad; the x values are shrunk in render:
// to crop the line-stride padding to the right of the image.
static GLfloat textureCoords[] = {
    0.0, 1.0,
    1.0, 1.0,
    0.0, 0.0,
    1.0, 0.0
};
@implementation GLRenderer
{
EAGLContext *context;
GLint backingWidth, backingHeight;
GLuint defaultFramebuffer, colorRenderbuffer;
GLuint gProgram;
GLuint gTexIds[3];
GLuint gAttribPosition;
GLuint gAttribTexCoord;
GLuint gUniformTexY;
GLuint gUniformTexU;
GLuint gUniformTexV;
id listener;
int needSetup;
}
- (id)init
{
self = [super init];
if (self == nil) {
return nil;
}
context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
if (!context || ![EAGLContext setCurrentContext:context]) {
return nil;
}
printGLString("Version", GL_VERSION);
printGLString("Vendor", GL_VENDOR);
printGLString("Renderer", GL_RENDERER);
printGLString("Extensions", GL_EXTENSIONS);
// create and use our program
gProgram = createProgram(gVertexShader, gFragmentShader);
if (!gProgram) {
LOGE("Could not create program. \n");
return nil;
}
glUseProgram(gProgram);
// get the location of attributes in our shader
gAttribPosition = glGetAttribLocation(gProgram, "a_position");
gAttribTexCoord = glGetAttribLocation(gProgram, "a_texCoord");
// get the location of uniforms in our shader
gUniformTexY = glGetUniformLocation(gProgram, "u_texY");
gUniformTexU = glGetUniformLocation(gProgram, "u_texU");
gUniformTexV = glGetUniformLocation(gProgram, "u_texV");
// can enable only once
glEnableVertexAttribArray(gAttribPosition);
glEnableVertexAttribArray(gAttribTexCoord);
// set the value of uniforms (uniforms all have constant value)
glUniform1i(gUniformTexY, 0);
glUniform1i(gUniformTexU, 1);
glUniform1i(gUniformTexV, 2);
// generate and set parameters for the textures
glEnable(GL_TEXTURE_2D);
glGenTextures(3, gTexIds);
for (int i = 0; i < 3; i++) {
glActiveTexture(GL_TEXTURE0 + i);
glBindTexture ( GL_TEXTURE_2D, gTexIds[i] );
glTexParameteri ( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
glTexParameteri ( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
glTexParameteri ( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
glTexParameteri ( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE );
}
// genarate frame and render buffers
glGenFramebuffers(1, &defaultFramebuffer);
glGenRenderbuffers(1, &colorRenderbuffer);
glBindFramebuffer(GL_FRAMEBUFFER, defaultFramebuffer);
glBindRenderbuffer(GL_RENDERBUFFER, colorRenderbuffer);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, colorRenderbuffer);
needSetup = 0;
return self;
}
- (void) setRenderStateListener:(id) lis
{
listener = lis;
}
- (int) resizeFromLayer:(CAEAGLLayer *)layer
{
// Allocate color buffer backing based on the current layer size
glBindRenderbuffer(GL_RENDERBUFFER, colorRenderbuffer);
[context renderbufferStorage:GL_RENDERBUFFER fromDrawable:layer];
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &backingWidth);
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &backingHeight);
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
LOGI("failed to make complete framebuffer object %x \n", glCheckFramebufferStatus(GL_FRAMEBUFFER));
return -1;
}
needSetup = 1;
return 0;
}
// Upload one decoded frame (a struct VideoFrame with three I420 planes) as
// three single-channel textures and draw it as a textured quad, then present
// the renderbuffer.  `data` must point at a struct VideoFrame.
- (void) render: (void*) data {
    struct VideoFrame *gVF = (struct VideoFrame*)data;
    if (needSetup) {
        // First frame after a resize: fit the frame into the backing store
        // while preserving aspect ratio (letterbox/pillarbox).
        [EAGLContext setCurrentContext:context];
        GLuint width = gVF->width;
        GLuint height = gVF->height;
        float aspect = (float)width / (float)height;
        if(aspect >= (float)backingWidth/(float)backingHeight) {
            // fill screen in width, and leave space in Y
            float scale = (float)backingWidth / (float) width;
            float maxY = ((float)height * scale) / (float) backingHeight;
            vertexPositions[1] = vertexPositions[4] = - maxY;
            vertexPositions[7] = vertexPositions[10] = maxY;
        } else {
            // fill screen in height, and leave space in X
            float scale = (float) backingHeight / (float) height;
            float maxX = ((float) width * scale) / (float) backingWidth;
            vertexPositions[0] = vertexPositions[6] = - maxX;
            vertexPositions[3] = vertexPositions[9] = maxX;
        }
        // modify the texture coordinates: the Y texture is linesize_y texels
        // wide but only `width` of them are visible, so clamp U accordingly.
        float texCoord = ((float)width) / gVF->linesize_y;
        textureCoords[2] = textureCoords[6] = texCoord;
        // set the value of attributes
        glVertexAttribPointer(gAttribPosition, 3, GL_FLOAT, 0, 0, vertexPositions);
        glVertexAttribPointer(gAttribTexCoord, 2, GL_FLOAT, 0, 0, textureCoords);
        glViewport(0, 0, backingWidth, backingHeight);
        LOGI("setup finished\n");
        needSetup = 0;
    }
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);
    LOGD("before upload: %u (%f) \n", getms(), gVF->pts);
    // upload textures: Y on unit 0, U on unit 1, V on unit 2, each row padded
    // to the plane's stride.
    glActiveTexture(GL_TEXTURE0 + 0);
    glTexImage2D ( GL_TEXTURE_2D, 0, GL_LUMINANCE, gVF->linesize_y, gVF->height, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, gVF->yuv_data[0]);
    glActiveTexture(GL_TEXTURE0 + 1);
    glTexImage2D ( GL_TEXTURE_2D, 0, GL_LUMINANCE, gVF->linesize_uv, gVF->height/2, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, gVF->yuv_data[1]);
    glActiveTexture(GL_TEXTURE0 + 2);
    glTexImage2D ( GL_TEXTURE_2D, 0, GL_LUMINANCE, gVF->linesize_uv, gVF->height/2, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, gVF->yuv_data[2]);
    // Pixels have been copied into GL; tell the producer the buffers are free.
    [listener bufferDone];
    LOGD("after upload: %u (%f) \n", getms(), gVF->pts);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    LOGD("after glDrawArrays: %u (%f) \n", getms(), gVF->pts);
    [context presentRenderbuffer:GL_RENDERBUFFER];
}
// Release all GL objects owned by this renderer and drop the context.
- (void)dealloc
{
    // Delete the framebuffer and renderbuffer if they were ever created.
    if (defaultFramebuffer) {
        glDeleteFramebuffers(1, &defaultFramebuffer);
        defaultFramebuffer = 0;
    }
    if (colorRenderbuffer) {
        glDeleteRenderbuffers(1, &colorRenderbuffer);
        colorRenderbuffer = 0;
    }
    // Detach our context if it is still the thread's current one.
    if (context == [EAGLContext currentContext]) {
        [EAGLContext setCurrentContext:nil];
    }
    context = nil;
}
@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/GLView.h
================================================
//
// GLView.h
// HEVDecoder
//
// Created by Shengbin Meng on 11/21/13.
// Copyright (c) 2013 Peking University. All rights reserved.
//
#import
#import "GLRenderer.h"
// UIView backed by a CAEAGLLayer; forwards layout changes to its GLRenderer
// so the renderbuffer storage tracks the view's size.
@interface GLView : UIView
// Renderer that owns the GL context and draws decoded frames into our layer.
@property (nonatomic, retain) GLRenderer *renderer;
@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/GLView.m
================================================
//
// GLView.m
// HEVDecoder
//
// Created by Shengbin Meng on 11/21/13.
// Copyright (c) 2013 Peking University. All rights reserved.
//
#import "GLView.h"
@implementation GLView
// Back this view with a CAEAGLLayer so GL content can be rendered into it.
+ (Class)layerClass
{
    return CAEAGLLayer.class;
}
// this is called when the view is loaded from xib files
// Called when the view is loaded from a storyboard/xib: configure the EAGL
// layer (opaque, non-retained RGBA8 drawable) and create the renderer.
- (id)initWithCoder:(NSCoder *)coder
{
    if ((self = [super initWithCoder:coder]) != nil) {
        CAEAGLLayer *glLayer = (CAEAGLLayer *)self.layer;
        glLayer.opaque = TRUE;
        glLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
                                      [NSNumber numberWithBool:FALSE], kEAGLDrawablePropertyRetainedBacking,
                                      kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat,
                                      nil];
        self.renderer = [[GLRenderer alloc] init];
        if (self.renderer == nil) {
            // Without a renderer the view is useless; fail initialization.
            return nil;
        }
    }
    return self;
}
// Bounds changed: rebuild the renderbuffer storage from the layer's new size.
- (void)layoutSubviews
{
    CAEAGLLayer *glLayer = (CAEAGLLayer *)self.layer;
    [self.renderer resizeFromLayer:glLayer];
    [super layoutSubviews];
}
@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/Info.plist
================================================
CFBundleDevelopmentRegion
en
CFBundleExecutable
$(EXECUTABLE_NAME)
CFBundleIdentifier
$(PRODUCT_BUNDLE_IDENTIFIER)
CFBundleInfoDictionaryVersion
6.0
CFBundleName
$(PRODUCT_NAME)
CFBundlePackageType
APPL
CFBundleShortVersionString
1.0
CFBundleVersion
1
LSRequiresIPhoneOS
UIFileSharingEnabled
UILaunchStoryboardName
LaunchScreen
UIMainStoryboardFile
Main
UIRequiredDeviceCapabilities
armv7
UIStatusBarTintParameters
UINavigationBar
Style
UIBarStyleDefault
Translucent
UISupportedInterfaceOrientations
UIInterfaceOrientationPortrait
UIInterfaceOrientationLandscapeLeft
UIInterfaceOrientationLandscapeRight
UISupportedInterfaceOrientations~ipad
UIInterfaceOrientationPortrait
UIInterfaceOrientationPortraitUpsideDown
UIInterfaceOrientationLandscapeLeft
UIInterfaceOrientationLandscapeRight
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/KSYMovieEncoder.h
================================================
//
// MoviePlayer.h
// HEVDecoder
//
// Created by Shengbin Meng on 13-2-25.
// Copyright (c) 2013 Peking University. All rights reserved.
//
#import
// Wrapper around the QY265 (KSC265) encoder: reads a raw I420 YUV file and
// writes the encoded bitstream to "<input>.265" next to it.
@interface KSYMovieEncoder : NSObject
@property NSInteger width;            // picture width, filled in by -encoder
@property NSInteger height;           // picture height, filled in by -encoder
@property NSInteger frameNum;         // number of frames encoded
@property float realFPS;              // wall-clock encoding speed (frames/s)
@property float real_time;            // wall-clock seconds spent encoding
@property NSString *out_file_string;  // path of the produced .265 file
- (int) openMovie:(NSString*) path;   // open the input YUV; 0 ok, -1 error
- (int) encoder;                      // run the full encode; 0 ok, -1 error
@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/KSYMovieEncoder.m
================================================
//
// MoviePlayer.m
// HEVDecoder
//
// Created by Shengbin Meng on 13-2-25.
// Copyright (c) 2013 Peking University. All rights reserved.
//
#import "KSYMovieEncoder.h"
#include
#include
#include
#include
#include "qy265def.h"
// Log callback installed into the QY265 encoder.  Forwards each encoder log
// line to NSLog; when a line contains at least four "<digits>.<digits>"
// tokens, tokens 1..3 are taken as Y/U/V PSNR, combined as (6*Y+U+V)/8, and
// stored in NSUserDefaults under the key "psnr".
// Fixes vs. original: stray ';' after the function body removed; the
// strncmp(msg, "\n", sizeof("\n")) — whose length counted the NUL, making it
// an exact-match compare in disguise — replaced by strcmp; NULL/nil guards
// added before the regex work.
void logPrint(const char* msg){
    // Ignore NULL pointers and bare newline messages.
    if (NULL == msg || 0 == strcmp(msg, "\n")) {
        return;
    }
    NSString *message = [[NSString alloc] initWithUTF8String:msg];
    if (message == nil) {
        return; // not valid UTF-8; nothing to parse
    }
    NSLog(@"message:%@", message);
    // Match every decimal number of the form "<digits>.<digits>".
    NSString *regulaStr = @"\\d+\\.\\d+";
    NSRegularExpression *regex =
        [NSRegularExpression regularExpressionWithPattern:regulaStr
                                                  options:NSRegularExpressionCaseInsensitive
                                                    error:nil];
    NSArray *arrayOfAllMatches =
        [regex matchesInString:message options:0 range:NSMakeRange(0, [message length])];
    if (arrayOfAllMatches.count >= 4) {
        float Y_PSNR = [[message substringWithRange:((NSTextCheckingResult *)arrayOfAllMatches[1]).range] floatValue];
        float U_PSNR = [[message substringWithRange:((NSTextCheckingResult *)arrayOfAllMatches[2]).range] floatValue];
        float V_PSNR = [[message substringWithRange:((NSTextCheckingResult *)arrayOfAllMatches[3]).range] floatValue];
        // 6:1:1 luma-weighted combination of the three plane PSNRs.
        float PSNR = (6 * Y_PSNR + U_PSNR + V_PSNR) / 8;
        NSString *stringPSNR = [NSString stringWithFormat:@"%.2f", PSNR];
        [[NSUserDefaults standardUserDefaults] setValue:stringPSNR forKey:@"psnr"];
    }
}
@implementation KSYMovieEncoder
{
NSString *moviePath;
FILE *in_file;
}
// Designated initializer: install the PSNR-parsing log callback into the
// QY265 library.  The original registered the callback even when [super init]
// returned nil; guard with the standard `if (self)` idiom.
- (id) init
{
    self = [super init];
    if (self) {
        QY265SetLogPrintf(logPrint);
    }
    return self;
}
// Open the raw YUV input file at `path` and keep the FILE* in `in_file`.
// Returns 0 on success, -1 when the file cannot be opened.
- (int) openMovie:(NSString*) path
{
    moviePath = path;
    const char *cPath = [moviePath UTF8String];
    in_file = fopen(cPath, "rb");
    if (in_file == NULL) {
        printf("can not open input file '%s'!\n", cPath);
        return -1;
    }
    return 0;
}
// Encode the previously opened raw I420 file with the QY265 encoder, using
// resolution/fps/bitrate/threads/profile/delay settings from NSUserDefaults.
// The bitstream is written next to the input as "<input>.265".  On success
// fills width/height/frameNum/realFPS/real_time and returns 0; -1 on error.
// Fixes vs. original: "&param" (garbled to "¶m"), `out_file` was read
// uninitialized when no output path was built, the YUV buffer malloc was
// unchecked, and the fail path leaked the encoder handle and YUV buffer.
- (int) encoder
{
    QY265EncConfig param;
    QY265YUV yuv;
    QY265Picture pic;
    QY265Picture pic_out;
    QY265Nal *nal;
    void *h = NULL;
    int i_frame = 0;
    int i_frame_size;
    int i_nal;
    clock_t clock_start, clock_end, clock_used;
    struct timeval tv_start, tv_end;
    double real_time;
    int64_t ms_used;
    FILE *out_file = NULL;   // was uninitialized: fclose()/fwrite() on garbage
    int errorCode;
    yuv.pData[0] = NULL;     // lets the fail path free() unconditionally
    _out_file_string = [NSString stringWithFormat:@"%@.265", moviePath];
    if ( NULL != _out_file_string ) {
        out_file = fopen([_out_file_string UTF8String], "wb");
        if ( NULL == out_file ) {
            perror("open output file");
            fclose(in_file);
            return -1;
        }
    }
    // Pull the user-chosen encoder settings.
    NSUserDefaults *prefs = [NSUserDefaults standardUserDefaults];
    NSString *resolution = [prefs valueForKey:@"resolution"];
    NSArray *arrayofRes = [resolution componentsSeparatedByString:@"*"];
    NSString *fps = [prefs valueForKey:@"fps"];
    NSString *bitRate = [prefs valueForKey:@"bitRate"];
    NSString *threads = [prefs valueForKey:@"threads"];
    NSString *profile = [prefs valueForKey:@"profile"];
    NSString *delayed = [prefs valueForKey:@"delayed"];
    /* Get default params for preset/tuning */
    if( QY265ConfigDefaultPreset( &param, [profile UTF8String], NULL, [delayed UTF8String]) < 0 )
        goto fail;
    param.picWidth = [arrayofRes[0] intValue];
    param.picHeight = [arrayofRes[1] intValue];
    param.threads = [threads intValue];
    param.frameRate = [fps floatValue];
    if([bitRate intValue])
        param.bitrateInkbps = [bitRate intValue];
    param.calcPsnr = 1;
    /* One contiguous I420 buffer: Y plane followed by U and V. */
    yuv.pData[0] = (unsigned char *)malloc(param.picWidth * param.picHeight * 3/2);
    if ( NULL == yuv.pData[0] )
        goto fail;
    yuv.pData[1] = yuv.pData[0] + param.picWidth * param.picHeight;
    yuv.pData[2] = yuv.pData[0] + param.picWidth * param.picHeight * 5/4;
    yuv.iWidth = param.picWidth;
    yuv.iHeight = param.picHeight;
    yuv.iStride[0] = yuv.iWidth;
    yuv.iStride[1] = yuv.iStride[2] = yuv.iWidth/2;
    h = QY265EncoderOpen( &param, &errorCode );
    if( !h )
        goto fail;
    pic.yuv = &yuv;
    memset(&pic_out,0,sizeof(pic_out));
    int luma_size = param.picWidth * param.picHeight;
    int chroma_size = luma_size / 4;
    gettimeofday(&tv_start, NULL);
    clock_start = clock();
    /* Encode frames until the input runs out */
    for( ;; i_frame++ )
    {
        /* Read input frame */
        if( fread( pic.yuv->pData[0], 1, luma_size, in_file ) != luma_size )
            break;
        if( fread( pic.yuv->pData[1], 1, chroma_size, in_file ) != chroma_size )
            break;
        if( fread( pic.yuv->pData[2], 1, chroma_size, in_file ) != chroma_size )
            break;
        pic.pts = i_frame;
        i_frame_size = QY265EncoderEncodeFrame( h, &nal, &i_nal, &pic, &pic_out, 0 );
        if( i_frame_size < 0 )
            goto fail;
        for(int i = 0; i < i_nal; i++){
            if( !fwrite( nal[i].pPayload, nal[i].iSize, 1, out_file ) )
                goto fail;
        }
    }
    /* Flush delayed frames */
    while( QY265EncoderDelayedFrames( h ) )
    {
        i_frame_size = QY265EncoderEncodeFrame( h, &nal, &i_nal, NULL, &pic_out, 0 );
        if( i_frame_size < 0 )
            goto fail;
        for(int i = 0; i < i_nal; i++){
            if( !fwrite( nal[i].pPayload, nal[i].iSize, 1, out_file ) )
                goto fail;
        }
    }
    clock_end = clock();
    gettimeofday(&tv_end, NULL);
    clock_used = clock_end - clock_start;
    ms_used = (int64_t)(clock_used * 1000.0 / CLOCKS_PER_SEC);
    real_time = (tv_end.tv_sec + (tv_end.tv_usec / 1000000.0)) - (tv_start.tv_sec + (tv_start.tv_usec / 1000000.0));
    float realFPS = i_frame / real_time;
    printf("%d frame encoded\n"
           "\ttime\tfps\n"
           "CPU\t%lldms\t%.2f\n"
           "Real\t%.3fs\t%.2f.\n",
           i_frame,
           ms_used, i_frame * 1000.0 / ms_used,
           real_time, realFPS);
    // Publish results for the UI.
    self.width = param.picWidth;
    self.height = param.picHeight;
    self.frameNum = i_frame;
    self.realFPS = realFPS;
    self.real_time = real_time;
    QY265EncoderClose( h );
    free(yuv.pData[0]);
    fclose(in_file);
    if ( out_file )
        fclose(out_file);
    return 0;
fail:
    /* Release everything acquired so far; h/yuv/out_file may be NULL here. */
    if ( h )
        QY265EncoderClose( h );
    free(yuv.pData[0]);
    fclose(in_file);
    if ( out_file )
        fclose(out_file);
    return -1;
}
@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/KSYMoviePlayer.h
================================================
//
// MoviePlayer.h
// HEVDecoder
//
// Created by Shengbin Meng on 13-2-25.
// Copyright (c) 2013 Peking University. All rights reserved.
//
#import
#import "GLRenderer.h"
#import "GLView.h"
#import "MoviePlayer.h"
// Plays a raw HEVC (.265) bitstream with the QY265 (KSC265) decoder and
// renders the decoded pictures through a GLRenderer.
@interface KSYMoviePlayer : NSObject
@property (nonatomic, retain) GLRenderer *renderer;   // GL output target
@property NSInteger width;            // picture width, set when decoding ends
@property NSInteger height;           // picture height, set when decoding ends
@property NSInteger frameNum;         // number of frames decoded
@property float realFPS;              // wall-clock decoding speed (frames/s)
@property float real_time;            // wall-clock seconds spent decoding
@property bool decodeEnd;             // set when the decode thread finishes
@property NSString *out_file_string;  // path of the optional YUV dump file
- (int) openMovie:(NSString*) path;   // remember and verify the input path
- (int) play;                         // prepare decoder, spawn decode thread
- (int) stop;                         // ask decode/render loops to exit
- (int)test:(int) thread_num;         // stub; always returns 0
@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/KSYMoviePlayer.m
================================================
//
// MoviePlayer.m
// HEVDecoder
//
// Created by Shengbin Meng on 13-2-25.
// Copyright (c) 2013 Peking University. All rights reserved.
//
#import "KSYMoviePlayer.h"
#import "GLRenderer.h"
#include "lenthevcdec.h"
#include
#include
#include "qy265dec.h"
#define AU_COUNT_MAX (1024 * 256)
#define AU_BUF_SIZE_MAX (1024 * 1024 * 128)
static inline int next_p2(int a) {
int rval=1;
while(rvalpts - timePassed;
if (delay > 0) {
usleep(delay);
}
gettimeofday(&timeNow, NULL);
double tnow = timeNow.tv_sec + (timeNow.tv_usec / 1000000.0);
if (tlast == 0) tlast = tnow;
if (tstart == 0) tstart = tnow;
if (tnow > tlast + 1) {
double avg_fps;
printf("Video Display FPS:%i\n", (int)frames);
frames_sum += frames;
avg_fps = frames_sum / (tnow - tstart);
printf("Video AVG FPS:%.2lf\n", avg_fps);
//self.infoString = [NSString stringWithFormat:@"size:%dx%d, fps:%d", vf->width, vf->height, frames];
tlast = tlast + 1;
frames = 0;
}
frames++;
while(isBusy && !stopRender) usleep(50);
isBusy = YES;
[renderer render:vf];
}
/* Locate the first SPS NAL (type 33) in an Annex-B buffer.
 * On success *sps_ptr points at the SPS start code inside buf and the
 * return value is the SPS span length: it extends to the next start code
 * whose NAL type is not 33, or to the end of the buffer.
 * Returns 0 when no SPS is present. */
static int lent_hevc_get_sps(uint8_t* buf, int size, uint8_t** sps_ptr)
{
    int pos;
    int sps_start = -1;
    for ( pos = 0; pos < (size - 4); pos++ ) {
        /* 3-byte start code 00 00 01 */
        if ( buf[pos] != 0 || buf[pos+1] != 0 || buf[pos+2] != 1 ) {
            continue;
        }
        int nal_type = (buf[pos+3] & 0x7E) >> 1;
        if ( nal_type != 33 && sps_start >= 0 ) {
            break;              /* SPS ends at this non-SPS NAL */
        }
        if ( nal_type == 33 ) { /* sequence parameter set */
            sps_start = pos;
        }
        pos += 2;               /* skip the rest of the start code */
    }
    if ( sps_start < 0 ) {
        return 0;
    }
    if ( pos == (size - 4) ) {
        pos = size;             /* SPS runs to the end of the buffer */
    }
    *sps_ptr = buf + sps_start;
    return pos - sps_start;
}
/* Return the byte length of the first access unit in buf (or `size` when no
 * boundary is found).  Scans 3-byte start codes (00 00 01); a boundary is the
 * first slice NAL (type <= 21 with the 0x80 first-slice flag set in the byte
 * two past the NAL header) not preceded by parameter-set NALs, or a
 * VPS/SPS/PPS NAL (types 32..34) opening a new parameter-set group — in that
 * case *is_idr is set to 1.
 * NOTE(review): `seq_hdr` is function-local static state carried across
 * calls, so this scanner is not reentrant and must be driven by a single
 * sequential loop at a time — confirm against all callers. */
static int lent_hevc_get_frame(uint8_t* buf, int size, int *is_idr)
{
    static int seq_hdr = 0;
    int i, nal_type, idr = 0;
    for ( i = 0; i < (size - 6); i++ ) {
        if ( 0 == buf[i] && 0 == buf[i+1] && 1 == buf[i+2] ) {
            nal_type = (buf[i+3] & 0x7E) >> 1;
            if ( nal_type <= 21 ) {
                if ( buf[i+5] & 0x80 ) { /* first slice in pic */
                    if ( !seq_hdr )
                        break;
                    else
                        seq_hdr = 0;
                }
            }
            /* VPS/SPS/PPS: starts (or extends) a parameter-set group */
            if ( nal_type >= 32 && nal_type <= 34 ) {
                if ( !seq_hdr ) {
                    seq_hdr = 1;
                    idr = 1;
                    break;
                }
                seq_hdr = 1;
            }
            i += 2; /* skip the remainder of the start code */
        }
    }
    if ( i == (size - 6) )
        i = size; /* no boundary found: treat the rest as one AU */
    if ( NULL != is_idr )
        *is_idr = idr;
    return i;
}
// Create the QY265 decoder, load the whole input file into `au_buf`, index
// every access unit boundary into `au_pos`, and pre-allocate the render
// frame's planes using dimensions probed from the SPS via a throwaway
// single-thread lenthevcdec context.  Returns 0 on success, -1 on failure.
- (int) ksy_hevc_prepare:(int) thread_num
{
    // open hevc decoder
    int hr = QY_OK;
    QY265DecConfig ksycfg ={0};
    ksycfg.threads = thread_num;
    _ksydec = QY265DecoderCreate(&ksycfg, &hr );
    if ( NULL == _ksydec ) {
        fprintf(stderr, "call QY265DecoderCreate failed!\n");
        return -1;
    }
    fflush(stdout);
    FILE *in_file = fopen([moviePath UTF8String], "rb");
    if ( NULL == in_file ) {
        fprintf(stderr, " failed! can not open input file '%s'!\n",
                [moviePath UTF8String]);
        return -1;
    }
    // Read the entire bitstream into memory (capped at AU_BUF_SIZE_MAX).
    fseek(in_file, 0, SEEK_END);
    au_buf_size = ftell(in_file);
    fseek(in_file, 0, SEEK_SET);
    printf("(%d bytes) ... ", au_buf_size);
    if ( au_buf_size > AU_BUF_SIZE_MAX )
        au_buf_size = AU_BUF_SIZE_MAX;
    au_buf = (uint8_t*)malloc(au_buf_size);
    if ( NULL == au_buf ) {
        perror("allocate AU buffer");
        fclose(in_file);
        return -1;
    }
    if ( fread(au_buf, 1, au_buf_size, in_file) != au_buf_size ) {
        perror("read intput file failed");
        fclose(in_file);
        return -1;
    }
    fclose(in_file);
    printf("done. %d bytes read.\n", au_buf_size);
    // find all AUs; au_pos[k] holds the offset one past AU k.
    au_count = 0;
    for (int i = 0; i < au_buf_size && au_count < (AU_COUNT_MAX - 1); i+=3 ) {
        i += lent_hevc_get_frame(au_buf + i, au_buf_size - i, NULL);
        au_pos[au_count++] = i;
    }
    au_pos[au_count] = au_buf_size; // include last AU
    printf("found %d AUs\n", au_count);
    int ret;
    uint8_t *sps;
    int sps_len = lent_hevc_get_sps(au_buf, au_buf_size, &sps);
    if ( sps_len > 0 ) {
        // Decode just the SPS with a one-thread context to learn dimensions.
        lenthevcdec_ctx one_thread_ctx = lenthevcdec_create(1, INT32_MAX, NULL);
        lenthevcdec_frame out_frame;
        memset(&out_frame, 0, sizeof(lenthevcdec_frame));
        out_frame.size = sizeof(lenthevcdec_frame);
        ret = lenthevcdec_decode_frame(one_thread_ctx, sps, sps_len, 0, &out_frame);
        if ( 0 != out_frame.width && 0 != out_frame.height ) {
            //printf("Video dimensions is %dx%d\n", out_frame.width, out_frame.height);
            // initialization that depends on width and heigt
            //frame.width = out_frame.width;
            //frame.height = out_frame.height;
        }
        lenthevcdec_destroy(one_thread_ctx);
        // Strides are widened via next_p2 (presumably next power of two for
        // the GL textures — confirm against next_p2's definition).
        // NOTE(review): these mallocs are unchecked and would leak if prepare
        // ran twice — confirm single-use before hardening.
        frame.linesize_y = next_p2(out_frame.width);
        frame.linesize_uv = next_p2(out_frame.width/2);
        frame.yuv_data[0] = malloc( frame.linesize_y * out_frame.height);
        frame.yuv_data[1] = malloc( frame.linesize_uv * out_frame.height/2);
        frame.yuv_data[2] = malloc( frame.linesize_uv * out_frame.height/2);
    }
    return 0;
}
// Remember the movie path and verify the file is readable.
// Fix vs. original: the probe FILE* was never closed, leaking one file
// handle per call.
- (int) openMovie:(NSString*) path
{
    moviePath = path;
    FILE *probe = fopen([moviePath UTF8String], "rb");
    if (NULL == probe) {
        printf("can not open input file '%s'!\n", [moviePath UTF8String]);
        return -1;
    }
    fclose(probe);
    return 0;
}
// Start playback: read render settings from NSUserDefaults, optionally open
// a "<movie>.ksc.yuv" dump file, prepare the QY265 decoder, and spawn the
// decode thread.  Returns 0 on success, negative on failure.
- (int) play
{
    // prepare decoder
    float renderFPS = 0;
    NSString *num = [[NSUserDefaults standardUserDefaults] valueForKey:@"threadNum"];
    int thread_num = [num integerValue];
    NSString *fps = [[NSUserDefaults standardUserDefaults] valueForKey:@"renderFPS"];
    renderFPS = [fps floatValue];
    // The settings UI stores the literal "-1 (off)" to disable rendering.
    if ([fps isEqualToString:@"-1 (off)"]) {
        _skipRender = YES;
    }
    if (renderFPS == 0) {
        renderInterval = 1;  // effectively "render as fast as possible"
    }
    else {
        renderInterval = 1.0 / renderFPS * 1000000; // us
    }
    printf("will play with decoding thread number: %d, and FPS: %.2f", thread_num, renderFPS);
    /* open output file */
    out_file = NULL;
    _out_file_string = NULL;
    NSString *flag = [[NSUserDefaults standardUserDefaults] valueForKey:@"outputFlag"];
    if ([flag isEqualToString:@"YES"]) {
        _out_file_string = [NSString stringWithFormat:@"%@.ksc.yuv", moviePath];
        if ( NULL != _out_file_string ) {
            out_file = fopen([_out_file_string UTF8String], "wb");
            if ( NULL == out_file ) {
                perror("open output file");
                return -1;
            }
        }
    }
    int ret = [self ksy_hevc_prepare:thread_num];
    if (ret < 0) {
        // Prepare failed: release whatever it managed to allocate.
        if (au_buf != NULL) {
            free(au_buf);
        }
        if (_ksydec != NULL) {
            QY265DecoderDestroy(_ksydec);
            _ksydec = NULL;
        }
        return ret;
    }
    decodeThread = [[NSThread alloc] initWithTarget:self selector:@selector(ksydecodeVideo) object:nil];
    [decodeThread start];
    return 0;
}
// Ask both the decode loop and the render wait-loop to wind down.
- (int) stop {
    stopRender = YES;
    exit_decode_thread = 1;
    return 0;
}
// Decode-thread body: feed every AU to the QY265 decoder, pace/render each
// decoded picture (optionally dumping it to out_file), drain the decoder,
// print timing statistics, publish results and release all resources.
- (void) ksydecodeVideo {
    exit_decode_thread = 0;
    [self setupRenderer];
    // decode video
    int64_t pts, ms_used;
    clock_t clock_start, clock_end, clock_used;
    struct timeval tv_start, tv_end;
    double real_time;
    int ret;
    int frame_count = 0;
    gettimeofday(&tv_start, NULL);
    clock_start = clock();
    for (int i = 0; i < au_count; i++ ) {
        if (exit_decode_thread) {
            break;
        }
        pts = i * 40;  // nominal 40 ms/AU; not used further below
        unsigned char* pD = au_buf + au_pos[i];
        int len = au_pos[i + 1] - au_pos[i];
        if (len > 0) {
            QY265DecodeFrame(_ksydec, pD, len, &ret, 0);
            if ( ret < 0 ) {
                // NOTE(review): early return skips all the cleanup at the end
                // of this method (au_buf, planes, decoder) — confirm intended.
                fprintf(stderr, "decode_frame failed[%d]\n", ret);
                return ;
            }
        }
        QY265DecoderGetDecodedFrame(_ksydec, &decframe, &ret, 0);
        if (ret == 0 && decframe.bValid) {
            frame.width = decframe.frameinfo.nWidth;
            frame.height = decframe.frameinfo.nHeight;
            // Space frames renderInterval microseconds apart.
            frame.pts = frame_count * renderInterval;
            if (out_file){
                ret = write_pic_yv12(decframe.frameinfo.nWidth, decframe.frameinfo.nHeight, (UInt8 **)decframe.pData, decframe.iStride, out_file);
                if ( ret < 0 ) {
                    perror("write output file");
                    return;
                }
            }
            if (frame_count == 0) {
                // Render pacing clock starts at the first decoded frame.
                gettimeofday(&timeStart, NULL);
            }
            frame_count++;
            [self renderFrame:&frame];
            QY265DecoderReturnDecodedFrame(_ksydec, &decframe);
        }
    }
    printf("========== %d ========\n", frame_count);
    // flush decoder: drain pictures still buffered inside the decoder
    while (1){
        if (exit_decode_thread) {
            break;
        }
        QY265DecoderGetDecodedFrame(_ksydec, &decframe, &ret, 0);
        if (ret == 0 && frame_count < au_count - 1) {
            if (decframe.bValid){
                frame.pts = frame_count * renderInterval;
                if (out_file){
                    ret = write_pic_yv12(decframe.frameinfo.nWidth, decframe.frameinfo.nHeight, (UInt8 **)decframe.pData, decframe.iStride, out_file);
                    if ( ret < 0 ) {
                        perror("write output file");
                        return;
                    }
                }
                frame_count++;
                [self renderFrame:&frame];
                QY265DecoderReturnDecodedFrame(_ksydec, &decframe);
            }
        }
        else {
            // NOTE(review): this branch writes decframe even when
            // GetDecodedFrame reported ret != 0, i.e. decframe may not hold a
            // fresh picture — confirm this trailing write is intentional.
            if (out_file){
                ret = write_pic_yv12(decframe.frameinfo.nWidth, decframe.frameinfo.nHeight, (UInt8 **)decframe.pData, decframe.iStride, out_file);
                if ( ret < 0 ) {
                    perror("write output file");
                    return;
                }
            }
            break;
        }
    }
    clock_end = clock();
    gettimeofday(&tv_end, NULL);
    clock_used = clock_end - clock_start;
    ms_used = (int64_t)(clock_used * 1000.0 / CLOCKS_PER_SEC);
    real_time = (tv_end.tv_sec + (tv_end.tv_usec / 1000000.0)) - (tv_start.tv_sec + (tv_start.tv_usec / 1000000.0));
    float realFPS = frame_count / real_time;
    printf("%d frame decoded\n"
           "\ttime\tfps\n"
           "CPU\t%lldms\t%.2f\n"
           "Real\t%.3fs\t%.2f.\n",
           frame_count,
           ms_used, frame_count * 1000.0 / ms_used,
           real_time, realFPS);
    // Publish results for the UI, then tear everything down.
    self.width = frame.width;
    self.height = frame.height;
    self.frameNum = frame_count;
    self.realFPS = realFPS;
    self.real_time = real_time;
    self.decodeEnd = 1;
    free(au_buf);
    au_buf = NULL;
    free(frame.yuv_data[0]);
    free(frame.yuv_data[1]);
    free(frame.yuv_data[2]);
    if (_ksydec) {
        QY265DecoderDestroy(_ksydec);
        _ksydec = NULL;
    }
    if (out_file)
        fclose(out_file);
    exit_decode_thread = 0;
}
/* Dump one decoded picture to `fp` as planar output: plane 0 is w x h,
 * planes 1 and 2 are (w/2) x (h/2).  Rows are `stride[i]` bytes apart in
 * memory but only the visible width is written per row.
 * Returns 0 on success, -1 on a short write. */
static int write_pic_yv12(int w, int h, uint8_t* buf[3], short stride[3], FILE *fp)
{
    for ( int plane = 0; plane < 3; plane++ ) {
        const int row_len = (plane == 0) ? w : (w / 2);
        const int row_cnt = (plane == 0) ? h : (h / 2);
        const uint8_t *row = buf[plane];
        for ( int r = 0; r < row_cnt; r++ ) {
            if ( fwrite(row, 1, row_len, fp) != (size_t)row_len ) {
                return -1;
            }
            row += stride[plane];
        }
    }
    return 0;
}
// Benchmark stub kept for interface compatibility; does nothing.
- (int)test:(int) thread_num{
    (void)thread_num;
    return 0;
}
@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/MovieEncoder.h
================================================
//
// MoviePlayer.h
// HEVDecoder
//
// Created by Shengbin Meng on 13-2-25.
// Copyright (c) 2013 Peking University. All rights reserved.
//
#import
// Wrapper around the x264 encoder: reads a raw I420 YUV file and writes the
// encoded bitstream to "<input>.264" next to it (used as the H.264 baseline
// for comparison against the 265 encoder).
@interface MovieEncoder : NSObject
@property NSInteger width;            // picture width, filled in by -encoder
@property NSInteger height;           // picture height, filled in by -encoder
@property NSInteger frameNum;         // number of frames encoded
@property float realFPS;              // wall-clock encoding speed (frames/s)
@property float real_time;            // wall-clock seconds spent encoding
@property double avg_psnr;            // (6*Y + U + V) / 8 PSNR average
@property NSString *out_file_string;  // path of the produced .264 file
- (int) openMovie:(NSString*) path;   // open input YUV; 0 ok, -1 error
- (int) encoder;                      // run the full encode; 0 ok, -1 error
@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/MovieEncoder.m
================================================
//
// MoviePlayer.m
// HEVDecoder
//
// Created by Shengbin Meng on 13-2-25.
// Copyright (c) 2013 Peking University. All rights reserved.
//
#import "MovieEncoder.h"
#include
#include
#include
#include
@implementation MovieEncoder
{
NSString *moviePath;
FILE *in_file;
}
// Plain initializer; nothing to set up until -openMovie is called.
- (id) init
{
    return [super init];
}
// Open the raw YUV input file at `path` and keep the FILE* in `in_file`.
// Returns 0 on success, -1 when the file cannot be opened.
- (int) openMovie:(NSString*) path
{
    moviePath = path;
    const char *cPath = [moviePath UTF8String];
    in_file = fopen(cPath, "rb");
    if (in_file == NULL) {
        printf("can not open input file '%s'!\n", cPath);
        return -1;
    }
    return 0;
}
// Encode the previously opened raw I420 file with x264 using settings from
// NSUserDefaults; writes "<input>.264" next to the input.  On success fills
// width/height/frameNum/realFPS/real_time/avg_psnr and returns 0; -1 on
// error.  Fixes vs. original: "&param" (garbled to "¶m" in four places),
// `out_file` was read uninitialized when no output path was built, and the
// fail path leaked the encoder handle and the allocated picture.
- (int) encoder
{
    x264_param_t param;
    x264_picture_t pic;
    x264_picture_t pic_out;
    x264_t *h = NULL;
    int i_frame = 0;
    int i_frame_size;
    x264_nal_t *nal;
    int i_nal;
    clock_t clock_start, clock_end, clock_used;
    struct timeval tv_start, tv_end;
    double real_time;
    int64_t ms_used;
    FILE *out_file = NULL;  // was uninitialized: fclose() on garbage in fail path
    int pic_allocated = 0;  // whether x264_picture_alloc succeeded
    double sum_psnr_y = 0.0;
    double sum_psnr_u = 0.0;
    double sum_psnr_v = 0.0;
    _out_file_string = [NSString stringWithFormat:@"%@.264", moviePath];
    if ( NULL != _out_file_string ) {
        out_file = fopen([_out_file_string UTF8String], "wb");
        if ( NULL == out_file ) {
            perror("open output file");
            fclose(in_file);
            return -1;
        }
    }
    NSString *resolution = [[NSUserDefaults standardUserDefaults] valueForKey:@"resolution"];
    NSArray *arrayofRes = [resolution componentsSeparatedByString:@"*"];
    NSString *fps = [[NSUserDefaults standardUserDefaults] valueForKey:@"fps"];
    NSString *bitRate = [[NSUserDefaults standardUserDefaults] valueForKey:@"bitRate"];
    NSString *threads = [[NSUserDefaults standardUserDefaults] valueForKey:@"threads"];
    NSString *profile = [[NSUserDefaults standardUserDefaults] valueForKey:@"profile"];
    NSString *delayed = [[NSUserDefaults standardUserDefaults] valueForKey:@"delayed"];
    /* Get default params for preset/tuning */
    if ([delayed isEqualToString:@"zerolatency"]) {
        if( x264_param_default_preset( &param, [profile UTF8String], "zerolatency" ) < 0 )
            goto fail;
    }
    else {
        if( x264_param_default_preset( &param, [profile UTF8String], NULL ) < 0 )
            goto fail;
    }
    /* Configure non-default params */
    param.i_csp = X264_CSP_I420;
    param.i_width = [arrayofRes[0] intValue];
    param.i_height = [arrayofRes[1] intValue];
    param.b_vfr_input = 0;
    param.b_repeat_headers = 1;
    param.b_annexb = 1;
    if([bitRate intValue]){
        param.rc.i_bitrate = [bitRate intValue];
        param.rc.i_rc_method = X264_RC_ABR;
    }
    /* B-frame depth follows the chosen latency mode. */
    if ([delayed isEqualToString:@"zerolatency"]) {
        param.i_bframe = 0;
    }
    else if([delayed isEqualToString:@"livestreaming"]){
        param.i_bframe = 3;
    }
    else{
        param.i_bframe = 7;
    }
    param.i_threads = [threads intValue];
    param.i_fps_num = [fps floatValue];
    param.i_fps_den = 1;
    param.analyse.b_psnr = 1;
    /* Apply profile restrictions. */
    if( x264_param_apply_profile( &param, "high" ) < 0 )
        goto fail;
    if( x264_picture_alloc( &pic, param.i_csp, param.i_width, param.i_height ) < 0 )
        goto fail;
    pic_allocated = 1;
    h = x264_encoder_open( &param );
    if( !h )
        goto fail;
    int luma_size = param.i_width * param.i_height;
    int chroma_size = luma_size / 4;
    gettimeofday(&tv_start, NULL);
    clock_start = clock();
    /* Encode frames */
    for( ;; i_frame++ )
    {
        /* Read input frame */
        if( fread( pic.img.plane[0], 1, luma_size, in_file ) != luma_size )
            break;
        if( fread( pic.img.plane[1], 1, chroma_size, in_file ) != chroma_size )
            break;
        if( fread( pic.img.plane[2], 1, chroma_size, in_file ) != chroma_size )
            break;
        pic.i_pts = i_frame;
        i_frame_size = x264_encoder_encode( h, &nal, &i_nal, &pic, &pic_out );
        if( i_frame_size < 0 )
            goto fail;
        else if( i_frame_size )
        {
            if (param.analyse.b_psnr){
                sum_psnr_y += pic_out.prop.f_psnr[0];
                sum_psnr_u += pic_out.prop.f_psnr[1];
                sum_psnr_v += pic_out.prop.f_psnr[2];
            }
            if( !fwrite( nal->p_payload, i_frame_size, 1, out_file ) )
                goto fail;
        }
    }
    /* Flush delayed frames */
    while( x264_encoder_delayed_frames( h ) )
    {
        i_frame_size = x264_encoder_encode( h, &nal, &i_nal, NULL, &pic_out );
        if( i_frame_size < 0 )
            goto fail;
        else if( i_frame_size )
        {
            if (param.analyse.b_psnr){
                sum_psnr_y += pic_out.prop.f_psnr[0];
                sum_psnr_u += pic_out.prop.f_psnr[1];
                sum_psnr_v += pic_out.prop.f_psnr[2];
            }
            if( !fwrite( nal->p_payload, i_frame_size, 1, out_file ) )
                goto fail;
        }
    }
    clock_end = clock();
    gettimeofday(&tv_end, NULL);
    clock_used = clock_end - clock_start;
    ms_used = (int64_t)(clock_used * 1000.0 / CLOCKS_PER_SEC);
    real_time = (tv_end.tv_sec + (tv_end.tv_usec / 1000000.0)) - (tv_start.tv_sec + (tv_start.tv_usec / 1000000.0));
    float realFPS = i_frame / real_time;
    /* 6:1:1 luma-weighted PSNR average over all frames. */
    double avg_psnr = (6*sum_psnr_y+sum_psnr_u+sum_psnr_v)/(8*i_frame);
    printf("%d frame encoded\n"
           "\ttime\tfps\n"
           "CPU\t%lldms\t%.2f\n"
           "Real\t%.3fs\t%.2f.\n"
           "PSNR\t%.2f\n",
           i_frame,
           ms_used, i_frame * 1000.0 / ms_used,
           real_time, realFPS, avg_psnr);
    // Publish results for the UI.
    self.width = param.i_width;
    self.height = param.i_height;
    self.frameNum = i_frame;
    self.realFPS = realFPS;
    self.real_time = real_time;
    self.avg_psnr = avg_psnr;
    x264_encoder_close( h );
    x264_picture_clean( &pic );
    fclose(in_file);
    fclose(out_file);
    return 0;
fail:
    /* Release whatever was acquired before the failure. */
    if ( h )
        x264_encoder_close( h );
    if ( pic_allocated )
        x264_picture_clean( &pic );
    fclose(in_file);
    if ( out_file )
        fclose(out_file);
    return -1;
}
@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/MoviePlayer.h
================================================
//
// MoviePlayer.h
// HEVDecoder
//
// Created by Shengbin Meng on 13-2-25.
// Copyright (c) 2013 Peking University. All rights reserved.
//
#import
#import "GLRenderer.h"
#import "GLView.h"
// One decoded video frame handed to the GL renderer.
struct VideoFrame
{
    int width;             // visible picture width in pixels
    int height;            // visible picture height in pixels
    int linesize_y;        // bytes per row of the Y plane
    int linesize_uv;       // bytes per row of the U and V planes
    double pts;            // presentation time (us) used to pace rendering
    uint8_t *yuv_data[3];  // Y, U, V plane pointers
};
// Current time in milliseconds, truncated to 32 bits (see MoviePlayer.m).
uint32_t getms();
// Plays a raw HEVC bitstream with the lenthevcdec decoder and renders the
// decoded pictures through a GLRenderer.
@interface MoviePlayer : NSObject
@property (nonatomic, retain) GLRenderer *renderer;   // GL output target
@property NSInteger width;            // picture width, set when decoding ends
@property NSInteger height;           // picture height, set when decoding ends
@property NSInteger frameNum;         // number of frames decoded
@property float realFPS;              // wall-clock decoding speed (frames/s)
@property float real_time;            // wall-clock seconds spent decoding
@property bool decodeEnd;             // set when the decode thread finishes
@property NSString *out_file_string;  // path of the optional YUV dump file
- (int) openMovie:(NSString*) path;   // remember and verify the input path
- (int) play;                         // prepare decoder, spawn decode thread
- (int) stop;                         // ask decode/render loops to exit
- (int)test:(int) thread_num;         // stub; always returns 0
@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/MoviePlayer.m
================================================
//
// MoviePlayer.m
// HEVDecoder
//
// Created by Shengbin Meng on 13-2-25.
// Copyright (c) 2013 Peking University. All rights reserved.
//
#import "MoviePlayer.h"
#import "GLRenderer.h"
#include "lenthevcdec.h"
#include
#include
#define AU_COUNT_MAX (1024 * 256)
#define AU_BUF_SIZE_MAX (1024 * 1024 * 128)
/* Number of logical CPU cores via the Darwin "hw.ncpu" sysctl.
 * Returns 0 when the query fails (ncpu is left at its initial value). */
static unsigned int count_cores()
{
    unsigned int ncpu = 0;
    size_t len = sizeof(ncpu);
    sysctlbyname("hw.ncpu", &ncpu, &len, NULL, 0);
    return ncpu;
}
/* Wall-clock time in milliseconds, truncated to 32 bits (wraps after about
 * 49 days — callers only use it for short deltas in log lines). */
uint32_t getms()
{
    struct timeval now;
    gettimeofday(&now, NULL);
    return (uint32_t)(now.tv_sec * 1000 + now.tv_usec / 1000);
}
@implementation MoviePlayer
{
NSString *moviePath;
NSThread *decodeThread;
BOOL isBusy, stopRender;
BOOL _bSkipRender;
int exit_decode_thread;
uint32_t au_pos[AU_COUNT_MAX];
uint32_t au_count, au_buf_size;
uint8_t *au_buf;
lenthevcdec_ctx ctx;
struct VideoFrame frame;
int frames;
int frames_sum;
double tstart, tlast;
uint64_t renderInterval;
struct timeval timeStart;
FILE *out_file;
}
@synthesize renderer;
// Designated initializer: reset all decode/render bookkeeping to a clean
// idle state.  The original wrote the ivars even when [super init] returned
// nil; guard with the standard `if (self)` idiom.
- (id) init
{
    self = [super init];
    if (self) {
        exit_decode_thread = 0;
        ctx = NULL;
        frames_sum = 0;
        tstart = 0;
        frames = 0;
        tlast = 0;
        renderInterval = 0;
        isBusy = NO;
        stopRender = NO;
        _bSkipRender = NO;
        self.decodeEnd = 0;
    }
    return self;
}
// Register ourselves with the renderer so it can call back bufferDone.
- (void) setupRenderer
{
    [self.renderer setRenderStateListener:self];
}
// Renderer callback: the plane buffers handed to GL may be reused now.
- (void) bufferDone {
    isBusy = NO;
}
// Pace and display one frame: sleep until vf->pts (microseconds since the
// first rendered frame), maintain per-second and average display-FPS
// counters, then hand the frame to the GL renderer once the previous upload
// completed.
// NOTE(review): the first statement overwrites the parameter with &frame, so
// whatever the caller passes is ignored — confirm that is intentional before
// relying on the parameter.
- (void) renderFrame:(struct VideoFrame *) vf
{
    vf = &frame;
    if (_bSkipRender) {
        return;
    }
    struct timeval timeNow;
    gettimeofday(&timeNow, NULL);
    // Microseconds elapsed since the first rendered frame (timeStart).
    int64_t timePassed = ((int64_t)(timeNow.tv_sec - timeStart.tv_sec))*1000000 + (timeNow.tv_usec - timeStart.tv_usec);
    int64_t delay = vf->pts - timePassed;
    if (delay > 0) {
        usleep(delay);
    }
    gettimeofday(&timeNow, NULL);
    double tnow = timeNow.tv_sec + (timeNow.tv_usec / 1000000.0);
    if (tlast == 0) tlast = tnow;
    if (tstart == 0) tstart = tnow;
    // Once per second: report instantaneous and average display FPS.
    if (tnow > tlast + 1) {
        double avg_fps;
        printf("Video Display FPS:%i\n", (int)frames);
        frames_sum += frames;
        avg_fps = frames_sum / (tnow - tstart);
        printf("Video AVG FPS:%.2lf\n", avg_fps);
        //self.infoString = [NSString stringWithFormat:@"size:%dx%d, fps:%d", vf->width, vf->height, frames];
        tlast = tlast + 1;
        frames = 0;
    }
    frames++;
    // Wait (polling every 50 us) until the renderer released the buffers.
    while(isBusy && !stopRender) usleep(50);
    isBusy = YES;
    [renderer render:vf];
}
/* Locate the first SPS NAL (type 33) in an Annex-B buffer.
 * On success *sps_ptr points at the SPS start code inside buf and the
 * return value is the SPS span length: it extends to the next start code
 * whose NAL type is not 33, or to the end of the buffer.
 * Returns 0 when no SPS is present. */
static int lent_hevc_get_sps(uint8_t* buf, int size, uint8_t** sps_ptr)
{
    int pos;
    int sps_start = -1;
    for ( pos = 0; pos < (size - 4); pos++ ) {
        /* 3-byte start code 00 00 01 */
        if ( buf[pos] != 0 || buf[pos+1] != 0 || buf[pos+2] != 1 ) {
            continue;
        }
        int nal_type = (buf[pos+3] & 0x7E) >> 1;
        if ( nal_type != 33 && sps_start >= 0 ) {
            break;              /* SPS ends at this non-SPS NAL */
        }
        if ( nal_type == 33 ) { /* sequence parameter set */
            sps_start = pos;
        }
        pos += 2;               /* skip the rest of the start code */
    }
    if ( sps_start < 0 ) {
        return 0;
    }
    if ( pos == (size - 4) ) {
        pos = size;             /* SPS runs to the end of the buffer */
    }
    *sps_ptr = buf + sps_start;
    return pos - sps_start;
}
/* Return the byte length of the first access unit in buf (or `size` when no
 * boundary is found).  Scans 3-byte start codes (00 00 01); a boundary is the
 * first slice NAL (type <= 21 with the 0x80 first-slice flag set in the byte
 * two past the NAL header) not preceded by parameter-set NALs, or a
 * VPS/SPS/PPS NAL (types 32..34) opening a new parameter-set group — in that
 * case *is_idr is set to 1.
 * NOTE(review): `seq_hdr` is function-local static state carried across
 * calls, so this scanner is not reentrant and must be driven by a single
 * sequential loop at a time — confirm against all callers. */
static int lent_hevc_get_frame(uint8_t* buf, int size, int *is_idr)
{
    static int seq_hdr = 0;
    int i, nal_type, idr = 0;
    for ( i = 0; i < (size - 6); i++ ) {
        if ( 0 == buf[i] && 0 == buf[i+1] && 1 == buf[i+2] ) {
            nal_type = (buf[i+3] & 0x7E) >> 1;
            if ( nal_type <= 21 ) {
                if ( buf[i+5] & 0x80 ) { /* first slice in pic */
                    if ( !seq_hdr )
                        break;
                    else
                        seq_hdr = 0;
                }
            }
            /* VPS/SPS/PPS: starts (or extends) a parameter-set group */
            if ( nal_type >= 32 && nal_type <= 34 ) {
                if ( !seq_hdr ) {
                    seq_hdr = 1;
                    idr = 1;
                    break;
                }
                seq_hdr = 1;
            }
            i += 2; /* skip the remainder of the start code */
        }
    }
    if ( i == (size - 6) )
        i = size; /* no boundary found: treat the rest as one AU */
    if ( NULL != is_idr )
        *is_idr = idr;
    return i;
}
// Create the lenthevcdec decoder (choosing HM compatibility from the file
// extension), load the whole input file into `au_buf`, index every access
// unit into `au_pos`, and probe the SPS with a throwaway single-thread
// context to learn the video dimensions.  Returns 0 on success, -1 on
// failure.
- (int) lent_hevc_prepare:(int) thread_num
{
    // open hevc decoder; ".hm91"/".hm10" inputs pin an older HM syntax.
    int compatibility = INT32_MAX;
    if ([[moviePath pathExtension] isEqualToString:@"hm91"]) {
        compatibility = 91;
    } else if ([[moviePath pathExtension] isEqualToString:@"hm10"]) {
        compatibility = 100;
    }
    if (thread_num == 0) {
        thread_num = count_cores();  // 0 means "one thread per core"
    }
    ctx = lenthevcdec_create(thread_num, compatibility, NULL);
    if ( NULL == ctx ) {
        fprintf(stderr, "call lenthevcdec_create failed!\n");
        return -1;
    }
    printf("raw bitstream, compatibility: %s\n",
           (91 == compatibility) ? "HM9.1" : ((100 == compatibility) ? "HM10.0" : "Unknown(Last)"));
    // read intput file
    printf("read input file ");
    fflush(stdout);
    FILE *in_file = fopen([moviePath UTF8String], "rb");
    if ( NULL == in_file ) {
        fprintf(stderr, " failed! can not open input file '%s'!\n",
                [moviePath UTF8String]);
        return -1;
    }
    // Read the entire bitstream into memory (capped at AU_BUF_SIZE_MAX).
    fseek(in_file, 0, SEEK_END);
    au_buf_size = ftell(in_file);
    fseek(in_file, 0, SEEK_SET);
    printf("(%d bytes) ... ", au_buf_size);
    if ( au_buf_size > AU_BUF_SIZE_MAX )
        au_buf_size = AU_BUF_SIZE_MAX;
    au_buf = (uint8_t*)malloc(au_buf_size);
    if ( NULL == au_buf ) {
        perror("allocate AU buffer");
        fclose(in_file);
        return -1;
    }
    if ( fread(au_buf, 1, au_buf_size, in_file) != au_buf_size ) {
        perror("read intput file failed");
        fclose(in_file);
        return -1;
    }
    fclose(in_file);
    printf("done. %d bytes read.\n", au_buf_size);
    // find all AUs; au_pos[k] holds the offset one past AU k.
    au_count = 0;
    for (int i = 0; i < au_buf_size && au_count < (AU_COUNT_MAX - 1); i+=3 ) {
        i += lent_hevc_get_frame(au_buf + i, au_buf_size - i, NULL);
        au_pos[au_count++] = i;
    }
    au_pos[au_count] = au_buf_size; // include last AU
    printf("found %d AUs\n", au_count);
    int ret;
    uint8_t *sps;
    int sps_len = lent_hevc_get_sps(au_buf, au_buf_size, &sps);
    if ( sps_len > 0 ) {
        // Decode just the SPS with a one-thread context to learn dimensions.
        lenthevcdec_ctx one_thread_ctx = lenthevcdec_create(1, compatibility, NULL);
        lenthevcdec_frame out_frame;
        memset(&out_frame, 0, sizeof(lenthevcdec_frame));
        out_frame.size = sizeof(lenthevcdec_frame);
        ret = lenthevcdec_decode_frame(one_thread_ctx, sps, sps_len, 0, &out_frame);
        if ( 0 != out_frame.width && 0 != out_frame.height ) {
            //printf("Video dimensions is %dx%d\n", out_frame.width, out_frame.height);
            // initialization that depends on width and heigt
        }
        lenthevcdec_destroy(one_thread_ctx);
    }
    return 0;
}
// Remember the movie path and verify the file is readable.
// Fix vs. original: the probe FILE* was never closed, leaking one file
// handle per call.
- (int) openMovie:(NSString*) path
{
    moviePath = path;
    FILE *probe = fopen([moviePath UTF8String], "rb");
    if (NULL == probe) {
        printf("can not open input file '%s'!\n", [moviePath UTF8String]);
        return -1;
    }
    fclose(probe);
    return 0;
}
// Start playback: read render settings from NSUserDefaults, optionally open
// a "<movie>.lent.yuv" dump file, prepare the lenthevcdec decoder, and spawn
// the decode thread.  Returns 0 on success, negative on failure.
- (int) play
{
    // prepare decoder
    float renderFPS = 0;
    NSString *num = [[NSUserDefaults standardUserDefaults] valueForKey:@"threadNum"];
    int thread_num = [num integerValue];
    NSString *fps = [[NSUserDefaults standardUserDefaults] valueForKey:@"renderFPS"];
    renderFPS = [fps floatValue];
    // The settings UI stores the literal "-1 (off)" to disable rendering.
    if ([fps isEqualToString:@"-1 (off)"]) {
        _bSkipRender = YES;
    }
    if (renderFPS == 0) renderInterval = 1;  // "as fast as possible"
    else {
        renderInterval = 1.0 / renderFPS * 1000000; // us
    }
    printf("will play with decoding thread number: %d, FPS: %.2f", thread_num, renderFPS);
    /* open output file */
    out_file = NULL;
    _out_file_string = NULL;
    NSString *flag = [[NSUserDefaults standardUserDefaults] valueForKey:@"outputFlag"];
    if ([flag isEqualToString:@"YES"]) {
        _out_file_string = [NSString stringWithFormat:@"%@.lent.yuv", moviePath];
        if ( NULL != _out_file_string ) {
            out_file = fopen([_out_file_string UTF8String], "wb");
            if ( NULL == out_file ) {
                perror("open output file");
                return -1;
            }
        }
    }
    int ret = [self lent_hevc_prepare:thread_num];
    if (ret < 0) {
        // Prepare failed: release whatever it managed to allocate.
        if (au_buf != NULL) free(au_buf);
        if (ctx != NULL) lenthevcdec_destroy(ctx);
        return ret;
    }
    decodeThread = [[NSThread alloc] initWithTarget:self selector:@selector(decodeVideo) object:nil];
    [decodeThread start];
    return 0;
}
// Request playback shutdown; the decode thread polls these flags and exits.
- (int) stop
{
    stopRender = YES;       // stop presenting frames
    exit_decode_thread = 1; // ask decodeVideo's loops to break out
    return 0;
}
// Decoder worker thread body: decodes every AU in au_buf, renders each
// decoded picture, optionally dumps YV12 to out_file, flushes the decoder,
// then publishes timing statistics and releases all decode resources.
- (void) decodeVideo
{
    exit_decode_thread = 0;
    [self setupRenderer];
    // decode video
    // BUGFIX: pts was uninitialized; the flush loop below reads it even when
    // au_count == 0, which was undefined behavior.
    int64_t pts = 0, ms_used;
    clock_t clock_start, clock_end, clock_used;
    struct timeval tv_start, tv_end;
    double real_time;
    int ret;
    int frame_count = 0;
    int failed = 0; // set on decode/write errors so cleanup below still runs
    lenthevcdec_frame out_frame;
    gettimeofday(&tv_start, NULL);
    clock_start = clock();
    for (int i = 0; i < au_count; i++) {
        if (exit_decode_thread) {
            break;
        }
        pts = i * 40; // synthetic 25fps timestamp (40 ms per AU)
        out_frame.got_frame = 0;
        ret = lenthevcdec_decode_frame(ctx, au_buf + au_pos[i], au_pos[i + 1] - au_pos[i], pts, &out_frame);
        if (ret < 0) {
            fprintf(stderr, "lenthevcdec_decode_frame failed! ret=%d\n", ret);
            // BUGFIX: previously returned here, leaking au_buf/ctx/out_file
            // and never setting decodeEnd (the UI polls it and would hang).
            failed = 1;
            break;
        }
        if (out_frame.got_frame > 0) {
            // hand the decoded planes to the renderer
            frame.yuv_data[0] = out_frame.pixels[0];
            frame.yuv_data[1] = out_frame.pixels[1];
            frame.yuv_data[2] = out_frame.pixels[2];
            frame.linesize_y = out_frame.line_stride[0];
            frame.linesize_uv = out_frame.line_stride[1];
            frame.pts = frame_count * renderInterval;
            frame.width = out_frame.width;
            frame.height = out_frame.height;
            if (out_file) {
                ret = write_pic_yv12(out_frame.width, out_frame.height, (UInt8 **)out_frame.pixels, out_frame.line_stride, out_file);
                if (ret < 0) {
                    perror("write output file");
                    failed = 1; // BUGFIX: fall through to cleanup instead of leaking
                    break;
                }
            }
            if (frame_count == 0) {
                gettimeofday(&timeStart, NULL);
            }
            frame_count++;
            [self renderFrame:&frame];
        }
    }
    // flush decoder: feed empty AUs until no more pictures come out
    while (!failed) {
        if (exit_decode_thread) {
            break;
        }
        out_frame.got_frame = 0;
        ret = lenthevcdec_decode_frame(ctx, NULL, 0, pts, &out_frame);
        if (ret == 0 && out_frame.got_frame > 0) {
            frame.yuv_data[0] = out_frame.pixels[0];
            frame.yuv_data[1] = out_frame.pixels[1];
            frame.yuv_data[2] = out_frame.pixels[2];
            frame.linesize_y = out_frame.line_stride[0];
            frame.linesize_uv = out_frame.line_stride[1];
            frame.pts = frame_count * renderInterval;
            printf("decode frame %d, %dx%d, pts is %" PRId64 "\n",
                   frame_count, out_frame.width, out_frame.height, out_frame.got_pts);
            if (out_file) {
                ret = write_pic_yv12(out_frame.width, out_frame.height, (UInt8 **)out_frame.pixels, out_frame.line_stride, out_file);
                if (ret < 0) {
                    perror("write output file");
                    failed = 1;
                    break;
                }
            }
            if (frame_count == 0) {
                gettimeofday(&timeStart, NULL);
            }
            frame_count++;
            [self renderFrame:&frame];
        }
        else {
            break;
        }
    }
    if (!failed) {
        // publish statistics only when decoding completed (matches the
        // original success path, which was the only one reaching this code)
        clock_end = clock();
        gettimeofday(&tv_end, NULL);
        clock_used = clock_end - clock_start;
        ms_used = (int64_t)(clock_used * 1000.0 / CLOCKS_PER_SEC);
        real_time = (tv_end.tv_sec + (tv_end.tv_usec / 1000000.0)) - (tv_start.tv_sec + (tv_start.tv_usec / 1000000.0));
        // guard the divisions so an instant run prints 0.00 instead of inf
        double cpu_fps = (ms_used > 0) ? (frame_count * 1000.0 / ms_used) : 0.0;
        float realFPS = (real_time > 0) ? (float)(frame_count / real_time) : 0.0f;
        printf("%d frame decoded\n"
               "\ttime\tfps\n"
               "CPU\t%lldms\t%.2f\n"
               "Real\t%.3fs\t%.2f.\n",
               frame_count,
               ms_used, cpu_fps,
               real_time, realFPS);
        self.width = frame.width;
        self.height = frame.height;
        self.frameNum = frame_count;
        self.realFPS = realFPS;
        self.real_time = real_time;
    }
    // BUGFIX: always signal completion and release resources, even on error,
    // so the UI's monitorPlaybackTime poll can recover.
    self.decodeEnd = 1;
    free(au_buf);
    au_buf = NULL;
    lenthevcdec_destroy(ctx);
    if (out_file)
        fclose(out_file);
    exit_decode_thread = 0;
}
/*
 * Write one planar YV12/I420 picture to fp, stripping line padding.
 * buf/stride hold the Y, U, V plane pointers and their line strides;
 * chroma planes are written at half width and half height.
 * Returns 0 on success, -1 on a short write.
 */
static int write_pic_yv12(int w, int h, uint8_t* buf[3], int stride[3], FILE *fp)
{
    for (int plane = 0; plane < 3; plane++) {
        const int row_bytes = (plane == 0) ? w : (w / 2);
        const int rows = (plane == 0) ? h : (h / 2);
        const uint8_t *row = buf[plane];
        for (int r = 0; r < rows; r++, row += stride[plane]) {
            if (fwrite(row, 1, (size_t)row_bytes, fp) != (size_t)row_bytes)
                return -1;
        }
    }
    return 0;
}
// Stand-alone decode benchmark: reads the raw .265 bitstream at moviePath,
// decodes every AU with thread_num decoder threads and writes the decoded
// pictures as YV12 to "<moviePath>.<thread_num>.yuv".
// Returns 0 on success; non-zero error codes are preserved for callers.
- (int)test:(int) thread_num
{
    // BUGFIX: pts was uninitialized; the flush loop reads it even when no
    // AU was decoded (au_count == 0).
    int64_t pts = 0;
    int ret;
    int frame_count = 0;
    lenthevcdec_frame out_frame;
    printf("%s\n threads:%d\n", [moviePath UTF8String],thread_num);
    // pick decoder compatibility mode from the file extension
    int compatibility = INT32_MAX;
    if ([[moviePath pathExtension] isEqualToString:@"hm91"]) {
        compatibility = 91;
    } else if ([[moviePath pathExtension] isEqualToString:@"hm10"]) {
        compatibility = 100;
    }
    ctx = lenthevcdec_create(thread_num, compatibility, NULL);
    if ( NULL == ctx ) {
        fprintf(stderr, "call lenthevcdec_create failed!\n");
        return -1;
    }
    printf("raw bitstream, compatibility: %s\n",
           (91 == compatibility) ? "HM9.1" : ((100 == compatibility) ? "HM10.0" : "Unknown(Last)"));
    // read input file
    printf("read input file ");
    fflush(stdout);
    FILE *in_file = fopen([moviePath UTF8String], "rb");
    if ( NULL == in_file ) {
        fprintf(stderr, " failed! can not open input file '%s'!\n",
                [moviePath UTF8String]);
        lenthevcdec_destroy(ctx); // BUGFIX: do not leak the decoder context
        return -1;
    }
    fseek(in_file, 0, SEEK_END);
    au_buf_size = ftell(in_file);
    fseek(in_file, 0, SEEK_SET);
    printf("(%d bytes) ... ", au_buf_size);
    if ( au_buf_size > AU_BUF_SIZE_MAX )
        au_buf_size = AU_BUF_SIZE_MAX; // clamp: only the first part is decoded
    au_buf = (uint8_t*)malloc(au_buf_size);
    if ( NULL == au_buf ) {
        perror("allocate AU buffer");
        fclose(in_file);
        lenthevcdec_destroy(ctx); // BUGFIX
        return -1;
    }
    if ( fread(au_buf, 1, au_buf_size, in_file) != au_buf_size ) {
        perror("read input file failed"); // BUGFIX: typo "intput"
        fclose(in_file);
        free(au_buf); // BUGFIX: release buffer and context on this path
        au_buf = NULL;
        lenthevcdec_destroy(ctx);
        return -1;
    }
    fclose(in_file);
    printf("done. %d bytes read.\n", au_buf_size);
    // find all AUs
    au_count = 0;
    for (int i = 0; i < au_buf_size && au_count < (AU_COUNT_MAX - 1); i+=3 ) {
        i += lent_hevc_get_frame(au_buf + i, au_buf_size - i, NULL);
        if ( i < au_buf_size )
            au_pos[au_count++] = i;
    }
    au_pos[au_count] = au_buf_size; // include last AU
    printf("found %d AUs\n", au_count);
    /* open output file */
    FILE *out_file = NULL;
    NSString *out_file_string = [NSString stringWithFormat:@"%@.%d.yuv", moviePath, thread_num];
    if ( NULL != out_file_string ) {
        out_file = fopen([out_file_string UTF8String], "wb");
        if ( NULL == out_file ) {
            perror("open output file");
            free(au_buf); // BUGFIX
            au_buf = NULL;
            lenthevcdec_destroy(ctx);
            return 6;
        }
    }
    for (int i = 0; i < au_count; i++ ) {
        pts = i * 40; // synthetic 25fps timestamp
        out_frame.got_frame = 0;
        ret = lenthevcdec_decode_frame(ctx, au_buf + au_pos[i], au_pos[i + 1] - au_pos[i], pts, &out_frame);
        if ( ret < 0 ) {
            fprintf(stderr, "lenthevcdec_decode_frame failed! ret=%d\n", ret);
            fclose(out_file); // BUGFIX: release resources on the error path
            free(au_buf);
            au_buf = NULL;
            lenthevcdec_destroy(ctx);
            return -1;
        }
        if ( out_frame.got_frame > 0 ) {
            printf("decode frame %d, %dx%d, pts is %" PRId64 "\n",
                   frame_count, out_frame.width, out_frame.height, out_frame.got_pts);
            ret = write_pic_yv12(out_frame.width, out_frame.height, (UInt8 **)out_frame.pixels, out_frame.line_stride, out_file);
            if ( ret < 0 ) {
                perror("write output file");
                fclose(out_file); // BUGFIX
                free(au_buf);
                au_buf = NULL;
                lenthevcdec_destroy(ctx);
                return 10;
            }
            frame_count++;
        }
    }
    // flush decoder: feed empty AUs until no more pictures come out
    while (1) {
        out_frame.got_frame = 0;
        ret = lenthevcdec_decode_frame(ctx, NULL, 0, pts, &out_frame);
        if ( ret < 0 || out_frame.got_frame <= 0) {
            break;
        }
        printf("decode frame %d, %dx%d, pts is %" PRId64 "\n",
               frame_count, out_frame.width, out_frame.height, out_frame.got_pts);
        ret = write_pic_yv12(out_frame.width, out_frame.height, (UInt8 **)out_frame.pixels, out_frame.line_stride, out_file);
        if ( ret < 0 ) {
            perror("write output file");
            fclose(out_file); // BUGFIX
            free(au_buf);
            au_buf = NULL;
            lenthevcdec_destroy(ctx);
            return 10;
        }
        frame_count++;
    }
    printf("%d frame decoded\n",
           frame_count);
    fclose(out_file);
    free(au_buf);
    au_buf = NULL;
    lenthevcdec_destroy(ctx);
    return 0;
}
@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/MoviesViewController.h
================================================
//
// MoviesViewController.h
// HEVDecoder
//
// Created by Shengbin Meng on 13-2-25.
// Copyright (c) 2013 Peking University. All rights reserved.
//
#import <UIKit/UIKit.h>
// Table-view controller that lists files from the app's Documents directory
// (filtered by suffix) and reports the selected file name via tableBlock.
@interface MoviesViewController : UITableViewController
// Designated initializer: only files whose name ends with `suffix` are listed.
-(id)initWithSuffix:(NSString *)suffix;
// Backing data: array of dictionaries with keys @"Filename" and @"Path".
@property (nonatomic, retain) NSMutableArray *movieList;
// Invoked with the chosen file name when a row is tapped (may be nil).
@property (nonatomic, copy)void (^tableBlock)(NSString *fileName);
@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/MoviesViewController.m
================================================
//
// MoviesViewController.m
// HEVDecoder
//
// Created by Shengbin Meng on 13-2-25.
// Copyright (c) 2013 Peking University. All rights reserved.
//
#import "MoviesViewController.h"
//#import "PlayViewController.h"
//#import "SettingsViewController.h"
//#import "TestDecoderViewController.h"
@interface MoviesViewController (){
NSString * _suffix;
}
@end
@implementation MoviesViewController
// Initializer used when created with an explicit table style.
- (id)initWithStyle:(UITableViewStyle)style
{
    self = [super initWithStyle:style];
    if (self) {
        // Custom initialization
        self.title = @"Movies";
    }
    _tableBlock = nil;
    return self;
}
// List only files whose name ends with `suffix`.
-(id)initWithSuffix:(NSString *)suffix
{
    self = [super init];
    _suffix = suffix;
    return self;
}
// Default: list every file (empty suffix matches everything).
-(id)init{
    self = [super init];
    _suffix = @"";
    return self;
}
- (void)viewDidLoad
{
    [super viewDidLoad];
    if ([self.tableView respondsToSelector:@selector(registerClass:forCellReuseIdentifier:)]) {
        // this is iOS 6.0 above
        [self.tableView registerClass:[UITableViewCell class] forCellReuseIdentifier:@"Cell"];
    }
    self.navigationItem.leftBarButtonItem = [[UIBarButtonItem alloc] initWithBarButtonSystemItem:UIBarButtonSystemItemCancel target:self action:@selector(backAction)];
}
// Cancel button: dismiss without reporting a selection.
- (void)backAction{
    [self dismissViewControllerAnimated:FALSE completion:nil];
}
// Rescan the Documents directory every time the list is shown so newly
// copied files appear without restarting the app.
- (void) viewWillAppear:(BOOL)animated
{
    // BUGFIX: UIKit requires subclasses to call super in viewWillAppear:.
    [super viewWillAppear:animated];
    self.movieList = [[NSMutableArray alloc] init];
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSFileManager *manager = [NSFileManager defaultManager];
    NSArray *fileList = [manager contentsOfDirectoryAtPath:documentsDirectory error:nil];
    for (NSString *filename in fileList){
        if([filename hasSuffix:_suffix]){
            NSMutableDictionary *movie = [[NSMutableDictionary alloc] init];
            [movie setObject:filename forKey:@"Filename"];
            [movie setObject:[documentsDirectory stringByAppendingString:[@"/" stringByAppendingString:filename]] forKey:@"Path"];
            [self.movieList addObject:movie];
        }
    }
    [self.tableView reloadData];
}
- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}
#pragma mark - Table view data source
- (NSInteger)numberOfSectionsInTableView:(UITableView *)tableView
{
    // Return the number of sections.
    return 1;
}
- (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section
{
    // Return the number of rows in the section.
    return self.movieList.count;
}
- (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath
{
    static NSString *CellIdentifier = @"Cell";
    UITableViewCell *cell;
    if ([tableView respondsToSelector:@selector(dequeueReusableCellWithIdentifier:forIndexPath:)]) {
        cell = [tableView dequeueReusableCellWithIdentifier:CellIdentifier forIndexPath:indexPath];
    } else {
        // pre-iOS 6 fallback: dequeue may return nil, create the cell manually
        cell = [tableView dequeueReusableCellWithIdentifier:CellIdentifier];
        if (cell == nil) {
            cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault reuseIdentifier:CellIdentifier];
        }
    }
    // Configure the cell...
    NSUInteger row = [indexPath row];
    NSDictionary *movie = [self.movieList objectAtIndex:row];
    cell.textLabel.text = [movie objectForKey:@"Filename"];
    return cell;
}
// Override to support editing the table view.
- (void)tableView:(UITableView *)tableView commitEditingStyle:(UITableViewCellEditingStyle)editingStyle forRowAtIndexPath:(NSIndexPath *)indexPath
{
    if (editingStyle == UITableViewCellEditingStyleDelete) {
        // Delete the row from the data source AND the file on disk.
        // BUGFIX: use NSInteger (matches [indexPath row]) instead of int.
        NSInteger index = [indexPath row];
        NSFileManager *manager = [NSFileManager defaultManager];
        [manager removeItemAtPath:[[self.movieList objectAtIndex:index] valueForKey:@"Path"] error:nil];
        [self.movieList removeObjectAtIndex:index];
        [tableView deleteRowsAtIndexPaths:@[indexPath] withRowAnimation:UITableViewRowAnimationFade];
    }
    else if (editingStyle == UITableViewCellEditingStyleInsert) {
        // Create a new instance of the appropriate class, insert it into the array, and add a new row to the table view
    }
}
#pragma mark - Table view delegate
- (void)tableView:(UITableView *) tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath
{
    NSInteger row = [indexPath row];
    NSDictionary *movie = [self.movieList objectAtIndex:row];
    // Publish the chosen path for other screens, then notify the caller.
    [[NSUserDefaults standardUserDefaults] setValue:[movie objectForKey:@"Path"] forKey:@"videoPath"];
    NSString * path = [movie objectForKey:@"Filename"];
    [tableView deselectRowAtIndexPath:indexPath animated:NO];
    [self dismissViewControllerAnimated:FALSE completion:nil];
    if (_tableBlock){
        _tableBlock(path);
    }
}
@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/PlayViewController.h
================================================
//
// PlayViewController.h
// HEVDecoder
//
// Created by Shengbin Meng on 13-2-25.
// Copyright (c) 2013 Peking University. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "MoviePlayer.h"
#import "KSYMoviePlayer.h"
// Full-screen playback controller: owns a movie player and shows decode
// statistics in infoLabel while playing.
@interface PlayViewController : UIViewController
@property (nonatomic, retain) IBOutlet UILabel *infoLabel;   // live decode info
@property (nonatomic, retain) IBOutlet UIButton *doneButton; // stops playback
@property (nonatomic, retain) KSYMoviePlayer *player;        // active player
- (IBAction)doneButtonPressed:(id)sender;
@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/PlayViewController.m
================================================
//
// PlayViewController.m
// HEVDecoder
//
// Created by Shengbin Meng on 13-2-25.
// Copyright (c) 2013 Peking University. All rights reserved.
//
#import "PlayViewController.h"
#import "GLView.h"
@implementation PlayViewController
{
    bool isPlaying; // guards the periodic infoLabel refresh
}
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
    self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
    if (self) {
        // Custom initialization
    }
    return self;
}
// Refresh the on-screen decode info once per second while playing.
- (void) monitorPlaybackTime
{
    if (!isPlaying) {
        return; // stop rescheduling once playback ends
    }
    [self.infoLabel setText:self.player.infoString];
    [self performSelector:@selector(monitorPlaybackTime) withObject:nil afterDelay:1.0];
}
// Create the player chosen in settings, open the selected movie and start
// playback immediately.
- (void)viewDidLoad
{
    [super viewDidLoad];
    NSString *decoder = [[NSUserDefaults standardUserDefaults] valueForKey:@"codec"];
    // Do any additional setup after loading the view from its nib.
    if (self.player == nil) {
        if ([decoder isEqualToString:@"lenthevcdec"]) {
            self.player = [[MoviePlayer alloc] init];
            self.player.infoString = @"lenthevc decoding";
        }
        else {
            self.player = [[KSYMoviePlayer alloc] init];
            self.player.infoString = @"ksc265 decoding";
        }
    }
    NSString * path = [[NSUserDefaults standardUserDefaults] valueForKey:@"videoPath"];
    int ret = [self.player openMovie:path];
    if(ret != 0) {
        UIAlertView * alert = [[UIAlertView alloc] initWithTitle:@"Message" message:@"Get movie data failed! Please check your source or try again." delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
        [alert show];
        return ;
    } else {
        // Render into this controller's GLView-backed view.
        self.player.renderer = ((GLView*)self.view).renderer;
        [self.player setOutputViews:nil:self.infoLabel];
        int ret = [self.player play];
        if(ret != 0) {
            UIAlertView * alert = [[UIAlertView alloc] initWithTitle:@"Message" message:@"Can't play this movie! Please check its format." delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
            [alert show];
            return ;
        }
        isPlaying = YES;
        [self monitorPlaybackTime];
    }
}
// Hide the navigation bar during full-screen playback and restore it after.
- (void) viewWillAppear:(BOOL)animated
{
    [super viewWillAppear:animated];
    [[self.navigationController navigationBar] setHidden:YES];
}
- (void) viewWillDisappear:(BOOL)animated
{
    [super viewWillDisappear:animated];
    [[self.navigationController navigationBar] setHidden:NO];
}
- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}
// Playback is locked to landscape-right.
- (NSUInteger)supportedInterfaceOrientations
{
    return UIInterfaceOrientationMaskLandscapeRight;
}
-(BOOL)shouldAutorotate
{
    return YES;
}
- (UIInterfaceOrientation)preferredInterfaceOrientationForPresentation
{
    return UIInterfaceOrientationLandscapeRight;
}
// Done: stop playback, re-enable the idle timer and leave the screen.
- (IBAction)doneButtonPressed:(id)sender
{
    isPlaying = NO;
    [self.player stop];
    [[UIApplication sharedApplication] setIdleTimerDisabled:NO];
    [self.navigationController popViewControllerAnimated:YES];
}
@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/PlayViewController.xib
================================================
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/SecondViewController.h
================================================
//
// SecondViewController.h
// KSY265CodecDemo_iOS
//
// Created by 江东 on 17/3/17.
// Copyright © 2017年 江东. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "BaseViewController.h"
// Decoder demo tab: picks a .265 file, runs the selected decoder and shows
// the resulting statistics.
@interface SecondViewController : BaseViewController
// Cached list of available movie entries (dictionaries of name/path).
@property (nonatomic, retain) NSMutableArray *movieList;
@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/SecondViewController.m
================================================
//
// SecondViewController.m
// KSY265CodecDemo_iOS
//
// Created by 江东 on 17/3/17.
// Copyright © 2017年 江东. All rights reserved.
//
#import "SecondViewController.h"
#import "SettingsDecoderViewController.h"
#import "DecoderHelperViewController.h"
#import "MoviesViewController.h"
#import "MoviePlayer.h"
#import "KSYMoviePlayer.h"
#import "GLView.h"
#import "qy265dec.h"
#include "lenthevcdec.h"
// Private state for the decoder demo screen.
@interface SecondViewController ()
{
    UILabel *lblDecoder;          // screen title label
    UITextView *infoView;         // scrolling decode-statistics log
    UITextField *decoderFile;     // selected input file name
    UIButton *doneBtn;            // starts decoding
    UIButton *btnSet;             // opens decoder settings
    UIButton *btnHelp;            // opens help screen
    UIButton *selectBtn;          // opens the file browser
    SettingsDecoderViewController *setDecoderVC;
    MoviesViewController *listVC; // .265 file picker
    NSString *outputFlag;         // NOTE(review): appears unused here — confirm
}
@property (strong, nonatomic) IBOutlet UIView *playerView;
@property (weak, nonatomic) IBOutlet GLView *playView; // render target
@property (nonatomic, retain) KSYMoviePlayer *player;  // active player, nil when idle
@end
@implementation SecondViewController
- (void)viewDidLoad {
    [super viewDidLoad];
    [self setupUI];
    self.player = nil;
    setDecoderVC = [[SettingsDecoderViewController alloc] initDefaultCfg];
    // File picker restricted to raw HEVC bitstreams.
    listVC = [[MoviesViewController alloc] initWithSuffix:@".265"];
    //__weak SecondViewController *weakself = self;
    // Picker callback: show the chosen file name in the text field.
    listVC.tableBlock = ^(NSString* filePath){
        NSLog(@"%@", filePath);
        decoderFile.text = filePath;
    };
}
// Build the screen programmatically: settings/help row, file browser row,
// file-name + start row, the GL playback view and the statistics text view.
- (void)setupUI{
    self.view.backgroundColor = [UIColor whiteColor];
    //add set button
    btnSet = [self addButtonWithTitle:@"设置" action:@selector(onSetDecoder:)];
    //add help button
    btnHelp = [self addButtonWithTitle:@"帮助" action:@selector(onHelp:)];
    //add decoder text
    lblDecoder = [self addLable:@"KSC265解码器"];
    [self addViews:@[btnSet, lblDecoder, btnHelp] withFrame:CGRectMake(0, 40, self.view.frame.size.width, 40)];
    //add browse file button
    selectBtn = [self addButtonWithTitle:@"浏览(.265)文件" action:@selector(didClickSelectBtn:)];
    [self addViews:@[selectBtn] withFrame:CGRectMake(0, 120, self.view.frame.size.width/3, 40)];
    //input decoder file
    decoderFile = [self addTextField:NULL ];
    doneBtn = [self addButtonWithTitle:@"确定" action:@selector(onDone:)];
    [self addViews2:@[decoderFile,doneBtn] withFrame:CGRectMake(0, 180, self.view.frame.size.width, 40)];
    _playView.frame = CGRectMake(0, 240, self.view.frame.size.width, self.view.frame.size.height/4);
    // info
    infoView = [[UITextView alloc] init];
    infoView.editable = NO;
    infoView.textAlignment = NSTextAlignmentLeft;
    infoView.backgroundColor = [UIColor colorWithWhite:0.8 alpha:0.3];
    infoView.font = [UIFont systemFontOfSize:13];
    infoView.layer.cornerRadius = 2;
    infoView.clipsToBounds = YES;
    infoView.layoutManager.allowsNonContiguousLayout = NO;
    [self addViews:@[infoView] withFrame:CGRectMake(0, self.view.frame.size.height/4 + 280, self.view.frame.size.width, self.view.frame.size.height- (self.view.frame.size.height/4 + 280) - 20)];
}
// Poll once per second until the player reports the decode finished.
- (void) monitorPlaybackTime
{
    if (self.player.decodeEnd) {
        [self stopPlay];
        return;
    }
    [self performSelector:@selector(monitorPlaybackTime) withObject:nil afterDelay:1.0];
}
// Create the configured player, open the file and start decoding; on any
// failure re-enable the UI buttons so the user can try again.
- (void)startPlay:(NSString *) filePath
{
    NSString *decoder = [[NSUserDefaults standardUserDefaults] valueForKey:@"codec"];
    if (self.player == nil) {
        if ([decoder isEqualToString:@"lenthevcdec"]) {
            self.player = [[MoviePlayer alloc] init];
            NSString* string = [NSString stringWithFormat:@"%d" , lenthevcdec_version()];
            [[NSUserDefaults standardUserDefaults] setValue:string forKey:@"version"];
        }
        else {
            self.player = [[KSYMoviePlayer alloc] init];
            NSString* string = [NSString stringWithFormat:@"%s" , strLibQy265Version];
            [[NSUserDefaults standardUserDefaults] setValue:string forKey:@"version"];
        }
    }
    int ret = [self.player openMovie:filePath];
    if(ret != 0) {
        UIAlertView * alert = [[UIAlertView alloc] initWithTitle:@"Message" message:@"Get movie data failed! Please check your source or try again." delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
        [alert show];
        doneBtn.enabled = YES;
        btnSet.enabled = YES;
        selectBtn.enabled = YES;
        return ;
    } else {
        // "-1 (off)" disables rendering entirely; otherwise size the GL layer.
        NSString *fps = [[NSUserDefaults standardUserDefaults] valueForKey:@"renderFPS"];
        if ([fps isEqualToString:@"-1 (off)"]) {
            self.playView.hidden = YES;
        }
        else{
            self.playView.hidden = NO;
            [_playView.renderer resizeFromLayer:(CAEAGLLayer*)self.playView.layer];
        }
        self.player.renderer = _playView.renderer;
        int ret = [self.player play];
        if(ret != 0) {
            UIAlertView * alert = [[UIAlertView alloc] initWithTitle:@"Message" message:@"Can't play this movie! Please check its format." delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
            [alert show];
            doneBtn.enabled = YES;
            btnSet.enabled = YES;
            selectBtn.enabled = YES;
            return ;
        }
        [self monitorPlaybackTime];
    }
}
// Append this run's statistics to the log view, release the player and
// re-enable the UI.
- (void)stopPlay{
    NSString *decoder = [[NSUserDefaults standardUserDefaults] valueForKey:@"codec"];
    NSString *threadNum = [[NSUserDefaults standardUserDefaults] valueForKey:@"threadNum"];
    NSString *renderFPS = [[NSUserDefaults standardUserDefaults] valueForKey:@"renderFPS"];
    NSString *version = [[NSUserDefaults standardUserDefaults] valueForKey:@"version"];
    // NOTE(review): %ld is used below with NSUInteger/unknown-typed values;
    // on 64-bit this is a format mismatch warning — confirm and cast to long.
    NSUInteger threads = [threadNum intValue];
    if (self.player.out_file_string){
        infoView.text = [NSString stringWithFormat:@"%@\n解码器版本:%@\n解码参数:%@ -b %@ -o %@ -threads %ld\n\n分辨率:%@\n渲染帧率:%@\n线程数:%@\n解码时间:%.2lf s\n解码帧数:%ld\n解码速度:%.2lf f/s\n\n",
                         infoView.text,
                         version,
                         decoder,
                         decoderFile.text,
                         [self.player.out_file_string lastPathComponent],
                         threads,
                         NSStringFromCGSize(CGSizeMake(self.player.width, self.player.height)),
                         renderFPS,
                         threadNum,
                         self.player.real_time,
                         self.player.frameNum,
                         self.player.realFPS];
    }else{
        infoView.text = [NSString stringWithFormat:@"%@\n解码器版本:%@\n解码参数:%@ -b %@ -threads %ld\n\n分辨率:%@\n渲染帧率:%@\n线程数:%@\n解码时间:%.2lf s\n解码帧数:%ld\n解码速度:%.2lf f/s\n\n",
                         infoView.text,
                         version,
                         decoder,
                         decoderFile.text,
                         threads,
                         NSStringFromCGSize(CGSizeMake(self.player.width, self.player.height)),
                         renderFPS,
                         threadNum,
                         self.player.real_time,
                         self.player.frameNum,
                         self.player.realFPS];
    }
    // Keep the newest statistics visible.
    [infoView scrollRangeToVisible:NSMakeRange(infoView.text.length, 1)];
    [self.player stop];
    self.player = nil;
    doneBtn.enabled = YES;
    btnSet.enabled = YES;
    selectBtn.enabled = YES;
}
#pragma mark - actions
- (void)onSetDecoder:(UIButton *)btn {
    [self presentViewController:setDecoderVC animated:true completion:nil];
}
- (void)onHelp:(UIButton *)btn {
    DecoderHelperViewController *decoderHelperVC = [[DecoderHelperViewController alloc] init];
    [self presentViewController:decoderHelperVC animated:true completion:nil];
}
- (void)didClickSelectBtn:(UIButton *)send{
    UINavigationController *naVC = [[UINavigationController alloc]initWithRootViewController: listVC];
    [self presentViewController:naVC animated:YES completion:nil];
}
// Start decoding the file named in the text field (resolved relative to
// the app's Documents directory); disable the UI while running.
- (void)onDone:(UIButton *)btn {
    btn.enabled = NO;
    btnSet.enabled = NO;
    selectBtn.enabled = NO;
    [decoderFile resignFirstResponder];
    NSString *dir = [NSHomeDirectory() stringByAppendingString:@"/Documents/"];
    NSString *decFile = [dir stringByAppendingPathComponent:decoderFile.text];
    [self startPlay:decFile];
}
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}
@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/SettingsDecoderViewController.h
================================================
//
// SettingsViewController.h
// IPGateway
//
// Created by Meng Shengbin on 2/1/12.
// Copyright (c) 2012 Peking University. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "BaseViewController.h"
// Decoder settings screen: writes the chosen codec, thread count, render
// FPS and YUV-output flag into NSUserDefaults.
@interface SettingsDecoderViewController : BaseViewController
@property UILabel *lblVideoDecoderUI;
@property UILabel *lblDecoderThreadNumUI;
@property UILabel *lblRenderFpsUI;
@property UILabel *lblOutputFlagUI;
@property UISegmentedControl *videoDecoderUI;    // codec choice
@property UISegmentedControl *decoderThreadNumUI; // decoding thread count
@property UISegmentedControl *renderFpsUI;        // render frame rate
@property UISegmentedControl *outputFlagUI;       // dump YUV yes/no
// Initializes and writes the default decoder configuration to NSUserDefaults.
- (id)initDefaultCfg;
@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/SettingsDecoderViewController.m
================================================
//
// SettingsViewController.m
// IPGateway
//
// Created by Meng Shengbin on 2/1/12.
// Copyright (c) 2012 Peking University. All rights reserved.
//
#import "SettingsDecoderViewController.h"
@implementation SettingsDecoderViewController {
    // Option lists backing the segmented controls below.
    UILabel *lblSetting;
    UIButton *btnDone;
    NSArray *arrayOfStrings;           // thread-count options
    NSArray *arrayOfStringsFPS;        // render-FPS options
    NSArray *arrayOfStringsOutputFlag; // YUV-dump options
    NSArray *decStrings;               // codec options
}
// Build the option lists and seed NSUserDefaults with the first entry of
// each so the rest of the app always finds a value.
- (id)initDefaultCfg {
    self = [super init];
    decStrings = [NSArray arrayWithObjects:@"ksc265dec", @"lenthevcdec", nil];
    arrayOfStrings = [NSArray arrayWithObjects:@"0 (auto)", @"1", @"2", @"4", nil];
    arrayOfStringsFPS = [NSArray arrayWithObjects:@"0 (full speed)", @"24", @"-1 (off)", nil];
    arrayOfStringsOutputFlag = [NSArray arrayWithObjects:@"NO", @"YES", nil];
    [[NSUserDefaults standardUserDefaults] setValue:[decStrings objectAtIndex:0] forKey:@"codec"];
    [[NSUserDefaults standardUserDefaults] setValue:[arrayOfStrings objectAtIndex:0] forKey:@"threadNum"];
    [[NSUserDefaults standardUserDefaults] setValue:[arrayOfStringsFPS objectAtIndex:0] forKey:@"renderFPS"];
    [[NSUserDefaults standardUserDefaults] setValue:[arrayOfStringsOutputFlag objectAtIndex:0] forKey:@"outputFlag"];
    return self;
}
- (void)viewDidLoad {
    [super viewDidLoad];
    [self setupUI];
}
// Lay out one labelled segmented control per setting plus a confirm button.
- (void)setupUI{
    self.view.backgroundColor = [UIColor whiteColor];
    //add set title text
    lblSetting = [self addLable:@"设置"];
    [self addViews:@[lblSetting] withFrame:CGRectMake(self.view.frame.size.width/3, 40, self.view.frame.size.width/3, 40)];
    //decoder
    _lblVideoDecoderUI = [self addLable:@"视频解码器"];
    _videoDecoderUI = [self addSegCtrlWithItems:decStrings];
    [self addViews3:@[_lblVideoDecoderUI, _videoDecoderUI] withFrame:CGRectMake(0, 120, self.view.frame.size.width, 40)];
    //decoder threads
    _lblDecoderThreadNumUI = [self addLable:@"解码线程数"];
    _decoderThreadNumUI = [self addSegCtrlWithItems:arrayOfStrings];
    [self addViews3:@[_lblDecoderThreadNumUI, _decoderThreadNumUI] withFrame:CGRectMake(0, 200, self.view.frame.size.width, 40)];
    //render fps
    _lblRenderFpsUI = [self addLable:@"渲染帧率"];
    _renderFpsUI = [self addSegCtrlWithItems:arrayOfStringsFPS];
    [self addViews3:@[_lblRenderFpsUI, _renderFpsUI] withFrame:CGRectMake(0, 280, self.view.frame.size.width, 40)];
    //output yuv settings
    _lblOutputFlagUI = [self addLable:@"输出yuv"];
    _outputFlagUI = [self addSegCtrlWithItems:arrayOfStringsOutputFlag];
    [self addViews3:@[_lblOutputFlagUI, _outputFlagUI] withFrame:CGRectMake(0, 360, self.view.frame.size.width, 40)];
    //add done button
    btnDone = [self addButtonWithTitle:@"确定" action:@selector(onDone:)];
    [self addViews:@[btnDone] withFrame:CGRectMake(self.view.frame.size.width*2/3, 440, self.view.frame.size.width/3, 40)];
}
#pragma mark - actions
// Persist every selection to NSUserDefaults, then close the screen.
- (void)onDone:(UIButton *)btn {
    [[NSUserDefaults standardUserDefaults] setValue:[decStrings objectAtIndex:_videoDecoderUI.selectedSegmentIndex] forKey:@"codec"];
    [[NSUserDefaults standardUserDefaults] setValue:[arrayOfStrings objectAtIndex:_decoderThreadNumUI.selectedSegmentIndex] forKey:@"threadNum"];
    [[NSUserDefaults standardUserDefaults] setValue:[arrayOfStringsFPS objectAtIndex:_renderFpsUI.selectedSegmentIndex] forKey:@"renderFPS"];
    [[NSUserDefaults standardUserDefaults] setValue:[arrayOfStringsOutputFlag objectAtIndex:_outputFlagUI.selectedSegmentIndex] forKey:@"outputFlag"];
    /*
     NSString *decoder = [[NSUserDefaults standardUserDefaults] valueForKey:@"codec"];
     NSString *threadNum = [[NSUserDefaults standardUserDefaults] valueForKey:@"threadNum"];
     NSString *renderFPS = [[NSUserDefaults standardUserDefaults] valueForKey:@"renderFPS"];
     NSLog(@"set cfg:\n codec %@, threadNum %@, renderFPS %@", decoder, threadNum, renderFPS);
     */
    [self dismissViewControllerAnimated:FALSE completion:nil];
}
#pragma mark - tool funcs
// Create a segmented control with the given items, preselect the first.
- (UISegmentedControl *)addSegCtrlWithItems: (NSArray *) items {
    UISegmentedControl * segC;
    segC = [[UISegmentedControl alloc] initWithItems:items];
    segC.selectedSegmentIndex = 0;
    segC.layer.cornerRadius = 5;
    segC.backgroundColor = [UIColor lightGrayColor];
    [self.view addSubview:segC];
    return segC;
}
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}
@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/SettingsEncoderViewController.h
================================================
//
// SettingsViewController.h
// IPGateway
//
// Created by Meng Shengbin on 2/1/12.
// Copyright (c) 2012 Peking University. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "BaseViewController.h"
#import "AYHCustomComboBox.h"
// Encoder settings screen: encoder choice, profile, latency mode,
// resolution, FPS, bitrate and thread count, persisted via NSUserDefaults.
@interface SettingsEncoderViewController : BaseViewController
@property UILabel *lblVideoEncoderUI;
@property UILabel *lblEncoderProfileUI;
@property UILabel *lblEncoderDelayedUI;
@property UILabel *lblResolutionUI;
@property UILabel *lblFpsUI;
@property UILabel *lblBitRateUI;
@property UILabel *lblTheadNumUI;
@property UISegmentedControl *videoEncoderUI;   // encoder choice
@property UISegmentedControl *encoderDelayedUI; // latency mode
@property UITextField *fps;
@property UITextField *theadNum;
@property UITextField *bitRate;
@property UITextField *resolutionText; // free-form resolution entry
@property UIButton * resolutionButton;
@property AYHCustomComboBox* resolutionComboBox; // preset resolutions
@property UIButton * profileButton;
@property AYHCustomComboBox* profileComboBox;    // preset profiles
// Initializes and writes the default encoder configuration to NSUserDefaults.
- (id)initDefaultCfg;
@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/SettingsEncoderViewController.m
================================================
//
// SettingsViewController.m
// IPGateway
//
// Created by Meng Shengbin on 2/1/12.
// Copyright (c) 2012 Peking University. All rights reserved.
//
#import "SettingsEncoderViewController.h"
#define KINTEVAL 30
@implementation SettingsEncoderViewController {
NSArray *arrayOfStringsEnc;
NSArray *arrayOfStringsProfile;
NSArray *arrayOfStringsDelay;
BOOL isVisibleResolution;
BOOL isVisibleProfile;
}
// Build the option lists and seed NSUserDefaults with the demo's default
// encoder configuration (ksc265enc, 1280*720, 15 fps, auto threads,
// "veryfast" profile, "offline" latency mode).
- (id)initDefaultCfg {
    self = [super init];
    arrayOfStringsEnc = [NSArray arrayWithObjects:@"ksc265enc", @"x264", nil];
    arrayOfStringsProfile = [NSArray arrayWithObjects:@"superfast",@"veryfast",@"fast",@"medium",@"slow",@"veryslow",@"placebo", nil];
    arrayOfStringsDelay = [NSArray arrayWithObjects:@"zerolatency",@"livestreaming",@"offline", nil];
    [[NSUserDefaults standardUserDefaults] setValue:[arrayOfStringsEnc objectAtIndex:0] forKey:@"encoder"];
    [[NSUserDefaults standardUserDefaults] setValue:@"1280*720" forKey:@"resolution"];
    [[NSUserDefaults standardUserDefaults] setValue:@"15" forKey:@"fps"];
    [[NSUserDefaults standardUserDefaults] setValue:@"0" forKey:@"threads"];
    [[NSUserDefaults standardUserDefaults] setValue:[arrayOfStringsProfile objectAtIndex:1] forKey:@"profile"];
    [[NSUserDefaults standardUserDefaults] setValue:[arrayOfStringsDelay objectAtIndex:2] forKey:@"delayed"];
    // both drop-down combo boxes start collapsed
    isVisibleResolution = NO;
    isVisibleProfile = NO;
    return self;
}
// Standard lifecycle hook: build the settings UI once the view is loaded.
- (void)viewDidLoad {
    [super viewDidLoad];
    [self setupUI];
}
- (void)setupUI{
self.view.backgroundColor = [UIColor whiteColor];
//add set title text
UILabel *lblSetting = [self addLable:@"设置"];
[self addViews:@[lblSetting] withFrame:CGRectMake(self.view.frame.size.width/3, KINTEVAL, self.view.frame.size.width/3, KINTEVAL)];
//encoder
_lblVideoEncoderUI = [self addLable:@"视频编码器"];
_videoEncoderUI = [self addSegCtrlWithItems:arrayOfStringsEnc];
[self addViews3:@[_lblVideoEncoderUI, _videoEncoderUI] withFrame:CGRectMake(0, KINTEVAL*3, self.view.frame.size.width, KINTEVAL)];
//Resolution
_lblResolutionUI = [self addLable:@"分辨率"];
_resolutionButton = [self addButtonWithTitle:@"1280*720" action:@selector(buttonclick:)];
_resolutionButton.tag = 100;
[self addViews3:@[_lblResolutionUI, _resolutionButton] withFrame:CGRectMake(0, KINTEVAL*5, self.view.frame.size.width, KINTEVAL)];
//Resolution 自定义
_resolutionText = [self addTextField:@""];
_resolutionText.delegate = self;
[_resolutionText removeFromSuperview];
_resolutionComboBox = [[AYHCustomComboBox alloc] initWithFrame:CGRectMake(_resolutionButton.frame.origin.x, _resolutionButton.frame.origin.y+_resolutionButton.frame.size.height, _resolutionButton.frame.size.width, 100) DataCount:4 NotificationName:@"AYHComboBoxNationChanged"];
[_resolutionComboBox setTag:200];
[_resolutionComboBox setDelegate:self];
[_resolutionComboBox addItemsData: [[NSArray alloc] initWithObjects:@"1280*720",@"960*540",@"640*360",@"640*480",@"自定义",nil]];
[_resolutionComboBox flushData];
//fps
_lblFpsUI = [self addLable:@"帧率"];
_fps = [self addTextField:@"15"];
[self addViews3:@[_lblFpsUI, _fps] withFrame:CGRectMake(0, KINTEVAL*7, self.view.frame.size.width, KINTEVAL)];
//bitrate
_lblBitRateUI = [self addLable:@"码率(kbps)"];
_bitRate =[self addTextField:@"800"];
[self addViews3:@[_lblBitRateUI, _bitRate] withFrame:CGRectMake(0, KINTEVAL*9, self.view.frame.size.width, KINTEVAL)];
//encoder threads
_lblTheadNumUI = [self addLable:@"编码线程"];
_theadNum = [self addTextField:@"0" ];
[self addViews3:@[_lblTheadNumUI, _theadNum] withFrame:CGRectMake(0, KINTEVAL*11, self.view.frame.size.width, KINTEVAL)];
//encoder profile
_lblEncoderProfileUI = [self addLable:@"编码档次"];
_profileButton = [self addButtonWithTitle:@"veryfast" action:@selector(buttonclick:)];
_profileButton.tag = 101;
[self addViews3:@[_lblEncoderProfileUI, _profileButton] withFrame:CGRectMake(0, KINTEVAL*13, self.view.frame.size.width, KINTEVAL)];
_profileComboBox = [[AYHCustomComboBox alloc] initWithFrame:CGRectMake(_profileButton.frame.origin.x, _profileButton.frame.origin.y+_profileButton.frame.size.height, _profileButton.frame.size.width, 100) DataCount:4 NotificationName:@"AYHComboBoxNationChanged"];
[_profileComboBox setTag:201];
[_profileComboBox setDelegate:self];
[_profileComboBox addItemsData:arrayOfStringsProfile];
[_profileComboBox flushData];
//encoder delayed
_lblEncoderDelayedUI = [self addLable:@"延时"];
_encoderDelayedUI = [self addSegCtrlWithItems:arrayOfStringsDelay];
_encoderDelayedUI.selectedSegmentIndex = 2;
[self addViews3:@[_lblEncoderDelayedUI, _encoderDelayedUI] withFrame:CGRectMake(0, KINTEVAL*15, self.view.frame.size.width, KINTEVAL)];
//add done button
UIButton *btnDone = [self addButtonWithTitle:@"确定" action:@selector(onDone:)];
[self addViews:@[btnDone] withFrame:CGRectMake(self.view.frame.size.width*2/3, KINTEVAL*17, self.view.frame.size.width/3, KINTEVAL)];
UITapGestureRecognizer *tapGes = [[UITapGestureRecognizer alloc] initWithTarget:self.view action:@selector(endEditing:)];
tapGes.delegate = self;
[self.view addGestureRecognizer:tapGes];
}
#pragma mark AYHCustomComboBoxDelegate
- (void) CustomComboBoxChanged:(id) sender SelectedItem:(NSString *)selectedItem
{
AYHCustomComboBox* ccb = (AYHCustomComboBox*) sender;
if ([ccb tag]==200)
{
if([selectedItem isEqualToString:@"自定义"]){
[_resolutionButton removeFromSuperview];
[_resolutionComboBox removeFromSuperview];
[self addViews3:@[_lblResolutionUI, _resolutionText] withFrame:CGRectMake(0, KINTEVAL*5, self.view.frame.size.width, KINTEVAL)];
}else{
[_resolutionButton setTitle:selectedItem forState:UIControlStateNormal];
[_resolutionComboBox removeFromSuperview];
}
isVisibleResolution = NO;
}
else if([ccb tag]==201)
{
[_profileButton setTitle:selectedItem forState:UIControlStateNormal];
[_profileComboBox removeFromSuperview];
isVisibleProfile = NO;
}
}
#pragma mark - actions
- (void)onDone:(UIButton *)btn {
[[NSUserDefaults standardUserDefaults] setValue:[arrayOfStringsEnc objectAtIndex:_videoEncoderUI.selectedSegmentIndex] forKey:@"encoder"];
[[NSUserDefaults standardUserDefaults] setValue:_fps.text forKey:@"fps"];
[[NSUserDefaults standardUserDefaults] setValue:_theadNum.text forKey:@"threads"];
[[NSUserDefaults standardUserDefaults] setValue:_bitRate.text forKey:@"bitRate"];
[[NSUserDefaults standardUserDefaults] setValue:_profileButton.titleLabel.text forKey:@"profile"];
[[NSUserDefaults standardUserDefaults] setValue:[arrayOfStringsDelay objectAtIndex:_encoderDelayedUI.selectedSegmentIndex] forKey:@"delayed"];
if([self.view.subviews containsObject:_resolutionText])
{
[_resolutionText removeFromSuperview];
[self addViews3:@[_lblResolutionUI, _resolutionButton] withFrame:CGRectMake(0, KINTEVAL*5, self.view.frame.size.width, KINTEVAL)];
if(_resolutionText.text.length)
{
[[NSUserDefaults standardUserDefaults] setValue:_resolutionText.text forKey:@"resolution"];
}
}
else{
[[NSUserDefaults standardUserDefaults] setValue:_resolutionButton.titleLabel.text forKey:@"resolution"];
}
[self dismissViewControllerAnimated:FALSE completion:nil];
}
-(void)buttonclick:(UIButton *)sender {
UIButton* button = (UIButton*) sender;
if ([button tag]==100)
{
if (isVisibleResolution==NO)
{
[self.view addSubview:_resolutionComboBox];
isVisibleResolution = YES;
}
}
else if ([button tag]==101)
{
if (isVisibleProfile==NO)
{
[self.view addSubview:_profileComboBox];
isVisibleProfile = YES;
}
}
}
#pragma mark - tool funcs
- (UISegmentedControl *)addSegCtrlWithItems: (NSArray *) items {
UISegmentedControl * segC;
segC = [[UISegmentedControl alloc] initWithItems:items];
segC.selectedSegmentIndex = 0;
segC.layer.cornerRadius = 5;
segC.backgroundColor = [UIColor lightGrayColor];
[self.view addSubview:segC];
return segC;
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer shouldReceiveTouch:(UITouch *)touch
{
if ([NSStringFromClass([touch.view class]) isEqualToString:@"UITableViewCellContentView"]) {
return NO;
}
return YES;
}
- (void)textFieldDidEndEditing:(UITextField *)textField
{
if(textField == _resolutionText)
{
[[NSUserDefaults standardUserDefaults] setValue:_resolutionText.text forKey:@"resolution"];
}
}
@end
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS/main.m
================================================
//
// main.m
// KSY265CodecDemo_iOS
//
// Created by 江东 on 17/3/17.
// Copyright © 2017年 江东. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "AppDelegate.h"

// App entry point: hands control to UIKit, naming AppDelegate as the
// application delegate class. (The UIKit header name was missing from the
// bare `#import` line — restored so the file compiles.)
int main(int argc, char * argv[]) {
    @autoreleasepool {
        return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
    }
}
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS.xcodeproj/project.pbxproj
================================================
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 46;
objects = {
/* Begin PBXBuildFile section */
055AEDF41E7BD4280006FE5D /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 055AEDF31E7BD4280006FE5D /* main.m */; };
055AEDF71E7BD4280006FE5D /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 055AEDF61E7BD4280006FE5D /* AppDelegate.m */; };
055AEDFA1E7BD4280006FE5D /* FirstViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 055AEDF91E7BD4280006FE5D /* FirstViewController.m */; };
055AEDFD1E7BD4280006FE5D /* SecondViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 055AEDFC1E7BD4280006FE5D /* SecondViewController.m */; };
055AEE001E7BD4280006FE5D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 055AEDFE1E7BD4280006FE5D /* Main.storyboard */; };
055AEE021E7BD4280006FE5D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 055AEE011E7BD4280006FE5D /* Assets.xcassets */; };
055AEE051E7BD4280006FE5D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 055AEE031E7BD4280006FE5D /* LaunchScreen.storyboard */; };
055AEE2D1E7BE7350006FE5D /* BaseViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 055AEE2C1E7BE7350006FE5D /* BaseViewController.m */; };
058715AE1E836C3C008D8860 /* libx264.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 058715AD1E836C3C008D8860 /* libx264.a */; };
058715B11E836FE8008D8860 /* MovieEncoder.m in Sources */ = {isa = PBXBuildFile; fileRef = 058715B01E836FE8008D8860 /* MovieEncoder.m */; };
058715B61E83BDB9008D8860 /* libqydecoder.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 058715B31E83BDB9008D8860 /* libqydecoder.a */; };
058715B71E83BDB9008D8860 /* libqyencoder.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 058715B41E83BDB9008D8860 /* libqyencoder.a */; };
058715BA1E83BEED008D8860 /* KSYMovieEncoder.m in Sources */ = {isa = PBXBuildFile; fileRef = 058715B91E83BEED008D8860 /* KSYMovieEncoder.m */; };
05C45C861E8E02C60045FE79 /* libqycommon.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 05C45C851E8E02C60045FE79 /* libqycommon.a */; };
05C45C8C1E8E06A10045FE79 /* libz.tbd in Frameworks */ = {isa = PBXBuildFile; fileRef = 05C45C8B1E8E06A10045FE79 /* libz.tbd */; };
05F746781E7E78820076E6EB /* SettingsDecoderViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 05F746751E7E78820076E6EB /* SettingsDecoderViewController.m */; };
05F746791E7E78820076E6EB /* SettingsEncoderViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 05F746771E7E78820076E6EB /* SettingsEncoderViewController.m */; };
05F7467C1E7E81E60076E6EB /* EncoderHelperViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 05F7467B1E7E81E60076E6EB /* EncoderHelperViewController.m */; };
05F7467F1E7E87D10076E6EB /* DecoderHelperViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 05F7467E1E7E87D10076E6EB /* DecoderHelperViewController.m */; };
05F746851E7FB53C0076E6EB /* MoviesViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 05F746841E7FB53C0076E6EB /* MoviesViewController.m */; };
05F7468B1E7FBDBA0076E6EB /* KSYMoviePlayer.m in Sources */ = {isa = PBXBuildFile; fileRef = 05F7468A1E7FBDBA0076E6EB /* KSYMoviePlayer.m */; };
05F7468E1E7FBDF70076E6EB /* MoviePlayer.m in Sources */ = {isa = PBXBuildFile; fileRef = 05F7468D1E7FBDF70076E6EB /* MoviePlayer.m */; };
05F746971E7FBE8A0076E6EB /* OpenGLES.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 05F746961E7FBE8A0076E6EB /* OpenGLES.framework */; };
05F746991E7FBECB0076E6EB /* liblenthevcdec.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 05F746981E7FBECB0076E6EB /* liblenthevcdec.a */; };
05F7469B1E7FBF230076E6EB /* QuartzCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 05F7469A1E7FBF230076E6EB /* QuartzCore.framework */; };
05F746A91E7FC1520076E6EB /* UIKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 05F746A81E7FC1520076E6EB /* UIKit.framework */; };
05F746AB1E7FC15B0076E6EB /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 05F746AA1E7FC15B0076E6EB /* Foundation.framework */; };
05F746AD1E7FC1680076E6EB /* CoreGraphics.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 05F746AC1E7FC1680076E6EB /* CoreGraphics.framework */; };
05F746BE1E7FCC890076E6EB /* GLRenderer.m in Sources */ = {isa = PBXBuildFile; fileRef = 05F746BB1E7FCC890076E6EB /* GLRenderer.m */; };
05F746BF1E7FCC890076E6EB /* GLView.m in Sources */ = {isa = PBXBuildFile; fileRef = 05F746BD1E7FCC890076E6EB /* GLView.m */; };
05F746C11E7FCDFE0076E6EB /* libc++.tbd in Frameworks */ = {isa = PBXBuildFile; fileRef = 05F746C01E7FCDFE0076E6EB /* libc++.tbd */; };
F28F15871E88EB6600E1A739 /* 1280x720_15.yuv in Resources */ = {isa = PBXBuildFile; fileRef = F28F15851E88EB6600E1A739 /* 1280x720_15.yuv */; };
F28F15881E88EB6600E1A739 /* 640x480_15.yuv in Resources */ = {isa = PBXBuildFile; fileRef = F28F15861E88EB6600E1A739 /* 640x480_15.yuv */; };
F28F158A1E88EFF700E1A739 /* 960x540_15.yuv in Resources */ = {isa = PBXBuildFile; fileRef = F28F15891E88EFF700E1A739 /* 960x540_15.yuv */; };
F28F159E1E890F7B00E1A739 /* AYHCustomComboBox.m in Sources */ = {isa = PBXBuildFile; fileRef = F28F159C1E890F7B00E1A739 /* AYHCustomComboBox.m */; };
/* End PBXBuildFile section */
/* Begin PBXContainerItemProxy section */
055AEE0C1E7BD4280006FE5D /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = 055AEDE71E7BD4280006FE5D /* Project object */;
proxyType = 1;
remoteGlobalIDString = 055AEDEE1E7BD4280006FE5D;
remoteInfo = KSY265CodecDemo_iOS;
};
055AEE171E7BD4290006FE5D /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = 055AEDE71E7BD4280006FE5D /* Project object */;
proxyType = 1;
remoteGlobalIDString = 055AEDEE1E7BD4280006FE5D;
remoteInfo = KSY265CodecDemo_iOS;
};
/* End PBXContainerItemProxy section */
/* Begin PBXFileReference section */
055AEDEF1E7BD4280006FE5D /* KSY265CodecDemo_iOS.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = KSY265CodecDemo_iOS.app; sourceTree = BUILT_PRODUCTS_DIR; };
055AEDF31E7BD4280006FE5D /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; };
055AEDF51E7BD4280006FE5D /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; };
055AEDF61E7BD4280006FE5D /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; };
055AEDF81E7BD4280006FE5D /* FirstViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FirstViewController.h; sourceTree = ""; };
055AEDF91E7BD4280006FE5D /* FirstViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FirstViewController.m; sourceTree = ""; };
055AEDFB1E7BD4280006FE5D /* SecondViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SecondViewController.h; sourceTree = ""; };
055AEDFC1E7BD4280006FE5D /* SecondViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = SecondViewController.m; sourceTree = ""; };
055AEDFF1E7BD4280006FE5D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; };
055AEE011E7BD4280006FE5D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; };
055AEE041E7BD4280006FE5D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; };
055AEE061E7BD4280006FE5D /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; };
055AEE0B1E7BD4280006FE5D /* KSY265CodecDemo_iOSTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = KSY265CodecDemo_iOSTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
055AEE161E7BD4290006FE5D /* KSY265CodecDemo_iOSUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = KSY265CodecDemo_iOSUITests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
055AEE2B1E7BE7350006FE5D /* BaseViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = BaseViewController.h; sourceTree = ""; };
055AEE2C1E7BE7350006FE5D /* BaseViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = BaseViewController.m; sourceTree = ""; };
058715AD1E836C3C008D8860 /* libx264.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libx264.a; path = x264/libx264.a; sourceTree = ""; };
058715AF1E836FE8008D8860 /* MovieEncoder.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = MovieEncoder.h; path = KSY265CodecDemo_iOS/MovieEncoder.h; sourceTree = ""; };
058715B01E836FE8008D8860 /* MovieEncoder.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = MovieEncoder.m; path = KSY265CodecDemo_iOS/MovieEncoder.m; sourceTree = ""; };
058715B31E83BDB9008D8860 /* libqydecoder.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libqydecoder.a; path = ksy265codec/libqydecoder.a; sourceTree = ""; };
058715B41E83BDB9008D8860 /* libqyencoder.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libqyencoder.a; path = ksy265codec/libqyencoder.a; sourceTree = ""; };
058715B81E83BEED008D8860 /* KSYMovieEncoder.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = KSYMovieEncoder.h; path = KSY265CodecDemo_iOS/KSYMovieEncoder.h; sourceTree = ""; };
058715B91E83BEED008D8860 /* KSYMovieEncoder.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = KSYMovieEncoder.m; path = KSY265CodecDemo_iOS/KSYMovieEncoder.m; sourceTree = ""; };
05C45C851E8E02C60045FE79 /* libqycommon.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libqycommon.a; path = ksy265codec/libqycommon.a; sourceTree = ""; };
05C45C8B1E8E06A10045FE79 /* libz.tbd */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.text-based-dylib-definition"; name = libz.tbd; path = usr/lib/libz.tbd; sourceTree = SDKROOT; };
05F746741E7E78820076E6EB /* SettingsDecoderViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SettingsDecoderViewController.h; sourceTree = ""; };
05F746751E7E78820076E6EB /* SettingsDecoderViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = SettingsDecoderViewController.m; sourceTree = ""; };
05F746761E7E78820076E6EB /* SettingsEncoderViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SettingsEncoderViewController.h; sourceTree = ""; };
05F746771E7E78820076E6EB /* SettingsEncoderViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = SettingsEncoderViewController.m; sourceTree = ""; };
05F7467A1E7E81E60076E6EB /* EncoderHelperViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = EncoderHelperViewController.h; sourceTree = ""; };
05F7467B1E7E81E60076E6EB /* EncoderHelperViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = EncoderHelperViewController.m; sourceTree = ""; };
05F7467D1E7E87D10076E6EB /* DecoderHelperViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = DecoderHelperViewController.h; sourceTree = ""; };
05F7467E1E7E87D10076E6EB /* DecoderHelperViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = DecoderHelperViewController.m; sourceTree = ""; };
05F746831E7FB53C0076E6EB /* MoviesViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MoviesViewController.h; sourceTree = ""; };
05F746841E7FB53C0076E6EB /* MoviesViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = MoviesViewController.m; sourceTree = ""; };
05F746891E7FBDBA0076E6EB /* KSYMoviePlayer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = KSYMoviePlayer.h; path = KSY265CodecDemo_iOS/KSYMoviePlayer.h; sourceTree = ""; };
05F7468A1E7FBDBA0076E6EB /* KSYMoviePlayer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = KSYMoviePlayer.m; path = KSY265CodecDemo_iOS/KSYMoviePlayer.m; sourceTree = ""; };
05F7468C1E7FBDF70076E6EB /* MoviePlayer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = MoviePlayer.h; path = KSY265CodecDemo_iOS/MoviePlayer.h; sourceTree = ""; };
05F7468D1E7FBDF70076E6EB /* MoviePlayer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = MoviePlayer.m; path = KSY265CodecDemo_iOS/MoviePlayer.m; sourceTree = ""; };
05F746961E7FBE8A0076E6EB /* OpenGLES.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = OpenGLES.framework; path = System/Library/Frameworks/OpenGLES.framework; sourceTree = SDKROOT; };
05F746981E7FBECB0076E6EB /* liblenthevcdec.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = liblenthevcdec.a; path = lenthevcdec/liblenthevcdec.a; sourceTree = ""; };
05F7469A1E7FBF230076E6EB /* QuartzCore.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = QuartzCore.framework; path = System/Library/Frameworks/QuartzCore.framework; sourceTree = SDKROOT; };
05F746A81E7FC1520076E6EB /* UIKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = UIKit.framework; path = System/Library/Frameworks/UIKit.framework; sourceTree = SDKROOT; };
05F746AA1E7FC15B0076E6EB /* Foundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Foundation.framework; path = System/Library/Frameworks/Foundation.framework; sourceTree = SDKROOT; };
05F746AC1E7FC1680076E6EB /* CoreGraphics.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreGraphics.framework; path = System/Library/Frameworks/CoreGraphics.framework; sourceTree = SDKROOT; };
05F746BA1E7FCC890076E6EB /* GLRenderer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GLRenderer.h; path = KSY265CodecDemo_iOS/GLRenderer.h; sourceTree = ""; };
05F746BB1E7FCC890076E6EB /* GLRenderer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GLRenderer.m; path = KSY265CodecDemo_iOS/GLRenderer.m; sourceTree = ""; };
05F746BC1E7FCC890076E6EB /* GLView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GLView.h; path = KSY265CodecDemo_iOS/GLView.h; sourceTree = ""; };
05F746BD1E7FCC890076E6EB /* GLView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GLView.m; path = KSY265CodecDemo_iOS/GLView.m; sourceTree = ""; };
05F746C01E7FCDFE0076E6EB /* libc++.tbd */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.text-based-dylib-definition"; name = "libc++.tbd"; path = "usr/lib/libc++.tbd"; sourceTree = SDKROOT; };
F28F15851E88EB6600E1A739 /* 1280x720_15.yuv */ = {isa = PBXFileReference; lastKnownFileType = file; path = 1280x720_15.yuv; sourceTree = ""; };
F28F15861E88EB6600E1A739 /* 640x480_15.yuv */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = 640x480_15.yuv; sourceTree = ""; };
F28F15891E88EFF700E1A739 /* 960x540_15.yuv */ = {isa = PBXFileReference; lastKnownFileType = file; path = 960x540_15.yuv; sourceTree = ""; };
F28F159B1E890F7B00E1A739 /* AYHCustomComboBox.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AYHCustomComboBox.h; sourceTree = ""; };
F28F159C1E890F7B00E1A739 /* AYHCustomComboBox.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AYHCustomComboBox.m; sourceTree = ""; };
F28F159D1E890F7B00E1A739 /* AYHCustomComboBoxDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AYHCustomComboBoxDelegate.h; sourceTree = ""; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
055AEDEC1E7BD4280006FE5D /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
05C45C8C1E8E06A10045FE79 /* libz.tbd in Frameworks */,
05F746C11E7FCDFE0076E6EB /* libc++.tbd in Frameworks */,
05F746AD1E7FC1680076E6EB /* CoreGraphics.framework in Frameworks */,
05F746AB1E7FC15B0076E6EB /* Foundation.framework in Frameworks */,
05F746A91E7FC1520076E6EB /* UIKit.framework in Frameworks */,
05F7469B1E7FBF230076E6EB /* QuartzCore.framework in Frameworks */,
05F746971E7FBE8A0076E6EB /* OpenGLES.framework in Frameworks */,
058715B61E83BDB9008D8860 /* libqydecoder.a in Frameworks */,
058715B71E83BDB9008D8860 /* libqyencoder.a in Frameworks */,
05C45C861E8E02C60045FE79 /* libqycommon.a in Frameworks */,
05F746991E7FBECB0076E6EB /* liblenthevcdec.a in Frameworks */,
058715AE1E836C3C008D8860 /* libx264.a in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
055AEE081E7BD4280006FE5D /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
055AEE131E7BD4290006FE5D /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
055AEDE61E7BD4280006FE5D = {
isa = PBXGroup;
children = (
05F746A11E7FC1000076E6EB /* Prebuilt */,
058715AA1E82B0DA008D8860 /* KSY265Encoder */,
05F7469C1E7FC0320076E6EB /* KSY265Decoder */,
055AEDF11E7BD4280006FE5D /* KSY265CodecDemo_iOS */,
F28F159A1E890F7B00E1A739 /* combox */,
F28F15841E88EB6600E1A739 /* resource */,
055AEDF01E7BD4280006FE5D /* Products */,
05F746951E7FBE890076E6EB /* Frameworks */,
);
sourceTree = "";
};
055AEDF01E7BD4280006FE5D /* Products */ = {
isa = PBXGroup;
children = (
055AEDEF1E7BD4280006FE5D /* KSY265CodecDemo_iOS.app */,
055AEE0B1E7BD4280006FE5D /* KSY265CodecDemo_iOSTests.xctest */,
055AEE161E7BD4290006FE5D /* KSY265CodecDemo_iOSUITests.xctest */,
);
name = Products;
sourceTree = "";
};
055AEDF11E7BD4280006FE5D /* KSY265CodecDemo_iOS */ = {
isa = PBXGroup;
children = (
055AEDF51E7BD4280006FE5D /* AppDelegate.h */,
055AEDF61E7BD4280006FE5D /* AppDelegate.m */,
055AEDF81E7BD4280006FE5D /* FirstViewController.h */,
055AEDF91E7BD4280006FE5D /* FirstViewController.m */,
055AEDFB1E7BD4280006FE5D /* SecondViewController.h */,
055AEDFC1E7BD4280006FE5D /* SecondViewController.m */,
055AEE2B1E7BE7350006FE5D /* BaseViewController.h */,
055AEE2C1E7BE7350006FE5D /* BaseViewController.m */,
05F746741E7E78820076E6EB /* SettingsDecoderViewController.h */,
05F746751E7E78820076E6EB /* SettingsDecoderViewController.m */,
05F746761E7E78820076E6EB /* SettingsEncoderViewController.h */,
05F746771E7E78820076E6EB /* SettingsEncoderViewController.m */,
05F7467A1E7E81E60076E6EB /* EncoderHelperViewController.h */,
05F7467B1E7E81E60076E6EB /* EncoderHelperViewController.m */,
05F7467D1E7E87D10076E6EB /* DecoderHelperViewController.h */,
05F7467E1E7E87D10076E6EB /* DecoderHelperViewController.m */,
05F746831E7FB53C0076E6EB /* MoviesViewController.h */,
05F746841E7FB53C0076E6EB /* MoviesViewController.m */,
055AEDFE1E7BD4280006FE5D /* Main.storyboard */,
055AEE011E7BD4280006FE5D /* Assets.xcassets */,
055AEE031E7BD4280006FE5D /* LaunchScreen.storyboard */,
055AEE061E7BD4280006FE5D /* Info.plist */,
055AEDF21E7BD4280006FE5D /* Supporting Files */,
);
path = KSY265CodecDemo_iOS;
sourceTree = "";
};
055AEDF21E7BD4280006FE5D /* Supporting Files */ = {
isa = PBXGroup;
children = (
055AEDF31E7BD4280006FE5D /* main.m */,
);
name = "Supporting Files";
sourceTree = "";
};
058715AA1E82B0DA008D8860 /* KSY265Encoder */ = {
isa = PBXGroup;
children = (
058715AF1E836FE8008D8860 /* MovieEncoder.h */,
058715B01E836FE8008D8860 /* MovieEncoder.m */,
058715B81E83BEED008D8860 /* KSYMovieEncoder.h */,
058715B91E83BEED008D8860 /* KSYMovieEncoder.m */,
);
name = KSY265Encoder;
sourceTree = "";
};
05F746951E7FBE890076E6EB /* Frameworks */ = {
isa = PBXGroup;
children = (
05C45C8B1E8E06A10045FE79 /* libz.tbd */,
05C45C851E8E02C60045FE79 /* libqycommon.a */,
058715B31E83BDB9008D8860 /* libqydecoder.a */,
058715B41E83BDB9008D8860 /* libqyencoder.a */,
058715AD1E836C3C008D8860 /* libx264.a */,
05F746C01E7FCDFE0076E6EB /* libc++.tbd */,
05F7469A1E7FBF230076E6EB /* QuartzCore.framework */,
05F746961E7FBE8A0076E6EB /* OpenGLES.framework */,
05F746A81E7FC1520076E6EB /* UIKit.framework */,
05F746AA1E7FC15B0076E6EB /* Foundation.framework */,
05F746AC1E7FC1680076E6EB /* CoreGraphics.framework */,
);
name = Frameworks;
sourceTree = "";
};
05F7469C1E7FC0320076E6EB /* KSY265Decoder */ = {
isa = PBXGroup;
children = (
05F746891E7FBDBA0076E6EB /* KSYMoviePlayer.h */,
05F7468A1E7FBDBA0076E6EB /* KSYMoviePlayer.m */,
05F7468C1E7FBDF70076E6EB /* MoviePlayer.h */,
05F7468D1E7FBDF70076E6EB /* MoviePlayer.m */,
05F746BA1E7FCC890076E6EB /* GLRenderer.h */,
05F746BB1E7FCC890076E6EB /* GLRenderer.m */,
05F746BC1E7FCC890076E6EB /* GLView.h */,
05F746BD1E7FCC890076E6EB /* GLView.m */,
);
name = KSY265Decoder;
sourceTree = "";
};
05F746A11E7FC1000076E6EB /* Prebuilt */ = {
isa = PBXGroup;
children = (
05F746981E7FBECB0076E6EB /* liblenthevcdec.a */,
);
name = Prebuilt;
sourceTree = "";
};
F28F15841E88EB6600E1A739 /* resource */ = {
isa = PBXGroup;
children = (
F28F15891E88EFF700E1A739 /* 960x540_15.yuv */,
F28F15851E88EB6600E1A739 /* 1280x720_15.yuv */,
F28F15861E88EB6600E1A739 /* 640x480_15.yuv */,
);
path = resource;
sourceTree = "";
};
F28F159A1E890F7B00E1A739 /* combox */ = {
isa = PBXGroup;
children = (
F28F159B1E890F7B00E1A739 /* AYHCustomComboBox.h */,
F28F159C1E890F7B00E1A739 /* AYHCustomComboBox.m */,
F28F159D1E890F7B00E1A739 /* AYHCustomComboBoxDelegate.h */,
);
path = combox;
sourceTree = "";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
055AEDEE1E7BD4280006FE5D /* KSY265CodecDemo_iOS */ = {
isa = PBXNativeTarget;
buildConfigurationList = 055AEE1F1E7BD4290006FE5D /* Build configuration list for PBXNativeTarget "KSY265CodecDemo_iOS" */;
buildPhases = (
055AEDEB1E7BD4280006FE5D /* Sources */,
055AEDEC1E7BD4280006FE5D /* Frameworks */,
055AEDED1E7BD4280006FE5D /* Resources */,
);
buildRules = (
);
dependencies = (
);
name = KSY265CodecDemo_iOS;
productName = KSY265CodecDemo_iOS;
productReference = 055AEDEF1E7BD4280006FE5D /* KSY265CodecDemo_iOS.app */;
productType = "com.apple.product-type.application";
};
055AEE0A1E7BD4280006FE5D /* KSY265CodecDemo_iOSTests */ = {
isa = PBXNativeTarget;
buildConfigurationList = 055AEE221E7BD4290006FE5D /* Build configuration list for PBXNativeTarget "KSY265CodecDemo_iOSTests" */;
buildPhases = (
055AEE071E7BD4280006FE5D /* Sources */,
055AEE081E7BD4280006FE5D /* Frameworks */,
055AEE091E7BD4280006FE5D /* Resources */,
);
buildRules = (
);
dependencies = (
055AEE0D1E7BD4280006FE5D /* PBXTargetDependency */,
);
name = KSY265CodecDemo_iOSTests;
productName = KSY265CodecDemo_iOSTests;
productReference = 055AEE0B1E7BD4280006FE5D /* KSY265CodecDemo_iOSTests.xctest */;
productType = "com.apple.product-type.bundle.unit-test";
};
055AEE151E7BD4290006FE5D /* KSY265CodecDemo_iOSUITests */ = {
isa = PBXNativeTarget;
buildConfigurationList = 055AEE251E7BD4290006FE5D /* Build configuration list for PBXNativeTarget "KSY265CodecDemo_iOSUITests" */;
buildPhases = (
055AEE121E7BD4290006FE5D /* Sources */,
055AEE131E7BD4290006FE5D /* Frameworks */,
055AEE141E7BD4290006FE5D /* Resources */,
);
buildRules = (
);
dependencies = (
055AEE181E7BD4290006FE5D /* PBXTargetDependency */,
);
name = KSY265CodecDemo_iOSUITests;
productName = KSY265CodecDemo_iOSUITests;
productReference = 055AEE161E7BD4290006FE5D /* KSY265CodecDemo_iOSUITests.xctest */;
productType = "com.apple.product-type.bundle.ui-testing";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
055AEDE71E7BD4280006FE5D /* Project object */ = {
isa = PBXProject;
attributes = {
LastUpgradeCheck = 0820;
ORGANIZATIONNAME = "江东";
TargetAttributes = {
055AEDEE1E7BD4280006FE5D = {
CreatedOnToolsVersion = 8.2.1;
DevelopmentTeam = ZGJ54Q7R45;
ProvisioningStyle = Automatic;
};
055AEE0A1E7BD4280006FE5D = {
CreatedOnToolsVersion = 8.2.1;
DevelopmentTeam = ZGJ54Q7R45;
ProvisioningStyle = Automatic;
TestTargetID = 055AEDEE1E7BD4280006FE5D;
};
055AEE151E7BD4290006FE5D = {
CreatedOnToolsVersion = 8.2.1;
DevelopmentTeam = ZGJ54Q7R45;
ProvisioningStyle = Automatic;
TestTargetID = 055AEDEE1E7BD4280006FE5D;
};
};
};
buildConfigurationList = 055AEDEA1E7BD4280006FE5D /* Build configuration list for PBXProject "KSY265CodecDemo_iOS" */;
compatibilityVersion = "Xcode 3.2";
developmentRegion = English;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = 055AEDE61E7BD4280006FE5D;
productRefGroup = 055AEDF01E7BD4280006FE5D /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
055AEDEE1E7BD4280006FE5D /* KSY265CodecDemo_iOS */,
055AEE0A1E7BD4280006FE5D /* KSY265CodecDemo_iOSTests */,
055AEE151E7BD4290006FE5D /* KSY265CodecDemo_iOSUITests */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
055AEDED1E7BD4280006FE5D /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
055AEE051E7BD4280006FE5D /* LaunchScreen.storyboard in Resources */,
055AEE021E7BD4280006FE5D /* Assets.xcassets in Resources */,
F28F15881E88EB6600E1A739 /* 640x480_15.yuv in Resources */,
F28F158A1E88EFF700E1A739 /* 960x540_15.yuv in Resources */,
F28F15871E88EB6600E1A739 /* 1280x720_15.yuv in Resources */,
055AEE001E7BD4280006FE5D /* Main.storyboard in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
055AEE091E7BD4280006FE5D /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
055AEE141E7BD4290006FE5D /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
055AEDEB1E7BD4280006FE5D /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
05F746BF1E7FCC890076E6EB /* GLView.m in Sources */,
05F746BE1E7FCC890076E6EB /* GLRenderer.m in Sources */,
055AEDFD1E7BD4280006FE5D /* SecondViewController.m in Sources */,
058715BA1E83BEED008D8860 /* KSYMovieEncoder.m in Sources */,
055AEE2D1E7BE7350006FE5D /* BaseViewController.m in Sources */,
F28F159E1E890F7B00E1A739 /* AYHCustomComboBox.m in Sources */,
05F746851E7FB53C0076E6EB /* MoviesViewController.m in Sources */,
05F746791E7E78820076E6EB /* SettingsEncoderViewController.m in Sources */,
055AEDF71E7BD4280006FE5D /* AppDelegate.m in Sources */,
058715B11E836FE8008D8860 /* MovieEncoder.m in Sources */,
05F7467F1E7E87D10076E6EB /* DecoderHelperViewController.m in Sources */,
055AEDFA1E7BD4280006FE5D /* FirstViewController.m in Sources */,
05F746781E7E78820076E6EB /* SettingsDecoderViewController.m in Sources */,
055AEDF41E7BD4280006FE5D /* main.m in Sources */,
05F7467C1E7E81E60076E6EB /* EncoderHelperViewController.m in Sources */,
05F7468E1E7FBDF70076E6EB /* MoviePlayer.m in Sources */,
05F7468B1E7FBDBA0076E6EB /* KSYMoviePlayer.m in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
055AEE071E7BD4280006FE5D /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
055AEE121E7BD4290006FE5D /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin PBXTargetDependency section */
055AEE0D1E7BD4280006FE5D /* PBXTargetDependency */ = {
isa = PBXTargetDependency;
target = 055AEDEE1E7BD4280006FE5D /* KSY265CodecDemo_iOS */;
targetProxy = 055AEE0C1E7BD4280006FE5D /* PBXContainerItemProxy */;
};
055AEE181E7BD4290006FE5D /* PBXTargetDependency */ = {
isa = PBXTargetDependency;
target = 055AEDEE1E7BD4280006FE5D /* KSY265CodecDemo_iOS */;
targetProxy = 055AEE171E7BD4290006FE5D /* PBXContainerItemProxy */;
};
/* End PBXTargetDependency section */
/* Begin PBXVariantGroup section */
055AEDFE1E7BD4280006FE5D /* Main.storyboard */ = {
isa = PBXVariantGroup;
children = (
055AEDFF1E7BD4280006FE5D /* Base */,
);
name = Main.storyboard;
sourceTree = "<group>";
};
055AEE031E7BD4280006FE5D /* LaunchScreen.storyboard */ = {
isa = PBXVariantGroup;
children = (
055AEE041E7BD4280006FE5D /* Base */,
);
name = LaunchScreen.storyboard;
sourceTree = "<group>";
};
/* End PBXVariantGroup section */
/* Begin XCBuildConfiguration section */
055AEE1D1E7BD4290006FE5D /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
HEADER_SEARCH_PATHS = (
KSY265CodecDemo_iOS/GLRenderView,
lenthevcdec,
ksy265dec,
);
IPHONEOS_DEPLOYMENT_TARGET = 8.0;
MTL_ENABLE_DEBUG_INFO = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
055AEE1E1E7BD4290006FE5D /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu99;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
HEADER_SEARCH_PATHS = (
KSY265CodecDemo_iOS/GLRenderView,
lenthevcdec,
ksy265dec,
);
IPHONEOS_DEPLOYMENT_TARGET = 8.0;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
};
name = Release;
};
055AEE201E7BD4290006FE5D /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
DEVELOPMENT_TEAM = ZGJ54Q7R45;
ENABLE_BITCODE = NO;
HEADER_SEARCH_PATHS = (
KSY265CodecDemo_iOS/GLRenderView,
lenthevcdec,
ksy265codec,
x264,
);
INFOPLIST_FILE = KSY265CodecDemo_iOS/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 8.0;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
LIBRARY_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)/lenthevcdec",
"$(PROJECT_DIR)/ksy265dec",
"$(PROJECT_DIR)/x264",
"$(PROJECT_DIR)/ksy265codec",
"$(PROJECT_DIR)",
);
PRODUCT_BUNDLE_IDENTIFIER = "com.ksyun.ios.KSY265CodecDemo-iOS";
PRODUCT_NAME = "$(TARGET_NAME)";
};
name = Debug;
};
055AEE211E7BD4290006FE5D /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
DEVELOPMENT_TEAM = ZGJ54Q7R45;
ENABLE_BITCODE = NO;
HEADER_SEARCH_PATHS = (
KSY265CodecDemo_iOS/GLRenderView,
lenthevcdec,
ksy265codec,
x264,
);
INFOPLIST_FILE = KSY265CodecDemo_iOS/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 8.0;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
LIBRARY_SEARCH_PATHS = (
"$(inherited)",
"$(PROJECT_DIR)/lenthevcdec",
"$(PROJECT_DIR)/ksy265dec",
"$(PROJECT_DIR)/x264",
"$(PROJECT_DIR)/ksy265codec",
"$(PROJECT_DIR)",
);
PRODUCT_BUNDLE_IDENTIFIER = "com.ksyun.ios.KSY265CodecDemo-iOS";
PRODUCT_NAME = "$(TARGET_NAME)";
};
name = Release;
};
055AEE231E7BD4290006FE5D /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
BUNDLE_LOADER = "$(TEST_HOST)";
DEVELOPMENT_TEAM = ZGJ54Q7R45;
INFOPLIST_FILE = KSY265CodecDemo_iOSTests/Info.plist;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = "ksy.KSY265CodecDemo-iOSTests";
PRODUCT_NAME = "$(TARGET_NAME)";
TEST_HOST = "$(BUILT_PRODUCTS_DIR)/KSY265CodecDemo_iOS.app/KSY265CodecDemo_iOS";
};
name = Debug;
};
055AEE241E7BD4290006FE5D /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
BUNDLE_LOADER = "$(TEST_HOST)";
DEVELOPMENT_TEAM = ZGJ54Q7R45;
INFOPLIST_FILE = KSY265CodecDemo_iOSTests/Info.plist;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = "ksy.KSY265CodecDemo-iOSTests";
PRODUCT_NAME = "$(TARGET_NAME)";
TEST_HOST = "$(BUILT_PRODUCTS_DIR)/KSY265CodecDemo_iOS.app/KSY265CodecDemo_iOS";
};
name = Release;
};
055AEE261E7BD4290006FE5D /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
DEVELOPMENT_TEAM = ZGJ54Q7R45;
INFOPLIST_FILE = KSY265CodecDemo_iOSUITests/Info.plist;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = "ksy.KSY265CodecDemo-iOSUITests";
PRODUCT_NAME = "$(TARGET_NAME)";
TEST_TARGET_NAME = KSY265CodecDemo_iOS;
};
name = Debug;
};
055AEE271E7BD4290006FE5D /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
DEVELOPMENT_TEAM = ZGJ54Q7R45;
INFOPLIST_FILE = KSY265CodecDemo_iOSUITests/Info.plist;
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
PRODUCT_BUNDLE_IDENTIFIER = "ksy.KSY265CodecDemo-iOSUITests";
PRODUCT_NAME = "$(TARGET_NAME)";
TEST_TARGET_NAME = KSY265CodecDemo_iOS;
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
055AEDEA1E7BD4280006FE5D /* Build configuration list for PBXProject "KSY265CodecDemo_iOS" */ = {
isa = XCConfigurationList;
buildConfigurations = (
055AEE1D1E7BD4290006FE5D /* Debug */,
055AEE1E1E7BD4290006FE5D /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
055AEE1F1E7BD4290006FE5D /* Build configuration list for PBXNativeTarget "KSY265CodecDemo_iOS" */ = {
isa = XCConfigurationList;
buildConfigurations = (
055AEE201E7BD4290006FE5D /* Debug */,
055AEE211E7BD4290006FE5D /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
055AEE221E7BD4290006FE5D /* Build configuration list for PBXNativeTarget "KSY265CodecDemo_iOSTests" */ = {
isa = XCConfigurationList;
buildConfigurations = (
055AEE231E7BD4290006FE5D /* Debug */,
055AEE241E7BD4290006FE5D /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
055AEE251E7BD4290006FE5D /* Build configuration list for PBXNativeTarget "KSY265CodecDemo_iOSUITests" */ = {
isa = XCConfigurationList;
buildConfigurations = (
055AEE261E7BD4290006FE5D /* Debug */,
055AEE271E7BD4290006FE5D /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = 055AEDE71E7BD4280006FE5D /* Project object */;
}
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS.xcodeproj/project.xcworkspace/contents.xcworkspacedata
================================================
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS.xcodeproj/xcuserdata/jiangdong.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist
================================================
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS.xcodeproj/xcuserdata/jiangdong.xcuserdatad/xcschemes/KSY265CodecDemo_iOS.xcscheme
================================================
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS.xcodeproj/xcuserdata/jiangdong.xcuserdatad/xcschemes/xcschememanagement.plist
================================================
SuppressBuildableAutocreation
055AEDEE1E7BD4280006FE5D
primary
055AEE0A1E7BD4280006FE5D
primary
055AEE151E7BD4290006FE5D
primary
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS.xcodeproj/xcuserdata/ksyun.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist
================================================
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS.xcodeproj/xcuserdata/ksyun.xcuserdatad/xcschemes/KSY265CodecDemo_iOS.xcscheme
================================================
================================================
FILE: iOS_demo/KSY265CodecDemo_iOS.xcodeproj/xcuserdata/ksyun.xcuserdatad/xcschemes/xcschememanagement.plist
================================================
SchemeUserState
KSY265CodecDemo_iOS.xcscheme
orderHint
0
SuppressBuildableAutocreation
055AEDEE1E7BD4280006FE5D
primary
055AEE0A1E7BD4280006FE5D
primary
055AEE151E7BD4290006FE5D
primary
================================================
FILE: iOS_demo/combox/AYHCustomComboBox.h
================================================
//
// AYHCustomComboBox.h
// TestCustomComboBox
//
// Created by AlimysoYang on 12-4-25.
// Copyright (c) 2012年 __Alimyso Software Ltd__. All rights reserved.
// QQ:86373007
// NOTE(review): the two framework #import lines below lost their targets during
// extraction — presumably <UIKit/UIKit.h> and <QuartzCore/QuartzCore.h> (the class
// subclasses UIView and touches self.layer); confirm against the original repo.
#import
#import
#import "AYHCustomComboBoxDelegate.h"
// Fixed row height used by the embedded UITableView.
#define kTableViewCellHeight 28.0f
// A simple combo-box control: a UIView hosting a UITableView whose rows are the
// selectable item strings. Row selection is reported through the
// AYHCustomComboBoxDelegate callback.
@interface AYHCustomComboBox : UIView
{
NSString* NotificationName; // notification name captured at init (posting is currently commented out in the .m)
}
@property (strong, nonatomic) UITableView* ccbtableView; // backing table view filling the control
@property (strong, nonatomic) NSMutableArray* ccbListData; // item strings shown as rows
@property (assign, nonatomic) id delegate; // expected to implement AYHCustomComboBoxDelegate; assign (non-retaining)
// Initializer: frame, expected item count, and a notification name to remember.
- (id) initWithFrame:(CGRect)frame DataCount:(int) count NotificationName:(NSString*) notificationName;
// Append a single item string.
- (void) addItemData:(NSString*) itemData;
// Append an array of item strings.
- (void) addItemsData:(NSArray*) itemsData;
- (NSString*) getItemData;
// Reload the UITableView after the item list changed.
- (void) flushData;
@end
================================================
FILE: iOS_demo/combox/AYHCustomComboBox.m
================================================
//
// AYHCustomComboBox.m
// TestCustomComboBox
//
// Created by AlimysoYang on 12-4-25.
// Copyright (c) 2012年 __Alimyso Software Ltd__. All rights reserved.
// QQ:86373007
#import "AYHCustomComboBox.h"
@implementation AYHCustomComboBox
@synthesize ccbtableView, ccbListData;
@synthesize delegate;
// Initializer: builds a UITableView covering the whole frame, registers self as
// its delegate/data source, and styles the control (gray background, 5pt rounded
// corners, 1pt black border). `count` is accepted for API compatibility but is
// not used — the item list grows on demand.
- (id)initWithFrame:(CGRect)frame DataCount:(int)count NotificationName:(NSString *)notificationName
{
self = [super initWithFrame:frame];
if (self)
{
NotificationName = [[NSString alloc] initWithString:notificationName];
ccbListData = [[NSMutableArray alloc] initWithCapacity:0];
ccbtableView = [[UITableView alloc] initWithFrame:CGRectMake(0, 0, frame.size.width, frame.size.height)];
[ccbtableView setDelegate:self];
[ccbtableView setDataSource:self];
[ccbtableView setBackgroundColor:[UIColor grayColor]];
[self addSubview:ccbtableView];
[self setBackgroundColor:[UIColor grayColor]];
self.layer.cornerRadius = 5.0f;
self.layer.borderColor = [UIColor blackColor].CGColor;
self.layer.masksToBounds = YES;
// Set the border width once (the original assigned it twice: 1.0f then 1).
self.layer.borderWidth = 1.0f;
}
return self;
}
// Append a single item string to the list (call -flushData to refresh the UI).
- (void) addItemData:(NSString *)itemData
{
[ccbListData addObject:itemData];
}
// Append an array of item strings to the list.
- (void) addItemsData:(NSArray *)itemsData
{
[ccbListData addObjectsFromArray:itemsData];
}
// Stubbed accessor: always returns the empty string (title tracking is disabled).
- (NSString*) getItemData
{
return @"";
}
// Reload the table so newly added items become visible.
- (void) flushData
{
[self.ccbtableView reloadData];
}
#pragma mark - UITableViewDataSource / UITableViewDelegate
- (NSInteger) numberOfSectionsInTableView:(UITableView *)tableView
{
return 1;
}
- (NSInteger) tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section
{
return [ccbListData count];
}
- (CGFloat) tableView:(UITableView *)tableView heightForRowAtIndexPath:(NSIndexPath *)indexPath
{
return kTableViewCellHeight;
}
- (UITableViewCell*) tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath
{
static NSString* CellIdentifier = @"CustomComboBoxCell";
UITableViewCell* cell = [tableView dequeueReusableCellWithIdentifier:CellIdentifier];
if (cell==nil)
cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault reuseIdentifier:CellIdentifier];
cell.textLabel.font = [UIFont boldSystemFontOfSize:15.0f];
cell.textLabel.textAlignment = NSTextAlignmentCenter;
cell.textLabel.text = [ccbListData objectAtIndex:[indexPath row]];
return cell;
}
// Row tapped: forward the selected item string to the delegate.
- (void) tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath
{
NSString* selectItem = [ccbListData objectAtIndex:[indexPath row]];
// BUGFIX: the original sent CustomComboBoxChanged:SelectedItem: unconditionally
// to the untyped `assign` delegate — a non-conforming delegate crashed with
// "unrecognized selector". Guard with respondsToSelector: first.
if ([delegate respondsToSelector:@selector(CustomComboBoxChanged:SelectedItem:)])
[delegate CustomComboBoxChanged:self SelectedItem:selectItem];
//通知消息返回 (notification posting, kept disabled as in the original):
//[[NSNotificationCenter defaultCenter] postNotificationName:NotificationName object:nil];
}
@end
================================================
FILE: iOS_demo/combox/AYHCustomComboBoxDelegate.h
================================================
//
// AYHCustomComboBoxDelegate.h
// TestCustomComboBox
//
// Created by AlimysoYang on 12-4-25.
// Copyright (c) 2012年 __Alimyso Software Ltd__. All rights reserved.
// QQ:86373007
// NOTE(review): the #import below lost its target during extraction — presumably
// <Foundation/Foundation.h>; confirm against the original repo.
#import
// Callback protocol for AYHCustomComboBox: invoked when the user selects a row.
// `sender` is the combo box instance; `selectedItem` is the tapped row's string.
@protocol AYHCustomComboBoxDelegate
- (void) CustomComboBoxChanged:(id) sender SelectedItem:(NSString*) selectedItem;
@end
================================================
FILE: iOS_demo/ksy265codec/qy265dec.h
================================================
///////////////////////////////////////////////////
//
// Qianyi H265 Codec Library
//
// Copyright(c) 2013-2014 Qianyi, Inc.
// www.qyvideo.cn
//
///////////////////////////////////////////////////
/************************************************************************************
* decInf.h: interface of decoder for user
*
* \date 2013-09-28: first version
*
************************************************************************************/
#ifndef _QY265_DECODER_INTERFACE_H_
#define _QY265_DECODER_INTERFACE_H_
#include "qy265def.h"
// config parameters for Decoder
typedef struct QY265DecConfig {
void* pAuth; //QYAuth, invalid if don't need aksk auth
int threads; // number of threads used in decoding (0: auto)
int bEnableOutputRecToFile; // For debug: write reconstructed YUV to file
char* strRecYuvFileName; // For debug: file name of YUV
// when bEnableOutputRecToFile = 1
int logLevel; //For debug: log level
}QY265DecConfig;
// information of decoded frame
typedef struct QY265FrameInfo {
int nWidth; // frame width
int nHeight; // frame height
long long pts; // time stamp
int bIllegalStream; // input bit stream is illegal
}QY265FrameInfo;
// decoded frame with data and information
typedef struct QY265Frame {
int bValid; //if == 0, no more valid output frame
unsigned char* pData[3]; // Y U V plane pointers
short iStride[3]; // stride for each component
// NOTE(review): stride is `short`, so strides above 32767 bytes would
// overflow — presumably acceptable for the resolutions this SDK targets.
QY265FrameInfo frameinfo;
}QY265Frame;
#if defined(__cplusplus)
extern "C" {
#endif//__cplusplus
/************************************************************************************
* I/F for all users
************************************************************************************/
// create decoder, return handle of decoder (pStat receives a QY_* status code)
_h_dll_export void* QY265DecoderCreate(QY265DecConfig* pDecConfig, int * pStat);
// destroy decoder with specific handle
_h_dll_export void QY265DecoderDestroy(void* pDecoder);
// set config to specific decoder
_h_dll_export void QY265DecoderSetDecConfig(void *pDecoder, QY265DecConfig* pDecConfig, int * pStat);
//the input of this function should be one or more NALs;
//if only one NAL, with or without start bytes are both OK
_h_dll_export void QY265DecodeFrame(void *pDecoder, unsigned char* pData, int iLen, int * pStat, const long long pts);
// bSkip = false : same as QY265DecodeFrame
// bSkip = true : only decode slice headers in pData, slice data skipped
_h_dll_export void QY265DecodeFrameEnSkip(void *pDecoder, unsigned char* pData, int iLen, int * pStat, const long long pts, int bSkip);
//flush decoding, called at end of stream
_h_dll_export void QY265DecodeFlush(void *pDecoder, int bClearCachedPics, int * pStat);
// retrieve the output, the function are used for synchronized output, this function need to call several time until get NULL
// if bForceLogo == true, only one frame buffer inside, need return before get next output
_h_dll_export void QY265DecoderGetDecodedFrame(void *pDecoder, QY265Frame* pFrame, int * pStat, int bForceLogo);
// return the frame buffer which QY265DecoderGetOutput get from decoder, each valid QY265DecoderGetOutput should match with a ReturnFrame
_h_dll_export void QY265DecoderReturnDecodedFrame( void *pDecoder, QY265Frame* pFrame);
/**
* dump latest decoded VUI parameters
* @param_input pDecoder: decoder instance
* @param_output vui: fill with decoded vui parameters
* @param_output bValid: =0 if no valid vui parameters decoded,
* otherwise =1
*/
_h_dll_export void QY265DumpVUIParameters(void* pDecoder, vui_parameters* vui, int* bValid);
#if defined(__cplusplus)
}
#endif//__cplusplus
#endif//header
================================================
FILE: iOS_demo/ksy265codec/qy265def.h
================================================
#ifndef _QY265_DEF_H_
#define _QY265_DEF_H_
// ****************************************
// error type
// ****************************************
// Status codes reported through the int* pStat / errorCode out-parameters of the
// codec API. Values with the high bit set (0x8000000x) are hard failures; small
// positive values are informational conditions the caller may ignore.
enum
{
QY_OK = (0x00000000), // Success codes
QY_FAIL = (0x80000001), // Unspecified error
QY_OUTOFMEMORY = (0x80000002), // Ran out of memory
QY_POINTER = (0x80000003), // Invalid pointer
QY_NOTSUPPORTED = (0x80000004),// unsupported feature encountered
QY_AUTH_INVALID = (0x80000005), // authentication invalid
QY_SEARCHING_ACCESS_POINT = (0x00000001), // in process of searching first access point
QY_REF_PIC_NOT_FOUND = (0x00000007), // reference picture not found, can be ignored
#if defined(EMSCRIPTEN)||defined(_TEST_FOR_EMSCRIPTEN)
QY_NEED_MORE_DATA = (0x00000008), //need push more data
#endif
QY_BITSTREAM_ERROR = (0x00000009), // detecting bitstream error, can be ignored
};
// HEVC NAL unit type codes per Rec. ITU-T H.265, Table 7-1.
enum NAL_UNIT_TYPE{
NAL_UNIT_TYPE_TRAIL_N = 0,
NAL_UNIT_TYPE_TRAIL_R = 1,
NAL_UNIT_TYPE_TSA_N = 2,
NAL_UNIT_TYPE_TSA_R = 3,
NAL_UNIT_TYPE_STSA_N = 4,
NAL_UNIT_TYPE_STSA_R = 5,
NAL_UNIT_TYPE_RADL_N = 6,
NAL_UNIT_TYPE_RADL_R = 7,
NAL_UNIT_TYPE_RASL_N = 8,
NAL_UNIT_TYPE_RASL_R = 9,
//reserved
NAL_UNIT_TYPE_RSV_VCL_N10 = 10,
NAL_UNIT_TYPE_RSV_VCL_N12 = 12,
// BUGFIX: was 13, duplicating NAL_UNIT_TYPE_RSV_VCL_R13; H.265 Table 7-1
// assigns RSV_VCL_N14 the value 14.
NAL_UNIT_TYPE_RSV_VCL_N14 = 14,
NAL_UNIT_TYPE_RSV_VCL_R11 = 11,
NAL_UNIT_TYPE_RSV_VCL_R13 = 13,
NAL_UNIT_TYPE_RSV_VCL_R15 = 15,
NAL_UNIT_TYPE_BLA_W_LP = 16,
NAL_UNIT_TYPE_BLA_W_RADL = 17,
NAL_UNIT_TYPE_BLA_N_LP = 18,
NAL_UNIT_TYPE_IDR_W_RADL = 19,
NAL_UNIT_TYPE_IDR_N_LP = 20,
NAL_UNIT_TYPE_CRA_NUT = 21,
NAL_UNIT_TYPE_RSV_IRAP_VCL22 = 22,
NAL_UNIT_TYPE_RSV_IRAP_VCL23 = 23,
NAL_UNIT_TYPE_RSV_VCL24 = 24,
NAL_UNIT_TYPE_RSV_VCL25 = 25,
NAL_UNIT_TYPE_RSV_VCL26 = 26,
NAL_UNIT_TYPE_RSV_VCL27 = 27,
NAL_UNIT_TYPE_RSV_VCL28 = 28,
NAL_UNIT_TYPE_RSV_VCL29 = 29,
NAL_UNIT_TYPE_RSV_VCL30 = 30,
NAL_UNIT_TYPE_RSV_VCL31 = 31,
// non-VCL NAL units
NAL_UNIT_TYPE_VPS_NUT = 32,
NAL_UNIT_TYPE_SPS_NUT = 33,
NAL_UNIT_TYPE_PPS_NUT = 34,
NAL_UNIT_TYPE_AUD_NUT = 35,
NAL_UNIT_TYPE_EOS_NUT = 36,
NAL_UNIT_TYPE_EOB_NUT = 37,
NAL_UNIT_TYPE_FD_NUT = 38,
NAL_UNIT_TYPE_PREFIX_SEI_NUT = 39,
NAL_UNIT_TYPE_SUFFIX_SEI_NUT = 40,
NAL_UNIT_TYPE_RSV_NVCL41 = 41,
NAL_UNIT_TYPE_RSV_NVCL42 = 42,
NAL_UNIT_TYPE_RSV_NVCL43 = 43,
NAL_UNIT_TYPE_RSV_NVCL44 = 44,
NAL_UNIT_TYPE_RSV_NVCL45 = 45,
NAL_UNIT_TYPE_RSV_NVCL46 = 46,
NAL_UNIT_TYPE_RSV_NVCL47 = 47,
NAL_UNIT_TYPE_UNSPEC48 = 48,
NAL_UNIT_TYPE_UNSPEC49 = 49,
NAL_UNIT_TYPE_UNSPEC50 = 50,
NAL_UNIT_TYPE_UNSPEC51 = 51,
NAL_UNIT_TYPE_UNSPEC52 = 52,
NAL_UNIT_TYPE_UNSPEC53 = 53,
NAL_UNIT_TYPE_UNSPEC54 = 54,
NAL_UNIT_TYPE_UNSPEC55 = 55,
NAL_UNIT_TYPE_UNSPEC56 = 56,
NAL_UNIT_TYPE_UNSPEC57 = 57,
NAL_UNIT_TYPE_UNSPEC58 = 58,
NAL_UNIT_TYPE_UNSPEC59 = 59,
NAL_UNIT_TYPE_UNSPEC60 = 60,
NAL_UNIT_TYPE_UNSPEC61 = 61,
NAL_UNIT_TYPE_UNSPEC62 = 62,
NAL_UNIT_TYPE_UNSPEC63 = 63,
};
// ****************************************
// VUI
// ****************************************
// Decoded VUI (Video Usability Information) fields as read from the SPS; see
// Rec. ITU-T H.265, Annex E. Filled by QY265DumpVUIParameters. Layout is part
// of the public ABI — do not reorder fields.
typedef struct vui_parameters{
// --- sample aspect ratio (SAR) ---
unsigned char aspect_ratio_info_present_flag;
unsigned short sar_width; // sar_width and sar_height are zero if unspecified
unsigned short sar_height;
// --- overscan ---
unsigned char overscan_info_present_flag;
unsigned char overscan_appropriate_flag;
// --- video signal type ---
unsigned char video_signal_type_present_flag;
unsigned char video_format;
unsigned char video_full_range_flag;
unsigned char colour_description_present_flag;
unsigned char colour_primaries;
unsigned char transfer_characteristics;
unsigned char matrix_coeffs;
// --- chroma / interlaced ---
unsigned char chroma_loc_info_present_flag;
unsigned char chroma_sample_loc_type_top_field;
unsigned char chroma_sample_loc_type_bottom_field;
unsigned char neutral_chroma_indication_flag;
unsigned char field_seq_flag;
unsigned char frame_field_info_present_flag;
// --- default display window ---
unsigned char default_display_window_flag;
unsigned int def_disp_win_left_offset;
unsigned int def_disp_win_right_offset;
unsigned int def_disp_win_top_offset;
unsigned int def_disp_win_bottom_offset;
// --- timing ---
unsigned char vui_timing_info_present_flag;
unsigned int vui_num_units_in_tick;
unsigned int vui_time_scale;
unsigned char vui_poc_proportional_to_timing_flag;
unsigned int vui_num_ticks_poc_diff_one;
// --- hrd parameters ---
unsigned char vui_hrd_parameters_present_flag;
//hrd_parameters vui_hrd_parameters;
// --- bitstream restriction ---
unsigned char bitstream_restriction_flag;
unsigned char tiles_fixed_structure_flag;
unsigned char motion_vectors_over_pic_boundaries_flag;
unsigned char restricted_ref_pic_lists_flag;
unsigned short min_spatial_segmentation_idc;
unsigned char max_bytes_per_pic_denom;
unsigned char max_bits_per_min_cu_denom;
unsigned char log2_max_mv_length_horizontal;
unsigned char log2_max_mv_length_vertical;
}vui_parameters;
// Export decoration: empty for SWIG / Flash (AVM2) builds, __declspec(dllexport)
// on Windows, default ELF visibility for GCC/Clang.
#if defined(SWIG) || defined(__AVM2__)
#define _h_dll_export
#else
#ifdef WIN32
#define _h_dll_export __declspec(dllexport)
#else // for GCC
#define _h_dll_export __attribute__ ((visibility("default")))
#endif
#endif //SWIG
// Callback invoked for each log message emitted by the library.
typedef void (*QYLogPrintf)(const char* msg);
// Callback invoked when authentication trouble is detected.
// BUGFIX: declared with an explicit (void) prototype — the original empty ()
// is an obsolescent C "unspecified arguments" declarator that defeats
// compile-time argument checking (C11 6.11.6).
typedef void (*QYAuthWarning)(void);
#if defined(__cplusplus)
extern "C" {
#endif//__cplusplus
// log output callback func pointer
// if pFuncCB == NULL, use the default printf
_h_dll_export void QY265SetLogPrintf ( QYLogPrintf pFuncCB);
// auth trouble warning callback func pointer
_h_dll_export void QY265SetAuthWarning ( QYAuthWarning pFuncCB);
#if defined(__cplusplus)
}
#endif//__cplusplus
//libqy265 version number string
_h_dll_export extern const char strLibQy265Version[];
#endif
================================================
FILE: iOS_demo/ksy265codec/qy265enc.h
================================================
///////////////////////////////////////////////////
//
// Kingsoft H265 Codec Library
//
// Copyright(c) Kingsoft cloud Inc.
// http://www.ksyun.com/
//
///////////////////////////////////////////////////
/************************************************************************************
* encInf.h: interface of encoder for user
*
* \date 2013-09-28: first version
*
************************************************************************************/
#ifndef _QY265_ENCODER_INTERFACE_H_
#define _QY265_ENCODER_INTERFACE_H_
#include "qy265def.h"
// ****************************************
// base configuration
// ****************************************
//app type
typedef enum QY265Tune_tag{
QY265TUNE_DEFAULT = 0,
QY265TUNE_SELFSHOW = 1,
QY265TUNE_GAME = 2,
QY265TUNE_MOVIE = 3,
QY265TUNE_SCREEN = 4
}QY265Tune;
typedef enum QY265Preset_tag{
QY265PRESET_SUPERFAST = 0,
QY265PRESET_VERYFAST = 1,
QY265PRESET_FAST = 2,
QY265PRESET_MEDIUM = 3,
QY265PRESET_SLOW = 4,
QY265PRESET_VERYSLOW = 5,
QY265PRESET_PLACEBO = 6,
}QY265Preset;
typedef enum QY265Latency_tag{
QY265LATENCY_ZERO = 0,
QY265LATENCY_LOWDELAY = 1,
QY265LATENCY_LIVESTREMING = 2,
QY265LATENCY_OFFLINE = 3,
}QY265Latency;
//base configuration
typedef struct QY265EncConfig{
void* pAuth; //QYAuth, invalid if don't need aksk auth
QY265Tune tune; //
QY265Preset preset;
QY265Latency latency;
int bHeaderBeforeKeyframe; //whether output vps,sps,pps before key frame, default 1. dis/enable 0/1
int picWidth; // input frame width
int picHeight; // input frame height
double frameRate; // input frame rate
int bframes; // num of bi-pred frames, -1: using default
int temporalLayer; // works with QY265LATENCY_ZERO, separate P frames into temporal layers, 0 or 1
int rc; // rc type 0 disable,1 cbr,2 abr,3 crf, default 2
int bitrateInkbps; // target bit rate in kbps, valid when rctype is cbr abd vbr
int vbv_buffer_size; // buf size of vbv
int vbv_max_rate; // max rate of vbv
int qp; // valid when rctype is disable, default 26
int crf; // valid when rctype is crf,default 24
int iIntraPeriod; // I-Frame period, -1 = only first
int qpmin; //minimal qp, valid when rc != 0, 0~51
int qpmax; //maximal qp, valid when rc != 0, 1~51, qpmax = 0 means 51
int enFrameSkip; //1: enable frame skip for ratecontrol, default 0
//* Execute Properties
int enWavefront; //enable wave front parallel
int enFrameParallel; //enable frame parallel
int threads; // number of threads used in encoding ( for wavefront, frame parallel, or enable both )
//* vui_parameters
//vui_parameters_present_flag equal to 1 specifies that the vui_parameters() syntax in struct vui should set by usr
int vui_parameters_present_flag;
struct{
/* video_signal_type_present_flag. If this is set then
* video_format, video_full_range_flag and colour_description_present_flag
* will be added to the VUI. The default is false */
int video_signal_type_present_flag;
/* Video format of the source video. 0 = component, 1 = PAL, 2 = NTSC,
* 3 = SECAM, 4 = MAC, 5 = unspecified video format is the default */
int video_format;
/* video_full_range_flag indicates the black level and range of the luma
* and chroma signals as derived from EY, EPB, and EPR or ER, EG,
* and EB real-valued component signals. The default is false */
int video_full_range_flag;
/* colour_description_present_flag in the VUI. If this is set then
* color_primaries, transfer_characteristics and matrix_coeffs are to be
* added to the VUI. The default is false */
int colour_description_present_flag;
/* colour_primaries holds the chromacity coordinates of the source
* primaries. The default is 2 */
int colour_primaries;
/* transfer_characteristics indicates the opto-electronic transfer
* characteristic of the source picture. The default is 2 */
int transfer_characteristics;
/* matrix_coeffs used to derive the luma and chroma signals from
* the red, blue and green primaries. The default is 2 */
int matrix_coeffs;
}vui;
//* debug
int logLevel;
int calcPsnr; //0:not calc psnr; 1: print total psnr; 2: print each frame
}QY265EncConfig;
// ****************************************
// callback functions
// ****************************************
//the encoder works in asynchronous mode (to support B frames):
//a single call to EncodeFrame does not necessarily produce that frame's bitstream output;
//instead, encoded bitstream is delivered via the callback invoked when a frame has been encoded.
//also, the srcYUV buffer must remain valid for the encoder until the encoder is done with it
// CALLBACK method to feed the encoded bit stream
// input frame data and info
// Raw input frame: picture dimensions plus per-plane data pointers and strides.
// The planes must stay valid until the encoder has finished with the frame
// (see the asynchronous-mode note above).
typedef struct QY265YUV{
    int iWidth;              // input frame width in pixels
    int iHeight;             // input frame height in pixels
    unsigned char* pData[3]; // plane base pointers: [0]=Y, [1]=U, [2]=V
    int iStride[3];          // per-plane stride for Y, U, V
}QY265YUV;
// picture wrapper: slice type, timestamps and a pointer to the raw YUV planes
// Picture wrapper passed to and returned from the encoder.
typedef struct QY265Picture{
    int iSliceType;  // slice type; specified by output pictures
    int poc;         // picture order count; ignored on input
    long long pts;   // presentation timestamp
    long long dts;   // decoding timestamp
    QY265YUV* yuv;   // raw YUV planes (buffer reserved for the encoder until done)
}QY265Picture;
// One encoded NAL unit produced by the encoder.
typedef struct QY265Nal
{
    int naltype;             // NAL unit type
    int tid;                 // presumably the temporal layer id -- confirm against encoder source
    int iSize;               // payload size in bytes
    long long pts;           // pts of the frame this NAL belongs to
    unsigned char* pPayload; // pointer to the NAL payload bytes
}QY265Nal;
#if defined(__cplusplus)
extern "C" {
#endif//__cplusplus
/**
 * create encoder
 * @param pCfg : base config of encoder
 * @param errorCode: error code (output)
 * @return encoder handle, to be passed to the other QY265Encoder* functions
 */
_h_dll_export void* QY265EncoderOpen(QY265EncConfig* pCfg, int *errorCode);
// destroy encoder and release its resources
_h_dll_export void QY265EncoderClose(void* pEncoder);
// reconfig a running encoder with new settings
_h_dll_export void QY265EncoderReconfig(void* pEncoder,QY265EncConfig* pCfg);
// return the VPS, SPS and PPS that will be used for the whole stream.
_h_dll_export int QY265EncoderEncodeHeaders(void* pEncoder,QY265Nal** pNals,int* iNalCount);
/**
 * Encode one frame, with or without a logo overlay
 *
 * @param pEncoder handle of encoder
 * @param pNals pointer array of output NAL units
 * @param iNalCount output NAL unit count
 * @param pInPic input frame (may produce no output yet; encoder is asynchronous)
 * @param pOutPic output frame
 * @param bForceLogo add logo on the input frame ( when auth failed)
 * @return if succeed, return the total bin size of output, if failed, return the error code
 */
_h_dll_export int QY265EncoderEncodeFrame(void* pEncoder, QY265Nal** pNals, int* iNalCount, QY265Picture* pInpic, QY265Picture* pOutpic, int bForceLogo);
// Request encoder to encode a Key Frame
_h_dll_export void QY265EncoderKeyFrameRequest(void* pEncoder);
// number of currently buffered (not yet output) frames
_h_dll_export int QY265EncoderDelayedFrames(void* pEncoder);
// valid string values for preset / tune / latency, each list 0-terminated
static const char* const qy265_preset_names[] = { "superfast", "veryfast", "fast", "medium", "slow", "veryslow", "placebo", 0 };
static const char* const qy265_tunes_names[] = { "default", "selfshow", "game", "movie", "screen", 0 };
static const char* const qy265_latency_names[] = { "zerolatency", "lowdelay", "livestreaming", "offline", 0 };
// get default config values by preset, tune and latency. enum format
_h_dll_export int QY265ConfigDefault(QY265EncConfig* pConfig, QY265Preset preset, QY265Tune tune, QY265Latency latency);
// get default config values by preset, tune and latency. string format
_h_dll_export int QY265ConfigDefaultPreset(QY265EncConfig* pConfig, char* preset, char* tune, char* latency);
// error codes returned by QY265ConfigParse
#define QY265_PARAM_BAD_NAME (-1)
#define QY265_PARAM_BAD_VALUE (-2)
// set a single named config option from a string value
_h_dll_export int QY265ConfigParse(QY265EncConfig *p, const char *name, const char *value);
#if defined(__cplusplus)
}
#endif//__cplusplus
#endif
================================================
FILE: iOS_demo/lenthevcdec/lenthevcdec.h
================================================
#ifndef __LENTHEVCDEC_H__
#define __LENTHEVCDEC_H__
#ifdef __cplusplus
extern "C" {
#endif
/* Fixed-width integer types (int32_t / int64_t) used throughout this header.
 * NOTE(review): the header name was missing here (likely stripped by an
 * HTML/markdown extraction of "#include <stdint.h>"); restored. */
#include <stdint.h>
/* Public entry points use stdcall on Windows, the default convention elsewhere. */
#if defined(_WIN32) || defined(WIN32)
#define LENTAPI __stdcall
#else
#define LENTAPI
#endif
/* Output picture wrapper filled in by lenthevcdec_decode_frame(). */
typedef struct lenthevcdec_frame {
    /* size in byte of this struct, initialized by caller for expand
     * (allows the struct to grow in later versions without breaking ABI) */
    int32_t size;
    /* width & height: picture size */
    int32_t width;
    int32_t height;
    /* line_stride & pixels: output picture pixel data, one entry per plane */
    int32_t line_stride[3];
    void* pixels[3];
    /* bit depth of output picture pixel */
    int32_t bit_depth;
    /* set to 1 if we got a frame; then pixels, line_stride and got_pts are valid */
    int32_t got_frame;
    /* pts of output frame */
    int64_t got_pts;
    /* 0 progressive, 1 top, 2 bottom */
    int32_t pic_struct;
    /* Sample Aspect Ratio */
    int32_t sar_width;
    int32_t sar_height;
} lenthevcdec_frame;
/* opaque decoder handle */
typedef void* lenthevcdec_ctx;
/* library version number */
int LENTAPI lenthevcdec_version(void);
/* create a decoder instance.
 * threads: worker thread count; compatibility: compatibility level;
 * reserved: presumably must be NULL -- confirm against the implementation */
lenthevcdec_ctx LENTAPI lenthevcdec_create(int threads, int compatibility, void* reserved);
/* destroy a decoder created by lenthevcdec_create */
void LENTAPI lenthevcdec_destroy(lenthevcdec_ctx ctx);
/* discard buffered decoder state (e.g. before a seek) */
void LENTAPI lenthevcdec_flush(lenthevcdec_ctx ctx);
/* bs & bs_len: input bitstream
 * pts: input play timestamp
 * out_frame: output picture wrapper
 * return: byte count used by decoder, or negative number for error
 */
int LENTAPI lenthevcdec_decode_frame(lenthevcdec_ctx ctx,
                                     const void* bs, int bs_len,
                                     int64_t pts,
                                     lenthevcdec_frame *out_frame);
#ifdef __cplusplus
}
#endif
#endif/*__LENTHEVCDEC_H__*/
================================================
FILE: iOS_demo/resource/1280x720_15.yuv
================================================
[File too large to display: 79.1 MB]
================================================
FILE: iOS_demo/resource/640x480_15.yuv
================================================
[File too large to display: 30.8 MB]
================================================
FILE: iOS_demo/resource/960x540_15.yuv
================================================
[File too large to display: 55.6 MB]
================================================
FILE: iOS_demo/x264/x264.h
================================================
/*****************************************************************************
* x264.h: x264 public header
*****************************************************************************
* Copyright (C) 2003-2016 x264 project
*
* Authors: Laurent Aimar
* Loren Merritt
* Fiona Glaser
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02111, USA.
*
* This program is also available under a commercial proprietary license.
* For more information, contact us at licensing@x264.com.
*****************************************************************************/
#ifndef X264_X264_H
#define X264_X264_H
#ifdef __cplusplus
extern "C" {
#endif
#if !defined(_STDINT_H) && !defined(_STDINT_H_) && !defined(_STDINT_H_INCLUDED) && !defined(_STDINT) &&\
    !defined(_SYS_STDINT_H_) && !defined(_INTTYPES_H) && !defined(_INTTYPES_H_) && !defined(_INTTYPES)
# ifdef _MSC_VER
#  pragma message("You must include stdint.h or inttypes.h before x264.h")
# else
#  warning You must include stdint.h or inttypes.h before x264.h
# endif
#endif
/* va_list is used by the pf_log callback in x264_param_t.
 * NOTE(review): the header name was missing here (likely stripped by an
 * extraction of "#include <stdarg.h>", which upstream x264.h has at this
 * point); restored. */
#include <stdarg.h>
#include "x264_config.h"
#define X264_BUILD 148
/* Application developers planning to link against a shared library version of
 * libx264 from a Microsoft Visual Studio or similar development environment
 * will need to define X264_API_IMPORTS before including this header.
 * This clause does not apply to MinGW, similar development environments, or non
 * Windows platforms. */
#ifdef X264_API_IMPORTS
#define X264_API __declspec(dllimport)
#else
#define X264_API
#endif
/* x264_t:
 *      opaque handler for encoder; created by x264_encoder_open and passed
 *      to all other encoder functions */
typedef struct x264_t x264_t;
/****************************************************************************
 * NAL structure and functions
 ****************************************************************************/
/* H.264 NAL unit type codes, as carried in x264_nal_t.i_type */
enum nal_unit_type_e
{
    NAL_UNKNOWN     = 0,
    NAL_SLICE       = 1,
    NAL_SLICE_DPA   = 2,
    NAL_SLICE_DPB   = 3,
    NAL_SLICE_DPC   = 4,
    NAL_SLICE_IDR   = 5,    /* ref_idc != 0 */
    NAL_SEI         = 6,    /* ref_idc == 0 */
    NAL_SPS         = 7,
    NAL_PPS         = 8,
    NAL_AUD         = 9,
    NAL_FILLER      = 12,
    /* ref_idc == 0 for 6,9,10,11,12 */
};
/* nal_ref_idc priority levels, as carried in x264_nal_t.i_ref_idc */
enum nal_priority_e
{
    NAL_PRIORITY_DISPOSABLE = 0,
    NAL_PRIORITY_LOW        = 1,
    NAL_PRIORITY_HIGH       = 2,
    NAL_PRIORITY_HIGHEST    = 3,
};
/* The data within the payload is already NAL-encapsulated; the ref_idc and type
 * are merely in the struct for easy access by the calling application.
 * All data returned in an x264_nal_t, including the data in p_payload, is no longer
 * valid after the next call to x264_encoder_encode. Thus it must be used or copied
 * before calling x264_encoder_encode or x264_encoder_headers again. */
typedef struct x264_nal_t
{
    int i_ref_idc;  /* nal_priority_e */
    int i_type;     /* nal_unit_type_e */
    int b_long_startcode;
    int i_first_mb; /* If this NAL is a slice, the index of the first MB in the slice. */
    int i_last_mb;  /* If this NAL is a slice, the index of the last MB in the slice. */

    /* Size of payload (including any padding) in bytes. */
    int i_payload;
    /* If param->b_annexb is set, Annex-B bytestream with startcode.
     * Otherwise, startcode is replaced with a 4-byte size.
     * This size is the size used in mp4/similar muxing; it is equal to i_payload-4 */
    uint8_t *p_payload;

    /* Size of padding in bytes. */
    int i_padding;
} x264_nal_t;
/****************************************************************************
 * Encoder parameters
 ****************************************************************************/
/* CPU flags: bitmask values for x264_param_t.cpu.
 * Note the per-architecture groups reuse the same bit values. */
/* x86 */
#define X264_CPU_CMOV            0x0000001
#define X264_CPU_MMX             0x0000002
#define X264_CPU_MMX2            0x0000004  /* MMX2 aka MMXEXT aka ISSE */
#define X264_CPU_MMXEXT          X264_CPU_MMX2
#define X264_CPU_SSE             0x0000008
#define X264_CPU_SSE2            0x0000010
#define X264_CPU_SSE3            0x0000020
#define X264_CPU_SSSE3           0x0000040
#define X264_CPU_SSE4            0x0000080  /* SSE4.1 */
#define X264_CPU_SSE42           0x0000100  /* SSE4.2 */
#define X264_CPU_LZCNT           0x0000200  /* Phenom support for "leading zero count" instruction. */
#define X264_CPU_AVX             0x0000400  /* AVX support: requires OS support even if YMM registers aren't used. */
#define X264_CPU_XOP             0x0000800  /* AMD XOP */
#define X264_CPU_FMA4            0x0001000  /* AMD FMA4 */
#define X264_CPU_FMA3            0x0002000  /* FMA3 */
#define X264_CPU_AVX2            0x0004000  /* AVX2 */
#define X264_CPU_BMI1            0x0008000  /* BMI1 */
#define X264_CPU_BMI2            0x0010000  /* BMI2 */
/* x86 modifiers */
#define X264_CPU_CACHELINE_32    0x0020000  /* avoid memory loads that span the border between two cachelines */
#define X264_CPU_CACHELINE_64    0x0040000  /* 32/64 is the size of a cacheline in bytes */
#define X264_CPU_SSE2_IS_SLOW    0x0080000  /* avoid most SSE2 functions on Athlon64 */
#define X264_CPU_SSE2_IS_FAST    0x0100000  /* a few functions are only faster on Core2 and Phenom */
#define X264_CPU_SLOW_SHUFFLE    0x0200000  /* The Conroe has a slow shuffle unit (relative to overall SSE performance) */
#define X264_CPU_STACK_MOD4      0x0400000  /* if stack is only mod4 and not mod16 */
#define X264_CPU_SLOW_CTZ        0x0800000  /* BSR/BSF x86 instructions are really slow on some CPUs */
#define X264_CPU_SLOW_ATOM       0x1000000  /* The Atom is terrible: slow SSE unaligned loads, slow
                                             * SIMD multiplies, slow SIMD variable shifts, slow pshufb,
                                             * cacheline split penalties -- gather everything here that
                                             * isn't shared by other CPUs to avoid making half a dozen
                                             * new SLOW flags. */
#define X264_CPU_SLOW_PSHUFB     0x2000000  /* such as on the Intel Atom */
#define X264_CPU_SLOW_PALIGNR    0x4000000  /* such as on the AMD Bobcat */
/* PowerPC */
#define X264_CPU_ALTIVEC         0x0000001
/* ARM and AArch64 */
#define X264_CPU_ARMV6           0x0000001
#define X264_CPU_NEON            0x0000002  /* ARM NEON */
#define X264_CPU_FAST_NEON_MRC   0x0000004  /* Transfer from NEON to ARM register is fast (Cortex-A9) */
#define X264_CPU_ARMV8           0x0000008
/* MIPS */
#define X264_CPU_MSA             0x0000001  /* MIPS MSA */
/* Analyse flags: bitmask values for x264_param_t.analyse.intra / .inter */
#define X264_ANALYSE_I4x4       0x0001  /* Analyse i4x4 */
#define X264_ANALYSE_I8x8       0x0002  /* Analyse i8x8 (requires 8x8 transform) */
#define X264_ANALYSE_PSUB16x16  0x0010  /* Analyse p16x8, p8x16 and p8x8 */
#define X264_ANALYSE_PSUB8x8    0x0020  /* Analyse p8x4, p4x8, p4x4 */
#define X264_ANALYSE_BSUB16x16  0x0100  /* Analyse b16x8, b8x16 and b8x8 */
/* values for analyse.i_direct_mv_pred */
#define X264_DIRECT_PRED_NONE        0
#define X264_DIRECT_PRED_SPATIAL     1
#define X264_DIRECT_PRED_TEMPORAL    2
#define X264_DIRECT_PRED_AUTO        3
/* values for analyse.i_me_method */
#define X264_ME_DIA                  0
#define X264_ME_HEX                  1
#define X264_ME_UMH                  2
#define X264_ME_ESA                  3
#define X264_ME_TESA                 4
/* values for i_cqm_preset */
#define X264_CQM_FLAT                0
#define X264_CQM_JVT                 1
#define X264_CQM_CUSTOM              2
/* values for rc.i_rc_method */
#define X264_RC_CQP                  0
#define X264_RC_CRF                  1
#define X264_RC_ABR                  2
#define X264_QP_AUTO                 0
/* values for rc.i_aq_mode */
#define X264_AQ_NONE                 0
#define X264_AQ_VARIANCE             1
#define X264_AQ_AUTOVARIANCE         2
#define X264_AQ_AUTOVARIANCE_BIASED  3
/* values for i_bframe_adaptive */
#define X264_B_ADAPT_NONE            0
#define X264_B_ADAPT_FAST            1
#define X264_B_ADAPT_TRELLIS         2
/* values for analyse.i_weighted_pred */
#define X264_WEIGHTP_NONE            0
#define X264_WEIGHTP_SIMPLE          1
#define X264_WEIGHTP_SMART           2
/* values for i_bframe_pyramid */
#define X264_B_PYRAMID_NONE          0
#define X264_B_PYRAMID_STRICT        1
#define X264_B_PYRAMID_NORMAL        2
#define X264_KEYINT_MIN_AUTO         0
#define X264_KEYINT_MAX_INFINITE     (1<<30)
/* string names for the options above, each list 0-terminated (usable by option parsers) */
static const char * const x264_direct_pred_names[] = { "none", "spatial", "temporal", "auto", 0 };
static const char * const x264_motion_est_names[] = { "dia", "hex", "umh", "esa", "tesa", 0 };
static const char * const x264_b_pyramid_names[] = { "none", "strict", "normal", 0 };
static const char * const x264_overscan_names[] = { "undef", "show", "crop", 0 };
static const char * const x264_vidformat_names[] = { "component", "pal", "ntsc", "secam", "mac", "undef", 0 };
static const char * const x264_fullrange_names[] = { "off", "on", 0 };
static const char * const x264_colorprim_names[] = { "", "bt709", "undef", "", "bt470m", "bt470bg", "smpte170m", "smpte240m", "film", "bt2020", "smpte428",
                                                     "smpte431", "smpte432", 0 };
static const char * const x264_transfer_names[] = { "", "bt709", "undef", "", "bt470m", "bt470bg", "smpte170m", "smpte240m", "linear", "log100", "log316",
                                                    "iec61966-2-4", "bt1361e", "iec61966-2-1", "bt2020-10", "bt2020-12", "smpte2084", "smpte428", 0 };
static const char * const x264_colmatrix_names[] = { "GBR", "bt709", "undef", "", "fcc", "bt470bg", "smpte170m", "smpte240m", "YCgCo", "bt2020nc", "bt2020c",
                                                     "smpte2085", 0 };
static const char * const x264_nal_hrd_names[] = { "none", "vbr", "cbr", 0 };
/* Colorspace type: values for x264_param_t.i_csp (low byte) plus modifier bits */
#define X264_CSP_MASK           0x00ff  /* */
#define X264_CSP_NONE           0x0000  /* Invalid mode     */
#define X264_CSP_I420           0x0001  /* yuv 4:2:0 planar */
#define X264_CSP_YV12           0x0002  /* yvu 4:2:0 planar */
#define X264_CSP_NV12           0x0003  /* yuv 4:2:0, with one y plane and one packed u+v */
#define X264_CSP_NV21           0x0004  /* yuv 4:2:0, with one y plane and one packed v+u */
#define X264_CSP_I422           0x0005  /* yuv 4:2:2 planar */
#define X264_CSP_YV16           0x0006  /* yvu 4:2:2 planar */
#define X264_CSP_NV16           0x0007  /* yuv 4:2:2, with one y plane and one packed u+v */
#define X264_CSP_V210           0x0008  /* 10-bit yuv 4:2:2 packed in 32 */
#define X264_CSP_I444           0x0009  /* yuv 4:4:4 planar */
#define X264_CSP_YV24           0x000a  /* yvu 4:4:4 planar */
#define X264_CSP_BGR            0x000b  /* packed bgr 24bits */
#define X264_CSP_BGRA           0x000c  /* packed bgr 32bits */
#define X264_CSP_RGB            0x000d  /* packed rgb 24bits */
#define X264_CSP_MAX            0x000e  /* end of list */
#define X264_CSP_VFLIP          0x1000  /* the csp is vertically flipped */
#define X264_CSP_HIGH_DEPTH     0x2000  /* the csp has a depth of 16 bits per pixel component */
/* Slice type: per-frame type request/report (x264_picture_t.i_type) */
#define X264_TYPE_AUTO          0x0000  /* Let x264 choose the right type */
#define X264_TYPE_IDR           0x0001
#define X264_TYPE_I             0x0002
#define X264_TYPE_P             0x0003
#define X264_TYPE_BREF          0x0004  /* Non-disposable B-frame */
#define X264_TYPE_B             0x0005
#define X264_TYPE_KEYFRAME      0x0006  /* IDR or I depending on b_open_gop option */
#define IS_X264_TYPE_I(x) ((x)==X264_TYPE_I || (x)==X264_TYPE_IDR || (x)==X264_TYPE_KEYFRAME)
#define IS_X264_TYPE_B(x) ((x)==X264_TYPE_B || (x)==X264_TYPE_BREF)
/* Log level: values for x264_param_t.i_log_level */
#define X264_LOG_NONE          (-1)
#define X264_LOG_ERROR          0
#define X264_LOG_WARNING        1
#define X264_LOG_INFO           2
#define X264_LOG_DEBUG          3
/* Threading */
#define X264_THREADS_AUTO 0 /* Automatically select optimal number of threads */
#define X264_SYNC_LOOKAHEAD_AUTO (-1) /* Automatically select optimal lookahead thread buffer size */
/* HRD: values for x264_param_t.i_nal_hrd */
#define X264_NAL_HRD_NONE            0
#define X264_NAL_HRD_VBR             1
#define X264_NAL_HRD_CBR             2
/* Zones: override ratecontrol or other options for specific sections of the video.
 * See x264_encoder_reconfig() for which options can be changed.
 * If zones overlap, whichever comes later in the list takes precedence. */
typedef struct x264_zone_t
{
    int i_start, i_end; /* range of frame numbers */
    int b_force_qp;     /* whether to use qp vs bitrate factor */
    int i_qp;
    float f_bitrate_factor;
    struct x264_param_t *param; /* optional full parameter override for this zone */
} x264_zone_t;
/* Master encoder configuration struct. Fill with x264_param_default() or
 * x264_param_default_preset() before modifying individual fields; field order
 * and sizes are part of the public ABI (see X264_BUILD). */
typedef struct x264_param_t
{
    /* CPU flags */
    unsigned int cpu;
    int i_threads;            /* encode multiple frames in parallel */
    int i_lookahead_threads;  /* multiple threads for lookahead analysis */
    int b_sliced_threads;     /* Whether to use slice-based threading. */
    int b_deterministic;      /* whether to allow non-deterministic optimizations when threaded */
    int b_cpu_independent;    /* force canonical behavior rather than cpu-dependent optimal algorithms */
    int i_sync_lookahead;     /* threaded lookahead buffer */

    /* Video Properties */
    int i_width;
    int i_height;
    int i_csp;                /* CSP of encoded bitstream */
    int i_level_idc;
    int i_frame_total;        /* number of frames to encode if known, else 0 */

    /* NAL HRD
     * Uses Buffering and Picture Timing SEIs to signal HRD
     * The HRD in H.264 was not designed with VFR in mind.
     * It is therefore not recommendeded to use NAL HRD with VFR.
     * Furthermore, reconfiguring the VBV (via x264_encoder_reconfig)
     * will currently generate invalid HRD. */
    int i_nal_hrd;

    struct
    {
        /* they will be reduced to be 0 < x <= 65535 and prime */
        int i_sar_height;
        int i_sar_width;

        int i_overscan;    /* 0=undef, 1=no overscan, 2=overscan */

        /* see h264 annex E for the values of the following */
        int i_vidformat;
        int b_fullrange;
        int i_colorprim;
        int i_transfer;
        int i_colmatrix;
        int i_chroma_loc;    /* both top & bottom */
    } vui;

    /* Bitstream parameters */
    int i_frame_reference;  /* Maximum number of reference frames */
    int i_dpb_size;         /* Force a DPB size larger than that implied by B-frames and reference frames.
                             * Useful in combination with interactive error resilience. */
    int i_keyint_max;       /* Force an IDR keyframe at this interval */
    int i_keyint_min;       /* Scenecuts closer together than this are coded as I, not IDR. */
    int i_scenecut_threshold; /* how aggressively to insert extra I frames */
    int b_intra_refresh;    /* Whether or not to use periodic intra refresh instead of IDR frames. */

    int i_bframe;           /* how many b-frame between 2 references pictures */
    int i_bframe_adaptive;
    int i_bframe_bias;
    int i_bframe_pyramid;   /* Keep some B-frames as references: 0=off, 1=strict hierarchical, 2=normal */
    int b_open_gop;
    int b_bluray_compat;
    int i_avcintra_class;

    int b_deblocking_filter;
    int i_deblocking_filter_alphac0;    /* [-6, 6] -6 light filter, 6 strong */
    int i_deblocking_filter_beta;       /* [-6, 6]  idem */

    int b_cabac;
    int i_cabac_init_idc;

    int b_interlaced;
    int b_constrained_intra;

    int i_cqm_preset;
    char *psz_cqm_file;     /* filename (in UTF-8) of CQM file, JM format */
    uint8_t cqm_4iy[16];    /* used only if i_cqm_preset == X264_CQM_CUSTOM */
    uint8_t cqm_4py[16];
    uint8_t cqm_4ic[16];
    uint8_t cqm_4pc[16];
    uint8_t cqm_8iy[64];
    uint8_t cqm_8py[64];
    uint8_t cqm_8ic[64];
    uint8_t cqm_8pc[64];

    /* Log */
    void    (*pf_log)( void *, int i_level, const char *psz, va_list );
    void    *p_log_private;
    int     i_log_level;
    int     b_full_recon;   /* fully reconstruct frames, even when not necessary for encoding. Implied by psz_dump_yuv */
    char    *psz_dump_yuv;  /* filename (in UTF-8) for reconstructed frames */

    /* Encoder analyser parameters */
    struct
    {
        unsigned int intra;     /* intra partitions */
        unsigned int inter;     /* inter partitions */

        int b_transform_8x8;
        int i_weighted_pred;    /* weighting for P-frames */
        int b_weighted_bipred;  /* implicit weighting for B-frames */
        int i_direct_mv_pred;   /* spatial vs temporal mv prediction */
        int i_chroma_qp_offset;

        int i_me_method;        /* motion estimation algorithm to use (X264_ME_*) */
        int i_me_range;         /* integer pixel motion estimation search range (from predicted mv) */
        int i_mv_range;         /* maximum length of a mv (in pixels). -1 = auto, based on level */
        int i_mv_range_thread;  /* minimum space between threads. -1 = auto, based on number of threads. */
        int i_subpel_refine;    /* subpixel motion estimation quality */
        int b_chroma_me;        /* chroma ME for subpel and mode decision in P-frames */
        int b_mixed_references; /* allow each mb partition to have its own reference number */
        int i_trellis;          /* trellis RD quantization */
        int b_fast_pskip;       /* early SKIP detection on P-frames */
        int b_dct_decimate;     /* transform coefficient thresholding on P-frames */
        int i_noise_reduction;  /* adaptive pseudo-deadzone */
        float f_psy_rd;         /* Psy RD strength */
        float f_psy_trellis;    /* Psy trellis strength */
        int b_psy;              /* Toggle all psy optimizations */

        int b_mb_info;          /* Use input mb_info data in x264_picture_t */
        int b_mb_info_update;   /* Update the values in mb_info according to the results of encoding. */

        /* the deadzone size that will be used in luma quantization */
        int i_luma_deadzone[2]; /* {inter, intra} */

        int b_psnr;             /* compute and print PSNR stats */
        int b_ssim;             /* compute and print SSIM stats */
    } analyse;

    /* Rate control parameters */
    struct
    {
        int i_rc_method;        /* X264_RC_* */

        int i_qp_constant;      /* 0 to (51 + 6*(x264_bit_depth-8)). 0=lossless */
        int i_qp_min;           /* min allowed QP value */
        int i_qp_max;           /* max allowed QP value */
        int i_qp_step;          /* max QP step between frames */

        int i_bitrate;
        float f_rf_constant;    /* 1pass VBR, nominal QP */
        float f_rf_constant_max; /* In CRF mode, maximum CRF as caused by VBV */
        float f_rate_tolerance;
        int i_vbv_max_bitrate;
        int i_vbv_buffer_size;
        float f_vbv_buffer_init; /* <=1: fraction of buffer_size. >1: kbit */
        float f_ip_factor;
        float f_pb_factor;

        /* VBV filler: force CBR VBV and use filler bytes to ensure hard-CBR.
         * Implied by NAL-HRD CBR. */
        int b_filler;

        int i_aq_mode;          /* psy adaptive QP. (X264_AQ_*) */
        float f_aq_strength;
        int b_mb_tree;          /* Macroblock-tree ratecontrol. */
        int i_lookahead;

        /* 2pass */
        int b_stat_write;       /* Enable stat writing in psz_stat_out */
        char *psz_stat_out;     /* output filename (in UTF-8) of the 2pass stats file */
        int b_stat_read;        /* Read stat from psz_stat_in and use it */
        char *psz_stat_in;      /* input filename (in UTF-8) of the 2pass stats file */

        /* 2pass params (same as ffmpeg ones) */
        float f_qcompress;      /* 0.0 => cbr, 1.0 => constant qp */
        float f_qblur;          /* temporally blur quants */
        float f_complexity_blur; /* temporally blur complexity */
        x264_zone_t *zones;     /* ratecontrol overrides */
        int i_zones;            /* number of zone_t's */
        char *psz_zones;        /* alternate method of specifying zones */
    } rc;

    /* Cropping Rectangle parameters: added to those implicitly defined by
       non-mod16 video resolutions. */
    struct
    {
        unsigned int i_left;
        unsigned int i_top;
        unsigned int i_right;
        unsigned int i_bottom;
    } crop_rect;

    /* frame packing arrangement flag */
    int i_frame_packing;

    /* Muxing parameters */
    int b_aud;                  /* generate access unit delimiters */
    int b_repeat_headers;       /* put SPS/PPS before each keyframe */
    int b_annexb;               /* if set, place start codes (4 bytes) before NAL units,
                                 * otherwise place size (4 bytes) before NAL units. */
    int i_sps_id;               /* SPS and PPS id number */
    int b_vfr_input;            /* VFR input.  If 1, use timebase and timestamps for ratecontrol purposes.
                                 * If 0, use fps only. */
    int b_pulldown;             /* use explicity set timebase for CFR */
    uint32_t i_fps_num;
    uint32_t i_fps_den;
    uint32_t i_timebase_num;    /* Timebase numerator */
    uint32_t i_timebase_den;    /* Timebase denominator */

    int b_tff;

    /* Pulldown:
     * The correct pic_struct must be passed with each input frame.
     * The input timebase should be the timebase corresponding to the output framerate. This should be constant.
     * e.g. for 3:2 pulldown timebase should be 1001/30000
     * The PTS passed with each frame must be the PTS of the frame after pulldown is applied.
     * Frame doubling and tripling require b_vfr_input set to zero (see H.264 Table D-1)
     *
     * Pulldown changes are not clearly defined in H.264. Therefore, it is the calling app's responsibility to manage this.
     */
    int b_pic_struct;

    /* Fake Interlaced.
     *
     * Used only when b_interlaced=0. Setting this flag makes it possible to flag the stream as PAFF interlaced yet
     * encode all frames progessively. It is useful for encoding 25p and 30p Blu-Ray streams.
     */
    int b_fake_interlaced;

    /* Don't optimize header parameters based on video content, e.g. ensure that splitting an input video, compressing
     * each part, and stitching them back together will result in identical SPS/PPS. This is necessary for stitching
     * with container formats that don't allow multiple SPS/PPS. */
    int b_stitchable;

    int b_opencl;            /* use OpenCL when available */
    int i_opencl_device;     /* specify count of GPU devices to skip, for CLI users */
    void *opencl_device_id;  /* pass explicit cl_device_id as void*, for API users */
    char *psz_clbin_file;    /* filename (in UTF-8) of the compiled OpenCL kernel cache file */

    /* Slicing parameters */
    int i_slice_max_size;    /* Max size per slice in bytes; includes estimated NAL overhead. */
    int i_slice_max_mbs;     /* Max number of MBs per slice; overrides i_slice_count. */
    int i_slice_min_mbs;     /* Min number of MBs per slice */
    int i_slice_count;       /* Number of slices per frame: forces rectangular slices. */
    int i_slice_count_max;   /* Absolute cap on slices per frame; stops applying slice-max-size
                              * and slice-max-mbs if this is reached. */

    /* Optional callback for freeing this x264_param_t when it is done being used.
     * Only used when the x264_param_t sits in memory for an indefinite period of time,
     * i.e. when an x264_param_t is passed to x264_t in an x264_picture_t or in zones.
     * Not used when x264_encoder_reconfig is called directly. */
    void (*param_free)( void* );

    /* Optional low-level callback for low-latency encoding.  Called for each output NAL unit
     * immediately after the NAL unit is finished encoding.  This allows the calling application
     * to begin processing video data (e.g. by sending packets over a network) before the frame
     * is done encoding.
     *
     * This callback MUST do the following in order to work correctly:
     * 1) Have available an output buffer of at least size nal->i_payload*3/2 + 5 + 64.
     * 2) Call x264_nal_encode( h, dst, nal ), where dst is the output buffer.
     * After these steps, the content of nal is valid and can be used in the same way as if
     * the NAL unit were output by x264_encoder_encode.
     *
     * This does not need to be synchronous with the encoding process: the data pointed to
     * by nal (both before and after x264_nal_encode) will remain valid until the next
     * x264_encoder_encode call.  The callback must be re-entrant.
     *
     * This callback does not work with frame-based threads; threads must be disabled
     * or sliced-threads enabled.  This callback also does not work as one would expect
     * with HRD -- since the buffering period SEI cannot be calculated until the frame
     * is finished encoding, it will not be sent via this callback.
     *
     * Note also that the NALs are not necessarily returned in order when sliced threads is
     * enabled.  Accordingly, the variable i_first_mb and i_last_mb are available in
     * x264_nal_t to help the calling application reorder the slices if necessary.
     *
     * When this callback is enabled, x264_encoder_encode does not return valid NALs;
     * the calling application is expected to acquire all output NALs through the callback.
     *
     * It is generally sensible to combine this callback with a use of slice-max-mbs or
     * slice-max-size.
     *
     * The opaque pointer is the opaque pointer from the input frame associated with this
     * NAL unit. This helps distinguish between nalu_process calls from different sources,
     * e.g. if doing multiple encodes in one process.
     */
    void (*nalu_process)( x264_t *h, x264_nal_t *nal, void *opaque );
} x264_param_t;
/* Serialize a NAL unit into dst; see the nalu_process callback documentation above. */
void x264_nal_encode( x264_t *h, uint8_t *dst, x264_nal_t *nal );

/****************************************************************************
 * H.264 level restriction information
 ****************************************************************************/
typedef struct x264_level_t
{
    int level_idc;
    int mbps;        /* max macroblock processing rate (macroblocks/sec) */
    int frame_size;  /* max frame size (macroblocks) */
    int dpb;         /* max decoded picture buffer (mbs) */
    int bitrate;     /* max bitrate (kbit/sec) */
    int cpb;         /* max vbv buffer (kbit) */
    int mv_range;    /* max vertical mv component range (pixels) */
    int mvs_per_2mb; /* max mvs per 2 consecutive mbs. */
    int slice_rate;  /* ?? */
    int mincr;       /* min compression ratio */
    int bipred8x8;   /* limit bipred to >=8x8 */
    int direct8x8;   /* limit b_direct to >=8x8 */
    int frame_only;  /* forbid interlacing */
} x264_level_t;

/* all of the levels defined in the standard, terminated by .level_idc=0 */
X264_API extern const x264_level_t x264_levels[];
/****************************************************************************
 * Basic parameter handling functions
 ****************************************************************************/

/* x264_param_default:
 *      fill x264_param_t with default values and do CPU detection */
void    x264_param_default( x264_param_t * );

/* x264_param_parse:
 *  set one parameter by name.
 *  returns 0 on success, or returns one of the following errors.
 *  note: BAD_VALUE occurs only if it can't even parse the value,
 *  numerical range is not checked until x264_encoder_open() or
 *  x264_encoder_reconfig().
 *  value=NULL means "true" for boolean options, but is a BAD_VALUE for non-booleans. */
#define X264_PARAM_BAD_NAME  (-1)
#define X264_PARAM_BAD_VALUE (-2)
int x264_param_parse( x264_param_t *, const char *name, const char *value );
/****************************************************************************
* Advanced parameter handling functions
****************************************************************************/
/* These functions expose the full power of x264's preset-tune-profile system for
* easy adjustment of large numbers of internal parameters.
*
* In order to replicate x264CLI's option handling, these functions MUST be called
* in the following order:
* 1) x264_param_default_preset
* 2) Custom user options (via param_parse or directly assigned variables)
* 3) x264_param_apply_fastfirstpass
* 4) x264_param_apply_profile
*
* Additionally, x264CLI does not apply step 3 if the preset chosen is "placebo"
* or --slow-firstpass is set. */
/* x264_param_default_preset:
* The same as x264_param_default, but also use the passed preset and tune
* to modify the default settings.
* (either can be NULL, which implies no preset or no tune, respectively)
*
* Currently available presets are, ordered from fastest to slowest: */
static const char * const x264_preset_names[] = { "ultrafast", "superfast", "veryfast", "faster", "fast", "medium", "slow", "slower", "veryslow", "placebo", 0 };
/* The presets can also be indexed numerically, as in:
* x264_param_default_preset( ¶m, "3", ... )
* with ultrafast mapping to "0" and placebo mapping to "9". This mapping may
* of course change if new presets are added in between, but will always be
* ordered from fastest to slowest.
*
* Warning: the speed of these presets scales dramatically. Ultrafast is a full
* 100 times faster than placebo!
*
* Currently available tunings are: */
static const char * const x264_tune_names[] = { "film", "animation", "grain", "stillimage", "psnr", "ssim", "fastdecode", "zerolatency", 0 };
/* Multiple tunings can be used if separated by a delimiter in ",./-+",
* however multiple psy tunings cannot be used.
* film, animation, grain, stillimage, psnr, and ssim are psy tunings.
*
* returns 0 on success, negative on failure (e.g. invalid preset/tune name). */
int x264_param_default_preset( x264_param_t *, const char *preset, const char *tune );
/* x264_param_apply_fastfirstpass:
 * If first-pass mode is set (rc.b_stat_read == 0, rc.b_stat_write == 1),
 * modify the encoder settings to disable options generally not useful on
 * the first pass. */
void x264_param_apply_fastfirstpass( x264_param_t *param );
/* x264_param_apply_profile:
* Applies the restrictions of the given profile.
* Currently available profiles are, from most to least restrictive: */
/* Profile names accepted by x264_param_apply_profile, ordered from the most
 * restrictive ("baseline") to the least ("high444"); NULL-terminated. */
static const char * const x264_profile_names[] =
{
    "baseline", "main", "high",
    "high10", "high422", "high444",
    0
};
/* (can be NULL, in which case the function will do nothing)
 *
 * Does NOT guarantee that the given profile will be used: if the restrictions
 * of "High" are applied to settings that are already Baseline-compatible, the
 * stream will remain baseline. In short, it does not increase settings, only
 * decrease them.
 *
 * returns 0 on success, negative on failure (e.g. invalid profile name). */
int x264_param_apply_profile( x264_param_t *param, const char *profile );
/****************************************************************************
 * Picture structures and functions
 ****************************************************************************/
/* x264_bit_depth:
 * Specifies the number of bits per pixel that x264 uses. This is also the
 * bit depth that x264 encodes in. If this value is > 8, x264 will read
 * two bytes of input data for each pixel sample, and expect the upper
 * (16-x264_bit_depth) bits to be zero.
 * Note: The flag X264_CSP_HIGH_DEPTH must be used to specify the
 * colorspace depth as well.
 * NOTE(review): this bundled build defines X264_BIT_DEPTH as 8 in
 * x264_config.h; the two values should agree -- verify when upgrading. */
X264_API extern const int x264_bit_depth;
/* x264_chroma_format:
 * Specifies the chroma formats that x264 supports encoding. When this
 * value is non-zero, then it represents a X264_CSP_* that is the only
 * chroma format that x264 supports encoding. If the value is 0 then
 * there are no restrictions. */
X264_API extern const int x264_chroma_format;
/* Values for x264_picture_t.i_pic_struct (only used when b_pic_struct=1):
 * progressive/pulldown/field-order signalling for a frame. Values 2 and 3
 * ("TOP" and "BOTTOM" single fields) are deliberately absent, as x264 does
 * not support PAFF field coding. */
enum pic_struct_e
{
    PIC_STRUCT_AUTO              = 0,  /* automatically decide (default) */
    PIC_STRUCT_PROGRESSIVE       = 1,  /* progressive frame */
    /* 2 and 3 ("TOP"/"BOTTOM") are not supported in x264 (PAFF only) */
    PIC_STRUCT_TOP_BOTTOM        = 4,  /* top field followed by bottom */
    PIC_STRUCT_BOTTOM_TOP        = 5,  /* bottom field followed by top */
    PIC_STRUCT_TOP_BOTTOM_TOP    = 6,  /* top field, bottom field, top field repeated */
    PIC_STRUCT_BOTTOM_TOP_BOTTOM = 7,  /* bottom field, top field, bottom field repeated */
    PIC_STRUCT_DOUBLE            = 8,  /* double frame */
    PIC_STRUCT_TRIPLE            = 9,  /* triple frame */
};
/* Hypothetical Reference Decoder (HRD) timing for one encoded picture.
 * Output-only: filled by the encoder when i_nal_hrd is set
 * (see x264_picture_t.hrd_timing). */
typedef struct x264_hrd_t
{
    double cpb_initial_arrival_time; /* CPB arrival time of the picture's first bit */
    double cpb_final_arrival_time;   /* CPB arrival time of the picture's last bit */
    double cpb_removal_time;         /* time the picture is removed from the CPB */
    double dpb_output_time;          /* time the picture is output from the DPB */
} x264_hrd_t;
/* Arbitrary user SEI:
* Payload size is in bytes and the payload pointer must be valid.
* Payload types and syntax can be found in Annex D of the H.264 Specification.
* SEI payload alignment bits as described in Annex D must be included at the
* end of the payload if needed.
* The payload should not be NAL-encapsulated.
* Payloads are written first in order of input, apart from in the case when HRD
* is enabled where payloads are written after the Buffering Period SEI. */
/* One user-supplied SEI message (see the "Arbitrary user SEI" notes above):
 * `payload_size` bytes of raw, non-NAL-encapsulated payload data of the
 * given Annex D payload type. */
typedef struct x264_sei_payload_t
{
    int payload_size;  /* payload length in bytes */
    int payload_type;  /* Annex D SEI payload type code */
    uint8_t *payload;  /* payload bytes; must be valid, not NAL-encapsulated */
} x264_sei_payload_t;
/* A list of user SEI payloads to attach to a frame; payloads are written in
 * input order (after the Buffering Period SEI when HRD is enabled). */
typedef struct x264_sei_t
{
    int num_payloads;             /* number of entries in `payloads` */
    x264_sei_payload_t *payloads; /* array of SEI messages to emit */
    /* In: optional callback to free each payload AND x264_sei_payload_t when used. */
    void (*sei_free)( void* );
} x264_sei_t;
/* Raw picture data: colorspace tag, plane count, and per-plane stride and
 * data pointers (up to four planes). */
typedef struct x264_image_t
{
    int i_csp;         /* colorspace (X264_CSP_*) */
    int i_plane;       /* number of image planes in use */
    int i_stride[4];   /* stride of each plane */
    uint8_t *plane[4]; /* pointer to the start of each plane */
} x264_image_t;
/* Optional per-macroblock input hints and per-frame output statistics
 * attached to a picture (see x264_picture_t.prop). */
typedef struct x264_image_properties_t
{
    /* All arrays of data here are ordered as follows:
     * each array contains one offset per macroblock, in raster scan order. In interlaced
     * mode, top-field MBs and bottom-field MBs are interleaved at the row level.
     * Macroblocks are 16x16 blocks of pixels (with respect to the luma plane). For the
     * purposes of calculating the number of macroblocks, width and height are rounded up to
     * the nearest 16. If in interlaced mode, height is rounded up to the nearest 32 instead. */
    /* In: an array of quantizer offsets to be applied to this image during encoding.
     *     These are added on top of the decisions made by x264.
     *     Offsets can be fractional; they are added before QPs are rounded to integer.
     *     Adaptive quantization must be enabled to use this feature. Behavior if quant
     *     offsets differ between encoding passes is undefined. */
    float *quant_offsets;
    /* In: optional callback to free quant_offsets when used.
     *     Useful if one wants to use a different quant_offset array for each frame. */
    void (*quant_offsets_free)( void* );
    /* In: optional array of flags for each macroblock.
     *     Allows specifying additional information for the encoder such as which macroblocks
     *     remain unchanged. Usable flags are listed below.
     *     x264_param_t.analyse.b_mb_info must be set to use this, since x264 needs to track
     *     extra data internally to make full use of this information.
     *
     * Out: if b_mb_info_update is set, x264 will update this array as a result of encoding.
     *
     *      For "MBINFO_CONSTANT", it will remove this flag on any macroblock whose decoded
     *      pixels have changed. This can be useful for e.g. noting which areas of the
     *      frame need to actually be blitted. Note: this intentionally ignores the effects
     *      of deblocking for the current frame, which should be fine unless one needs exact
     *      pixel-perfect accuracy.
     *
     *      Results for MBINFO_CONSTANT are currently only set for P-frames, and are not
     *      guaranteed to enumerate all blocks which haven't changed. (There may be false
     *      negatives, but no false positives.)
     */
    uint8_t *mb_info;
    /* In: optional callback to free mb_info when used. */
    void (*mb_info_free)( void* );
    /* The macroblock is constant and remains unchanged from the previous frame. */
    #define X264_MBINFO_CONSTANT (1<<0)
    /* More flags may be added in the future. */
    /* Out: SSIM of the frame luma (if x264_param_t.b_ssim is set) */
    double f_ssim;
    /* Out: Average PSNR of the frame (if x264_param_t.b_psnr is set) */
    double f_psnr_avg;
    /* Out: PSNR of Y, U, and V (if x264_param_t.b_psnr is set) */
    double f_psnr[3];
    /* Out: Average effective CRF of the encoded frame */
    double f_crf_avg;
} x264_image_properties_t;
/* One picture passed into (pic_in) or returned from (pic_out)
 * x264_encoder_encode. */
typedef struct x264_picture_t
{
    /* In: force picture type (if not auto)
     *     If x264 encoding parameters are violated in the forcing of picture types,
     *     x264 will correct the input picture type and log a warning.
     * Out: type of the picture encoded */
    int i_type;
    /* In: force quantizer for != X264_QP_AUTO */
    int i_qpplus1;
    /* In: pic_struct, for pulldown/doubling/etc...used only if b_pic_struct=1.
     *     use pic_struct_e for pic_struct inputs
     * Out: pic_struct element associated with frame */
    int i_pic_struct;
    /* Out: whether this frame is a keyframe. Important when using modes that result in
     *      SEI recovery points being used instead of IDR frames. */
    int b_keyframe;
    /* In: user pts, Out: pts of encoded picture (user) */
    int64_t i_pts;
    /* Out: frame dts. When the pts of the first frame is close to zero,
     *      initial frames may have a negative dts which must be dealt with by any muxer */
    int64_t i_dts;
    /* In: custom encoding parameters to be set from this frame forwards
     *     (in coded order, not display order). If NULL, continue using
     *     parameters from the previous frame. Some parameters, such as
     *     aspect ratio, can only be changed per-GOP due to the limitations
     *     of H.264 itself; in this case, the caller must force an IDR frame
     *     if it needs the changed parameter to apply immediately. */
    x264_param_t *param;
    /* In: raw image data */
    /* Out: reconstructed image data. x264 may skip part of the reconstruction process,
     *      e.g. deblocking, in frames where it isn't necessary. To force complete
     *      reconstruction, at a small speed cost, set b_full_recon. */
    x264_image_t img;
    /* In: optional information to modify encoder decisions for this frame
     * Out: information about the encoded frame */
    x264_image_properties_t prop;
    /* Out: HRD timing information. Output only when i_nal_hrd is set. */
    x264_hrd_t hrd_timing;
    /* In: arbitrary user SEI (e.g. subtitles, AFDs) */
    x264_sei_t extra_sei;
    /* private user data. copied from input to output frames. */
    void *opaque;
} x264_picture_t;
/* x264_picture_init:
 * initialize an x264_picture_t. Needs to be done if the calling application
 * allocates its own x264_picture_t as opposed to using x264_picture_alloc. */
void x264_picture_init( x264_picture_t *pic );
/* x264_picture_alloc:
 * alloc data for a picture. You must call x264_picture_clean on it.
 * returns 0 on success, or -1 on malloc failure or invalid colorspace. */
int x264_picture_alloc( x264_picture_t *pic, int i_csp, int i_width, int i_height );
/* x264_picture_clean:
 * free associated resources for an x264_picture_t allocated with
 * x264_picture_alloc ONLY */
void x264_picture_clean( x264_picture_t *pic );
/****************************************************************************
 * Encoder functions
 ****************************************************************************/
/* Force a link error in the case of linking against an incompatible API version.
 * Glue #defines exist to force correct macro expansion; the final output of the macro
 * is x264_encoder_open_##X264_BUILD (for purposes of dlopen). */
#define x264_encoder_glue1(x,y) x##y
#define x264_encoder_glue2(x,y) x264_encoder_glue1(x,y)
#define x264_encoder_open x264_encoder_glue2(x264_encoder_open_,X264_BUILD)
/* x264_encoder_open:
 * create a new encoder handler, all parameters from x264_param_t are copied */
x264_t *x264_encoder_open( x264_param_t *param );
/* x264_encoder_reconfig:
 * various parameters from x264_param_t are copied.
 * this takes effect immediately, on whichever frame is encoded next;
 * due to delay, this may not be the next frame passed to encoder_encode.
 * if the change should apply to some particular frame, use x264_picture_t->param instead.
 * returns 0 on success, negative on parameter validation error.
 * not all parameters can be changed; see the actual function for a detailed breakdown.
 *
 * since not all parameters can be changed, moving from preset to preset may not always
 * fully copy all relevant parameters, but should still work usably in practice. however,
 * more so than for other presets, many of the speed shortcuts used in ultrafast cannot be
 * switched out of; using reconfig to switch between ultrafast and other presets is not
 * recommended without a more fine-grained breakdown of parameters to take this into account. */
int x264_encoder_reconfig( x264_t *h, x264_param_t *param );
/* x264_encoder_parameters:
 * copies the current internal set of parameters to the pointer provided
 * by the caller. useful when the calling application needs to know
 * how x264_encoder_open has changed the parameters, or the current state
 * of the encoder after multiple x264_encoder_reconfig calls.
 * note that the data accessible through pointers in the returned param struct
 * (e.g. filenames) should not be modified by the calling application. */
void x264_encoder_parameters( x264_t *h, x264_param_t *param );
/* x264_encoder_headers:
 * return the SPS and PPS that will be used for the whole stream.
 * *pi_nal is the number of NAL units outputted in pp_nal.
 * returns the number of bytes in the returned NALs.
 * returns negative on error.
 * the payloads of all output NALs are guaranteed to be sequential in memory. */
int x264_encoder_headers( x264_t *h, x264_nal_t **pp_nal, int *pi_nal );
/* x264_encoder_encode:
 * encode one picture.
 * *pi_nal is the number of NAL units outputted in pp_nal.
 * returns the number of bytes in the returned NALs.
 * returns negative on error and zero if no NAL units returned.
 * the payloads of all output NALs are guaranteed to be sequential in memory. */
int x264_encoder_encode( x264_t *h, x264_nal_t **pp_nal, int *pi_nal, x264_picture_t *pic_in, x264_picture_t *pic_out );
/* x264_encoder_close:
 * close an encoder handler */
void x264_encoder_close( x264_t *h );
/* x264_encoder_delayed_frames:
 * return the number of currently delayed (buffered) frames
 * this should be used at the end of the stream, to know when you have all the encoded frames. */
int x264_encoder_delayed_frames( x264_t *h );
/* x264_encoder_maximum_delayed_frames( x264_t *h ):
 * return the maximum number of delayed (buffered) frames that can occur with the current
 * parameters. */
int x264_encoder_maximum_delayed_frames( x264_t *h );
/* x264_encoder_intra_refresh:
 * If an intra refresh is not in progress, begin one with the next P-frame.
 * If an intra refresh is in progress, begin one as soon as the current one finishes.
 * Requires that b_intra_refresh be set.
 *
 * Useful for interactive streaming where the client can tell the server that packet loss has
 * occurred. In this case, keyint can be set to an extremely high value so that intra refreshes
 * only occur when calling x264_encoder_intra_refresh.
 *
 * In multi-pass encoding, if x264_encoder_intra_refresh is called differently in each pass,
 * behavior is undefined.
 *
 * Should not be called during an x264_encoder_encode. */
void x264_encoder_intra_refresh( x264_t *h );
/* x264_encoder_invalidate_reference:
 * An interactive error resilience tool, designed for use in a low-latency one-encoder-few-clients
 * system. When the client has packet loss or otherwise incorrectly decodes a frame, the encoder
 * can be told with this command to "forget" the frame and all frames that depend on it, referencing
 * only frames that occurred before the loss. This will force a keyframe if no frames are left to
 * reference after the aforementioned "forgetting".
 *
 * It is strongly recommended to use a large i_dpb_size in this case, which allows the encoder to
 * keep around extra, older frames to fall back on in case more recent frames are all invalidated.
 * Unlike increasing i_frame_reference, this does not increase the number of frames used for motion
 * estimation and thus has no speed impact. It is also recommended to set a very large keyframe
 * interval, so that keyframes are not used except as necessary for error recovery.
 *
 * x264_encoder_invalidate_reference is not currently compatible with the use of B-frames or intra
 * refresh.
 *
 * In multi-pass encoding, if x264_encoder_invalidate_reference is called differently in each pass,
 * behavior is undefined.
 *
 * Should not be called during an x264_encoder_encode, but multiple calls can be made simultaneously.
 *
 * Returns 0 on success, negative on failure. */
int x264_encoder_invalidate_reference( x264_t *h, int64_t pts );
#ifdef __cplusplus
}
#endif
#endif
================================================
FILE: iOS_demo/x264/x264_config.h
================================================
#define X264_BIT_DEPTH 8
#define X264_GPL 1
#define X264_INTERLACED 1
#define X264_CHROMA_FORMAT 0
#define X264_REV 2744
#define X264_REV_DIFF 10
#define X264_VERSION " r2744+10M 0c6fcc5"
#define X264_POINTVER "0.148.2744+10M 0c6fcc5"
================================================
FILE: ubuntu_x64/READme.txt
================================================
ubuntu 14.04
gcc 4.8.4