Repository: ksvc/ks265codec Branch: master Commit: cd09626e643a Files: 172 Total size: 178.8 MB Directory structure: gitextract_saaj9mjn/ ├── 2017.03.12decSpeed_ksc265InffmpegVSopenhevcInffmpeg.xlsx ├── 2017.09.07qy265VSx264(sdk)_ios.xlsm ├── 2017.09.11qy265VSx264(sdk)_android.xlsm ├── 2017.09.13ksc265VSx264(sdk)-x265-vp9-intel265-classA-E&Game_v2613.xlsm ├── Android_demo/ │ ├── .gitignore │ ├── KSY265CodecDemo/ │ │ ├── .gitignore │ │ ├── app/ │ │ │ ├── .gitignore │ │ │ ├── CMakeLists.txt │ │ │ ├── build.gradle │ │ │ ├── proguard-rules.pro │ │ │ └── src/ │ │ │ └── main/ │ │ │ ├── AndroidManifest.xml │ │ │ ├── java/ │ │ │ │ └── com/ │ │ │ │ ├── ianhanniballake/ │ │ │ │ │ └── localstorage/ │ │ │ │ │ └── LocalStorageProvider.java │ │ │ │ ├── ipaulpro/ │ │ │ │ │ └── afilechooser/ │ │ │ │ │ ├── FileChooserActivity.java │ │ │ │ │ ├── FileListAdapter.java │ │ │ │ │ ├── FileListFragment.java │ │ │ │ │ ├── FileLoader.java │ │ │ │ │ └── utils/ │ │ │ │ │ └── FileUtils.java │ │ │ │ └── ksyun/ │ │ │ │ └── media/ │ │ │ │ └── ksy265codec/ │ │ │ │ └── demo/ │ │ │ │ ├── decoder/ │ │ │ │ │ └── hevdecoder/ │ │ │ │ │ ├── GLPlayView.java │ │ │ │ │ ├── GLRenderer.java │ │ │ │ │ └── NativeMediaPlayer.java │ │ │ │ ├── encoder/ │ │ │ │ │ ├── Encoder.java │ │ │ │ │ └── EncoderWrapper.java │ │ │ │ └── ui/ │ │ │ │ ├── BaseFragment.java │ │ │ │ ├── DecoderFragment.java │ │ │ │ ├── DecoderSettings.java │ │ │ │ ├── DecoderSettingsFragment.java │ │ │ │ ├── EncoderFragment.java │ │ │ │ ├── EncoderSettings.java │ │ │ │ ├── EncoderSettingsFragment.java │ │ │ │ ├── HelpFragment.java │ │ │ │ ├── KSY265CodecDemoApp.java │ │ │ │ ├── MainActivity.java │ │ │ │ ├── MyFragmentAdapter.java │ │ │ │ ├── ProgressDialogFragment.java │ │ │ │ └── Settings.java │ │ │ ├── jni/ │ │ │ │ ├── Android.mk │ │ │ │ ├── Application.mk │ │ │ │ ├── decoder/ │ │ │ │ │ ├── Android.mk │ │ │ │ │ └── jniplayer/ │ │ │ │ │ ├── Android.mk │ │ │ │ │ ├── Chromium_LICENSE.txt │ │ │ │ │ ├── gl_renderer.cpp │ │ │ │ │ ├── gl_renderer.h │ │ 
│ │ │ ├── jni_utils.cpp │ │ │ │ │ ├── jni_utils.h │ │ │ │ │ ├── jniplayer.cpp │ │ │ │ │ ├── jniplayer.h │ │ │ │ │ ├── jniplayer_new.cpp │ │ │ │ │ ├── yuv2rgb565.cpp │ │ │ │ │ └── yuv2rgb565.h │ │ │ │ └── encoder/ │ │ │ │ ├── Android.mk │ │ │ │ ├── encoderwrapper.c │ │ │ │ ├── encoderwrapper.h │ │ │ │ └── log.h │ │ │ └── res/ │ │ │ ├── drawable/ │ │ │ │ ├── selector_tab_background.xml │ │ │ │ ├── tab_home_btn.xml │ │ │ │ └── tab_view_btn.xml │ │ │ ├── layout/ │ │ │ │ ├── activity_main.xml │ │ │ │ ├── decoder_settings.xml │ │ │ │ ├── encoder_settings.xml │ │ │ │ ├── file.xml │ │ │ │ ├── fragment_item.xml │ │ │ │ ├── help.xml │ │ │ │ ├── main_tab_layout.xml │ │ │ │ └── tab_content.xml │ │ │ ├── values/ │ │ │ │ ├── bool.xml │ │ │ │ ├── colors.xml │ │ │ │ ├── dimens.xml │ │ │ │ ├── strings.xml │ │ │ │ ├── styles.xml │ │ │ │ └── themes.xml │ │ │ └── xml/ │ │ │ └── mimetypes.xml │ │ ├── build.gradle │ │ ├── gradle/ │ │ │ └── wrapper/ │ │ │ ├── gradle-wrapper.jar │ │ │ └── gradle-wrapper.properties │ │ ├── gradle.properties │ │ ├── gradlew │ │ ├── gradlew.bat │ │ └── settings.gradle │ ├── README.md │ └── prebuilt/ │ ├── arm64-v8a/ │ │ ├── libcpufeatures.a │ │ ├── libqydecoder.a │ │ ├── libqyencoder.a │ │ └── libx264.a │ ├── armeabi-v7a/ │ │ ├── libcpufeatures.a │ │ ├── libqydecoder.a │ │ ├── libqyencoder.a │ │ └── libx264.a │ └── include/ │ ├── lenthevcdec.h │ ├── qy265dec.h │ ├── qy265def.h │ ├── qy265enc.h │ ├── qyauth_env.h │ ├── x264.h │ └── x264_config.h ├── README.md ├── android_arm64/ │ ├── appdecoder │ └── appencoder ├── centos_x64/ │ ├── READme.txt │ ├── appdecoder │ └── appencoder ├── iOS_demo/ │ ├── KSY265CodecDemo_iOS/ │ │ ├── AppDelegate.h │ │ ├── AppDelegate.m │ │ ├── Assets.xcassets/ │ │ │ ├── AppIcon.appiconset/ │ │ │ │ └── Contents.json │ │ │ ├── Contents.json │ │ │ ├── first.imageset/ │ │ │ │ └── Contents.json │ │ │ ├── recycle-bin.imageset/ │ │ │ │ └── Contents.json │ │ │ └── second.imageset/ │ │ │ └── Contents.json │ │ ├── Base.lproj/ │ │ │ ├── 
LaunchScreen.storyboard │ │ │ └── Main.storyboard │ │ ├── BaseViewController.h │ │ ├── BaseViewController.m │ │ ├── DecoderHelperViewController.h │ │ ├── DecoderHelperViewController.m │ │ ├── EncoderHelperViewController.h │ │ ├── EncoderHelperViewController.m │ │ ├── FirstViewController.h │ │ ├── FirstViewController.m │ │ ├── GLRenderer.h │ │ ├── GLRenderer.m │ │ ├── GLView.h │ │ ├── GLView.m │ │ ├── Info.plist │ │ ├── KSYMovieEncoder.h │ │ ├── KSYMovieEncoder.m │ │ ├── KSYMoviePlayer.h │ │ ├── KSYMoviePlayer.m │ │ ├── MovieEncoder.h │ │ ├── MovieEncoder.m │ │ ├── MoviePlayer.h │ │ ├── MoviePlayer.m │ │ ├── MoviesViewController.h │ │ ├── MoviesViewController.m │ │ ├── PlayViewController.h │ │ ├── PlayViewController.m │ │ ├── PlayViewController.xib │ │ ├── SecondViewController.h │ │ ├── SecondViewController.m │ │ ├── SettingsDecoderViewController.h │ │ ├── SettingsDecoderViewController.m │ │ ├── SettingsEncoderViewController.h │ │ ├── SettingsEncoderViewController.m │ │ └── main.m │ ├── KSY265CodecDemo_iOS.xcodeproj/ │ │ ├── project.pbxproj │ │ ├── project.xcworkspace/ │ │ │ ├── contents.xcworkspacedata │ │ │ └── xcuserdata/ │ │ │ ├── jiangdong.xcuserdatad/ │ │ │ │ └── UserInterfaceState.xcuserstate │ │ │ └── ksyun.xcuserdatad/ │ │ │ └── UserInterfaceState.xcuserstate │ │ └── xcuserdata/ │ │ ├── jiangdong.xcuserdatad/ │ │ │ ├── xcdebugger/ │ │ │ │ └── Breakpoints_v2.xcbkptlist │ │ │ └── xcschemes/ │ │ │ ├── KSY265CodecDemo_iOS.xcscheme │ │ │ └── xcschememanagement.plist │ │ └── ksyun.xcuserdatad/ │ │ ├── xcdebugger/ │ │ │ └── Breakpoints_v2.xcbkptlist │ │ └── xcschemes/ │ │ ├── KSY265CodecDemo_iOS.xcscheme │ │ └── xcschememanagement.plist │ ├── combox/ │ │ ├── AYHCustomComboBox.h │ │ ├── AYHCustomComboBox.m │ │ └── AYHCustomComboBoxDelegate.h │ ├── ksy265codec/ │ │ ├── libqycommon.a │ │ ├── libqydecoder.a │ │ ├── libqyencoder.a │ │ ├── qy265dec.h │ │ ├── qy265def.h │ │ └── qy265enc.h │ ├── lenthevcdec/ │ │ ├── lenthevcdec.h │ │ └── liblenthevcdec.a │ ├── resource/ │ 
│ ├── 1280x720_15.yuv │ │ ├── 640x480_15.yuv │ │ └── 960x540_15.yuv │ └── x264/ │ ├── libx264.a │ ├── x264.h │ └── x264_config.h └── ubuntu_x64/ ├── READme.txt ├── appdecoder └── appencoder ================================================ FILE CONTENTS ================================================ ================================================ FILE: Android_demo/.gitignore ================================================ KSY265CodecDemo/.idea KSY265CodecDemo/app/src/main/libs KSY265CodecDemo/app/src/main/obj ================================================ FILE: Android_demo/KSY265CodecDemo/.gitignore ================================================ *.iml .gradle /local.properties /.idea/workspace.xml /.idea/libraries .DS_Store /build /captures .externalNativeBuild ================================================ FILE: Android_demo/KSY265CodecDemo/app/.gitignore ================================================ /build ================================================ FILE: Android_demo/KSY265CodecDemo/app/CMakeLists.txt ================================================ # For more information about using CMake with Android Studio, read the # documentation: https://d.android.com/studio/projects/add-native-code.html # Sets the minimum version of CMake required to build the native library. cmake_minimum_required(VERSION 3.4.1) # Creates and names a library, sets it as either STATIC # or SHARED, and provides the relative paths to its source code. # You can define multiple libraries, and CMake builds them for you. # Gradle automatically packages shared libraries with your APK. add_library( # Sets the name of the library. native-lib # Sets the library as a shared library. SHARED # Provides a relative path to your source file(s). src/main/jni/encoderwrapper.c ) # Specifies a path to native header files. include_directories(../../prebuilt/include ) # Searches for a specified prebuilt library and stores the path as a # variable. 
Because CMake includes system libraries in the search path by # default, you only need to specify the name of the public NDK library # you want to add. CMake verifies that the library exists before # completing its build. find_library( # Sets the name of the path variable. log-lib # Specifies the name of the NDK library that # you want CMake to locate. log ) # Specifies libraries CMake should link to your target library. You # can link multiple libraries, such as libraries you define in this # build script, prebuilt third-party libraries, or system libraries. target_link_libraries( # Specifies the target library. native-lib # Links the target library to the log library # included in the NDK. ${log-lib} #x264 ${CMAKE_CURRENT_SOURCE_DIR}/../../prebuilt/${ANDROID_ABI}/libx264.a #ksy265 ${CMAKE_CURRENT_SOURCE_DIR}/../../prebuilt/${ANDROID_ABI}/libqy265.a ${CMAKE_CURRENT_SOURCE_DIR}/../../prebuilt/${ANDROID_ABI}/libcpufeatures.a) ================================================ FILE: Android_demo/KSY265CodecDemo/app/build.gradle ================================================ apply plugin: 'com.android.application' android { compileSdkVersion 25 buildToolsVersion "25.0.2" defaultConfig { applicationId "com.ksyun.media.ksy265codec.demo" minSdkVersion 14 targetSdkVersion 17 versionCode 1 versionName "1.0.0" externalNativeBuild { ndkBuild { //abiFilters 'x86', 'armeabi', 'armeabi-v7a', 'arm64-v8a' abiFilters "armeabi-v7a" } } } buildTypes { release { minifyEnabled false proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro' } debug { jniDebuggable true } } sourceSets{ main{ jniLibs.srcDirs 'src/main/libs' jni.srcDirs = [] } } externalNativeBuild { ndkBuild { path "src/main/jni/Android.mk" } } } dependencies { compile fileTree(include: ['*.jar'], dir: 'libs') compile 'com.android.support:appcompat-v7:25.3.0' compile 'com.android.support:percent:25.3.0' } ================================================ FILE: 
Android_demo/KSY265CodecDemo/app/proguard-rules.pro ================================================ # Add project specific ProGuard rules here. # By default, the flags in this file are appended to flags specified # in /Users/sujia/Library/Android/sdk/tools/proguard/proguard-android.txt # You can edit the include path and order by changing the proguardFiles # directive in build.gradle. # # For more details, see # http://developer.android.com/guide/developing/tools/proguard.html # Add any project specific keep options here: # If your project uses WebView with JS, uncomment the following # and specify the fully qualified class name to the JavaScript interface # class: #-keepclassmembers class fqcn.of.javascript.interface.for.webview { # public *; #} # Uncomment this to preserve the line number information for # debugging stack traces. #-keepattributes SourceFile,LineNumberTable # If you keep the line number information, uncomment this to # hide the original source file name. #-renamesourcefileattribute SourceFile ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/AndroidManifest.xml ================================================ ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ianhanniballake/localstorage/LocalStorageProvider.java ================================================ package com.ianhanniballake.localstorage; import android.content.res.AssetFileDescriptor; import android.database.Cursor; import android.database.MatrixCursor; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.Point; import android.os.CancellationSignal; import android.os.Environment; import android.os.ParcelFileDescriptor; import android.provider.DocumentsContract.Document; import android.provider.DocumentsContract.Root; import android.provider.DocumentsProvider; import android.util.Log; import android.webkit.MimeTypeMap; import 
com.ksyun.media.ksy265codec.demo.R; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; public class LocalStorageProvider extends DocumentsProvider { public static final String AUTHORITY = "com.ianhanniballake.localstorage.documents"; /** * Default root projection: everything but Root.COLUMN_MIME_TYPES */ private final static String[] DEFAULT_ROOT_PROJECTION = new String[] { Root.COLUMN_ROOT_ID, Root.COLUMN_FLAGS, Root.COLUMN_TITLE, Root.COLUMN_DOCUMENT_ID, Root.COLUMN_ICON, Root.COLUMN_AVAILABLE_BYTES }; /** * Default document projection: everything but Document.COLUMN_ICON and * Document.COLUMN_SUMMARY */ private final static String[] DEFAULT_DOCUMENT_PROJECTION = new String[] { Document.COLUMN_DOCUMENT_ID, Document.COLUMN_DISPLAY_NAME, Document.COLUMN_FLAGS, Document.COLUMN_MIME_TYPE, Document.COLUMN_SIZE, Document.COLUMN_LAST_MODIFIED }; @Override public Cursor queryRoots(final String[] projection) throws FileNotFoundException { // Create a cursor with either the requested fields, or the default // projection if "projection" is null. final MatrixCursor result = new MatrixCursor(projection != null ? 
projection : DEFAULT_ROOT_PROJECTION); // Add Home directory File homeDir = Environment.getExternalStorageDirectory(); final MatrixCursor.RowBuilder row = result.newRow(); // These columns are required row.add(Root.COLUMN_ROOT_ID, homeDir.getAbsolutePath()); row.add(Root.COLUMN_DOCUMENT_ID, homeDir.getAbsolutePath()); row.add(Root.COLUMN_TITLE, getContext().getString(R.string.internal_storage)); row.add(Root.COLUMN_FLAGS, Root.FLAG_LOCAL_ONLY | Root.FLAG_SUPPORTS_CREATE); row.add(Root.COLUMN_ICON, R.drawable.ic_provider); // These columns are optional row.add(Root.COLUMN_AVAILABLE_BYTES, homeDir.getFreeSpace()); // Root.COLUMN_MIME_TYPE is another optional column and useful if you // have multiple roots with different // types of mime types (roots that don't match the requested mime type // are automatically hidden) return result; } @Override public String createDocument(final String parentDocumentId, final String mimeType, final String displayName) throws FileNotFoundException { File newFile = new File(parentDocumentId, displayName); try { newFile.createNewFile(); return newFile.getAbsolutePath(); } catch (IOException e) { Log.e(LocalStorageProvider.class.getSimpleName(), "Error creating new file " + newFile); } return null; } @Override public AssetFileDescriptor openDocumentThumbnail(final String documentId, final Point sizeHint, final CancellationSignal signal) throws FileNotFoundException { // Assume documentId points to an image file. 
Build a thumbnail no // larger than twice the sizeHint BitmapFactory.Options options = new BitmapFactory.Options(); options.inJustDecodeBounds = true; BitmapFactory.decodeFile(documentId, options); final int targetHeight = 2 * sizeHint.y; final int targetWidth = 2 * sizeHint.x; final int height = options.outHeight; final int width = options.outWidth; options.inSampleSize = 1; if (height > targetHeight || width > targetWidth) { final int halfHeight = height / 2; final int halfWidth = width / 2; // Calculate the largest inSampleSize value that is a power of 2 and // keeps both // height and width larger than the requested height and width. while ((halfHeight / options.inSampleSize) > targetHeight || (halfWidth / options.inSampleSize) > targetWidth) { options.inSampleSize *= 2; } } options.inJustDecodeBounds = false; Bitmap bitmap = BitmapFactory.decodeFile(documentId, options); // Write out the thumbnail to a temporary file File tempFile = null; FileOutputStream out = null; try { tempFile = File.createTempFile("thumbnail", null, getContext().getCacheDir()); out = new FileOutputStream(tempFile); bitmap.compress(Bitmap.CompressFormat.PNG, 90, out); } catch (IOException e) { Log.e(LocalStorageProvider.class.getSimpleName(), "Error writing thumbnail", e); return null; } finally { if (out != null) try { out.close(); } catch (IOException e) { Log.e(LocalStorageProvider.class.getSimpleName(), "Error closing thumbnail", e); } } // It appears the Storage Framework UI caches these results quite // aggressively so there is little reason to // write your own caching layer beyond what you need to return a single // AssetFileDescriptor return new AssetFileDescriptor(ParcelFileDescriptor.open(tempFile, ParcelFileDescriptor.MODE_READ_ONLY), 0, AssetFileDescriptor.UNKNOWN_LENGTH); } @Override public Cursor queryChildDocuments(final String parentDocumentId, final String[] projection, final String sortOrder) throws FileNotFoundException { // Create a cursor with either the requested 
fields, or the default // projection if "projection" is null. final MatrixCursor result = new MatrixCursor(projection != null ? projection : DEFAULT_DOCUMENT_PROJECTION); final File parent = new File(parentDocumentId); for (File file : parent.listFiles()) { // Don't show hidden files/folders if (!file.getName().startsWith(".")) { // Adds the file's display name, MIME type, size, and so on. includeFile(result, file); } } return result; } @Override public Cursor queryDocument(final String documentId, final String[] projection) throws FileNotFoundException { // Create a cursor with either the requested fields, or the default // projection if "projection" is null. final MatrixCursor result = new MatrixCursor(projection != null ? projection : DEFAULT_DOCUMENT_PROJECTION); includeFile(result, new File(documentId)); return result; } private void includeFile(final MatrixCursor result, final File file) throws FileNotFoundException { final MatrixCursor.RowBuilder row = result.newRow(); // These columns are required row.add(Document.COLUMN_DOCUMENT_ID, file.getAbsolutePath()); row.add(Document.COLUMN_DISPLAY_NAME, file.getName()); String mimeType = getDocumentType(file.getAbsolutePath()); row.add(Document.COLUMN_MIME_TYPE, mimeType); int flags = file.canWrite() ? Document.FLAG_SUPPORTS_DELETE | Document.FLAG_SUPPORTS_WRITE : 0; // We only show thumbnails for image files - expect a call to // openDocumentThumbnail for each file that has // this flag set if (mimeType.startsWith("image/")) flags |= Document.FLAG_SUPPORTS_THUMBNAIL; row.add(Document.COLUMN_FLAGS, flags); // COLUMN_SIZE is required, but can be null row.add(Document.COLUMN_SIZE, file.length()); // These columns are optional row.add(Document.COLUMN_LAST_MODIFIED, file.lastModified()); // Document.COLUMN_ICON can be a resource id identifying a custom icon. 
// The system provides default icons // based on mime type // Document.COLUMN_SUMMARY is optional additional information about the // file } @Override public String getDocumentType(final String documentId) throws FileNotFoundException { File file = new File(documentId); if (file.isDirectory()) return Document.MIME_TYPE_DIR; // From FileProvider.getType(Uri) final int lastDot = file.getName().lastIndexOf('.'); if (lastDot >= 0) { final String extension = file.getName().substring(lastDot + 1); final String mime = MimeTypeMap.getSingleton().getMimeTypeFromExtension(extension); if (mime != null) { return mime; } } return "application/octet-stream"; } @Override public void deleteDocument(final String documentId) throws FileNotFoundException { new File(documentId).delete(); } @Override public ParcelFileDescriptor openDocument(final String documentId, final String mode, final CancellationSignal signal) throws FileNotFoundException { File file = new File(documentId); final boolean isWrite = (mode.indexOf('w') != -1); if (isWrite) { return ParcelFileDescriptor.open(file, ParcelFileDescriptor.MODE_READ_WRITE); } else { return ParcelFileDescriptor.open(file, ParcelFileDescriptor.MODE_READ_ONLY); } } @Override public boolean onCreate() { return true; } } ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ipaulpro/afilechooser/FileChooserActivity.java ================================================ /* * Copyright (C) 2013 Paul Burke * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package com.ipaulpro.afilechooser; import android.app.ActionBar; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.os.Environment; import android.support.v4.app.FragmentActivity; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentManager.BackStackEntry; import android.support.v4.app.FragmentManager.OnBackStackChangedListener; import android.support.v4.app.FragmentTransaction; import android.view.Menu; import android.view.MenuItem; import android.widget.Toast; import com.ksyun.media.ksy265codec.demo.R; import java.io.File; /** * Main Activity that handles the FileListFragments * * @version 2013-06-25 * @author paulburke (ipaulpro) */ public class FileChooserActivity extends FragmentActivity implements OnBackStackChangedListener, FileListFragment.Callbacks { public static final String PATH = "path"; public static final String EXTERNAL_BASE_PATH = Environment .getExternalStorageDirectory().getAbsolutePath(); private static final boolean HAS_ACTIONBAR = Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB; private FragmentManager mFragmentManager; private BroadcastReceiver mStorageListener = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { Toast.makeText(context, R.string.storage_removed, Toast.LENGTH_LONG).show(); finishWithResult(null); } }; private String mPath; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); mFragmentManager = getSupportFragmentManager(); mFragmentManager.addOnBackStackChangedListener(this); if (savedInstanceState == null) { mPath = EXTERNAL_BASE_PATH; addFragment(); } else { mPath = savedInstanceState.getString(PATH); } setTitle(mPath); 
} @Override protected void onPause() { super.onPause(); unregisterStorageListener(); } @Override protected void onResume() { super.onResume(); registerStorageListener(); } @Override protected void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); outState.putString(PATH, mPath); } @Override public void onBackStackChanged() { int count = mFragmentManager.getBackStackEntryCount(); if (count > 0) { BackStackEntry fragment = mFragmentManager.getBackStackEntryAt(count - 1); mPath = fragment.getName(); } else { mPath = EXTERNAL_BASE_PATH; } setTitle(mPath); if (HAS_ACTIONBAR) invalidateOptionsMenu(); } @Override public boolean onCreateOptionsMenu(Menu menu) { if (HAS_ACTIONBAR) { boolean hasBackStack = mFragmentManager.getBackStackEntryCount() > 0; ActionBar actionBar = getActionBar(); actionBar.setDisplayHomeAsUpEnabled(hasBackStack); actionBar.setHomeButtonEnabled(hasBackStack); } return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { case android.R.id.home: mFragmentManager.popBackStack(); return true; } return super.onOptionsItemSelected(item); } /** * Add the initial Fragment with given path. */ private void addFragment() { FileListFragment fragment = FileListFragment.newInstance(mPath); mFragmentManager.beginTransaction() .add(android.R.id.content, fragment).commit(); } /** * "Replace" the existing Fragment with a new one using given path. We're * really adding a Fragment to the back stack. * * @param file The file (directory) to display. */ private void replaceFragment(File file) { mPath = file.getAbsolutePath(); FileListFragment fragment = FileListFragment.newInstance(mPath); mFragmentManager.beginTransaction() .replace(android.R.id.content, fragment) .setTransition(FragmentTransaction.TRANSIT_FRAGMENT_OPEN) .addToBackStack(mPath).commit(); } /** * Finish this Activity with a result code and URI of the selected file. * * @param file The file selected. 
*/ private void finishWithResult(File file) { if (file != null) { Uri uri = Uri.fromFile(file); setResult(RESULT_OK, new Intent().setData(uri)); finish(); } else { setResult(RESULT_CANCELED); finish(); } } /** * Called when the user selects a File * * @param file The file that was selected */ @Override public void onFileSelected(File file) { if (file != null) { if (file.isDirectory()) { replaceFragment(file); } else { finishWithResult(file); } } else { Toast.makeText(FileChooserActivity.this, R.string.error_selecting_file, Toast.LENGTH_SHORT).show(); } } /** * Register the external storage BroadcastReceiver. */ private void registerStorageListener() { IntentFilter filter = new IntentFilter(); filter.addAction(Intent.ACTION_MEDIA_REMOVED); registerReceiver(mStorageListener, filter); } /** * Unregister the external storage BroadcastReceiver. */ private void unregisterStorageListener() { unregisterReceiver(mStorageListener); } } ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ipaulpro/afilechooser/FileListAdapter.java ================================================ /* * Copyright (C) 2012 Paul Burke * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package com.ipaulpro.afilechooser; import android.content.Context; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.BaseAdapter; import android.widget.TextView; import com.ksyun.media.ksy265codec.demo.R; import java.io.File; import java.util.ArrayList; import java.util.List; /** * List adapter for Files. * * @version 2013-12-11 * @author paulburke (ipaulpro) */ public class FileListAdapter extends BaseAdapter { private final static int ICON_FOLDER = R.drawable.ic_folder; private final static int ICON_FILE = R.drawable.ic_file; private final LayoutInflater mInflater; private List mData = new ArrayList(); public FileListAdapter(Context context) { mInflater = LayoutInflater.from(context); } public void add(File file) { mData.add(file); notifyDataSetChanged(); } public void remove(File file) { mData.remove(file); notifyDataSetChanged(); } public void insert(File file, int index) { mData.add(index, file); notifyDataSetChanged(); } public void clear() { mData.clear(); notifyDataSetChanged(); } @Override public File getItem(int position) { return mData.get(position); } @Override public long getItemId(int position) { return position; } @Override public int getCount() { return mData.size(); } public List getListItems() { return mData; } /** * Set the list items without notifying on the clear. This prevents loss of * scroll position. * * @param data */ public void setListItems(List data) { mData = data; notifyDataSetChanged(); } @Override public View getView(int position, View convertView, ViewGroup parent) { View row = convertView; if (row == null) row = mInflater.inflate(R.layout.file, parent, false); TextView view = (TextView) row; // Get the file at the current position final File file = getItem(position); // Set the TextView as the file name view.setText(file.getName()); // If the item is not a directory, use the file icon int icon = file.isDirectory() ? 
ICON_FOLDER : ICON_FILE; view.setCompoundDrawablesWithIntrinsicBounds(icon, 0, 0, 0); return row; } } ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ipaulpro/afilechooser/FileListFragment.java ================================================ /* * Copyright (C) 2013 Paul Burke * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ipaulpro.afilechooser; import android.app.Activity; import android.os.Bundle; import android.os.Environment; import android.support.v4.app.ListFragment; import android.support.v4.app.LoaderManager; import android.support.v4.content.Loader; import android.view.View; import android.widget.ListView; import com.ksyun.media.ksy265codec.demo.R; import java.io.File; import java.util.List; /** * Fragment that displays a list of Files in a given path. * * @version 2013-12-11 * @author paulburke (ipaulpro) */ public class FileListFragment extends ListFragment implements LoaderManager.LoaderCallbacks> { /** * Interface to listen for events. */ public interface Callbacks { /** * Called when a file is selected from the list. * * @param file The file selected */ public void onFileSelected(File file); } private static final int LOADER_ID = 0; private FileListAdapter mAdapter; private String mPath; private Callbacks mListener; /** * Create a new instance with the given file path. * * @param path The absolute path of the file (directory) to display. * @return A new Fragment with the given file path. 
*/ public static FileListFragment newInstance(String path) { FileListFragment fragment = new FileListFragment(); Bundle args = new Bundle(); args.putString(FileChooserActivity.PATH, path); fragment.setArguments(args); return fragment; } @Override public void onAttach(Activity activity) { super.onAttach(activity); try { mListener = (Callbacks) activity; } catch (ClassCastException e) { throw new ClassCastException(activity.toString() + " must implement FileListFragment.Callbacks"); } } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); mAdapter = new FileListAdapter(getActivity()); mPath = getArguments() != null ? getArguments().getString( FileChooserActivity.PATH) : Environment .getExternalStorageDirectory().getAbsolutePath(); } @Override public void onActivityCreated(Bundle savedInstanceState) { setEmptyText(getString(R.string.empty_directory)); setListAdapter(mAdapter); setListShown(false); getLoaderManager().initLoader(LOADER_ID, null, this); super.onActivityCreated(savedInstanceState); } @Override public void onListItemClick(ListView l, View v, int position, long id) { FileListAdapter adapter = (FileListAdapter) l.getAdapter(); if (adapter != null) { File file = (File) adapter.getItem(position); mPath = file.getAbsolutePath(); mListener.onFileSelected(file); } } @Override public Loader> onCreateLoader(int id, Bundle args) { return new FileLoader(getActivity(), mPath); } @Override public void onLoadFinished(Loader> loader, List data) { mAdapter.setListItems(data); if (isResumed()) setListShown(true); else setListShownNoAnimation(true); } @Override public void onLoaderReset(Loader> loader) { mAdapter.clear(); } } ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ipaulpro/afilechooser/FileLoader.java ================================================ /* * Copyright (C) 2013 Paul Burke * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not 
use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ipaulpro.afilechooser; import android.content.Context; import android.os.FileObserver; import android.support.v4.content.AsyncTaskLoader; import com.ipaulpro.afilechooser.utils.FileUtils; import java.io.File; import java.util.ArrayList; import java.util.Arrays; import java.util.List; /** * Loader that returns a list of Files in a given file path. * * @version 2013-12-11 * @author paulburke (ipaulpro) */ public class FileLoader extends AsyncTaskLoader> { private static final int FILE_OBSERVER_MASK = FileObserver.CREATE | FileObserver.DELETE | FileObserver.DELETE_SELF | FileObserver.MOVED_FROM | FileObserver.MOVED_TO | FileObserver.MODIFY | FileObserver.MOVE_SELF; private FileObserver mFileObserver; private List mData; private String mPath; public FileLoader(Context context, String path) { super(context); this.mPath = path; } @Override public List loadInBackground() { ArrayList list = new ArrayList(); // Current directory File instance final File pathDir = new File(mPath); // List file in this directory with the directory filter final File[] dirs = pathDir.listFiles(FileUtils.sDirFilter); if (dirs != null) { // Sort the folders alphabetically Arrays.sort(dirs, FileUtils.sComparator); // Add each folder to the File list for the list adapter for (File dir : dirs) list.add(dir); } // List file in this directory with the file filter final File[] files = pathDir.listFiles(FileUtils.sFileFilter); if (files != null) { // Sort the files alphabetically Arrays.sort(files, FileUtils.sComparator); // Add each 
file to the File list for the list adapter for (File file : files) list.add(file); } return list; } @Override public void deliverResult(List data) { if (isReset()) { onReleaseResources(data); return; } List oldData = mData; mData = data; if (isStarted()) super.deliverResult(data); if (oldData != null && oldData != data) onReleaseResources(oldData); } @Override protected void onStartLoading() { if (mData != null) deliverResult(mData); if (mFileObserver == null) { mFileObserver = new FileObserver(mPath, FILE_OBSERVER_MASK) { @Override public void onEvent(int event, String path) { onContentChanged(); } }; } mFileObserver.startWatching(); if (takeContentChanged() || mData == null) forceLoad(); } @Override protected void onStopLoading() { cancelLoad(); } @Override protected void onReset() { onStopLoading(); if (mData != null) { onReleaseResources(mData); mData = null; } } @Override public void onCanceled(List data) { super.onCanceled(data); onReleaseResources(data); } protected void onReleaseResources(List data) { if (mFileObserver != null) { mFileObserver.stopWatching(); mFileObserver = null; } } } ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ipaulpro/afilechooser/utils/FileUtils.java ================================================ /* * Copyright (C) 2007-2008 OpenIntents.org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package com.ipaulpro.afilechooser.utils; import android.content.ContentResolver; import android.content.ContentUris; import android.content.Context; import android.content.Intent; import android.database.Cursor; import android.database.DatabaseUtils; import android.graphics.Bitmap; import android.net.Uri; import android.os.Build; import android.os.Environment; import android.provider.DocumentsContract; import android.provider.MediaStore; import android.util.Log; import android.webkit.MimeTypeMap; import com.ianhanniballake.localstorage.LocalStorageProvider; import java.io.File; import java.io.FileFilter; import java.text.DecimalFormat; import java.util.Comparator; /** * @version 2009-07-03 * @author Peli * @version 2013-12-11 * @author paulburke (ipaulpro) */ public class FileUtils { private FileUtils() {} //private constructor to enforce Singleton pattern /** TAG for log messages. */ static final String TAG = "FileUtils"; private static final boolean DEBUG = false; // Set to true to enable logging public static final String MIME_TYPE_AUDIO = "audio/*"; public static final String MIME_TYPE_TEXT = "text/*"; public static final String MIME_TYPE_IMAGE = "image/*"; public static final String MIME_TYPE_VIDEO = "video/*"; public static final String MIME_TYPE_APP = "application/*"; public static final String HIDDEN_PREFIX = "."; /** * Gets the extension of a file name, like ".png" or ".jpg". * * @param uri * @return Extension including the dot("."); "" if there is no extension; * null if uri was null. */ public static String getExtension(String uri) { if (uri == null) { return null; } int dot = uri.lastIndexOf("."); if (dot >= 0) { return uri.substring(dot); } else { // No extension. return ""; } } /** * @return Whether the URI is a local one. */ public static boolean isLocal(String url) { if (url != null && !url.startsWith("http://") && !url.startsWith("https://")) { return true; } return false; } /** * @return True if Uri is a MediaStore Uri. 
* @author paulburke */ public static boolean isMediaUri(Uri uri) { return "media".equalsIgnoreCase(uri.getAuthority()); } /** * Convert File into Uri. * * @param file * @return uri */ public static Uri getUri(File file) { if (file != null) { return Uri.fromFile(file); } return null; } /** * Returns the path only (without file name). * * @param file * @return */ public static File getPathWithoutFilename(File file) { if (file != null) { if (file.isDirectory()) { // no file to be split off. Return everything return file; } else { String filename = file.getName(); String filepath = file.getAbsolutePath(); // Construct path without file name. String pathwithoutname = filepath.substring(0, filepath.length() - filename.length()); if (pathwithoutname.endsWith("/")) { pathwithoutname = pathwithoutname.substring(0, pathwithoutname.length() - 1); } return new File(pathwithoutname); } } return null; } /** * @return The MIME type for the given file. */ public static String getMimeType(File file) { String extension = getExtension(file.getName()); if (extension.length() > 0) return MimeTypeMap.getSingleton().getMimeTypeFromExtension(extension.substring(1)); return "application/octet-stream"; } /** * @return The MIME type for the give Uri. */ public static String getMimeType(Context context, Uri uri) { File file = new File(getPath(context, uri)); return getMimeType(file); } /** * @param uri The Uri to check. * @return Whether the Uri authority is {@link LocalStorageProvider}. * @author paulburke */ public static boolean isLocalStorageDocument(Uri uri) { return LocalStorageProvider.AUTHORITY.equals(uri.getAuthority()); } /** * @param uri The Uri to check. * @return Whether the Uri authority is ExternalStorageProvider. * @author paulburke */ public static boolean isExternalStorageDocument(Uri uri) { return "com.android.externalstorage.documents".equals(uri.getAuthority()); } /** * @param uri The Uri to check. * @return Whether the Uri authority is DownloadsProvider. 
* @author paulburke */ public static boolean isDownloadsDocument(Uri uri) { return "com.android.providers.downloads.documents".equals(uri.getAuthority()); } /** * @param uri The Uri to check. * @return Whether the Uri authority is MediaProvider. * @author paulburke */ public static boolean isMediaDocument(Uri uri) { return "com.android.providers.media.documents".equals(uri.getAuthority()); } /** * @param uri The Uri to check. * @return Whether the Uri authority is Google Photos. */ public static boolean isGooglePhotosUri(Uri uri) { return "com.google.android.apps.photos.content".equals(uri.getAuthority()); } /** * Get the value of the data column for this Uri. This is useful for * MediaStore Uris, and other file-based ContentProviders. * * @param context The context. * @param uri The Uri to query. * @param selection (Optional) Filter used in the query. * @param selectionArgs (Optional) Selection arguments used in the query. * @return The value of the _data column, which is typically a file path. * @author paulburke */ public static String getDataColumn(Context context, Uri uri, String selection, String[] selectionArgs) { Cursor cursor = null; final String column = "_data"; final String[] projection = { column }; try { cursor = context.getContentResolver().query(uri, projection, selection, selectionArgs, null); if (cursor != null && cursor.moveToFirst()) { if (DEBUG) DatabaseUtils.dumpCursor(cursor); final int column_index = cursor.getColumnIndexOrThrow(column); return cursor.getString(column_index); } } finally { if (cursor != null) cursor.close(); } return null; } /** * Get a file path from a Uri. This will get the the path for Storage Access * Framework Documents, as well as the _data field for the MediaStore and * other file-based ContentProviders.
*
* Callers should check whether the path is local before assuming it * represents a local file. * * @param context The context. * @param uri The Uri to query. * @see #isLocal(String) * @see #getFile(Context, Uri) * @author paulburke */ public static String getPath(final Context context, final Uri uri) { if (DEBUG) Log.d(TAG + " File -", "Authority: " + uri.getAuthority() + ", Fragment: " + uri.getFragment() + ", Port: " + uri.getPort() + ", Query: " + uri.getQuery() + ", Scheme: " + uri.getScheme() + ", Host: " + uri.getHost() + ", Segments: " + uri.getPathSegments().toString() ); final boolean isKitKat = Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT; // DocumentProvider if (isKitKat && DocumentsContract.isDocumentUri(context, uri)) { // LocalStorageProvider if (isLocalStorageDocument(uri)) { // The path is the id return DocumentsContract.getDocumentId(uri); } // ExternalStorageProvider else if (isExternalStorageDocument(uri)) { final String docId = DocumentsContract.getDocumentId(uri); final String[] split = docId.split(":"); final String type = split[0]; if ("primary".equalsIgnoreCase(type)) { return Environment.getExternalStorageDirectory() + "/" + split[1]; } // TODO handle non-primary volumes } // DownloadsProvider else if (isDownloadsDocument(uri)) { final String id = DocumentsContract.getDocumentId(uri); final Uri contentUri = ContentUris.withAppendedId( Uri.parse("content://downloads/public_downloads"), Long.valueOf(id)); return getDataColumn(context, contentUri, null, null); } // MediaProvider else if (isMediaDocument(uri)) { final String docId = DocumentsContract.getDocumentId(uri); final String[] split = docId.split(":"); final String type = split[0]; Uri contentUri = null; if ("image".equals(type)) { contentUri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI; } else if ("video".equals(type)) { contentUri = MediaStore.Video.Media.EXTERNAL_CONTENT_URI; } else if ("audio".equals(type)) { contentUri = MediaStore.Audio.Media.EXTERNAL_CONTENT_URI; } final 
String selection = "_id=?"; final String[] selectionArgs = new String[] { split[1] }; return getDataColumn(context, contentUri, selection, selectionArgs); } } // MediaStore (and general) else if ("content".equalsIgnoreCase(uri.getScheme())) { // Return the remote address if (isGooglePhotosUri(uri)) return uri.getLastPathSegment(); return getDataColumn(context, uri, null, null); } // File else if ("file".equalsIgnoreCase(uri.getScheme())) { return uri.getPath(); } return null; } /** * Convert Uri into File, if possible. * * @return file A local file that the Uri was pointing to, or null if the * Uri is unsupported or pointed to a remote resource. * @see #getPath(Context, Uri) * @author paulburke */ public static File getFile(Context context, Uri uri) { if (uri != null) { String path = getPath(context, uri); if (path != null && isLocal(path)) { return new File(path); } } return null; } /** * Get the file size in a human-readable string. * * @param size * @return * @author paulburke */ public static String getReadableFileSize(int size) { final int BYTES_IN_KILOBYTES = 1024; final DecimalFormat dec = new DecimalFormat("###.#"); final String KILOBYTES = " KB"; final String MEGABYTES = " MB"; final String GIGABYTES = " GB"; float fileSize = 0; String suffix = KILOBYTES; if (size > BYTES_IN_KILOBYTES) { fileSize = size / BYTES_IN_KILOBYTES; if (fileSize > BYTES_IN_KILOBYTES) { fileSize = fileSize / BYTES_IN_KILOBYTES; if (fileSize > BYTES_IN_KILOBYTES) { fileSize = fileSize / BYTES_IN_KILOBYTES; suffix = GIGABYTES; } else { suffix = MEGABYTES; } } } return String.valueOf(dec.format(fileSize) + suffix); } /** * Attempt to retrieve the thumbnail of given File from the MediaStore. This * should not be called on the UI thread. 
* * @param context * @param file * @return * @author paulburke */ public static Bitmap getThumbnail(Context context, File file) { return getThumbnail(context, getUri(file), getMimeType(file)); } /** * Attempt to retrieve the thumbnail of given Uri from the MediaStore. This * should not be called on the UI thread. * * @param context * @param uri * @return * @author paulburke */ public static Bitmap getThumbnail(Context context, Uri uri) { return getThumbnail(context, uri, getMimeType(context, uri)); } /** * Attempt to retrieve the thumbnail of given Uri from the MediaStore. This * should not be called on the UI thread. * * @param context * @param uri * @param mimeType * @return * @author paulburke */ public static Bitmap getThumbnail(Context context, Uri uri, String mimeType) { if (DEBUG) Log.d(TAG, "Attempting to get thumbnail"); if (!isMediaUri(uri)) { Log.e(TAG, "You can only retrieve thumbnails for images and videos."); return null; } Bitmap bm = null; if (uri != null) { final ContentResolver resolver = context.getContentResolver(); Cursor cursor = null; try { cursor = resolver.query(uri, null, null, null, null); if (cursor.moveToFirst()) { final int id = cursor.getInt(0); if (DEBUG) Log.d(TAG, "Got thumb ID: " + id); if (mimeType.contains("video")) { bm = MediaStore.Video.Thumbnails.getThumbnail( resolver, id, MediaStore.Video.Thumbnails.MINI_KIND, null); } else if (mimeType.contains(FileUtils.MIME_TYPE_IMAGE)) { bm = MediaStore.Images.Thumbnails.getThumbnail( resolver, id, MediaStore.Images.Thumbnails.MINI_KIND, null); } } } catch (Exception e) { if (DEBUG) Log.e(TAG, "getThumbnail", e); } finally { if (cursor != null) cursor.close(); } } return bm; } /** * File and folder comparator. 
TODO Expose sorting option method * * @author paulburke */ public static Comparator sComparator = new Comparator() { @Override public int compare(File f1, File f2) { // Sort alphabetically by lower case, which is much cleaner return f1.getName().toLowerCase().compareTo( f2.getName().toLowerCase()); } }; /** * File (not directories) filter. * * @author paulburke */ public static FileFilter sFileFilter = new FileFilter() { @Override public boolean accept(File file) { final String fileName = file.getName(); // Return files only (not directories) and skip hidden files return file.isFile() && !fileName.startsWith(HIDDEN_PREFIX); } }; /** * Folder (directories) filter. * * @author paulburke */ public static FileFilter sDirFilter = new FileFilter() { @Override public boolean accept(File file) { final String fileName = file.getName(); // Return directories only and skip hidden directories return file.isDirectory() && !fileName.startsWith(HIDDEN_PREFIX); } }; /** * Get the Intent for selecting content to be used in an Intent Chooser. 
    /**
     * Utility class that filters files by their file-name suffix.
     */
    public static class FileFilterBySuffixs implements FileFilter {
        // Pipe-separated list of accepted suffixes, or null to accept all
        // non-hidden files.
        private String suffixs;

        /**
         * One or more suffixes may be supplied. If this constructor is not
         * used (suffixs is null), all files except hidden files are matched.
         *
         * @param suffixs suffixes such as "3gp|mp3|mp4" (pipe-separated,
         *                inserted verbatim into a regex alternation)
         */
        public FileFilterBySuffixs(String suffixs) {
            this.suffixs = suffixs;
        }

        @Override
        public boolean accept(File file) {
            final String fileName = file.getName();
            if (null == suffixs) {
                // Return files only (not directories) and skip hidden files
                return file.isFile() && !fileName.startsWith(HIDDEN_PREFIX);
            } else {
                // Additionally require the name to end with ".<suffix>" for
                // one of the configured suffixes.
                return file.isFile() && !fileName.startsWith(HIDDEN_PREFIX) && fileName.matches("^.*?\\.(" + suffixs + ")$");
            }
        }
    }
init(false, 0, 0); } public GLPlayView(Context context, AttributeSet attrs) { super(context, attrs); init(false, 0, 0); } public GLPlayView(Context context, boolean translucent, int depth, int stencil) { super(context); init(translucent, depth, stencil); } private void init(boolean translucent, int depth, int stencil) { /* By default, GLSurfaceView() creates a RGB_565 opaque surface. * If we want a translucent one, we should change the surface's * format here, using PixelFormat.TRANSLUCENT for GL Surfaces * is interpreted as any 32-bit surface with alpha by SurfaceFlinger. */ if (translucent) { this.getHolder().setFormat(PixelFormat.TRANSLUCENT); } /* Setup the context factory for 2.0 rendering. * See ContextFactory class definition below */ setEGLContextFactory(new ContextFactory()); /* We need to choose an EGLConfig that matches the format of * our surface exactly. This is going to be done in our * custom config chooser. See ConfigChooser class definition * below. */ setEGLConfigChooser( translucent ? 
new ConfigChooser(8, 8, 8, 8, depth, stencil) : new ConfigChooser(5, 6, 5, 0, depth, stencil) ); /* Set the renderer responsible for frame rendering */ setRenderer(new GLRenderer()); setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); } private static void checkEglError(String prompt, EGL10 egl) { int error; while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) { Log.e(TAG, String.format("%s: EGL error: 0x%x", prompt, error)); } } private static class ContextFactory implements GLSurfaceView.EGLContextFactory { private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098; public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) { Log.i(TAG, "creating OpenGL ES 2.0 context"); checkEglError("Before eglCreateContext", egl); int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE }; EGLContext context = egl.eglCreateContext(display, eglConfig, EGL10.EGL_NO_CONTEXT, attrib_list); checkEglError("After eglCreateContext", egl); return context; } @Override public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context) { egl.eglDestroyContext(display, context); } } private static class ConfigChooser implements GLSurfaceView.EGLConfigChooser { public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) { mRedSize = r; mGreenSize = g; mBlueSize = b; mAlphaSize = a; mDepthSize = depth; mStencilSize = stencil; } /* This EGL config specification is used to specify 2.0 rendering. * We use a minimum size of 4 bits for red/green/blue, but will * perform actual matching in chooseConfig() below. 
        /**
         * Picks the first config that exactly matches the requested
         * red/green/blue/alpha sizes and meets (or exceeds) the requested
         * depth and stencil sizes.
         *
         * NOTE(review): returns null when no config matches exactly;
         * GLSurfaceView treats a null from its config chooser as an error,
         * so on devices without an exact-size config this will fail —
         * confirm that all targeted devices expose the requested formats.
         */
        public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display, EGLConfig[] configs) {
            for(EGLConfig config : configs) {
                int d = findConfigAttrib(egl, display, config, EGL10.EGL_DEPTH_SIZE, 0);
                int s = findConfigAttrib(egl, display, config, EGL10.EGL_STENCIL_SIZE, 0);
                // We need at least mDepthSize and mStencilSize bits
                if (d < mDepthSize || s < mStencilSize)
                    continue;
                // We want an *exact* match for red/green/blue/alpha
                int r = findConfigAttrib(egl, display, config, EGL10.EGL_RED_SIZE, 0);
                int g = findConfigAttrib(egl, display, config, EGL10.EGL_GREEN_SIZE, 0);
                int b = findConfigAttrib(egl, display, config, EGL10.EGL_BLUE_SIZE, 0);
                int a = findConfigAttrib(egl, display, config, EGL10.EGL_ALPHA_SIZE, 0);
                if (r == mRedSize && g == mGreenSize && b == mBlueSize && a == mAlphaSize) {
                    Log.i(TAG, "choosed config: (r,g,b,a) = (" + r + ", " + g + ", " + b + ", " + a + ")");
                    return config;
                }
            }
            return null;
        }
{ return mValue[0]; } return defaultValue; } private void printConfigs(EGL10 egl, EGLDisplay display, EGLConfig[] configs) { int numConfigs = configs.length; Log.i(TAG, String.format("%d configurations", numConfigs)); for (int i = 0; i < numConfigs; i++) { Log.i(TAG, String.format("Configuration %d:\n", i)); printConfig(egl, display, configs[i]); } } private void printConfig(EGL10 egl, EGLDisplay display, EGLConfig config) { int[] attributes = { EGL10.EGL_BUFFER_SIZE, EGL10.EGL_ALPHA_SIZE, EGL10.EGL_BLUE_SIZE, EGL10.EGL_GREEN_SIZE, EGL10.EGL_RED_SIZE, EGL10.EGL_DEPTH_SIZE, EGL10.EGL_STENCIL_SIZE, EGL10.EGL_CONFIG_CAVEAT, EGL10.EGL_CONFIG_ID, EGL10.EGL_LEVEL, EGL10.EGL_MAX_PBUFFER_HEIGHT, EGL10.EGL_MAX_PBUFFER_PIXELS, EGL10.EGL_MAX_PBUFFER_WIDTH, EGL10.EGL_NATIVE_RENDERABLE, EGL10.EGL_NATIVE_VISUAL_ID, EGL10.EGL_NATIVE_VISUAL_TYPE, 0x3030, // EGL10.EGL_PRESERVED_RESOURCES, EGL10.EGL_SAMPLES, EGL10.EGL_SAMPLE_BUFFERS, EGL10.EGL_SURFACE_TYPE, EGL10.EGL_TRANSPARENT_TYPE, EGL10.EGL_TRANSPARENT_RED_VALUE, EGL10.EGL_TRANSPARENT_GREEN_VALUE, EGL10.EGL_TRANSPARENT_BLUE_VALUE, 0x3039, // EGL10.EGL_BIND_TO_TEXTURE_RGB, 0x303A, // EGL10.EGL_BIND_TO_TEXTURE_RGBA, 0x303B, // EGL10.EGL_MIN_SWAP_INTERVAL, 0x303C, // EGL10.EGL_MAX_SWAP_INTERVAL, EGL10.EGL_LUMINANCE_SIZE, EGL10.EGL_ALPHA_MASK_SIZE, EGL10.EGL_COLOR_BUFFER_TYPE, EGL10.EGL_RENDERABLE_TYPE, 0x3042 // EGL10.EGL_CONFORMANT }; String[] names = { "EGL_BUFFER_SIZE", "EGL_ALPHA_SIZE", "EGL_BLUE_SIZE", "EGL_GREEN_SIZE", "EGL_RED_SIZE", "EGL_DEPTH_SIZE", "EGL_STENCIL_SIZE", "EGL_CONFIG_CAVEAT", "EGL_CONFIG_ID", "EGL_LEVEL", "EGL_MAX_PBUFFER_HEIGHT", "EGL_MAX_PBUFFER_PIXELS", "EGL_MAX_PBUFFER_WIDTH", "EGL_NATIVE_RENDERABLE", "EGL_NATIVE_VISUAL_ID", "EGL_NATIVE_VISUAL_TYPE", "EGL_PRESERVED_RESOURCES", "EGL_SAMPLES", "EGL_SAMPLE_BUFFERS", "EGL_SURFACE_TYPE", "EGL_TRANSPARENT_TYPE", "EGL_TRANSPARENT_RED_VALUE", "EGL_TRANSPARENT_GREEN_VALUE", "EGL_TRANSPARENT_BLUE_VALUE", "EGL_BIND_TO_TEXTURE_RGB", "EGL_BIND_TO_TEXTURE_RGBA", 
/**
 * GLSurfaceView.Renderer that forwards every callback to native code in
 * libjniplayer. All rendering state lives on the native side; this class is
 * only the JNI bridge.
 *
 * @author shengbin
 */
public class GLRenderer implements GLSurfaceView.Renderer {

    // Native counterparts implemented in libjniplayer.
    private native int nativeInit();
    private native int nativeSetup(int w, int h);
    private native void nativeDrawFrame();

    @Override
    public void onDrawFrame(GL10 arg0) {
        nativeDrawFrame();
    }

    @Override
    public void onSurfaceChanged(GL10 arg0, int w, int h) {
        // Surface dimensions changed: let the native side rebuild its viewport.
        nativeSetup(w, h);
    }

    @Override
    public void onSurfaceCreated(GL10 arg0, EGLConfig arg1) {
        nativeInit();
    }

    static {
        // Load the native library that implements the methods above.
        System.loadLibrary("jniplayer");
    }
}
android.graphics.Bitmap.Config; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Matrix; import android.graphics.Paint; import android.opengl.GLSurfaceView; import android.os.Handler; import android.os.Looper; import android.util.Log; import android.view.Surface; import android.view.Surface.OutOfResourcesException; import android.view.SurfaceHolder; import android.widget.TextView; import android.content.Context; import java.io.File; import java.io.FileFilter; import java.util.regex.Pattern; import com.ksyun.media.ksy265codec.demo.ui.Settings; public class NativeMediaPlayer { public static final int MEDIA_INFO_FRAMERATE_VIDEO = 900; public static final int MEDIA_INFO_END_OF_FILE = 909; private int mNativeContext; // accessed by native methods private Surface mSurface; private GLSurfaceView mGLSurfaceView; private TextView mInfoTextView; private Bitmap mFrameBitmap = null; private int mDisplayWidth = 0; private int mDisplayHeight = 0; private int mDisplayFPS = -1; private int mDisplayAvgFPS = -1; private int mDecodeFPS = -1; private int mBitrateVideo = -1; private int mBitrateAudio = -1; private boolean mShowInfo = true; private boolean mShowInfoGL = true; private String mInfo = ""; private OnCompletionListener mListener = null; private final Handler mMainHandler; private boolean mNeedSetup = true; public interface OnCompletionListener { public void onCompletion(int frame_count); } public void setCompletionListener(OnCompletionListener listener) { this.mListener = listener; } public NativeMediaPlayer() { mMainHandler = new Handler(Looper.getMainLooper()); } public void init() { native_init(); } public void setDisplay(SurfaceHolder sh) { if (sh != null) { mSurface = sh.getSurface(); } else mSurface = null; } public void setGLDisplay(GLSurfaceView glView, TextView tv) { mGLSurfaceView = glView; mInfoTextView = tv; } public void setDisplaySize(int w, int h) { mDisplayHeight = h; mDisplayWidth = w; mNeedSetup = true; } /** * Gets the 
    /**
     * Gets the number of cores available in this device, across all processors.
     * Requires: Ability to peruse the filesystem at "/sys/devices/system/cpu"
     *
     * @return The number of cores, or 1 if failed to get result
     */
    private int getNumCores() {
        // Private Class to display only CPU devices in the directory listing
        class CpuFilter implements FileFilter {
            @Override
            public boolean accept(File pathname) {
                // Accept names of the form "cpu" followed by one or more
                // digits (e.g. "cpu0", "cpu11"); the regex is NOT limited to
                // a single digit.
                if (Pattern.matches("cpu[0-9]+", pathname.getName())) {
                    return true;
                }
                return false;
            }
        }
        try {
            // Get directory containing CPU info
            File dir = new File("/sys/devices/system/cpu/");
            // Filter to only list the devices we care about
            File[] files = dir.listFiles(new CpuFilter());
            // Return the number of cores (virtual CPU devices).
            // listFiles() returns null if the directory is unreadable; the
            // resulting NullPointerException is swallowed by the catch below.
            return files.length;
        } catch (Exception e) {
            // Default to return 1 core
            return 1;
        }
    }
use " + num + " threads.\n"); } float fps = Settings.getInstance().getDecoderSettings().getFPS(); return native_prepare(context,type, disableRender, num, fps); } public int prepare(Context context,int type, int disableRender, int threadNum, float fps) { return native_prepare(context,type, disableRender, threadNum, fps); } public int start() { int w = getVideoWidth(), h = getVideoHeight(); if (w > 0 && h > 0) mFrameBitmap = Bitmap.createBitmap(w, h, Config.RGB_565); return native_start(); } public void stop() { native_stop(); if (mFrameBitmap != null) { mFrameBitmap.recycle(); mFrameBitmap = null; } } public void pause() { native_pause(); } public void go() { native_go(); } public void seekTo(int msec) { } public void setShowInfo(boolean show) { mShowInfo = show; if (mShowInfo == false && mInfoTextView != null) { mInfoTextView.setText(""); } } private void setupDisplay() { int videoWidth = getVideoWidth(), videoHeight = getVideoHeight(); int screenWidth, screenHeight, displayWidth = 0, displayHeight = 0; screenHeight = mDisplayHeight; screenWidth = mDisplayWidth; displayWidth = videoWidth; displayHeight = videoHeight; if (displayHeight > screenHeight) { displayHeight = screenHeight; displayWidth = displayHeight * videoWidth / videoHeight; displayWidth -= displayWidth % 4; } if (displayWidth > screenWidth) { displayWidth = screenWidth; displayHeight = displayWidth * videoHeight / videoWidth; displayHeight -= displayHeight % 4; } setDisplaySize(displayWidth, displayHeight); } /** * Called from native code */ public int drawFrame(int width, int height) { boolean useGL = false; if (useGL) { mGLSurfaceView.requestRender(); if (mShowInfoGL) { mInfo = ""; Paint paint = new Paint(); paint.setColor(Color.WHITE); paint.setTextSize(40); if (width > 0) { mInfo += ("Video Size:" + width + "x" + height); } if (mDisplayFPS > 0) { mInfo += (" Display FPS:" + mDisplayFPS); } if (mDisplayAvgFPS > 0) { mInfo += String.format(" Average FPS:%.2f", mDisplayAvgFPS / 4096.0); } 
mInfoTextView.post(new Runnable() { @Override public void run() { mInfoTextView.setText(mInfo); } }); mShowInfoGL = false; } return 0; } if (mSurface == null) { return 0; } if (mNeedSetup) { setupDisplay(); mNeedSetup = false; } // draw without OpenGL Canvas canvas = null; try { canvas = mSurface.lockCanvas(null); } catch (IllegalArgumentException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (OutOfResourcesException e) { // TODO Auto-generated catch block e.printStackTrace(); } canvas.drawColor(Color.BLACK); if (null == mFrameBitmap || mFrameBitmap.getWidth() != width) { // video size has changed, we need to create a new frame bitmap // correspondingly mFrameBitmap = Bitmap.createBitmap(width, height, Config.RGB_565); } renderBitmap(mFrameBitmap); if (mDisplayWidth != mFrameBitmap.getWidth()) { Matrix matrix = new Matrix(); float scaleWidth = ((float) mDisplayWidth) / width; float scaleHeight = ((float) mDisplayHeight) / height; matrix.postScale(scaleWidth, scaleHeight); matrix.postTranslate((canvas.getWidth() - mDisplayWidth) / 2, (canvas.getHeight() - mDisplayHeight) / 2); if (mFrameBitmap.getWidth() < 640) { // small bitmap, able to use filter Paint paint = new Paint(); paint.setFilterBitmap(true); canvas.drawBitmap(mFrameBitmap, matrix, paint); } else { canvas.drawBitmap(mFrameBitmap, matrix, null); } } else { canvas.drawBitmap(mFrameBitmap, (canvas.getWidth() - mDisplayWidth) / 2, (canvas.getHeight() - mDisplayHeight) / 2, null); } if (mShowInfo) { Paint paint = new Paint(); paint.setColor(Color.WHITE); paint.setTextSize(40); String info = ""; if (width > 0) { info += ("Video Size:" + width + "x" + height); } if (mDisplayFPS > 0) { info += (" Display FPS:" + mDisplayFPS); } if (mDisplayAvgFPS > 0) { info += String.format(" Average FPS:%.2f", mDisplayAvgFPS / 4096.0); } if (mDecodeFPS > 0) { info += (" Decode FPS:" + mDecodeFPS); } canvas.drawText(info, 20, 60, paint); info = ""; if (mBitrateVideo > 0) { info += "Bitrate: video " + 
Integer.toString(mBitrateVideo); } if (mBitrateAudio > 0) { info += ", audio " + Integer.toString(mBitrateAudio); } if (mBitrateVideo > 0 || mBitrateAudio > 0) { info += ", total " + Integer.toString(mBitrateVideo + mBitrateAudio) + " kbit/s"; } canvas.drawText(info, 20, 100, paint); } mSurface.unlockCanvasAndPost(canvas); return 0; } /** * Called from native code when an interesting event happens. */ public void postEventFromNative(int what, int arg1, int arg2) { switch (what) { case MEDIA_INFO_FRAMERATE_VIDEO: mDisplayFPS = arg1; mDisplayAvgFPS = arg2; if (mShowInfo) { mShowInfoGL = true; } break; case MEDIA_INFO_END_OF_FILE: final int frame_num = arg1; mMainHandler.post(new Runnable() { @Override public void run() { if (mListener != null) { mListener.onCompletion(frame_num); } } }); break; } } // set output file name public void setOutput(String outputFileName) { native_set_output(outputFileName); } private native void native_init(); private native int native_prepare(Context context,int decoderType, int disableRender, int threadNum, float renderFPS); private native int native_start(); private native int native_stop(); private native int native_pause(); private native int native_go(); private native int native_seekTo(int msec); private native static int hasNeon(); public native int setDataSource(String path); public native int getVideoWidth(); public native int getVideoHeight(); public native boolean isPlaying(); public native int getCurrentPosition(); public native float getDuration(); public native float getDecodeTime(); public native float getDecodeFPS(); private native static void renderBitmap(Bitmap bitmap); public native void native_set_output(String output); public native String getVersion(); static { System.loadLibrary("lenthevcdec"); System.loadLibrary("jniplayer"); } } ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/encoder/Encoder.java 
================================================
package com.ksyun.media.ksy265codec.demo.encoder;

import android.content.Context;

import com.ksyun.media.ksy265codec.demo.ui.EncoderSettings;

/**
 * Created by sujia on 2017/3/29.
 *
 * Public facade over {@link EncoderWrapper}: every call is forwarded to the
 * wrapper when it exists, otherwise a harmless default is returned.
 */
public class Encoder {

    private EncoderWrapper mWrapper;

    public Encoder(EncoderSettings settings) {
        mWrapper = new EncoderWrapper(settings);
    }

    /** Opens the input file; returns -1 on failure. */
    public int open(String path) {
        return (mWrapper != null) ? mWrapper.open(path) : -1;
    }

    /** Runs the configured encoder; returns -1 on failure. */
    public int encode(Context context) {
        return (mWrapper != null) ? mWrapper.encode(context) : -1;
    }

    /** Number of frames encoded so far (0 without a wrapper). */
    public int getEncodedFrameNum() {
        return (mWrapper != null) ? mWrapper.getEncodedFrameNum() : 0;
    }

    /** Measured encoding speed in frames per second (0 without a wrapper). */
    public float getEncodeFPS() {
        return (mWrapper != null) ? mWrapper.getEncodeFPS() : 0;
    }

    /** Input/output size ratio (1 without a wrapper). */
    public float getCompressRatio() {
        return (mWrapper != null) ? mWrapper.getCompressRatio() : 1;
    }

    /** Wall-clock encoding time in seconds (0 without a wrapper). */
    public float getEncodeTime() {
        return (mWrapper != null) ? mWrapper.getEncodeTime() : 0;
    }

    /** PSNR of the encoded stream (0 without a wrapper). */
    public double getPSNR() {
        return (mWrapper != null) ? mWrapper.getPSNR() : 0;
    }

    /** Encoder library version string ("0.1" without a wrapper). */
    public String getVersion() {
        return (mWrapper != null) ? mWrapper.getVersion() : "0.1";
    }

    /** Achieved bitrate in kbit/s (0 without a wrapper). */
    public float getEncodeBitrate() {
        return (mWrapper != null) ? mWrapper.getEncodeBitrate() : 0;
    }

    /** Duration of the encoded clip in seconds (0 without a wrapper). */
    public float getDuration() {
        return (mWrapper != null) ? mWrapper.getDuration() : 0;
    }

    /** Path of the opened input file (null without a wrapper). */
    public String getInputFilePath() {
        return (mWrapper != null) ? mWrapper.getInputFilePath() : null;
    }

    /** Path of the produced bitstream file (null without a wrapper). */
    public String getOutputFilePath() {
        return (mWrapper != null) ? mWrapper.getOutputFilePath() : null;
    }
}

================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/encoder/EncoderWrapper.java
================================================
package com.ksyun.media.ksy265codec.demo.encoder;

import android.content.Context;
import
com.ksyun.media.ksy265codec.demo.ui.EncoderSettings;

import java.io.File;

/**
 * Created by sujia on 2017/3/29.
 *
 * JNI bridge to the KSC265 and x264 encoders (libnative-lib). Holds the
 * native encoder handle in {@code mInstance} and derives the output file
 * name from the input file name.
 */
public class EncoderWrapper {
    private String mInputFilePath;
    private String mOutputFilePath;
    private EncoderSettings mSettings;
    // Opaque native encoder handle returned by native_init().
    private long mInstance = 0;

    public EncoderWrapper(EncoderSettings settings) {
        this.mSettings = settings;
        mInstance = native_init();
    }

    /**
     * Opens a raw YUV input file. Only paths ending in ".yuv" are accepted.
     *
     * @return the native_open result, or -1 if the path is unusable
     */
    public int open(String path) {
        if (path != null && path.endsWith(".yuv")) {
            mInputFilePath = path;
            return native_open(mInstance, mInputFilePath);
        }
        return -1;
    }

    /**
     * Encodes the opened input with the encoder selected in the settings
     * (KSC265 → ".265" output, x264 → ".264" output).
     *
     * @return the native encode result, or -1 if no input was opened or the
     *         encoder name is unknown
     */
    public int encode(Context context) {
        if (mInputFilePath == null) {
            // FIX: open() was never called (or failed) — previously this fell
            // through to mInputFilePath.lastIndexOf() and threw an NPE.
            return -1;
        }
        if (mSettings.getEncoderName().equals(EncoderSettings.Encoders[0])) { // KSC265
            int dotIndex = mInputFilePath.lastIndexOf(".");
            String fileName = mInputFilePath.substring(0, dotIndex);
            mOutputFilePath = fileName + ".265";
            return native_ksy265_encoder(context, mInstance, mOutputFilePath,
                    mSettings.getProfile(), mSettings.getDelay(),
                    mSettings.getWidth(), mSettings.getHeight(),
                    mSettings.getFps(), mSettings.getBitrate(),
                    mSettings.getThreads());
        } else if (mSettings.getEncoderName().equals(EncoderSettings.Encoders[1])) { // x264
            int dotIndex = mInputFilePath.lastIndexOf(".");
            String fileName = mInputFilePath.substring(0, dotIndex);
            mOutputFilePath = fileName + ".264";
            return native_x264_encode(mInstance, mOutputFilePath,
                    mSettings.getProfile(), mSettings.getDelay(),
                    mSettings.getWidth(), mSettings.getHeight(),
                    mSettings.getFps(), mSettings.getBitrate(),
                    mSettings.getThreads());
        }
        return -1;
    }

    public String getInputFilePath() {
        return mInputFilePath;
    }

    public String getOutputFilePath() {
        return mOutputFilePath;
    }

    public float getEncodeFPS() {
        return native_get_real_fps(mInstance);
    }

    public int getEncodedFrameNum() {
        return native_get_encoded_frame_num(mInstance);
    }

    /**
     * Ratio of input file size to output file size.
     *
     * @return the compression ratio, or 0 when either file is missing or the
     *         output is empty
     */
    public float getCompressRatio() {
        if (mInputFilePath == null || mOutputFilePath == null) {
            return 0;
        } else {
            long inFileLength = new File(mInputFilePath).length();
            long outFileLength = new File(mOutputFilePath).length();
            if (outFileLength != 0) {
                // FIX: cast before dividing — the original long/long integer
                // division truncated the ratio to a whole number.
                return (float) inFileLength / outFileLength;
            } else {
                return 0;
            }
        }
    }

    public float getEncodeTime() {
        return native_get_real_time(mInstance);
    }

    public double getPSNR() {
        return native_get_psnr(mInstance);
    }

    /** Clip duration in seconds, derived from frame count and configured FPS. */
    public float getDuration() {
        return getEncodedFrameNum() / mSettings.getFps();
    }

    /** Achieved bitrate in kbit/s, derived from output file size and duration. */
    public float getEncodeBitrate() {
        float encodeTime = getDuration();
        if (mOutputFilePath != null && encodeTime != 0) {
            long outFileLength = new File(mOutputFilePath).length();
            return (outFileLength * 8) / encodeTime / 1000;
        } else {
            return 0;
        }
    }

    /** Version string of the currently selected encoder library. */
    public String getVersion() {
        if (mSettings.getEncoderName().equals(EncoderSettings.Encoders[0])) { // KSC265
            return native_get_ksy265_version();
        } else if (mSettings.getEncoderName().equals(EncoderSettings.Encoders[1])) { // x264
            return native_get_x264_version();
        }
        return "0.1";
    }

    public native long native_init();

    public native int native_open(long ptr, String path);

    // NOTE(review): "path" here receives the OUTPUT file path — confirm against
    // the native implementation before renaming.
    public native int native_x264_encode(long ptr, String path, String profile,
            String delay, int width, int height, Float fps, int bitrate, int threads);

    public native int native_ksy265_encoder(Context context, long ptr, String outputFilePath,
            String profile, String delay, int width, int height, Float fps,
            int bitrate, int threads);

    public native float native_get_real_fps(long ptr);

    public native int native_get_encoded_frame_num(long ptr);

    public native String native_get_x264_version();

    public native String native_get_ksy265_version();

    public native float native_get_real_time(long ptr);

    public native float native_get_psnr(long ptr);

    static {
        System.loadLibrary("native-lib");
    }
}

================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/BaseFragment.java
================================================
package com.ksyun.media.ksy265codec.demo.ui;

import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import
android.support.v4.app.Fragment;
import android.text.method.ScrollingMovementMethod;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;

import com.ipaulpro.afilechooser.FileChooserActivity;
import com.ipaulpro.afilechooser.utils.FileUtils;
import com.ksyun.media.ksy265codec.demo.R;

import static android.app.Activity.RESULT_OK;
import static android.content.ContentValues.TAG;

/**
 * Created by sujia on 2017/3/27.
 *
 * Common base for the encoder/decoder demo screens: inflates the shared
 * layout, wires the four action buttons, and runs the file-chooser flow that
 * fills {@code mInputFilePath}. Subclasses override the on*Clicked() hooks.
 */
public class BaseFragment extends Fragment {
    private static final int REQUEST_CODE = 6384; // onActivityResult request code

    protected Button mSettingButton;
    protected Button mHelpButton;
    protected Button mNavButton;
    protected Button mStartButton;
    private ButtonObserver mButtonObserver;
    protected EditText mFilePathEditTxt;
    // Selected input file path; set by the file chooser in onActivityResult().
    protected String mInputFilePath;
    protected String mOutputFilePath;
    protected TextView mTitleText;
    protected TextView mInfoText;
    protected SurfaceView mSurfaceView;

    /** Inflates the shared layout and binds all widgets; surface starts hidden. */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_item, null);
        mTitleText = (TextView) view.findViewById(R.id.title_txt);
        mInfoText = (TextView) view.findViewById(R.id.info_txt);
        mInfoText.setMovementMethod(ScrollingMovementMethod.getInstance());
        mButtonObserver = new ButtonObserver();
        mSettingButton = (Button) view.findViewById(R.id.settings);
        mSettingButton.setOnClickListener(mButtonObserver);
        mHelpButton = (Button) view.findViewById(R.id.help);
        mHelpButton.setOnClickListener(mButtonObserver);
        mNavButton = (Button) view.findViewById(R.id.nav);
        mNavButton.setOnClickListener(mButtonObserver);
        mStartButton = (Button) view.findViewById(R.id.start);
        mStartButton.setOnClickListener(mButtonObserver);
        mFilePathEditTxt = (EditText) view.findViewById(R.id.filepath);
        mSurfaceView = (SurfaceView) view.findViewById(R.id.surface_view);
        mSurfaceView.setVisibility(View.GONE);
        return view;
    }

    /** Dispatches button clicks to the overridable on*Clicked() hooks. */
    private class ButtonObserver implements View.OnClickListener {
        @Override
        public void onClick(View view) {
            switch (view.getId()) {
            case R.id.settings:
                onSettingsClicked();
                break;
            case R.id.help:
                onHelpClicked();
                break;
            case R.id.nav:
                onNavClicked();
                break;
            case R.id.start:
                onStartClicked();
                break;
            default:
                break;
            }
        }
    }

    /** Hook: settings button pressed. Default is a no-op. */
    protected void onSettingsClicked() {
    }

    /** Hook: help button pressed. Default is a no-op. */
    protected void onHelpClicked() {
    }

    /** Hook: file-selection button pressed. Default opens the file chooser. */
    protected void onNavClicked() {
        showChooser();
    }

    /** Launches the file chooser, restricted to raw YUV and H.264/H.265 bitstreams. */
    private void showChooser() {
        // set file filter
        FileUtils.setFileFilter(new FileUtils.FileFilterBySuffixs("yuv|264|h264|avc|265|hevc|h265|hm91|hm10|bit|hvc"));
        Intent intent = new Intent(getContext(), FileChooserActivity.class);
        startActivityForResult(intent, REQUEST_CODE);
    }

    /** Receives the chooser result and stores the resolved file path. */
    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        switch (requestCode) {
        case REQUEST_CODE:
            // If the file selection was successful
            if (resultCode == RESULT_OK) {
                if (data != null) {
                    // Get the URI of the selected file
                    final Uri uri = data.getData();
                    Log.i(TAG, "Uri = " + uri.toString());
                    try {
                        // Get the file path from the URI
                        mInputFilePath = FileUtils.getPath(getContext(), uri);
                        mFilePathEditTxt.setText(mInputFilePath);
                    } catch (Exception e) {
                        Log.e(TAG, "File select error: " + e);
                    }
                }
            }
            break;
        }
        super.onActivityResult(requestCode, resultCode, data);
    }

    /** Hook: start button pressed. Default is a no-op. */
    protected void onStartClicked() {
    }

    /** Enables/disables all action buttons, e.g. while encoding or decoding runs. */
    protected void toggleView(boolean enable) {
        mSettingButton.setEnabled(enable);
        mNavButton.setEnabled(enable);
        mStartButton.setEnabled(enable);
        mHelpButton.setEnabled(enable);
    }
}

================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/DecoderFragment.java
================================================
package com.ksyun.media.ksy265codec.demo.ui;

import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import
android.view.SurfaceHolder;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;

import com.ksyun.media.ksy265codec.demo.decoder.hevdecoder.NativeMediaPlayer;

import java.io.File;
import java.io.FileFilter;
import java.util.regex.Pattern;

/**
 * Created by sujia on 2017/3/27.
 *
 * Decoder demo screen: configures a NativeMediaPlayer from the saved decoder
 * settings, plays the selected bitstream onto the SurfaceView, and appends a
 * statistics report to the info text when playback completes.
 */
public class DecoderFragment extends BaseFragment
        implements DecoderSettingsFragment.OnSettingsChangeListener,
        SurfaceHolder.Callback, NativeMediaPlayer.OnCompletionListener {
    private DecoderSettings mSettings = null;
    private static final String TAG = "DecoderFragment";

    private NativeMediaPlayer mPlayer;
    // True once prepare() succeeded; guards the start() call.
    private boolean mPrepared = false;
    // Last known surface dimensions from surfaceChanged().
    private int mWidth;
    private int mHeight;

    /** Builds the shared UI, then attaches the surface callback and the player. */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View view = super.onCreateView(inflater, container, savedInstanceState);
        mSettings = Settings.getInstance().getDecoderSettings();
        updateUI();
        mSurfaceView.getHolder().addCallback(this);
        mPlayer = new NativeMediaPlayer();
        mPlayer.setCompletionListener(this);
        return view;
    }

    @Override
    protected void onSettingsClicked() {
        // Create an instance of the dialog fragment and show it
        DecoderSettingsFragment settingFragment = new DecoderSettingsFragment();
        settingFragment.setListener(this);
        settingFragment.show(this.getFragmentManager(), "setting dialog");
    }

    /** Settings dialog callback: adopt the new settings and refresh the UI. */
    @Override
    public void onSettingsChanged(DecoderSettings settings) {
        this.mSettings = settings;
        updateUI();
    }

    @Override
    protected void onHelpClicked() {
        // Create an instance of the dialog fragment and show it
        HelpFragment settingFragment = new HelpFragment();
        settingFragment.setType(1);
        settingFragment.show(getFragmentManager(), "decode help dialog");
    }

    /** Updates the title and shows the surface only when rendering is enabled (FPS != -1). */
    private void updateUI() {
        mTitleText.setText(mSettings.getDecoderName() + "解码器");
        if (mSettings.getFPS() != -1) {
            mSurfaceView.setVisibility(View.VISIBLE);
        } else {
            mSurfaceView.setVisibility(View.GONE);
        }
    }

    /**
     * Gets the number of cores available in this device, across all processors.
     * Requires: Ability to peruse the filesystem at "/sys/devices/system/cpu"
     *
     * NOTE(review): duplicated in NativeMediaPlayer — consider a shared helper.
     *
     * @return The number of cores, or 1 if failed to get result
     */
    private int getNumCores() {
        // Private Class to display only CPU devices in the directory listing
        class CpuFilter implements FileFilter {
            @Override
            public boolean accept(File pathname) {
                // Check if filename is "cpu" followed by one or more digits
                if (Pattern.matches("cpu[0-9]+", pathname.getName())) {
                    return true;
                }
                return false;
            }
        }
        try {
            // Get directory containing CPU info
            File dir = new File("/sys/devices/system/cpu/");
            // Filter to only list the devices we care about
            File[] files = dir.listFiles(new CpuFilter());
            // Return the number of cores (virtual CPU devices)
            return files.length;
        } catch (Exception e) {
            // Default to return 1 core
            return 1;
        }
    }

    /** Validates settings/input, prepares the native player, and starts decoding. */
    @Override
    protected void onStartClicked() {
        if (mSettings == null) {
            Toast.makeText(getContext(), "解码参数未配置", Toast.LENGTH_SHORT).show();
            return;
        }
        if (mInputFilePath == null) {
            Toast.makeText(getContext(), "请选择输入文件", Toast.LENGTH_SHORT).show();
            return;
        }
        mPrepared = false;
        mPlayer.init();
        int ret = mPlayer.setDataSource(mInputFilePath);
        if (ret != 0) {
            Toast.makeText(getContext(), "请检查输入文件格式", Toast.LENGTH_SHORT).show();
            return;
        }
        mPlayer.setDisplay(mSurfaceView.getHolder());
        mPlayer.setDisplaySize(mWidth, mHeight);
        int num = mSettings.getThreads();
        if (0 == num) {
            int cores = getNumCores(); // Runtime.getRuntime().availableProcessors();
            if (cores <= 1)
                num = 1;
            else if (mSettings.decoderIndex == 1) { // lenthevcdec
                num = (cores < 5) ? ((cores * 3 + 1) / 2) : 8;
            }
            // NOTE(review): for KSC265 (decoderIndex == 0) with multiple cores,
            // num stays 0 here — presumably the native decoder treats 0 as
            // "auto"; confirm before changing.
            Log.d(TAG, cores + " cores detected! use " + num + " threads.\n");
        }
        // 0: ksc265
        // 1: lenthevcdec
        int decoderType = mSettings.decoderIndex == 0 ? 0 : 1;
        // Render FPS of -1 means "no rendering": pass disableRender = 1.
        ret = mPlayer.prepare(getContext(), decoderType, mSettings.getFPS() == -1 ? 1 : 0, num, mSettings.getFPS());
        if (ret < 0) {
            Toast.makeText(getContext(), "打开文件" + mInputFilePath + "失败,返回值: " + ret, Toast.LENGTH_SHORT).show();
            return;
        } else {
            mPrepared = true;
        }
        if (mSettings.enableYUVOutput) {
            // Dump decoded YUV next to the input, tagged with the decoder name.
            int dotIndex = mInputFilePath.lastIndexOf(".");
            String inputFileName = mInputFilePath.substring(0, dotIndex);
            mOutputFilePath = inputFileName + (mSettings.decoderIndex == 0 ? ".ksc" : ".lent") + ".yuv";
            mPlayer.setOutput(mOutputFilePath);
        }
        toggleView(false);
        if (mPrepared) {
            mPlayer.start();
        }
    }

    //////////////////////////////////////////
    // implements SurfaceHolder.Callback
    @Override
    public void surfaceCreated(SurfaceHolder surfaceHolder) {
    }

    /** Remembers the surface size and forwards it to the player. */
    @Override
    public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i1, int i2) {
        mWidth = i1;
        mHeight = i2;
        if (mPlayer != null) {
            mPlayer.setDisplaySize(mWidth, mHeight);
        }
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
        mPlayer.stop();
    }
    // end of: implements SurfaceHolder.Callback
    /////////////////////////////////////////////

    /** Playback finished: report statistics, stop the player, re-enable buttons. */
    @Override
    public void onCompletion(int frame_count) {
        updateInfo(frame_count);
        mPlayer.stop();
        toggleView(true);
    }

    /** Prepends a formatted decode report (version, params, speed) to the info text. */
    private void updateInfo(int frame_num) {
        String last_info = mInfoText.getText().toString();
        String info;
        if (mSettings.enableYUVOutput) {
            info = String.format("解码器版本: %s \n" + "\n" + "\n"
                    + "解码参数: %s -b %s -o %s -threads %d \n" + "\n" + "\n"
                    + "分辨率: %d * %d \n"
                    + "线程数: %s \n"
                    + "解码时间: %.2f s\n"
                    + "解码帧数 %d \n"
                    + "解码速度 %.2f f/s\n"
                    + "渲染帧率 %s \n",
                    mPlayer.getVersion(), mSettings.getDecoderName(), mInputFilePath,
                    mOutputFilePath, mSettings.getThreads(), mPlayer.getVideoWidth(),
                    mPlayer.getVideoHeight(), mSettings.getThreadsStr(),
                    mPlayer.getDecodeTime(), frame_num, mPlayer.getDecodeFPS(),
                    mSettings.getFPSStr());
        } else {
            info = String.format("解码器版本: %s \n" + "\n" + "\n"
                    + "解码参数: %s -b %s -threads %d \n" + "\n" + "\n"
                    + "分辨率: %d * %d \n"
                    + "线程数: %s \n"
                    + "解码时间: %.2f s\n"
                    + "解码帧数 %d \n"
                    + "解码速度 %.2f f/s\n"
                    + "渲染帧率 %s \n",
                    mPlayer.getVersion(), mSettings.getDecoderName(), mInputFilePath,
                    mSettings.getThreads(), mPlayer.getVideoWidth(),
                    mPlayer.getVideoHeight(), mSettings.getThreadsStr(),
                    mPlayer.getDecodeTime(), frame_num, mPlayer.getDecodeFPS(),
                    mSettings.getFPSStr());
        }
        mInfoText.setText(info + "\n" + "\n" + last_info);
    }
}

================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/DecoderSettings.java
================================================
package com.ksyun.media.ksy265codec.demo.ui;

import android.content.SharedPreferences;

/**
 * Created by sujia on 2017/3/28.
 *
 * Plain value holder for the decoder settings dialog; persisted as indices
 * into the option arrays via SharedPreferences.
 */
public class DecoderSettings {
    // SharedPreferences keys.
    public final static String DECODER_SETTINGS_DECODER = "decoder_settings_decoder";
    public final static String DECODER_SETTINGS_THREADS = "decoder_settings_threads";
    public final static String DECODER_SETTINGS_FPS = "decoder_settings_fps";
    public final static String DECODER_SETTINGS_RENDER = "decoder_settings_render";
    public final static String DECODER_SETTINGS_OUTPUT = "decoder_settings_output";

    // Option lists shown by the settings dialog spinners.
    public final static String[] Decoders = new String[] {"KSC265", "lenthevcdec"};
    public final static String[] Threads = new String[] {"0 (auto)", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10"};
    public final static String[] FPS = new String[] {"0 (fullspeed)", "24", "-1 (off)"};

    public int decoderIndex;
    public int threadsIndex;
    public int fpsIndex; // 渲染帧率 (render frame rate) index into FPS
    public boolean enableYUVOutput;

    /** Defaults: KSC265, auto threads, fullspeed rendering, no YUV dump. */
    public DecoderSettings() {
        this.decoderIndex = 0;
        this.threadsIndex = 0;
        this.fpsIndex = 0;
        this.enableYUVOutput = false;
    }

    /** Restores previously saved indices (falling back to the defaults above). */
    public DecoderSettings(SharedPreferences sharedPreferences) {
        this.decoderIndex = sharedPreferences.getInt(DECODER_SETTINGS_DECODER, 0);
        this.threadsIndex = sharedPreferences.getInt(DECODER_SETTINGS_THREADS, 0);
        this.fpsIndex = sharedPreferences.getInt(DECODER_SETTINGS_FPS, 0);
        this.enableYUVOutput = sharedPreferences.getBoolean(DECODER_SETTINGS_OUTPUT, false);
    }

    /** Display name of the selected decoder, or "unknow" for an invalid index. */
    public String getDecoderName() {
        if (decoderIndex <= Decoders.length - 1) {
            return Decoders[decoderIndex];
        } else {
            return "unknow";
        }
    }

    /** Thread count; the index doubles as the value (index 0 = auto). */
    public int getThreads() {
        return threadsIndex;
    }

    /** Display label for the selected thread count ("" for an invalid index). */
    public String getThreadsStr() {
        if (threadsIndex <= Threads.length - 1) {
            return Threads[threadsIndex];
        } else {
            return "";
        }
    }

    /** Numeric render FPS: 0 = fullspeed, 24, or -1 = rendering off. */
    public int getFPS() {
        switch (fpsIndex) {
        case 0:
            return 0;
        case 1:
            return 24;
        case 2:
            return -1;
        default:
            return 0;
        }
    }

    /** Display label for the selected render FPS ("" for an invalid index). */
    public String getFPSStr() {
        if (fpsIndex <= FPS.length - 1) {
            return FPS[fpsIndex];
        } else {
            return "";
        }
    }
}

================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/DecoderSettingsFragment.java
================================================
package com.ksyun.media.ksy265codec.demo.ui;

import android.app.Dialog;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v4.app.DialogFragment;
import android.support.v7.app.AlertDialog;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.RadioButton;
import android.widget.Spinner;

import com.ksyun.media.ksy265codec.demo.R;

/**
 * Created by sujia on 2017/3/28.
*/
/**
 * Settings dialog for the decoder screen: three spinners (decoder, threads,
 * render FPS) and a YUV-dump radio pair. On confirmation the choices are
 * persisted via {@link Settings} and reported to the registered listener.
 */
public class DecoderSettingsFragment extends DialogFragment {
    private Spinner mDecoderSpinner;
    private Spinner mThreadSpinner;
    private Spinner mFpsSpinner;
    private Button mButton;
    private RadioButton mEnableOutputButton;
    private RadioButton mDisableOutputButton;

    private DecoderSettings mSettings;

    /** Notified with the updated settings when the user confirms the dialog. */
    public interface OnSettingsChangeListener {
        public void onSettingsChanged(DecoderSettings settings);
    }

    // Use this instance of the interface to deliver action events
    OnSettingsChangeListener mListener;

    public DecoderSettingsFragment() {
        mSettings = Settings.getInstance().getDecoderSettings();
    }

    public void setListener(OnSettingsChangeListener listener) {
        mListener = listener;
    }

    /** Inflates the settings layout into an AlertDialog. */
    @NonNull
    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
        // Get the layout inflater
        LayoutInflater inflater = getActivity().getLayoutInflater();
        // Inflate and set the layout for the dialog
        // Pass null as the parent view because its going in the dialog layout
        View view = inflater.inflate(R.layout.decoder_settings, null);
        mSettings = Settings.getInstance().getDecoderSettings();
        initView(view);
        builder.setView(view);
        return builder.create();
    }

    /**
     * Binds the spinners/radio buttons to the current settings and installs the
     * confirm handler that saves and broadcasts the new values.
     */
    private void initView(View view) {
        ArrayAdapter decodersAdapter = new ArrayAdapter<>(getContext(),
                android.R.layout.simple_spinner_item, DecoderSettings.Decoders);
        decodersAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
        mDecoderSpinner = (Spinner) view.findViewById(R.id.decoder_settings_decoder_spinner);
        mDecoderSpinner.setAdapter(decodersAdapter);
        // FIX: was "<= Decoders.length", which admitted the invalid position
        // == length and passed it to setSelection().
        if (mSettings.decoderIndex < DecoderSettings.Decoders.length) {
            mDecoderSpinner.setSelection(mSettings.decoderIndex);
        }

        ArrayAdapter threadsAdapter = new ArrayAdapter<>(getContext(),
                android.R.layout.simple_spinner_item, DecoderSettings.Threads);
        threadsAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
        mThreadSpinner = (Spinner) view.findViewById(R.id.decoder_settings_threads_spinner);
        mThreadSpinner.setAdapter(threadsAdapter);
        // FIX: same off-by-one bounds check as above.
        if (mSettings.threadsIndex < DecoderSettings.Threads.length) {
            mThreadSpinner.setSelection(mSettings.threadsIndex);
        }

        ArrayAdapter fpsAdapter = new ArrayAdapter<>(getContext(),
                android.R.layout.simple_spinner_item, DecoderSettings.FPS);
        fpsAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
        mFpsSpinner = (Spinner) view.findViewById(R.id.decoder_settings_fps_spinner);
        mFpsSpinner.setAdapter(fpsAdapter);
        // FIX: same off-by-one bounds check as above.
        if (mSettings.fpsIndex < DecoderSettings.FPS.length) {
            mFpsSpinner.setSelection(mSettings.fpsIndex);
        }

        mEnableOutputButton = (RadioButton) view.findViewById(R.id.decoder_settings_enable_yuv_output);
        mDisableOutputButton = (RadioButton) view.findViewById(R.id.decoder_settings_disable_yuv_output);
        if (mSettings.enableYUVOutput) {
            mEnableOutputButton.setChecked(true);
            mDisableOutputButton.setChecked(false);
        } else {
            mEnableOutputButton.setChecked(false);
            mDisableOutputButton.setChecked(true);
        }

        mButton = (Button) view.findViewById(R.id.decoder_settings_sure);
        mButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Read the widgets back into the settings object, persist, notify.
                mSettings.decoderIndex = mDecoderSpinner.getSelectedItemPosition();
                mSettings.threadsIndex = mThreadSpinner.getSelectedItemPosition();
                mSettings.fpsIndex = mFpsSpinner.getSelectedItemPosition();
                mSettings.enableYUVOutput = mEnableOutputButton.isChecked();
                Settings.getInstance().saveDecoderSettings(mSettings);
                if (mListener != null) {
                    mListener.onSettingsChanged(mSettings);
                }
                dismiss();
            }
        });
    }
}

================================================
FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/EncoderFragment.java
================================================
package com.ksyun.media.ksy265codec.demo.ui;

import android.os.AsyncTask;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast; import com.ksyun.media.ksy265codec.demo.encoder.Encoder; /** * Created by sujia on 2017/3/27. */ public class EncoderFragment extends BaseFragment implements EncoderSettingsFragment.OnSettingsChangeListener { private EncoderSettings mSettings = null; @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View view = super.onCreateView(inflater, container, savedInstanceState); mSettings = Settings.getInstance().getEncoderSettings(); mTitleText.setText( mSettings.getEncoderName() + "编码器"); return view; } @Override protected void onSettingsClicked() { // Create an instance of the dialog fragment and show it EncoderSettingsFragment settingFragment = new EncoderSettingsFragment(); settingFragment.setListener(this); settingFragment.show(this.getFragmentManager(), "encoder setting dialog"); } @Override public void onSettingsChanged(EncoderSettings settings) { mSettings = settings; mTitleText.setText( mSettings.getEncoderName() + "编码器"); } @Override protected void onHelpClicked() { // Create an instance of the dialog fragment and show it HelpFragment settingFragment = new HelpFragment(); settingFragment.setType(0); settingFragment.show(getFragmentManager(), "encode help dialog"); } @Override protected void onStartClicked() { if (mInputFilePath == null) { Toast.makeText(getContext(), "请选择yuv文件", Toast.LENGTH_SHORT).show(); return; } EncodeTask task = new EncodeTask(); task.execute(); } private class EncodeTask extends AsyncTask { private ProgressDialogFragment mProgressDialog; private Encoder mEncoder; @Override protected void onPreExecute() { mEncoder = new Encoder(mSettings); //Create progress dialog here and show it mProgressDialog = new ProgressDialogFragment(); mProgressDialog.show(getFragmentManager(), "show progress dialog"); toggleView(false); } @Override protected Void doInBackground(Void... 
params) { // Execute query here encodeYUV(mEncoder); return null; } @Override protected void onPostExecute(Void result) { super.onPostExecute(result); //update your listView adapter here //Dismiss your dialog toggleView(true); mProgressDialog.dismissAllowingStateLoss(); updateInfo(mEncoder); } } private void encodeYUV(Encoder encoder) { if(encoder.open(mInputFilePath) < 0) { getActivity().runOnUiThread(new Runnable() { @Override public void run() { Toast.makeText(getContext(), "打开yuv文件错误", Toast.LENGTH_SHORT).show(); } }); return; } if(mSettings.getHeight() == 0 || mSettings.getWidth() == 0 || Integer.parseInt(mSettings.bitrate) <= 0 || Integer.parseInt(mSettings.fps) <= 0) { getActivity().runOnUiThread(new Runnable() { @Override public void run() { Toast.makeText(getContext(), "请检查编码参数设置", Toast.LENGTH_SHORT).show(); } }); return; } if(encoder.encode(getContext()) < 0) { getActivity().runOnUiThread(new Runnable() { @Override public void run() { Toast.makeText(getContext(), "编码失败,请检查输入文件格式", Toast.LENGTH_SHORT).show(); } }); return; } } private void updateInfo(Encoder encoder) { String last_info = mInfoText.getText().toString(); String info; if (mSettings.getEncoderName().equals(EncoderSettings.Encoders[0])) {//KSC265 info = String.format("编码器版本: %s \n " + " \n" + "编码参数: %s -i %s -preset %s -latency %s" + " -wdt %d -hgt %d -fr %.2f -threads %d -br %d -b %s \n" + " \n" + "编码时间: %.2f s \n" + "编码帧数: %d \n" + "编码速度: %.2f f/s \n" + "压缩比: %.2f \n" + "PSNR: %.2f \n" + "\n " + "视频信息 \n " + "码率: %.2f kbps \n" + "分辨率: %s \n" + "帧率: %.2f f/s\n" + "文件总时长: %.2f s\n", encoder.getVersion(), mSettings.getEncoderName(), encoder.getInputFilePath(), mSettings.getProfile(), mSettings.getDelay(), mSettings.getWidth(), mSettings.getHeight(), mSettings.getFps(), mSettings.getThreads(), mSettings.getBitrate(), encoder.getOutputFilePath(), encoder.getEncodeTime(), encoder.getEncodedFrameNum(), encoder.getEncodeFPS(), encoder.getCompressRatio(), encoder.getPSNR(), 
encoder.getEncodeBitrate(), mSettings.getResolution(), mSettings.getFps(), encoder.getDuration()); } else {//x264 String delayShow; if (mSettings.getDelay().equals(EncoderSettings.Delays[0])) {//zerolatency delayShow = "--bframes 0 --tune zerolatency"; } else if(mSettings.getDelay().equals(EncoderSettings.Delays[1])) {//livestreaming delayShow = "--bframes 3"; } else {//offline delayShow = "--bframes 7"; } info = String.format("编码器版本: %s \n " + " \n" + "编码参数: %s -i %s --preset %s %s " + "--input-res %dx%d --fps %.2f --threads %d --bitrate %d " + "-o %s \n" + " \n" + "编码时间: %.2f s \n" + "编码帧数: %d \n" + "编码速度: %.2f f/s \n" + "压缩比: %.2f \n" + "PSNR: %.2f \n" + "\n " + "视频信息 \n" + "码率: %.2f kbps \n" + "分辨率: %s \n" + "帧率: %.2f f/s\n" + "文件总时长: %.2f s\n", encoder.getVersion(), mSettings.getEncoderName(), encoder.getInputFilePath(), mSettings.getProfile(), delayShow, mSettings.getWidth(), mSettings.getHeight(), mSettings.getFps(), mSettings.getThreads(), mSettings.getBitrate(), encoder.getOutputFilePath(), encoder.getEncodeTime(), encoder.getEncodedFrameNum(), encoder.getEncodeFPS(), encoder.getCompressRatio(), encoder.getPSNR(), encoder.getEncodeBitrate(), mSettings.getResolution(), mSettings.getFps(), encoder.getDuration()); } mInfoText.setText( info + "\n" + "\n" + "\n" + last_info); } } ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/EncoderSettings.java ================================================ package com.ksyun.media.ksy265codec.demo.ui; import android.content.SharedPreferences; import android.util.Log; /** * Created by sujia on 2017/3/28. 
*/ public class EncoderSettings { public final static String TAG = "EncoderSettings"; public final static String ENCODER_SETTINGS_ENCODER = "encoder_settings_encoder"; public final static String ENCODER_SETTINGS_PROFILE = "encoder_settings_profile"; public final static String ENCODER_SETTINGS_DELAY = "encoder_settings_delay"; public final static String ENCODER_SETTINGS_RESOLUTION = "encoder_settings_resolution"; public final static String ENCODER_SETTINGS_RESOLUTION_IDX = "encoder_settings_resolution_idx"; public final static String ENCODER_SETTINGS_FPS = "encoder_settings_fps"; public final static String ENCODER_SETTINGS_THREADS = "encoder_settings_threads"; public final static String ENCODER_SETTINGS_BITRATE = "encoder_settings_bitrate"; public final static String[] Encoders = new String[] {"KSC265", "x264"}; public final static String[] Profiles = new String[] {"superfast", "veryfast", "fast", "medium", "slow", "veryslow", "placebo"}; public final static String[] Delays = new String[] {"zerolatency", "livestreaming", "offline"}; public final static String[] Resolutions = new String [] {"1280*720", "960*540", "640*360", "640*480", "360*640", "368*640", "自定义"}; public int encoderIndex; public int profileIndex; public int delayIndex; public int resIndex; public String bitrate; public String resolution; public String fps; public String threads; public EncoderSettings() { this.encoderIndex = 0;// ksc265 this.profileIndex = 1;//veryfast this.delayIndex = 2;//offline this.resolution = Resolutions[0]; this.resIndex = 0;//1280*720 this.fps = "15"; this.threads = "1"; this.bitrate = "500"; } public EncoderSettings(SharedPreferences sharedPreferences) { this.encoderIndex = sharedPreferences.getInt(ENCODER_SETTINGS_ENCODER, 0); this.profileIndex = sharedPreferences.getInt(ENCODER_SETTINGS_PROFILE, 0); this.delayIndex = sharedPreferences.getInt(ENCODER_SETTINGS_DELAY, 0); this.resIndex = sharedPreferences.getInt(ENCODER_SETTINGS_RESOLUTION_IDX, 0); this.resolution = 
sharedPreferences.getString(ENCODER_SETTINGS_RESOLUTION, Resolutions[0]); this.fps = sharedPreferences.getString(ENCODER_SETTINGS_FPS, "15"); this.threads = sharedPreferences.getString(ENCODER_SETTINGS_THREADS, "1"); this.bitrate = sharedPreferences.getString(ENCODER_SETTINGS_BITRATE, "500"); } public String getEncoderName() { if (encoderIndex <= Encoders.length -1) { return Encoders[encoderIndex]; } else { return "unknow"; } } public String getProfile() { if (profileIndex <= Profiles.length -1) { return Profiles[profileIndex]; } else { return ""; } } public String getDelay() { if (delayIndex <= Delays.length -1) { return Delays[delayIndex]; } else { return ""; } } public int getBitrate() { return Integer.parseInt(bitrate); } public String getResolution() { if (resIndex < Resolutions.length -1) { return Resolutions[resIndex]; } else { return resolution; } } public int getWidth() { String[] res = getResolution().split("\\*"); if (res != null && res.length == 2) { return Integer.parseInt(res[0]); } else { Log.e(TAG, "分辨率解析错误,格式必须为 宽*高"); return 0; } } public int getHeight() { String[] res = getResolution().split("\\*"); if (res != null && res.length == 2) { return Integer.parseInt(res[1]); } else { Log.e(TAG, "分辨率解析错误,格式必须为 宽*高"); return 0; } } public Float getFps() { return Float.parseFloat(fps); } public int getThreads() { return Integer.parseInt(threads); } } ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/EncoderSettingsFragment.java ================================================ package com.ksyun.media.ksy265codec.demo.ui; import android.app.Dialog; import android.os.Bundle; import android.support.annotation.NonNull; import android.support.v4.app.DialogFragment; import android.support.v7.app.AlertDialog; import android.view.LayoutInflater; import android.view.View; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.Button; import 
android.widget.EditText; import android.widget.Spinner; import com.ksyun.media.ksy265codec.demo.R; /** * Created by sujia on 2017/3/28. */ public class EncoderSettingsFragment extends DialogFragment { private Spinner mEncoderSpinner; private Spinner mProfileSpinner; private Spinner mDelaySpinner; private EditText mResulutionEditTxt; private Spinner mResSpinner; private EditText mFpsEditTxt; private EditText mThreadsEditTxt; private EditText mBitrateEditTxt; private Button mButton; private EncoderSettings mSettings; public interface OnSettingsChangeListener { public void onSettingsChanged(EncoderSettings settings); } // Use this instance of the interface to deliver action events OnSettingsChangeListener mListener; public EncoderSettingsFragment() { mSettings = Settings.getInstance().getEncoderSettings(); } public void setListener(OnSettingsChangeListener listener) { mListener = listener; } @NonNull @Override public Dialog onCreateDialog(Bundle savedInstanceState) { AlertDialog.Builder builder = new AlertDialog.Builder(getActivity()); // Get the layout inflater LayoutInflater inflater = getActivity().getLayoutInflater(); // Inflate and set the layout for the dialog // Pass null as the parent view because its going in the dialog layout View view = inflater.inflate(R.layout.encoder_settings, null); mSettings = Settings.getInstance().getEncoderSettings(); initView(view); builder.setView(view); return builder.create(); } private void initView(View view) { ArrayAdapter encodersAdapter = new ArrayAdapter<>(getContext(), android.R.layout.simple_spinner_item, EncoderSettings.Encoders); encodersAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); mEncoderSpinner = (Spinner) view.findViewById(R.id.encoder_settings_encoder_spinner); mEncoderSpinner.setAdapter(encodersAdapter); if (mSettings.encoderIndex <= EncoderSettings.Encoders.length) { mEncoderSpinner.setSelection(mSettings.encoderIndex); } ArrayAdapter profilesAdapter = new 
ArrayAdapter<>(getContext(), android.R.layout.simple_spinner_item, EncoderSettings.Profiles); profilesAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); mProfileSpinner = (Spinner) view.findViewById(R.id.encoder_settings_profile_spinner); mProfileSpinner.setAdapter(profilesAdapter); if (mSettings.profileIndex <= EncoderSettings.Profiles.length) { mProfileSpinner.setSelection(mSettings.profileIndex); } ArrayAdapter delayAdapter = new ArrayAdapter<>(getContext(), android.R.layout.simple_spinner_item, EncoderSettings.Delays); delayAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); mDelaySpinner = (Spinner) view.findViewById(R.id.encoder_settings_delay_spinner); mDelaySpinner.setAdapter(delayAdapter); if (mSettings.delayIndex <= EncoderSettings.Delays.length) { mDelaySpinner.setSelection(mSettings.delayIndex); } mResulutionEditTxt = (EditText) view.findViewById(R.id.encoder_settings_resolution); mResulutionEditTxt.setText(mSettings.resolution); mResulutionEditTxt.setVisibility(View.VISIBLE); if (mSettings.resIndex == EncoderSettings.Resolutions.length -1) { mResulutionEditTxt.setVisibility(View.VISIBLE); mResulutionEditTxt.requestFocus(); } else { mResulutionEditTxt.setVisibility(View.GONE); } ArrayAdapter resAdapter = new ArrayAdapter<>(getContext(), android.R.layout.simple_spinner_item, EncoderSettings.Resolutions); resAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); mResSpinner = (Spinner) view.findViewById(R.id.encoder_settings_resolution_spinner); mResSpinner.setAdapter(resAdapter); if (mSettings.resIndex <= EncoderSettings.Resolutions.length -1) { mResSpinner.setSelection(mSettings.resIndex); } mResSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() { @Override public void onItemSelected(AdapterView parent, View view, int position, long id) { if (mResulutionEditTxt == null) { return; } if (position == EncoderSettings.Resolutions.length -1) { 
mResulutionEditTxt.setVisibility(View.VISIBLE); mResulutionEditTxt.requestFocus(); } else { mResulutionEditTxt.setVisibility(View.GONE); } } @Override public void onNothingSelected(AdapterView parent) { } }); mFpsEditTxt = (EditText) view.findViewById(R.id.encoder_settings_fps); mFpsEditTxt.setText(mSettings.fps); mThreadsEditTxt = (EditText) view.findViewById(R.id.encoder_settings_threads); mThreadsEditTxt.setText(mSettings.threads); mBitrateEditTxt = (EditText) view.findViewById(R.id.encoder_settings_bitrate); mBitrateEditTxt.setText(mSettings.bitrate); mButton = (Button) view.findViewById(R.id.encoder_settings_sure); mButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { mSettings.encoderIndex = mEncoderSpinner.getSelectedItemPosition(); mSettings.profileIndex = mProfileSpinner.getSelectedItemPosition(); mSettings.delayIndex = mDelaySpinner.getSelectedItemPosition(); mSettings.resolution = mResulutionEditTxt.getText().toString(); mSettings.resIndex = mResSpinner.getSelectedItemPosition(); mSettings.fps = mFpsEditTxt.getText().toString(); mSettings.threads = mThreadsEditTxt.getText().toString(); mSettings.bitrate = mBitrateEditTxt.getText().toString(); Settings.getInstance().saveEncoderSettings(mSettings); if (mListener != null) { mListener.onSettingsChanged(mSettings); } dismiss(); } }); } } ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/HelpFragment.java ================================================ package com.ksyun.media.ksy265codec.demo.ui; import android.app.Dialog; import android.os.Bundle; import android.support.annotation.NonNull; import android.support.v4.app.DialogFragment; import android.support.v7.app.AlertDialog; import android.view.LayoutInflater; import android.view.View; import android.widget.TextView; import com.ksyun.media.ksy265codec.demo.R; /** * Created by sujia on 2017/3/28. 
*/ public class HelpFragment extends DialogFragment { private int type;//0 encode, 1 decode public HelpFragment() { } public void setType(int type) { this.type = type; } @NonNull @Override public Dialog onCreateDialog(Bundle savedInstanceState) { AlertDialog.Builder builder = new AlertDialog.Builder(getActivity()); // Get the layout inflater LayoutInflater inflater = getActivity().getLayoutInflater(); // Inflate and set the layout for the dialog // Pass null as the parent view because its going in the dialog layout View view = inflater.inflate(R.layout.help, null); if (type == 0) { TextView info = (TextView) view.findViewById(R.id.help_info); info.setText(R.string.encode_help_info); } else if (type == 1) { TextView info = (TextView) view.findViewById(R.id.help_info); info.setText(R.string.decode_help_info); } builder.setView(view); return builder.create(); } } ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/KSY265CodecDemoApp.java ================================================ package com.ksyun.media.ksy265codec.demo.ui; import android.app.Application; /** * Created by sujia on 2017/3/28. 
 */
public class KSY265CodecDemoApp extends Application {

    // Application entry point: initialize the settings singleton once with
    // the application context so fragments can read it safely later.
    @Override
    public void onCreate() {
        super.onCreate();
        Settings.getInstance().init(this);
    }
}

================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/MainActivity.java ================================================

package com.ksyun.media.ksy265codec.demo.ui;

import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentTabHost;
import android.support.v4.view.ViewPager;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TabHost;
import android.widget.TabWidget;
import android.widget.TextView;

import com.ksyun.media.ksy265codec.demo.R;

import java.util.ArrayList;
import java.util.List;

// Main screen: a ViewPager with two pages (encode / decode) kept in sync
// with a bottom FragmentTabHost, in both directions.
public class MainActivity extends FragmentActivity implements
        ViewPager.OnPageChangeListener, TabHost.OnTabChangeListener {

    private FragmentTabHost mTabHost;
    private LayoutInflater mLayoutInflater;
    // Fragment class per tab (order matches the arrays below).
    private Class fragmentArray[] = { EncoderFragment.class, DecoderFragment.class };
    // Tab icon per tab (both tabs reuse the same drawable here).
    private int tab_imageViewArray[] = { R.drawable.tab_home_btn, R.drawable.tab_home_btn };
    // Tab label per tab ("编码" = encode, "解码" = decode).
    private String tab_textViewArray[] = { "编码", "解码"};
    private List list = new ArrayList();
    private ViewPager mViewPager;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        initView(); // initialize the tab host and pager widgets
        initPage(); // initialize the fragment pages
    }

    // Initialize the widgets.
    private void initView() {
        mViewPager = (ViewPager) findViewById(R.id.pager);
        /* Implement OnPageChangeListener to watch the pager so the bottom
           tab bar follows when the user swipes between pages. */
        mViewPager.addOnPageChangeListener(this); // listen for page switches
        mLayoutInflater = LayoutInflater.from(this); // obtain a layout inflater
        /* Create the FragmentTabHost and bind it. */
        mTabHost = (FragmentTabHost) findViewById(android.R.id.tabhost); // bind the tab host
        mTabHost.setup(this, getSupportFragmentManager(), R.id.pager); // bind the view pager
        /* Implement setOnTabChangedListener so tapping a tab below scrolls
           the ViewPager to the matching Fragment and updates the tab's
           selected icon/text state. */
        mTabHost.setOnTabChangedListener(this);
        int count = tab_textViewArray.length;
        /* Build a TabSpec per tab, set its indicator view and bind its Fragment. */
        for (int i = 0; i < count; i++) {
            // Give each tab button a label tag, icon and text.
            TabHost.TabSpec tabSpec = mTabHost.newTabSpec(tab_textViewArray[i])
                    .setIndicator(getTabItemView(i));
            // Add the tab to the tab host and bind its Fragment class.
            mTabHost.addTab(tabSpec, fragmentArray[i], null);
            mTabHost.setTag(i);
            mTabHost.getTabWidget().getChildAt(i)
                    .setBackgroundResource(R.drawable.selector_tab_background); // highlight when selected
        }
    }

    /* Initialize the Fragments backing the pager. */
    private void initPage() {
        EncoderFragment fragment1 = new EncoderFragment();
        DecoderFragment fragment2 = new DecoderFragment();
        list.add(fragment1);
        list.add(fragment2);
        // Attach the fragment adapter to the pager.
        mViewPager.setAdapter(new MyFragmentAdapter(getSupportFragmentManager(), list));
        mTabHost.getTabWidget().setDividerDrawable(null);
    }

    // Inflates one tab button (icon + text) for index i.
    private View getTabItemView(int i) {
        // Convert the XML layout into a View.
        View view = mLayoutInflater.inflate(R.layout.tab_content, null);
        // Find the child widgets, fill them in and return the view.
        ImageView mTab_ImageView = (ImageView) view
                .findViewById(R.id.tab_imageview);
        TextView mTab_TextView = (TextView) view.findViewById(R.id.tab_textview);
        mTab_ImageView.setBackgroundResource(tab_imageViewArray[i]);
        mTab_TextView.setText(tab_textViewArray[i]);
        return view;
    }

    @Override
    public void onPageScrollStateChanged(int arg0) {
    } // arg0 == 1 while dragging, 2 when settling, 0 when idle

    @Override
    public void onPageScrolled(int arg0, float arg1, int arg2) {
    } // called while scrolling from one page toward the next

    @Override
    public void onPageSelected(int arg0) { // arg0 is the newly selected page position
        TabWidget widget = mTabHost.getTabWidget();
        int oldFocusability = widget.getDescendantFocusability();
        // Block descendant focus so switching tabs doesn't steal focus.
        widget.setDescendantFocusability(ViewGroup.FOCUS_BLOCK_DESCENDANTS);
        mTabHost.setCurrentTab(arg0); // select the tab matching the page
        widget.setDescendantFocusability(oldFocusability); // restore focus behavior
    }

    @Override
    public void onTabChanged(String tabId) { // called when the selected tab changes
        int position = mTabHost.getCurrentTab();
        mViewPager.setCurrentItem(position); // scroll the pager to the tab's page
    }
}

================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/MyFragmentAdapter.java ================================================

package com.ksyun.media.ksy265codec.demo.ui;

import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;

import java.util.List;

/**
 * Simple pager adapter backed by a list of fragments.
 * Created by Carson_Ho on 16/5/23.
 */
public class MyFragmentAdapter extends FragmentPagerAdapter {

    List list;

    public MyFragmentAdapter(FragmentManager fm, List list) {
        super(fm);
        this.list = list;
    }

    @Override
    public Fragment getItem(int arg0) {
        return list.get(arg0);
    } // map an item position to its Fragment

    @Override
    public int getCount() {
        return list.size();
    } // number of pages

}

================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/ProgressDialogFragment.java ================================================

package com.ksyun.media.ksy265codec.demo.ui;

import android.app.Dialog;
import android.app.ProgressDialog;
import android.os.Bundle;
import android.support.v4.app.DialogFragment;

/**
 * Created by sujia on 2017/4/7.
*/ public class ProgressDialogFragment extends DialogFragment { @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setCancelable(false); } @Override public Dialog onCreateDialog(Bundle savedInstanceState) { ProgressDialog dialog = new ProgressDialog(getActivity()); dialog.setTitle("请等待"); dialog.setMessage("编码中..."); dialog.setIndeterminate(true); dialog.setProgressStyle(ProgressDialog.STYLE_SPINNER); return dialog; } } ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/Settings.java ================================================ package com.ksyun.media.ksy265codec.demo.ui; import android.content.Context; import android.content.SharedPreferences; import android.util.Log; /** * Created by sujia on 2017/3/28. */ public class Settings { private static final String TAG = "settings"; private static final boolean TRACE = true; private final String FILE_NAME = "ksy265codecdemo_settings"; private SharedPreferences mSharedPreferences; private SharedPreferences.Editor mEditor; private static Settings sInstance; private EncoderSettings mEncoderSettings; private DecoderSettings mDecoderSettings; public static Settings getInstance() { if (sInstance == null) { synchronized (Settings.class) { if (sInstance == null) { sInstance = new Settings(); } } } return sInstance; } public void init(Context context) throws IllegalArgumentException { if (context == null) { throw new IllegalArgumentException("the context must not null"); } if (mSharedPreferences == null) { mSharedPreferences = context.getSharedPreferences(FILE_NAME, context.MODE_PRIVATE); mEditor = mSharedPreferences.edit(); } } public EncoderSettings getEncoderSettings() { if (mSharedPreferences == null) { if (mEncoderSettings == null) { if(TRACE) { Log.w(TAG, "please call init before call this function"); } mEncoderSettings = new EncoderSettings(); return mEncoderSettings; } } if 
(mEncoderSettings == null) { if(mSharedPreferences != null) { mEncoderSettings = new EncoderSettings(mSharedPreferences); } else { mEncoderSettings = new EncoderSettings(); } } return mEncoderSettings; } public DecoderSettings getDecoderSettings() { if (mSharedPreferences == null) { if (mDecoderSettings == null) { if(TRACE) { Log.w(TAG, "please call init before call this function"); } mDecoderSettings = new DecoderSettings(); return mDecoderSettings; } } if (mDecoderSettings == null) { if(mSharedPreferences != null) { mDecoderSettings = new DecoderSettings(mSharedPreferences); } else { mDecoderSettings = new DecoderSettings(); } } return mDecoderSettings; } public void saveEncoderSettings(EncoderSettings settings) { if (mSharedPreferences == null) { return; } if (mEditor != null) { mEditor.putInt(EncoderSettings.ENCODER_SETTINGS_ENCODER, settings.encoderIndex); mEditor.putInt(EncoderSettings.ENCODER_SETTINGS_PROFILE, settings.profileIndex); mEditor.putInt(EncoderSettings.ENCODER_SETTINGS_DELAY, settings.delayIndex); mEditor.putString(EncoderSettings.ENCODER_SETTINGS_RESOLUTION, settings.resolution); mEditor.putInt(EncoderSettings.ENCODER_SETTINGS_RESOLUTION_IDX, settings.resIndex); mEditor.putString(EncoderSettings.ENCODER_SETTINGS_THREADS, settings.threads); mEditor.putString(EncoderSettings.ENCODER_SETTINGS_FPS, settings.fps); mEditor.putString(EncoderSettings.ENCODER_SETTINGS_BITRATE, settings.bitrate); mEditor.commit(); } } public void saveDecoderSettings(DecoderSettings settings) { if (mSharedPreferences == null) { return; } if (mEditor != null) { mEditor.putInt(DecoderSettings.DECODER_SETTINGS_DECODER, settings.decoderIndex); mEditor.putInt(DecoderSettings.DECODER_SETTINGS_THREADS, settings.threadsIndex); mEditor.putInt(DecoderSettings.DECODER_SETTINGS_FPS, settings.fpsIndex); mEditor.putBoolean(DecoderSettings.DECODER_SETTINGS_OUTPUT, settings.enableYUVOutput); mEditor.commit(); } } } ================================================ FILE: 
Android_demo/KSY265CodecDemo/app/src/main/jni/Android.mk ================================================ LOCAL_PATH := $(call my-dir) include $(call all-subdir-makefiles) ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/Application.mk ================================================ APP_ABI := armeabi-v7a DEBUG := $(NDK_DEBUG) ifndef NDK_DEBUG DEBUG := 0 endif ifeq ($(DEBUG),true) DEBUG := 1 endif ifeq ($(DEBUG),1) APP_CFLAGS += -O0 -g APP_OPTIM := debug else APP_CFLAGS += -O2 APP_OPTIM := release endif APP_STL := gnustl_static APP_PLATFORM := android-9 #NDK_TOOLCHAIN_VERSION := 4.9 APP_STL := gnustl_static ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/Android.mk ================================================ LOCAL_PATH := $(call my-dir) include $(call all-subdir-makefiles) ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/Android.mk ================================================ LOCAL_PATH := $(call my-dir) ARCH_ABI := $(TARGET_ARCH_ABI) PREBUILT_PATH := $(LOCAL_PATH)/../../../../../../../prebuilt # # Prebuilt Shared library # include $(CLEAR_VARS) LOCAL_MODULE := lenthevcdec LOCAL_SRC_FILES := $(PREBUILT_PATH)/$(TARGET_ARCH_ABI)/liblenthevcdec.so include $(PREBUILT_SHARED_LIBRARY) include $(CLEAR_VARS) LOCAL_MODULE := qydecoder LOCAL_SRC_FILES := $(PREBUILT_PATH)/$(TARGET_ARCH_ABI)/libqydecoder.a include $(PREBUILT_STATIC_LIBRARY) # # jniplayer.so # include $(CLEAR_VARS) ifeq ($(TARGET_ARCH_ABI), armeabi-v7a) LENT_CFLAGS := -DARCH_ARM=1 -DHAVE_NEON=1 endif ifeq ($(TARGET_ARCH_ABI), x86) LENT_CFLAGS := -DARCH_X86_32=1 endif LOCAL_C_INCLUDES += $(PREBUILT_PATH)/include LOCAL_SRC_FILES := jniplayer.cpp jni_utils.cpp yuv2rgb565.cpp gl_renderer.cpp LOCAL_LDLIBS := -llog -lz -ljnigraphics -lGLESv2 LOCAL_CFLAGS += $(LENT_CFLAGS) LOCAL_SHARED_LIBRARIES := lenthevcdec 
LOCAL_STATIC_LIBRARIES += qydecoder gnustl_static cpufeatures LOCAL_MODULE := jniplayer include $(BUILD_SHARED_LIBRARY) ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/Chromium_LICENSE.txt ================================================ Copyright (c) 2010 The Chromium Authors. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/gl_renderer.cpp ================================================ // gl_renderer.cpp : render YUV data directly using GPU with OpenGL ES 2.0 // // Copyright (c) 2013 Strongene Ltd. All Right Reserved. // http://www.strongene.com // // Contributors: // Shengbin Meng // James Deng // // You are free to re-use this as the basis for your own application // in source and binary forms, with or without modification, provided // that the following conditions are met: // // * Redistributions of source code must retain the above copyright // notice and this list of conditions. // * Redistributions in binary form must reproduce the above // copyright notice and this list of conditions in the documentation // and/or other materials provided with the distribution. #include #include #include #include #include #include #include #include #include "jniplayer.h" #include "gl_renderer.h" #include "jni_utils.h" extern VideoFrame gVF; extern pthread_mutex_t gVFMutex; #define LOG_TAG "gl_renderer" #define ENABLE_LOGD 0 #if ENABLE_LOGD #define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__) #else #define LOGD(...) #endif #define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__) #define LOGE(...) 
__android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__) static GLuint gProgram; static GLuint gTexIds[3]; static GLuint gAttribPosition; static GLuint gAttribTexCoord; static GLuint gUniformTexY; static GLuint gUniformTexU; static GLuint gUniformTexV; static int backingWidth, backingHeight; static int needSetup = 0; static const char gVertexShader[] = "attribute vec4 a_position;\n" "attribute vec2 a_texCoord;\n" "varying vec2 v_tc;\n" "void main()\n" "{\n" " gl_Position = a_position;\n" " v_tc = a_texCoord;\n" "}\n"; static const char gFragmentShader[] = "varying lowp vec2 v_tc;\n" "uniform sampler2D u_texY;\n" "uniform sampler2D u_texU;\n" "uniform sampler2D u_texV;\n" "void main(void)\n" "{\n" "mediump vec3 yuv;\n" "lowp vec3 rgb;\n" "yuv.x = texture2D(u_texY, v_tc).r;\n" "yuv.y = texture2D(u_texU, v_tc).r - 0.5;\n" "yuv.z = texture2D(u_texV, v_tc).r - 0.5;\n" "rgb = mat3( 1, 1, 1,\n" "0, -0.39465, 2.03211,\n" "1.13983, -0.58060, 0) * yuv;\n" "gl_FragColor = vec4(rgb, 1);\n" "}\n"; static void printGLString(const char *name, GLenum s) { const char *v = (const char *) glGetString(s); LOGI("GL %s = %s\n", name, v); } static GLuint loadShader(GLenum shaderType, const char* pSource) { GLuint shader = glCreateShader(shaderType); if (shader) { glShaderSource(shader, 1, &pSource, NULL); glCompileShader(shader); GLint compiled = 0; glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled); if (!compiled) { GLint infoLen = 0; glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen); if (infoLen) { char* buf = (char*) malloc(infoLen); if (buf) { glGetShaderInfoLog(shader, infoLen, NULL, buf); LOGE("Could not compile shader %d:\n%s\n", shaderType, buf); free(buf); } glDeleteShader(shader); shader = 0; } } } return shader; } static GLuint createProgram(const char* pVertexSource, const char* pFragmentSource) { GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource); if (!vertexShader) { return 0; } GLuint fragmentShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource); if 
(!fragmentShader) { return 0; } GLuint program = glCreateProgram(); if (program) { glAttachShader(program, vertexShader); glAttachShader(program, fragmentShader); glLinkProgram(program); GLint linkStatus = GL_FALSE; glGetProgramiv(program, GL_LINK_STATUS, &linkStatus); if (linkStatus != GL_TRUE) { GLint bufLength = 0; glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength); if (bufLength) { char* buf = (char*) malloc(bufLength); if (buf) { glGetProgramInfoLog(program, bufLength, NULL, buf); LOGE("Could not link program:\n%s\n", buf); free(buf); } } glDeleteProgram(program); program = 0; } } return program; } static GLfloat vertexPositions[] = { -1.0, -1.0, 0.0, 1.0, -1.0, 0.0, -1.0, 1.0, 0.0, 1.0, 1.0, 0.0 }; static GLfloat textureCoords[] = { 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0 }; static int init() { printGLString("Version", GL_VERSION); printGLString("Vendor", GL_VENDOR); printGLString("Renderer", GL_RENDERER); printGLString("Extensions", GL_EXTENSIONS); // create and use our program gProgram = createProgram(gVertexShader, gFragmentShader); if (!gProgram) { LOGE("Could not create program."); return -1; } glUseProgram(gProgram); // get the location of attributes in our shader gAttribPosition = glGetAttribLocation(gProgram, "a_position"); gAttribTexCoord = glGetAttribLocation(gProgram, "a_texCoord"); // get the location of uniforms in our shader gUniformTexY = glGetUniformLocation(gProgram, "u_texY"); gUniformTexU = glGetUniformLocation(gProgram, "u_texU"); gUniformTexV = glGetUniformLocation(gProgram, "u_texV"); // can enable only once glEnableVertexAttribArray(gAttribPosition); glEnableVertexAttribArray(gAttribTexCoord); // set the value of uniforms (uniforms all have constant value) glUniform1i(gUniformTexY, 0); glUniform1i(gUniformTexU, 1); glUniform1i(gUniformTexV, 2); // generate and set parameters for the textures glEnable (GL_TEXTURE_2D); glGenTextures(3, gTexIds); for (int i = 0; i < 3; i++) { glActiveTexture(GL_TEXTURE0 + i); 
glBindTexture(GL_TEXTURE_2D, gTexIds[i]); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); } return 0; } static int setupGraphics(int w, int h) { LOGI("setupGraphics(%d, %d)", w, h); backingWidth = w; backingHeight = h; needSetup = 1; return 0; } void glDrawFrame() { pthread_mutex_lock(&gVFMutex); if (gVF.yuv_data[0] == NULL) { LOGI("gVF.yuv_data[0] == NULL"); pthread_mutex_unlock(&gVFMutex); return; } double pts = gVF.pts; if (needSetup) { LOGI("Will setup ... \n"); GLuint width = gVF.width; GLuint height = gVF.height; float aspect = (float) width / (float) height; if (aspect >= (float) backingWidth / (float) backingHeight) { // fill screen in width, and leave space in Y float scale = (float) backingWidth / (float) width; float maxY = ((float) height * scale) / (float) backingHeight; vertexPositions[1] = vertexPositions[4] = -maxY; vertexPositions[7] = vertexPositions[10] = maxY; } else { // fill screen in height, and leave space in X float scale = (float) backingHeight / (float) height; float maxX = ((float) width * scale) / (float) backingWidth; vertexPositions[0] = vertexPositions[6] = -maxX; vertexPositions[3] = vertexPositions[9] = maxX; } // modify the texture coordinates float texCoord = ((float) width) / gVF.linesize_y; textureCoords[2] = textureCoords[6] = texCoord; // set the value of attributes glVertexAttribPointer(gAttribPosition, 3, GL_FLOAT, 0, 0, vertexPositions); glVertexAttribPointer(gAttribTexCoord, 2, GL_FLOAT, 0, 0, textureCoords); glViewport(0, 0, backingWidth, backingHeight); LOGI("setup finished\n"); needSetup = 0; } glClearColor(0.0f, 0.0f, 0.0f, 1.0f); glClear (GL_COLOR_BUFFER_BIT); LOGD("before upload: %u (%f)", getms(), pts); // upload textures glActiveTexture(GL_TEXTURE0 + 0); glTexImage2D(GL_TEXTURE_2D, 0, 
GL_LUMINANCE, gVF.linesize_y, gVF.height, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, gVF.yuv_data[0]); glActiveTexture(GL_TEXTURE0 + 1); glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, gVF.linesize_uv, gVF.height / 2, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, gVF.yuv_data[1]); glActiveTexture(GL_TEXTURE0 + 2); glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, gVF.linesize_uv, gVF.height / 2, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, gVF.yuv_data[2]); pthread_mutex_unlock(&gVFMutex); LOGD("after upload: %u (%f)", getms(), pts); LOGD("before glDrawArrays: %u (%f)", getms(), pts); glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); LOGD("after glDrawArrays: %u (%f)", getms(), pts); } jint nativeInit(JNIEnv * env, jobject obj) { int ret = init(); if (ret < 0) { LOGE("initialize failed!"); } return ret; } jint nativeSetup(JNIEnv * env, jobject obj, jint width, jint height) { int ret = setupGraphics(width, height); if (ret < 0) { LOGE("setup failed!"); } return ret; } void nativeDrawFrame(JNIEnv * env, jobject obj) { glDrawFrame(); } static JNINativeMethod methods[] = { { "nativeInit", "()I", (void *) nativeInit }, { "nativeSetup", "(II)I", (void *) nativeSetup }, { "nativeDrawFrame", "()V", (void *) nativeDrawFrame }, }; int register_renderer(JNIEnv *env) { return jniRegisterNativeMethods(env, "com/ksyun/media/ksy265codec/demo/decoder/hevdecoder/GLRenderer", methods, sizeof(methods) / sizeof(methods[0])); } ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/gl_renderer.h ================================================ #ifndef __GL_RENDERER_H__ #define __GL_RENDERER_H__ void glDrawFrame(); #endif ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/jni_utils.cpp ================================================ #include #include #include "jni_utils.h" #define LOG_TAG "jni_utils" static JavaVM *gVM; extern int register_player(JNIEnv *env); extern int register_renderer(JNIEnv *env); 
/* * Throw an exception with the specified class and an optional message. */ int jniThrowException(JNIEnv* env, const char* className, const char* msg) { jclass exceptionClass = env->FindClass(className); if (exceptionClass == NULL) { LOGE("Unable to find exception class %s", className); return -1; } if (env->ThrowNew(exceptionClass, msg) != JNI_OK) { LOGE("Failed throwing '%s' '%s'", className, msg); } return 0; } JNIEnv* getJNIEnv() { JNIEnv* env = NULL; int ret = gVM->GetEnv((void**) &env, JNI_VERSION_1_4); if (ret == JNI_OK) { return env; } else if (ret == JNI_EDETACHED) { jint attachSuccess = gVM->AttachCurrentThread(&env, NULL); if (attachSuccess != 0) { LOGE("attach current thread failed \n"); return NULL; } } else { LOGE("obtain JNIEnv failed, return: %d \n", ret); } return env; } void detachJVM() { int ret; ret = gVM->DetachCurrentThread(); if (ret == JNI_OK) { LOGI("detach return OK: %d", ret); } else { LOGE("detach return NOT OK: %d", ret); } } /* * Register native JNI-callable methods. * * "className" looks like "java/lang/String". */ int jniRegisterNativeMethods(JNIEnv* env, const char* className, const JNINativeMethod* gMethods, int numMethods) { jclass clazz; LOGI("Registering %s natives\n", className); clazz = env->FindClass(className); if (clazz == NULL) { LOGE("Native registration unable to find class '%s'\n", className); return -1; } if (env->RegisterNatives(clazz, gMethods, numMethods) < 0) { LOGE("RegisterNatives failed for '%s'\n", className); return -1; } return 0; } jint JNI_OnLoad(JavaVM* vm, void* reserved) { JNIEnv* env = NULL; jint result = JNI_ERR; gVM = vm; if (vm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { LOGE("GetEnv failed!"); return JNI_ERR; } LOGI("loading . . 
."); if (register_player(env) != JNI_OK) { LOGE("can't register player"); return JNI_ERR; } if (register_renderer(env) != JNI_OK) { LOGE("can't register renderer"); return JNI_ERR; } LOGI("loaded"); return JNI_VERSION_1_4; } ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/jni_utils.h ================================================ #ifndef __JNI_UTILS_H__ #define __JNI_UTILS_H__ #include #include #include #ifdef __cplusplus #define __STDC_CONSTANT_MACROS #define __STDC_LIMIT_MACROS #ifdef _STDINT_H #undef _STDINT_H #endif #include #define __STDC_FORMAT_MACROS #endif #define ENABLE_LOGD 0 #if ENABLE_LOGD #define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__) #else #define LOGD(...) #endif #define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__) #define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__) int jniThrowException(JNIEnv* env, const char* className, const char* msg); int jniRegisterNativeMethods(JNIEnv* env, const char* className, const JNINativeMethod* gMethods, int numMethods); JNIEnv* getJNIEnv(); void detachJVM(); #endif /* __JNI_UTILS_H__ */ ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/jniplayer.cpp ================================================ // jniplayer.cpp : decode H.265/HEVC video data in separate native thread // // Copyright (c) 2013 Strongene Ltd. All Right Reserved. // http://www.strongene.com // // Contributors: // Shengbin Meng // James Deng // // You are free to re-use this as the basis for your own application // in source and binary forms, with or without modification, provided // that the following conditions are met: // // * Redistributions of source code must retain the above copyright // notice and this list of conditions. 
// * Redistributions in binary form must reproduce the above // copyright notice and this list of conditions in the documentation // and/or other materials provided with the distribution. #include #include #include #include #include #include #include "jniplayer.h" #include "jni_utils.h" #include "yuv2rgb565.h" #include "gl_renderer.h" #ifdef __cplusplus #define __STDC_CONSTANT_MACROS #define __STDC_LIMIT_MACROS #ifdef _STDINT_H #undef _STDINT_H #endif #include #define __STDC_FORMAT_MACROS #endif extern "C" { #include "lenthevcdec.h" #include "qy265dec.h" #include "qyauth_env.h" } #define LOG_TAG "jniplayer" #define ENABLE_LOGD 0 #if ENABLE_LOGD #define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__) #else #define LOGD(...) #endif #define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__) #define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__) #ifndef _countof #define _countof(a) (sizeof(a) / sizeof((a)[0])) #endif #define LOOP_PLAY 0 #if ARCH_ARM #define USE_SWSCALE 0 #else #define USE_SWSCALE 0 #endif struct MediaInfo { int width; int height; char data_src[1024]; int raw_bs; }; VideoFrame gVF = {0, 0, 0, 0, 0, {NULL, NULL, NULL}}; pthread_mutex_t gVFMutex = PTHREAD_MUTEX_INITIALIZER; static MediaInfo media; static pthread_t decode_thread; static struct SwsContext *p_sws_ctx; static const char* const kClassPathName = "com/ksyun/media/ksy265codec/demo/decoder/hevdecoder/NativeMediaPlayer"; // for lenthevcdec static const uint32_t AU_COUNT_MAX = 1024 * 1024; static const uint32_t AU_BUF_SIZE_MAX = 1024 * 1024 * 80; static uint32_t au_pos[AU_COUNT_MAX]; // too big array, use static to save stack space static uint32_t au_count, au_buf_size; static uint8_t *au_buf = NULL; static lenthevcdec_ctx lent_ctx = NULL; static volatile int exit_decode_thread = 0; static volatile int is_playing = 0; static int frames_sum = 0; static double tstart = 0; static int frames = 0; static double tlast = 0; static float renderFPS 
= 0; static double avg_fps = 0; static uint64_t renderInterval = 0; static struct timeval timeStart; static int frame_count = 0; static double real_time = 0; static float real_fps = 0; static int use_ksy = 0; static void* ksydec_ctx = NULL; static QY265Frame decframe; static int disable_render = 0; static bool enable_output_yuv = 0; static char output_path[1024]; typedef struct Context { jobject obj; jmethodID drawFrame; jmethodID postEvent; } Context; static Context *ctx; static void freeContext(void* opaque) { Context* ctx = (Context*) opaque; if (ctx && ctx->obj) { JNIEnv *env = NULL; env = getJNIEnv(); env->DeleteGlobalRef(ctx->obj); } free(ctx); } static void postEventToJava(void *opaque, int msg, int ext1, int ext2) { Context* ctx = (Context*) opaque; if (ctx == NULL) { return; } int isAttached = 0; JNIEnv *env = NULL; env = getJNIEnv(); if (env == NULL) { return; } env->CallVoidMethod(ctx->obj, ctx->postEvent, msg, ext1, ext2); if (env->ExceptionCheck()) { env->ExceptionDescribe(); } } static int callJavaDrawFrame(void* opaque, int width, int height) { Context* ctx = (Context*) opaque; if (ctx == NULL) { return 0; } int isAttached = 0; JNIEnv *env = NULL; env = getJNIEnv(); if (env == NULL) { return 0; } int ret = env->CallIntMethod(ctx->obj, ctx->drawFrame, width, height); if (env->ExceptionCheck()) { env->ExceptionDescribe(); } return ret; } uint32_t getms() { struct timeval t; gettimeofday(&t, NULL); return (t.tv_sec * 1000) + (t.tv_usec / 1000); } int drawFrame(VideoFrame * vf) { int64_t timePassed, delay; LOGD("enter drawFrame:%u (%f)", getms(), vf->pts); if(disable_render) goto show_display; // copy decode frame to global buffer pthread_mutex_lock(&gVFMutex); if ( gVF.linesize_y != vf->linesize_y || gVF.linesize_uv != vf->linesize_uv || gVF.height != vf->height ) { if ( NULL != gVF.yuv_data[0] ) free(gVF.yuv_data[0]); gVF.yuv_data[0] = gVF.yuv_data[1] = gVF.yuv_data[2] = NULL; gVF.yuv_data[0] = (uint8_t*)malloc(vf->linesize_y * vf->height + 
vf->linesize_uv * vf->height ); if ( NULL == gVF.yuv_data[0] ) { LOGE("malloc failed!\n"); return -1; } gVF.yuv_data[1] = gVF.yuv_data[0] + vf->linesize_y*vf->height; gVF.yuv_data[2] = gVF.yuv_data[1] + vf->linesize_uv*vf->height/2; } gVF.width = vf->width; gVF.height = vf->height; gVF.linesize_y = vf->linesize_y; gVF.linesize_uv = vf->linesize_uv; gVF.pts = vf->pts; memcpy(gVF.yuv_data[0], vf->yuv_data[0], vf->linesize_y*vf->height); memcpy(gVF.yuv_data[1], vf->yuv_data[1], vf->linesize_uv*vf->height/2); memcpy(gVF.yuv_data[2], vf->yuv_data[2], vf->linesize_uv*vf->height/2); pthread_mutex_unlock(&gVFMutex); // wait for display struct timeval timeNow; gettimeofday(&timeNow, NULL); timePassed = ((int64_t)(timeNow.tv_sec - timeStart.tv_sec))*1000000 + (timeNow.tv_usec - timeStart.tv_usec); delay = vf->pts - timePassed; if (delay > 0) { usleep(delay); } show_display: // update information gettimeofday(&timeNow, NULL); double tnow = timeNow.tv_sec + (timeNow.tv_usec / 1000000.0); if (tlast == 0) tlast = tnow; if (tstart == 0) tstart = tnow; if (tnow > tlast + 1) { LOGI("Video Display FPS:%i", (int)frames); frames_sum += frames; avg_fps = frames_sum / (tnow - tstart); LOGI("Video AVG FPS:%.2lf", avg_fps); postEventToJava(ctx, 900, int(frames), int(avg_fps * 4096)); tlast = tlast + 1; frames = 0; } frames++; // request display LOGD("before request draw:%u (%f)", getms(), vf->pts); if (disable_render) { return 0; } else { return callJavaDrawFrame(ctx, vf->width, vf->height); } } int lent_hevc_get_sps(uint8_t* buf, int size, uint8_t** sps_ptr) { int i, nal_type, sps_pos; sps_pos = -1; for ( i = 0; i < (size - 4); i++ ) { if ( 0 == buf[i] && 0 == buf[i+1] && 1 == buf[i+2] ) { nal_type = (buf[i+3] & 0x7E) >> 1; if ( 33 != nal_type && sps_pos >= 0 ) { break; } if ( 33 == nal_type ) { // sps sps_pos = i; } i += 2; } } if ( sps_pos < 0 ) return 0; if ( i == (size - 4) ) i = size; *sps_ptr = buf + sps_pos; return i - sps_pos; } int lent_hevc_get_frame(uint8_t* buf, int size, int 
*is_idr) { static int seq_hdr = 0; int i, nal_type, idr = 0; for ( i = 0; i < (size - 6); i++ ) { if ( 0 == buf[i] && 0 == buf[i+1] && 1 == buf[i+2] ) { nal_type = (buf[i+3] & 0x7E) >> 1; if ( nal_type <= 21 ) { if ( buf[i+5] & 0x80 ) { /* first slice in pic */ if ( !seq_hdr ) break; else seq_hdr = 0; } } if ( nal_type >= 32 && nal_type <= 34 ) { if ( !seq_hdr ) { seq_hdr = 1; idr = 1; break; } seq_hdr = 1; } i += 2; } } if ( i == (size - 6) ) i = size; if ( NULL != is_idr ) *is_idr = idr; return i; } static int write_pic_yv12(int w, int h, uint8_t* buf[3], short stride[3], FILE *fp) { uint8_t *line; int line_len, line_count, i, j, pitch; for ( i = 0; i < 3; i++ ) { line = buf[i]; pitch = stride[i]; line_len = (0 == i) ? w : (w / 2); line_count = (0 == i) ? h : (h / 2); for ( j = 0; j < line_count; j++ ) { if ( fwrite(line, 1, line_len, fp) != line_len ) return -1; line += pitch; } } return 0; } void* rawbs_runDecoder(void *p) { int32_t got_frame, width, height, stride[3]; uint8_t* pixels[3]; int64_t pts, got_pts; int ret, i; struct timeval tv_start, tv_end; if ( NULL == lent_ctx || NULL == au_buf ) return NULL; FILE* out_file = NULL; if (enable_output_yuv) { out_file = fopen(output_path, "wb"); if (out_file == NULL) { LOGE("open outout file %s faile", output_path); goto exit; } } decode: // decode all AUs frame_count = 0; real_time = 0; gettimeofday(&tv_start, NULL); LOGD("dec %d\n", au_count); for ( i = 0; i < au_count && !exit_decode_thread; i++ ) { pts = i * 40; got_frame = 0; uint32_t start_time = getms(); LOGD("before decode: %u", start_time); if(use_ksy) { QY265DecodeFrame(ksydec_ctx, au_buf + au_pos[i], au_pos[i + 1] - au_pos[i], &ret, 0); if ( ret < 0 ) { LOGE("call QY265DecodeFrame failed! 
ret = %d i: %d /%d\n", ret, i, au_count); } QY265DecoderGetDecodedFrame(ksydec_ctx, &decframe, &ret, 0); if ( ret == 0 && decframe.bValid ) { got_frame = 1; width = decframe.frameinfo.nWidth; height = decframe.frameinfo.nHeight; stride[0] = decframe.iStride[0]; stride[1] = decframe.iStride[1]; pixels[0] = decframe.pData[0]; pixels[1] = decframe.pData[1]; pixels[2] = decframe.pData[2]; } } else { ret = lenthevcdec_decode_frame(lent_ctx, au_buf + au_pos[i], au_pos[i + 1] - au_pos[i], pts, &got_frame, &width, &height, stride, (void**)pixels, &got_pts); if ( ret < 0 ) { LOGE("call lenthevcdec_decode_frame failed! ret = %d\n", ret); goto exit; } } uint32_t end_time = getms(); LOGD("after decode: %u", end_time); uint32_t dec_time = end_time - start_time; if ( got_frame > 0 ) { LOGD("decoding time: %u - %u = %u\n", end_time, start_time, dec_time); LOGD("decode frame: pts = %" PRId64 ", linesize = {%d,%d,%d}\n", got_pts, stride[0], stride[1], stride[2]); if ( media.width != width || media.height != height ) { LOGD("Video dimensions change! 
%dx%d -> %dx%d\n", media.width, media.height, width, height); media.width = width; media.height = height; } // draw frame to screen VideoFrame vf; vf.width = width; vf.height = height; vf.linesize_y = stride[0]; vf.linesize_uv = stride[1]; vf.pts = renderInterval * frame_count; vf.yuv_data[0] = pixels[0]; vf.yuv_data[1] = pixels[1]; vf.yuv_data[2] = pixels[2]; if (enable_output_yuv && out_file != NULL) { write_pic_yv12(decframe.frameinfo.nWidth, decframe.frameinfo.nHeight, (uint8_t**)decframe.pData, decframe.iStride, out_file); } if (frame_count == 0) { gettimeofday(&timeStart, NULL); } drawFrame(&vf); if(use_ksy) QY265DecoderReturnDecodedFrame(ksydec_ctx, &decframe); frame_count++; } } #if LOOP_PLAY if (!exit_decode_thread) { LOGI("automatically play again\n"); goto decode; } #endif LOGE("flush %d + %d /%d \n", i,frame_count, au_count); // flush decoder while ( !exit_decode_thread ) { got_frame = 0; if(use_ksy) { QY265DecoderGetDecodedFrame(ksydec_ctx, &decframe, &ret, 0); if ( ret == 0 && frame_count < au_count){ if (decframe.bValid) { got_frame = 1; width = decframe.frameinfo.nWidth; height = decframe.frameinfo.nHeight; stride[0] = decframe.iStride[0]; stride[1] = decframe.iStride[1]; pixels[0] = decframe.pData[0]; pixels[1] = decframe.pData[1]; pixels[2] = decframe.pData[2]; } } else { break; } } else { ret = lenthevcdec_decode_frame(lent_ctx, NULL, 0, pts, &got_frame, &width, &height, stride, (void**)pixels, &got_pts); if ( ret < 0 || got_frame <= 0) break; } if ( got_frame > 0 ) { if ( media.width != width || media.height != height ) { LOGD("Video dimensions change! 
%dx%d -> %dx%d\n", media.width, media.height, width, height); media.width = width; media.height = height; } // draw frame to screen VideoFrame vf; vf.width = width; vf.height = height; vf.linesize_y = stride[0]; vf.linesize_uv = stride[1]; vf.pts = renderInterval * frame_count; vf.yuv_data[0] = pixels[0]; vf.yuv_data[1] = pixels[1]; vf.yuv_data[2] = pixels[2]; if (enable_output_yuv && out_file != NULL) { write_pic_yv12(decframe.frameinfo.nWidth, decframe.frameinfo.nHeight, (uint8_t**)decframe.pData, decframe.iStride, out_file); } drawFrame(&vf); if(use_ksy) QY265DecoderReturnDecodedFrame(ksydec_ctx, &decframe); frame_count++; } } exit: if ( NULL != au_buf ) free(au_buf); au_buf = NULL; au_buf_size = 0; if ( NULL != lent_ctx ) lenthevcdec_destroy(lent_ctx); lent_ctx = NULL; if ( NULL != ksydec_ctx ) QY265DecoderDestroy(ksydec_ctx); ksydec_ctx = NULL; gettimeofday(&tv_end, NULL); real_time = (tv_end.tv_sec + (tv_end.tv_usec / 1000000.0)) - (tv_start.tv_sec + (tv_start.tv_usec / 1000000.0)); real_fps = frame_count / real_time; postEventToJava(ctx, 909, int(frame_count), 0);// end of file detachJVM(); is_playing = 0; LOGI("decode thread exit\n"); exit_decode_thread = 0; if (out_file != NULL) { fclose(out_file); } return NULL; } static int MediaPlayer_setDataSource(JNIEnv *env, jobject thiz, jstring path) { const char *pathStr = env->GetStringUTFChars(path, NULL); memset(&media, 0, sizeof(media)); strcpy(media.data_src, pathStr); // Make sure that local ref is released before a potential exception env->ReleaseStringUTFChars(path, pathStr); // is raw HEVC bitstream file ? 
static const char * hevc_raw_bs_ext[] = {".hevc", ".hm91", ".hm10", ".bit", ".hvc", ".h265", ".265"}; char * ext = strrchr(media.data_src, '.'); if ( NULL != ext ) { int i; for ( i = 0; i < _countof(hevc_raw_bs_ext); i++ ) { if ( strcasecmp(hevc_raw_bs_ext[i], ext) == 0 ) break; } if ( i < _countof(hevc_raw_bs_ext) ) media.raw_bs = 1; } return 0; } static int rawbs_prepare(JNIEnv *env, jobject context, int threads) { FILE *in_file; int32_t got_frame, width, height, stride[3]; uint8_t* pixels[3]; int64_t pts, got_pts; uint8_t *sps; lenthevcdec_ctx one_thread_ctx; int compatibility, frame_count, sps_len, ret, i; in_file = NULL; au_buf = NULL; lent_ctx = NULL; one_thread_ctx = NULL; // get compatibility version compatibility = 0x7fffffff; if ( strncasecmp(".hm91", media.data_src + (strlen(media.data_src) - 5), 5) == 0 ) compatibility = 91; else if ( strncasecmp(".hm10", media.data_src + (strlen(media.data_src) - 5), 5) == 0 ) compatibility = 100; // read file in_file = fopen(media.data_src, "rb"); if ( NULL == in_file ) { LOGE("Can not open input file '%s'\n", media.data_src); goto error_exit; } fseek(in_file, 0, SEEK_END); au_buf_size = ftell(in_file); fseek(in_file, 0, SEEK_SET); LOGD("file size is %d bytes\n", au_buf_size); if ( au_buf_size > AU_BUF_SIZE_MAX ) au_buf_size = AU_BUF_SIZE_MAX; au_buf = (uint8_t*)malloc(au_buf_size); if ( NULL == au_buf ) { LOGE("call malloc failed! 
size is %d\n", au_buf_size); goto error_exit; } if ( fread(au_buf, 1, au_buf_size, in_file) != au_buf_size ) { LOGE("call fread failed!\n"); goto error_exit; } fclose(in_file); in_file = NULL; LOGD("%d bytes read to address %p\n", au_buf_size, au_buf); // find all AU au_count = 0; for ( i = 0; i < au_buf_size && au_count < (AU_COUNT_MAX - 1); i+=3 ) { i += lent_hevc_get_frame(au_buf + i, au_buf_size - i, NULL); if (i < au_buf_size) { au_pos[au_count++] = i; } LOGD("AU[%d] = %d\n", au_count - 1, au_pos[au_count - 1]); } au_pos[au_count] = au_buf_size; // include last AU LOGD("found %d AUs\n", au_count); if(use_ksy) { int hr = QY_OK; QY265DecConfig config = {0}; config.threads = threads; config.bEnableOutputRecToFile = 0; config.strRecYuvFileName = NULL; TCounterEnv* tCounterEnv = (TCounterEnv*) malloc(sizeof(TCounterEnv)); tCounterEnv->context = context; env->GetJavaVM(&tCounterEnv->jvm); config.pAuth = tCounterEnv; ksydec_ctx = QY265DecoderCreate(&config, &hr); if(ksydec_ctx == NULL) { LOGE("call QY265DecoderCreate fail.."); goto error_exit; } LOGD("call QY265DecoderCreate Succeed.."); } // open lentoid HEVC decoder LOGI("create lentoid decoder: compatibility = %d, threads = %d\n", compatibility, threads); lent_ctx = lenthevcdec_create(threads, compatibility, NULL); if ( NULL == lent_ctx ) { LOGE("call lenthevcdec_create failed!\n"); goto error_exit; } LOGD("get decoder %p\n", lent_ctx); // find sps, decode it and get video resolution sps_len = lent_hevc_get_sps(au_buf, au_buf_size, &sps); if ( sps_len > 0 ) { // get a one-thread decoder to decode SPS one_thread_ctx = lenthevcdec_create(1, compatibility, NULL); if ( NULL == lent_ctx ) goto error_exit; width = 0; height = 0; ret = lenthevcdec_decode_frame(one_thread_ctx, sps, sps_len, 0, &got_frame, &width, &height, stride, (void**)pixels, &pts); if ( 0 != width && 0 != height ) { media.width = width; media.height = height; LOGD("Video dimensions is %dx%d\n", width, height); } lenthevcdec_destroy(one_thread_ctx); 
one_thread_ctx = NULL; } return 0; error_exit: if ( NULL != in_file ) fclose(in_file); in_file = NULL; if ( NULL != au_buf ) free(au_buf); au_buf = NULL; au_buf_size = 0; if ( NULL != lent_ctx ) lenthevcdec_destroy(lent_ctx); lent_ctx = NULL; if ( NULL != one_thread_ctx ) lenthevcdec_destroy(one_thread_ctx); one_thread_ctx = NULL; if ( NULL != ksydec_ctx ) QY265DecoderDestroy(ksydec_ctx); ksydec_ctx = NULL; return -1; } static int MediaPlayer_prepare(JNIEnv *env, jobject thiz, jobject context, jint decoderType, jint render, jint threadNumber, jfloat fps) { LOGD("MediaPlayer_prepare: %d threads, fps %f\n", threadNumber, fps); renderFPS = fps; if (fps == 0) { renderInterval = 1; } else { renderInterval = 1.0 / fps * 1000000; // us } if (decoderType == 0) { use_ksy = 1; } else { use_ksy = 0; } disable_render = render; return rawbs_prepare(env, context, threadNumber); } static int MediaPlayer_start(JNIEnv *env, jobject thiz) { LOGI("start decoding thread"); pthread_create(&decode_thread, NULL, rawbs_runDecoder, NULL); return 0; } static int MediaPlayer_pause(JNIEnv *env, jobject thiz) { return 0; } static int MediaPlayer_go(JNIEnv *env, jobject thiz) { return 0; } static int MediaPlayer_stop(JNIEnv *env, jobject thiz) { void* result; exit_decode_thread = 1; pthread_join(decode_thread, &result); exit_decode_thread = 0; if (p_sws_ctx != NULL) { // sws_freeContext(p_sws_ctx); p_sws_ctx = NULL; } if ( NULL != gVF.yuv_data[0] ) free(gVF.yuv_data[0]); memset(&gVF, 0, sizeof(gVF)); LOGI("media player stopped\n"); return 0; } static bool MediaPlayer_isPlaying(JNIEnv *env, jobject thiz) { return is_playing; } static int MediaPlayer_seekTo(JNIEnv *env, jobject thiz, jint msec) { return 0; } static int MediaPlayer_getVideoWidth(JNIEnv *env, jobject thiz) { int w = media.width; return w; } static int MediaPlayer_getVideoHeight(JNIEnv *env, jobject thiz) { int h = media.height; return h; } static int MediaPlayer_getCurrentPosition(JNIEnv *env, jobject thiz) { int msec = 0; return 
msec; } static jfloat MediaPlayer_getDuration(JNIEnv *env, jobject thiz) { //TODO: 通过视频实际帧率计算 return frame_count / renderFPS; } static jfloat MediaPlayer_getDecodeTime(JNIEnv *env, jobject thiz) { return real_time; } static jfloat MediaPlayer_getDecodeFPS(JNIEnv *env, jobject thiz) { return real_fps; } // ---------------------------------------------------------------------------- static void MediaPlayer_native_init(JNIEnv *env, jobject thiz) { jclass clazz; clazz = env->FindClass("com/ksyun/media/ksy265codec/demo/decoder/hevdecoder/NativeMediaPlayer"); if (clazz == NULL) { jniThrowException(env, "java/lang/RuntimeException", "Can't find MediaPlayer"); return; } p_sws_ctx = NULL; frames_sum = 0; tstart = 0; frames = 0; tlast = 0; renderFPS = 0; renderInterval = 0; disable_render = 0; use_ksy = 0; if (thiz) { ctx = (Context *) calloc(1, sizeof(Context)); if (ctx == NULL) { return; } jclass clazz = env->GetObjectClass(thiz); ctx->obj = env->NewGlobalRef(thiz); ctx->postEvent = env->GetMethodID(clazz, "postEventFromNative", "(III)V"); if (ctx->postEvent == NULL) { jniThrowException(env, "java/lang/RuntimeException", "Can't find MediaPlayer.postEventFromNative"); return; } ctx->drawFrame = env->GetMethodID(clazz, "drawFrame", "(II)I"); if (ctx->drawFrame == NULL) { jniThrowException(env, "java/lang/RuntimeException", "Can't find MediaPlayer.drawFrame"); return; } } } static void MediaPlayer_renderBitmap(JNIEnv *env, jobject obj, jobject bitmap) { void* pixels; int ret; if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) { LOGE("AndroidBitmap_lockPixels() failed ! 
error=%d", ret); } // Convert the image from its native format to RGB565 uint32_t start_time = getms(); LOGD("before scale: %d", getms()); #if USE_SWSCALE // use swscale, which may be optimized with SSE for x86 arch if (p_sws_ctx == NULL) { p_sws_ctx = sws_getContext( gVF.width, gVF.height, PIX_FMT_YUV420P, gVF.width, gVF.height, PIX_FMT_RGB565, SWS_BICUBIC|SWS_CPU_CAPS_MMX|SWS_CPU_CAPS_MMX2|SWS_CPU_CAPS_SSE2, NULL, NULL, NULL); } if (p_sws_ctx != NULL) { unsigned char *src[4]; int src_stride[4]; unsigned char *dst[4]; int dst_stride[4]; src_stride[0] = gVF.linesize_y; src_stride[1] = src_stride[2] = gVF.linesize_uv; dst[0] = (unsigned char*)pixels; dst_stride[0] = gVF.width * 2; sws_scale(p_sws_ctx, (const uint8_t * const *)gVF.yuv_data, src_stride, 0, gVF.height, dst, dst_stride); } #else ConvertYCbCrToRGB565( gVF.yuv_data[0], gVF.yuv_data[1], gVF.yuv_data[2], (uint8_t*)pixels, gVF.width, gVF.height, gVF.linesize_y, gVF.linesize_uv, gVF.width * 2, 420 ); #endif uint32_t end_time = getms(); LOGD("after scale: %d", getms()); LOGD("scale time: %dms", end_time - start_time); AndroidBitmap_unlockPixels(env, bitmap); } static void MediaPlayer_set_output(JNIEnv *env, jobject thiz, jstring path) { const char *pathStr = env->GetStringUTFChars(path, NULL); enable_output_yuv = true; strcpy(output_path, pathStr); // Make sure that local ref is released before a potential exception env->ReleaseStringUTFChars(path, pathStr); } static jstring MediaPlayer_getVersion(JNIEnv *env, jobject thiz) { if (use_ksy) { return env->NewStringUTF(strLibQy265Version); } else { char version[20]; sprintf(version, "%d", lenthevcdec_version()); return env->NewStringUTF(version); } } // ---------------------------------------------------------------------------- static JNINativeMethod gMethods[] = { { "setDataSource", "(Ljava/lang/String;)I", (void *) MediaPlayer_setDataSource }, { "native_prepare", "(Landroid/content/Context;IIIF)I", (void *) MediaPlayer_prepare }, { "native_start", "()I", (void 
*) MediaPlayer_start }, { "native_stop", "()I", (void *) MediaPlayer_stop }, { "getVideoWidth", "()I", (void *) MediaPlayer_getVideoWidth }, { "getVideoHeight", "()I", (void *) MediaPlayer_getVideoHeight }, { "native_seekTo", "(I)I", (void *) MediaPlayer_seekTo }, { "native_pause", "()I", (void *) MediaPlayer_pause }, { "native_go", "()I", (void *) MediaPlayer_go }, { "isPlaying", "()Z", (void *) MediaPlayer_isPlaying }, { "getCurrentPosition", "()I", (void *) MediaPlayer_getCurrentPosition }, { "getDuration", "()F", (void *) MediaPlayer_getDuration }, { "getDecodeTime", "()F", (void *) MediaPlayer_getDecodeTime }, { "getDecodeFPS", "()F", (void *) MediaPlayer_getDecodeFPS }, { "native_init", "()V", (void *) MediaPlayer_native_init }, { "renderBitmap", "(Landroid/graphics/Bitmap;)V", (void *) MediaPlayer_renderBitmap }, { "native_set_output", "(Ljava/lang/String;)V", (void *) MediaPlayer_set_output }, { "getVersion", "()Ljava/lang/String;", (jstring*) MediaPlayer_getVersion }, }; int register_player(JNIEnv *env) { return jniRegisterNativeMethods(env, kClassPathName, gMethods, sizeof(gMethods) / sizeof(gMethods[0])); } ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/jniplayer.h ================================================ #ifndef __JNIPLAYER_H__ #define __JNIPLAYER_H__ struct VideoFrame { int width; int height; int linesize_y; int linesize_uv; double pts; uint8_t *yuv_data[3]; }; uint32_t getms(); #endif /* __JNIPLAYER_H__ */ ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/jniplayer_new.cpp ================================================ // jniplayer.cpp : decode H.265/HEVC video data in separate native thread // // Copyright (c) 2013 Strongene Ltd. All Right Reserved. 
// http://www.strongene.com // // Contributors: // Shengbin Meng // James Deng // // You are free to re-use this as the basis for your own application // in source and binary forms, with or without modification, provided // that the following conditions are met: // // * Redistributions of source code must retain the above copyright // notice and this list of conditions. // * Redistributions in binary form must reproduce the above // copyright notice and this list of conditions in the documentation // and/or other materials provided with the distribution. #include #include #include #include #include #include #include "jniplayer.h" #include "jni_utils.h" #include "yuv2rgb565.h" #include "gl_renderer.h" #ifdef __cplusplus #define __STDC_CONSTANT_MACROS #define __STDC_LIMIT_MACROS #ifdef _STDINT_H #undef _STDINT_H #endif #include #define __STDC_FORMAT_MACROS #endif extern "C" { #include "lenthevcdec.h" #include "qy265dec.h" } #define LOG_TAG "jniplayer" #define ENABLE_LOGD 0 #if ENABLE_LOGD #define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__) #else #define LOGD(...) #endif #define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__) #define LOGE(...) 
__android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__) #ifndef _countof #define _countof(a) (sizeof(a) / sizeof((a)[0])) #endif #define LOOP_PLAY 0 #if ARCH_ARM #define USE_SWSCALE 0 #else #define USE_SWSCALE 0 #endif struct fields_t { jmethodID drawFrame; jmethodID postEvent; }; struct MediaInfo { int width; int height; char data_src[1024]; int raw_bs; }; VideoFrame gVF = {0, 0, 0, 0, 0, {NULL, NULL, NULL}}; pthread_mutex_t gVFMutex = PTHREAD_MUTEX_INITIALIZER; static fields_t fields; static JNIEnv *gEnv = NULL; static JNIEnv *gEnvLocal = NULL; static jclass gClass = NULL; static MediaInfo media; static pthread_t decode_thread; static struct SwsContext *p_sws_ctx; static const char* const kClassPathName = "com/ksyun/media/ksy265codec/demo/decoder/hevdecoder/NativeMediaPlayer"; // for lenthevcdec static const uint32_t AU_COUNT_MAX = 1024 * 1024; static const uint32_t AU_BUF_SIZE_MAX = 1024 * 1024 * 50; static uint32_t au_pos[AU_COUNT_MAX]; // too big array, use static to save stack space static uint32_t au_count, au_buf_size; static uint8_t *au_buf = NULL; static lenthevcdec_ctx lent_ctx = NULL; static volatile int exit_decode_thread = 0; static volatile int is_playing = 0; static int frames_sum = 0; static double tstart = 0; static int frames = 0; static double tlast = 0; static float renderFPS = 0; static uint64_t renderInterval = 0; static struct timeval timeStart; static int use_ksy = 0; static void* ksydec_ctx = NULL; static QY265Frame decframe; static int disable_render = 0; static inline int next_p2(int a) { int rval=1; while(rvalCallStaticVoidMethod(gClass, fields.postEvent, msg, ext1, ext2, 0); } int drawFrame(VideoFrame * vf) { LOGD("enter drawFrame:%u (%f)", getms(), vf->pts); if(disable_render) return 0; // copy decode frame to global buffer pthread_mutex_lock(&gVFMutex); if ( gVF.linesize_y != vf->linesize_y || gVF.linesize_uv != vf->linesize_uv || gVF.height != vf->height ) { if ( NULL != gVF.yuv_data[0] ) free(gVF.yuv_data[0]); if ( NULL != 
gVF.yuv_data[1] ) free(gVF.yuv_data[1]); if ( NULL != gVF.yuv_data[2] ) free(gVF.yuv_data[2]); gVF.yuv_data[0] = gVF.yuv_data[1] = gVF.yuv_data[2] = NULL; gVF.yuv_data[0] = (uint8_t*)malloc(vf->linesize_y * vf->height + vf->linesize_uv * vf->height ); if ( NULL == gVF.yuv_data[0] ) { LOGE("malloc failed!\n"); return -1; } gVF.yuv_data[1] = gVF.yuv_data[0] + vf->linesize_y*vf->height; gVF.yuv_data[2] = gVF.yuv_data[1] + vf->linesize_uv*vf->height/2; } gVF.width = vf->width; gVF.height = vf->height; gVF.linesize_y = vf->linesize_y; gVF.linesize_uv = vf->linesize_uv; gVF.pts = vf->pts; if(use_ksy) { uint8_t *dst[3] = {gVF.yuv_data[0], gVF.yuv_data[1], gVF.yuv_data[2]}; uint8_t *src[3] = {decframe.pData[0], decframe.pData[1], decframe.pData[2]}; for (int j = 0; j < gVF.height/2; ++j) { memcpy(dst[0], src[0], gVF.linesize_y); dst[0] += gVF.linesize_y; src[0] += decframe.iStride[0]; memcpy(dst[0], src[0], gVF.linesize_y); dst[0] += gVF.linesize_y; src[0] += decframe.iStride[0]; memcpy(dst[1], src[1], gVF.linesize_uv); dst[1] += gVF.linesize_uv; src[1] += decframe.iStride[1]; memcpy(dst[2], src[2], gVF.linesize_uv); dst[2] += gVF.linesize_uv; src[2] += decframe.iStride[2]; } } else { memcpy(gVF.yuv_data[0], vf->yuv_data[0], vf->linesize_y*vf->height); memcpy(gVF.yuv_data[1], vf->yuv_data[1], vf->linesize_uv*vf->height/2); memcpy(gVF.yuv_data[2], vf->yuv_data[2], vf->linesize_uv*vf->height/2); } pthread_mutex_unlock(&gVFMutex); // wait for display struct timeval timeNow; gettimeofday(&timeNow, NULL); int64_t timePassed = ((int64_t)(timeNow.tv_sec - timeStart.tv_sec))*1000000 + (timeNow.tv_usec - timeStart.tv_usec); int64_t delay = vf->pts - timePassed; if (delay > 0) { usleep(delay); } // update information gettimeofday(&timeNow, NULL); double tnow = timeNow.tv_sec + (timeNow.tv_usec / 1000000.0); if (tlast == 0) tlast = tnow; if (tstart == 0) tstart = tnow; if (tnow > tlast + 1) { double avg_fps; LOGI("Video Display FPS:%i", (int)frames); frames_sum += frames; avg_fps = 
/*
 * Locate the first SPS NAL unit in an Annex-B HEVC buffer.
 *
 * Scans for 00 00 01 start codes; remembers the offset of the first SPS
 * (NAL type 33) and stops at the next non-SPS NAL after it.
 *
 * buf/size : Annex-B byte stream to scan.
 * sps_ptr  : out — points at the SPS start code inside buf (only set on success).
 * Returns the SPS span length in bytes, or 0 if no SPS was found.
 */
int lent_hevc_get_sps(uint8_t* buf, int size, uint8_t** sps_ptr)
{
    int pos = -1;   /* offset of the SPS start code; -1 while unseen */
    int i;

    for (i = 0; i < (size - 4); i++) {
        if (buf[i] != 0 || buf[i+1] != 0 || buf[i+2] != 1)
            continue;                       /* not a start code here */
        int nal = (buf[i+3] & 0x7E) >> 1;   /* HEVC NAL type from the first header byte */
        if (nal != 33 && pos >= 0)
            break;                          /* first non-SPS NAL after the SPS ends it */
        if (nal == 33)
            pos = i;                        /* SPS found */
        i += 2;                             /* step over the rest of the start code */
    }

    if (pos < 0)
        return 0;
    if (i == (size - 4))
        i = size;                           /* scan ran off the end: SPS extends to EOF */
    *sps_ptr = buf + pos;
    return i - pos;
}

/*
 * Find the length of the next access unit (frame) in an Annex-B HEVC buffer.
 *
 * An AU boundary is the first slice of a picture (NAL type <= 21 with the
 * first-slice flag set in its sixth byte), except when a parameter-set run
 * (VPS/SPS/PPS, NAL 32..34) has just started — then the boundary is placed
 * at the parameter set itself and reported as an IDR point.
 *
 * NOTE: seq_hdr is function-static, so parser state persists across calls
 * (intended: it tracks "inside a parameter-set run") and across input files.
 *
 * buf/size : Annex-B byte stream positioned at the current AU.
 * is_idr   : out (may be NULL) — 1 when the boundary is a parameter-set run.
 * Returns the byte offset of the next AU boundary (== size when none found).
 */
int lent_hevc_get_frame(uint8_t* buf, int size, int *is_idr)
{
    static int seq_hdr = 0;     /* 1 while scanning through a VPS/SPS/PPS run */
    int idr = 0;
    int i;

    for (i = 0; i < (size - 6); i++) {
        if (buf[i] != 0 || buf[i+1] != 0 || buf[i+2] != 1)
            continue;
        int nal = (buf[i+3] & 0x7E) >> 1;
        if (nal <= 21 && (buf[i+5] & 0x80)) {   /* first slice of a picture */
            if (!seq_hdr)
                break;                          /* boundary between two pictures */
            seq_hdr = 0;                        /* slice following a header run: same AU */
        }
        if (nal >= 32 && nal <= 34) {           /* VPS / SPS / PPS */
            if (!seq_hdr) {
                seq_hdr = 1;
                idr = 1;
                break;                          /* boundary right before the header run */
            }
            seq_hdr = 1;
        }
        i += 2;
    }

    if (i == (size - 6))
        i = size;                               /* no boundary found: AU is the rest */
    if (NULL != is_idr)
        *is_idr = idr;
    return i;
}
ret = %d\n", ret); goto exit; } QY265DecoderGetDecodedFrame(ksydec_ctx, &decframe, &ret, 0); if ( ret == 0 && decframe.bValid ) { got_frame = 1; width = decframe.frameinfo.nWidth; height = decframe.frameinfo.nHeight; stride[0] = decframe.iStride[0]; stride[1] = decframe.iStride[1]; pixels[0] = decframe.pData[0]; pixels[1] = decframe.pData[1]; pixels[2] = decframe.pData[2]; } else got_frame = 0; } else { ret = lenthevcdec_decode_frame(lent_ctx, au_buf + au_pos[i], au_pos[i + 1] - au_pos[i], pts, &got_frame, &width, &height, stride, (void**)pixels, &got_pts); if ( ret < 0 ) { LOGE("call lenthevcdec_decode_frame failed! ret = %d\n", ret); goto exit; } } uint32_t end_time = getms(); LOGD("after decode: %u", end_time); uint32_t dec_time = end_time - start_time; if ( got_frame > 0 ) { LOGD("decoding time: %u - %u = %u\n", end_time, start_time, dec_time); LOGD("decode frame: pts = %" PRId64 ", linesize = {%d,%d,%d}\n", got_pts, stride[0], stride[1], stride[2]); if ( media.width != width || media.height != height ) { LOGD("Video dimensions change! 
%dx%d -> %dx%d\n", media.width, media.height, width, height); media.width = width; media.height = height; } // draw frame to screen VideoFrame vf; vf.width = width; vf.height = height; vf.linesize_y = stride[0]; vf.linesize_uv = stride[1]; vf.pts = renderInterval * frame_count; vf.yuv_data[0] = pixels[0]; vf.yuv_data[1] = pixels[1]; vf.yuv_data[2] = pixels[2]; if (frame_count == 0) { gettimeofday(&timeStart, NULL); } drawFrame(&vf); if(use_ksy) QY265DecoderReturnDecodedFrame(ksydec_ctx, &decframe); frame_count++; } } #if LOOP_PLAY if (!exit_decode_thread) { LOGI("automatically play again\n"); goto decode; } #endif // flush decoder while ( !exit_decode_thread ) { got_frame = 0; if(use_ksy) { QY265DecoderGetDecodedFrame(ksydec_ctx, &decframe, &ret, 0); if ( ret == 0 && decframe.bValid ) { got_frame = 1; width = decframe.frameinfo.nWidth; height = decframe.frameinfo.nHeight; stride[0] = decframe.iStride[0]; stride[1] = decframe.iStride[1]; pixels[0] = decframe.pData[0]; pixels[1] = decframe.pData[1]; pixels[2] = decframe.pData[2]; } else break; } else { ret = lenthevcdec_decode_frame(lent_ctx, NULL, 0, pts, &got_frame, &width, &height, stride, (void**)pixels, &got_pts); if ( ret < 0 || got_frame <= 0) break; } if ( got_frame > 0 ) { if ( media.width != width || media.height != height ) { LOGD("Video dimensions change! 
%dx%d -> %dx%d\n", media.width, media.height, width, height); media.width = width; media.height = height; } // draw frame to screen VideoFrame vf; vf.width = width; vf.height = height; vf.linesize_y = stride[0]; vf.linesize_uv = stride[1]; vf.pts = renderInterval * frame_count; vf.yuv_data[0] = pixels[0]; vf.yuv_data[1] = pixels[1]; vf.yuv_data[2] = pixels[2]; drawFrame(&vf); if(use_ksy) QY265DecoderReturnDecodedFrame(ksydec_ctx, &decframe); frame_count++; } } exit: if ( NULL != au_buf ) free(au_buf); au_buf = 0; if ( NULL != lent_ctx ) lenthevcdec_destroy(lent_ctx); lent_ctx = NULL; if ( ksydec_ctx != NULL ) QY265DecoderDestroy(ksydec_ctx); ksydec_ctx = NULL; postEvent(909, int(frame_count), 0); // end of file detachJVM(); is_playing = 0; LOGI("decode thread exit\n"); exit_decode_thread = 0; return NULL; } static int MediaPlayer_setDataSource(JNIEnv *env, jobject thiz, jstring path) { const char *pathStr = env->GetStringUTFChars(path, NULL); memset(&media, 0, sizeof(media)); strcpy(media.data_src, pathStr); // Make sure that local ref is released before a potential exception env->ReleaseStringUTFChars(path, pathStr); // is raw HEVC bitstream file ? 
static const char * hevc_raw_bs_ext[] = {".hevc", ".hm91", ".hm10", ".bit", ".hvc", ".h265", ".265"}; char * ext = strrchr(media.data_src, '.'); if ( NULL != ext ) { int i; for ( i = 0; i < _countof(hevc_raw_bs_ext); i++ ) { if ( strcasecmp(hevc_raw_bs_ext[i], ext) == 0 ) break; } if ( i < _countof(hevc_raw_bs_ext) ) media.raw_bs = 1; } return 0; } static int rawbs_prepare(int threads) { FILE *in_file; int32_t got_frame, width, height, stride[3]; uint8_t* pixels[3]; int64_t pts, got_pts; uint8_t *sps; lenthevcdec_ctx one_thread_ctx; int compatibility, frame_count, sps_len, ret, i; in_file = NULL; au_buf = NULL; lent_ctx = NULL; one_thread_ctx = NULL; ksydec_ctx = NULL; // get compatibility version compatibility = 0x7fffffff; if ( strncasecmp(".hm91", media.data_src + (strlen(media.data_src) - 5), 5) == 0 ) compatibility = 91; else if ( strncasecmp(".hm10", media.data_src + (strlen(media.data_src) - 5), 5) == 0 ) compatibility = 100; // read file in_file = fopen(media.data_src, "rb"); if ( NULL == in_file ) { LOGE("Can not open input file '%s'\n", media.data_src); goto error_exit; } fseek(in_file, 0, SEEK_END); au_buf_size = ftell(in_file); fseek(in_file, 0, SEEK_SET); LOGE("file size is %d bytes\n", au_buf_size); if ( au_buf_size > AU_BUF_SIZE_MAX ) au_buf_size = AU_BUF_SIZE_MAX; au_buf = (uint8_t*)malloc(au_buf_size); if ( NULL == au_buf ) { LOGE("call malloc failed! 
size is %d\n", au_buf_size); goto error_exit; } if ( fread(au_buf, 1, au_buf_size, in_file) != au_buf_size ) { LOGE("call fread failed!\n"); goto error_exit; } fclose(in_file); in_file = NULL; LOGE("%d bytes read to address %p\n", au_buf_size, au_buf); // find all AU au_count = 0; for ( i = 0; i < au_buf_size && au_count < (AU_COUNT_MAX - 1); i+=3 ) { i += lent_hevc_get_frame(au_buf + i, au_buf_size - i, NULL); if (i < au_buf_size) { au_pos[au_count++] = i; } LOGD("AU[%d] = %d\n", au_count - 1, au_pos[au_count - 1]); } au_pos[au_count] = au_buf_size; // include last AU LOGE("found %d AUs\n", au_count); // open lentoid HEVC decoder if(use_ksy) { int hr = QY_OK; QY265DecConfig config; config.threads = threads; config.bEnableOutputRecToFile = 0; config.strRecYuvFileName = NULL; ksydec_ctx = QY265DecoderCreate(&config, &hr); if(ksydec_ctx == NULL) { LOGE("call QY265DecoderCreate fail.."); goto error_exit; } LOGE("call QY265DecoderCreate Succeed.."); } LOGI("create lentoid decoder: compatibility = %d, threads = %d\n", compatibility, threads); lent_ctx = lenthevcdec_create(threads, compatibility, NULL); if ( NULL == lent_ctx ) { LOGE("call lenthevcdec_create failed!\n"); goto error_exit; } LOGD("get decoder %p\n", lent_ctx); // find sps, decode it and get video resolution sps_len = lent_hevc_get_sps(au_buf, au_buf_size, &sps); if ( sps_len > 0 ) { // get a one-thread decoder to decode SPS one_thread_ctx = lenthevcdec_create(1, compatibility, NULL); if ( NULL == lent_ctx ) { LOGE("call lenthevcdec_create fail.."); goto error_exit; } width = 0; height = 0; ret = lenthevcdec_decode_frame(one_thread_ctx, sps, sps_len, 0, &got_frame, &width, &height, stride, (void**)pixels, &pts); if ( 0 != width && 0 != height ) { media.width = width; media.height = height; LOGE("Video dimensions is %dx%d\n", width, height); } lenthevcdec_destroy(one_thread_ctx); one_thread_ctx = NULL; if(use_ksy) { gVF.linesize_y = next_p2(width); gVF.linesize_uv = next_p2(width/2); gVF.yuv_data[0] = 
(uint8_t*)malloc( gVF.linesize_y * height); gVF.yuv_data[1] = (uint8_t*)malloc( gVF.linesize_uv * height/2); gVF.yuv_data[2] = (uint8_t*)malloc( gVF.linesize_uv * height/2); LOGE("linesize:%d, %d", gVF.linesize_y, gVF.linesize_uv); } } return 0; error_exit: if ( NULL != in_file ) fclose(in_file); in_file = NULL; if ( NULL != au_buf ) free(au_buf); au_buf = NULL; if ( NULL != lent_ctx ) lenthevcdec_destroy(lent_ctx); lent_ctx = NULL; if ( NULL != one_thread_ctx ) lenthevcdec_destroy(one_thread_ctx); one_thread_ctx = NULL; if ( NULL != ksydec_ctx) QY265DecoderDestroy(ksydec_ctx); ksydec_ctx = NULL; return -1; } static int MediaPlayer_prepare(JNIEnv *env, jobject thiz, jint decoderType, jint render, jint threadNumber, jfloat fps) { LOGE("MediaPlayer_prepare: decoderType:%d, %d threads, fps %f\n", decoderType, threadNumber, fps); renderFPS = fps; if (fps == 0) renderInterval = 1; else { renderInterval = 1.0 / fps * 1000000; // us } if (decoderType == 0) use_ksy = 1; disable_render = render; return rawbs_prepare(threadNumber); } static int MediaPlayer_start(JNIEnv *env, jobject thiz) { LOGI("start decoding thread"); pthread_create(&decode_thread, NULL, rawbs_runDecoder, NULL); return 0; } static int MediaPlayer_pause(JNIEnv *env, jobject thiz) { return 0; } static int MediaPlayer_go(JNIEnv *env, jobject thiz) { return 0; } static int MediaPlayer_stop(JNIEnv *env, jobject thiz) { void* result; exit_decode_thread = 1; pthread_join(decode_thread, &result); exit_decode_thread = 0; if (p_sws_ctx != NULL) { // sws_freeContext(p_sws_ctx); p_sws_ctx = NULL; } if ( NULL != gVF.yuv_data[0] ) free(gVF.yuv_data[0]); memset(&gVF, 0, sizeof(gVF)); LOGI("media player stopped\n"); return 0; } static bool MediaPlayer_isPlaying(JNIEnv *env, jobject thiz) { return is_playing; } static int MediaPlayer_seekTo(JNIEnv *env, jobject thiz, jint msec) { return 0; } static int MediaPlayer_getVideoWidth(JNIEnv *env, jobject thiz) { int w = media.width; return w; } static int 
MediaPlayer_getVideoHeight(JNIEnv *env, jobject thiz) { int h = media.height; return h; } static int MediaPlayer_getCurrentPosition(JNIEnv *env, jobject thiz) { int msec = 0; return msec; } static int MediaPlayer_getDuration(JNIEnv *env, jobject thiz) { int msec = 0; return msec; } // ---------------------------------------------------------------------------- static void MediaPlayer_native_init(JNIEnv *env) { jclass clazz; clazz = env->FindClass("com/ksyun/media/ksy265codec/demo/decoder/hevdecoder/NativeMediaPlayer"); if (clazz == NULL) { jniThrowException(env, "java/lang/RuntimeException", "Can't find MediaPlayer"); return; } fields.postEvent = env->GetStaticMethodID(clazz, "postEventFromNative", "(III)V"); if (fields.postEvent == NULL) { jniThrowException(env, "java/lang/RuntimeException", "Can't find MediaPlayer.postEventFromNative"); return; } fields.drawFrame = env->GetStaticMethodID(clazz, "drawFrame","(II)I"); if (fields.drawFrame == NULL) { jniThrowException(env, "java/lang/RuntimeException", "Can't find MediaPlayer.drawFrame"); return; } gClass = NULL; gEnv = NULL; gEnvLocal = NULL; p_sws_ctx = NULL; frames_sum = 0; tstart = 0; frames = 0; tlast = 0; renderFPS = 0; renderInterval = 0; disable_render = 0; } static void MediaPlayer_native_setup(JNIEnv *env, jobject thiz, jobject weak_this) { // Hold onto the MediaPlayer class for use in calling the static method // that posts events to the application thread. jclass clazz = env->GetObjectClass(thiz); if (clazz == NULL) { jniThrowException(env, "java/lang/Exception", kClassPathName); return; } gClass = (jclass)env->NewGlobalRef(clazz); gEnv = env; } static void MediaPlayer_renderBitmap(JNIEnv *env, jobject obj, jobject bitmap) { void* pixels; int ret; if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) { LOGE("AndroidBitmap_lockPixels() failed ! 
error=%d", ret); } // Convert the image from its native format to RGB565 uint32_t start_time = getms(); LOGD("before scale: %d", getms()); #if USE_SWSCALE // use swscale, which may be optimized with SSE for x86 arch if (p_sws_ctx == NULL) { p_sws_ctx = sws_getContext( gVF.width, gVF.height, PIX_FMT_YUV420P, gVF.width, gVF.height, PIX_FMT_RGB565, SWS_BICUBIC|SWS_CPU_CAPS_MMX|SWS_CPU_CAPS_MMX2|SWS_CPU_CAPS_SSE2, NULL, NULL, NULL); } if (p_sws_ctx != NULL) { unsigned char *src[4]; int src_stride[4]; unsigned char *dst[4]; int dst_stride[4]; src_stride[0] = gVF.linesize_y; src_stride[1] = src_stride[2] = gVF.linesize_uv; dst[0] = (unsigned char*)pixels; dst_stride[0] = gVF.width * 2; sws_scale(p_sws_ctx, (const uint8_t * const *)gVF.yuv_data, src_stride, 0, gVF.height, dst, dst_stride); } #else ConvertYCbCrToRGB565( gVF.yuv_data[0], gVF.yuv_data[1], gVF.yuv_data[2], (uint8_t*)pixels, gVF.width, gVF.height, gVF.linesize_y, gVF.linesize_uv, gVF.width * 2, 420 ); #endif uint32_t end_time = getms(); LOGD("after scale: %d", getms()); LOGD("scale time: %dms", end_time - start_time); AndroidBitmap_unlockPixels(env, bitmap); } // ---------------------------------------------------------------------------- static JNINativeMethod gMethods[] = { { "setDataSource", "(Ljava/lang/String;)I", (void *) MediaPlayer_setDataSource }, { "native_prepare", "(IIIF)I", (void *) MediaPlayer_prepare }, { "native_start", "()I", (void *) MediaPlayer_start }, { "native_stop", "()I", (void *) MediaPlayer_stop }, { "getVideoWidth", "()I", (void *) MediaPlayer_getVideoWidth }, { "getVideoHeight", "()I", (void *) MediaPlayer_getVideoHeight }, { "native_seekTo", "(I)I", (void *) MediaPlayer_seekTo }, { "native_pause", "()I", (void *) MediaPlayer_pause }, { "native_go", "()I", (void *) MediaPlayer_go }, { "isPlaying", "()Z", (void *) MediaPlayer_isPlaying }, { "getCurrentPosition", "()I", (void *) MediaPlayer_getCurrentPosition }, { "getDuration", "()I", (void *) MediaPlayer_getDuration }, { 
"native_init", "()V", (void *) MediaPlayer_native_init }, { "native_setup", "(Ljava/lang/Object;)V", (void *) MediaPlayer_native_setup }, { "renderBitmap", "(Landroid/graphics/Bitmap;)V", (void *) MediaPlayer_renderBitmap }, }; int register_player(JNIEnv *env) { return jniRegisterNativeMethods(env, kClassPathName, gMethods, sizeof(gMethods) / sizeof(gMethods[0])); } ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/yuv2rgb565.cpp ================================================ // Copyright (c) 2010 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // contributor Siarhei Siamashka // This file is modified based on: // http://dxr.mozilla.org/mozilla-central/source/gfx/ycbcr/yuv_convert_arm.cpp #include "yuv2rgb565.h" #if ARCH_ARM && HAVE_NEON /*************************************** * convert in neon: */ void __attribute((noinline,optimize("-fomit-frame-pointer"))) yuv42x_to_rgb565_row_neon(uint16_t *dst, const uint8_t *y, const uint8_t *u, const uint8_t *v, int n, int oddflag) { static __attribute__((aligned(16))) uint16_t acc_r[8] = { 22840, 22840, 22840, 22840, 22840, 22840, 22840, 22840, }; static __attribute__((aligned(16))) uint16_t acc_g[8] = { 17312, 17312, 17312, 17312, 17312, 17312, 17312, 17312, }; static __attribute__((aligned(16))) uint16_t acc_b[8] = { 28832, 28832, 28832, 28832, 28832, 28832, 28832, 28832, }; /* * Registers: * q0, q1 : d0, d1, d2, d3 - are used for initial loading of YUV data * q2 : d4, d5 - are used for storing converted RGB data * q3 : d6, d7 - are used for temporary storage * * q4-q7 - reserved * * q8, q9 : d16, d17, d18, d19 - are used for expanded Y data * q10 : d20, d21 * q11 : d22, d23 * q12 : d24, d25 * q13 : d26, d27 * q13, q14, q15 - various constants (#16, #149, #204, #50, #104, #154) */ asm volatile ( ".fpu neon\n" /* Allow to build on targets not supporting 
neon, and force the object file * target to avoid bumping the final binary target */ ".arch armv7-a\n" ".object_arch armv4t\n" ".macro convert_macroblock size\n" /* load up to 16 source pixels */ ".if \\size == 16\n" "pld [%[y], #64]\n" "pld [%[u], #64]\n" "pld [%[v], #64]\n" "vld1.8 {d1}, [%[y]]!\n" "vld1.8 {d3}, [%[y]]!\n" "vld1.8 {d0}, [%[u]]!\n" "vld1.8 {d2}, [%[v]]!\n" ".elseif \\size == 8\n" "vld1.8 {d1}, [%[y]]!\n" "vld1.8 {d0[0]}, [%[u]]!\n" "vld1.8 {d0[1]}, [%[u]]!\n" "vld1.8 {d0[2]}, [%[u]]!\n" "vld1.8 {d0[3]}, [%[u]]!\n" "vld1.8 {d2[0]}, [%[v]]!\n" "vld1.8 {d2[1]}, [%[v]]!\n" "vld1.8 {d2[2]}, [%[v]]!\n" "vld1.8 {d2[3]}, [%[v]]!\n" ".elseif \\size == 4\n" "vld1.8 {d1[0]}, [%[y]]!\n" "vld1.8 {d1[1]}, [%[y]]!\n" "vld1.8 {d1[2]}, [%[y]]!\n" "vld1.8 {d1[3]}, [%[y]]!\n" "vld1.8 {d0[0]}, [%[u]]!\n" "vld1.8 {d0[1]}, [%[u]]!\n" "vld1.8 {d2[0]}, [%[v]]!\n" "vld1.8 {d2[1]}, [%[v]]!\n" ".elseif \\size == 2\n" "vld1.8 {d1[0]}, [%[y]]!\n" "vld1.8 {d1[1]}, [%[y]]!\n" "vld1.8 {d0[0]}, [%[u]]!\n" "vld1.8 {d2[0]}, [%[v]]!\n" ".elseif \\size == 1\n" "vld1.8 {d1[0]}, [%[y]]!\n" "vld1.8 {d0[0]}, [%[u]]!\n" "vld1.8 {d2[0]}, [%[v]]!\n" ".else\n" ".error \"unsupported macroblock size\"\n" ".endif\n" /* d1 - Y data (first 8 bytes) */ /* d3 - Y data (next 8 bytes) */ /* d0 - U data, d2 - V data */ /* split even and odd Y color components */ "vuzp.8 d1, d3\n" /* d1 - evenY, d3 - oddY */ /* clip upper and lower boundaries */ "vqadd.u8 q0, q0, q4\n" "vqadd.u8 q1, q1, q4\n" "vqsub.u8 q0, q0, q5\n" "vqsub.u8 q1, q1, q5\n" "vshr.u8 d4, d2, #1\n" /* d4 = V >> 1 */ "vmull.u8 q8, d1, d27\n" /* q8 = evenY * 149 */ "vmull.u8 q9, d3, d27\n" /* q9 = oddY * 149 */ "vld1.16 {d20, d21}, [%[acc_r], :128]\n" /* q10 - initialize accumulator for red */ "vsubw.u8 q10, q10, d4\n" /* red acc -= (V >> 1) */ "vmlsl.u8 q10, d2, d28\n" /* red acc -= V * 204 */ "vld1.16 {d22, d23}, [%[acc_g], :128]\n" /* q11 - initialize accumulator for green */ "vmlsl.u8 q11, d2, d30\n" /* green acc -= V * 104 */ "vmlsl.u8 
q11, d0, d29\n" /* green acc -= U * 50 */ "vld1.16 {d24, d25}, [%[acc_b], :128]\n" /* q12 - initialize accumulator for blue */ "vmlsl.u8 q12, d0, d30\n" /* blue acc -= U * 104 */ "vmlsl.u8 q12, d0, d31\n" /* blue acc -= U * 154 */ "vhsub.s16 q3, q8, q10\n" /* calculate even red components */ "vhsub.s16 q10, q9, q10\n" /* calculate odd red components */ "vqshrun.s16 d0, q3, #6\n" /* right shift, narrow and saturate even red components */ "vqshrun.s16 d3, q10, #6\n" /* right shift, narrow and saturate odd red components */ "vhadd.s16 q3, q8, q11\n" /* calculate even green components */ "vhadd.s16 q11, q9, q11\n" /* calculate odd green components */ "vqshrun.s16 d1, q3, #6\n" /* right shift, narrow and saturate even green components */ "vqshrun.s16 d4, q11, #6\n" /* right shift, narrow and saturate odd green components */ "vhsub.s16 q3, q8, q12\n" /* calculate even blue components */ "vhsub.s16 q12, q9, q12\n" /* calculate odd blue components */ "vqshrun.s16 d2, q3, #6\n" /* right shift, narrow and saturate even blue components */ "vqshrun.s16 d5, q12, #6\n" /* right shift, narrow and saturate odd blue components */ "vzip.8 d0, d3\n" /* join even and odd red components */ "vzip.8 d1, d4\n" /* join even and odd green components */ "vzip.8 d2, d5\n" /* join even and odd blue components */ "vshll.u8 q3, d0, #8\n\t" "vshll.u8 q8, d1, #8\n\t" "vshll.u8 q9, d2, #8\n\t" "vsri.u16 q3, q8, #5\t\n" "vsri.u16 q3, q9, #11\t\n" /* store pixel data to memory */ ".if \\size == 16\n" " vst1.16 {d6, d7}, [%[dst]]!\n" " vshll.u8 q3, d3, #8\n\t" " vshll.u8 q8, d4, #8\n\t" " vshll.u8 q9, d5, #8\n\t" " vsri.u16 q3, q8, #5\t\n" " vsri.u16 q3, q9, #11\t\n" " vst1.16 {d6, d7}, [%[dst]]!\n" ".elseif \\size == 8\n" " vst1.16 {d6, d7}, [%[dst]]!\n" ".elseif \\size == 4\n" " vst1.16 {d6}, [%[dst]]!\n" ".elseif \\size == 2\n" " vst1.16 {d6[0]}, [%[dst]]!\n" " vst1.16 {d6[1]}, [%[dst]]!\n" ".elseif \\size == 1\n" " vst1.16 {d6[0]}, [%[dst]]!\n" ".endif\n" ".endm\n" "vmov.u8 d8, #15\n" /* add this 
to U/V to saturate upper boundary */ "vmov.u8 d9, #20\n" /* add this to Y to saturate upper boundary */ "vmov.u8 d10, #31\n" /* sub this from U/V to saturate lower boundary */ "vmov.u8 d11, #36\n" /* sub this from Y to saturate lower boundary */ "vmov.u8 d26, #16\n" "vmov.u8 d27, #149\n" "vmov.u8 d28, #204\n" "vmov.u8 d29, #50\n" "vmov.u8 d30, #104\n" "vmov.u8 d31, #154\n" "cmp %[oddflag], #0\n" "beq 1f\n" "convert_macroblock 1\n" "sub %[n], %[n], #1\n" "1:\n" "subs %[n], %[n], #16\n" "blt 2f\n" "1:\n" "convert_macroblock 16\n" "subs %[n], %[n], #16\n" "bge 1b\n" "2:\n" "tst %[n], #8\n" "beq 3f\n" "convert_macroblock 8\n" "3:\n" "tst %[n], #4\n" "beq 4f\n" "convert_macroblock 4\n" "4:\n" "tst %[n], #2\n" "beq 5f\n" "convert_macroblock 2\n" "5:\n" "tst %[n], #1\n" "beq 6f\n" "convert_macroblock 1\n" "6:\n" ".purgem convert_macroblock\n" : [y] "+&r" (y), [u] "+&r" (u), [v] "+&r" (v), [dst] "+&r" (dst), [n] "+&r" (n) : [acc_r] "r" (&acc_r[0]), [acc_g] "r" (&acc_g[0]), [acc_b] "r" (&acc_b[0]), [oddflag] "r" (oddflag) : "cc", "memory", "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7", "d8", "d9", "d10", "d11", /* "d12", "d13", "d14", "d15", */ "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23", "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31" ); } void ConvertYCbCrToRGB565_neon( const uint8_t* y_buf, const uint8_t* u_buf, const uint8_t* v_buf, uint8_t* rgb_buf, int pic_width, int pic_height, int y_stride, int uv_stride, int rgb_stride, int yuv_type) { int x_shift; int y_shift; x_shift = (yuv_type != 444); //YUV 4:4:4 y_shift = (yuv_type == 420); //YUV 4:2:0 /* From Wiki: The Y'V12 format is essentially the same as Y'UV420p, but it has the U and V data reversed: the Y' values are followed by the V values, with the U values last. 
/*
 * Use NS_CLAMP to force a value (such as a preference) into a range.
 */
#define NS_CLAMP(x, low, high) (((x) > (high)) ? (high) : (((x) < (low)) ? (low) : (x)))

/*
 * Convert a single pixel from Y'CbCr to RGB565, in fixed point.
 * This uses the exact same formulas as the asm path. dither (0..3) selects a
 * rounding offset folded into the bias table.
 */
static uint16_t yu2rgb565(int y, int u, int v, int dither)
{
    /* Combines the constant Y'CbCr conversion offset with a rounding offset
       that depends on the dither parameter. */
    static const int DITHER_BIAS[4][3] = {
        {-14240,     8704,     -17696},
        {-14240+128, 8704+64,  -17696+128},
        {-14240+256, 8704+128, -17696+256},
        {-14240+384, 8704+192, -17696+384}
    };
    int r = NS_CLAMP((74*y + 102*v + DITHER_BIAS[dither][0]) >> 9, 0, 31);
    int g = NS_CLAMP((74*y - 25*u - 52*v + DITHER_BIAS[dither][1]) >> 8, 0, 63);
    int b = NS_CLAMP((74*y + 129*u + DITHER_BIAS[dither][2]) >> 9, 0, 31);
    return (uint16_t)(r << 11 | g << 5 | b);
}

/*
 * Convert one row of pixels to RGB565. x_shift is 1 when chroma is
 * horizontally subsampled (4:2:0 / 4:2:2), 0 for 4:4:4.
 */
void yuv_to_rgb565_row_c(uint16_t *dst,
                         const uint8_t *y,
                         const uint8_t *u,
                         const uint8_t *v,
                         int x_shift,
                         int pic_width)
{
    for (int x = 0; x < pic_width; x++) {
        dst[x] = yu2rgb565(y[x], u[x >> x_shift], v[x >> x_shift], 2); // Disable dithering for now.
    }
}

/*
 * Plain-C planar Y'CbCr -> RGB565 conversion.
 * yuv_type: 420, 422 or 444; selects the chroma subsampling in x and y.
 */
void ConvertYCbCrToRGB565_c(const uint8_t* y_buf,
                            const uint8_t* u_buf,
                            const uint8_t* v_buf,
                            uint8_t* rgb_buf,
                            int pic_width,
                            int pic_height,
                            int y_stride,
                            int uv_stride,
                            int rgb_stride,
                            int yuv_type)
{
    const int x_shift = (yuv_type != 444); // chroma subsampled in x unless 4:4:4
    const int y_shift = (yuv_type == 420); // chroma subsampled in y only for 4:2:0
    for (int i = 0; i < pic_height; i++) {
        const int yoffs = y_stride * i;
        const int uvoffs = uv_stride * (i >> y_shift);
        yuv_to_rgb565_row_c((uint16_t*)(rgb_buf + rgb_stride * i),
                            y_buf + yoffs,
                            u_buf + uvoffs,
                            v_buf + uvoffs,
                            x_shift,
                            pic_width);
    }
}

/*
 * Dispatch to the NEON or C implementation.
 */
void ConvertYCbCrToRGB565(const uint8_t* y_buf,
                          const uint8_t* u_buf,
                          const uint8_t* v_buf,
                          uint8_t* rgb_buf,
                          int pic_width,
                          int pic_height,
                          int y_stride,
                          int uv_stride,
                          int rgb_stride,
                          int yuv_type)
{
/* fix: guard was "#if HAVE_NEON" while the NEON implementation is only
   compiled under ARCH_ARM && HAVE_NEON, causing a link error when HAVE_NEON
   is defined on a non-ARM build; the guards now match. */
#if ARCH_ARM && HAVE_NEON
    ConvertYCbCrToRGB565_neon(y_buf, u_buf, v_buf, rgb_buf, pic_width, pic_height,
                              y_stride, uv_stride, rgb_stride, yuv_type);
#else
    ConvertYCbCrToRGB565_c(y_buf, u_buf, v_buf, rgb_buf, pic_width, pic_height,
                           y_stride, uv_stride, rgb_stride, yuv_type);
#endif
}
rgb_stride, int yuv_type); ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/encoder/Android.mk ================================================ LOCAL_PATH := $(call my-dir) PREBUILT_PATH := $(LOCAL_PATH)/../../../../../../prebuilt include $(CLEAR_VARS) LOCAL_MODULE := x264 LOCAL_SRC_FILES := $(PREBUILT_PATH)/$(TARGET_ARCH_ABI)/libx264.a include $(PREBUILT_STATIC_LIBRARY) ifneq ($(TARGET_ARCH_ABI),x86) ifneq ($(TARGET_ARCH_ABI),x86_64) include $(CLEAR_VARS) LOCAL_MODULE := qy265 LOCAL_SRC_FILES := $(PREBUILT_PATH)/$(TARGET_ARCH_ABI)/libqyencoder.a include $(PREBUILT_STATIC_LIBRARY) endif endif include $(CLEAR_VARS) LOCAL_LDLIBS += -llog LOCAL_MODULE := native-lib LOCAL_CONLYFLAGS += -std=c99 LOCAL_C_INCLUDES += $(PREBUILT_PATH)/include LOCAL_SRC_FILES += encoderwrapper.c LOCAL_STATIC_LIBRARIES += x264 qy265 gnustl_static cpufeatures LOCAL_DISABLE_FATAL_LINKER_WARNINGS := true include $(BUILD_SHARED_LIBRARY) $(call import-module,android/cpufeatures) ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/encoder/encoderwrapper.c ================================================ #include #include #include #include #include #include #include "x264.h" #include "qy265enc.h" #include "qy265def.h" #include "qyauth_env.h" #include "encoderwrapper.h" #include "log.h" #define LOG_TAG "encoder" typedef struct Encoder { FILE* in_file; float real_fps; float real_time; double avg_psnr; int frame_num; } Encoder; static inline Encoder* getInstance(jlong ptr) { return (Encoder*)(intptr_t) ptr; } jlong Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1init (JNIEnv *env, jobject instance) { Encoder* thiz = (Encoder*)calloc(1, sizeof(Encoder)); thiz->real_fps = 0; thiz->frame_num = 0; thiz->avg_psnr = 0; thiz->real_time =0; return (jlong)(intptr_t)thiz; } jint Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1open (JNIEnv *env, jobject instance, jlong 
ptr, jstring path_) { const char *path = (*env)->GetStringUTFChars(env, path_, 0); Encoder* thiz = getInstance(ptr); thiz->in_file = fopen(path, "r"); if (NULL == thiz->in_file) { LOGD("open file failed with %d", errno); (*env)->ReleaseStringUTFChars(env, path_, path); return -1; } thiz->real_fps = 0; thiz->frame_num = 0; thiz->avg_psnr = 0; thiz->real_time =0; (*env)->ReleaseStringUTFChars(env, path_, path); return 0; } jint Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1x264_1encode (JNIEnv *env, jobject instance, jlong ptr, jstring path_, jstring profile_, jstring delay_, jint width, jint height, jobject fps, jint bitrate, jint threads) { const char *path = (*env)->GetStringUTFChars(env, path_, 0); const char *profile = (*env)->GetStringUTFChars(env, profile_, 0); const char *delay = (*env)->GetStringUTFChars(env, delay_, 0); Encoder* thiz = getInstance(ptr); x264_param_t param; x264_picture_t pic; x264_picture_t pic_out; x264_t *h; int i_frame = 0; int i_frame_size; x264_nal_t *nal; int i_nal; clock_t clock_start, clock_end, clock_used; struct timeval tv_start, tv_end; double real_time; int64_t ms_used; FILE *out_file; double sum_psnr_y = 0.0; double sum_psnr_u = 0.0; double sum_psnr_v = 0.0; if ( NULL != path ) { out_file = fopen(path, "wb"); if ( NULL == out_file ) { LOGE("open output file failed with %d", errno); fclose(thiz->in_file); (*env)->ReleaseStringUTFChars(env, path_, path); (*env)->ReleaseStringUTFChars(env, profile_, profile); (*env)->ReleaseStringUTFChars(env, delay_, delay); return -1; } } LOGD("profile %s", profile); /* Get default params for preset/tuning */ if (strlen(delay) == 11 && strncmp(delay, "zerolatency", 11)) { if( x264_param_default_preset( ¶m, profile, "zerolatency" ) < 0 ) goto fail; } else { if( x264_param_default_preset( ¶m, profile, NULL ) < 0 ) goto fail; } /* Configure non-default params */ param.i_csp = X264_CSP_I420; param.i_width = width; param.i_height = height; param.b_vfr_input = 0; 
param.b_repeat_headers = 1; param.b_annexb = 1; if (strlen(delay) == 11 && strncmp(delay, "zerolatency", 11)) { param.i_bframe = 0; } else if (strlen(delay) == 13 && strncmp(delay, "livestreaming", 13)) { param.i_bframe = 3; } else if (strlen(delay) == 7 && strncmp(delay, "offline", 7)) { param.i_bframe = 7; } param.i_threads = threads; jclass floatClass = (*env)->FindClass(env, "java/lang/Float"); jmethodID floatMethod = (*env)->GetMethodID(env, floatClass, "floatValue", "()F"); jfloat val = (*env)->CallFloatMethod(env, fps, floatMethod); LOGD("x264 fps %.6f", val); param.i_fps_num = val; param.i_fps_den = 1; param.rc.i_bitrate = bitrate; param.rc.i_rc_method = X264_RC_ABR; param.analyse.b_psnr = 1; /* Apply profile restrictions. */ if( x264_param_apply_profile( ¶m, "high" ) < 0 ) goto fail; if( x264_picture_alloc( &pic, param.i_csp, param.i_width, param.i_height ) < 0 ) goto fail; h = x264_encoder_open( ¶m ); if( !h ) goto fail; int luma_size = param.i_width * param.i_height; int chroma_size = luma_size / 4; gettimeofday(&tv_start, NULL); clock_start = clock(); /* Encode frames */ for( ;; i_frame++ ) { /* Read input frame */ if( fread( pic.img.plane[0], 1, luma_size, thiz->in_file ) != luma_size ) break; if( fread( pic.img.plane[1], 1, chroma_size, thiz->in_file ) != chroma_size ) break; if( fread( pic.img.plane[2], 1, chroma_size, thiz->in_file ) != chroma_size ) break; pic.i_pts = i_frame; i_frame_size = x264_encoder_encode( h, &nal, &i_nal, &pic, &pic_out ); if( i_frame_size < 0 ) goto fail; else if( i_frame_size ) { if (param.analyse.b_psnr){ sum_psnr_y += pic_out.prop.f_psnr[0]; sum_psnr_u += pic_out.prop.f_psnr[1]; sum_psnr_v += pic_out.prop.f_psnr[2]; } if( !fwrite( nal->p_payload, i_frame_size, 1, out_file ) ) goto fail; } } /* Flush delayed frames */ while( x264_encoder_delayed_frames( h ) ) { i_frame_size = x264_encoder_encode( h, &nal, &i_nal, NULL, &pic_out ); if( i_frame_size < 0 ) goto fail; else if( i_frame_size ) { if (param.analyse.b_psnr){ 
sum_psnr_y += pic_out.prop.f_psnr[0]; sum_psnr_u += pic_out.prop.f_psnr[1]; sum_psnr_v += pic_out.prop.f_psnr[2]; } if( !fwrite( nal->p_payload, i_frame_size, 1, out_file ) ) goto fail; } } clock_end = clock(); gettimeofday(&tv_end, NULL); clock_used = clock_end - clock_start; ms_used = (int64_t)(clock_used * 1000.0 / CLOCKS_PER_SEC); real_time = (tv_end.tv_sec + (tv_end.tv_usec / 1000000.0)) - (tv_start.tv_sec + (tv_start.tv_usec / 1000000.0)); float realFPS = i_frame / real_time; double avg_psnr = (6*sum_psnr_y+sum_psnr_u+sum_psnr_v)/(8*i_frame); thiz->frame_num = i_frame; thiz->real_fps = realFPS; thiz->real_time = real_time; thiz->avg_psnr = avg_psnr; x264_encoder_close( h ); x264_picture_clean( &pic ); fclose(thiz->in_file); fclose(out_file); (*env)->ReleaseStringUTFChars(env, path_, path); (*env)->ReleaseStringUTFChars(env, profile_, profile); (*env)->ReleaseStringUTFChars(env, delay_, delay); return 0; fail: fclose(thiz->in_file); fclose(out_file); (*env)->ReleaseStringUTFChars(env, path_, path); (*env)->ReleaseStringUTFChars(env, profile_, profile); (*env)->ReleaseStringUTFChars(env, delay_, delay); return -1; } static double ksy265_psnr = 0; void ksy265log(const char* msg) { LOGD("ksy265 log: %s", msg); //psnr值出现在编码器的log中,形如"bitrate, psnr: 503.1069 40.4723 47.0057 45.9163" char* psnr = strstr(msg, "psnr"); if (psnr != NULL) { psnr += 4; char *p; const char* d = " :\t"; p = strtok(psnr, d); double y =0, u = 0, v = 0; //skip bitrate p = strtok(NULL, d); if (p != NULL) y = strtod(p, NULL); p = strtok(NULL, d); if (p != NULL) u = strtod(p, NULL); p = strtok(NULL, d); if (p != NULL) v = strtod(p, NULL); ksy265_psnr = (y*6 + u + v) / 8; LOGD("psnr %f, y %f , u %f, v %f \n", ksy265_psnr, y, u, v); } } jint Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1ksy265_1encoder (JNIEnv *env, jobject instance, jobject context, jlong ptr, jstring path_, jstring profile_, jstring delay_, jint width, jint height, jobject fps, jint bitrate, jint threads) { 
const char *path = (*env)->GetStringUTFChars(env, path_, 0); const char *profile = (*env)->GetStringUTFChars(env, profile_, 0); const char *delay = (*env)->GetStringUTFChars(env, delay_, 0); QY265EncConfig param; QY265YUV yuv; QY265Picture pic; QY265Picture pic_out; QY265Nal *nal; void *h; int i_frame = 0; int i_frame_size; int i_nal; clock_t clock_start, clock_end, clock_used; struct timeval tv_start, tv_end; double real_time; int64_t ms_used; FILE *out_file; int errorCode; Encoder *thiz = getInstance(ptr); if (NULL != path) { out_file = fopen(path, "w"); if (NULL == out_file) { perror("open output file"); fclose(thiz->in_file); (*env)->ReleaseStringUTFChars(env, path_, path); (*env)->ReleaseStringUTFChars(env, profile_, profile); (*env)->ReleaseStringUTFChars(env, delay_, delay); return -1; } } /* Get default params for preset/tuning */ if (QY265ConfigDefaultPreset(¶m, profile, NULL, delay) < 0) goto fail; param.picWidth = width; param.picHeight = height; param.threads = threads; jclass floatClass = (*env)->FindClass(env, "java/lang/Float"); jmethodID floatMethod = (*env)->GetMethodID(env, floatClass, "floatValue", "()F"); jfloat val = (*env)->CallFloatMethod(env, fps, floatMethod); LOGD("265 fps %.6f", val); param.frameRate = val; param.bitrateInkbps = bitrate; TCounterEnv* tCounterEnv = (TCounterEnv*) malloc(sizeof(TCounterEnv)); tCounterEnv->context = context; (*env)->GetJavaVM(env, &tCounterEnv->jvm); param.pAuth = tCounterEnv; param.calcPsnr = 1; QY265SetLogPrintf(ksy265log); yuv.pData[0] = (unsigned char *)malloc(param.picWidth * param.picHeight * 3/2); yuv.pData[1] = yuv.pData[0] + param.picWidth * param.picHeight; yuv.pData[2] = yuv.pData[0] + param.picWidth * param.picHeight * 5/4; yuv.iWidth = param.picWidth; yuv.iHeight = param.picHeight; yuv.iStride[0] = yuv.iWidth; yuv.iStride[1] = yuv.iStride[2] = yuv.iWidth/2; h = QY265EncoderOpen( ¶m , &errorCode); if( !h ) goto fail; pic.yuv = &yuv; memset(&pic_out,0,sizeof(pic_out)); int luma_size = 
param.picWidth * param.picHeight; int chroma_size = luma_size / 4; gettimeofday(&tv_start, NULL); clock_start = clock(); /* Encode frames */ for( ;; i_frame++ ) { /* Read input frame */ if( fread( pic.yuv->pData[0], 1, luma_size, thiz->in_file ) != luma_size ) break; if( fread( pic.yuv->pData[1], 1, chroma_size, thiz->in_file ) != chroma_size ) break; if( fread( pic.yuv->pData[2], 1, chroma_size, thiz->in_file ) != chroma_size ) break; pic.pts = i_frame; i_frame_size = QY265EncoderEncodeFrame( h, &nal, &i_nal, &pic, &pic_out, 0 ); if( i_frame_size < 0 ) goto fail; for(int i = 0; i < i_nal; i++){ if( !fwrite( nal[i].pPayload, nal[i].iSize, 1, out_file ) ) goto fail; } } /* Flush delayed frames */ while( QY265EncoderDelayedFrames( h ) ) { i_frame_size = QY265EncoderEncodeFrame( h, &nal, &i_nal, NULL, &pic_out, 0 ); if( i_frame_size < 0 ) goto fail; for(int i = 0; i < i_nal; i++){ if( !fwrite( nal[i].pPayload, nal[i].iSize, 1, out_file ) ) goto fail; } } clock_end = clock(); gettimeofday(&tv_end, NULL); clock_used = clock_end - clock_start; ms_used = (int64_t)(clock_used * 1000.0 / CLOCKS_PER_SEC); real_time = (tv_end.tv_sec + (tv_end.tv_usec / 1000000.0)) - (tv_start.tv_sec + (tv_start.tv_usec / 1000000.0)); float realFPS = i_frame / real_time; printf("%d frame encoded\n" "\ttime\tfps\n" "CPU\t%lldms\t%.2f\n" "Real\t%.3fs\t%.2f.\n", i_frame, ms_used, i_frame * 1000.0 / ms_used, real_time, realFPS); QY265EncoderClose( h ); thiz->frame_num = i_frame; thiz->real_fps = realFPS; thiz->real_time = real_time; thiz->avg_psnr = ksy265_psnr; free(yuv.pData[0]); fclose(thiz->in_file); fclose(out_file); (*env)->ReleaseStringUTFChars(env, path_, path); (*env)->ReleaseStringUTFChars(env, profile_, profile); (*env)->ReleaseStringUTFChars(env, delay_, delay); return 0; fail: fclose(thiz->in_file); fclose(out_file); (*env)->ReleaseStringUTFChars(env, path_, path); (*env)->ReleaseStringUTFChars(env, profile_, profile); (*env)->ReleaseStringUTFChars(env, delay_, delay); return -1; } 
jfloat Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1get_1real_1fps (JNIEnv *env, jobject instance, jlong ptr) { Encoder* thiz = getInstance(ptr); return thiz->real_fps; } jint Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1get_1encoded_1frame_1num (JNIEnv *env, jobject instance,jlong ptr) { Encoder* thiz = getInstance(ptr); return thiz->frame_num; } JNIEXPORT jstring JNICALL Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1get_1x264_1version( JNIEnv *env, jobject instance) { return (*env)->NewStringUTF(env, X264_POINTVER); } JNIEXPORT jstring JNICALL Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1get_1ksy265_1version( JNIEnv *env, jobject instance) { return (*env)->NewStringUTF(env, strLibQy265Version); } JNIEXPORT jfloat JNICALL Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1get_1real_1time(JNIEnv *env, jobject instance, jlong ptr) { Encoder* thiz = getInstance(ptr); return thiz->real_time; } JNIEXPORT jfloat JNICALL Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1get_1psnr(JNIEnv *env, jobject instance, jlong ptr) { Encoder* thiz = getInstance(ptr); return thiz->avg_psnr; } ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/encoder/encoderwrapper.h ================================================ /* DO NOT EDIT THIS FILE - it is machine generated */ #include /* Header for class com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper */ #ifndef _Included_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper #define _Included_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper #ifdef __cplusplus extern "C" { #endif /* * Class: com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper * Method: native_init * Signature: ()V */ JNIEXPORT jlong JNICALL Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1init(JNIEnv *, jobject); /* * Class: 
com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper * Method: native_open * Signature: (JLjava/lang/String;)I */ JNIEXPORT jint JNICALL Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1open (JNIEnv *, jobject, jlong, jstring); /* * Class: com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper * Method: native_x264_encode * Signature: (JLjava/lang/String;Ljava/lang/String;Ljava/lang/String;IILjava/lang/Float;II)I */ JNIEXPORT jint JNICALL Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1x264_1encode (JNIEnv *, jobject, jlong, jstring, jstring, jstring, jint, jint, jobject, jint, jint); /* * Class: com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper * Method: native_ksy265_encoder * Signature: (JLjava/lang/String;Ljava/lang/String;Ljava/lang/String;IILjava/lang/Float;II)I */ JNIEXPORT jint JNICALL Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1ksy265_1encoder (JNIEnv *, jobject, jobject , jlong, jstring, jstring, jstring, jint, jint, jobject, jint, jint); /* * Class: com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper * Method: native_get_real_fps * Signature: (J)F */ JNIEXPORT jfloat JNICALL Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1get_1real_1fps(JNIEnv *, jobject, jlong); /* * Class: com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper * Method: native_get_encoded_frame_num * Signature: (J)I */ JNIEXPORT jint JNICALL Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1get_1encoded_1frame_1num( JNIEnv *, jobject, jlong); #ifdef __cplusplus } #endif #endif ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/jni/encoder/log.h ================================================ // // Created by sujia on 2017/3/29. // #ifndef KSY265CODECDEMO_LOG_H #define KSY265CODECDEMO_LOG_H #include #define LOGD(fmt, args...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, fmt, ##args) #define LOGI(fmt, args...) 
__android_log_print(ANDROID_LOG_INFO, LOG_TAG, fmt, ##args) #define LOGW(fmt, args...) __android_log_print(ANDROID_LOG_WARN, LOG_TAG, fmt, ##args) #define LOGE(fmt, args...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, fmt, ##args) #endif //KSY265CODECDEMO_LOG_H ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/res/drawable/selector_tab_background.xml ================================================ ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/res/drawable/tab_home_btn.xml ================================================ ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/res/drawable/tab_view_btn.xml ================================================ ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/res/layout/activity_main.xml ================================================ ================================================ FILE: Android_demo/KSY265CodecDemo/app/src/main/res/layout/decoder_settings.xml ================================================ >