[
  {
    "path": "Android_demo/.gitignore",
    "content": "KSY265CodecDemo/.idea\nKSY265CodecDemo/app/src/main/libs\nKSY265CodecDemo/app/src/main/obj\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/.gitignore",
    "content": "*.iml\n.gradle\n/local.properties\n/.idea/workspace.xml\n/.idea/libraries\n.DS_Store\n/build\n/captures\n.externalNativeBuild\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/.gitignore",
    "content": "/build\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/CMakeLists.txt",
    "content": "# For more information about using CMake with Android Studio, read the\n# documentation: https://d.android.com/studio/projects/add-native-code.html\n\n# Sets the minimum version of CMake required to build the native library.\n\ncmake_minimum_required(VERSION 3.4.1)\n\n# Creates and names a library, sets it as either STATIC\n# or SHARED, and provides the relative paths to its source code.\n# You can define multiple libraries, and CMake builds them for you.\n# Gradle automatically packages shared libraries with your APK.\n\nadd_library( # Sets the name of the library.\n             native-lib\n\n             # Sets the library as a shared library.\n             SHARED\n\n             # Provides a relative path to your source file(s).\n             src/main/jni/encoderwrapper.c )\n\n# Specifies a path to native header files.\ninclude_directories(../../prebuilt/include )\n\n# Searches for a specified prebuilt library and stores the path as a\n# variable. Because CMake includes system libraries in the search path by\n# default, you only need to specify the name of the public NDK library\n# you want to add. CMake verifies that the library exists before\n# completing its build.\n\nfind_library( # Sets the name of the path variable.\n              log-lib\n\n              # Specifies the name of the NDK library that\n              # you want CMake to locate.\n              log )\n\n# Specifies libraries CMake should link to your target library. You\n# can link multiple libraries, such as libraries you define in this\n# build script, prebuilt third-party libraries, or system libraries.\n\ntarget_link_libraries( # Specifies the target library.\n                       native-lib\n\n                       # Links the target library to the log library\n                       # included in the NDK.\n                       ${log-lib}\n\n                       #x264\n                       ${CMAKE_CURRENT_SOURCE_DIR}/../../prebuilt/${ANDROID_ABI}/libx264.a\n\n                       #ksy265\n                       ${CMAKE_CURRENT_SOURCE_DIR}/../../prebuilt/${ANDROID_ABI}/libqy265.a\n                       ${CMAKE_CURRENT_SOURCE_DIR}/../../prebuilt/${ANDROID_ABI}/libcpufeatures.a)"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/build.gradle",
    "content": "apply plugin: 'com.android.application'\n\nandroid {\n    compileSdkVersion 25\n    buildToolsVersion \"25.0.2\"\n    defaultConfig {\n        applicationId \"com.ksyun.media.ksy265codec.demo\"\n        minSdkVersion 14\n        targetSdkVersion 17\n        versionCode 1\n        versionName \"1.0.0\"\n        externalNativeBuild {\n            ndkBuild {\n                //abiFilters 'x86', 'armeabi', 'armeabi-v7a', 'arm64-v8a'\n                abiFilters \"armeabi-v7a\"\n            }\n        }\n    }\n\n    buildTypes {\n        release {\n            minifyEnabled false\n            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'\n        }\n        debug {\n            jniDebuggable true\n        }\n    }\n\n    sourceSets{\n        main{\n            jniLibs.srcDirs 'src/main/libs'\n            jni.srcDirs = []\n        }\n    }\n    externalNativeBuild {\n        ndkBuild {\n            path \"src/main/jni/Android.mk\"\n        }\n    }\n}\n\ndependencies {\n    compile fileTree(include: ['*.jar'], dir: 'libs')\n    compile 'com.android.support:appcompat-v7:25.3.0'\n    compile 'com.android.support:percent:25.3.0'\n}\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/proguard-rules.pro",
    "content": "# Add project specific ProGuard rules here.\n# By default, the flags in this file are appended to flags specified\n# in /Users/sujia/Library/Android/sdk/tools/proguard/proguard-android.txt\n# You can edit the include path and order by changing the proguardFiles\n# directive in build.gradle.\n#\n# For more details, see\n#   http://developer.android.com/guide/developing/tools/proguard.html\n\n# Add any project specific keep options here:\n\n# If your project uses WebView with JS, uncomment the following\n# and specify the fully qualified class name to the JavaScript interface\n# class:\n#-keepclassmembers class fqcn.of.javascript.interface.for.webview {\n#   public *;\n#}\n\n# Uncomment this to preserve the line number information for\n# debugging stack traces.\n#-keepattributes SourceFile,LineNumberTable\n\n# If you keep the line number information, uncomment this to\n# hide the original source file name.\n#-renamesourcefileattribute SourceFile\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/AndroidManifest.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    package=\"com.ksyun.media.ksy265codec.demo\">\n\n    <uses-permission android:name=\"android.permission.READ_EXTERNAL_STORAGE\" />\n    <uses-permission android:name=\"android.permission.WRITE_EXTERNAL_STORAGE\" />\n    <uses-feature android:glEsVersion=\"0x00020000\"/>\n\n    <application\n        android:allowBackup=\"true\"\n        android:icon=\"@mipmap/ic_launcher\"\n        android:label=\"@string/app_name\"\n        android:roundIcon=\"@mipmap/ic_launcher_round\"\n        android:supportsRtl=\"true\"\n        android:theme=\"@style/AppTheme\"\n        android:configChanges=\"orientation\"\n        android:screenOrientation=\"portrait\"\n        android:name=\".ui.KSY265CodecDemoApp\">\n        <activity android:name=\".ui.MainActivity\"\n            android:configChanges=\"orientation\"\n            android:screenOrientation=\"portrait\">\n            <intent-filter>\n                <action android:name=\"android.intent.action.MAIN\" />\n\n                <category android:name=\"android.intent.category.LAUNCHER\" />\n            </intent-filter>\n        </activity>\n\n        <activity\n            android:name=\"com.ipaulpro.afilechooser.FileChooserActivity\"\n            android:enabled=\"@bool/use_activity\"\n            android:exported=\"true\"\n            android:icon=\"@drawable/ic_chooser\"\n            android:label=\"@string/choose_file\"\n            android:theme=\"@style/ChooserTheme1\"\n            android:configChanges=\"orientation\"\n            android:screenOrientation=\"portrait\" >\n            <intent-filter>\n                <action android:name=\"android.intent.action.GET_CONTENT\" />\n\n                <category android:name=\"android.intent.category.DEFAULT\" />\n                <category android:name=\"android.intent.category.OPENABLE\" />\n\n                <data android:mimeType=\"*/*\" />\n            </intent-filter>\n        </activity>\n\n        <provider\n            android:name=\"com.ianhanniballake.localstorage.LocalStorageProvider\"\n            android:authorities=\"com.ianhanniballake.localstorage.documents\"\n            android:enabled=\"@bool/use_provider\"\n            android:exported=\"true\"\n            android:grantUriPermissions=\"true\"\n            android:permission=\"android.permission.MANAGE_DOCUMENTS\" >\n            <intent-filter>\n                <action android:name=\"android.content.action.DOCUMENTS_PROVIDER\" />\n            </intent-filter>\n        </provider>\n    </application>\n\n</manifest>"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/java/com/ianhanniballake/localstorage/LocalStorageProvider.java",
    "content": "\npackage com.ianhanniballake.localstorage;\n\nimport android.content.res.AssetFileDescriptor;\nimport android.database.Cursor;\nimport android.database.MatrixCursor;\nimport android.graphics.Bitmap;\nimport android.graphics.BitmapFactory;\nimport android.graphics.Point;\nimport android.os.CancellationSignal;\nimport android.os.Environment;\nimport android.os.ParcelFileDescriptor;\nimport android.provider.DocumentsContract.Document;\nimport android.provider.DocumentsContract.Root;\nimport android.provider.DocumentsProvider;\nimport android.util.Log;\nimport android.webkit.MimeTypeMap;\n\nimport com.ksyun.media.ksy265codec.demo.R;\n\nimport java.io.File;\nimport java.io.FileNotFoundException;\nimport java.io.FileOutputStream;\nimport java.io.IOException;\n\npublic class LocalStorageProvider extends DocumentsProvider {\n\n    public static final String AUTHORITY = \"com.ianhanniballake.localstorage.documents\";\n\n    /**\n     * Default root projection: everything but Root.COLUMN_MIME_TYPES\n     */\n    private final static String[] DEFAULT_ROOT_PROJECTION = new String[] {\n            Root.COLUMN_ROOT_ID,\n            Root.COLUMN_FLAGS, Root.COLUMN_TITLE, Root.COLUMN_DOCUMENT_ID, Root.COLUMN_ICON,\n            Root.COLUMN_AVAILABLE_BYTES\n    };\n    /**\n     * Default document projection: everything but Document.COLUMN_ICON and\n     * Document.COLUMN_SUMMARY\n     */\n    private final static String[] DEFAULT_DOCUMENT_PROJECTION = new String[] {\n            Document.COLUMN_DOCUMENT_ID,\n            Document.COLUMN_DISPLAY_NAME, Document.COLUMN_FLAGS, Document.COLUMN_MIME_TYPE,\n            Document.COLUMN_SIZE,\n            Document.COLUMN_LAST_MODIFIED\n    };\n\n    @Override\n    public Cursor queryRoots(final String[] projection) throws FileNotFoundException {\n        // Create a cursor with either the requested fields, or the default\n        // projection if \"projection\" is null.\n        final MatrixCursor result = new MatrixCursor(projection != null ? projection\n                : DEFAULT_ROOT_PROJECTION);\n        // Add Home directory\n        File homeDir = Environment.getExternalStorageDirectory();\n        final MatrixCursor.RowBuilder row = result.newRow();\n        // These columns are required\n        row.add(Root.COLUMN_ROOT_ID, homeDir.getAbsolutePath());\n        row.add(Root.COLUMN_DOCUMENT_ID, homeDir.getAbsolutePath());\n        row.add(Root.COLUMN_TITLE, getContext().getString(R.string.internal_storage));\n        row.add(Root.COLUMN_FLAGS, Root.FLAG_LOCAL_ONLY | Root.FLAG_SUPPORTS_CREATE);\n        row.add(Root.COLUMN_ICON, R.drawable.ic_provider);\n        // These columns are optional\n        row.add(Root.COLUMN_AVAILABLE_BYTES, homeDir.getFreeSpace());\n        // Root.COLUMN_MIME_TYPE is another optional column and useful if you\n        // have multiple roots with different\n        // types of mime types (roots that don't match the requested mime type\n        // are automatically hidden)\n        return result;\n    }\n\n    @Override\n    public String createDocument(final String parentDocumentId, final String mimeType,\n                                 final String displayName) throws FileNotFoundException {\n        File newFile = new File(parentDocumentId, displayName);\n        try {\n            newFile.createNewFile();\n            return newFile.getAbsolutePath();\n        } catch (IOException e) {\n            Log.e(LocalStorageProvider.class.getSimpleName(), \"Error creating new file \" + newFile);\n        }\n        return null;\n    }\n\n    @Override\n    public AssetFileDescriptor openDocumentThumbnail(final String documentId, final Point sizeHint,\n                                                     final CancellationSignal signal) throws FileNotFoundException {\n        // Assume documentId points to an image file. Build a thumbnail no\n        // larger than twice the sizeHint\n        BitmapFactory.Options options = new BitmapFactory.Options();\n        options.inJustDecodeBounds = true;\n        BitmapFactory.decodeFile(documentId, options);\n        final int targetHeight = 2 * sizeHint.y;\n        final int targetWidth = 2 * sizeHint.x;\n        final int height = options.outHeight;\n        final int width = options.outWidth;\n        options.inSampleSize = 1;\n        if (height > targetHeight || width > targetWidth) {\n            final int halfHeight = height / 2;\n            final int halfWidth = width / 2;\n            // Calculate the largest inSampleSize value that is a power of 2 and\n            // keeps both\n            // height and width larger than the requested height and width.\n            while ((halfHeight / options.inSampleSize) > targetHeight\n                    || (halfWidth / options.inSampleSize) > targetWidth) {\n                options.inSampleSize *= 2;\n            }\n        }\n        options.inJustDecodeBounds = false;\n        Bitmap bitmap = BitmapFactory.decodeFile(documentId, options);\n        // Write out the thumbnail to a temporary file\n        File tempFile = null;\n        FileOutputStream out = null;\n        try {\n            tempFile = File.createTempFile(\"thumbnail\", null, getContext().getCacheDir());\n            out = new FileOutputStream(tempFile);\n            bitmap.compress(Bitmap.CompressFormat.PNG, 90, out);\n        } catch (IOException e) {\n            Log.e(LocalStorageProvider.class.getSimpleName(), \"Error writing thumbnail\", e);\n            return null;\n        } finally {\n            if (out != null)\n                try {\n                    out.close();\n                } catch (IOException e) {\n                    Log.e(LocalStorageProvider.class.getSimpleName(), \"Error closing thumbnail\", e);\n                }\n        }\n        // It appears the Storage Framework UI caches these results quite\n        // aggressively so there is little reason to\n        // write your own caching layer beyond what you need to return a single\n        // AssetFileDescriptor\n        return new AssetFileDescriptor(ParcelFileDescriptor.open(tempFile,\n                ParcelFileDescriptor.MODE_READ_ONLY), 0,\n                AssetFileDescriptor.UNKNOWN_LENGTH);\n    }\n\n    @Override\n    public Cursor queryChildDocuments(final String parentDocumentId, final String[] projection,\n                                      final String sortOrder) throws FileNotFoundException {\n        // Create a cursor with either the requested fields, or the default\n        // projection if \"projection\" is null.\n        final MatrixCursor result = new MatrixCursor(projection != null ? projection\n                : DEFAULT_DOCUMENT_PROJECTION);\n        final File parent = new File(parentDocumentId);\n        for (File file : parent.listFiles()) {\n            // Don't show hidden files/folders\n            if (!file.getName().startsWith(\".\")) {\n                // Adds the file's display name, MIME type, size, and so on.\n                includeFile(result, file);\n            }\n        }\n        return result;\n    }\n\n    @Override\n    public Cursor queryDocument(final String documentId, final String[] projection)\n            throws FileNotFoundException {\n        // Create a cursor with either the requested fields, or the default\n        // projection if \"projection\" is null.\n        final MatrixCursor result = new MatrixCursor(projection != null ? projection\n                : DEFAULT_DOCUMENT_PROJECTION);\n        includeFile(result, new File(documentId));\n        return result;\n    }\n\n    private void includeFile(final MatrixCursor result, final File file)\n            throws FileNotFoundException {\n        final MatrixCursor.RowBuilder row = result.newRow();\n        // These columns are required\n        row.add(Document.COLUMN_DOCUMENT_ID, file.getAbsolutePath());\n        row.add(Document.COLUMN_DISPLAY_NAME, file.getName());\n        String mimeType = getDocumentType(file.getAbsolutePath());\n        row.add(Document.COLUMN_MIME_TYPE, mimeType);\n        int flags = file.canWrite() ? Document.FLAG_SUPPORTS_DELETE | Document.FLAG_SUPPORTS_WRITE\n                : 0;\n        // We only show thumbnails for image files - expect a call to\n        // openDocumentThumbnail for each file that has\n        // this flag set\n        if (mimeType.startsWith(\"image/\"))\n            flags |= Document.FLAG_SUPPORTS_THUMBNAIL;\n        row.add(Document.COLUMN_FLAGS, flags);\n        // COLUMN_SIZE is required, but can be null\n        row.add(Document.COLUMN_SIZE, file.length());\n        // These columns are optional\n        row.add(Document.COLUMN_LAST_MODIFIED, file.lastModified());\n        // Document.COLUMN_ICON can be a resource id identifying a custom icon.\n        // The system provides default icons\n        // based on mime type\n        // Document.COLUMN_SUMMARY is optional additional information about the\n        // file\n    }\n\n    @Override\n    public String getDocumentType(final String documentId) throws FileNotFoundException {\n        File file = new File(documentId);\n        if (file.isDirectory())\n            return Document.MIME_TYPE_DIR;\n        // From FileProvider.getType(Uri)\n        final int lastDot = file.getName().lastIndexOf('.');\n        if (lastDot >= 0) {\n            final String extension = file.getName().substring(lastDot + 1);\n            final String mime = MimeTypeMap.getSingleton().getMimeTypeFromExtension(extension);\n            if (mime != null) {\n                return mime;\n            }\n        }\n        return \"application/octet-stream\";\n    }\n\n    @Override\n    public void deleteDocument(final String documentId) throws FileNotFoundException {\n        new File(documentId).delete();\n    }\n\n    @Override\n    public ParcelFileDescriptor openDocument(final String documentId, final String mode,\n                                             final CancellationSignal signal) throws FileNotFoundException {\n        File file = new File(documentId);\n        final boolean isWrite = (mode.indexOf('w') != -1);\n        if (isWrite) {\n            return ParcelFileDescriptor.open(file, ParcelFileDescriptor.MODE_READ_WRITE);\n        } else {\n            return ParcelFileDescriptor.open(file, ParcelFileDescriptor.MODE_READ_ONLY);\n        }\n    }\n\n    @Override\n    public boolean onCreate() {\n        return true;\n    }\n}\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/java/com/ipaulpro/afilechooser/FileChooserActivity.java",
    "content": "/*\n * Copyright (C) 2013 Paul Burke\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *      http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage com.ipaulpro.afilechooser;\n\nimport android.app.ActionBar;\nimport android.content.BroadcastReceiver;\nimport android.content.Context;\nimport android.content.Intent;\nimport android.content.IntentFilter;\nimport android.net.Uri;\nimport android.os.Build;\nimport android.os.Bundle;\nimport android.os.Environment;\nimport android.support.v4.app.FragmentActivity;\nimport android.support.v4.app.FragmentManager;\nimport android.support.v4.app.FragmentManager.BackStackEntry;\nimport android.support.v4.app.FragmentManager.OnBackStackChangedListener;\nimport android.support.v4.app.FragmentTransaction;\nimport android.view.Menu;\nimport android.view.MenuItem;\nimport android.widget.Toast;\n\nimport com.ksyun.media.ksy265codec.demo.R;\n\nimport java.io.File;\n\n/**\n * Main Activity that handles the FileListFragments\n *\n * @version 2013-06-25\n * @author paulburke (ipaulpro)\n */\npublic class FileChooserActivity extends FragmentActivity implements\n        OnBackStackChangedListener, FileListFragment.Callbacks {\n\n    public static final String PATH = \"path\";\n    public static final String EXTERNAL_BASE_PATH = Environment\n            .getExternalStorageDirectory().getAbsolutePath();\n\n    private static final boolean HAS_ACTIONBAR = Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB;\n\n    private FragmentManager mFragmentManager;\n    private BroadcastReceiver mStorageListener = new BroadcastReceiver() {\n        @Override\n        public void onReceive(Context context, Intent intent) {\n            Toast.makeText(context, R.string.storage_removed, Toast.LENGTH_LONG).show();\n            finishWithResult(null);\n        }\n    };\n\n    private String mPath;\n\n    @Override\n    protected void onCreate(Bundle savedInstanceState) {\n        super.onCreate(savedInstanceState);\n\n        mFragmentManager = getSupportFragmentManager();\n        mFragmentManager.addOnBackStackChangedListener(this);\n\n        if (savedInstanceState == null) {\n            mPath = EXTERNAL_BASE_PATH;\n            addFragment();\n        } else {\n            mPath = savedInstanceState.getString(PATH);\n        }\n\n        setTitle(mPath);\n    }\n\n    @Override\n    protected void onPause() {\n        super.onPause();\n\n        unregisterStorageListener();\n    }\n\n    @Override\n    protected void onResume() {\n        super.onResume();\n\n        registerStorageListener();\n    }\n\n    @Override\n    protected void onSaveInstanceState(Bundle outState) {\n        super.onSaveInstanceState(outState);\n\n        outState.putString(PATH, mPath);\n    }\n\n    @Override\n    public void onBackStackChanged() {\n\n        int count = mFragmentManager.getBackStackEntryCount();\n        if (count > 0) {\n            BackStackEntry fragment = mFragmentManager.getBackStackEntryAt(count - 1);\n            mPath = fragment.getName();\n        } else {\n            mPath = EXTERNAL_BASE_PATH;\n        }\n\n        setTitle(mPath);\n        if (HAS_ACTIONBAR)\n            invalidateOptionsMenu();\n    }\n\n    @Override\n    public boolean onCreateOptionsMenu(Menu menu) {\n        if (HAS_ACTIONBAR) {\n            boolean hasBackStack = mFragmentManager.getBackStackEntryCount() > 0;\n\n            ActionBar actionBar = getActionBar();\n            actionBar.setDisplayHomeAsUpEnabled(hasBackStack);\n            actionBar.setHomeButtonEnabled(hasBackStack);\n        }\n\n        return true;\n    }\n\n    @Override\n    public boolean onOptionsItemSelected(MenuItem item) {\n        switch (item.getItemId()) {\n            case android.R.id.home:\n                mFragmentManager.popBackStack();\n                return true;\n        }\n\n        return super.onOptionsItemSelected(item);\n    }\n\n    /**\n     * Add the initial Fragment with given path.\n     */\n    private void addFragment() {\n        FileListFragment fragment = FileListFragment.newInstance(mPath);\n        mFragmentManager.beginTransaction()\n                .add(android.R.id.content, fragment).commit();\n    }\n\n    /**\n     * \"Replace\" the existing Fragment with a new one using given path. We're\n     * really adding a Fragment to the back stack.\n     *\n     * @param file The file (directory) to display.\n     */\n    private void replaceFragment(File file) {\n        mPath = file.getAbsolutePath();\n\n        FileListFragment fragment = FileListFragment.newInstance(mPath);\n        mFragmentManager.beginTransaction()\n                .replace(android.R.id.content, fragment)\n                .setTransition(FragmentTransaction.TRANSIT_FRAGMENT_OPEN)\n                .addToBackStack(mPath).commit();\n    }\n\n    /**\n     * Finish this Activity with a result code and URI of the selected file.\n     *\n     * @param file The file selected.\n     */\n    private void finishWithResult(File file) {\n        if (file != null) {\n            Uri uri = Uri.fromFile(file);\n            setResult(RESULT_OK, new Intent().setData(uri));\n            finish();\n        } else {\n            setResult(RESULT_CANCELED);\n            finish();\n        }\n    }\n\n    /**\n     * Called when the user selects a File\n     *\n     * @param file The file that was selected\n     */\n    @Override\n    public void onFileSelected(File file) {\n        if (file != null) {\n            if (file.isDirectory()) {\n                replaceFragment(file);\n            } else {\n                finishWithResult(file);\n            }\n        } else {\n            Toast.makeText(FileChooserActivity.this, R.string.error_selecting_file,\n                    Toast.LENGTH_SHORT).show();\n        }\n    }\n\n    /**\n     * Register the external storage BroadcastReceiver.\n     */\n    private void registerStorageListener() {\n        IntentFilter filter = new IntentFilter();\n        filter.addAction(Intent.ACTION_MEDIA_REMOVED);\n        registerReceiver(mStorageListener, filter);\n    }\n\n    /**\n     * Unregister the external storage BroadcastReceiver.\n     */\n    private void unregisterStorageListener() {\n        unregisterReceiver(mStorageListener);\n    }\n}\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/java/com/ipaulpro/afilechooser/FileListAdapter.java",
    "content": "/*\n * Copyright (C) 2012 Paul Burke\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *      http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage com.ipaulpro.afilechooser;\n\nimport android.content.Context;\nimport android.view.LayoutInflater;\nimport android.view.View;\nimport android.view.ViewGroup;\nimport android.widget.BaseAdapter;\nimport android.widget.TextView;\n\nimport com.ksyun.media.ksy265codec.demo.R;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.List;\n\n/**\n * List adapter for Files.\n * \n * @version 2013-12-11\n * @author paulburke (ipaulpro)\n */\npublic class FileListAdapter extends BaseAdapter {\n\n    private final static int ICON_FOLDER = R.drawable.ic_folder;\n    private final static int ICON_FILE = R.drawable.ic_file;\n\n    private final LayoutInflater mInflater;\n\n    private List<File> mData = new ArrayList<File>();\n\n    public FileListAdapter(Context context) {\n        mInflater = LayoutInflater.from(context);\n    }\n\n    public void add(File file) {\n        mData.add(file);\n        notifyDataSetChanged();\n    }\n\n    public void remove(File file) {\n        mData.remove(file);\n        notifyDataSetChanged();\n    }\n\n    public void insert(File file, int index) {\n        mData.add(index, file);\n        notifyDataSetChanged();\n    }\n\n    public void clear() {\n        mData.clear();\n        notifyDataSetChanged();\n    }\n\n    @Override\n    public File getItem(int position) {\n        return mData.get(position);\n    }\n\n    @Override\n    public long getItemId(int position) {\n        return position;\n    }\n\n    @Override\n    public int getCount() {\n        return mData.size();\n    }\n\n    public List<File> getListItems() {\n        return mData;\n    }\n\n    /**\n     * Set the list items without notifying on the clear. This prevents loss of\n     * scroll position.\n     *\n     * @param data\n     */\n    public void setListItems(List<File> data) {\n        mData = data;\n        notifyDataSetChanged();\n    }\n\n    @Override\n    public View getView(int position, View convertView, ViewGroup parent) {\n        View row = convertView;\n\n        if (row == null)\n            row = mInflater.inflate(R.layout.file, parent, false);\n\n        TextView view = (TextView) row;\n\n        // Get the file at the current position\n        final File file = getItem(position);\n\n        // Set the TextView as the file name\n        view.setText(file.getName());\n\n        // If the item is not a directory, use the file icon\n        int icon = file.isDirectory() ? ICON_FOLDER : ICON_FILE;\n        view.setCompoundDrawablesWithIntrinsicBounds(icon, 0, 0, 0);\n\n        return row;\n    }\n\n}"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/java/com/ipaulpro/afilechooser/FileListFragment.java",
    "content": "/*\n * Copyright (C) 2013 Paul Burke\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *      http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage com.ipaulpro.afilechooser;\n\nimport android.app.Activity;\nimport android.os.Bundle;\nimport android.os.Environment;\nimport android.support.v4.app.ListFragment;\nimport android.support.v4.app.LoaderManager;\nimport android.support.v4.content.Loader;\nimport android.view.View;\nimport android.widget.ListView;\n\nimport com.ksyun.media.ksy265codec.demo.R;\n\nimport java.io.File;\nimport java.util.List;\n\n/**\n * Fragment that displays a list of Files in a given path.\n * \n * @version 2013-12-11\n * @author paulburke (ipaulpro)\n */\npublic class FileListFragment extends ListFragment implements\n        LoaderManager.LoaderCallbacks<List<File>> {\n\n    /**\n     * Interface to listen for events.\n     */\n    public interface Callbacks {\n        /**\n         * Called when a file is selected from the list.\n         *\n         * @param file The file selected\n         */\n        public void onFileSelected(File file);\n    }\n\n    private static final int LOADER_ID = 0;\n\n    private FileListAdapter mAdapter;\n    private String mPath;\n\n    private Callbacks mListener;\n\n    /**\n     * Create a new instance with the given file path.\n     *\n     * @param path The absolute path of the file (directory) to display.\n     * @return A new Fragment with the given file path.\n     */\n    public static FileListFragment newInstance(String path) {\n        FileListFragment fragment = new FileListFragment();\n        Bundle args = new Bundle();\n        args.putString(FileChooserActivity.PATH, path);\n        fragment.setArguments(args);\n\n        return fragment;\n    }\n\n    @Override\n    public void onAttach(Activity activity) {\n        super.onAttach(activity);\n\n        try {\n            mListener = (Callbacks) activity;\n        } catch (ClassCastException e) {\n            throw new ClassCastException(activity.toString()\n                    + \" must implement FileListFragment.Callbacks\");\n        }\n    }\n\n    @Override\n    public void onCreate(Bundle savedInstanceState) {\n        super.onCreate(savedInstanceState);\n\n        mAdapter = new FileListAdapter(getActivity());\n        mPath = getArguments() != null ? getArguments().getString(\n                FileChooserActivity.PATH) : Environment\n                .getExternalStorageDirectory().getAbsolutePath();\n    }\n\n    @Override\n    public void onActivityCreated(Bundle savedInstanceState) {\n        setEmptyText(getString(R.string.empty_directory));\n        setListAdapter(mAdapter);\n        setListShown(false);\n\n        getLoaderManager().initLoader(LOADER_ID, null, this);\n\n        super.onActivityCreated(savedInstanceState);\n    }\n\n    @Override\n    public void onListItemClick(ListView l, View v, int position, long id) {\n        FileListAdapter adapter = (FileListAdapter) l.getAdapter();\n        if (adapter != null) {\n            File file = (File) adapter.getItem(position);\n            mPath = file.getAbsolutePath();\n            mListener.onFileSelected(file);\n        }\n    }\n\n    @Override\n    public Loader<List<File>> onCreateLoader(int id, Bundle args) {\n        return new FileLoader(getActivity(), mPath);\n    }\n\n    @Override\n    public void onLoadFinished(Loader<List<File>> loader, List<File> data) {\n        mAdapter.setListItems(data);\n\n        if (isResumed())\n            setListShown(true);\n        else\n            setListShownNoAnimation(true);\n    }\n\n    @Override\n    public void onLoaderReset(Loader<List<File>> loader) {\n        mAdapter.clear();\n    }\n}\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/java/com/ipaulpro/afilechooser/FileLoader.java",
    "content": "/*\n * Copyright (C) 2013 Paul Burke\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *      http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage com.ipaulpro.afilechooser;\n\nimport android.content.Context;\nimport android.os.FileObserver;\nimport android.support.v4.content.AsyncTaskLoader;\n\nimport com.ipaulpro.afilechooser.utils.FileUtils;\n\nimport java.io.File;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\n\n/**\n * Loader that returns a list of Files in a given file path.\n * \n * @version 2013-12-11\n * @author paulburke (ipaulpro)\n */\npublic class FileLoader extends AsyncTaskLoader<List<File>> {\n\n\tprivate static final int FILE_OBSERVER_MASK = FileObserver.CREATE\n\t\t\t| FileObserver.DELETE | FileObserver.DELETE_SELF\n\t\t\t| FileObserver.MOVED_FROM | FileObserver.MOVED_TO\n\t\t\t| FileObserver.MODIFY | FileObserver.MOVE_SELF;\n\n\tprivate FileObserver mFileObserver;\n\n\tprivate List<File> mData;\n\tprivate String mPath;\n\n\tpublic FileLoader(Context context, String path) {\n\t\tsuper(context);\n\t\tthis.mPath = path;\n\t}\n\n\t@Override\n\tpublic List<File> loadInBackground() {\n\n        ArrayList<File> list = new ArrayList<File>();\n\n        // Current directory File instance\n        final File pathDir = new File(mPath);\n\n        // List file in this directory with the directory filter\n        final File[] dirs = pathDir.listFiles(FileUtils.sDirFilter);\n        if (dirs != null) {\n            // Sort the folders 
alphabetically\n            Arrays.sort(dirs, FileUtils.sComparator);\n            // Add each folder to the File list for the list adapter\n            for (File dir : dirs)\n                list.add(dir);\n        }\n\n        // List file in this directory with the file filter\n        final File[] files = pathDir.listFiles(FileUtils.sFileFilter);\n        if (files != null) {\n            // Sort the files alphabetically\n            Arrays.sort(files, FileUtils.sComparator);\n            // Add each file to the File list for the list adapter\n            for (File file : files)\n                list.add(file);\n        }\n\n        return list;\n\t}\n\n\t@Override\n\tpublic void deliverResult(List<File> data) {\n\t\tif (isReset()) {\n\t\t\tonReleaseResources(data);\n\t\t\treturn;\n\t\t}\n\n\t\tList<File> oldData = mData;\n\t\tmData = data;\n\n\t\tif (isStarted())\n\t\t\tsuper.deliverResult(data);\n\n\t\tif (oldData != null && oldData != data)\n\t\t\tonReleaseResources(oldData);\n\t}\n\n\t@Override\n\tprotected void onStartLoading() {\n\t\tif (mData != null)\n\t\t\tdeliverResult(mData);\n\n\t\tif (mFileObserver == null) {\n\t\t\tmFileObserver = new FileObserver(mPath, FILE_OBSERVER_MASK) {\n\t\t\t\t@Override\n\t\t\t\tpublic void onEvent(int event, String path) {\n\t\t\t\t\tonContentChanged();\n\t\t\t\t}\n\t\t\t};\n\t\t}\n\t\tmFileObserver.startWatching();\n\n\t\tif (takeContentChanged() || mData == null)\n\t\t\tforceLoad();\n\t}\n\n\t@Override\n\tprotected void onStopLoading() {\n\t\tcancelLoad();\n\t}\n\n\t@Override\n\tprotected void onReset() {\n\t\tonStopLoading();\n\n\t\tif (mData != null) {\n\t\t\tonReleaseResources(mData);\n\t\t\tmData = null;\n\t\t}\n\t}\n\n\t@Override\n\tpublic void onCanceled(List<File> data) {\n\t\tsuper.onCanceled(data);\n\n\t\tonReleaseResources(data);\n\t}\n\n\tprotected void onReleaseResources(List<File> data) {\n\n\t\tif (mFileObserver != null) {\n\t\t\tmFileObserver.stopWatching();\n\t\t\tmFileObserver = null;\n\t\t}\n\t}\n}"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/java/com/ipaulpro/afilechooser/utils/FileUtils.java",
    "content": "/*\r\n * Copyright (C) 2007-2008 OpenIntents.org\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n *      http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\n\r\npackage com.ipaulpro.afilechooser.utils;\r\n\r\nimport android.content.ContentResolver;\r\nimport android.content.ContentUris;\r\nimport android.content.Context;\r\nimport android.content.Intent;\r\nimport android.database.Cursor;\r\nimport android.database.DatabaseUtils;\r\nimport android.graphics.Bitmap;\r\nimport android.net.Uri;\r\nimport android.os.Build;\r\nimport android.os.Environment;\r\nimport android.provider.DocumentsContract;\r\nimport android.provider.MediaStore;\r\nimport android.util.Log;\r\nimport android.webkit.MimeTypeMap;\r\n\r\nimport com.ianhanniballake.localstorage.LocalStorageProvider;\r\n\r\nimport java.io.File;\r\nimport java.io.FileFilter;\r\nimport java.text.DecimalFormat;\r\nimport java.util.Comparator;\r\n\r\n/**\r\n * @version 2009-07-03\r\n * @author Peli\r\n * @version 2013-12-11\r\n * @author paulburke (ipaulpro)\r\n */\r\npublic class FileUtils {\r\n    private FileUtils() {} //private constructor to enforce Singleton pattern\r\n    \r\n    /** TAG for log messages. 
*/\r\n    static final String TAG = \"FileUtils\";\r\n    private static final boolean DEBUG = false; // Set to true to enable logging\r\n\r\n    public static final String MIME_TYPE_AUDIO = \"audio/*\";\r\n    public static final String MIME_TYPE_TEXT = \"text/*\";\r\n    public static final String MIME_TYPE_IMAGE = \"image/*\";\r\n    public static final String MIME_TYPE_VIDEO = \"video/*\";\r\n    public static final String MIME_TYPE_APP = \"application/*\";\r\n\r\n    public static final String HIDDEN_PREFIX = \".\";\r\n\r\n    /**\r\n     * Gets the extension of a file name, like \".png\" or \".jpg\".\r\n     *\r\n     * @param uri\r\n     * @return Extension including the dot(\".\"); \"\" if there is no extension;\r\n     *         null if uri was null.\r\n     */\r\n    public static String getExtension(String uri) {\r\n        if (uri == null) {\r\n            return null;\r\n        }\r\n\r\n        int dot = uri.lastIndexOf(\".\");\r\n        if (dot >= 0) {\r\n            return uri.substring(dot);\r\n        } else {\r\n            // No extension.\r\n            return \"\";\r\n        }\r\n    }\r\n\r\n    /**\r\n     * @return Whether the URI is a local one.\r\n     */\r\n    public static boolean isLocal(String url) {\r\n        if (url != null && !url.startsWith(\"http://\") && !url.startsWith(\"https://\")) {\r\n            return true;\r\n        }\r\n        return false;\r\n    }\r\n\r\n    /**\r\n     * @return True if Uri is a MediaStore Uri.\r\n     * @author paulburke\r\n     */\r\n    public static boolean isMediaUri(Uri uri) {\r\n        return \"media\".equalsIgnoreCase(uri.getAuthority());\r\n    }\r\n\r\n    /**\r\n     * Convert File into Uri.\r\n     *\r\n     * @param file\r\n     * @return uri\r\n     */\r\n    public static Uri getUri(File file) {\r\n        if (file != null) {\r\n            return Uri.fromFile(file);\r\n        }\r\n        return null;\r\n    }\r\n\r\n    /**\r\n     * Returns the path only (without file 
name).\r\n     *\r\n     * @param file\r\n     * @return\r\n     */\r\n    public static File getPathWithoutFilename(File file) {\r\n        if (file != null) {\r\n            if (file.isDirectory()) {\r\n                // no file to be split off. Return everything\r\n                return file;\r\n            } else {\r\n                String filename = file.getName();\r\n                String filepath = file.getAbsolutePath();\r\n\r\n                // Construct path without file name.\r\n                String pathwithoutname = filepath.substring(0,\r\n                        filepath.length() - filename.length());\r\n                if (pathwithoutname.endsWith(\"/\")) {\r\n                    pathwithoutname = pathwithoutname.substring(0, pathwithoutname.length() - 1);\r\n                }\r\n                return new File(pathwithoutname);\r\n            }\r\n        }\r\n        return null;\r\n    }\r\n\r\n    /**\r\n     * @return The MIME type for the given file.\r\n     */\r\n    public static String getMimeType(File file) {\r\n\r\n        String extension = getExtension(file.getName());\r\n\r\n        if (extension.length() > 0)\r\n            return MimeTypeMap.getSingleton().getMimeTypeFromExtension(extension.substring(1));\r\n\r\n        return \"application/octet-stream\";\r\n    }\r\n\r\n    /**\r\n     * @return The MIME type for the give Uri.\r\n     */\r\n    public static String getMimeType(Context context, Uri uri) {\r\n        File file = new File(getPath(context, uri));\r\n        return getMimeType(file);\r\n    }\r\n\r\n    /**\r\n     * @param uri The Uri to check.\r\n     * @return Whether the Uri authority is {@link LocalStorageProvider}.\r\n     * @author paulburke\r\n     */\r\n    public static boolean isLocalStorageDocument(Uri uri) {\r\n        return LocalStorageProvider.AUTHORITY.equals(uri.getAuthority());\r\n    }\r\n\r\n    /**\r\n     * @param uri The Uri to check.\r\n     * @return Whether the Uri authority is 
ExternalStorageProvider.\r\n     * @author paulburke\r\n     */\r\n    public static boolean isExternalStorageDocument(Uri uri) {\r\n        return \"com.android.externalstorage.documents\".equals(uri.getAuthority());\r\n    }\r\n\r\n    /**\r\n     * @param uri The Uri to check.\r\n     * @return Whether the Uri authority is DownloadsProvider.\r\n     * @author paulburke\r\n     */\r\n    public static boolean isDownloadsDocument(Uri uri) {\r\n        return \"com.android.providers.downloads.documents\".equals(uri.getAuthority());\r\n    }\r\n\r\n    /**\r\n     * @param uri The Uri to check.\r\n     * @return Whether the Uri authority is MediaProvider.\r\n     * @author paulburke\r\n     */\r\n    public static boolean isMediaDocument(Uri uri) {\r\n        return \"com.android.providers.media.documents\".equals(uri.getAuthority());\r\n    }\r\n\r\n    /**\r\n     * @param uri The Uri to check.\r\n     * @return Whether the Uri authority is Google Photos.\r\n     */\r\n    public static boolean isGooglePhotosUri(Uri uri) {\r\n        return \"com.google.android.apps.photos.content\".equals(uri.getAuthority());\r\n    }\r\n\r\n    /**\r\n     * Get the value of the data column for this Uri. 
This is useful for\r\n     * MediaStore Uris, and other file-based ContentProviders.\r\n     *\r\n     * @param context The context.\r\n     * @param uri The Uri to query.\r\n     * @param selection (Optional) Filter used in the query.\r\n     * @param selectionArgs (Optional) Selection arguments used in the query.\r\n     * @return The value of the _data column, which is typically a file path.\r\n     * @author paulburke\r\n     */\r\n    public static String getDataColumn(Context context, Uri uri, String selection,\r\n                                       String[] selectionArgs) {\r\n\r\n        Cursor cursor = null;\r\n        final String column = \"_data\";\r\n        final String[] projection = {\r\n                column\r\n        };\r\n\r\n        try {\r\n            cursor = context.getContentResolver().query(uri, projection, selection, selectionArgs,\r\n                    null);\r\n            if (cursor != null && cursor.moveToFirst()) {\r\n                if (DEBUG)\r\n                    DatabaseUtils.dumpCursor(cursor);\r\n\r\n                final int column_index = cursor.getColumnIndexOrThrow(column);\r\n                return cursor.getString(column_index);\r\n            }\r\n        } finally {\r\n            if (cursor != null)\r\n                cursor.close();\r\n        }\r\n        return null;\r\n    }\r\n\r\n    /**\r\n     * Get a file path from a Uri. 
This will get the the path for Storage Access\r\n     * Framework Documents, as well as the _data field for the MediaStore and\r\n     * other file-based ContentProviders.<br>\r\n     * <br>\r\n     * Callers should check whether the path is local before assuming it\r\n     * represents a local file.\r\n     * \r\n     * @param context The context.\r\n     * @param uri The Uri to query.\r\n     * @see #isLocal(String)\r\n     * @see #getFile(Context, Uri)\r\n     * @author paulburke\r\n     */\r\n    public static String getPath(final Context context, final Uri uri) {\r\n\r\n        if (DEBUG)\r\n            Log.d(TAG + \" File -\",\r\n                    \"Authority: \" + uri.getAuthority() +\r\n                            \", Fragment: \" + uri.getFragment() +\r\n                            \", Port: \" + uri.getPort() +\r\n                            \", Query: \" + uri.getQuery() +\r\n                            \", Scheme: \" + uri.getScheme() +\r\n                            \", Host: \" + uri.getHost() +\r\n                            \", Segments: \" + uri.getPathSegments().toString()\r\n                    );\r\n\r\n        final boolean isKitKat = Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT;\r\n\r\n        // DocumentProvider\r\n        if (isKitKat && DocumentsContract.isDocumentUri(context, uri)) {\r\n            // LocalStorageProvider\r\n            if (isLocalStorageDocument(uri)) {\r\n                // The path is the id\r\n                return DocumentsContract.getDocumentId(uri);\r\n            }\r\n            // ExternalStorageProvider\r\n            else if (isExternalStorageDocument(uri)) {\r\n                final String docId = DocumentsContract.getDocumentId(uri);\r\n                final String[] split = docId.split(\":\");\r\n                final String type = split[0];\r\n\r\n                if (\"primary\".equalsIgnoreCase(type)) {\r\n                    return Environment.getExternalStorageDirectory() + \"/\" + 
split[1];\r\n                }\r\n\r\n                // TODO handle non-primary volumes\r\n            }\r\n            // DownloadsProvider\r\n            else if (isDownloadsDocument(uri)) {\r\n\r\n                final String id = DocumentsContract.getDocumentId(uri);\r\n                final Uri contentUri = ContentUris.withAppendedId(\r\n                        Uri.parse(\"content://downloads/public_downloads\"), Long.valueOf(id));\r\n\r\n                return getDataColumn(context, contentUri, null, null);\r\n            }\r\n            // MediaProvider\r\n            else if (isMediaDocument(uri)) {\r\n                final String docId = DocumentsContract.getDocumentId(uri);\r\n                final String[] split = docId.split(\":\");\r\n                final String type = split[0];\r\n\r\n                Uri contentUri = null;\r\n                if (\"image\".equals(type)) {\r\n                    contentUri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI;\r\n                } else if (\"video\".equals(type)) {\r\n                    contentUri = MediaStore.Video.Media.EXTERNAL_CONTENT_URI;\r\n                } else if (\"audio\".equals(type)) {\r\n                    contentUri = MediaStore.Audio.Media.EXTERNAL_CONTENT_URI;\r\n                }\r\n\r\n                final String selection = \"_id=?\";\r\n                final String[] selectionArgs = new String[] {\r\n                        split[1]\r\n                };\r\n\r\n                return getDataColumn(context, contentUri, selection, selectionArgs);\r\n            }\r\n        }\r\n        // MediaStore (and general)\r\n        else if (\"content\".equalsIgnoreCase(uri.getScheme())) {\r\n\r\n            // Return the remote address\r\n            if (isGooglePhotosUri(uri))\r\n                return uri.getLastPathSegment();\r\n\r\n            return getDataColumn(context, uri, null, null);\r\n        }\r\n        // File\r\n        else if (\"file\".equalsIgnoreCase(uri.getScheme())) 
{\r\n            return uri.getPath();\r\n        }\r\n\r\n        return null;\r\n    }\r\n\r\n    /**\r\n     * Convert Uri into File, if possible.\r\n     *\r\n     * @return file A local file that the Uri was pointing to, or null if the\r\n     *         Uri is unsupported or pointed to a remote resource.\r\n     * @see #getPath(Context, Uri)\r\n     * @author paulburke\r\n     */\r\n    public static File getFile(Context context, Uri uri) {\r\n        if (uri != null) {\r\n            String path = getPath(context, uri);\r\n            if (path != null && isLocal(path)) {\r\n                return new File(path);\r\n            }\r\n        }\r\n        return null;\r\n    }\r\n\r\n    /**\r\n     * Get the file size in a human-readable string.\r\n     *\r\n     * @param size\r\n     * @return\r\n     * @author paulburke\r\n     */\r\n    public static String getReadableFileSize(int size) {\r\n        final int BYTES_IN_KILOBYTES = 1024;\r\n        final DecimalFormat dec = new DecimalFormat(\"###.#\");\r\n        final String KILOBYTES = \" KB\";\r\n        final String MEGABYTES = \" MB\";\r\n        final String GIGABYTES = \" GB\";\r\n        float fileSize = 0;\r\n        String suffix = KILOBYTES;\r\n\r\n        if (size > BYTES_IN_KILOBYTES) {\r\n            fileSize = size / BYTES_IN_KILOBYTES;\r\n            if (fileSize > BYTES_IN_KILOBYTES) {\r\n                fileSize = fileSize / BYTES_IN_KILOBYTES;\r\n                if (fileSize > BYTES_IN_KILOBYTES) {\r\n                    fileSize = fileSize / BYTES_IN_KILOBYTES;\r\n                    suffix = GIGABYTES;\r\n                } else {\r\n                    suffix = MEGABYTES;\r\n                }\r\n            }\r\n        }\r\n        return String.valueOf(dec.format(fileSize) + suffix);\r\n    }\r\n\r\n    /**\r\n     * Attempt to retrieve the thumbnail of given File from the MediaStore. 
This\r\n     * should not be called on the UI thread.\r\n     *\r\n     * @param context\r\n     * @param file\r\n     * @return\r\n     * @author paulburke\r\n     */\r\n    public static Bitmap getThumbnail(Context context, File file) {\r\n        return getThumbnail(context, getUri(file), getMimeType(file));\r\n    }\r\n\r\n    /**\r\n     * Attempt to retrieve the thumbnail of given Uri from the MediaStore. This\r\n     * should not be called on the UI thread.\r\n     *\r\n     * @param context\r\n     * @param uri\r\n     * @return\r\n     * @author paulburke\r\n     */\r\n    public static Bitmap getThumbnail(Context context, Uri uri) {\r\n        return getThumbnail(context, uri, getMimeType(context, uri));\r\n    }\r\n\r\n    /**\r\n     * Attempt to retrieve the thumbnail of given Uri from the MediaStore. This\r\n     * should not be called on the UI thread.\r\n     *\r\n     * @param context\r\n     * @param uri\r\n     * @param mimeType\r\n     * @return\r\n     * @author paulburke\r\n     */\r\n    public static Bitmap getThumbnail(Context context, Uri uri, String mimeType) {\r\n        if (DEBUG)\r\n            Log.d(TAG, \"Attempting to get thumbnail\");\r\n\r\n        if (!isMediaUri(uri)) {\r\n            Log.e(TAG, \"You can only retrieve thumbnails for images and videos.\");\r\n            return null;\r\n        }\r\n\r\n        Bitmap bm = null;\r\n        if (uri != null) {\r\n            final ContentResolver resolver = context.getContentResolver();\r\n            Cursor cursor = null;\r\n            try {\r\n                cursor = resolver.query(uri, null, null, null, null);\r\n                if (cursor.moveToFirst()) {\r\n                    final int id = cursor.getInt(0);\r\n                    if (DEBUG)\r\n                        Log.d(TAG, \"Got thumb ID: \" + id);\r\n\r\n                    if (mimeType.contains(\"video\")) {\r\n                        bm = MediaStore.Video.Thumbnails.getThumbnail(\r\n                                
resolver,\r\n                                id,\r\n                                MediaStore.Video.Thumbnails.MINI_KIND,\r\n                                null);\r\n                    }\r\n                    else if (mimeType.contains(FileUtils.MIME_TYPE_IMAGE)) {\r\n                        bm = MediaStore.Images.Thumbnails.getThumbnail(\r\n                                resolver,\r\n                                id,\r\n                                MediaStore.Images.Thumbnails.MINI_KIND,\r\n                                null);\r\n                    }\r\n                }\r\n            } catch (Exception e) {\r\n                if (DEBUG)\r\n                    Log.e(TAG, \"getThumbnail\", e);\r\n            } finally {\r\n                if (cursor != null)\r\n                    cursor.close();\r\n            }\r\n        }\r\n        return bm;\r\n    }\r\n\r\n    /**\r\n     * File and folder comparator. TODO Expose sorting option method\r\n     *\r\n     * @author paulburke\r\n     */\r\n    public static Comparator<File> sComparator = new Comparator<File>() {\r\n        @Override\r\n        public int compare(File f1, File f2) {\r\n            // Sort alphabetically by lower case, which is much cleaner\r\n            return f1.getName().toLowerCase().compareTo(\r\n                    f2.getName().toLowerCase());\r\n        }\r\n    };\r\n\r\n    /**\r\n     * File (not directories) filter.\r\n     *\r\n     * @author paulburke\r\n     */\r\n    public static FileFilter sFileFilter = new FileFilter() {\r\n        @Override\r\n        public boolean accept(File file) {\r\n            final String fileName = file.getName();\r\n            // Return files only (not directories) and skip hidden files\r\n            return file.isFile() && !fileName.startsWith(HIDDEN_PREFIX);\r\n        }\r\n    };\r\n\r\n    /**\r\n     * Folder (directories) filter.\r\n     *\r\n     * @author paulburke\r\n     */\r\n    public static FileFilter sDirFilter = new 
FileFilter() {\r\n        @Override\r\n        public boolean accept(File file) {\r\n            final String fileName = file.getName();\r\n            // Return directories only and skip hidden directories\r\n            return file.isDirectory() && !fileName.startsWith(HIDDEN_PREFIX);\r\n        }\r\n    };\r\n\r\n    /**\r\n     * Get the Intent for selecting content to be used in an Intent Chooser.\r\n     *\r\n     * @return The intent for opening a file with Intent.createChooser()\r\n     * @author paulburke\r\n     */\r\n    public static Intent createGetContentIntent() {\r\n        // Implicitly allow the user to select a particular kind of data\r\n        final Intent intent = new Intent(Intent.ACTION_GET_CONTENT);\r\n        // The MIME data type filter\r\n        intent.setType(\"*/*\");\r\n        // Only return URIs that can be opened with ContentResolver\r\n        intent.addCategory(Intent.CATEGORY_OPENABLE);\r\n        return intent;\r\n    }\r\n\r\n    /**\r\n     * 用来根据文件后缀名过滤文件的工具类\r\n     */\r\n    public static class FileFilterBySuffixs implements FileFilter {\r\n        private String suffixs;\r\n\r\n        /**\r\n         * 可传入一个或多个后缀名，不调用此方法,默认搜索除隐藏文件外的全部文件\r\n         * @param suffixs 后缀后，如 3gp|mp3|mp4\r\n         */\r\n        public FileFilterBySuffixs(String suffixs) {\r\n            this.suffixs = suffixs;\r\n        }\r\n\r\n        @Override\r\n        public boolean accept(File file) {\r\n            final String fileName = file.getName();\r\n            if (null == suffixs) {\r\n                // Return files only (not directories) and skip hidden files\r\n                return file.isFile() && !fileName.startsWith(HIDDEN_PREFIX);\r\n            } else {\r\n                return file.isFile() && !fileName.startsWith(HIDDEN_PREFIX) && fileName.matches(\"^.*?\\\\.(\" + suffixs + \")$\");\r\n            }\r\n        }\r\n    }\r\n\r\n    public static void setFileFilter(FileFilterBySuffixs filter) {\r\n        if (filter != null) 
{\r\n            sFileFilter = filter;\r\n        }\r\n    }\r\n}\r\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/decoder/hevdecoder/GLPlayView.java",
    "content": "package com.ksyun.media.ksy265codec.demo.decoder.hevdecoder;\n\nimport android.content.Context;\nimport android.graphics.PixelFormat;\nimport android.opengl.GLSurfaceView;\nimport android.util.AttributeSet;\nimport android.util.Log;\n\nimport javax.microedition.khronos.egl.EGL10;\nimport javax.microedition.khronos.egl.EGLConfig;\nimport javax.microedition.khronos.egl.EGLContext;\nimport javax.microedition.khronos.egl.EGLDisplay;\n\npublic class GLPlayView extends GLSurfaceView {\n    private static String TAG = \"GLPlayView\";\n    private static final boolean DEBUG = true;\n\n\tpublic GLPlayView(Context context) {\n\t\tsuper(context);\n        init(false, 0, 0);\n\t}\n\n    public GLPlayView(Context context, AttributeSet attrs) {\n        super(context, attrs);\n        init(false, 0, 0);\n    }\n\n    public GLPlayView(Context context, boolean translucent, int depth, int stencil) {\n\t\tsuper(context);\n\t\tinit(translucent, depth, stencil);\n\t}\n\t\n\tprivate void init(boolean translucent, int depth, int stencil) {\n\n        /* By default, GLSurfaceView() creates a RGB_565 opaque surface.\n         * If we want a translucent one, we should change the surface's\n         * format here, using PixelFormat.TRANSLUCENT for GL Surfaces\n         * is interpreted as any 32-bit surface with alpha by SurfaceFlinger.\n         */\n        if (translucent) {\n            this.getHolder().setFormat(PixelFormat.TRANSLUCENT);\n        }\n\n        /* Setup the context factory for 2.0 rendering.\n         * See ContextFactory class definition below\n         */\n        setEGLContextFactory(new ContextFactory());\n\n        /* We need to choose an EGLConfig that matches the format of\n         * our surface exactly. This is going to be done in our\n         * custom config chooser. 
See ConfigChooser class definition\n         * below.\n         */\n        setEGLConfigChooser( translucent ?\n                             new ConfigChooser(8, 8, 8, 8, depth, stencil) :\n                             new ConfigChooser(5, 6, 5, 0, depth, stencil) );\n\n        /* Set the renderer responsible for frame rendering */\n        setRenderer(new GLRenderer());\n        \n        setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);\n    }\n\t\n    private static void checkEglError(String prompt, EGL10 egl) {\n        int error;\n        while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {\n            Log.e(TAG, String.format(\"%s: EGL error: 0x%x\", prompt, error));\n        }\n    }\n    \n\tprivate static class ContextFactory implements GLSurfaceView.EGLContextFactory {\n        private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098;\n        public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) {\n            Log.i(TAG, \"creating OpenGL ES 2.0 context\");\n            checkEglError(\"Before eglCreateContext\", egl);\n            int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };\n            EGLContext context = egl.eglCreateContext(display, eglConfig, EGL10.EGL_NO_CONTEXT, attrib_list);\n            checkEglError(\"After eglCreateContext\", egl);\n            return context;\n        }\n\n\t\t@Override\n        public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context) {\n            egl.eglDestroyContext(display, context);\n        }\n    }\n\n\tprivate static class ConfigChooser implements GLSurfaceView.EGLConfigChooser {\n\n        public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) {\n            mRedSize = r;\n            mGreenSize = g;\n            mBlueSize = b;\n            mAlphaSize = a;\n            mDepthSize = depth;\n            mStencilSize = stencil;\n        }\n\n        /* This EGL config specification is used to specify 2.0 rendering.\n  
       * We use a minimum size of 4 bits for red/green/blue, but will\n         * perform actual matching in chooseConfig() below.\n         */\n        private static int EGL_OPENGL_ES2_BIT = 4;\n        private static int[] s_configAttribs2 =\n        {\n            EGL10.EGL_RED_SIZE, 4,\n            EGL10.EGL_GREEN_SIZE, 4,\n            EGL10.EGL_BLUE_SIZE, 4,\n            EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,\n            EGL10.EGL_NONE\n        };\n\n\t\t@Override\n        public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {\n\n            /* Get the number of minimally matching EGL configurations\n             */\n            int[] num_config = new int[1];\n            egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config);\n\n            int numConfigs = num_config[0];\n\n            if (numConfigs <= 0) {\n                throw new IllegalArgumentException(\"No configs match configSpec\");\n            }\n\n            /* Allocate then read the array of minimally matching EGL configs\n             */\n            EGLConfig[] configs = new EGLConfig[numConfigs];\n            egl.eglChooseConfig(display, s_configAttribs2, configs, numConfigs, num_config);\n\n            if (DEBUG) {\n                 printConfigs(egl, display, configs);\n            }\n            /* Now return the \"best\" one\n             */\n            return chooseConfig(egl, display, configs);\n        }\n\n        public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,\n                                      EGLConfig[] configs) {\n            for(EGLConfig config : configs) {\n                int d = findConfigAttrib(egl, display, config,\n                        EGL10.EGL_DEPTH_SIZE, 0);\n                int s = findConfigAttrib(egl, display, config,\n                        EGL10.EGL_STENCIL_SIZE, 0);\n\n                // We need at least mDepthSize and mStencilSize bits\n                if (d < mDepthSize || s < mStencilSize)\n                 
   continue;\n\n                // We want an *exact* match for red/green/blue/alpha\n                int r = findConfigAttrib(egl, display, config,\n                        EGL10.EGL_RED_SIZE, 0);\n                int g = findConfigAttrib(egl, display, config,\n                            EGL10.EGL_GREEN_SIZE, 0);\n                int b = findConfigAttrib(egl, display, config,\n                            EGL10.EGL_BLUE_SIZE, 0);\n                int a = findConfigAttrib(egl, display, config,\n                        EGL10.EGL_ALPHA_SIZE, 0);\n\n                if (r == mRedSize && g == mGreenSize && b == mBlueSize && a == mAlphaSize) {\n                \tLog.i(TAG, \"choosed config: (r,g,b,a) = (\" + r + \", \" + g + \", \" + b + \", \" + a + \")\");\n                    return config;\n                }\n            }\n            return null;\n        }\n\n        private int findConfigAttrib(EGL10 egl, EGLDisplay display,\n                                     EGLConfig config, int attribute, int defaultValue) {\n\n            if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) {\n                return mValue[0];\n            }\n            return defaultValue;\n        }\n\n        private void printConfigs(EGL10 egl, EGLDisplay display,\n                                  EGLConfig[] configs) {\n            int numConfigs = configs.length;\n            Log.i(TAG, String.format(\"%d configurations\", numConfigs));\n            for (int i = 0; i < numConfigs; i++) {\n                Log.i(TAG, String.format(\"Configuration %d:\\n\", i));\n                printConfig(egl, display, configs[i]);\n            }\n        }\n\n        private void printConfig(EGL10 egl, EGLDisplay display,\n                                 EGLConfig config) {\n            int[] attributes = {\n                    EGL10.EGL_BUFFER_SIZE,\n                    EGL10.EGL_ALPHA_SIZE,\n                    EGL10.EGL_BLUE_SIZE,\n                    EGL10.EGL_GREEN_SIZE,\n          
          EGL10.EGL_RED_SIZE,\n                    EGL10.EGL_DEPTH_SIZE,\n                    EGL10.EGL_STENCIL_SIZE,\n                    EGL10.EGL_CONFIG_CAVEAT,\n                    EGL10.EGL_CONFIG_ID,\n                    EGL10.EGL_LEVEL,\n                    EGL10.EGL_MAX_PBUFFER_HEIGHT,\n                    EGL10.EGL_MAX_PBUFFER_PIXELS,\n                    EGL10.EGL_MAX_PBUFFER_WIDTH,\n                    EGL10.EGL_NATIVE_RENDERABLE,\n                    EGL10.EGL_NATIVE_VISUAL_ID,\n                    EGL10.EGL_NATIVE_VISUAL_TYPE,\n                    0x3030, // EGL10.EGL_PRESERVED_RESOURCES,\n                    EGL10.EGL_SAMPLES,\n                    EGL10.EGL_SAMPLE_BUFFERS,\n                    EGL10.EGL_SURFACE_TYPE,\n                    EGL10.EGL_TRANSPARENT_TYPE,\n                    EGL10.EGL_TRANSPARENT_RED_VALUE,\n                    EGL10.EGL_TRANSPARENT_GREEN_VALUE,\n                    EGL10.EGL_TRANSPARENT_BLUE_VALUE,\n                    0x3039, // EGL10.EGL_BIND_TO_TEXTURE_RGB,\n                    0x303A, // EGL10.EGL_BIND_TO_TEXTURE_RGBA,\n                    0x303B, // EGL10.EGL_MIN_SWAP_INTERVAL,\n                    0x303C, // EGL10.EGL_MAX_SWAP_INTERVAL,\n                    EGL10.EGL_LUMINANCE_SIZE,\n                    EGL10.EGL_ALPHA_MASK_SIZE,\n                    EGL10.EGL_COLOR_BUFFER_TYPE,\n                    EGL10.EGL_RENDERABLE_TYPE,\n                    0x3042 // EGL10.EGL_CONFORMANT\n            };\n            String[] names = {\n                    \"EGL_BUFFER_SIZE\",\n                    \"EGL_ALPHA_SIZE\",\n                    \"EGL_BLUE_SIZE\",\n                    \"EGL_GREEN_SIZE\",\n                    \"EGL_RED_SIZE\",\n                    \"EGL_DEPTH_SIZE\",\n                    \"EGL_STENCIL_SIZE\",\n                    \"EGL_CONFIG_CAVEAT\",\n                    \"EGL_CONFIG_ID\",\n                    \"EGL_LEVEL\",\n                    \"EGL_MAX_PBUFFER_HEIGHT\",\n                    
\"EGL_MAX_PBUFFER_PIXELS\",\n                    \"EGL_MAX_PBUFFER_WIDTH\",\n                    \"EGL_NATIVE_RENDERABLE\",\n                    \"EGL_NATIVE_VISUAL_ID\",\n                    \"EGL_NATIVE_VISUAL_TYPE\",\n                    \"EGL_PRESERVED_RESOURCES\",\n                    \"EGL_SAMPLES\",\n                    \"EGL_SAMPLE_BUFFERS\",\n                    \"EGL_SURFACE_TYPE\",\n                    \"EGL_TRANSPARENT_TYPE\",\n                    \"EGL_TRANSPARENT_RED_VALUE\",\n                    \"EGL_TRANSPARENT_GREEN_VALUE\",\n                    \"EGL_TRANSPARENT_BLUE_VALUE\",\n                    \"EGL_BIND_TO_TEXTURE_RGB\",\n                    \"EGL_BIND_TO_TEXTURE_RGBA\",\n                    \"EGL_MIN_SWAP_INTERVAL\",\n                    \"EGL_MAX_SWAP_INTERVAL\",\n                    \"EGL_LUMINANCE_SIZE\",\n                    \"EGL_ALPHA_MASK_SIZE\",\n                    \"EGL_COLOR_BUFFER_TYPE\",\n                    \"EGL_RENDERABLE_TYPE\",\n                    \"EGL_CONFORMANT\"\n            };\n            int[] value = new int[1];\n            for (int i = 0; i < attributes.length; i++) {\n                int attribute = attributes[i];\n                String name = names[i];\n                if ( egl.eglGetConfigAttrib(display, config, attribute, value)) {\n                    Log.i(TAG, String.format(\"  %s: %d\\n\", name, value[0]));\n                } else {\n                    // Log.w(TAG, String.format(\"  %s: failed\\n\", name));\n                    while (egl.eglGetError() != EGL10.EGL_SUCCESS);\n                }\n            }\n        }\n\n        // Subclasses can adjust these values:\n        protected int mRedSize;\n        protected int mGreenSize;\n        protected int mBlueSize;\n        protected int mAlphaSize;\n        protected int mDepthSize;\n        protected int mStencilSize;\n        private int[] mValue = new int[1];\n\n    }\n}\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/decoder/hevdecoder/GLRenderer.java",
    "content": "package com.ksyun.media.ksy265codec.demo.decoder.hevdecoder;\n\nimport android.opengl.GLSurfaceView;\n\nimport javax.microedition.khronos.egl.EGLConfig;\nimport javax.microedition.khronos.opengles.GL10;\n\n/**\n * @author shengbin\n *\n */\npublic class GLRenderer implements GLSurfaceView.Renderer {\n\n    private native int nativeInit();\n    private native int nativeSetup(int w, int h);\n    private native void nativeDrawFrame();\n    \n\t@Override\n\tpublic void onDrawFrame(GL10 arg0) {\n\t\tnativeDrawFrame();\n\t}\n\n\t@Override\n\tpublic void onSurfaceChanged(GL10 arg0, int w, int h) {\n\t    nativeSetup(w, h);\n\t}\n\n\t@Override\n\tpublic void onSurfaceCreated(GL10 arg0, EGLConfig arg1) {\n\t\tnativeInit();\n\t}\n\n\tstatic {\n\t\tSystem.loadLibrary(\"jniplayer\");\n\t}\n\n}\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/decoder/hevdecoder/NativeMediaPlayer.java",
    "content": "package com.ksyun.media.ksy265codec.demo.decoder.hevdecoder;\n\nimport android.graphics.Bitmap;\nimport android.graphics.Bitmap.Config;\nimport android.graphics.Canvas;\nimport android.graphics.Color;\nimport android.graphics.Matrix;\nimport android.graphics.Paint;\nimport android.opengl.GLSurfaceView;\nimport android.os.Handler;\nimport android.os.Looper;\nimport android.util.Log;\nimport android.view.Surface;\nimport android.view.Surface.OutOfResourcesException;\nimport android.view.SurfaceHolder;\nimport android.widget.TextView;\nimport android.content.Context;\n\nimport java.io.File;\nimport java.io.FileFilter;\nimport java.util.regex.Pattern;\n\nimport com.ksyun.media.ksy265codec.demo.ui.Settings;\n\npublic class NativeMediaPlayer {\n\tpublic static final int MEDIA_INFO_FRAMERATE_VIDEO = 900;\n\tpublic static final int MEDIA_INFO_END_OF_FILE = 909;\n\n\tprivate int mNativeContext; // accessed by native methods\n\tprivate Surface mSurface;\n\tprivate GLSurfaceView mGLSurfaceView;\n\tprivate TextView mInfoTextView;\n\tprivate Bitmap mFrameBitmap = null;\n\tprivate int mDisplayWidth = 0;\n\tprivate int mDisplayHeight = 0;\n\tprivate int mDisplayFPS = -1;\n\tprivate int mDisplayAvgFPS = -1;\n\tprivate int mDecodeFPS = -1;\n\tprivate int mBitrateVideo = -1;\n\tprivate int mBitrateAudio = -1;\n\tprivate boolean mShowInfo = true;\n\tprivate boolean mShowInfoGL = true;\n\tprivate String mInfo = \"\";\n\n\tprivate OnCompletionListener mListener = null;\n\tprivate final Handler mMainHandler;\n\tprivate boolean mNeedSetup = true;\n\n\tpublic interface OnCompletionListener {\n\t\tpublic void onCompletion(int frame_count);\n\t}\n\n\tpublic void setCompletionListener(OnCompletionListener listener) {\n\t\tthis.mListener = listener;\n\t}\n\n\tpublic NativeMediaPlayer() {\n\t\tmMainHandler = new Handler(Looper.getMainLooper());\n\t}\n\n\tpublic void init() {\n\t\tnative_init();\n\t}\n\n\tpublic void setDisplay(SurfaceHolder sh) {\n\t\tif (sh != null) 
{\n\t\t\tmSurface = sh.getSurface();\n\t\t} else\n\t\t\tmSurface = null;\n\t}\n\n\tpublic void setGLDisplay(GLSurfaceView glView, TextView tv) {\n\t\tmGLSurfaceView = glView;\n\t\tmInfoTextView = tv;\n\t}\n\n\tpublic void setDisplaySize(int w, int h) {\n\t\tmDisplayHeight = h;\n\t\tmDisplayWidth = w;\n\n\t\tmNeedSetup = true;\n\t}\n\n\t/**\n\t * Gets the number of cores available in this device, across all processors.\n\t * Requires: Ability to peruse the filesystem at \"/sys/devices/system/cpu\"\n\t * \n\t * @return The number of cores, or 1 if failed to get result\n\t */\n\tprivate int getNumCores() {\n\t\t// Private Class to display only CPU devices in the directory listing\n\t\tclass CpuFilter implements FileFilter {\n\t\t\t@Override\n\t\t\tpublic boolean accept(File pathname) {\n\t\t\t\t// Check if filename is \"cpu\", followed by a single digit number\n\t\t\t\tif (Pattern.matches(\"cpu[0-9]+\", pathname.getName())) {\n\t\t\t\t\treturn true;\n\t\t\t\t}\n\t\t\t\treturn false;\n\t\t\t}\n\t\t}\n\n\t\ttry {\n\t\t\t// Get directory containing CPU info\n\t\t\tFile dir = new File(\"/sys/devices/system/cpu/\");\n\t\t\t// Filter to only list the devices we care about\n\t\t\tFile[] files = dir.listFiles(new CpuFilter());\n\t\t\t// Return the number of cores (virtual CPU devices)\n\t\t\treturn files.length;\n\t\t} catch (Exception e) {\n\t\t\t// Default to return 1 core\n\t\t\treturn 1;\n\t\t}\n\t}\n\n\tpublic int prepare(Context context,int type, int disableRender) {\n\t\t// android maintains the preferences for us, so use directly\n\t\tint num = Settings.getInstance().getDecoderSettings().getThreads();\n\t\tif (0 == num) {\n\t\t\tint cores = getNumCores();// Runtime.getRuntime().availableProcessors();\n\t\t\tif (cores <= 1)\n\t\t\t\tnum = 1;\n\t\t\telse\n\t\t\t\tnum = (cores < 5) ? ((cores * 3 + 1) / 2) : 8;\n\t\t\tLog.d(\"NativeMediaPlayer\", cores + \" cores detected! 
use \" + num\n\t\t\t\t\t+ \" threads.\\n\");\n\t\t}\n\n\t\tfloat fps = Settings.getInstance().getDecoderSettings().getFPS();\n\n\t\treturn native_prepare(context,type, disableRender, num, fps);\n\t}\n\n\tpublic int prepare(Context context,int type, int disableRender,\n\t\t\t\t\t   int threadNum, float fps) {\n\t\treturn native_prepare(context,type, disableRender, threadNum, fps);\n\t}\n\n\tpublic int start() {\n\t\tint w = getVideoWidth(), h = getVideoHeight();\n\t\tif (w > 0 && h > 0)\n\t\t\tmFrameBitmap = Bitmap.createBitmap(w, h, Config.RGB_565);\n\t\treturn native_start();\n\t}\n\n\tpublic void stop() {\n\t\tnative_stop();\n\t\tif (mFrameBitmap != null) {\n\t\t\tmFrameBitmap.recycle();\n\t\t\tmFrameBitmap = null;\n\t\t}\n\t}\n\n\tpublic void pause() {\n\t\tnative_pause();\n\t}\n\n\tpublic void go() {\n\t\tnative_go();\n\t}\n\n\tpublic void seekTo(int msec) {\n\n\t}\n\n\tpublic void setShowInfo(boolean show) {\n\t\tmShowInfo = show;\n\t\tif (mShowInfo == false && mInfoTextView != null) {\n\t\t\tmInfoTextView.setText(\"\");\n\t\t}\n\t}\n\n\tprivate void setupDisplay() {\n\t\tint videoWidth = getVideoWidth(), videoHeight = getVideoHeight();\n\t\tint screenWidth, screenHeight, displayWidth = 0, displayHeight = 0;\n\t\tscreenHeight = mDisplayHeight;\n\t\tscreenWidth = mDisplayWidth;\n\n\t\tdisplayWidth = videoWidth;\n\t\tdisplayHeight = videoHeight;\n\t\tif (displayHeight > screenHeight) {\n\t\t\tdisplayHeight = screenHeight;\n\t\t\tdisplayWidth = displayHeight * videoWidth / videoHeight;\n\t\t\tdisplayWidth -= displayWidth % 4;\n\t\t}\n\t\tif (displayWidth > screenWidth) {\n\t\t\tdisplayWidth = screenWidth;\n\t\t\tdisplayHeight = displayWidth * videoHeight / videoWidth;\n\t\t\tdisplayHeight -= displayHeight % 4;\n\t\t}\n\t\tsetDisplaySize(displayWidth, displayHeight);\n\t}\n\n\t/**\n\t * Called from native code\n\t */\n\tpublic int drawFrame(int width, int height) {\n\t\tboolean useGL = false;\n\n\t\tif (useGL) {\n\n\t\t\tmGLSurfaceView.requestRender();\n\n\t\t\tif 
(mShowInfoGL) {\n\t\t\t\tmInfo = \"\";\n\t\t\t\tPaint paint = new Paint();\n\t\t\t\tpaint.setColor(Color.WHITE);\n\t\t\t\tpaint.setTextSize(40);\n\t\t\t\tif (width > 0) {\n\t\t\t\t\tmInfo += (\"Video Size:\" + width + \"x\" + height);\n\t\t\t\t}\n\t\t\t\tif (mDisplayFPS > 0) {\n\t\t\t\t\tmInfo += (\"    Display FPS:\" + mDisplayFPS);\n\t\t\t\t}\n\t\t\t\tif (mDisplayAvgFPS > 0) {\n\t\t\t\t\tmInfo += String.format(\"    Average FPS:%.2f\",\n\t\t\t\t\t\t\tmDisplayAvgFPS / 4096.0);\n\t\t\t\t}\n\n\t\t\t\tmInfoTextView.post(new Runnable() {\n\t\t\t\t\t@Override\n\t\t\t\t\tpublic void run() {\n\t\t\t\t\t\tmInfoTextView.setText(mInfo);\n\t\t\t\t\t}\n\t\t\t\t});\n\n\t\t\t\tmShowInfoGL = false;\n\t\t\t}\n\n\t\t\treturn 0;\n\t\t}\n\n\t\tif (mSurface == null) {\n\t\t\treturn 0;\n\t\t}\n\n\t\tif (mNeedSetup) {\n\t\t\tsetupDisplay();\n\t\t\tmNeedSetup = false;\n\t\t}\n\n\t\t// draw without OpenGL\n\t\tCanvas canvas = null;\n\t\ttry {\n\t\t\tcanvas = mSurface.lockCanvas(null);\n\t\t} catch (IllegalArgumentException e) {\n\t\t\t// TODO Auto-generated catch block\n\t\t\te.printStackTrace();\n\t\t} catch (OutOfResourcesException e) {\n\t\t\t// TODO Auto-generated catch block\n\t\t\te.printStackTrace();\n\t\t}\n\n\t\tcanvas.drawColor(Color.BLACK);\n\n\t\tif (null == mFrameBitmap || mFrameBitmap.getWidth() != width) {\n\t\t\t// video size has changed, we need to create a new frame bitmap\n\t\t\t// correspondingly\n\t\t\tmFrameBitmap = Bitmap.createBitmap(width, height, Config.RGB_565);\n\t\t}\n\n\t\trenderBitmap(mFrameBitmap);\n\n\t\tif (mDisplayWidth != mFrameBitmap.getWidth()) {\n\t\t\tMatrix matrix = new Matrix();\n\t\t\tfloat scaleWidth = ((float) mDisplayWidth) / width;\n\t\t\tfloat scaleHeight = ((float) mDisplayHeight) / height;\n\t\t\tmatrix.postScale(scaleWidth, scaleHeight);\n\t\t\tmatrix.postTranslate((canvas.getWidth() - mDisplayWidth) / 2,\n\t\t\t\t\t(canvas.getHeight() - mDisplayHeight) / 2);\n\t\t\tif (mFrameBitmap.getWidth() < 640) {\n\t\t\t\t// small bitmap, able to 
use filter\n\t\t\t\tPaint paint = new Paint();\n\t\t\t\tpaint.setFilterBitmap(true);\n\t\t\t\tcanvas.drawBitmap(mFrameBitmap, matrix, paint);\n\t\t\t} else {\n\t\t\t\tcanvas.drawBitmap(mFrameBitmap, matrix, null);\n\t\t\t}\n\t\t} else {\n\t\t\tcanvas.drawBitmap(mFrameBitmap,\n\t\t\t\t\t(canvas.getWidth() - mDisplayWidth) / 2,\n\t\t\t\t\t(canvas.getHeight() - mDisplayHeight) / 2, null);\n\t\t}\n\n\t\tif (mShowInfo) {\n\t\t\tPaint paint = new Paint();\n\t\t\tpaint.setColor(Color.WHITE);\n\t\t\tpaint.setTextSize(40);\n\t\t\tString info = \"\";\n\t\t\tif (width > 0) {\n\t\t\t\tinfo += (\"Video Size:\" + width + \"x\" + height);\n\t\t\t}\n\t\t\tif (mDisplayFPS > 0) {\n\t\t\t\tinfo += (\"    Display FPS:\" + mDisplayFPS);\n\t\t\t}\n\t\t\tif (mDisplayAvgFPS > 0) {\n\t\t\t\tinfo += String.format(\"    Average FPS:%.2f\",\n\t\t\t\t\t\tmDisplayAvgFPS / 4096.0);\n\t\t\t}\n\t\t\tif (mDecodeFPS > 0) {\n\t\t\t\tinfo += (\"    Decode FPS:\" + mDecodeFPS);\n\t\t\t}\n\t\t\tcanvas.drawText(info, 20, 60, paint);\n\t\t\tinfo = \"\";\n\t\t\tif (mBitrateVideo > 0) {\n\t\t\t\tinfo += \"Bitrate: video \" + Integer.toString(mBitrateVideo);\n\t\t\t}\n\t\t\tif (mBitrateAudio > 0) {\n\t\t\t\tinfo += \", audio \" + Integer.toString(mBitrateAudio);\n\t\t\t}\n\t\t\tif (mBitrateVideo > 0 || mBitrateAudio > 0) {\n\t\t\t\tinfo += \", total \"\n\t\t\t\t\t\t+ Integer.toString(mBitrateVideo + mBitrateAudio)\n\t\t\t\t\t\t+ \" kbit/s\";\n\t\t\t}\n\t\t\tcanvas.drawText(info, 20, 100, paint);\n\t\t}\n\n\t\tmSurface.unlockCanvasAndPost(canvas);\n\n\t\treturn 0;\n\t}\n\n\t/**\n\t * Called from native code when an interesting event happens.\n\t */\n\tpublic void postEventFromNative(int what, int arg1, int arg2) {\n\t\tswitch (what) {\n\t\tcase MEDIA_INFO_FRAMERATE_VIDEO:\n\t\t\tmDisplayFPS = arg1;\n\t\t\tmDisplayAvgFPS = arg2;\n\t\t\tif (mShowInfo) {\n\t\t\t\tmShowInfoGL = true;\n\t\t\t}\n\t\t\tbreak;\n\t\tcase MEDIA_INFO_END_OF_FILE:\n\t\t\tfinal int frame_num = arg1;\n\t\t\tmMainHandler.post(new Runnable() 
{\n\t\t\t\t@Override\n\t\t\t\tpublic void run() {\n\t\t\t\t\tif (mListener != null) {\n\t\t\t\t\t\tmListener.onCompletion(frame_num);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t});\n\n\t\t\tbreak;\n\t\t}\n\t}\n\n\t// set output file name\n\tpublic void setOutput(String outputFileName) {\n\t\tnative_set_output(outputFileName);\n\t}\n\n\tprivate native void native_init();\n\n\tprivate native int native_prepare(Context context,int decoderType, int disableRender, int threadNum, float renderFPS);\n\n\tprivate native int native_start();\n\n\tprivate native int native_stop();\n\n\tprivate native int native_pause();\n\n\tprivate native int native_go();\n\n\tprivate native int native_seekTo(int msec);\n\n\tprivate native static int hasNeon();\n\n\tpublic native int setDataSource(String path);\n\n\tpublic native int getVideoWidth();\n\n\tpublic native int getVideoHeight();\n\n\tpublic native boolean isPlaying();\n\n\tpublic native int getCurrentPosition();\n\n\tpublic native float getDuration();\n\n\tpublic native float getDecodeTime();\n\n\tpublic native float getDecodeFPS();\n\n\tprivate native static void renderBitmap(Bitmap bitmap);\n\n\tpublic native void native_set_output(String output);\n\n\tpublic native String getVersion();\n\n\tstatic {\n\t\tSystem.loadLibrary(\"lenthevcdec\");\n\t\tSystem.loadLibrary(\"jniplayer\");\n\t}\n\n}\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/encoder/Encoder.java",
    "content": "package com.ksyun.media.ksy265codec.demo.encoder;\n\nimport android.content.Context;\n\nimport com.ksyun.media.ksy265codec.demo.ui.EncoderSettings;\n\n/**\n * Created by sujia on 2017/3/29.\n */\n\npublic class Encoder {\n    private EncoderWrapper mWrapper;\n\n    public Encoder(EncoderSettings settings) {\n        mWrapper = new EncoderWrapper(settings);\n    }\n\n    //return -1 if failed\n    public int open(String path) {\n        if (mWrapper != null) {\n            return mWrapper.open(path);\n        }\n        return -1;\n    }\n\n    //return -1 if failed\n    public int encode(Context context) {\n        if (mWrapper != null) {\n            return mWrapper.encode(context);\n        }\n        return -1;\n    }\n\n    public int getEncodedFrameNum() {\n        if (mWrapper != null) {\n            return mWrapper.getEncodedFrameNum();\n        }\n        return 0;\n    }\n\n    public float getEncodeFPS() {\n        if (mWrapper != null) {\n            return mWrapper.getEncodeFPS();\n        }\n        return 0;\n    }\n\n    public float getCompressRatio() {\n        if (mWrapper != null) {\n            return mWrapper.getCompressRatio();\n        }\n        return 1;\n    }\n\n    public float getEncodeTime() {\n        if (mWrapper != null) {\n            return mWrapper.getEncodeTime();\n        }\n        return 0;\n    }\n\n    public double getPSNR() {\n        if (mWrapper != null) {\n            return mWrapper.getPSNR();\n        }\n        return 0;\n    }\n\n    public String getVersion() {\n        if (mWrapper != null) {\n            return mWrapper.getVersion();\n        }\n        return \"0.1\";\n    }\n\n    public float getEncodeBitrate() {\n        if (mWrapper != null) {\n            return mWrapper.getEncodeBitrate();\n        }\n        return 0;\n    }\n\n    public float getDuration() {\n        if (mWrapper != null) {\n            return mWrapper.getDuration();\n        }\n        return 0;\n    }\n\n    public 
String getInputFilePath() {\n        if (mWrapper != null) {\n            return mWrapper.getInputFilePath();\n        }\n        return null;\n    }\n\n    public String getOutputFilePath() {\n        if (mWrapper != null) {\n            return mWrapper.getOutputFilePath();\n        }\n        return null;\n    }\n}\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/encoder/EncoderWrapper.java",
    "content": "package com.ksyun.media.ksy265codec.demo.encoder;\n\nimport android.content.Context;\n\nimport com.ksyun.media.ksy265codec.demo.ui.EncoderSettings;\n\nimport java.io.File;\n\n/**\n * Created by sujia on 2017/3/29.\n */\n\npublic class EncoderWrapper {\n    private String mInputFilePath;\n    private String mOutputFilePath;\n\n    private EncoderSettings mSettings;\n\n    private long mInstance = 0;\n\n    public EncoderWrapper(EncoderSettings settings) {\n        this.mSettings = settings;\n        mInstance = native_init();\n    }\n\n    //return -1 if failed\n    public int open(String path) {\n        if (path != null && path.endsWith(\".yuv\")) {\n            mInputFilePath = path;\n            return native_open(mInstance, mInputFilePath);\n        }\n        return -1;\n    }\n\n    //return -1 if failed\n    public int encode(Context context) {\n        if (mSettings.getEncoderName().equals(EncoderSettings.Encoders[0])) {//KSC265\n            int dotIndex = mInputFilePath.lastIndexOf(\".\");\n            String fileName = mInputFilePath.substring(0, dotIndex);\n            mOutputFilePath = fileName + \".265\";\n\n            return native_ksy265_encoder(context,mInstance, mOutputFilePath,\n                    mSettings.getProfile(), mSettings.getDelay(),\n                    mSettings.getWidth(), mSettings.getHeight(),\n                    mSettings.getFps(), mSettings.getBitrate(),\n                    mSettings.getThreads());\n        } else if(mSettings.getEncoderName().equals(EncoderSettings.Encoders[1])) {//x264\n            int dotIndex = mInputFilePath.lastIndexOf(\".\");\n            String fileName = mInputFilePath.substring(0, dotIndex);\n            mOutputFilePath = fileName + \".264\";\n\n            return native_x264_encode(mInstance, mOutputFilePath,\n                    mSettings.getProfile(), mSettings.getDelay(),\n                    mSettings.getWidth(), mSettings.getHeight(),\n                    mSettings.getFps(), 
mSettings.getBitrate(),\n                    mSettings.getThreads());\n        }\n        return -1;\n    }\n\n    public String getInputFilePath() {\n        return mInputFilePath;\n    }\n\n    public String getOutputFilePath() {\n        return mOutputFilePath;\n    }\n\n    public float getEncodeFPS() {\n        return native_get_real_fps(mInstance);\n    }\n\n    public int getEncodedFrameNum() {\n        return native_get_encoded_frame_num(mInstance);\n    }\n\n    public float getCompressRatio() {\n        if (mInputFilePath == null ||\n                mOutputFilePath == null) {\n            return 0;\n        } else {\n            long inFileLength = new File(mInputFilePath).length();\n            long outFileLength = new File(mOutputFilePath).length();\n            if (outFileLength != 0) {\n                return inFileLength / outFileLength;\n            } else {\n                return 0;\n            }\n        }\n    }\n\n    public float getEncodeTime() {\n        return native_get_real_time(mInstance);\n    }\n\n    public double getPSNR() {\n        return native_get_psnr(mInstance);\n    }\n\n    public float getDuration() {\n        return getEncodedFrameNum() / mSettings.getFps();\n    }\n\n    public float getEncodeBitrate() {\n        float encodeTime = getDuration();\n        if (mOutputFilePath !=null &&\n                encodeTime != 0) {\n            long outFileLength = new File(mOutputFilePath).length();\n            return (outFileLength * 8) / encodeTime / 1000;\n        } else {\n            return 0;\n        }\n    }\n\n    public String getVersion() {\n        if (mSettings.getEncoderName().equals(EncoderSettings.Encoders[0])) {//KSC265\n            return native_get_ksy265_version();\n        } else if (mSettings.getEncoderName().equals(EncoderSettings.Encoders[1])) {//x264\n            return native_get_x264_version();\n        }\n        return \"0.1\";\n    }\n\n    public native long native_init();\n\n    public native int 
native_open(long ptr, String path);\n\n    public native int native_x264_encode(long ptr, String path,\n                                         String profile, String delay,\n                                         int width, int height,\n                                         Float fps, int bitrate, int threads);\n\n    public native int native_ksy265_encoder(Context context,long ptr, String outputFilePath,\n                                            String profile, String delay,\n                                            int width, int height,\n                                            Float fps, int bitrate, int threads);\n\n    public native float native_get_real_fps(long ptr);\n\n    public native int native_get_encoded_frame_num(long ptr);\n\n    public native String native_get_x264_version();\n\n    public native String native_get_ksy265_version();\n\n    public native float native_get_real_time(long ptr);\n\n    public native float native_get_psnr(long ptr);\n\n    static {\n        System.loadLibrary(\"native-lib\");\n    }\n}\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/BaseFragment.java",
    "content": "package com.ksyun.media.ksy265codec.demo.ui;\n\nimport android.content.Intent;\nimport android.net.Uri;\nimport android.os.Bundle;\nimport android.support.v4.app.Fragment;\nimport android.text.method.ScrollingMovementMethod;\nimport android.util.Log;\nimport android.view.LayoutInflater;\nimport android.view.SurfaceView;\nimport android.view.View;\nimport android.view.ViewGroup;\nimport android.widget.Button;\nimport android.widget.EditText;\nimport android.widget.TextView;\n\nimport com.ipaulpro.afilechooser.FileChooserActivity;\nimport com.ipaulpro.afilechooser.utils.FileUtils;\nimport com.ksyun.media.ksy265codec.demo.R;\n\nimport static android.app.Activity.RESULT_OK;\nimport static android.content.ContentValues.TAG;\n\n/**\n * Created by sujia on 2017/3/27.\n */\n\npublic class BaseFragment extends Fragment {\n    private static final int REQUEST_CODE = 6384; // onActivityResult request code\n\n    protected Button mSettingButton;\n    protected Button mHelpButton;\n    protected Button mNavButton;\n    protected Button mStartButton;\n\n    private ButtonObserver mButtonObserver;\n\n    protected EditText mFilePathEditTxt;\n    protected String mInputFilePath;\n    protected String mOutputFilePath;\n\n    protected TextView mTitleText;\n    protected TextView mInfoText;\n\n    protected SurfaceView mSurfaceView;\n\n    @Override\n    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {\n        View view = inflater.inflate(R.layout.fragment_item, null);\n\n        mTitleText =  (TextView) view.findViewById(R.id.title_txt);\n        mInfoText = (TextView) view.findViewById(R.id.info_txt);\n        mInfoText.setMovementMethod(ScrollingMovementMethod.getInstance());\n\n        mButtonObserver = new ButtonObserver();\n\n        mSettingButton = (Button) view.findViewById(R.id.settings);\n        mSettingButton.setOnClickListener(mButtonObserver);\n\n        mHelpButton = (Button) 
view.findViewById(R.id.help);\n        mHelpButton.setOnClickListener(mButtonObserver);\n\n        mNavButton = (Button) view.findViewById(R.id.nav);\n        mNavButton.setOnClickListener(mButtonObserver);\n\n        mStartButton = (Button) view.findViewById(R.id.start);\n        mStartButton.setOnClickListener(mButtonObserver);\n\n        mFilePathEditTxt = (EditText) view.findViewById(R.id.filepath);\n\n        mSurfaceView = (SurfaceView) view.findViewById(R.id.surface_view);\n        mSurfaceView.setVisibility(View.GONE);\n        return view;\n    }\n\n    private class ButtonObserver implements View.OnClickListener {\n        @Override\n        public void onClick(View view) {\n            switch (view.getId()) {\n                case R.id.settings:\n                    onSettingsClicked();\n                    break;\n                case R.id.help:\n                    onHelpClicked();\n                    break;\n                case R.id.nav:\n                    onNavClicked();\n                    break;\n                case R.id.start:\n                    onStartClicked();\n                    break;\n                default:\n                    break;\n            }\n        }\n    }\n\n    protected void onSettingsClicked() {\n    }\n\n    protected void onHelpClicked() {\n    }\n\n    protected void onNavClicked() {\n        showChooser();\n    }\n\n    private void showChooser() {\n        //set file filter\n        FileUtils.setFileFilter(new FileUtils.FileFilterBySuffixs(\"yuv|264|h264|avc|265|hevc|h265|hm91|hm10|bit|hvc\"));\n        Intent intent = new Intent(getContext(), FileChooserActivity.class);\n        startActivityForResult(intent, REQUEST_CODE);\n    }\n\n    @Override\n    public void onActivityResult(int requestCode, int resultCode, Intent data) {\n        switch (requestCode) {\n            case REQUEST_CODE:\n                // If the file selection was successful\n                if (resultCode == RESULT_OK) {\n                
    if (data != null) {\n                        // Get the URI of the selected file\n                        final Uri uri = data.getData();\n                        Log.i(TAG, \"Uri = \" + uri.toString());\n                        try {\n                            // Get the file path from the URI\n                            mInputFilePath = FileUtils.getPath(getContext(), uri);\n                            mFilePathEditTxt.setText(mInputFilePath);\n\n                        } catch (Exception e) {\n                            Log.e(TAG, \"File select error: \" + e);\n                        }\n                    }\n                }\n                break;\n        }\n        super.onActivityResult(requestCode, resultCode, data);\n    }\n\n    protected void onStartClicked() {\n    }\n\n    protected void toggleView(boolean enable) {\n        mSettingButton.setEnabled(enable);\n        mNavButton.setEnabled(enable);\n        mStartButton.setEnabled(enable);\n        mHelpButton.setEnabled(enable);\n    }\n}\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/DecoderFragment.java",
    "content": "package com.ksyun.media.ksy265codec.demo.ui;\n\nimport android.os.Bundle;\nimport android.util.Log;\nimport android.view.LayoutInflater;\nimport android.view.SurfaceHolder;\nimport android.view.View;\nimport android.view.ViewGroup;\nimport android.widget.Toast;\n\nimport com.ksyun.media.ksy265codec.demo.decoder.hevdecoder.NativeMediaPlayer;\n\nimport java.io.File;\nimport java.io.FileFilter;\nimport java.util.regex.Pattern;\n\n/**\n * Created by sujia on 2017/3/27.\n */\n\npublic class DecoderFragment extends BaseFragment implements DecoderSettingsFragment.OnSettingsChangeListener,\n        SurfaceHolder.Callback, NativeMediaPlayer.OnCompletionListener {\n    private DecoderSettings mSettings = null;\n\n    private static final String TAG = \"DecoderFragment\";\n    private NativeMediaPlayer mPlayer;\n\n    private boolean mPrepared = false;\n    private int mWidth;\n    private int mHeight;\n\n    @Override\n    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {\n        View view = super.onCreateView(inflater, container, savedInstanceState);\n        mSettings = Settings.getInstance().getDecoderSettings();\n\n        updateUI();\n        mSurfaceView.getHolder().addCallback(this);\n\n        mPlayer = new NativeMediaPlayer();\n        mPlayer.setCompletionListener(this);\n\n        return view;\n    }\n\n    @Override\n    protected void onSettingsClicked() {\n        // Create an instance of the dialog fragment and show it\n        DecoderSettingsFragment settingFragment = new DecoderSettingsFragment();\n        settingFragment.setListener(this);\n        settingFragment.show(this.getFragmentManager(), \"setting dialog\");\n    }\n\n    @Override\n    public void onSettingsChanged(DecoderSettings settings) {\n        this.mSettings = settings;\n        updateUI();\n    }\n\n    @Override\n    protected void onHelpClicked() {\n        // Create an instance of the dialog fragment and show it\n        
HelpFragment settingFragment = new HelpFragment();\n        settingFragment.setType(1);\n        settingFragment.show(getFragmentManager(), \"decode help dialog\");\n    }\n\n    private void updateUI() {\n        mTitleText.setText( mSettings.getDecoderName() + \"解码器\");\n        if (mSettings.getFPS() != -1) {\n            mSurfaceView.setVisibility(View.VISIBLE);\n        } else {\n            mSurfaceView.setVisibility(View.GONE);\n        }\n    }\n\n    /**\n     * Gets the number of cores available in this device, across all processors.\n     * Requires: Ability to peruse the filesystem at \"/sys/devices/system/cpu\"\n     *\n     * @return The number of cores, or 1 if failed to get result\n     */\n    private int getNumCores() {\n        // Private Class to display only CPU devices in the directory listing\n        class CpuFilter implements FileFilter {\n            @Override\n            public boolean accept(File pathname) {\n                // Check if filename is \"cpu\", followed by a single digit number\n                if (Pattern.matches(\"cpu[0-9]+\", pathname.getName())) {\n                    return true;\n                }\n                return false;\n            }\n        }\n\n        try {\n            // Get directory containing CPU info\n            File dir = new File(\"/sys/devices/system/cpu/\");\n            // Filter to only list the devices we care about\n            File[] files = dir.listFiles(new CpuFilter());\n            // Return the number of cores (virtual CPU devices)\n            return files.length;\n        } catch (Exception e) {\n            // Default to return 1 core\n            return 1;\n        }\n    }\n\n    @Override\n    protected void onStartClicked() {\n        if (mSettings == null) {\n            Toast.makeText(getContext(), \"解码参数未配置\",\n                    Toast.LENGTH_SHORT).show();\n            return;\n        }\n\n        if (mInputFilePath == null) {\n            Toast.makeText(getContext(), 
\"请选择输入文件\",\n                    Toast.LENGTH_SHORT).show();\n            return;\n        }\n\n        mPrepared = false;\n\n        mPlayer.init();\n        int ret = mPlayer.setDataSource(mInputFilePath);\n        if (ret != 0) {\n            Toast.makeText(getContext(),\n                    \"请检查输入文件格式\",\n                    Toast.LENGTH_SHORT).show();\n            return;\n        }\n        mPlayer.setDisplay(mSurfaceView.getHolder());\n        mPlayer.setDisplaySize(mWidth, mHeight);\n\n        int num = mSettings.getThreads();\n        if (0 == num) {\n            int cores = getNumCores();// Runtime.getRuntime().availableProcessors();\n            if (cores <= 1)\n                num = 1;\n            else if(mSettings.decoderIndex == 1) { // lenthevcdec\n                num = (cores < 5) ? ((cores * 3 + 1) / 2) : 8;\n            }\n            Log.d(TAG, cores + \" cores detected! use \" + num\n                    + \" threads.\\n\");\n        }\n\n        //0: ksc265\n        //1: lenthevcdec\n        int decoderType = mSettings.decoderIndex == 0 ? 0 : 1;\n        ret = mPlayer.prepare(getContext(),decoderType, mSettings.getFPS() == -1 ? 1 : 0,\n                num, mSettings.getFPS());\n        if ( ret < 0 ) {\n            Toast.makeText(getContext(),\n                    \"打开文件\" + mInputFilePath + \"失败，返回值: \" + ret,\n                    Toast.LENGTH_SHORT).show();\n            return;\n        } else {\n            mPrepared = true;\n        }\n\n        if (mSettings.enableYUVOutput) {\n            int dotIndex = mInputFilePath.lastIndexOf(\".\");\n            String inputFileName = mInputFilePath.substring(0, dotIndex);\n            mOutputFilePath = inputFileName + (mSettings.decoderIndex == 0 ? 
\".ksc\" : \".lent\" ) +\".yuv\";\n            mPlayer.setOutput(mOutputFilePath);\n        }\n\n        toggleView(false);\n        if (mPrepared) {\n            mPlayer.start();\n        }\n    }\n\n    //////////////////////////////////////////\n    //implements SurfaceHolder.Callback\n    @Override\n    public void surfaceCreated(SurfaceHolder surfaceHolder) {\n    }\n\n    @Override\n    public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i1, int i2) {\n        mWidth = i1;\n        mHeight = i2;\n        if (mPlayer != null) {\n            mPlayer.setDisplaySize(mWidth, mHeight);\n        }\n    }\n\n    @Override\n    public void surfaceDestroyed(SurfaceHolder surfaceHolder) {\n        mPlayer.stop();\n    }\n\n    // end of: implements SurfaceHolder.Callback\n    /////////////////////////////////////////////\n\n    @Override\n    public void onCompletion(int frame_count) {\n        updateInfo(frame_count);\n        mPlayer.stop();\n        toggleView(true);\n    }\n\n    private void updateInfo(int frame_num) {\n        String last_info = mInfoText.getText().toString();\n        String info;\n        if (mSettings.enableYUVOutput) {\n            info = String.format(\"解码器版本: %s \\n\" +\n                            \"\\n\" +\n                            \"\\n\" +\n                            \"解码参数: %s -b %s -o %s -threads %d \\n\" +\n                            \"\\n\" +\n                            \"\\n\" +\n                            \"分辨率: %d * %d \\n\" +\n                            \"线程数: %s \\n\" +\n                            \"解码时间: %.2f s\\n\" +\n                            \"解码帧数 %d \\n\" +\n                            \"解码速度 %.2f f/s\\n\" +\n                            \"渲染帧率 %s \\n\",\n                    mPlayer.getVersion(), mSettings.getDecoderName(),\n                    mInputFilePath, mOutputFilePath, mSettings.getThreads(),\n                    mPlayer.getVideoWidth(), mPlayer.getVideoHeight(),\n                    
mSettings.getThreadsStr(), mPlayer.getDecodeTime(),\n                    frame_num, mPlayer.getDecodeFPS(), mSettings.getFPSStr());\n        } else {\n            info = String.format(\"解码器版本: %s \\n\" +\n                            \"\\n\" +\n                            \"\\n\" +\n                            \"解码参数: %s -b %s -threads %d \\n\" +\n                            \"\\n\" +\n                            \"\\n\" +\n                            \"分辨率: %d * %d \\n\" +\n                            \"线程数: %s \\n\" +\n                            \"解码时间: %.2f s\\n\" +\n                            \"解码帧数 %d \\n\" +\n                            \"解码速度 %.2f f/s\\n\" +\n                            \"渲染帧率 %s \\n\",\n                    mPlayer.getVersion(), mSettings.getDecoderName(),\n                    mInputFilePath, mSettings.getThreads(),\n                    mPlayer.getVideoWidth(), mPlayer.getVideoHeight(),\n                    mSettings.getThreadsStr(), mPlayer.getDecodeTime(),\n                    frame_num, mPlayer.getDecodeFPS(), mSettings.getFPSStr());\n        }\n\n        mInfoText.setText(info +\n                \"\\n\" +\n                \"\\n\" +\n                last_info);\n    }\n}\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/DecoderSettings.java",
    "content": "package com.ksyun.media.ksy265codec.demo.ui;\n\nimport android.content.SharedPreferences;\n\n/**\n * Created by sujia on 2017/3/28.\n */\n\npublic class DecoderSettings {\n    public final static String DECODER_SETTINGS_DECODER = \"decoder_settings_decoder\";\n    public final static String DECODER_SETTINGS_THREADS = \"decoder_settings_threads\";\n    public final static String DECODER_SETTINGS_FPS = \"decoder_settings_fps\";\n    public final static String DECODER_SETTINGS_RENDER = \"decoder_settings_render\";\n    public final static String DECODER_SETTINGS_OUTPUT = \"decoder_settings_output\";\n\n    public final static String[] Decoders = new String[] {\"KSC265\", \"lenthevcdec\"};\n    public final static String[] Threads = new String[] {\"0 (auto)\", \"1\", \"2\",\n            \"3\", \"4\", \"5\", \"6\", \"7\", \"8\",\"9\", \"10\"};\n    public final static String[] FPS = new String[] {\"0 (fullspeed)\", \"24\",\n            \"-1 (off)\"};\n\n    public int decoderIndex;\n    public int threadsIndex;\n    public int fpsIndex;//渲染帧率\n    public boolean enableYUVOutput;\n\n    public DecoderSettings() {\n        this.decoderIndex = 0;\n        this.threadsIndex = 0;\n        this.fpsIndex = 0;\n        this.enableYUVOutput = false;\n    }\n\n    public DecoderSettings(SharedPreferences sharedPreferences) {\n        this.decoderIndex = sharedPreferences.getInt(DECODER_SETTINGS_DECODER, 0);\n        this.threadsIndex = sharedPreferences.getInt(DECODER_SETTINGS_THREADS, 0);\n        this.fpsIndex = sharedPreferences.getInt(DECODER_SETTINGS_FPS, 0);\n        this.enableYUVOutput = sharedPreferences.getBoolean(DECODER_SETTINGS_OUTPUT, false);\n    }\n\n    public String getDecoderName() {\n        if (decoderIndex <= Decoders.length -1) {\n            return Decoders[decoderIndex];\n        } else {\n            return \"unknow\";\n        }\n    }\n\n    public int getThreads() {\n        return threadsIndex;\n    }\n\n    public String 
getThreadsStr() {\n        if (threadsIndex <= Threads.length -1) {\n            return Threads[threadsIndex];\n        } else {\n            return \"\";\n        }\n    }\n\n    public int getFPS() {\n        switch (fpsIndex) {\n            case 0:\n                return 0;\n            case 1:\n                return 24;\n            case 2:\n                return -1;\n            default:\n                return 0;\n        }\n    }\n\n    public String getFPSStr() {\n        if (fpsIndex <= FPS.length -1) {\n            return FPS[fpsIndex];\n        } else {\n            return \"\";\n        }\n    }\n}\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/DecoderSettingsFragment.java",
    "content": "package com.ksyun.media.ksy265codec.demo.ui;\n\nimport android.app.Dialog;\nimport android.os.Bundle;\nimport android.support.annotation.NonNull;\nimport android.support.v4.app.DialogFragment;\nimport android.support.v7.app.AlertDialog;\nimport android.view.LayoutInflater;\nimport android.view.View;\nimport android.widget.ArrayAdapter;\nimport android.widget.Button;\nimport android.widget.RadioButton;\nimport android.widget.Spinner;\n\nimport com.ksyun.media.ksy265codec.demo.R;\n\n/**\n * Created by sujia on 2017/3/28.\n */\n\npublic class DecoderSettingsFragment extends DialogFragment {\n    private Spinner mDecoderSpinner;\n    private Spinner mThreadSpinner;\n    private Spinner mFpsSpinner;\n    private Button mButton;\n    private RadioButton mEnableOutputButton;\n    private RadioButton mDisableOutputButton;\n\n    private DecoderSettings mSettings;\n\n    public interface OnSettingsChangeListener {\n        public void onSettingsChanged(DecoderSettings settings);\n    }\n\n    // Use this instance of the interface to deliver action events\n    OnSettingsChangeListener mListener;\n\n    public DecoderSettingsFragment() {\n        mSettings = Settings.getInstance().getDecoderSettings();\n    }\n\n    public void setListener(OnSettingsChangeListener listener) {\n        mListener = listener;\n    }\n\n    @NonNull\n    @Override\n    public Dialog onCreateDialog(Bundle savedInstanceState) {\n        AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());\n        // Get the layout inflater\n        LayoutInflater inflater = getActivity().getLayoutInflater();\n\n        // Inflate and set the layout for the dialog\n        // Pass null as the parent view because its going in the dialog layout\n        View view = inflater.inflate(R.layout.decoder_settings, null);\n\n        mSettings = Settings.getInstance().getDecoderSettings();\n        initView(view);\n\n        builder.setView(view);\n\n        return builder.create();\n    
}\n\n    private void initView(View view) {\n        ArrayAdapter<String> decodersAdapter = new ArrayAdapter<>(getContext(),\n                android.R.layout.simple_spinner_item, DecoderSettings.Decoders);\n        decodersAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);\n        mDecoderSpinner = (Spinner) view.findViewById(R.id.decoder_settings_decoder_spinner);\n        mDecoderSpinner.setAdapter(decodersAdapter);\n        if (mSettings.decoderIndex <= DecoderSettings.Decoders.length) {\n            mDecoderSpinner.setSelection(mSettings.decoderIndex);\n        }\n\n        ArrayAdapter<String> threadsAdapter = new ArrayAdapter<>(getContext(),\n                android.R.layout.simple_spinner_item, DecoderSettings.Threads);\n        threadsAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);\n        mThreadSpinner = (Spinner) view.findViewById(R.id.decoder_settings_threads_spinner);\n        mThreadSpinner.setAdapter(threadsAdapter);\n        if (mSettings.threadsIndex <= DecoderSettings.Threads.length) {\n            mThreadSpinner.setSelection(mSettings.threadsIndex);\n        }\n\n        ArrayAdapter<String> fpsAdapter = new ArrayAdapter<>(getContext(),\n                android.R.layout.simple_spinner_item, DecoderSettings.FPS);\n        fpsAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);\n        mFpsSpinner = (Spinner) view.findViewById(R.id.decoder_settings_fps_spinner);\n        mFpsSpinner.setAdapter(fpsAdapter);\n        if (mSettings.fpsIndex <= DecoderSettings.FPS.length) {\n            mFpsSpinner.setSelection(mSettings.fpsIndex);\n        }\n\n        mEnableOutputButton = (RadioButton) view.findViewById(R.id.decoder_settings_enable_yuv_output);\n        mDisableOutputButton = (RadioButton) view.findViewById(R.id.decoder_settings_disable_yuv_output);\n        if (mSettings.enableYUVOutput) {\n            mEnableOutputButton.setChecked(true);\n            
mDisableOutputButton.setChecked(false);\n        } else {\n            mEnableOutputButton.setChecked(false);\n            mDisableOutputButton.setChecked(true);\n        }\n\n        mButton = (Button) view.findViewById(R.id.decoder_settings_sure);\n        mButton.setOnClickListener(new View.OnClickListener() {\n            @Override\n            public void onClick(View view) {\n                mSettings.decoderIndex = mDecoderSpinner.getSelectedItemPosition();\n                mSettings.threadsIndex = mThreadSpinner.getSelectedItemPosition();\n                mSettings.fpsIndex = mFpsSpinner.getSelectedItemPosition();\n                mSettings.enableYUVOutput = mEnableOutputButton.isChecked();\n\n                Settings.getInstance().saveDecoderSettings(mSettings);\n                if (mListener != null) {\n                    mListener.onSettingsChanged(mSettings);\n                }\n\n                dismiss();\n            }\n        });\n    }\n}\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/EncoderFragment.java",
    "content": "package com.ksyun.media.ksy265codec.demo.ui;\n\nimport android.os.AsyncTask;\nimport android.os.Bundle;\nimport android.view.LayoutInflater;\nimport android.view.View;\nimport android.view.ViewGroup;\nimport android.widget.Toast;\n\nimport com.ksyun.media.ksy265codec.demo.encoder.Encoder;\n\n/**\n * Created by sujia on 2017/3/27.\n */\n\npublic class EncoderFragment extends BaseFragment implements EncoderSettingsFragment.OnSettingsChangeListener {\n\n    private EncoderSettings mSettings = null;\n\n    @Override\n    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {\n        View view = super.onCreateView(inflater, container, savedInstanceState);\n        mSettings = Settings.getInstance().getEncoderSettings();\n        mTitleText.setText( mSettings.getEncoderName() + \"编码器\");\n        return view;\n    }\n\n    @Override\n    protected void onSettingsClicked() {\n        // Create an instance of the dialog fragment and show it\n        EncoderSettingsFragment settingFragment = new EncoderSettingsFragment();\n        settingFragment.setListener(this);\n        settingFragment.show(this.getFragmentManager(), \"encoder setting dialog\");\n    }\n\n    @Override\n    public void onSettingsChanged(EncoderSettings settings) {\n        mSettings = settings;\n        mTitleText.setText( mSettings.getEncoderName() + \"编码器\");\n    }\n\n    @Override\n    protected void onHelpClicked() {\n        // Create an instance of the dialog fragment and show it\n        HelpFragment settingFragment = new HelpFragment();\n        settingFragment.setType(0);\n        settingFragment.show(getFragmentManager(), \"encode help dialog\");\n    }\n\n    @Override\n    protected void onStartClicked() {\n        if (mInputFilePath == null) {\n            Toast.makeText(getContext(), \"请选择yuv文件\",\n                    Toast.LENGTH_SHORT).show();\n            return;\n        }\n\n        EncodeTask task = new EncodeTask();\n    
    task.execute();\n    }\n\n    private class EncodeTask extends AsyncTask<Void, Void, Void> {\n\n        private ProgressDialogFragment mProgressDialog;\n        private Encoder mEncoder;\n\n        @Override\n        protected void onPreExecute() {\n            mEncoder = new Encoder(mSettings);\n            //Create progress dialog here and show it\n            mProgressDialog = new ProgressDialogFragment();\n            mProgressDialog.show(getFragmentManager(), \"show progress dialog\");\n\n            toggleView(false);\n        }\n\n        @Override\n        protected Void doInBackground(Void... params) {\n\n            // Execute query here\n            encodeYUV(mEncoder);\n            return null;\n\n        }\n\n        @Override\n        protected void onPostExecute(Void result) {\n            super.onPostExecute(result);\n\n            //update your listView adapter here\n            //Dismiss your dialog\n            toggleView(true);\n            mProgressDialog.dismissAllowingStateLoss();\n            updateInfo(mEncoder);\n        }\n\n    }\n\n\n    private void encodeYUV(Encoder encoder) {\n        if(encoder.open(mInputFilePath) < 0) {\n            getActivity().runOnUiThread(new Runnable() {\n                @Override\n                public void run() {\n                    Toast.makeText(getContext(),\n                            \"打开yuv文件错误\",\n                            Toast.LENGTH_SHORT).show();\n                }\n            });\n            return;\n        }\n\n        if(mSettings.getHeight() == 0 ||\n                mSettings.getWidth() == 0 ||\n                Integer.parseInt(mSettings.bitrate) <= 0 ||\n                Integer.parseInt(mSettings.fps) <= 0) {\n            getActivity().runOnUiThread(new Runnable() {\n                @Override\n                public void run() {\n                    Toast.makeText(getContext(),\n                            \"请检查编码参数设置\",\n                            
Toast.LENGTH_SHORT).show();\n                }\n            });\n            return;\n        }\n\n        if(encoder.encode(getContext()) < 0) {\n            getActivity().runOnUiThread(new Runnable() {\n                @Override\n                public void run() {\n                    Toast.makeText(getContext(),\n                            \"编码失败，请检查输入文件格式\",\n                            Toast.LENGTH_SHORT).show();\n                }\n            });\n            return;\n        }\n    }\n\n    private void updateInfo(Encoder encoder) {\n        String last_info = mInfoText.getText().toString();\n\n        String info;\n        if (mSettings.getEncoderName().equals(EncoderSettings.Encoders[0])) {//KSC265\n            info = String.format(\"编码器版本: %s \\n \" +\n                            \" \\n\" +\n                            \"编码参数: %s -i %s -preset %s -latency %s\" +\n                            \" -wdt %d -hgt %d -fr %.2f -threads %d -br %d -b %s \\n\" +\n                            \" \\n\" +\n                            \"编码时间: %.2f s \\n\" +\n                            \"编码帧数: %d \\n\" +\n                            \"编码速度: %.2f f/s \\n\" +\n                            \"压缩比: %.2f \\n\" +\n                            \"PSNR: %.2f \\n\" +\n                            \"\\n \" +\n                            \"视频信息 \\n \" +\n                            \"码率: %.2f kbps \\n\" +\n                            \"分辨率: %s \\n\" +\n                            \"帧率: %.2f f/s\\n\" +\n                            \"文件总时长: %.2f s\\n\",\n                    encoder.getVersion(), mSettings.getEncoderName(),\n                    encoder.getInputFilePath(), mSettings.getProfile(), mSettings.getDelay(),\n                    mSettings.getWidth(), mSettings.getHeight(), mSettings.getFps(),\n                    mSettings.getThreads(), mSettings.getBitrate(), encoder.getOutputFilePath(),\n                    encoder.getEncodeTime(), encoder.getEncodedFrameNum(),\n               
     encoder.getEncodeFPS(), encoder.getCompressRatio(),\n                    encoder.getPSNR(),\n                    encoder.getEncodeBitrate(), mSettings.getResolution(),\n                    mSettings.getFps(), encoder.getDuration());\n        } else {//x264\n            String delayShow;\n            if (mSettings.getDelay().equals(EncoderSettings.Delays[0])) {//zerolatency\n                delayShow = \"--bframes 0 --tune zerolatency\";\n            } else if(mSettings.getDelay().equals(EncoderSettings.Delays[1])) {//livestreaming\n                delayShow = \"--bframes 3\";\n            } else {//offline\n                delayShow = \"--bframes 7\";\n            }\n\n            info = String.format(\"编码器版本: %s \\n \" +\n                            \" \\n\" +\n                            \"编码参数: %s -i %s --preset %s %s \" +\n                            \"--input-res %dx%d --fps %.2f --threads %d --bitrate %d \" +\n                            \"-o %s \\n\" +\n                            \" \\n\" +\n                            \"编码时间: %.2f s \\n\" +\n                            \"编码帧数: %d \\n\" +\n                            \"编码速度: %.2f f/s \\n\" +\n                            \"压缩比: %.2f \\n\" +\n                            \"PSNR: %.2f \\n\" +\n                            \"\\n \" +\n                            \"视频信息 \\n\" +\n                            \"码率: %.2f kbps \\n\" +\n                            \"分辨率: %s \\n\" +\n                            \"帧率: %.2f f/s\\n\" +\n                            \"文件总时长: %.2f s\\n\",\n                    encoder.getVersion(), mSettings.getEncoderName(),\n                    encoder.getInputFilePath(), mSettings.getProfile(), delayShow,\n                    mSettings.getWidth(), mSettings.getHeight(), mSettings.getFps(),\n                    mSettings.getThreads(), mSettings.getBitrate(), encoder.getOutputFilePath(),\n                    encoder.getEncodeTime(), encoder.getEncodedFrameNum(),\n                    
encoder.getEncodeFPS(), encoder.getCompressRatio(),\n                    encoder.getPSNR(),\n                    encoder.getEncodeBitrate(), mSettings.getResolution(),\n                    mSettings.getFps(), encoder.getDuration());\n        }\n\n        mInfoText.setText( info +\n                \"\\n\" +\n                \"\\n\" +\n                \"\\n\" +\n                last_info);\n    }\n}\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/EncoderSettings.java",
    "content": "package com.ksyun.media.ksy265codec.demo.ui;\n\nimport android.content.SharedPreferences;\nimport android.util.Log;\n\n/**\n * Created by sujia on 2017/3/28.\n */\npublic class EncoderSettings {\n    public final static String TAG = \"EncoderSettings\";\n    public final static String ENCODER_SETTINGS_ENCODER = \"encoder_settings_encoder\";\n    public final static String ENCODER_SETTINGS_PROFILE = \"encoder_settings_profile\";\n    public final static String ENCODER_SETTINGS_DELAY = \"encoder_settings_delay\";\n    public final static String ENCODER_SETTINGS_RESOLUTION = \"encoder_settings_resolution\";\n    public final static String ENCODER_SETTINGS_RESOLUTION_IDX = \"encoder_settings_resolution_idx\";\n    public final static String ENCODER_SETTINGS_FPS = \"encoder_settings_fps\";\n    public final static String ENCODER_SETTINGS_THREADS = \"encoder_settings_threads\";\n    public final static String ENCODER_SETTINGS_BITRATE = \"encoder_settings_bitrate\";\n\n    public final static String[] Encoders = new String[] {\"KSC265\", \"x264\"};\n    public final static String[] Profiles = new String[] {\"superfast\", \"veryfast\", \"fast\",\n            \"medium\", \"slow\", \"veryslow\", \"placebo\"};\n    public final static String[] Delays = new String[] {\"zerolatency\", \"livestreaming\",\n            \"offline\"};\n    public final static String[] Resolutions = new String [] {\"1280*720\", \"960*540\", \"640*360\",\n    \"640*480\", \"360*640\", \"368*640\", \"自定义\"};\n\n    public int encoderIndex;\n    public int profileIndex;\n    public int delayIndex;\n    public int resIndex;\n    public String bitrate;\n    public String resolution;\n    public String fps;\n    public String threads;\n\n    public EncoderSettings() {\n        this.encoderIndex = 0;// ksc265\n        this.profileIndex = 1;//veryfast\n        this.delayIndex = 2;//offline\n        this.resolution =  Resolutions[0];\n        this.resIndex = 0;//1280*720\n        this.fps = 
\"15\";\n        this.threads = \"1\";\n        this.bitrate = \"500\";\n    }\n\n    public EncoderSettings(SharedPreferences sharedPreferences) {\n        this.encoderIndex = sharedPreferences.getInt(ENCODER_SETTINGS_ENCODER, 0);\n        this.profileIndex = sharedPreferences.getInt(ENCODER_SETTINGS_PROFILE, 0);\n        this.delayIndex = sharedPreferences.getInt(ENCODER_SETTINGS_DELAY, 0);\n        this.resIndex = sharedPreferences.getInt(ENCODER_SETTINGS_RESOLUTION_IDX, 0);\n        this.resolution = sharedPreferences.getString(ENCODER_SETTINGS_RESOLUTION, Resolutions[0]);\n        this.fps = sharedPreferences.getString(ENCODER_SETTINGS_FPS, \"15\");\n        this.threads = sharedPreferences.getString(ENCODER_SETTINGS_THREADS, \"1\");\n        this.bitrate = sharedPreferences.getString(ENCODER_SETTINGS_BITRATE, \"500\");\n    }\n\n    public String getEncoderName() {\n        if (encoderIndex <= Encoders.length -1) {\n            return Encoders[encoderIndex];\n        } else {\n            return \"unknow\";\n        }\n    }\n\n    public String getProfile() {\n        if (profileIndex <= Profiles.length -1) {\n            return Profiles[profileIndex];\n        } else {\n            return \"\";\n        }\n    }\n\n    public String getDelay() {\n        if (delayIndex <= Delays.length -1) {\n            return Delays[delayIndex];\n        } else {\n            return \"\";\n        }\n    }\n\n    public int getBitrate() {\n        return Integer.parseInt(bitrate);\n    }\n\n    public String getResolution() {\n        if (resIndex < Resolutions.length -1) {\n            return Resolutions[resIndex];\n        } else {\n            return resolution;\n        }\n    }\n\n    public int getWidth() {\n        String[] res = getResolution().split(\"\\\\*\");\n        if (res != null &&\n                res.length == 2) {\n            return Integer.parseInt(res[0]);\n        } else {\n            Log.e(TAG, \"分辨率解析错误，格式必须为 宽*高\");\n            return 0;\n      
  }\n    }\n\n    public int getHeight() {\n        String[] res = getResolution().split(\"\\\\*\");\n        if (res != null &&\n                res.length == 2) {\n            return Integer.parseInt(res[1]);\n        } else {\n            Log.e(TAG, \"分辨率解析错误，格式必须为 宽*高\");\n            return 0;\n        }\n    }\n\n    public Float getFps() {\n        return Float.parseFloat(fps);\n    }\n\n    public int getThreads() {\n        return Integer.parseInt(threads);\n    }\n}"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/EncoderSettingsFragment.java",
    "content": "package com.ksyun.media.ksy265codec.demo.ui;\n\nimport android.app.Dialog;\nimport android.os.Bundle;\nimport android.support.annotation.NonNull;\nimport android.support.v4.app.DialogFragment;\nimport android.support.v7.app.AlertDialog;\nimport android.view.LayoutInflater;\nimport android.view.View;\nimport android.widget.AdapterView;\nimport android.widget.ArrayAdapter;\nimport android.widget.Button;\nimport android.widget.EditText;\nimport android.widget.Spinner;\n\nimport com.ksyun.media.ksy265codec.demo.R;\n\n/**\n * Created by sujia on 2017/3/28.\n */\n\npublic class EncoderSettingsFragment extends DialogFragment {\n    private Spinner mEncoderSpinner;\n    private Spinner mProfileSpinner;\n    private Spinner mDelaySpinner;\n    private EditText mResulutionEditTxt;\n    private Spinner mResSpinner;\n    private EditText mFpsEditTxt;\n    private EditText mThreadsEditTxt;\n    private EditText mBitrateEditTxt;\n    private Button mButton;\n\n    private EncoderSettings mSettings;\n\n    public interface OnSettingsChangeListener {\n        public void onSettingsChanged(EncoderSettings settings);\n    }\n\n    // Use this instance of the interface to deliver action events\n    OnSettingsChangeListener mListener;\n\n    public EncoderSettingsFragment() {\n        mSettings = Settings.getInstance().getEncoderSettings();\n    }\n\n    public void setListener(OnSettingsChangeListener listener) {\n        mListener = listener;\n    }\n\n    @NonNull\n    @Override\n    public Dialog onCreateDialog(Bundle savedInstanceState) {\n        AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());\n        // Get the layout inflater\n        LayoutInflater inflater = getActivity().getLayoutInflater();\n\n        // Inflate and set the layout for the dialog\n        // Pass null as the parent view because its going in the dialog layout\n        View view = inflater.inflate(R.layout.encoder_settings, null);\n\n        mSettings = 
Settings.getInstance().getEncoderSettings();\n        initView(view);\n\n        builder.setView(view);\n\n        return builder.create();\n    }\n\n    private void initView(View view) {\n        ArrayAdapter<String> encodersAdapter = new ArrayAdapter<>(getContext(),\n                android.R.layout.simple_spinner_item, EncoderSettings.Encoders);\n        encodersAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);\n        mEncoderSpinner = (Spinner) view.findViewById(R.id.encoder_settings_encoder_spinner);\n        mEncoderSpinner.setAdapter(encodersAdapter);\n        if (mSettings.encoderIndex <= EncoderSettings.Encoders.length) {\n            mEncoderSpinner.setSelection(mSettings.encoderIndex);\n        }\n\n        ArrayAdapter<String> profilesAdapter = new ArrayAdapter<>(getContext(),\n                android.R.layout.simple_spinner_item, EncoderSettings.Profiles);\n        profilesAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);\n        mProfileSpinner = (Spinner) view.findViewById(R.id.encoder_settings_profile_spinner);\n        mProfileSpinner.setAdapter(profilesAdapter);\n        if (mSettings.profileIndex <= EncoderSettings.Profiles.length) {\n            mProfileSpinner.setSelection(mSettings.profileIndex);\n        }\n\n        ArrayAdapter<String> delayAdapter = new ArrayAdapter<>(getContext(),\n                android.R.layout.simple_spinner_item, EncoderSettings.Delays);\n        delayAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);\n        mDelaySpinner = (Spinner) view.findViewById(R.id.encoder_settings_delay_spinner);\n        mDelaySpinner.setAdapter(delayAdapter);\n        if (mSettings.delayIndex <= EncoderSettings.Delays.length) {\n            mDelaySpinner.setSelection(mSettings.delayIndex);\n        }\n\n        mResulutionEditTxt = (EditText) view.findViewById(R.id.encoder_settings_resolution);\n        
mResulutionEditTxt.setText(mSettings.resolution);\n        mResulutionEditTxt.setVisibility(View.VISIBLE);\n        if (mSettings.resIndex == EncoderSettings.Resolutions.length -1) {\n            mResulutionEditTxt.setVisibility(View.VISIBLE);\n            mResulutionEditTxt.requestFocus();\n        } else {\n            mResulutionEditTxt.setVisibility(View.GONE);\n        }\n\n        ArrayAdapter<String> resAdapter  = new ArrayAdapter<>(getContext(),\n                android.R.layout.simple_spinner_item, EncoderSettings.Resolutions);\n        resAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);\n        mResSpinner = (Spinner) view.findViewById(R.id.encoder_settings_resolution_spinner);\n        mResSpinner.setAdapter(resAdapter);\n        if (mSettings.resIndex <= EncoderSettings.Resolutions.length -1) {\n            mResSpinner.setSelection(mSettings.resIndex);\n        }\n        mResSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {\n            @Override\n            public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {\n                if (mResulutionEditTxt == null) {\n                    return;\n                }\n\n                if (position == EncoderSettings.Resolutions.length -1) {\n                    mResulutionEditTxt.setVisibility(View.VISIBLE);\n                    mResulutionEditTxt.requestFocus();\n                } else {\n                    mResulutionEditTxt.setVisibility(View.GONE);\n                }\n            }\n\n            @Override\n            public void onNothingSelected(AdapterView<?> parent) {\n\n            }\n        });\n\n        mFpsEditTxt = (EditText) view.findViewById(R.id.encoder_settings_fps);\n        mFpsEditTxt.setText(mSettings.fps);\n\n        mThreadsEditTxt = (EditText) view.findViewById(R.id.encoder_settings_threads);\n        mThreadsEditTxt.setText(mSettings.threads);\n\n        mBitrateEditTxt = (EditText) 
view.findViewById(R.id.encoder_settings_bitrate);\n        mBitrateEditTxt.setText(mSettings.bitrate);\n\n        mButton = (Button) view.findViewById(R.id.encoder_settings_sure);\n        mButton.setOnClickListener(new View.OnClickListener() {\n            @Override\n            public void onClick(View view) {\n                mSettings.encoderIndex = mEncoderSpinner.getSelectedItemPosition();\n                mSettings.profileIndex = mProfileSpinner.getSelectedItemPosition();\n                mSettings.delayIndex = mDelaySpinner.getSelectedItemPosition();\n                mSettings.resolution = mResulutionEditTxt.getText().toString();\n                mSettings.resIndex = mResSpinner.getSelectedItemPosition();\n                mSettings.fps = mFpsEditTxt.getText().toString();\n                mSettings.threads = mThreadsEditTxt.getText().toString();\n                mSettings.bitrate = mBitrateEditTxt.getText().toString();\n\n                Settings.getInstance().saveEncoderSettings(mSettings);\n\n                if (mListener != null) {\n                    mListener.onSettingsChanged(mSettings);\n                }\n\n                dismiss();\n            }\n        });\n    }\n}\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/HelpFragment.java",
    "content": "package com.ksyun.media.ksy265codec.demo.ui;\n\nimport android.app.Dialog;\nimport android.os.Bundle;\nimport android.support.annotation.NonNull;\nimport android.support.v4.app.DialogFragment;\nimport android.support.v7.app.AlertDialog;\nimport android.view.LayoutInflater;\nimport android.view.View;\nimport android.widget.TextView;\n\nimport com.ksyun.media.ksy265codec.demo.R;\n\n/**\n * Created by sujia on 2017/3/28.\n */\n\npublic class HelpFragment extends DialogFragment {\n\n    private int type;//0 encode, 1 decode\n\n    public HelpFragment() {\n    }\n\n    public void setType(int type) {\n        this.type = type;\n    }\n\n    @NonNull\n    @Override\n    public Dialog onCreateDialog(Bundle savedInstanceState) {\n        AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());\n        // Get the layout inflater\n        LayoutInflater inflater = getActivity().getLayoutInflater();\n\n        // Inflate and set the layout for the dialog\n        // Pass null as the parent view because it's going in the dialog layout\n        View view = inflater.inflate(R.layout.help, null);\n\n        if (type == 0) {\n            TextView info = (TextView) view.findViewById(R.id.help_info);\n            info.setText(R.string.encode_help_info);\n        } else if (type == 1) {\n            TextView info = (TextView) view.findViewById(R.id.help_info);\n            info.setText(R.string.decode_help_info);\n        }\n\n\n        builder.setView(view);\n\n        return builder.create();\n    }\n}\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/KSY265CodecDemoApp.java",
    "content": "package com.ksyun.media.ksy265codec.demo.ui;\n\nimport android.app.Application;\n\n/**\n * Created by sujia on 2017/3/28.\n */\n\npublic class KSY265CodecDemoApp extends Application {\n    @Override\n    public void onCreate() {\n\n        super.onCreate();\n\n        Settings.getInstance().init(this);\n    }\n}\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/MainActivity.java",
    "content": "package com.ksyun.media.ksy265codec.demo.ui;\n\nimport android.os.Bundle;\nimport android.support.v4.app.Fragment;\nimport android.support.v4.app.FragmentActivity;\nimport android.support.v4.app.FragmentTabHost;\nimport android.support.v4.view.ViewPager;\nimport android.view.LayoutInflater;\nimport android.view.View;\nimport android.view.ViewGroup;\nimport android.widget.ImageView;\nimport android.widget.TabHost;\nimport android.widget.TabWidget;\nimport android.widget.TextView;\n\nimport com.ksyun.media.ksy265codec.demo.R;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\npublic class MainActivity extends FragmentActivity implements\n        ViewPager.OnPageChangeListener, TabHost.OnTabChangeListener {\n\n    private FragmentTabHost mTabHost;\n    private LayoutInflater mLayoutInflater;\n    private Class fragmentArray[] = { EncoderFragment.class, DecoderFragment.class };\n    private int tab_imageViewArray[] = { R.drawable.tab_home_btn, R.drawable.tab_home_btn };\n    private String tab_textViewArray[] = { \"编码\", \"解码\"};\n    private List<Fragment> list = new ArrayList<Fragment>();\n    private ViewPager mViewPager;\n\n    @Override\n    protected void onCreate(Bundle savedInstanceState) {\n        super.onCreate(savedInstanceState);\n        setContentView(R.layout.activity_main);\n        initView();//初始化控件\n        initPage();//初始化页面\n    }\n\n    //    控件初始化控件\n    private void initView() {\n        mViewPager = (ViewPager) findViewById(R.id.pager);\n\n        /*实现OnPageChangeListener接口,目的是监听Tab选项卡的变化，然后通知ViewPager适配器切换界面*/\n        /*简单来说,是为了让ViewPager滑动的时候能够带着底部菜单联动*/\n\n        mViewPager.addOnPageChangeListener(this);//设置页面切换时的监听器\n        mLayoutInflater = LayoutInflater.from(this);//加载布局管理器\n\n        /*实例化FragmentTabHost对象并进行绑定*/\n        mTabHost = (FragmentTabHost) findViewById(android.R.id.tabhost);//绑定tahost\n        mTabHost.setup(this, getSupportFragmentManager(), R.id.pager);//绑定viewpager\n\n        
/*实现setOnTabChangedListener接口,目的是为监听界面切换），然后实现TabHost里面图片文字的选中状态切换*/\n        /*简单来说,是为了当点击下面菜单时,上面的ViewPager能滑动到对应的Fragment*/\n        mTabHost.setOnTabChangedListener(this);\n\n        int count = tab_textViewArray.length;\n\n        /*新建Tabspec选项卡并设置Tab菜单栏的内容和绑定对应的Fragment*/\n        for (int i = 0; i < count; i++) {\n            // 给每个Tab按钮设置标签、图标和文字\n            TabHost.TabSpec tabSpec = mTabHost.newTabSpec(tab_textViewArray[i])\n                    .setIndicator(getTabItemView(i));\n            // 将Tab按钮添加进Tab选项卡中，并绑定Fragment\n            mTabHost.addTab(tabSpec, fragmentArray[i], null);\n            mTabHost.setTag(i);\n            mTabHost.getTabWidget().getChildAt(i)\n                    .setBackgroundResource(R.drawable.selector_tab_background);//设置Tab被选中的时候颜色改变\n        }\n    }\n\n    /*初始化Fragment*/\n    private void initPage() {\n        EncoderFragment fragment1 = new EncoderFragment();\n        DecoderFragment fragment2 = new DecoderFragment();\n\n        list.add(fragment1);\n        list.add(fragment2);\n\n        //绑定Fragment适配器\n        mViewPager.setAdapter(new MyFragmentAdapter(getSupportFragmentManager(), list));\n        mTabHost.getTabWidget().setDividerDrawable(null);\n    }\n\n    private View getTabItemView(int i) {\n        //将xml布局转换为view对象\n        View view = mLayoutInflater.inflate(R.layout.tab_content, null);\n        //利用view对象，找到布局中的组件,并设置内容，然后返回视图\n        ImageView mTab_ImageView = (ImageView) view\n                .findViewById(R.id.tab_imageview);\n        TextView mTab_TextView = (TextView) view.findViewById(R.id.tab_textview);\n\n        mTab_ImageView.setBackgroundResource(tab_imageViewArray[i]);\n        mTab_TextView.setText(tab_textViewArray[i]);\n        return view;\n    }\n\n\n    @Override\n    public void onPageScrollStateChanged(int arg0) {\n\n    }//arg0 ==1的时候表示正在滑动，arg0==2的时候表示滑动完毕了，arg0==0的时候表示什么都没做，就是停在那。\n\n    @Override\n    public void onPageScrolled(int arg0, float arg1, int arg2) {\n\n    
}//表示在前一个页面滑动到后一个页面的时候，在前一个页面滑动前调用的方法\n\n    @Override\n    public void onPageSelected(int arg0) {//arg0是表示你当前选中的页面位置Postion，这事件是在你页面跳转完毕的时候调用的。\n        TabWidget widget = mTabHost.getTabWidget();\n        int oldFocusability = widget.getDescendantFocusability();\n        widget.setDescendantFocusability(ViewGroup.FOCUS_BLOCK_DESCENDANTS);//设置View覆盖子类控件而直接获得焦点\n        mTabHost.setCurrentTab(arg0);//根据位置Postion设置当前的Tab\n        widget.setDescendantFocusability(oldFocusability);//设置取消分割线\n    }\n\n    @Override\n    public void onTabChanged(String tabId) {//Tab改变的时候调用\n        int position = mTabHost.getCurrentTab();\n        mViewPager.setCurrentItem(position);//把选中的Tab的位置赋给适配器，让它控制页面切换\n    }\n}\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/MyFragmentAdapter.java",
    "content": "package com.ksyun.media.ksy265codec.demo.ui;\n\nimport android.support.v4.app.Fragment;\nimport android.support.v4.app.FragmentManager;\nimport android.support.v4.app.FragmentPagerAdapter;\n\nimport java.util.List;\n\n/**\n * Created by Carson_Ho on 16/5/23.\n */\npublic class MyFragmentAdapter extends FragmentPagerAdapter {\n    List<Fragment> list;\n\n    public MyFragmentAdapter(FragmentManager fm, List<Fragment> list) {\n        super(fm);\n        this.list = list;\n    }\n\n    @Override\n    public Fragment getItem(int arg0) {\n        return list.get(arg0);\n    }//根据Item的位置返回对应位置的Fragment，绑定item和Fragment\n\n    @Override\n    public int getCount() {\n        return list.size();\n    }//设置Item的数量\n\n    }\n\n\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/ProgressDialogFragment.java",
    "content": "package com.ksyun.media.ksy265codec.demo.ui;\n\nimport android.app.Dialog;\nimport android.app.ProgressDialog;\nimport android.os.Bundle;\nimport android.support.v4.app.DialogFragment;\n\n/**\n * Created by sujia on 2017/4/7.\n */\n\npublic class ProgressDialogFragment extends DialogFragment\n{\n    @Override\n    public void onCreate(Bundle savedInstanceState)\n    {\n        super.onCreate(savedInstanceState);\n        setCancelable(false);\n    }\n\n    @Override\n    public Dialog onCreateDialog(Bundle savedInstanceState)\n    {\n        ProgressDialog dialog = new ProgressDialog(getActivity());\n        dialog.setTitle(\"请等待\");\n        dialog.setMessage(\"编码中...\");\n        dialog.setIndeterminate(true);\n        dialog.setProgressStyle(ProgressDialog.STYLE_SPINNER);\n        return dialog;\n    }\n}"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/java/com/ksyun/media/ksy265codec/demo/ui/Settings.java",
    "content": "package com.ksyun.media.ksy265codec.demo.ui;\n\nimport android.content.Context;\nimport android.content.SharedPreferences;\nimport android.util.Log;\n\n/**\n * Created by sujia on 2017/3/28.\n */\n\npublic class Settings {\n    private static final String TAG = \"settings\";\n    private static final boolean TRACE = true;\n\n    private final String FILE_NAME = \"ksy265codecdemo_settings\";\n    private SharedPreferences mSharedPreferences;\n    private SharedPreferences.Editor mEditor;\n\n    private static Settings sInstance;\n\n    private EncoderSettings mEncoderSettings;\n    private DecoderSettings mDecoderSettings;\n\n    public static Settings getInstance() {\n        if (sInstance == null) {\n            synchronized (Settings.class) {\n                if (sInstance == null) {\n                    sInstance = new Settings();\n                }\n            }\n        }\n\n        return sInstance;\n    }\n\n    public void init(Context context) throws IllegalArgumentException {\n        if (context == null) {\n            throw new IllegalArgumentException(\"the context must not null\");\n        }\n\n        if (mSharedPreferences == null) {\n            mSharedPreferences = context.getSharedPreferences(FILE_NAME,\n                    context.MODE_PRIVATE);\n            mEditor = mSharedPreferences.edit();\n        }\n    }\n\n    public EncoderSettings getEncoderSettings() {\n        if (mSharedPreferences == null) {\n            if (mEncoderSettings == null) {\n                if(TRACE) {\n                    Log.w(TAG, \"please call init before call this function\");\n                }\n                mEncoderSettings = new EncoderSettings();\n                return mEncoderSettings;\n            }\n        }\n\n        if (mEncoderSettings == null) {\n            if(mSharedPreferences != null) {\n                mEncoderSettings = new EncoderSettings(mSharedPreferences);\n            } else {\n                mEncoderSettings = new 
EncoderSettings();\n            }\n        }\n\n        return mEncoderSettings;\n    }\n\n    public DecoderSettings getDecoderSettings() {\n        if (mSharedPreferences == null) {\n            if (mDecoderSettings == null) {\n                if(TRACE) {\n                    Log.w(TAG, \"please call init before call this function\");\n                }\n                mDecoderSettings = new DecoderSettings();\n                return mDecoderSettings;\n            }\n        }\n\n        if (mDecoderSettings == null) {\n            if(mSharedPreferences != null) {\n                mDecoderSettings = new DecoderSettings(mSharedPreferences);\n            } else {\n                mDecoderSettings = new DecoderSettings();\n            }\n        }\n        return mDecoderSettings;\n    }\n\n    public void saveEncoderSettings(EncoderSettings settings) {\n        if (mSharedPreferences == null) {\n            return;\n        }\n\n        if (mEditor != null) {\n            mEditor.putInt(EncoderSettings.ENCODER_SETTINGS_ENCODER, settings.encoderIndex);\n            mEditor.putInt(EncoderSettings.ENCODER_SETTINGS_PROFILE, settings.profileIndex);\n            mEditor.putInt(EncoderSettings.ENCODER_SETTINGS_DELAY, settings.delayIndex);\n            mEditor.putString(EncoderSettings.ENCODER_SETTINGS_RESOLUTION, settings.resolution);\n            mEditor.putInt(EncoderSettings.ENCODER_SETTINGS_RESOLUTION_IDX, settings.resIndex);\n            mEditor.putString(EncoderSettings.ENCODER_SETTINGS_THREADS, settings.threads);\n            mEditor.putString(EncoderSettings.ENCODER_SETTINGS_FPS, settings.fps);\n            mEditor.putString(EncoderSettings.ENCODER_SETTINGS_BITRATE, settings.bitrate);\n\n            mEditor.commit();\n        }\n    }\n\n    public void saveDecoderSettings(DecoderSettings settings) {\n        if (mSharedPreferences == null) {\n            return;\n        }\n\n        if (mEditor != null) {\n            
mEditor.putInt(DecoderSettings.DECODER_SETTINGS_DECODER, settings.decoderIndex);\n            mEditor.putInt(DecoderSettings.DECODER_SETTINGS_THREADS, settings.threadsIndex);\n            mEditor.putInt(DecoderSettings.DECODER_SETTINGS_FPS, settings.fpsIndex);\n            mEditor.putBoolean(DecoderSettings.DECODER_SETTINGS_OUTPUT, settings.enableYUVOutput);\n\n            mEditor.commit();\n        }\n    }\n}\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/jni/Android.mk",
    "content": "LOCAL_PATH := $(call my-dir)\n\ninclude $(call all-subdir-makefiles)"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/jni/Application.mk",
    "content": "APP_ABI := armeabi-v7a\n\nDEBUG := $(NDK_DEBUG)\n\nifndef NDK_DEBUG\n\tDEBUG := 0\nendif\nifeq ($(DEBUG),true)\n\tDEBUG := 1\nendif\n\nifeq ($(DEBUG),1)\n\tAPP_CFLAGS += -O0 -g\n\tAPP_OPTIM := debug\nelse\n\tAPP_CFLAGS += -O2\n\tAPP_OPTIM := release\nendif\n\nAPP_STL \t := gnustl_static\n\nAPP_PLATFORM := android-9\n#NDK_TOOLCHAIN_VERSION := 4.9"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/Android.mk",
    "content": "LOCAL_PATH := $(call my-dir)\n\ninclude $(call all-subdir-makefiles)"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/Android.mk",
    "content": "LOCAL_PATH := $(call my-dir)\nARCH_ABI := $(TARGET_ARCH_ABI)\nPREBUILT_PATH := $(LOCAL_PATH)/../../../../../../../prebuilt\n\n#\n# Prebuilt Shared library\n#\ninclude $(CLEAR_VARS)\nLOCAL_MODULE\t:= lenthevcdec\nLOCAL_SRC_FILES\t:= $(PREBUILT_PATH)/$(TARGET_ARCH_ABI)/liblenthevcdec.so\ninclude $(PREBUILT_SHARED_LIBRARY)\n\ninclude $(CLEAR_VARS)\nLOCAL_MODULE\t:= qydecoder\nLOCAL_SRC_FILES\t:= $(PREBUILT_PATH)/$(TARGET_ARCH_ABI)/libqydecoder.a\ninclude $(PREBUILT_STATIC_LIBRARY)\n\n#\n# jniplayer.so\n#\ninclude $(CLEAR_VARS)\n\nifeq ($(TARGET_ARCH_ABI), armeabi-v7a)\nLENT_CFLAGS := -DARCH_ARM=1 -DHAVE_NEON=1\nendif\nifeq ($(TARGET_ARCH_ABI), x86)\nLENT_CFLAGS := -DARCH_X86_32=1\nendif\n\nLOCAL_C_INCLUDES += $(PREBUILT_PATH)/include\n\nLOCAL_SRC_FILES := jniplayer.cpp jni_utils.cpp yuv2rgb565.cpp gl_renderer.cpp\n\nLOCAL_LDLIBS := -llog -lz -ljnigraphics -lGLESv2\n\nLOCAL_CFLAGS += $(LENT_CFLAGS)\n\nLOCAL_SHARED_LIBRARIES := lenthevcdec\n\nLOCAL_STATIC_LIBRARIES += qydecoder gnustl_static cpufeatures\n\nLOCAL_MODULE := jniplayer\n\ninclude $(BUILD_SHARED_LIBRARY)\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/Chromium_LICENSE.txt",
    "content": "Copyright (c) 2010 The Chromium Authors. All rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n   * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n   * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n   * Neither the name of Google Inc. nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/gl_renderer.cpp",
    "content": "// gl_renderer.cpp : render YUV data directly using GPU with OpenGL ES 2.0\n//\n// Copyright (c) 2013 Strongene Ltd. All Right Reserved.\n// http://www.strongene.com\n//\n// Contributors:\n// Shengbin Meng <shengbinmeng@gmail.com>\n// James Deng <hugeice@gmail.com>\n//\n// You are free to re-use this as the basis for your own application\n// in source and binary forms, with or without modification, provided\n// that the following conditions are met:\n//\n//  * Redistributions of source code must retain the above copyright\n// notice and this list of conditions.\n//  * Redistributions in binary form must reproduce the above\n// copyright notice and this list of conditions in the documentation\n// and/or other materials provided with the distribution.\n\n#include <jni.h>\n#include <android/log.h>\n\n#include <GLES2/gl2.h>\n#include <GLES2/gl2ext.h>\n\n#include <stdio.h>\n#include <stdlib.h>\n#include <math.h>\n#include <pthread.h>\n\n#include \"jniplayer.h\"\n#include \"gl_renderer.h\"\n#include \"jni_utils.h\"\n\nextern VideoFrame gVF;\nextern pthread_mutex_t gVFMutex;\n\n#define LOG_TAG    \"gl_renderer\"\n\n#define ENABLE_LOGD 0\n#if ENABLE_LOGD\n#define LOGD(...)  __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__)\n#else\n#define LOGD(...)\n#endif\n#define LOGI(...)  __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)\n#define LOGE(...)  
__android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)\n\nstatic GLuint gProgram;\nstatic GLuint gTexIds[3];\nstatic GLuint gAttribPosition;\nstatic GLuint gAttribTexCoord;\nstatic GLuint gUniformTexY;\nstatic GLuint gUniformTexU;\nstatic GLuint gUniformTexV;\n\nstatic int backingWidth, backingHeight;\nstatic int needSetup = 0;\n\nstatic const char gVertexShader[] =\n\"attribute vec4 a_position;\\n\"\n\"attribute vec2 a_texCoord;\\n\"\n\"varying vec2 v_tc;\\n\"\n\"void main()\\n\"\n\"{\\n\"\n\"\tgl_Position = a_position;\\n\"\n\"\tv_tc = a_texCoord;\\n\"\n\"}\\n\";\n\nstatic const char gFragmentShader[] =\n\"varying lowp vec2 v_tc;\\n\"\n\"uniform sampler2D u_texY;\\n\"\n\"uniform sampler2D u_texU;\\n\"\n\"uniform sampler2D u_texV;\\n\"\n\"void main(void)\\n\"\n\"{\\n\"\n\"mediump vec3 yuv;\\n\"\n\"lowp vec3 rgb;\\n\"\n\"yuv.x = texture2D(u_texY, v_tc).r;\\n\"\n\"yuv.y = texture2D(u_texU, v_tc).r - 0.5;\\n\"\n\"yuv.z = texture2D(u_texV, v_tc).r - 0.5;\\n\"\n\"rgb = mat3( 1,   1,   1,\\n\"\n\"0,       -0.39465,  2.03211,\\n\"\n\"1.13983,   -0.58060,  0) * yuv;\\n\"\n\"gl_FragColor = vec4(rgb, 1);\\n\"\n\"}\\n\";\n\nstatic void printGLString(const char *name, GLenum s) {\n\tconst char *v = (const char *) glGetString(s);\n\tLOGI(\"GL %s = %s\\n\", name, v);\n}\n\nstatic GLuint loadShader(GLenum shaderType, const char* pSource) {\n\tGLuint shader = glCreateShader(shaderType);\n\tif (shader) {\n\t\tglShaderSource(shader, 1, &pSource, NULL);\n\t\tglCompileShader(shader);\n\t\tGLint compiled = 0;\n\t\tglGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);\n\t\tif (!compiled) {\n\t\t\tGLint infoLen = 0;\n\t\t\tglGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);\n\t\t\tif (infoLen) {\n\t\t\t\tchar* buf = (char*) malloc(infoLen);\n\t\t\t\tif (buf) {\n\t\t\t\t\tglGetShaderInfoLog(shader, infoLen, NULL, buf);\n\t\t\t\t\tLOGE(\"Could not compile shader %d:\\n%s\\n\", shaderType, buf);\n\t\t\t\t\tfree(buf);\n\t\t\t\t}\n\t\t\t\tglDeleteShader(shader);\n\t\t\t\tshader = 
0;\n\t\t\t}\n\t\t}\n\t}\n\treturn shader;\n}\n\nstatic GLuint createProgram(const char* pVertexSource,\n\t\tconst char* pFragmentSource) {\n\tGLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource);\n\tif (!vertexShader) {\n\t\treturn 0;\n\t}\n\n\tGLuint fragmentShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource);\n\tif (!fragmentShader) {\n\t\treturn 0;\n\t}\n\n\tGLuint program = glCreateProgram();\n\tif (program) {\n\t\tglAttachShader(program, vertexShader);\n\t\tglAttachShader(program, fragmentShader);\n\t\tglLinkProgram(program);\n\t\tGLint linkStatus = GL_FALSE;\n\t\tglGetProgramiv(program, GL_LINK_STATUS, &linkStatus);\n\t\tif (linkStatus != GL_TRUE) {\n\t\t\tGLint bufLength = 0;\n\t\t\tglGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);\n\t\t\tif (bufLength) {\n\t\t\t\tchar* buf = (char*) malloc(bufLength);\n\t\t\t\tif (buf) {\n\t\t\t\t\tglGetProgramInfoLog(program, bufLength, NULL, buf);\n\t\t\t\t\tLOGE(\"Could not link program:\\n%s\\n\", buf);\n\t\t\t\t\tfree(buf);\n\t\t\t\t}\n\t\t\t}\n\t\t\tglDeleteProgram(program);\n\t\t\tprogram = 0;\n\t\t}\n\t}\n\treturn program;\n}\n\nstatic GLfloat vertexPositions[] = {\n\t\t-1.0, -1.0, 0.0,\n\t\t 1.0, -1.0, 0.0,\n\t\t-1.0,  1.0, 0.0,\n\t\t 1.0,  1.0, 0.0\n};\n\nstatic GLfloat textureCoords[] = {\n\t\t0.0, 1.0,\n\t\t1.0, 1.0,\n\t\t0.0, 0.0,\n\t\t1.0, 0.0\n};\n\nstatic int init() {\n\tprintGLString(\"Version\", GL_VERSION);\n\tprintGLString(\"Vendor\", GL_VENDOR);\n\tprintGLString(\"Renderer\", GL_RENDERER);\n\tprintGLString(\"Extensions\", GL_EXTENSIONS);\n\n\t// create and use our program\n\tgProgram = createProgram(gVertexShader, gFragmentShader);\n\tif (!gProgram) {\n\t\tLOGE(\"Could not create program.\");\n\t\treturn -1;\n\t}\n\tglUseProgram(gProgram);\n\n\t// get the location of attributes in our shader\n\tgAttribPosition = glGetAttribLocation(gProgram, \"a_position\");\n\tgAttribTexCoord = glGetAttribLocation(gProgram, \"a_texCoord\");\n\n\t// get the location of uniforms in our 
shader\n\tgUniformTexY = glGetUniformLocation(gProgram, \"u_texY\");\n\tgUniformTexU = glGetUniformLocation(gProgram, \"u_texU\");\n\tgUniformTexV = glGetUniformLocation(gProgram, \"u_texV\");\n\n\t// can enable only once\n\tglEnableVertexAttribArray(gAttribPosition);\n\tglEnableVertexAttribArray(gAttribTexCoord);\n\n\t// set the value of uniforms (uniforms all have constant value)\n\tglUniform1i(gUniformTexY, 0);\n\tglUniform1i(gUniformTexU, 1);\n\tglUniform1i(gUniformTexV, 2);\n\n\t// generate and set parameters for the textures\n\tglEnable (GL_TEXTURE_2D);\n\tglGenTextures(3, gTexIds);\n\tfor (int i = 0; i < 3; i++) {\n\t\tglActiveTexture(GL_TEXTURE0 + i);\n\t\tglBindTexture(GL_TEXTURE_2D, gTexIds[i]);\n\t\tglTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);\n\t\tglTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);\n\t\tglTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);\n\t\tglTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);\n\t}\n\n\treturn 0;\n}\n\nstatic int setupGraphics(int w, int h) {\n\n\tLOGI(\"setupGraphics(%d, %d)\", w, h);\n\n\tbackingWidth = w;\n\tbackingHeight = h;\n\tneedSetup = 1;\n\n\treturn 0;\n}\n\nvoid glDrawFrame() {\n\n\tpthread_mutex_lock(&gVFMutex);\n\n\tif (gVF.yuv_data[0] == NULL) {\n\t\tLOGI(\"gVF.yuv_data[0] == NULL\");\n\t\tpthread_mutex_unlock(&gVFMutex);\n\t\treturn;\n\t}\n\tdouble pts = gVF.pts;\n\n\tif (needSetup) {\n\n\t\tLOGI(\"Will setup ... 
\\n\");\n\t\tGLuint width = gVF.width;\n\t\tGLuint height = gVF.height;\n\n\t\tfloat aspect = (float) width / (float) height;\n\n\t\tif (aspect >= (float) backingWidth / (float) backingHeight) {\n\t\t\t// fill screen in width, and leave space in Y\n\t\t\tfloat scale = (float) backingWidth / (float) width;\n\t\t\tfloat maxY = ((float) height * scale) / (float) backingHeight;\n\t\t\tvertexPositions[1] = vertexPositions[4] = -maxY;\n\t\t\tvertexPositions[7] = vertexPositions[10] = maxY;\n\n\t\t} else {\n\t\t\t// fill screen in height, and leave space in X\n\t\t\tfloat scale = (float) backingHeight / (float) height;\n\t\t\tfloat maxX = ((float) width * scale) / (float) backingWidth;\n\t\t\tvertexPositions[0] = vertexPositions[6] = -maxX;\n\t\t\tvertexPositions[3] = vertexPositions[9] = maxX;\n\t\t}\n\n\t\t// modify the texture coordinates\n\t\tfloat texCoord = ((float) width) / gVF.linesize_y;\n\t\ttextureCoords[2] = textureCoords[6] = texCoord;\n\n\t\t// set the value of attributes\n\t\tglVertexAttribPointer(gAttribPosition, 3, GL_FLOAT, 0, 0,\n\t\t\t\tvertexPositions);\n\t\tglVertexAttribPointer(gAttribTexCoord, 2, GL_FLOAT, 0, 0,\n\t\t\t\ttextureCoords);\n\n\t\tglViewport(0, 0, backingWidth, backingHeight);\n\n\t\tLOGI(\"setup finished\\n\");\n\n\t\tneedSetup = 0;\n\t}\n\n\tglClearColor(0.0f, 0.0f, 0.0f, 1.0f);\n\tglClear (GL_COLOR_BUFFER_BIT);\n\n\tLOGD(\"before upload: %u (%f)\", getms(), pts);\n\n\t// upload textures\n\tglActiveTexture(GL_TEXTURE0 + 0);\n\tglTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, gVF.linesize_y, gVF.height, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, gVF.yuv_data[0]);\n\tglActiveTexture(GL_TEXTURE0 + 1);\n\tglTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, gVF.linesize_uv, gVF.height / 2, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, gVF.yuv_data[1]);\n\tglActiveTexture(GL_TEXTURE0 + 2);\n\tglTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, gVF.linesize_uv, gVF.height / 2, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, 
gVF.yuv_data[2]);\n\tpthread_mutex_unlock(&gVFMutex);\n\n\tLOGD(\"after upload: %u (%f)\", getms(), pts);\n\n\tLOGD(\"before glDrawArrays: %u (%f)\", getms(), pts);\n\tglDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\n\n\tLOGD(\"after glDrawArrays: %u (%f)\", getms(), pts);\n\n}\n\njint nativeInit(JNIEnv * env, jobject obj) {\n\tint ret = init();\n\tif (ret < 0) {\n\t\tLOGE(\"initialize failed!\");\n\t}\n\treturn ret;\n}\n\njint nativeSetup(JNIEnv * env, jobject obj, jint width, jint height) {\n\tint ret = setupGraphics(width, height);\n\tif (ret < 0) {\n\t\tLOGE(\"setup failed!\");\n\t}\n\treturn ret;\n}\n\nvoid nativeDrawFrame(JNIEnv * env, jobject obj) {\n\tglDrawFrame();\n}\n\nstatic JNINativeMethod methods[] = {\n\t{ \"nativeInit\", \"()I\", (void *) nativeInit },\n\t{ \"nativeSetup\", \"(II)I\", (void *) nativeSetup },\n\t{ \"nativeDrawFrame\", \"()V\", (void *) nativeDrawFrame },\n};\n\nint register_renderer(JNIEnv *env) {\n\treturn jniRegisterNativeMethods(env, \"com/ksyun/media/ksy265codec/demo/decoder/hevdecoder/GLRenderer\",\n\t\t\tmethods, sizeof(methods) / sizeof(methods[0]));\n}\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/gl_renderer.h",
    "content": "#ifndef __GL_RENDERER_H__\n#define __GL_RENDERER_H__\n\nvoid glDrawFrame();\n\n#endif\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/jni_utils.cpp",
    "content": "#include <stdlib.h>\n#include <android/log.h>\n#include \"jni_utils.h\"\n\n#define LOG_TAG    \"jni_utils\"\n\nstatic JavaVM *gVM;\n\nextern int register_player(JNIEnv *env);\nextern int register_renderer(JNIEnv *env);\n\n/*\n * Throw an exception with the specified class and an optional message.\n */\nint jniThrowException(JNIEnv* env, const char* className, const char* msg) {\n\tjclass exceptionClass = env->FindClass(className);\n\tif (exceptionClass == NULL) {\n\t\tLOGE(\"Unable to find exception class %s\", className);\n\t\treturn -1;\n\t}\n\tif (env->ThrowNew(exceptionClass, msg) != JNI_OK) {\n\t\tLOGE(\"Failed throwing '%s' '%s'\", className, msg);\n\t}\n\treturn 0;\n}\n\nJNIEnv* getJNIEnv() {\n\tJNIEnv* env = NULL;\n\tint ret = gVM->GetEnv((void**) &env, JNI_VERSION_1_4);\n\tif (ret == JNI_OK) {\n\t\treturn env;\n\t} else if (ret == JNI_EDETACHED) {\n\t\tjint attachSuccess = gVM->AttachCurrentThread(&env, NULL);\n\t\tif (attachSuccess != 0) {\n\t\t\tLOGE(\"attach current thread failed \\n\");\n\t\t\treturn NULL;\n\t\t}\n\t} else {\n\t\tLOGE(\"obtain JNIEnv failed, return: %d \\n\", ret);\n\t}\n\treturn env;\n}\n\nvoid detachJVM() {\n\tint ret;\n\tret = gVM->DetachCurrentThread();\n\tif (ret == JNI_OK) {\n\t\tLOGI(\"detach return OK: %d\", ret);\n\t} else {\n\t\tLOGE(\"detach return NOT OK: %d\", ret);\n\t}\n}\n\n/*\n * Register native JNI-callable methods.\n *\n * \"className\" looks like \"java/lang/String\".\n */\nint jniRegisterNativeMethods(JNIEnv* env, const char* className,\n\t\tconst JNINativeMethod* gMethods, int numMethods) {\n\tjclass clazz;\n\n\tLOGI(\"Registering %s natives\\n\", className);\n\tclazz = env->FindClass(className);\n\tif (clazz == NULL) {\n\t\tLOGE(\"Native registration unable to find class '%s'\\n\", className);\n\t\treturn -1;\n\t}\n\tif (env->RegisterNatives(clazz, gMethods, numMethods) < 0) {\n\t\tLOGE(\"RegisterNatives failed for '%s'\\n\", className);\n\t\treturn -1;\n\t}\n\treturn 0;\n}\n\njint 
JNI_OnLoad(JavaVM* vm, void* reserved) {\n\tJNIEnv* env = NULL;\n\tjint result = JNI_ERR;\n\tgVM = vm;\n\n\tif (vm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {\n\t\tLOGE(\"GetEnv failed!\");\n\t\treturn JNI_ERR;\n\t}\n\n\tLOGI(\"loading . . .\");\n\tif (register_player(env) != JNI_OK) {\n\t\tLOGE(\"can't register player\");\n\t\treturn JNI_ERR;\n\t}\n\tif (register_renderer(env) != JNI_OK) {\n\t\tLOGE(\"can't register renderer\");\n\t\treturn JNI_ERR;\n\t}\n\tLOGI(\"loaded\");\n\n\treturn JNI_VERSION_1_4;\n}\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/jni_utils.h",
    "content": "#ifndef __JNI_UTILS_H__\n#define __JNI_UTILS_H__\n\n#include <stdlib.h>\n#include <jni.h>\n#include <android/log.h>\n\n\n#ifdef __cplusplus\n\t#define __STDC_CONSTANT_MACROS\n\t#define __STDC_LIMIT_MACROS\n\t#ifdef _STDINT_H\n\t\t#undef _STDINT_H\n\t#endif\n\t#include <stdint.h>\n\t#define __STDC_FORMAT_MACROS\n#endif\n\n#define ENABLE_LOGD 0\n\n#if ENABLE_LOGD\n#define LOGD(...)  __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__)\n#else\n#define LOGD(...)\n#endif\n#define LOGI(...)  __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)\n#define LOGE(...)  __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)\n\nint jniThrowException(JNIEnv* env, const char* className, const char* msg);\nint jniRegisterNativeMethods(JNIEnv* env, const char* className, const JNINativeMethod* gMethods, int numMethods);\n\nJNIEnv* getJNIEnv();\nvoid detachJVM();\n\n#endif /* __JNI_UTILS_H__ */\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/jniplayer.cpp",
    "content": "// jniplayer.cpp : decode H.265/HEVC video data in separate native thread\n//\n// Copyright (c) 2013 Strongene Ltd. All Right Reserved.\n// http://www.strongene.com\n//\n// Contributors:\n// Shengbin Meng <shengbinmeng@gmail.com>\n// James Deng <hugeice@gmail.com>\n//\n// You are free to re-use this as the basis for your own application\n// in source and binary forms, with or without modification, provided\n// that the following conditions are met:\n//\n//  * Redistributions of source code must retain the above copyright\n// notice and this list of conditions.\n//  * Redistributions in binary form must reproduce the above\n// copyright notice and this list of conditions in the documentation\n// and/or other materials provided with the distribution.\n\n\n\n#include <android/log.h>\n#include <android/bitmap.h>\n#include <stdio.h>\n#include <time.h>\n#include <pthread.h>\n#include <unistd.h>\n#include \"jniplayer.h\"\n#include \"jni_utils.h\"\n#include \"yuv2rgb565.h\"\n#include \"gl_renderer.h\"\n\n#ifdef __cplusplus\n\t#define __STDC_CONSTANT_MACROS\n\t#define __STDC_LIMIT_MACROS\n\t#ifdef _STDINT_H\n\t\t#undef _STDINT_H\n\t#endif\n\t#include <stdint.h>\n\t#define __STDC_FORMAT_MACROS\n#endif\n\nextern \"C\" {\n#include \"lenthevcdec.h\"\n#include \"qy265dec.h\"\n#include \"qyauth_env.h\"\n}\n\n#define LOG_TAG    \"jniplayer\"\n\n#define ENABLE_LOGD 0\n#if ENABLE_LOGD\n#define LOGD(...)  __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__)\n#else\n#define LOGD(...)\n#endif\n#define LOGI(...)  __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)\n#define LOGE(...)  
__android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)\n\n#ifndef _countof\n#define _countof(a) (sizeof(a) / sizeof((a)[0]))\n#endif\n\n#define LOOP_PLAY 0\n\n#if ARCH_ARM\n#define USE_SWSCALE 0\n#else\n#define USE_SWSCALE 0\n#endif\n\nstruct MediaInfo\n{\n\tint width;\n\tint height;\n\tchar data_src[1024];\n\tint raw_bs;\n};\n\nVideoFrame gVF = {0, 0, 0, 0, 0, {NULL, NULL, NULL}};\npthread_mutex_t gVFMutex = PTHREAD_MUTEX_INITIALIZER;\n\nstatic MediaInfo media;\n\nstatic pthread_t decode_thread;\n\nstatic struct SwsContext   *p_sws_ctx;\n\nstatic const char* const kClassPathName = \"com/ksyun/media/ksy265codec/demo/decoder/hevdecoder/NativeMediaPlayer\";\n\n// for lenthevcdec\nstatic const uint32_t AU_COUNT_MAX = 1024 * 1024;\nstatic const uint32_t AU_BUF_SIZE_MAX = 1024 * 1024 * 80;\nstatic uint32_t au_pos[AU_COUNT_MAX];\t// too big array, use static to save stack space\nstatic uint32_t au_count, au_buf_size;\nstatic uint8_t *au_buf = NULL;\nstatic lenthevcdec_ctx lent_ctx = NULL;\n\nstatic volatile int exit_decode_thread = 0;\nstatic volatile int is_playing = 0;\n\n\nstatic int frames_sum = 0;\nstatic double tstart = 0;\n\nstatic int frames = 0;\nstatic double tlast = 0;\n\nstatic float renderFPS = 0;\nstatic double avg_fps = 0;\nstatic uint64_t renderInterval = 0;\nstatic struct timeval timeStart;\n\nstatic int frame_count = 0;\nstatic double real_time = 0;\nstatic float real_fps = 0;\n\nstatic int use_ksy = 0;\nstatic void* ksydec_ctx = NULL;\nstatic QY265Frame decframe;\n\nstatic int disable_render = 0;\n\nstatic bool enable_output_yuv = 0;\nstatic char output_path[1024];\n\ntypedef struct Context {\n    jobject obj;\n    jmethodID\tdrawFrame;\n    jmethodID   postEvent;\n} Context;\n\nstatic Context *ctx;\n\nstatic void freeContext(void* opaque) {\n    Context* ctx = (Context*) opaque;\n    if (ctx && ctx->obj) {\n        JNIEnv *env = NULL;\n        env = getJNIEnv();\n        env->DeleteGlobalRef(ctx->obj);\n    }\n    free(ctx);\n}\n\nstatic void 
postEventToJava(void *opaque, int msg, int ext1, int ext2)\n{\n    Context* ctx = (Context*) opaque;\n    if (ctx == NULL) {\n        return;\n    }\n\n    int isAttached = 0;\n    JNIEnv *env = NULL;\n\n    env = getJNIEnv();\n    if (env == NULL) {\n        return;\n    }\n\n    env->CallVoidMethod(ctx->obj, ctx->postEvent, msg, ext1, ext2);\n\n    if (env->ExceptionCheck()) {\n        env->ExceptionDescribe();\n    }\n}\n\nstatic int callJavaDrawFrame(void* opaque, int width, int height)\n{\n    Context* ctx = (Context*) opaque;\n    if (ctx == NULL) {\n        return 0;\n    }\n\n    int isAttached = 0;\n    JNIEnv *env = NULL;\n\n    env = getJNIEnv();\n    if (env == NULL) {\n        return 0;\n    }\n    int ret =  env->CallIntMethod(ctx->obj, ctx->drawFrame, width, height);\n\n    if (env->ExceptionCheck()) {\n        env->ExceptionDescribe();\n    }\n\n    return ret;\n}\n\nuint32_t getms()\n{\n\tstruct timeval t;\n\tgettimeofday(&t, NULL);\n\treturn (t.tv_sec * 1000) + (t.tv_usec / 1000);\n}\n\nint drawFrame(VideoFrame * vf)\n{\n    int64_t timePassed, delay;\n\tLOGD(\"enter drawFrame:%u (%f)\", getms(), vf->pts);\n\n\tif(disable_render)\n\t    goto show_display;\n\n\t// copy decode frame to global buffer\n\tpthread_mutex_lock(&gVFMutex);\n\tif ( gVF.linesize_y != vf->linesize_y || gVF.linesize_uv != vf->linesize_uv || gVF.height != vf->height ) {\n\t\tif ( NULL != gVF.yuv_data[0] )\n\t\t\tfree(gVF.yuv_data[0]);\n\t\tgVF.yuv_data[0] = gVF.yuv_data[1] = gVF.yuv_data[2] = NULL;\n\t\tgVF.yuv_data[0] = (uint8_t*)malloc(vf->linesize_y * vf->height + vf->linesize_uv * vf->height );\n\t\tif ( NULL == gVF.yuv_data[0] ) {\n\t\t\tLOGE(\"malloc failed!\\n\");\n\t\t\tpthread_mutex_unlock(&gVFMutex);\n\t\t\treturn -1;\n\t\t}\n\t\tgVF.yuv_data[1] = gVF.yuv_data[0] + vf->linesize_y*vf->height;\n\t\tgVF.yuv_data[2] = gVF.yuv_data[1] + vf->linesize_uv*vf->height/2;\n\t}\n\tgVF.width = vf->width;\n\tgVF.height = vf->height;\n\tgVF.linesize_y = vf->linesize_y;\n\tgVF.linesize_uv = vf->linesize_uv;\n\tgVF.pts = 
vf->pts;\n\tmemcpy(gVF.yuv_data[0], vf->yuv_data[0], vf->linesize_y*vf->height);\n\tmemcpy(gVF.yuv_data[1], vf->yuv_data[1], vf->linesize_uv*vf->height/2);\n\tmemcpy(gVF.yuv_data[2], vf->yuv_data[2], vf->linesize_uv*vf->height/2);\n\tpthread_mutex_unlock(&gVFMutex);\n\n\t// wait for display\n\tstruct timeval timeNow;\n\tgettimeofday(&timeNow, NULL);\n\ttimePassed = ((int64_t)(timeNow.tv_sec - timeStart.tv_sec))*1000000 + (timeNow.tv_usec - timeStart.tv_usec);\n\tdelay = vf->pts - timePassed;\n\tif (delay > 0) {\n\t\tusleep(delay);\n\t}\n\nshow_display:\n\t// update information\n\tgettimeofday(&timeNow, NULL);\n\tdouble tnow = timeNow.tv_sec + (timeNow.tv_usec / 1000000.0);\n\tif (tlast == 0) tlast = tnow;\n\tif (tstart == 0) tstart = tnow;\n\tif (tnow > tlast + 1) {\n\t\tLOGI(\"Video Display FPS:%i\", (int)frames);\n\t\tframes_sum += frames;\n\t\tavg_fps = frames_sum / (tnow - tstart);\n\t\tLOGI(\"Video AVG FPS:%.2lf\", avg_fps);\n        postEventToJava(ctx, 900, int(frames), int(avg_fps * 4096));\n\t\ttlast = tlast + 1;\n\t\tframes = 0;\n\t}\n\tframes++;\n\n\t// request display\n\tLOGD(\"before request draw:%u (%f)\", getms(), vf->pts);\n    if (disable_render) {\n        return 0;\n    } else {\n        return callJavaDrawFrame(ctx, vf->width, vf->height);\n    }\n}\n\nint lent_hevc_get_sps(uint8_t* buf, int size, uint8_t** sps_ptr)\n{\n\tint i, nal_type, sps_pos;\n\tsps_pos = -1;\n\tfor ( i = 0; i < (size - 4); i++ ) {\n\t\tif ( 0 == buf[i] && 0 == buf[i+1] && 1 == buf[i+2] ) {\n\t\t\tnal_type = (buf[i+3] & 0x7E) >> 1;\n\t\t\tif ( 33 != nal_type && sps_pos >= 0 ) {\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tif ( 33 == nal_type ) { // sps\n\t\t\t\tsps_pos = i;\n\t\t\t}\n\t\t\ti += 2;\n\t\t}\n\t}\n\tif ( sps_pos < 0 )\n\t\treturn 0;\n\tif ( i == (size - 4) )\n\t\ti = size;\n\t*sps_ptr = buf + sps_pos;\n\treturn i - sps_pos;\n}\n\nint lent_hevc_get_frame(uint8_t* buf, int size, int *is_idr)\n{\n\tstatic int seq_hdr = 0;\n\tint i, nal_type, idr = 0;\n\tfor ( i = 0; i < (size 
- 6); i++ ) {\n\t\tif ( 0 == buf[i] && 0 == buf[i+1] && 1 == buf[i+2] ) {\n\t\t\tnal_type = (buf[i+3] & 0x7E) >> 1;\n\t\t\tif ( nal_type <= 21 ) {\n\t\t\t\tif ( buf[i+5] & 0x80 ) { /* first slice in pic */\n\t\t\t\t\tif ( !seq_hdr )\n\t\t\t\t\t\tbreak;\n\t\t\t\t\telse\n\t\t\t\t\t\tseq_hdr = 0;\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ( nal_type >= 32 && nal_type <= 34 ) {\n\t\t\t\tif ( !seq_hdr ) {\n\t\t\t\t\tseq_hdr = 1;\n\t\t\t\t\tidr = 1;\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t\tseq_hdr = 1;\n\t\t\t}\n\t\t\ti += 2;\n\t\t}\n\t}\n\tif ( i == (size - 6) )\n\t\ti = size;\n\tif ( NULL != is_idr )\n\t\t*is_idr = idr;\n\treturn i;\n}\n\nstatic int write_pic_yv12(int w, int h, uint8_t* buf[3], short stride[3], FILE *fp)\n{\n    uint8_t *line;\n    int line_len, line_count, i, j, pitch;\n    for ( i = 0; i < 3; i++ ) {\n        line = buf[i];\n        pitch = stride[i];\n        line_len = (0 == i) ? w : (w / 2);\n        line_count = (0 == i) ? h : (h / 2);\n        for ( j = 0; j < line_count; j++ ) {\n            if ( fwrite(line, 1, line_len, fp) != line_len )\n                return -1;\n            line += pitch;\n        }\n    }\n    return 0;\n}\n\nvoid* rawbs_runDecoder(void *p)\n{\n\tint32_t got_frame, width, height, stride[3];\n\tuint8_t* pixels[3];\n\tint64_t pts, got_pts;\n\tint ret, i;\n    struct timeval tv_start, tv_end;\n\n\tif ( NULL == lent_ctx || NULL == au_buf )\n\t\treturn NULL;\n\n    FILE* out_file = NULL;\n    if (enable_output_yuv) {\n        out_file = fopen(output_path, \"wb\");\n        if (out_file == NULL) {\n            LOGE(\"open outout file %s faile\", output_path);\n            goto exit;\n        }\n    }\n\ndecode:\n\t// decode all AUs\n    frame_count = 0;\n    real_time = 0;\n    gettimeofday(&tv_start, NULL);\n\n\tLOGD(\"dec %d\\n\",  au_count);\n\tfor ( i = 0; i < au_count && !exit_decode_thread; i++ ) {\n\t\tpts = i * 40;\n\t\tgot_frame = 0;\n\t\tuint32_t start_time = getms();\n\t\tLOGD(\"before decode: %u\", start_time);\n\n\t\tif(use_ksy) 
{\n\t\t    QY265DecodeFrame(ksydec_ctx, au_buf + au_pos[i], au_pos[i + 1] - au_pos[i], &ret, 0);\n            if ( ret < 0 ) {\n                LOGE(\"call QY265DecodeFrame failed! ret = %d   i: %d /%d\\n\", ret, i, au_count);\n            }\n\n            QY265DecoderGetDecodedFrame(ksydec_ctx, &decframe, &ret, 0);\n            if ( ret == 0 && decframe.bValid ) {\n                got_frame = 1;\n                width = decframe.frameinfo.nWidth;\n                height = decframe.frameinfo.nHeight;\n                stride[0] = decframe.iStride[0];\n                stride[1] = decframe.iStride[1];\n                pixels[0] = decframe.pData[0];\n                pixels[1] = decframe.pData[1];\n                pixels[2] = decframe.pData[2];\n            }\n\t\t} else {\n\t\t    ret = lenthevcdec_decode_frame(lent_ctx, au_buf + au_pos[i], au_pos[i + 1] - au_pos[i], pts,\n\t\t\t\t\t       &got_frame, &width, &height, stride, (void**)pixels, &got_pts);\n\t\t    if ( ret < 0 ) {\n\t\t\t    LOGE(\"call lenthevcdec_decode_frame failed! ret = %d\\n\", ret);\n\t\t\t    goto exit;\n\t\t    }\n\t\t}\n\n\t\tuint32_t end_time = getms();\n\t\tLOGD(\"after decode: %u\", end_time);\n\t\tuint32_t dec_time = end_time - start_time;\n\t\tif ( got_frame > 0 ) {\n\t\t\tLOGD(\"decoding time: %u - %u = %u\\n\", end_time, start_time, dec_time);\n\t\t\tLOGD(\"decode frame: pts = %\" PRId64 \", linesize = {%d,%d,%d}\\n\", got_pts, stride[0], stride[1], stride[2]);\n\t\t\tif ( media.width != width || media.height != height ) {\n\t\t\t\tLOGD(\"Video dimensions change! 
%dx%d -> %dx%d\\n\", media.width, media.height, width, height);\n\t\t\t\tmedia.width = width;\n\t\t\t\tmedia.height = height;\n\t\t\t}\n\t\t\t// draw frame to screen\n\t\t\tVideoFrame vf;\n\t\t\tvf.width = width;\n\t\t\tvf.height = height;\n\t\t\tvf.linesize_y = stride[0];\n\t\t\tvf.linesize_uv = stride[1];\n\t\t\tvf.pts = renderInterval * frame_count;\n\t\t\tvf.yuv_data[0] = pixels[0];\n\t\t\tvf.yuv_data[1] = pixels[1];\n\t\t\tvf.yuv_data[2] = pixels[2];\n\n            if (enable_output_yuv &&\n                    out_file != NULL) {\n                write_pic_yv12(decframe.frameinfo.nWidth, decframe.frameinfo.nHeight,\n                               (uint8_t**)decframe.pData, decframe.iStride, out_file);\n            }\n\n\t\t\tif (frame_count == 0) {\n\t\t\t\tgettimeofday(&timeStart, NULL);\n\t\t\t}\n\t\t\tdrawFrame(&vf);\n            if(use_ksy)\n\t\t\t    QY265DecoderReturnDecodedFrame(ksydec_ctx, &decframe);\n\t\t\tframe_count++;\n\t\t}\n\t}\n\n#if LOOP_PLAY\n\tif (!exit_decode_thread) {\n\t\tLOGI(\"automatically play again\\n\");\n\t\tgoto decode;\n\t}\n#endif\n\n    LOGE(\"flush  %d + %d /%d \\n\", i,frame_count, au_count);\n\t// flush decoder\n\twhile ( !exit_decode_thread ) {\n\t\tgot_frame = 0;\n        if(use_ksy) {\n            QY265DecoderGetDecodedFrame(ksydec_ctx, &decframe, &ret, 0);\n            if ( ret == 0 && frame_count < au_count){\n                if (decframe.bValid) {\n                    got_frame = 1;\n                    width = decframe.frameinfo.nWidth;\n                    height = decframe.frameinfo.nHeight;\n                    stride[0] = decframe.iStride[0];\n                    stride[1] = decframe.iStride[1];\n                    pixels[0] = decframe.pData[0];\n                    pixels[1] = decframe.pData[1];\n                    pixels[2] = decframe.pData[2];\n                }\n            }\n            else {\n                break;\n            }\n\t\t} else {\n\t\t    ret = lenthevcdec_decode_frame(lent_ctx, NULL, 0, 
pts,\n\t\t\t\t\t       &got_frame, &width, &height, stride, (void**)pixels, &got_pts);\n\t\t    if ( ret < 0 || got_frame <= 0)\n\t\t\t    break;\n\t\t}\n\n\t\tif ( got_frame > 0 ) {\n\t\t\tif ( media.width != width || media.height != height ) {\n\t\t\t\tLOGD(\"Video dimensions change! %dx%d -> %dx%d\\n\", media.width, media.height, width, height);\n\t\t\t\tmedia.width = width;\n\t\t\t\tmedia.height = height;\n\t\t\t}\n\t\t\t// draw frame to screen\n\t\t\tVideoFrame vf;\n\t\t\tvf.width = width;\n\t\t\tvf.height = height;\n\t\t\tvf.linesize_y = stride[0];\n\t\t\tvf.linesize_uv = stride[1];\n\t\t\tvf.pts = renderInterval * frame_count;\n\t\t\tvf.yuv_data[0] = pixels[0];\n\t\t\tvf.yuv_data[1] = pixels[1];\n\t\t\tvf.yuv_data[2] = pixels[2];\n\n            if (enable_output_yuv &&\n                out_file != NULL) {\n                write_pic_yv12(decframe.frameinfo.nWidth, decframe.frameinfo.nHeight,\n                               (uint8_t**)decframe.pData, decframe.iStride, out_file);\n            }\n\n\t\t\tdrawFrame(&vf);\n            if(use_ksy)\n\t\t\t    QY265DecoderReturnDecodedFrame(ksydec_ctx, &decframe);\n\t\t\tframe_count++;\n\t\t}\n\t}\n\nexit:\n\tif ( NULL != au_buf )\n\t\tfree(au_buf);\n\tau_buf = NULL;\n\tau_buf_size = 0;\n\tif ( NULL != lent_ctx )\n\t\tlenthevcdec_destroy(lent_ctx);\n\tlent_ctx = NULL;\n\tif ( NULL != ksydec_ctx )\n    \tQY265DecoderDestroy(ksydec_ctx);\n    ksydec_ctx = NULL;\n\n    gettimeofday(&tv_end, NULL);\n    real_time = (tv_end.tv_sec + (tv_end.tv_usec / 1000000.0)) - (tv_start.tv_sec + (tv_start.tv_usec / 1000000.0));\n    real_fps = frame_count / real_time;\n\n    postEventToJava(ctx, 909, int(frame_count), 0);// end of file\n\tdetachJVM();\n\tis_playing = 0;\n\tLOGI(\"decode thread exit\\n\");\n\texit_decode_thread = 0;\n\n    if (out_file != NULL) {\n        fclose(out_file);\n    }\n\n\treturn NULL;\n}\n\n\n\nstatic int\nMediaPlayer_setDataSource(JNIEnv *env, jobject thiz, jstring path)\n{\n\tconst char *pathStr = 
env->GetStringUTFChars(path, NULL);\n\tmemset(&media, 0, sizeof(media));\n\tstrcpy(media.data_src, pathStr);\n\t// Make sure that local ref is released before a potential exception\n\tenv->ReleaseStringUTFChars(path, pathStr);\n\t// is raw HEVC bitstream file ?\n\tstatic const char * hevc_raw_bs_ext[] = {\".hevc\", \".hm91\", \".hm10\", \".bit\", \".hvc\", \".h265\", \".265\"};\n\tchar * ext = strrchr(media.data_src, '.');\n\tif ( NULL != ext ) {\n\t\tint i;\n\t\tfor ( i = 0; i < _countof(hevc_raw_bs_ext); i++ ) {\n\t\t\tif ( strcasecmp(hevc_raw_bs_ext[i], ext) == 0 )\n\t\t\t\tbreak;\n\t\t}\n\t\tif ( i < _countof(hevc_raw_bs_ext) )\n\t\t\tmedia.raw_bs = 1;\n\t}\n\treturn 0;\n}\n\nstatic int rawbs_prepare(JNIEnv *env, jobject context, int threads)\n{\n\tFILE *in_file;\n\tint32_t got_frame, width, height, stride[3];\n\tuint8_t* pixels[3];\n\tint64_t pts, got_pts;\n\tuint8_t *sps;\n\tlenthevcdec_ctx one_thread_ctx;\n\tint compatibility, frame_count, sps_len, ret, i;\n\n\tin_file = NULL;\n\tau_buf = NULL;\n\tlent_ctx = NULL;\n\tone_thread_ctx = NULL;\n\n\t// get compatibility version\n\tcompatibility = 0x7fffffff;\n\tif ( strncasecmp(\".hm91\", media.data_src + (strlen(media.data_src) - 5), 5) == 0 )\n\t\tcompatibility = 91;\n\telse if ( strncasecmp(\".hm10\", media.data_src + (strlen(media.data_src) - 5), 5) == 0 )\n\t\tcompatibility = 100;\n\n\t// read file\n\tin_file = fopen(media.data_src, \"rb\");\n\tif ( NULL == in_file ) {\n\t\tLOGE(\"Can not open input file '%s'\\n\", media.data_src);\n\t\tgoto error_exit;\n\t}\n\tfseek(in_file, 0, SEEK_END);\n\tau_buf_size = ftell(in_file);\n\tfseek(in_file, 0, SEEK_SET);\n\tLOGD(\"file size is %d bytes\\n\", au_buf_size);\n\tif ( au_buf_size > AU_BUF_SIZE_MAX )\n\t\tau_buf_size = AU_BUF_SIZE_MAX;\n\tau_buf = (uint8_t*)malloc(au_buf_size);\n\tif ( NULL == au_buf ) {\n\t\tLOGE(\"call malloc failed! 
size is %d\\n\", au_buf_size);\n\t\tgoto error_exit;\n\t}\n\tif ( fread(au_buf, 1, au_buf_size, in_file) != au_buf_size ) {\n\t\tLOGE(\"call fread failed!\\n\");\n\t\tgoto error_exit;\n\t}\n\tfclose(in_file);\n\tin_file = NULL;\n\tLOGD(\"%d bytes read to address %p\\n\", au_buf_size, au_buf);\n\n\t// find all AU\n\tau_count = 0;\n\tfor ( i = 0; i < au_buf_size && au_count < (AU_COUNT_MAX - 1); i+=3 ) {\n\t\ti += lent_hevc_get_frame(au_buf + i, au_buf_size - i, NULL);\n\t\tif (i < au_buf_size) {\n\t\t\tau_pos[au_count++] = i;\n\t\t}\n\t\tLOGD(\"AU[%d] = %d\\n\", au_count - 1, au_pos[au_count - 1]);\n\t}\n\tau_pos[au_count] = au_buf_size; // include last AU\n\tLOGD(\"found %d AUs\\n\", au_count);\n\n    if(use_ksy) {\n        int hr = QY_OK;\n        QY265DecConfig config = {0};\n\n        config.threads = threads;\n        config.bEnableOutputRecToFile = 0;\n        config.strRecYuvFileName = NULL;\n\t\tTCounterEnv* tCounterEnv = (TCounterEnv*) malloc(sizeof(TCounterEnv));\n\t\ttCounterEnv->context = context;\n\t\tenv->GetJavaVM(&tCounterEnv->jvm);\n\t\tconfig.pAuth = tCounterEnv;\n\n        ksydec_ctx = QY265DecoderCreate(&config, &hr);\n        if(ksydec_ctx == NULL) {\n            LOGE(\"call QY265DecoderCreate fail..\");\n            goto error_exit;\n        }\n        LOGD(\"call QY265DecoderCreate Succeed..\");\n    }\n\t// open lentoid HEVC decoder\n\tLOGI(\"create lentoid decoder: compatibility = %d, threads = %d\\n\", compatibility, threads);\n\tlent_ctx = lenthevcdec_create(threads, compatibility, NULL);\n\tif ( NULL == lent_ctx ) {\n\t\tLOGE(\"call lenthevcdec_create failed!\\n\");\n\t\tgoto error_exit;\n\t}\n\tLOGD(\"get decoder %p\\n\", lent_ctx);\n\n    // find sps, decode it and get video resolution\n    sps_len = lent_hevc_get_sps(au_buf, au_buf_size, &sps);\n    if ( sps_len > 0 ) {\n        // get a one-thread decoder to decode SPS\n        one_thread_ctx = lenthevcdec_create(1, compatibility, NULL);\n        if ( NULL == lent_ctx )\n            
goto error_exit;\n        width = 0;\n        height = 0;\n        ret = lenthevcdec_decode_frame(one_thread_ctx, sps, sps_len, 0, &got_frame, &width, &height, stride, (void**)pixels, &pts);\n        if ( 0 != width && 0 != height ) {\n            media.width = width;\n            media.height = height;\n            LOGD(\"Video dimensions is %dx%d\\n\", width, height);\n        }\n        lenthevcdec_destroy(one_thread_ctx);\n        one_thread_ctx = NULL;\n    }\n    return 0;\n\nerror_exit:\n\tif ( NULL != in_file )\n\t\tfclose(in_file);\n\tin_file = NULL;\n\tif ( NULL != au_buf )\n\t\tfree(au_buf);\n\tau_buf = NULL;\n\tau_buf_size = 0;\n\tif ( NULL != lent_ctx )\n\t\tlenthevcdec_destroy(lent_ctx);\n\tlent_ctx = NULL;\n\tif ( NULL != one_thread_ctx )\n\t\tlenthevcdec_destroy(one_thread_ctx);\n\tone_thread_ctx = NULL;\n\tif ( NULL != ksydec_ctx )\n\t    QY265DecoderDestroy(ksydec_ctx);\n\tksydec_ctx = NULL;\n\n\treturn -1;\n}\n\n\nstatic int\nMediaPlayer_prepare(JNIEnv *env, jobject thiz, jobject context, jint decoderType, jint render, jint threadNumber, jfloat fps) {\n    LOGD(\"MediaPlayer_prepare: %d threads, fps %f\\n\", threadNumber, fps);\n    renderFPS = fps;\n    if (fps == 0) {\n        renderInterval = 1;\n    } else {\n        renderInterval = 1.0 / fps * 1000000; // us\n    }\n\n    if (decoderType == 0) {\n        use_ksy = 1;\n    } else {\n        use_ksy = 0;\n    }\n\n    disable_render = render;\n\n\treturn rawbs_prepare(env, context, threadNumber);\n\n}\n\nstatic int\nMediaPlayer_start(JNIEnv *env, jobject thiz)\n{\n\tLOGI(\"start decoding thread\");\n\n\tpthread_create(&decode_thread, NULL, rawbs_runDecoder, NULL);\n\n\treturn 0;\n}\n\nstatic int\nMediaPlayer_pause(JNIEnv *env, jobject thiz)\n{\n\treturn 0;\n}\n\nstatic int\nMediaPlayer_go(JNIEnv *env, jobject thiz)\n{\n\treturn 0;\n}\n\n\nstatic int\nMediaPlayer_stop(JNIEnv *env, jobject thiz)\n{\n\tvoid* result;\n\texit_decode_thread = 1;\n\tpthread_join(decode_thread, 
&result);\n\texit_decode_thread = 0;\n\tif (p_sws_ctx != NULL) {\n//\t\tsws_freeContext(p_sws_ctx);\n\t\tp_sws_ctx = NULL;\n\t}\n\tif ( NULL != gVF.yuv_data[0] )\n\t\tfree(gVF.yuv_data[0]);\n\tmemset(&gVF, 0, sizeof(gVF));\n\tLOGI(\"media player stopped\\n\");\n\treturn 0;\n}\n\nstatic bool\nMediaPlayer_isPlaying(JNIEnv *env, jobject thiz)\n{\n    return is_playing;\n}\n\nstatic int\nMediaPlayer_seekTo(JNIEnv *env, jobject thiz, jint msec)\n{\n\treturn 0;\n}\n\nstatic int\nMediaPlayer_getVideoWidth(JNIEnv *env, jobject thiz)\n{\n    int w = media.width;\n    return w;\n}\n\nstatic int\nMediaPlayer_getVideoHeight(JNIEnv *env, jobject thiz)\n{\n    int h = media.height;\n    return h;\n}\n\n\nstatic int\nMediaPlayer_getCurrentPosition(JNIEnv *env, jobject thiz)\n{\n    int msec = 0;\n    return msec;\n}\n\nstatic jfloat\nMediaPlayer_getDuration(JNIEnv *env, jobject thiz)\n{\n    //TODO: 通过视频实际帧率计算\n    return frame_count / renderFPS;\n}\n\nstatic jfloat\nMediaPlayer_getDecodeTime(JNIEnv *env, jobject thiz)\n{\n    return real_time;\n}\n\nstatic jfloat\nMediaPlayer_getDecodeFPS(JNIEnv *env, jobject thiz)\n{\n    return real_fps;\n}\n\n// ----------------------------------------------------------------------------\n\nstatic void MediaPlayer_native_init(JNIEnv *env, jobject thiz)\n{\n    jclass clazz;\n    clazz = env->FindClass(\"com/ksyun/media/ksy265codec/demo/decoder/hevdecoder/NativeMediaPlayer\");\n    if (clazz == NULL) {\n        jniThrowException(env, \"java/lang/RuntimeException\", \"Can't find MediaPlayer\");\n        return;\n    }\n\tp_sws_ctx = NULL;\n\n\tframes_sum = 0;\n\ttstart = 0;\n\n\tframes = 0;\n\ttlast = 0;\n\n\trenderFPS = 0;\n\trenderInterval = 0;\n\n\tdisable_render = 0;\n    use_ksy = 0;\n\n    if (thiz) {\n        ctx = (Context *) calloc(1, sizeof(Context));\n        if (ctx == NULL) {\n            return;\n        }\n        jclass clazz = env->GetObjectClass(thiz);\n        ctx->obj = env->NewGlobalRef(thiz);\n\n        ctx->postEvent = 
env->GetMethodID(clazz, \"postEventFromNative\", \"(III)V\");\n        if (ctx->postEvent == NULL) {\n            jniThrowException(env, \"java/lang/RuntimeException\", \"Can't find MediaPlayer.postEventFromNative\");\n            return;\n        }\n        ctx->drawFrame = env->GetMethodID(clazz, \"drawFrame\", \"(II)I\");\n        if (ctx->drawFrame == NULL) {\n            jniThrowException(env, \"java/lang/RuntimeException\", \"Can't find MediaPlayer.drawFrame\");\n            return;\n        }\n    }\n}\n\nstatic void\nMediaPlayer_renderBitmap(JNIEnv *env, jobject  obj, jobject bitmap)\n{\n\tvoid*              pixels;\n\tint                ret;\n\n\tif ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {\n\t\tLOGE(\"AndroidBitmap_lockPixels() failed ! error=%d\", ret);\n\t}\n\n\t// Convert the image from its native format to RGB565\n\tuint32_t start_time = getms();\n\tLOGD(\"before scale: %d\", getms());\n#if USE_SWSCALE\n\t// use swscale, which may be optimized with SSE for x86 arch\n\tif (p_sws_ctx == NULL) {\n\t\tp_sws_ctx = sws_getContext( gVF.width,\n\t\t\t\t\t\t\t\t\tgVF.height,\n\t\t\t\t\t\t\t\t\tPIX_FMT_YUV420P,\n\t\t\t\t\t\t\t\t\tgVF.width,\n\t\t\t\t\t\t\t\t\tgVF.height,\n\t\t\t\t\t\t\t\t\tPIX_FMT_RGB565, SWS_BICUBIC|SWS_CPU_CAPS_MMX|SWS_CPU_CAPS_MMX2|SWS_CPU_CAPS_SSE2, NULL, NULL, NULL);\n\t}\n\tif (p_sws_ctx != NULL) {\n\t\tunsigned char *src[4];\n\t\tint src_stride[4];\n\t\tunsigned char *dst[4];\n\t\tint dst_stride[4];\n\n\t\tsrc_stride[0] = gVF.linesize_y;\n\t\tsrc_stride[1] = src_stride[2] = gVF.linesize_uv;\n\t\tdst[0] = (unsigned char*)pixels;\n\t\tdst_stride[0] = gVF.width * 2;\n\t\tsws_scale(p_sws_ctx, (const uint8_t * const *)gVF.yuv_data, src_stride, 0, gVF.height, dst, 
dst_stride);\n\t}\n#else\n\tConvertYCbCrToRGB565(\t\tgVF.yuv_data[0],\n\t\t\t\t\t\t\t\tgVF.yuv_data[1],\n\t\t\t\t\t\t\t\tgVF.yuv_data[2],\n\t\t\t\t\t\t\t\t(uint8_t*)pixels,\n\t\t\t\t\t\t\t\tgVF.width,\n\t\t\t\t\t\t\t\tgVF.height,\n\t\t\t\t\t\t\t\tgVF.linesize_y,\n\t\t\t\t\t\t\t\tgVF.linesize_uv,\n\t\t\t\t\t\t\t\tgVF.width * 2,\n\t\t\t\t\t\t\t\t420  );\n#endif\n\n\tuint32_t end_time = getms();\n\tLOGD(\"after scale: %d\", getms());\n\tLOGD(\"scale time: %dms\", end_time - start_time);\n\n\tAndroidBitmap_unlockPixels(env, bitmap);\n}\n\n\nstatic void MediaPlayer_set_output(JNIEnv *env, jobject thiz, jstring path) {\n    const char *pathStr = env->GetStringUTFChars(path, NULL);\n\n    enable_output_yuv = true;\n    strcpy(output_path, pathStr);\n\n    // Make sure that local ref is released before a potential exception\n    env->ReleaseStringUTFChars(path, pathStr);\n}\n\nstatic jstring MediaPlayer_getVersion(JNIEnv *env, jobject thiz) {\n    if (use_ksy) {\n        return env->NewStringUTF(strLibQy265Version);\n    } else {\n\t\tchar version[20];\n\t\tsprintf(version, \"%d\", lenthevcdec_version());\n        return env->NewStringUTF(version);\n    }\n}\n\n// ----------------------------------------------------------------------------\n\nstatic JNINativeMethod gMethods[] = {\n    { \"setDataSource\", \"(Ljava/lang/String;)I\", (void *) MediaPlayer_setDataSource },\n    { \"native_prepare\", \"(Landroid/content/Context;IIIF)I\", (void *) MediaPlayer_prepare },\n    { \"native_start\", \"()I\", (void *) MediaPlayer_start },\n    { \"native_stop\", \"()I\", (void *) MediaPlayer_stop },\n    { \"getVideoWidth\", \"()I\", (void *) MediaPlayer_getVideoWidth },\n    { \"getVideoHeight\", \"()I\", (void *) MediaPlayer_getVideoHeight },\n    { \"native_seekTo\", \"(I)I\", (void *) MediaPlayer_seekTo },\n    { \"native_pause\", \"()I\", (void *) MediaPlayer_pause },\n    { \"native_go\", \"()I\", (void *) MediaPlayer_go },\n    { \"isPlaying\", \"()Z\", (void *) 
MediaPlayer_isPlaying },\n    { \"getCurrentPosition\", \"()I\", (void *) MediaPlayer_getCurrentPosition },\n    { \"getDuration\", \"()F\", (void *) MediaPlayer_getDuration },\n    { \"getDecodeTime\", \"()F\", (void *) MediaPlayer_getDecodeTime },\n    { \"getDecodeFPS\", \"()F\", (void *)  MediaPlayer_getDecodeFPS },\n    { \"native_init\", \"()V\", (void *) MediaPlayer_native_init },\n    { \"renderBitmap\", \"(Landroid/graphics/Bitmap;)V\", (void *) MediaPlayer_renderBitmap },\n    { \"native_set_output\", \"(Ljava/lang/String;)V\", (void *) MediaPlayer_set_output },\n    { \"getVersion\", \"()Ljava/lang/String;\", (void *) MediaPlayer_getVersion },\n};\n\nint register_player(JNIEnv *env) {\n\treturn jniRegisterNativeMethods(env, kClassPathName, gMethods, sizeof(gMethods) / sizeof(gMethods[0]));\n}\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/jniplayer.h",
    "content": "#ifndef __JNIPLAYER_H__\n#define __JNIPLAYER_H__\n\nstruct VideoFrame\n{\n\tint width;\n\tint height;\n\tint linesize_y;\n\tint linesize_uv;\n\tdouble pts;\n\tuint8_t *yuv_data[3];\n};\n\nuint32_t getms();\n\n#endif /* __JNIPLAYER_H__ */\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/jniplayer_new.cpp",
    "content": "// jniplayer.cpp : decode H.265/HEVC video data in separate native thread\n//\n// Copyright (c) 2013 Strongene Ltd. All Right Reserved.\n// http://www.strongene.com\n//\n// Contributors:\n// Shengbin Meng <shengbinmeng@gmail.com>\n// James Deng <hugeice@gmail.com>\n//\n// You are free to re-use this as the basis for your own application\n// in source and binary forms, with or without modification, provided\n// that the following conditions are met:\n//\n//  * Redistributions of source code must retain the above copyright\n// notice and this list of conditions.\n//  * Redistributions in binary form must reproduce the above\n// copyright notice and this list of conditions in the documentation\n// and/or other materials provided with the distribution.\n\n\n\n#include <android/log.h>\n#include <android/bitmap.h>\n#include <stdio.h>\n#include <time.h>\n#include <pthread.h>\n#include <unistd.h>\n#include \"jniplayer.h\"\n#include \"jni_utils.h\"\n#include \"yuv2rgb565.h\"\n#include \"gl_renderer.h\"\n\n#ifdef __cplusplus\n\t#define __STDC_CONSTANT_MACROS\n\t#define __STDC_LIMIT_MACROS\n\t#ifdef _STDINT_H\n\t\t#undef _STDINT_H\n\t#endif\n\t#include <stdint.h>\n\t#define __STDC_FORMAT_MACROS\n#endif\n\nextern \"C\" {\n#include \"lenthevcdec.h\"\n#include \"qy265dec.h\"\n}\n\n#define LOG_TAG    \"jniplayer\"\n\n#define ENABLE_LOGD 0\n#if ENABLE_LOGD\n#define LOGD(...)  __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__)\n#else\n#define LOGD(...)\n#endif\n#define LOGI(...)  __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)\n#define LOGE(...)  
__android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)\n\n#ifndef _countof\n#define _countof(a) (sizeof(a) / sizeof((a)[0]))\n#endif\n\n#define LOOP_PLAY 0\n\n#if ARCH_ARM\n#define USE_SWSCALE 0\n#else\n#define USE_SWSCALE 0\n#endif\n\nstruct fields_t {\n    jmethodID\tdrawFrame;\n    jmethodID   postEvent;\n};\n\nstruct MediaInfo\n{\n\tint width;\n\tint height;\n\tchar data_src[1024];\n\tint raw_bs;\n};\n\nVideoFrame gVF = {0, 0, 0, 0, 0, {NULL, NULL, NULL}};\npthread_mutex_t gVFMutex = PTHREAD_MUTEX_INITIALIZER;\n\nstatic fields_t fields;\n\nstatic JNIEnv *gEnv = NULL;\nstatic JNIEnv *gEnvLocal = NULL;\n\nstatic jclass gClass = NULL;\nstatic MediaInfo media;\n\nstatic pthread_t decode_thread;\n\nstatic struct SwsContext   *p_sws_ctx;\n\nstatic const char* const kClassPathName = \"com/ksyun/media/ksy265codec/demo/decoder/hevdecoder/NativeMediaPlayer\";\n\n// for lenthevcdec\nstatic const uint32_t AU_COUNT_MAX = 1024 * 1024;\nstatic const uint32_t AU_BUF_SIZE_MAX = 1024 * 1024 * 50;\nstatic uint32_t au_pos[AU_COUNT_MAX];\t// too big array, use static to save stack space\nstatic uint32_t au_count, au_buf_size;\nstatic uint8_t *au_buf = NULL;\nstatic lenthevcdec_ctx lent_ctx = NULL;\n\nstatic volatile int exit_decode_thread = 0;\nstatic volatile int is_playing = 0;\n\n\nstatic int frames_sum = 0;\nstatic double tstart = 0;\n\nstatic int frames = 0;\nstatic double tlast = 0;\n\nstatic float renderFPS = 0;\nstatic uint64_t renderInterval = 0;\nstatic struct timeval timeStart;\n\nstatic int use_ksy = 0;\nstatic void* ksydec_ctx = NULL;\nstatic QY265Frame decframe;\n\nstatic int disable_render = 0;\n\nstatic inline int next_p2(int a) {\n    int rval=1;\n    while(rval<a) rval<<=1;\n    return rval;\n}\n\nuint32_t getms()\n{\n\tstruct timeval t;\n\tgettimeofday(&t, NULL);\n\treturn (t.tv_sec * 1000) + (t.tv_usec / 1000);\n}\n\nvoid postEvent(int msg, int ext1, int ext2)\n{\n\tJNIEnv *env = getJNIEnv();\n    env->CallStaticVoidMethod(gClass, fields.postEvent, msg, 
ext1, ext2, 0);\n}\n\nint drawFrame(VideoFrame * vf)\n{\n\tLOGD(\"enter drawFrame:%u (%f)\", getms(), vf->pts);\n\n\tif(disable_render)\n\t    return 0;\n\n\t// copy decode frame to global buffer\n\tpthread_mutex_lock(&gVFMutex);\n\tif ( gVF.linesize_y != vf->linesize_y || gVF.linesize_uv != vf->linesize_uv || gVF.height != vf->height ) {\n\t\tif ( NULL != gVF.yuv_data[0] )\n\t\t\tfree(gVF.yuv_data[0]);\n\t\tgVF.yuv_data[0] = gVF.yuv_data[1] = gVF.yuv_data[2] = NULL;\n\t\tgVF.yuv_data[0] = (uint8_t*)malloc(vf->linesize_y * vf->height + vf->linesize_uv * vf->height );\n\t\tif ( NULL == gVF.yuv_data[0] ) {\n\t\t\tLOGE(\"malloc failed!\\n\");\n\t\t\tpthread_mutex_unlock(&gVFMutex);\n\t\t\treturn -1;\n\t\t}\n\t\tgVF.yuv_data[1] = gVF.yuv_data[0] + vf->linesize_y*vf->height;\n\t\tgVF.yuv_data[2] = gVF.yuv_data[1] + vf->linesize_uv*vf->height/2;\n\t}\n\tgVF.width = vf->width;\n\tgVF.height = vf->height;\n\tgVF.linesize_y = vf->linesize_y;\n\tgVF.linesize_uv = vf->linesize_uv;\n\tgVF.pts = vf->pts;\n\tif(use_ksy) {\n        uint8_t *dst[3] = {gVF.yuv_data[0], gVF.yuv_data[1], gVF.yuv_data[2]};\n        uint8_t *src[3] = {decframe.pData[0], decframe.pData[1], decframe.pData[2]};\n        for (int j = 0; j < gVF.height/2; ++j) {\n                memcpy(dst[0], src[0], gVF.linesize_y);\n                dst[0] += gVF.linesize_y;\n                src[0] += decframe.iStride[0];\n                memcpy(dst[0], src[0], gVF.linesize_y);\n                dst[0] += gVF.linesize_y;\n                src[0] += decframe.iStride[0];\n                memcpy(dst[1], src[1], gVF.linesize_uv);\n                dst[1] += gVF.linesize_uv;\n                src[1] += decframe.iStride[1];\n                memcpy(dst[2], src[2], gVF.linesize_uv);\n                dst[2] += gVF.linesize_uv;\n                src[2] += decframe.iStride[2];\n        }\n\t} else {\n\t    memcpy(gVF.yuv_data[0], 
vf->yuv_data[0], vf->linesize_y*vf->height);\n        memcpy(gVF.yuv_data[1], vf->yuv_data[1], vf->linesize_uv*vf->height/2);\n        memcpy(gVF.yuv_data[2], vf->yuv_data[2], vf->linesize_uv*vf->height/2);\n\t}\n\tpthread_mutex_unlock(&gVFMutex);\n\n\t// wait for display\n\tstruct timeval timeNow;\n\tgettimeofday(&timeNow, NULL);\n\tint64_t timePassed = ((int64_t)(timeNow.tv_sec - timeStart.tv_sec))*1000000 + (timeNow.tv_usec - timeStart.tv_usec);\n\tint64_t delay = vf->pts - timePassed;\n\tif (delay > 0) {\n\t\tusleep(delay);\n\t}\n\n\t// update information\n\tgettimeofday(&timeNow, NULL);\n\tdouble tnow = timeNow.tv_sec + (timeNow.tv_usec / 1000000.0);\n\tif (tlast == 0) tlast = tnow;\n\tif (tstart == 0) tstart = tnow;\n\tif (tnow > tlast + 1) {\n\t\tdouble avg_fps;\n\n\t\tLOGI(\"Video Display FPS:%i\", (int)frames);\n\t\tframes_sum += frames;\n\t\tavg_fps = frames_sum / (tnow - tstart);\n\t\tLOGI(\"Video AVG FPS:%.2lf\", avg_fps);\n\t\tpostEvent(900, int(frames), int(avg_fps * 4096));\n\t\ttlast = tlast + 1;\n\t\tframes = 0;\n\t}\n\tframes++;\n\n\t// request display\n\tif (gEnvLocal == NULL) gEnvLocal = getJNIEnv();\n\tLOGD(\"before request draw:%u (%f)\", getms(), vf->pts);\n   return gEnvLocal->CallStaticIntMethod(gClass, fields.drawFrame, vf->width, vf->height);\n}\n\nint lent_hevc_get_sps(uint8_t* buf, int size, uint8_t** sps_ptr)\n{\n\tint i, nal_type, sps_pos;\n\tsps_pos = -1;\n\tfor ( i = 0; i < (size - 4); i++ ) {\n\t\tif ( 0 == buf[i] && 0 == buf[i+1] && 1 == buf[i+2] ) {\n\t\t\tnal_type = (buf[i+3] & 0x7E) >> 1;\n\t\t\tif ( 33 != nal_type && sps_pos >= 0 ) {\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tif ( 33 == nal_type ) { // sps\n\t\t\t\tsps_pos = i;\n\t\t\t}\n\t\t\ti += 2;\n\t\t}\n\t}\n\tif ( sps_pos < 0 )\n\t\treturn 0;\n\tif ( i == (size - 4) )\n\t\ti = size;\n\t*sps_ptr = buf + sps_pos;\n\treturn i - sps_pos;\n}\n\nint lent_hevc_get_frame(uint8_t* buf, int size, int *is_idr)\n{\n\tstatic int seq_hdr = 0;\n\tint i, nal_type, idr = 0;\n\tfor ( i = 0; i < 
(size - 6); i++ ) {\n\t\tif ( 0 == buf[i] && 0 == buf[i+1] && 1 == buf[i+2] ) {\n\t\t\tnal_type = (buf[i+3] & 0x7E) >> 1;\n\t\t\tif ( nal_type <= 21 ) {\n\t\t\t\tif ( buf[i+5] & 0x80 ) { /* first slice in pic */\n\t\t\t\t\tif ( !seq_hdr )\n\t\t\t\t\t\tbreak;\n\t\t\t\t\telse\n\t\t\t\t\t\tseq_hdr = 0;\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ( nal_type >= 32 && nal_type <= 34 ) {\n\t\t\t\tif ( !seq_hdr ) {\n\t\t\t\t\tseq_hdr = 1;\n\t\t\t\t\tidr = 1;\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t\tseq_hdr = 1;\n\t\t\t}\n\t\t\ti += 2;\n\t\t}\n\t}\n\tif ( i == (size - 6) )\n\t\ti = size;\n\tif ( NULL != is_idr )\n\t\t*is_idr = idr;\n\treturn i;\n}\n\nvoid* rawbs_runDecoder(void *p)\n{\n\tint32_t got_frame, width, height, stride[3];\n\tuint8_t* pixels[3];\n\tint64_t pts, got_pts;\n\tint frame_count, ret, i;\n\n\tif ( (NULL == lent_ctx && ksydec_ctx == NULL) || NULL == au_buf )\n\t\treturn NULL;\n\ndecode:\n\t// decode all AUs\n\tframe_count = 0;\n\tfor ( i = 0; i < au_count && !exit_decode_thread; i++ ) {\n\t\tpts = i * 40;\n\t\tgot_frame = 0;\n\t\tuint32_t start_time = getms();\n\t\tLOGD(\"before decode: %u\", start_time);\n\t\tif(use_ksy) {\n\t\t    QY265DecodeFrame(ksydec_ctx, au_buf + au_pos[i], au_pos[i + 1] - au_pos[i], &ret, 0);\n\t\t    if ( ret < 0 ) {\n\t\t        LOGE(\"call QY265DecodeFrame failed! 
ret = %d\\n\", ret);\n                goto exit;\n\t\t    }\n\n            QY265DecoderGetDecodedFrame(ksydec_ctx, &decframe, &ret, 0);\n            if ( ret == 0 && decframe.bValid ) {\n                got_frame = 1;\n                width = decframe.frameinfo.nWidth;\n                height = decframe.frameinfo.nHeight;\n                stride[0] = decframe.iStride[0];\n                stride[1] = decframe.iStride[1];\n                pixels[0] = decframe.pData[0];\n                pixels[1] = decframe.pData[1];\n                pixels[2] = decframe.pData[2];\n            }\n            else\n                got_frame = 0;\n\t\t} else {\n\t\t    ret = lenthevcdec_decode_frame(lent_ctx, au_buf + au_pos[i], au_pos[i + 1] - au_pos[i], pts,\n\t\t\t\t\t       &got_frame, &width, &height, stride, (void**)pixels, &got_pts);\n\t\t    if ( ret < 0 ) {\n\t\t\t    LOGE(\"call lenthevcdec_decode_frame failed! ret = %d\\n\", ret);\n\t\t\t    goto exit;\n\t\t    }\n\t\t}\n\t\tuint32_t end_time = getms();\n\t\tLOGD(\"after decode: %u\", end_time);\n\t\tuint32_t dec_time = end_time - start_time;\n\t\tif ( got_frame > 0 ) {\n\t\t\tLOGD(\"decoding time: %u - %u = %u\\n\", end_time, start_time, dec_time);\n\t\t\tLOGD(\"decode frame: pts = %\" PRId64 \", linesize = {%d,%d,%d}\\n\", got_pts, stride[0], stride[1], stride[2]);\n\t\t\tif ( media.width != width || media.height != height ) {\n\t\t\t\tLOGD(\"Video dimensions change! 
%dx%d -> %dx%d\\n\", media.width, media.height, width, height);\n\t\t\t\tmedia.width = width;\n\t\t\t\tmedia.height = height;\n\t\t\t}\n\t\t\t// draw frame to screen\n\t\t\tVideoFrame vf;\n\t\t\tvf.width = width;\n\t\t\tvf.height = height;\n\t\t\tvf.linesize_y = stride[0];\n\t\t\tvf.linesize_uv = stride[1];\n\t\t\tvf.pts = renderInterval * frame_count;\n\t\t\tvf.yuv_data[0] = pixels[0];\n\t\t\tvf.yuv_data[1] = pixels[1];\n\t\t\tvf.yuv_data[2] = pixels[2];\n\n\t\t\tif (frame_count == 0) {\n\t\t\t\tgettimeofday(&timeStart, NULL);\n\t\t\t}\n\t\t\tdrawFrame(&vf);\n\t\t\tif(use_ksy)\n\t\t\t    QY265DecoderReturnDecodedFrame(ksydec_ctx, &decframe);\n\t\t\tframe_count++;\n\t\t}\n\t}\n\n#if LOOP_PLAY\n\tif (!exit_decode_thread) {\n\t\tLOGI(\"automatically play again\\n\");\n\t\tgoto decode;\n\t}\n#endif\n\n\t// flush decoder\n\twhile ( !exit_decode_thread ) {\n\t\tgot_frame = 0;\n\t\tif(use_ksy) {\n\t\t    QY265DecoderGetDecodedFrame(ksydec_ctx, &decframe, &ret, 0);\n            if ( ret == 0 && decframe.bValid ) {\n                got_frame = 1;\n                width = decframe.frameinfo.nWidth;\n                height = decframe.frameinfo.nHeight;\n                stride[0] = decframe.iStride[0];\n                stride[1] = decframe.iStride[1];\n                pixels[0] = decframe.pData[0];\n                pixels[1] = decframe.pData[1];\n                pixels[2] = decframe.pData[2];\n           } else\n                break;\n\t\t} else {\n\t\t        ret = lenthevcdec_decode_frame(lent_ctx, NULL, 0, pts,\n\t\t\t\t\t       &got_frame, &width, &height, stride, (void**)pixels, &got_pts);\n\t\t        if ( ret < 0 || got_frame <= 0)\n\t\t\t        break;\n\t\t}\n\n\t\tif ( got_frame > 0 ) {\n\t\t\tif ( media.width != width || media.height != height ) {\n\t\t\t\tLOGD(\"Video dimensions change! 
%dx%d -> %dx%d\\n\", media.width, media.height, width, height);\n\t\t\t\tmedia.width = width;\n\t\t\t\tmedia.height = height;\n\t\t\t}\n\t\t\t// draw frame to screen\n\t\t\tVideoFrame vf;\n\t\t\tvf.width = width;\n\t\t\tvf.height = height;\n\t\t\tvf.linesize_y = stride[0];\n\t\t\tvf.linesize_uv = stride[1];\n\t\t\tvf.pts = renderInterval * frame_count;\n\t\t\tvf.yuv_data[0] = pixels[0];\n\t\t\tvf.yuv_data[1] = pixels[1];\n\t\t\tvf.yuv_data[2] = pixels[2];\n\t\t\tdrawFrame(&vf);\n\t\t\tif(use_ksy)\n                QY265DecoderReturnDecodedFrame(ksydec_ctx, &decframe);\n\t\t\tframe_count++;\n\t\t}\n\t}\n\nexit:\n\tif ( NULL != au_buf )\n\t\tfree(au_buf);\n\tau_buf = 0;\n\tif ( NULL != lent_ctx )\n\t\tlenthevcdec_destroy(lent_ctx);\n\tlent_ctx = NULL;\n\tif ( ksydec_ctx != NULL )\n\t    QY265DecoderDestroy(ksydec_ctx);\n\tksydec_ctx = NULL;\n\tpostEvent(909, int(frame_count), 0); // end of file\n\tdetachJVM();\n\tis_playing = 0;\n\tLOGI(\"decode thread exit\\n\");\n\texit_decode_thread = 0;\n\n\treturn NULL;\n}\n\n\n\nstatic int\nMediaPlayer_setDataSource(JNIEnv *env, jobject thiz, jstring path)\n{\n\tconst char *pathStr = env->GetStringUTFChars(path, NULL);\n\tmemset(&media, 0, sizeof(media));\n\tstrcpy(media.data_src, pathStr);\n\t// Make sure that local ref is released before a potential exception\n\tenv->ReleaseStringUTFChars(path, pathStr);\n\t// is raw HEVC bitstream file ?\n\tstatic const char * hevc_raw_bs_ext[] = {\".hevc\", \".hm91\", \".hm10\", \".bit\", \".hvc\", \".h265\", \".265\"};\n\tchar * ext = strrchr(media.data_src, '.');\n\tif ( NULL != ext ) {\n\t\tint i;\n\t\tfor ( i = 0; i < _countof(hevc_raw_bs_ext); i++ ) {\n\t\t\tif ( strcasecmp(hevc_raw_bs_ext[i], ext) == 0 )\n\t\t\t\tbreak;\n\t\t}\n\t\tif ( i < _countof(hevc_raw_bs_ext) )\n\t\t\tmedia.raw_bs = 1;\n\t}\n\treturn 0;\n}\n\nstatic int rawbs_prepare(int threads)\n{\n\tFILE *in_file;\n\tint32_t got_frame, width, height, stride[3];\n\tuint8_t* pixels[3];\n\tint64_t pts, got_pts;\n\tuint8_t 
*sps;\n\tlenthevcdec_ctx one_thread_ctx;\n\tint compatibility, frame_count, sps_len, ret, i;\n\n\tin_file = NULL;\n\tau_buf = NULL;\n\tlent_ctx = NULL;\n\tone_thread_ctx = NULL;\n\tksydec_ctx = NULL;\n\n\t// get compatibility version\n\tcompatibility = 0x7fffffff;\n\tif ( strncasecmp(\".hm91\", media.data_src + (strlen(media.data_src) - 5), 5) == 0 )\n\t\tcompatibility = 91;\n\telse if ( strncasecmp(\".hm10\", media.data_src + (strlen(media.data_src) - 5), 5) == 0 )\n\t\tcompatibility = 100;\n\n\t// read file\n\tin_file = fopen(media.data_src, \"rb\");\n\tif ( NULL == in_file ) {\n\t\tLOGE(\"Can not open input file '%s'\\n\", media.data_src);\n\t\tgoto error_exit;\n\t}\n\tfseek(in_file, 0, SEEK_END);\n\tau_buf_size = ftell(in_file);\n\tfseek(in_file, 0, SEEK_SET);\n\tLOGE(\"file size is %d bytes\\n\", au_buf_size);\n\tif ( au_buf_size > AU_BUF_SIZE_MAX )\n\t\tau_buf_size = AU_BUF_SIZE_MAX;\n\tau_buf = (uint8_t*)malloc(au_buf_size);\n\tif ( NULL == au_buf ) {\n\t\tLOGE(\"call malloc failed! 
size is %d\\n\", au_buf_size);\n\t\tgoto error_exit;\n\t}\n\tif ( fread(au_buf, 1, au_buf_size, in_file) != au_buf_size ) {\n\t\tLOGE(\"call fread failed!\\n\");\n\t\tgoto error_exit;\n\t}\n\tfclose(in_file);\n\tin_file = NULL;\n\tLOGE(\"%d bytes read to address %p\\n\", au_buf_size, au_buf);\n\n\t// find all AU\n\tau_count = 0;\n\tfor ( i = 0; i < au_buf_size && au_count < (AU_COUNT_MAX - 1); i+=3 ) {\n\t\ti += lent_hevc_get_frame(au_buf + i, au_buf_size - i, NULL);\n\t\tif (i < au_buf_size) {\n\t\t\tau_pos[au_count++] = i;\n\t\t}\n\t\tLOGD(\"AU[%d] = %d\\n\", au_count - 1, au_pos[au_count - 1]);\n\t}\n\tau_pos[au_count] = au_buf_size; // include last AU\n\tLOGE(\"found %d AUs\\n\", au_count);\n\n\t// open lentoid HEVC decoder\n\tif(use_ksy) {\n\t    int hr = QY_OK;\n        QY265DecConfig config;\n\n        config.threads = threads;\n        config.bEnableOutputRecToFile = 0;\n        config.strRecYuvFileName = NULL;\n\n        ksydec_ctx = QY265DecoderCreate(&config, &hr);\n        if(ksydec_ctx == NULL) {\n            LOGE(\"call QY265DecoderCreate fail..\");\n            goto error_exit;\n        }\n        LOGE(\"call QY265DecoderCreate Succeed..\");\n\t}\n\t    LOGI(\"create lentoid decoder: compatibility = %d, threads = %d\\n\", compatibility, threads);\n\t    lent_ctx = lenthevcdec_create(threads, compatibility, NULL);\n\t    if ( NULL == lent_ctx ) {\n\t\t    LOGE(\"call lenthevcdec_create failed!\\n\");\n\t\t    goto error_exit;\n\t    }\n\t    LOGD(\"get decoder %p\\n\", lent_ctx);\n\n\n\t    // find sps, decode it and get video resolution\n\t    sps_len = lent_hevc_get_sps(au_buf, au_buf_size, &sps);\n\t    if ( sps_len > 0 ) {\n\t\t    // get a one-thread decoder to decode SPS\n\t\t    one_thread_ctx = lenthevcdec_create(1, compatibility, NULL);\n\t\t    if ( NULL == lent_ctx ) {\n\t\t        LOGE(\"call lenthevcdec_create fail..\");\n\t\t\t    goto error_exit;\n\t\t    }\n\t\t    width = 0;\n\t\t    height = 0;\n\t\t    ret = 
lenthevcdec_decode_frame(one_thread_ctx, sps, sps_len, 0, &got_frame, &width, &height, stride, (void**)pixels, &pts);\n\t\t    if ( 0 != width && 0 != height ) {\n\t\t\t    media.width = width;\n\t\t\t    media.height = height;\n\t\t\t    LOGE(\"Video dimensions is %dx%d\\n\", width, height);\n\t\t    }\n\t\t    lenthevcdec_destroy(one_thread_ctx);\n\t\t    one_thread_ctx = NULL;\n\t\t    if(use_ksy) {\n\t\t        gVF.linesize_y = next_p2(width);\n                gVF.linesize_uv = next_p2(width/2);\n                gVF.yuv_data[0] = (uint8_t*)malloc( gVF.linesize_y * height);\n                gVF.yuv_data[1] = (uint8_t*)malloc( gVF.linesize_uv * height/2);\n                gVF.yuv_data[2] = (uint8_t*)malloc( gVF.linesize_uv * height/2);\n                LOGE(\"linesize:%d, %d\", gVF.linesize_y, gVF.linesize_uv);\n\t\t    }\n\t    }\n\treturn 0;\n\nerror_exit:\n\tif ( NULL != in_file )\n\t\tfclose(in_file);\n\tin_file = NULL;\n\tif ( NULL != au_buf )\n\t\tfree(au_buf);\n\tau_buf = NULL;\n\tif ( NULL != lent_ctx )\n\t\tlenthevcdec_destroy(lent_ctx);\n\tlent_ctx = NULL;\n\tif ( NULL != one_thread_ctx )\n\t\tlenthevcdec_destroy(one_thread_ctx);\n\tone_thread_ctx = NULL;\n\tif ( NULL != ksydec_ctx)\n        QY265DecoderDestroy(ksydec_ctx);\n    ksydec_ctx = NULL;\n\n\treturn -1;\n}\n\nstatic int\nMediaPlayer_prepare(JNIEnv *env, jobject thiz, jint decoderType, jint render, jint threadNumber, jfloat fps)\n{\n\tLOGE(\"MediaPlayer_prepare: decoderType:%d, %d threads, fps %f\\n\", decoderType, threadNumber, fps);\n\trenderFPS = fps;\n\tif (fps == 0) renderInterval = 1;\n\telse {\n\t\trenderInterval = 1.0 / fps * 1000000; // us\n\t}\n\n\tif (decoderType == 0)\n\t    use_ksy = 1;\n\n\tdisable_render = render;\n\n\treturn rawbs_prepare(threadNumber);\n}\n\nstatic int\nMediaPlayer_start(JNIEnv *env, jobject thiz)\n{\n\tLOGI(\"start decoding thread\");\n\n\tpthread_create(&decode_thread, NULL, rawbs_runDecoder, NULL);\n\n\treturn 0;\n}\n\nstatic int\nMediaPlayer_pause(JNIEnv 
*env, jobject thiz)\n{\n\treturn 0;\n}\n\nstatic int\nMediaPlayer_go(JNIEnv *env, jobject thiz)\n{\n\treturn 0;\n}\n\n\nstatic int\nMediaPlayer_stop(JNIEnv *env, jobject thiz)\n{\n\tvoid* result;\n\texit_decode_thread = 1;\n\tpthread_join(decode_thread, &result);\n\texit_decode_thread = 0;\n\tif (p_sws_ctx != NULL) {\n//\t\tsws_freeContext(p_sws_ctx);\n\t\tp_sws_ctx = NULL;\n\t}\n\tif ( NULL != gVF.yuv_data[0] )\n\t\tfree(gVF.yuv_data[0]);\n\tmemset(&gVF, 0, sizeof(gVF));\n\tLOGI(\"media player stopped\\n\");\n\treturn 0;\n}\n\nstatic bool\nMediaPlayer_isPlaying(JNIEnv *env, jobject thiz)\n{\n    return is_playing;\n}\n\nstatic int\nMediaPlayer_seekTo(JNIEnv *env, jobject thiz, jint msec)\n{\n\treturn 0;\n}\n\nstatic int\nMediaPlayer_getVideoWidth(JNIEnv *env, jobject thiz)\n{\n    int w = media.width;\n    return w;\n}\n\nstatic int\nMediaPlayer_getVideoHeight(JNIEnv *env, jobject thiz)\n{\n    int h = media.height;\n    return h;\n}\n\n\nstatic int\nMediaPlayer_getCurrentPosition(JNIEnv *env, jobject thiz)\n{\n    int msec = 0;\n    return msec;\n}\n\nstatic int\nMediaPlayer_getDuration(JNIEnv *env, jobject thiz)\n{\n    int msec = 0;\n    return msec;\n}\n\n\n\n// ----------------------------------------------------------------------------\n\nstatic void MediaPlayer_native_init(JNIEnv *env)\n{\n    jclass clazz;\n    clazz = env->FindClass(\"com/ksyun/media/ksy265codec/demo/decoder/hevdecoder/NativeMediaPlayer\");\n    if (clazz == NULL) {\n        jniThrowException(env, \"java/lang/RuntimeException\", \"Can't find MediaPlayer\");\n        return;\n    }\n\n    fields.postEvent = env->GetStaticMethodID(clazz, \"postEventFromNative\", \"(III)V\");\n\tif (fields.postEvent == NULL) {\n\t\tjniThrowException(env, \"java/lang/RuntimeException\", \"Can't find MediaPlayer.postEventFromNative\");\n\t\treturn;\n\t}\n\n\tfields.drawFrame = env->GetStaticMethodID(clazz, \"drawFrame\",\"(II)I\");\n\tif (fields.drawFrame == NULL) {\n\t\tjniThrowException(env, 
\"java/lang/RuntimeException\", \"Can't find MediaPlayer.drawFrame\");\n\t\treturn;\n\t}\n\n\tgClass = NULL;\n\tgEnv = NULL;\n\tgEnvLocal = NULL;\n\tp_sws_ctx = NULL;\n\n\tframes_sum = 0;\n\ttstart = 0;\n\n\tframes = 0;\n\ttlast = 0;\n\n\trenderFPS = 0;\n\trenderInterval = 0;\n\n\tdisable_render = 0;\n}\n\nstatic void\nMediaPlayer_native_setup(JNIEnv *env, jobject thiz, jobject weak_this)\n{\n\t// Hold onto the MediaPlayer class for use in calling the static method\n\t// that posts events to the application thread.\n\tjclass clazz = env->GetObjectClass(thiz);\n\tif (clazz == NULL) {\n\t\tjniThrowException(env, \"java/lang/Exception\", kClassPathName);\n\t\treturn;\n\t}\n\tgClass = (jclass)env->NewGlobalRef(clazz);\n\tgEnv = env;\n}\n\nstatic void\nMediaPlayer_renderBitmap(JNIEnv *env, jobject  obj, jobject bitmap)\n{\n\tvoid*              pixels;\n\tint                ret;\n\n\tif ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {\n\t\tLOGE(\"AndroidBitmap_lockPixels() failed ! 
error=%d\", ret);\n\t}\n\n\t// Convert the image from its native format to RGB565\n\tuint32_t start_time = getms();\n\tLOGD(\"before scale: %d\", getms());\n#if USE_SWSCALE\n\t// use swscale, which may be optimized with SSE for x86 arch\n\tif (p_sws_ctx == NULL) {\n\t\tp_sws_ctx = sws_getContext( gVF.width,\n\t\t\t\t\t\t\t\t\tgVF.height,\n\t\t\t\t\t\t\t\t\tPIX_FMT_YUV420P,\n\t\t\t\t\t\t\t\t\tgVF.width,\n\t\t\t\t\t\t\t\t\tgVF.height,\n\t\t\t\t\t\t\t\t\tPIX_FMT_RGB565, SWS_BICUBIC|SWS_CPU_CAPS_MMX|SWS_CPU_CAPS_MMX2|SWS_CPU_CAPS_SSE2, NULL, NULL, NULL);\n\t}\n\tif (p_sws_ctx != NULL) {\n\t\tunsigned char *src[4];\n\t\tint src_stride[4];\n\t\tunsigned char *dst[4];\n\t\tint dst_stride[4];\n\n\t\tsrc_stride[0] = gVF.linesize_y;\n\t\tsrc_stride[1] = src_stride[2] = gVF.linesize_uv;\n\t\tdst[0] = (unsigned char*)pixels;\n\t\tdst_stride[0] = gVF.width * 2;\n\t\tsws_scale(p_sws_ctx, (const uint8_t * const *)gVF.yuv_data, src_stride, 0, gVF.height, dst, dst_stride);\n\t}\n#else\n\tConvertYCbCrToRGB565(\t\tgVF.yuv_data[0],\n\t\t\t\t\t\t\t\tgVF.yuv_data[1],\n\t\t\t\t\t\t\t\tgVF.yuv_data[2],\n\t\t\t\t\t\t\t\t(uint8_t*)pixels,\n\t\t\t\t\t\t\t\tgVF.width,\n\t\t\t\t\t\t\t\tgVF.height,\n\t\t\t\t\t\t\t\tgVF.linesize_y,\n\t\t\t\t\t\t\t\tgVF.linesize_uv,\n\t\t\t\t\t\t\t\tgVF.width * 2,\n\t\t\t\t\t\t\t\t420  );\n#endif\n\n\tuint32_t end_time = getms();\n\tLOGD(\"after scale: %d\", getms());\n\tLOGD(\"scale time: %dms\", end_time - start_time);\n\n\tAndroidBitmap_unlockPixels(env, bitmap);\n}\n\n\n\n\n// ----------------------------------------------------------------------------\n\nstatic JNINativeMethod gMethods[] = {\n    { \"setDataSource\", \"(Ljava/lang/String;)I\", (void *) MediaPlayer_setDataSource },\n    { \"native_prepare\", \"(IIIF)I\", (void *) MediaPlayer_prepare },\n    { \"native_start\", \"()I\", (void *) MediaPlayer_start },\n    { \"native_stop\", \"()I\", (void *) MediaPlayer_stop },\n    { \"getVideoWidth\", \"()I\", (void *) MediaPlayer_getVideoWidth },\n    { 
\"getVideoHeight\", \"()I\", (void *) MediaPlayer_getVideoHeight },\n    { \"native_seekTo\", \"(I)I\", (void *) MediaPlayer_seekTo },\n    { \"native_pause\", \"()I\", (void *) MediaPlayer_pause },\n    { \"native_go\", \"()I\", (void *) MediaPlayer_go },\n    { \"isPlaying\", \"()Z\", (void *) MediaPlayer_isPlaying },\n    { \"getCurrentPosition\", \"()I\", (void *) MediaPlayer_getCurrentPosition },\n    { \"getDuration\", \"()I\", (void *) MediaPlayer_getDuration },\n    { \"native_init\", \"()V\", (void *) MediaPlayer_native_init },\n    { \"native_setup\", \"(Ljava/lang/Object;)V\", (void *) MediaPlayer_native_setup },\n    { \"renderBitmap\", \"(Landroid/graphics/Bitmap;)V\", (void *) MediaPlayer_renderBitmap },\n};\n\nint register_player(JNIEnv *env) {\n\treturn jniRegisterNativeMethods(env, kClassPathName, gMethods, sizeof(gMethods) / sizeof(gMethods[0]));\n}\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/yuv2rgb565.cpp",
    "content": "// Copyright (c) 2010 The Chromium Authors. All rights reserved.\n// Use of this source code is governed by a BSD-style license that can be\n// found in the LICENSE file.\n\n// contributor Siarhei Siamashka <siarhei.siamashka@gmail.com>\n\n// This file is modified based on:\n// http://dxr.mozilla.org/mozilla-central/source/gfx/ycbcr/yuv_convert_arm.cpp\n\n#include \"yuv2rgb565.h\"\n\n#if ARCH_ARM && HAVE_NEON\n\n/***************************************\n * convert in neon:\n */\n\nvoid __attribute((noinline,optimize(\"-fomit-frame-pointer\")))\nyuv42x_to_rgb565_row_neon(uint16_t *dst,\n                          const uint8_t *y,\n                          const uint8_t *u,\n                          const uint8_t *v,\n                          int n,\n                          int oddflag)\n{\n    static __attribute__((aligned(16))) uint16_t acc_r[8] = {\n        22840, 22840, 22840, 22840, 22840, 22840, 22840, 22840,\n    };\n    static __attribute__((aligned(16))) uint16_t acc_g[8] = {\n        17312, 17312, 17312, 17312, 17312, 17312, 17312, 17312,\n    };\n    static __attribute__((aligned(16))) uint16_t acc_b[8] = {\n        28832, 28832, 28832, 28832, 28832, 28832, 28832, 28832,\n    };\n    /*\n     * Registers:\n     * q0, q1 : d0, d1, d2, d3  - are used for initial loading of YUV data\n     * q2     : d4, d5          - are used for storing converted RGB data\n     * q3     : d6, d7          - are used for temporary storage\n     *\n     * q4-q7 - reserved\n     *\n     * q8, q9 : d16, d17, d18, d19  - are used for expanded Y data\n     * q10    : d20, d21\n     * q11    : d22, d23\n     * q12    : d24, d25\n     * q13    : d26, d27\n     * q13, q14, q15            - various constants (#16, #149, #204, #50, #104, #154)\n     */\n    asm volatile (\n\".fpu neon\\n\"\n/* Allow to build on targets not supporting neon, and force the object file\n * target to avoid bumping the final binary target */\n\".arch armv7-a\\n\"\n\".object_arch 
armv4t\\n\"\n\".macro convert_macroblock size\\n\"\n/* load up to 16 source pixels */\n\t\".if \\\\size == 16\\n\"\n\t    \"pld [%[y], #64]\\n\"\n\t    \"pld [%[u], #64]\\n\"\n\t    \"pld [%[v], #64]\\n\"\n\t    \"vld1.8 {d1}, [%[y]]!\\n\"\n\t    \"vld1.8 {d3}, [%[y]]!\\n\"\n\t    \"vld1.8 {d0}, [%[u]]!\\n\"\n\t    \"vld1.8 {d2}, [%[v]]!\\n\"\n\t\".elseif \\\\size == 8\\n\"\n\t    \"vld1.8 {d1}, [%[y]]!\\n\"\n\t    \"vld1.8 {d0[0]}, [%[u]]!\\n\"\n\t    \"vld1.8 {d0[1]}, [%[u]]!\\n\"\n\t    \"vld1.8 {d0[2]}, [%[u]]!\\n\"\n\t    \"vld1.8 {d0[3]}, [%[u]]!\\n\"\n\t    \"vld1.8 {d2[0]}, [%[v]]!\\n\"\n\t    \"vld1.8 {d2[1]}, [%[v]]!\\n\"\n\t    \"vld1.8 {d2[2]}, [%[v]]!\\n\"\n\t    \"vld1.8 {d2[3]}, [%[v]]!\\n\"\n\t\".elseif \\\\size == 4\\n\"\n\t    \"vld1.8 {d1[0]}, [%[y]]!\\n\"\n\t    \"vld1.8 {d1[1]}, [%[y]]!\\n\"\n\t    \"vld1.8 {d1[2]}, [%[y]]!\\n\"\n\t    \"vld1.8 {d1[3]}, [%[y]]!\\n\"\n\t    \"vld1.8 {d0[0]}, [%[u]]!\\n\"\n\t    \"vld1.8 {d0[1]}, [%[u]]!\\n\"\n\t    \"vld1.8 {d2[0]}, [%[v]]!\\n\"\n\t    \"vld1.8 {d2[1]}, [%[v]]!\\n\"\n\t\".elseif \\\\size == 2\\n\"\n\t    \"vld1.8 {d1[0]}, [%[y]]!\\n\"\n\t    \"vld1.8 {d1[1]}, [%[y]]!\\n\"\n\t    \"vld1.8 {d0[0]}, [%[u]]!\\n\"\n\t    \"vld1.8 {d2[0]}, [%[v]]!\\n\"\n\t\".elseif \\\\size == 1\\n\"\n\t    \"vld1.8 {d1[0]}, [%[y]]!\\n\"\n\t    \"vld1.8 {d0[0]}, [%[u]]!\\n\"\n\t    \"vld1.8 {d2[0]}, [%[v]]!\\n\"\n\t\".else\\n\"\n\t    \".error \\\"unsupported macroblock size\\\"\\n\"\n\t\".endif\\n\"\n\n        /* d1 - Y data (first 8 bytes) */\n        /* d3 - Y data (next 8 bytes) */\n        /* d0 - U data, d2 - V data */\n\n\t/* split even and odd Y color components */\n\t\"vuzp.8      d1, d3\\n\"                       /* d1 - evenY, d3 - oddY */\n\t/* clip upper and lower boundaries */\n\t\"vqadd.u8    q0, q0, q4\\n\"\n\t\"vqadd.u8    q1, q1, q4\\n\"\n\t\"vqsub.u8    q0, q0, q5\\n\"\n\t\"vqsub.u8    q1, q1, q5\\n\"\n\n\t\"vshr.u8     d4, d2, #1\\n\"                   /* d4 = V >> 1 */\n\n\t\"vmull.u8    q8, d1, 
d27\\n\"                  /* q8 = evenY * 149 */\n\t\"vmull.u8    q9, d3, d27\\n\"                  /* q9 = oddY * 149 */\n\n\t\"vld1.16     {d20, d21}, [%[acc_r], :128]\\n\" /* q10 - initialize accumulator for red */\n\t\"vsubw.u8    q10, q10, d4\\n\"                 /* red acc -= (V >> 1) */\n\t\"vmlsl.u8    q10, d2, d28\\n\"                 /* red acc -= V * 204 */\n\t\"vld1.16     {d22, d23}, [%[acc_g], :128]\\n\" /* q11 - initialize accumulator for green */\n\t\"vmlsl.u8    q11, d2, d30\\n\"                 /* green acc -= V * 104 */\n\t\"vmlsl.u8    q11, d0, d29\\n\"                 /* green acc -= U * 50 */\n\t\"vld1.16     {d24, d25}, [%[acc_b], :128]\\n\" /* q12 - initialize accumulator for blue */\n\t\"vmlsl.u8    q12, d0, d30\\n\"                 /* blue acc -= U * 104 */\n\t\"vmlsl.u8    q12, d0, d31\\n\"                 /* blue acc -= U * 154 */\n\n\t\"vhsub.s16   q3, q8, q10\\n\"                  /* calculate even red components */\n\t\"vhsub.s16   q10, q9, q10\\n\"                 /* calculate odd red components */\n\t\"vqshrun.s16 d0, q3, #6\\n\"                   /* right shift, narrow and saturate even red components */\n\t\"vqshrun.s16 d3, q10, #6\\n\"                  /* right shift, narrow and saturate odd red components */\n\n\t\"vhadd.s16   q3, q8, q11\\n\"                  /* calculate even green components */\n\t\"vhadd.s16   q11, q9, q11\\n\"                 /* calculate odd green components */\n\t\"vqshrun.s16 d1, q3, #6\\n\"                   /* right shift, narrow and saturate even green components */\n\t\"vqshrun.s16 d4, q11, #6\\n\"                  /* right shift, narrow and saturate odd green components */\n\n\t\"vhsub.s16   q3, q8, q12\\n\"                  /* calculate even blue components */\n\t\"vhsub.s16   q12, q9, q12\\n\"                 /* calculate odd blue components */\n\t\"vqshrun.s16 d2, q3, #6\\n\"                   /* right shift, narrow and saturate even blue components */\n\t\"vqshrun.s16 d5, q12, #6\\n\"            
      /* right shift, narrow and saturate odd blue components */\n\n\t\"vzip.8      d0, d3\\n\"                       /* join even and odd red components */\n\t\"vzip.8      d1, d4\\n\"                       /* join even and odd green components */\n\t\"vzip.8      d2, d5\\n\"                       /* join even and odd blue components */\n\n\t\"vshll.u8    q3, d0, #8\\n\\t\"\n\t\"vshll.u8    q8, d1, #8\\n\\t\"\n\t\"vshll.u8    q9, d2, #8\\n\\t\"\n\t\"vsri.u16    q3, q8, #5\\t\\n\"\n\t\"vsri.u16    q3, q9, #11\\t\\n\"\n\t/* store pixel data to memory */\n\t\".if \\\\size == 16\\n\"\n\t\"    vst1.16 {d6, d7}, [%[dst]]!\\n\"\n\t\"    vshll.u8    q3, d3, #8\\n\\t\"\n\t\"    vshll.u8    q8, d4, #8\\n\\t\"\n\t\"    vshll.u8    q9, d5, #8\\n\\t\"\n\t\"    vsri.u16    q3, q8, #5\\t\\n\"\n\t\"    vsri.u16    q3, q9, #11\\t\\n\"\n\t\"    vst1.16 {d6, d7}, [%[dst]]!\\n\"\n\t\".elseif \\\\size == 8\\n\"\n\t\"    vst1.16 {d6, d7}, [%[dst]]!\\n\"\n\t\".elseif \\\\size == 4\\n\"\n\t\"    vst1.16 {d6}, [%[dst]]!\\n\"\n\t\".elseif \\\\size == 2\\n\"\n\t\"    vst1.16 {d6[0]}, [%[dst]]!\\n\"\n\t\"    vst1.16 {d6[1]}, [%[dst]]!\\n\"\n\t\".elseif \\\\size == 1\\n\"\n\t\"    vst1.16 {d6[0]}, [%[dst]]!\\n\"\n\t\".endif\\n\"\n\".endm\\n\"\n\n\t\"vmov.u8     d8, #15\\n\" /* add this to U/V to saturate upper boundary */\n\t\"vmov.u8     d9, #20\\n\" /* add this to Y to saturate upper boundary */\n\t\"vmov.u8     d10, #31\\n\" /* sub this from U/V to saturate lower boundary */\n\t\"vmov.u8     d11, #36\\n\" /* sub this from Y to saturate lower boundary */\n\n\t\"vmov.u8     d26, #16\\n\"\n\t\"vmov.u8     d27, #149\\n\"\n\t\"vmov.u8     d28, #204\\n\"\n\t\"vmov.u8     d29, #50\\n\"\n\t\"vmov.u8     d30, #104\\n\"\n\t\"vmov.u8     d31, #154\\n\"\n\n\t\"cmp         %[oddflag], #0\\n\"\n\t\"beq         1f\\n\"\n\t\"convert_macroblock 1\\n\"\n\t\"sub         %[n], %[n], #1\\n\"\n    \"1:\\n\"\n\t\"subs        %[n], %[n], #16\\n\"\n\t\"blt         2f\\n\"\n    \"1:\\n\"\n\t\"convert_macroblock 
16\\n\"\n\t\"subs        %[n], %[n], #16\\n\"\n\t\"bge         1b\\n\"\n    \"2:\\n\"\n\t\"tst         %[n], #8\\n\"\n\t\"beq         3f\\n\"\n\t\"convert_macroblock 8\\n\"\n    \"3:\\n\"\n\t\"tst         %[n], #4\\n\"\n\t\"beq         4f\\n\"\n\t\"convert_macroblock 4\\n\"\n    \"4:\\n\"\n\t\"tst         %[n], #2\\n\"\n\t\"beq         5f\\n\"\n\t\"convert_macroblock 2\\n\"\n    \"5:\\n\"\n\t\"tst         %[n], #1\\n\"\n\t\"beq         6f\\n\"\n\t\"convert_macroblock 1\\n\"\n    \"6:\\n\"\n\t\".purgem convert_macroblock\\n\"\n\t: [y] \"+&r\" (y), [u] \"+&r\" (u), [v] \"+&r\" (v), [dst] \"+&r\" (dst), [n] \"+&r\" (n)\n\t: [acc_r] \"r\" (&acc_r[0]), [acc_g] \"r\" (&acc_g[0]), [acc_b] \"r\" (&acc_b[0]),\n\t  [oddflag] \"r\" (oddflag)\n\t: \"cc\", \"memory\",\n\t  \"d0\",  \"d1\",  \"d2\",  \"d3\",  \"d4\",  \"d5\",  \"d6\",  \"d7\",\n\t  \"d8\",  \"d9\",  \"d10\", \"d11\", /* \"d12\", \"d13\", \"d14\", \"d15\", */\n\t  \"d16\", \"d17\", \"d18\", \"d19\", \"d20\", \"d21\", \"d22\", \"d23\",\n\t  \"d24\", \"d25\", \"d26\", \"d27\", \"d28\", \"d29\", \"d30\", \"d31\"\n    );\n}\n\n\nvoid ConvertYCbCrToRGB565_neon( const uint8_t* y_buf,\n\t\t\t\t\t\t\t\tconst uint8_t* u_buf,\n\t\t\t\t\t\t\t\tconst uint8_t* v_buf,\n\t\t\t\t\t\t\t\tuint8_t* rgb_buf,\n\t\t\t\t\t\t\t\tint pic_width,\n\t\t\t\t\t\t\t\tint pic_height,\n\t\t\t\t\t\t\t\tint y_stride,\n\t\t\t\t\t\t\t\tint uv_stride,\n\t\t\t\t\t\t\t\tint rgb_stride,\n\t\t\t\t\t\t\t\tint yuv_type)\n{\n\tint x_shift;\n\tint y_shift;\n\tx_shift = (yuv_type != 444);  //YUV 4:4:4\n\ty_shift = (yuv_type == 420);  //YUV 4:2:0\n\t/*\n\tFrom Wiki: The Y'V12 format is essentially the same as Y'UV420p, \n\tbut it has the U and V data reversed: the Y' values are followed by the V values, with the U values last.\n\t*/\n\n\tfor (int i = 0; i < pic_height; i++) {\n\t  int yoffs;\n\t  int uvoffs;\n\t  yoffs = y_stride * i;\n\t  uvoffs = uv_stride * (i>>y_shift);\n\t  yuv42x_to_rgb565_row_neon((uint16_t*)(rgb_buf + rgb_stride * i),\n\t\t             
           y_buf + yoffs,\n\t\t                        u_buf + uvoffs,\n\t\t                        v_buf + uvoffs,\n\t\t                        pic_width,\n\t\t                        0);\n\t}\n}\n\n#endif //ARCH_ARM && HAVE_NEON\n\n\n/*************************************\n * convert in c:\n */\n\n/*\n * Use NS_CLAMP to force a value (such as a preference) into a range.\n */\n#define NS_CLAMP(x, low, high)  (((x) > (high)) ? (high) : (((x) < (low)) ? (low) : (x)))\n\n\n/*Convert a single pixel from Y'CbCr to RGB565.\n  This uses the exact same formulas as the asm, even though we could make the\n   constants a lot more accurate with 32-bit wide registers.*/\nstatic uint16_t yu2rgb565(int y, int u, int v, int dither) {\n  /*This combines the constant offset that needs to be added during the Y'CbCr\n     conversion with a rounding offset that depends on the dither parameter.*/\n  static const int DITHER_BIAS[4][3] = {\n    {-14240,    8704,    -17696},\n    {-14240+128,8704+64, -17696+128},\n    {-14240+256,8704+128,-17696+256},\n    {-14240+384,8704+192,-17696+384}\n  };\n  int r;\n  int g;\n  int b;\n  r = NS_CLAMP((74*y+102*v+DITHER_BIAS[dither][0])>>9, 0, 31);\n  g = NS_CLAMP((74*y-25*u-52*v+DITHER_BIAS[dither][1])>>8, 0, 63);\n  b = NS_CLAMP((74*y+129*u+DITHER_BIAS[dither][2])>>9, 0, 31);\n  return (uint16_t)(r<<11 | g<<5 | b);\n}\n\nvoid yuv_to_rgb565_row_c(uint16_t *dst,\n                         const uint8_t *y,\n                         const uint8_t *u,\n                         const uint8_t *v,\n                         int x_shift,\n                         int pic_width)\n{\n  int x;\n  for (x = 0; x < pic_width; x++)\n  {\n    dst[x] = yu2rgb565(y[x],\n                       u[x>>x_shift],\n                       v[x>>x_shift],\n                       2); // Disable dithering for now.\n  }\n}\n\nvoid ConvertYCbCrToRGB565_c( \tconst uint8_t* y_buf,\n\t\t\t\t\t\t\t\tconst uint8_t* u_buf,\n\t\t\t\t\t\t\t\tconst uint8_t* v_buf,\n\t\t\t\t\t\t\t\tuint8_t* 
rgb_buf,\n\t\t\t\t\t\t\t\tint pic_width,\n\t\t\t\t\t\t\t\tint pic_height,\n\t\t\t\t\t\t\t\tint y_stride,\n\t\t\t\t\t\t\t\tint uv_stride,\n\t\t\t\t\t\t\t\tint rgb_stride,\n\t\t\t\t\t\t\t\tint yuv_type)\n{\n\tint x_shift;\n\tint y_shift;\n\tx_shift = (yuv_type != 444);  //YUV 4:4:4\n\ty_shift = (yuv_type == 420);  //YUV 4:2:0\n\t/*\n\tFrom Wiki: The Y'V12 format is essentially the same as Y'UV420p,\n\tbut it has the U and V data reversed: the Y' values are followed by the V values, with the U values last.\n\t*/\n\tfor (int i = 0; i < pic_height; i++) {\n\t  int yoffs;\n\t  int uvoffs;\n\t  yoffs = y_stride * i;\n\t  uvoffs = uv_stride * (i>>y_shift);\n\t  yuv_to_rgb565_row_c((uint16_t*)(rgb_buf + rgb_stride * i),\n\t\t\t\t\t\t\ty_buf + yoffs,\n\t\t\t\t\t\t\tu_buf + uvoffs,\n\t\t\t\t\t\t\tv_buf + uvoffs,\n\t\t\t\t\t\t\tx_shift,\n\t\t\t\t\t\t\tpic_width);\n\t}\n}\n\nvoid ConvertYCbCrToRGB565( \tconst uint8_t* y_buf,\n\t\t\t\t\t\t\tconst uint8_t* u_buf,\n\t\t\t\t\t\t\tconst uint8_t* v_buf,\n\t\t\t\t\t\t\tuint8_t* rgb_buf,\n\t\t\t\t\t\t\tint pic_width,\n\t\t\t\t\t\t\tint pic_height,\n\t\t\t\t\t\t\tint y_stride,\n\t\t\t\t\t\t\tint uv_stride,\n\t\t\t\t\t\t\tint rgb_stride,\n\t\t\t\t\t\t\tint yuv_type)\n{\n#if HAVE_NEON\n\tConvertYCbCrToRGB565_neon(y_buf, u_buf, v_buf, rgb_buf, pic_width, pic_height, y_stride, uv_stride, rgb_stride, yuv_type);\n#else\n\tConvertYCbCrToRGB565_c(y_buf, u_buf, v_buf, rgb_buf, pic_width, pic_height, y_stride, uv_stride, rgb_stride, yuv_type);\n#endif\n}\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/jni/decoder/jniplayer/yuv2rgb565.h",
    "content": "#include <sys/types.h>\n\nvoid ConvertYCbCrToRGB565_neon( const uint8_t* y_buf,\n\t\t                   const uint8_t* u_buf,\n\t\t                   const uint8_t* v_buf,\n\t\t                   uint8_t* rgb_buf,\n\t\t                   int pic_width,\n\t\t                   int pic_height,\n\t\t                   int y_stride,\n\t\t                   int uv_stride,\n\t\t                   int rgb_stride,\n\t\t\t\t\t\t   int yuv_type);\n\nvoid ConvertYCbCrToRGB565_c( const uint8_t* y_buf,\n\t\t                   const uint8_t* u_buf,\n\t\t                   const uint8_t* v_buf,\n\t\t                   uint8_t* rgb_buf,\n\t\t                   int pic_width,\n\t\t                   int pic_height,\n\t\t                   int y_stride,\n\t\t                   int uv_stride,\n\t\t                   int rgb_stride,\n\t\t\t\t\t\t   int yuv_type);\n\nvoid ConvertYCbCrToRGB565( const uint8_t* y_buf,\n\t\t                   const uint8_t* u_buf,\n\t\t                   const uint8_t* v_buf,\n\t\t                   uint8_t* rgb_buf,\n\t\t                   int pic_width,\n\t\t                   int pic_height,\n\t\t                   int y_stride,\n\t\t                   int uv_stride,\n\t\t                   int rgb_stride,\n\t\t                   int yuv_type);\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/jni/encoder/Android.mk",
    "content": "LOCAL_PATH := $(call my-dir)\n\nPREBUILT_PATH := $(LOCAL_PATH)/../../../../../../prebuilt\n\ninclude $(CLEAR_VARS)\nLOCAL_MODULE := x264\nLOCAL_SRC_FILES := $(PREBUILT_PATH)/$(TARGET_ARCH_ABI)/libx264.a\ninclude $(PREBUILT_STATIC_LIBRARY)\n\nifneq ($(TARGET_ARCH_ABI),x86)\nifneq ($(TARGET_ARCH_ABI),x86_64)\ninclude $(CLEAR_VARS)\nLOCAL_MODULE := qy265\nLOCAL_SRC_FILES := $(PREBUILT_PATH)/$(TARGET_ARCH_ABI)/libqyencoder.a\ninclude $(PREBUILT_STATIC_LIBRARY)\nendif\nendif\n\ninclude $(CLEAR_VARS)\n\nLOCAL_LDLIBS += -llog\n\nLOCAL_MODULE := native-lib\n\nLOCAL_CONLYFLAGS += -std=c99\n\nLOCAL_C_INCLUDES += $(PREBUILT_PATH)/include\n\nLOCAL_SRC_FILES += encoderwrapper.c\n\nLOCAL_STATIC_LIBRARIES += x264 qy265 gnustl_static cpufeatures\n\nLOCAL_DISABLE_FATAL_LINKER_WARNINGS := true\n\ninclude $(BUILD_SHARED_LIBRARY)\n\n$(call import-module,android/cpufeatures)"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/jni/encoder/encoderwrapper.c",
    "content": "#include <jni.h>\n#include <stdio.h>\n#include <errno.h>\n#include <time.h>\n#include <string.h>\n#include <stdlib.h>\n#include \"x264.h\"\n#include \"qy265enc.h\"\n#include \"qy265def.h\"\n#include \"qyauth_env.h\"\n#include \"encoderwrapper.h\"\n#include \"log.h\"\n\n#define LOG_TAG \"encoder\"\n\ntypedef struct Encoder {\n    FILE* in_file;\n    float real_fps;\n    float real_time;\n    double avg_psnr;\n    int frame_num;\n} Encoder;\n\nstatic inline Encoder* getInstance(jlong ptr)\n{\n    return (Encoder*)(intptr_t) ptr;\n}\n\njlong Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1init\n        (JNIEnv *env,\n         jobject instance) {\n\n    Encoder* thiz = (Encoder*)calloc(1, sizeof(Encoder));\n    thiz->real_fps = 0;\n    thiz->frame_num = 0;\n    thiz->avg_psnr = 0;\n    thiz->real_time =0;\n\n    return (jlong)(intptr_t)thiz;\n}\n\njint Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1open\n        (JNIEnv *env,\n         jobject instance,\n         jlong ptr,\n         jstring path_) {\n    const char *path = (*env)->GetStringUTFChars(env, path_, 0);\n\n    Encoder* thiz = getInstance(ptr);\n    thiz->in_file = fopen(path, \"r\");\n    if (NULL == thiz->in_file) {\n        LOGD(\"open file failed with %d\", errno);\n        (*env)->ReleaseStringUTFChars(env, path_, path);\n        return -1;\n    }\n    thiz->real_fps = 0;\n    thiz->frame_num = 0;\n    thiz->avg_psnr = 0;\n    thiz->real_time =0;\n\n    (*env)->ReleaseStringUTFChars(env, path_, path);\n    return 0;\n}\n\njint Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1x264_1encode\n        (JNIEnv *env,\n         jobject instance,\n         jlong ptr,\n         jstring path_,\n         jstring profile_,\n         jstring delay_,\n         jint width,\n         jint height,\n         jobject fps,\n         jint bitrate,\n         jint threads) {\n    const char *path = (*env)->GetStringUTFChars(env, path_, 0);\n    
const char *profile = (*env)->GetStringUTFChars(env, profile_, 0);\n    const char *delay = (*env)->GetStringUTFChars(env, delay_, 0);\n\n    Encoder* thiz = getInstance(ptr);\n\n    x264_param_t param;\n    x264_picture_t pic;\n    x264_picture_t pic_out;\n    x264_t *h;\n\n    int i_frame = 0;\n    int i_frame_size;\n    x264_nal_t *nal;\n    int i_nal;\n    clock_t clock_start, clock_end, clock_used;\n    struct timeval tv_start, tv_end;\n    double real_time;\n    int64_t ms_used;\n    FILE *out_file;\n\n    double sum_psnr_y = 0.0;\n    double sum_psnr_u = 0.0;\n    double sum_psnr_v = 0.0;\n\n    if ( NULL != path ) {\n        out_file = fopen(path, \"wb\");\n        if ( NULL == out_file ) {\n            LOGE(\"open output file failed with %d\", errno);\n            fclose(thiz->in_file);\n\n            (*env)->ReleaseStringUTFChars(env, path_, path);\n            (*env)->ReleaseStringUTFChars(env, profile_, profile);\n            (*env)->ReleaseStringUTFChars(env, delay_, delay);\n            return -1;\n        }\n    }\n\n    LOGD(\"profile %s\", profile);\n    /* Get default params for preset/tuning */\n    if (strlen(delay) == 11 && strncmp(delay, \"zerolatency\", 11) == 0) {\n        if( x264_param_default_preset( &param, profile, \"zerolatency\" ) < 0 )\n        goto fail;\n    } else {\n        if( x264_param_default_preset( &param, profile, NULL ) < 0 )\n        goto fail;\n    }\n\n    /* Configure non-default params */\n    param.i_csp = X264_CSP_I420;\n    param.i_width  = width;\n    param.i_height = height;\n    param.b_vfr_input = 0;\n    param.b_repeat_headers = 1;\n    param.b_annexb = 1;\n\n    if (strlen(delay) == 11 && strncmp(delay, \"zerolatency\", 11) == 0) {\n        param.i_bframe = 0;\n    } else if (strlen(delay) == 13 && strncmp(delay, \"livestreaming\", 13) == 0) {\n        param.i_bframe = 3;\n    } else if (strlen(delay) == 7 && strncmp(delay, \"offline\", 7) == 0) {\n        param.i_bframe = 7;\n    }\n\n    param.i_threads = threads;\n    jclass 
floatClass = (*env)->FindClass(env, \"java/lang/Float\");\n    jmethodID floatMethod = (*env)->GetMethodID(env, floatClass, \"floatValue\", \"()F\");\n    jfloat val = (*env)->CallFloatMethod(env, fps, floatMethod);\n    LOGD(\"x264 fps %.6f\", val);\n    param.i_fps_num = val;\n    param.i_fps_den = 1;\n    param.rc.i_bitrate = bitrate;\n    param.rc.i_rc_method = X264_RC_ABR;\n\n    param.analyse.b_psnr = 1;\n\n    /* Apply profile restrictions. */\n    if( x264_param_apply_profile( &param, \"high\" ) < 0 )\n        goto fail;\n\n    if( x264_picture_alloc( &pic, param.i_csp, param.i_width, param.i_height ) < 0 )\n        goto fail;\n\n    h = x264_encoder_open( &param );\n    if( !h )\n        goto fail;\n\n    int luma_size = param.i_width * param.i_height;\n    int chroma_size = luma_size / 4;\n    gettimeofday(&tv_start, NULL);\n    clock_start = clock();\n    /* Encode frames */\n    for( ;; i_frame++ )\n    {\n        /* Read input frame */\n        if( fread( pic.img.plane[0], 1, luma_size, thiz->in_file ) != luma_size )\n            break;\n        if( fread( pic.img.plane[1], 1, chroma_size, thiz->in_file ) != chroma_size )\n            break;\n        if( fread( pic.img.plane[2], 1, chroma_size, thiz->in_file ) != chroma_size )\n            break;\n\n        pic.i_pts = i_frame;\n        i_frame_size = x264_encoder_encode( h, &nal, &i_nal, &pic, &pic_out );\n        if( i_frame_size < 0 )\n            goto fail;\n        else if( i_frame_size )\n        {\n            if (param.analyse.b_psnr){\n                sum_psnr_y += pic_out.prop.f_psnr[0];\n                sum_psnr_u += pic_out.prop.f_psnr[1];\n                sum_psnr_v += pic_out.prop.f_psnr[2];\n            }\n\n            if( !fwrite( nal->p_payload, i_frame_size, 1, out_file ) )\n                goto fail;\n        }\n    }\n\n    /* Flush delayed frames */\n    while( x264_encoder_delayed_frames( h ) )\n    {\n        i_frame_size = x264_encoder_encode( h, &nal, &i_nal, NULL, &pic_out 
);\n        if( i_frame_size < 0 )\n            goto fail;\n        else if( i_frame_size )\n        {\n            if (param.analyse.b_psnr){\n                sum_psnr_y += pic_out.prop.f_psnr[0];\n                sum_psnr_u += pic_out.prop.f_psnr[1];\n                sum_psnr_v += pic_out.prop.f_psnr[2];\n            }\n\n            if( !fwrite( nal->p_payload, i_frame_size, 1, out_file ) )\n                goto fail;\n        }\n    }\n\n    clock_end = clock();\n    gettimeofday(&tv_end, NULL);\n    clock_used = clock_end - clock_start;\n    ms_used = (int64_t)(clock_used * 1000.0 / CLOCKS_PER_SEC);\n    real_time = (tv_end.tv_sec + (tv_end.tv_usec / 1000000.0)) - (tv_start.tv_sec + (tv_start.tv_usec / 1000000.0));\n    float realFPS = i_frame / real_time;\n    double avg_psnr = (6*sum_psnr_y+sum_psnr_u+sum_psnr_v)/(8*i_frame);\n\n    thiz->frame_num = i_frame;\n    thiz->real_fps = realFPS;\n    thiz->real_time = real_time;\n    thiz->avg_psnr = avg_psnr;\n\n    x264_encoder_close( h );\n    x264_picture_clean( &pic );\n    fclose(thiz->in_file);\n    fclose(out_file);\n    (*env)->ReleaseStringUTFChars(env, path_, path);\n    (*env)->ReleaseStringUTFChars(env, profile_, profile);\n    (*env)->ReleaseStringUTFChars(env, delay_, delay);\n    return 0;\n\n    fail:\n    fclose(thiz->in_file);\n    fclose(out_file);\n    (*env)->ReleaseStringUTFChars(env, path_, path);\n    (*env)->ReleaseStringUTFChars(env, profile_, profile);\n    (*env)->ReleaseStringUTFChars(env, delay_, delay);\n    return -1;\n\n}\n\nstatic double ksy265_psnr = 0;\n\nvoid ksy265log(const char* msg) {\n    LOGD(\"ksy265 log: %s\", msg);\n    //psnr值出现在编码器的log中，形如\"bitrate, psnr: 503.1069\t40.4723\t47.0057\t45.9163\"\n    char* psnr = strstr(msg, \"psnr\");\n    if (psnr != NULL) {\n        psnr += 4;\n\n        char *p;\n        const char* d = \" :\\t\";\n        p = strtok(psnr, d);\n\n        double y =0, u = 0, v = 0;\n        //skip bitrate\n        p = strtok(NULL, d);\n        if (p 
!= NULL)\n            y = strtod(p, NULL);\n\n        p = strtok(NULL, d);\n        if (p != NULL)\n            u = strtod(p, NULL);\n\n        p = strtok(NULL, d);\n        if (p != NULL)\n            v = strtod(p, NULL);\n\n        ksy265_psnr = (y*6 + u + v) / 8;\n        LOGD(\"psnr %f, y %f , u %f, v %f \\n\", ksy265_psnr, y, u, v);\n    }\n}\n\njint Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1ksy265_1encoder\n        (JNIEnv *env,\n         jobject instance,\n         jobject context,\n         jlong ptr,\n         jstring path_,\n         jstring profile_,\n         jstring delay_,\n         jint width,\n         jint height,\n         jobject fps,\n         jint bitrate,\n         jint threads) {\n    const char *path = (*env)->GetStringUTFChars(env, path_, 0);\n    const char *profile = (*env)->GetStringUTFChars(env, profile_, 0);\n    const char *delay = (*env)->GetStringUTFChars(env, delay_, 0);\n\n    QY265EncConfig param;\n    QY265YUV yuv;\n    QY265Picture pic;\n    QY265Picture pic_out;\n    QY265Nal *nal;\n    void *h;\n    int i_frame = 0;\n    int i_frame_size;\n    int i_nal;\n    clock_t clock_start, clock_end, clock_used;\n    struct timeval tv_start, tv_end;\n    double real_time;\n    int64_t ms_used;\n    FILE *out_file;\n    int errorCode;\n\n    Encoder *thiz = getInstance(ptr);\n\n    if (NULL != path) {\n        out_file = fopen(path, \"w\");\n        if (NULL == out_file) {\n            perror(\"open output file\");\n            fclose(thiz->in_file);\n            (*env)->ReleaseStringUTFChars(env, path_, path);\n            (*env)->ReleaseStringUTFChars(env, profile_, profile);\n            (*env)->ReleaseStringUTFChars(env, delay_, delay);\n            return -1;\n        }\n    }\n\n    /* Get default params for preset/tuning */\n    if (QY265ConfigDefaultPreset(&param, profile, NULL, delay) < 0)\n        goto fail;\n\n    param.picWidth = width;\n    param.picHeight = height;\n    param.threads = 
threads;\n\n    jclass floatClass = (*env)->FindClass(env, \"java/lang/Float\");\n    jmethodID floatMethod = (*env)->GetMethodID(env, floatClass, \"floatValue\", \"()F\");\n    jfloat val = (*env)->CallFloatMethod(env, fps, floatMethod);\n    LOGD(\"265 fps %.6f\", val);\n    param.frameRate = val;\n    param.bitrateInkbps = bitrate;\n    TCounterEnv* tCounterEnv = (TCounterEnv*) malloc(sizeof(TCounterEnv));\n    tCounterEnv->context = context;\n    (*env)->GetJavaVM(env, &tCounterEnv->jvm);\n    param.pAuth = tCounterEnv;\n\n    param.calcPsnr = 1;\n    QY265SetLogPrintf(ksy265log);\n\n    yuv.pData[0] = (unsigned char *)malloc(param.picWidth * param.picHeight * 3/2);\n    yuv.pData[1] = yuv.pData[0] + param.picWidth * param.picHeight;\n    yuv.pData[2] = yuv.pData[0] + param.picWidth * param.picHeight * 5/4;\n    yuv.iWidth = param.picWidth;\n    yuv.iHeight = param.picHeight;\n    yuv.iStride[0] = yuv.iWidth;\n    yuv.iStride[1] = yuv.iStride[2] = yuv.iWidth/2;\n\n    h = QY265EncoderOpen( &param , &errorCode);\n    if( !h )\n        goto fail;\n\n    pic.yuv = &yuv;\n    memset(&pic_out,0,sizeof(pic_out));\n\n    int luma_size = param.picWidth * param.picHeight;\n    int chroma_size = luma_size / 4;\n    gettimeofday(&tv_start, NULL);\n    clock_start = clock();\n    /* Encode frames */\n    for( ;; i_frame++ )\n    {\n        /* Read input frame */\n        if( fread( pic.yuv->pData[0], 1, luma_size, thiz->in_file ) != luma_size )\n            break;\n        if( fread( pic.yuv->pData[1], 1, chroma_size, thiz->in_file ) != chroma_size )\n            break;\n        if( fread( pic.yuv->pData[2], 1, chroma_size, thiz->in_file ) != chroma_size )\n            break;\n\n        pic.pts = i_frame;\n        i_frame_size = QY265EncoderEncodeFrame( h, &nal, &i_nal, &pic, &pic_out, 0 );\n        if( i_frame_size < 0 )\n            goto fail;\n\n        for(int i = 0; i < i_nal; i++){\n            if( !fwrite(  nal[i].pPayload, nal[i].iSize, 1, out_file ) )\n            
    goto fail;\n        }\n    }\n    /* Flush delayed frames */\n    while( QY265EncoderDelayedFrames( h ) )\n    {\n        i_frame_size = QY265EncoderEncodeFrame( h, &nal, &i_nal, NULL, &pic_out, 0 );\n        if( i_frame_size < 0 )\n            goto fail;\n\n        for(int i = 0; i < i_nal; i++){\n            if( !fwrite(  nal[i].pPayload, nal[i].iSize, 1, out_file ) )\n                goto fail;\n        }\n    }\n    clock_end = clock();\n    gettimeofday(&tv_end, NULL);\n    clock_used = clock_end - clock_start;\n    ms_used = (int64_t)(clock_used * 1000.0 / CLOCKS_PER_SEC);\n    real_time = (tv_end.tv_sec + (tv_end.tv_usec / 1000000.0)) - (tv_start.tv_sec + (tv_start.tv_usec / 1000000.0));\n    float realFPS = i_frame / real_time;\n    printf(\"%d frame encoded\\n\"\n                   \"\\ttime\\tfps\\n\"\n                   \"CPU\\t%lldms\\t%.2f\\n\"\n                   \"Real\\t%.3fs\\t%.2f.\\n\",\n           i_frame,\n           ms_used, i_frame * 1000.0 / ms_used,\n           real_time, realFPS);\n\n    QY265EncoderClose( h );\n\n    thiz->frame_num = i_frame;\n    thiz->real_fps = realFPS;\n    thiz->real_time = real_time;\n    thiz->avg_psnr = ksy265_psnr;\n\n    free(yuv.pData[0]);\n    fclose(thiz->in_file);\n    fclose(out_file);\n    (*env)->ReleaseStringUTFChars(env, path_, path);\n    (*env)->ReleaseStringUTFChars(env, profile_, profile);\n    (*env)->ReleaseStringUTFChars(env, delay_, delay);\n    return 0;\n\n    fail:\n    fclose(thiz->in_file);\n    fclose(out_file);\n    (*env)->ReleaseStringUTFChars(env, path_, path);\n    (*env)->ReleaseStringUTFChars(env, profile_, profile);\n    (*env)->ReleaseStringUTFChars(env, delay_, delay);\n    return -1;\n}\n\njfloat Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1get_1real_1fps\n        (JNIEnv *env,\n         jobject instance,\n         jlong ptr) {\n    Encoder* thiz = getInstance(ptr);\n    return thiz->real_fps;\n}\n\njint 
Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1get_1encoded_1frame_1num\n        (JNIEnv *env,\n         jobject instance,jlong ptr) {\n    Encoder* thiz = getInstance(ptr);\n    return thiz->frame_num;\n}\n\nJNIEXPORT jstring JNICALL\nJava_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1get_1x264_1version(\n        JNIEnv *env, jobject instance) {\n    return (*env)->NewStringUTF(env, X264_POINTVER);\n}\n\nJNIEXPORT jstring JNICALL\nJava_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1get_1ksy265_1version(\n        JNIEnv *env, jobject instance) {\n    return (*env)->NewStringUTF(env, strLibQy265Version);\n}\n\nJNIEXPORT jfloat JNICALL\nJava_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1get_1real_1time(JNIEnv *env,\n                                                                                       jobject instance,\n                                                                                       jlong ptr) {\n    Encoder* thiz = getInstance(ptr);\n    return thiz->real_time;\n}\n\nJNIEXPORT jfloat JNICALL\nJava_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1get_1psnr(JNIEnv *env,\n                                                                                 jobject instance,\n                                                                                 jlong ptr) {\n    Encoder* thiz = getInstance(ptr);\n    return thiz->avg_psnr;\n}"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/jni/encoder/encoderwrapper.h",
    "content": "/* DO NOT EDIT THIS FILE - it is machine generated */\n#include <jni.h>\n/* Header for class com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper */\n\n#ifndef _Included_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper\n#define _Included_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n/*\n * Class:     com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper\n * Method:    native_init\n * Signature: ()V\n */\nJNIEXPORT jlong JNICALL\nJava_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1init(JNIEnv *, jobject);\n\n/*\n * Class:     com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper\n * Method:    native_open\n * Signature: (JLjava/lang/String;)I\n */\nJNIEXPORT jint JNICALL Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1open\n        (JNIEnv *, jobject, jlong, jstring);\n\n/*\n * Class:     com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper\n * Method:    native_x264_encode\n * Signature: (JLjava/lang/String;Ljava/lang/String;Ljava/lang/String;IILjava/lang/Float;II)I\n */\nJNIEXPORT jint JNICALL Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1x264_1encode\n        (JNIEnv *, jobject, jlong, jstring, jstring, jstring, jint, jint, jobject, jint, jint);\n\n/*\n * Class:     com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper\n * Method:    native_ksy265_encoder\n * Signature: (JLjava/lang/String;Ljava/lang/String;Ljava/lang/String;IILjava/lang/Float;II)I\n */\nJNIEXPORT jint JNICALL Java_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1ksy265_1encoder\n        (JNIEnv *, jobject, jobject , jlong, jstring, jstring, jstring, jint, jint, jobject, jint, jint);\n\n/*\n * Class:     com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper\n * Method:    native_get_real_fps\n * Signature: (J)F\n */\nJNIEXPORT jfloat 
JNICALL\nJava_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1get_1real_1fps(JNIEnv *,\n                                                                                      jobject, jlong);\n\n/*\n * Class:     com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper\n * Method:    native_get_encoded_frame_num\n * Signature: (J)I\n */\nJNIEXPORT jint JNICALL\nJava_com_ksyun_media_ksy265codec_demo_encoder_EncoderWrapper_native_1get_1encoded_1frame_1num(\n        JNIEnv *, jobject, jlong);\n\n#ifdef __cplusplus\n}\n#endif\n#endif\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/jni/encoder/log.h",
    "content": "//\n// Created by sujia on 2017/3/29.\n//\n\n#ifndef KSY265CODECDEMO_LOG_H\n#define KSY265CODECDEMO_LOG_H\n\n#include <android/log.h>\n\n#define LOGD(fmt, args...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, fmt, ##args)\n#define LOGI(fmt, args...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, fmt, ##args)\n#define LOGW(fmt, args...) __android_log_print(ANDROID_LOG_WARN, LOG_TAG, fmt, ##args)\n#define LOGE(fmt, args...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, fmt, ##args)\n\n#endif //KSY265CODECDEMO_LOG_H\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/res/drawable/selector_tab_background.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<selector xmlns:android=\"http://schemas.android.com/apk/res/android\">\n    <item android:state_selected=\"true\" android:drawable=\"@android:color/white\"/><!--被选中状态-->\n    <item android:state_focused=\"true\" android:drawable=\"@android:color/white\"/><!--被焦点聚焦状态-->\n    <item android:state_pressed=\"true\" android:drawable=\"@android:color/white\"/><!--被按下状态-->\n    <item android:drawable=\"@android:color/white\"/><!--默认状态-->\n</selector>"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/res/drawable/tab_home_btn.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<selector xmlns:android=\"http://schemas.android.com/apk/res/android\">\n\n        <!-- 指定选择时和未被选择的图片 -->\n        <item android:drawable=\"@drawable/home_selected\"\n            android:state_selected=\"true\"/>\n        <item android:drawable=\"@drawable/home\" />\n\n\n</selector>"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/res/drawable/tab_view_btn.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<selector xmlns:android=\"http://schemas.android.com/apk/res/android\">\n\n    <!-- 指定选择时和未被选择的图片 -->\n    <item android:drawable=\"@drawable/view_selected\"\n        android:state_selected=\"true\"/>\n    <item android:drawable=\"@drawable/view\" />\n</selector>\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/res/layout/activity_main.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<LinearLayout xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    xmlns:tools=\"http://schemas.android.com/tools\"\n    android:layout_width=\"fill_parent\"\n    android:layout_height=\"fill_parent\"\n    android:orientation=\"vertical\" >\n\n    <android.support.v4.view.ViewPager\n        android:id=\"@+id/pager\"\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"0dp\"\n        android:layout_weight=\"1\" /><!--装4个Fragment-->\n\n    <FrameLayout\n        android:visibility=\"gone\"\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"0dp\"\n        android:layout_weight=\"1\" />\n\n    <!--定义FragmentTabHost控件-->\n    <android.support.v4.app.FragmentTabHost\n        android:id=\"@android:id/tabhost\"\n        android:layout_width=\"fill_parent\"\n        android:layout_height=\"wrap_content\"\n        android:background=\"@android:color/black\" ><!--装4个Fragment-->\n\n        <FrameLayout\n            android:id=\"@android:id/tabcontent\"\n            android:layout_width=\"0dp\"\n            android:layout_height=\"0dp\"\n            android:layout_weight=\"0\" /><!--装Tab的内容-->\n    </android.support.v4.app.FragmentTabHost>\n</LinearLayout>\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/res/layout/decoder_settings.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<LinearLayout xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    android:orientation=\"vertical\" android:layout_width=\"match_parent\"\n    android:layout_height=\"match_parent\">\n\n    <LinearLayout\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"wrap_content\"\n        android:orientation=\"horizontal\"\n        android:paddingTop=\"20px\"\n        android:paddingBottom=\"20px\"\n        android:layout_gravity=\"top\">\n        <TextView\n            android:id=\"@+id/decoder_settings_decoder_txt\"\n            android:layout_width=\"0px\"\n            android:layout_height=\"wrap_content\"\n            android:paddingLeft=\"10px\"\n            android:paddingRight=\"10px\"\n            android:gravity=\"center\"\n            android:layout_weight=\"1\"\n            android:text=\"解码器\"/>\n        <Spinner\n            android:id=\"@+id/decoder_settings_decoder_spinner\"\n            android:layout_width=\"0px\"\n            android:layout_height=\"wrap_content\"\n            android:layout_weight=\"1\"\n            android:paddingLeft=\"10px\"/>\n    </LinearLayout>\n\n    <LinearLayout\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"wrap_content\"\n        android:orientation=\"horizontal\"\n        android:paddingTop=\"20px\"\n        android:paddingBottom=\"20px\"\n        android:layout_gravity=\"top\">\n        <TextView\n            android:id=\"@+id/decoder_settings_threads_txt\"\n            android:layout_width=\"0px\"\n            android:layout_height=\"wrap_content\"\n            android:paddingLeft=\"10px\"\n            android:paddingRight=\"10px\"\n            android:gravity=\"center\"\n            android:layout_weight=\"1\"\n            android:text=\"解码线程数\"/>\n        <Spinner\n            android:id=\"@+id/decoder_settings_threads_spinner\"\n            android:layout_width=\"0px\"\n            
android:layout_height=\"wrap_content\"\n            android:layout_weight=\"1\"\n            android:paddingLeft=\"10px\"/>\n    </LinearLayout>\n\n    <LinearLayout\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"wrap_content\"\n        android:orientation=\"horizontal\"\n        android:paddingTop=\"20px\"\n        android:paddingBottom=\"20px\"\n        android:layout_gravity=\"top\">\n        <TextView\n            android:id=\"@+id/decoder_settings_fps_txt\"\n            android:layout_width=\"0px\"\n            android:layout_height=\"wrap_content\"\n            android:paddingLeft=\"10px\"\n            android:paddingRight=\"10px\"\n            android:gravity=\"center\"\n            android:layout_weight=\"1\"\n            android:text=\"渲染帧率\"/>\n        <Spinner\n            android:id=\"@+id/decoder_settings_fps_spinner\"\n            android:layout_width=\"0px\"\n            android:layout_height=\"wrap_content\"\n            android:layout_weight=\"1\"\n            android:paddingLeft=\"10px\"/>\n    </LinearLayout>\n\n    <LinearLayout\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"wrap_content\"\n        android:orientation=\"horizontal\"\n        android:paddingTop=\"20px\"\n        android:paddingBottom=\"20px\"\n        android:layout_gravity=\"top\">\n        <TextView\n            android:id=\"@+id/decoder_settings_enable_yuv_output_txt\"\n            android:layout_width=\"0px\"\n            android:layout_height=\"wrap_content\"\n            android:paddingLeft=\"10px\"\n            android:paddingRight=\"10px\"\n            android:gravity=\"center\"\n            android:layout_weight=\"1\"\n            android:text=\"输出YUV\"/>\n        <RadioGroup\n            android:layout_width=\"0px\"\n            android:layout_height=\"wrap_content\"\n            android:layout_weight=\"1\"\n            android:paddingLeft=\"10px\"\n            android:orientation=\"horizontal\" >\n       
     <RadioButton\n                android:id=\"@+id/decoder_settings_enable_yuv_output\"\n                android:layout_width=\"wrap_content\"\n                android:layout_height=\"wrap_content\"\n                android:checked=\"true\"\n                android:text=\"是\"\n                android:layout_weight=\"1\"/>\n            <RadioButton\n                android:id=\"@+id/decoder_settings_disable_yuv_output\"\n                android:layout_width=\"wrap_content\"\n                android:layout_height=\"wrap_content\"\n                android:checked=\"false\"\n                android:text=\"否\"\n                android:layout_weight=\"1\"/>\n        </RadioGroup>\n    </LinearLayout>\n\n    <Button\n        android:id=\"@+id/decoder_settings_sure\"\n        android:layout_marginTop=\"10dp\"\n        android:layout_gravity=\"center_horizontal\"\n        android:layout_width=\"wrap_content\"\n        android:layout_height=\"wrap_content\"\n        android:text=\"确定\" />\n\n</LinearLayout>"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/res/layout/encoder_settings.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<LinearLayout xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    android:orientation=\"vertical\" android:layout_width=\"match_parent\"\n    android:layout_height=\"match_parent\">\n\n    <LinearLayout\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"wrap_content\"\n        android:layout_gravity=\"top\"\n        android:orientation=\"horizontal\"\n        android:paddingBottom=\"20px\"\n        android:paddingTop=\"20px\">\n\n        <TextView\n            android:id=\"@+id/encoder_settings_encoder_txt\"\n            android:layout_width=\"0px\"\n            android:layout_height=\"wrap_content\"\n            android:layout_weight=\"1\"\n            android:gravity=\"center\"\n            android:paddingLeft=\"10px\"\n            android:paddingRight=\"10px\"\n            android:text=\"编码器\" />\n\n        <Spinner\n            android:id=\"@+id/encoder_settings_encoder_spinner\"\n            android:layout_width=\"0px\"\n            android:layout_height=\"wrap_content\"\n            android:layout_weight=\"1\"\n            android:paddingLeft=\"10px\" />\n    </LinearLayout>\n\n    <LinearLayout\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"wrap_content\"\n        android:orientation=\"horizontal\"\n        android:paddingTop=\"20px\"\n        android:paddingBottom=\"20px\">\n        <TextView\n            android:id=\"@+id/encoder_settings_profile_txt\"\n            android:layout_width=\"0px\"\n            android:layout_height=\"wrap_content\"\n            android:paddingLeft=\"10px\"\n            android:paddingRight=\"10px\"\n            android:gravity=\"center\"\n            android:layout_weight=\"1\"\n            android:text=\"编码档次\"/>\n        <Spinner\n            android:id=\"@+id/encoder_settings_profile_spinner\"\n            android:layout_width=\"0px\"\n            android:layout_height=\"wrap_content\"\n    
        android:layout_weight=\"1\"/>\n    </LinearLayout>\n\n    <LinearLayout\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"wrap_content\"\n        android:orientation=\"horizontal\"\n        android:paddingTop=\"20px\"\n        android:paddingBottom=\"20px\" >\n        <TextView\n            android:id=\"@+id/encoder_settings_delay_txt\"\n            android:layout_width=\"0px\"\n            android:layout_height=\"wrap_content\"\n            android:paddingLeft=\"10px\"\n            android:paddingRight=\"10px\"\n            android:gravity=\"center\"\n            android:layout_weight=\"1\"\n            android:text=\"延时\" />\n        <Spinner\n            android:id=\"@+id/encoder_settings_delay_spinner\"\n            android:layout_width=\"0px\"\n            android:layout_height=\"wrap_content\"\n            android:layout_weight=\"1\"/>\n    </LinearLayout>\n\n    <LinearLayout\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"wrap_content\"\n        android:orientation=\"horizontal\"\n        android:paddingTop=\"20px\"\n        android:paddingBottom=\"20px\">\n        <TextView\n            android:id=\"@+id/encoder_settings_resolution_txt\"\n            android:layout_width=\"0px\"\n            android:layout_height=\"wrap_content\"\n            android:paddingLeft=\"10px\"\n            android:paddingRight=\"10px\"\n            android:gravity=\"center\"\n            android:layout_weight=\"1\"\n            android:text=\"分辨率\" />\n        <LinearLayout\n            android:layout_width=\"0px\"\n            android:layout_height=\"wrap_content\"\n            android:layout_weight=\"1\"\n            android:orientation=\"vertical\">\n            <Spinner\n                android:id=\"@+id/encoder_settings_resolution_spinner\"\n                android:layout_width=\"wrap_content\"\n                android:layout_height=\"0px\"\n                android:layout_weight=\"1\"/>\n            
<EditText\n                android:id=\"@+id/encoder_settings_resolution\"\n                android:layout_width=\"wrap_content\"\n                android:layout_height=\"0px\"\n                android:layout_weight=\"1\"\n                android:hint=\"宽*高\"\n                android:text=\"1280*720\"/>\n        </LinearLayout>\n\n    </LinearLayout>\n\n    <LinearLayout\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"wrap_content\"\n        android:orientation=\"horizontal\"\n        android:paddingTop=\"20px\"\n        android:paddingBottom=\"20px\">\n        <TextView\n            android:id=\"@+id/encoder_settings_bitrate_txt\"\n            android:layout_width=\"0px\"\n            android:layout_height=\"wrap_content\"\n            android:paddingLeft=\"10px\"\n            android:paddingRight=\"10px\"\n            android:gravity=\"center\"\n            android:layout_weight=\"1\"\n            android:hint=\"0代表关闭\"\n            android:text=\"码率(kbps)\" />\n        <EditText\n            android:id=\"@+id/encoder_settings_bitrate\"\n            android:layout_width=\"0px\"\n            android:layout_height=\"wrap_content\"\n            android:layout_weight=\"1\"\n            android:text=\"500\"/>\n    </LinearLayout>\n\n    <LinearLayout\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"wrap_content\"\n        android:orientation=\"horizontal\"\n        android:paddingTop=\"20px\"\n        android:paddingBottom=\"20px\">\n        <TextView\n            android:id=\"@+id/encoder_settings_fps_txt\"\n            android:layout_width=\"0px\"\n            android:layout_height=\"wrap_content\"\n            android:paddingLeft=\"10px\"\n            android:paddingRight=\"10px\"\n            android:gravity=\"center\"\n            android:layout_weight=\"1\"\n            android:text=\"帧率\" />\n        <EditText\n            android:id=\"@+id/encoder_settings_fps\"\n            
android:layout_width=\"0px\"\n            android:layout_height=\"wrap_content\"\n            android:layout_weight=\"1\"\n            android:text=\"15\"/>\n    </LinearLayout>\n\n    <LinearLayout\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"wrap_content\"\n        android:orientation=\"horizontal\"\n        android:paddingTop=\"20px\"\n        android:paddingBottom=\"20px\"\n        android:layout_gravity=\"bottom\">\n        <TextView\n            android:id=\"@+id/encoder_settings_threads_txt\"\n            android:layout_width=\"0px\"\n            android:layout_height=\"wrap_content\"\n            android:paddingLeft=\"10px\"\n            android:paddingRight=\"10px\"\n            android:gravity=\"center\"\n            android:layout_weight=\"1\"\n            android:text=\"编码线程\" />\n        <EditText\n            android:id=\"@+id/encoder_settings_threads\"\n            android:layout_width=\"0px\"\n            android:layout_height=\"wrap_content\"\n            android:layout_weight=\"1\"\n            android:text=\"1\"/>\n    </LinearLayout>\n\n\n    <Button\n        android:id=\"@+id/encoder_settings_sure\"\n        android:layout_marginTop=\"10dp\"\n        android:layout_gravity=\"center_horizontal\"\n        android:layout_width=\"wrap_content\"\n        android:layout_height=\"wrap_content\"\n        android:text=\"确定\" />\n</LinearLayout>"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/res/layout/file.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<!--\n * Copyright (C) 2013 Paul Burke\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *      http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n-->\n<TextView xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    style=\"@style/fileChooserName\"\n    android:layout_width=\"fill_parent\"\n    android:layout_height=\"?android:attr/listPreferredItemHeight\" />\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/res/layout/fragment_item.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<android.support.percent.PercentRelativeLayout\n    xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    xmlns:app=\"http://schemas.android.com/apk/res-auto\"\n    android:orientation=\"vertical\"\n    android:layout_width=\"match_parent\"\n    android:layout_height=\"match_parent\"\n    android:id=\"@+id/fragment_layout\">\n\n    <RelativeLayout\n        android:id=\"@+id/main_top\"\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"wrap_content\"\n        android:orientation=\"horizontal\"\n        android:paddingBottom=\"20px\">\n\n        <Button\n            android:id=\"@+id/settings\"\n            android:layout_width=\"wrap_content\"\n            android:layout_height=\"wrap_content\"\n            android:layout_alignParentLeft=\"true\"\n            android:paddingLeft=\"10px\"\n            android:text=\"@string/settings\"\n            android:textColor=\"#0000FF\"/>\n\n        <TextView\n            android:id=\"@+id/title_txt\"\n            android:layout_width=\"wrap_content\"\n            android:layout_height=\"wrap_content\"\n            android:layout_centerInParent=\"true\"/>\n\n        <Button\n            android:id=\"@+id/help\"\n            android:layout_width=\"wrap_content\"\n            android:layout_height=\"wrap_content\"\n            android:layout_alignParentRight=\"true\"\n            android:paddingRight=\"10px\"\n            android:text=\"@string/help\"\n            android:textColor=\"#0000FF\"/>\n\n    </RelativeLayout>\n\n    <Button\n        android:id=\"@+id/nav\"\n        android:layout_width=\"wrap_content\"\n        android:layout_height=\"wrap_content\"\n        android:paddingTop=\"20px\"\n        android:paddingBottom=\"20px\"\n        android:text=\"@string/navigate\"\n        android:textColor=\"#0000FF\"\n        android:layout_below=\"@id/main_top\"/>\n\n    <LinearLayout\n        android:id=\"@+id/path\"\n        
android:orientation=\"horizontal\"\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"wrap_content\"\n        android:layout_below=\"@id/nav\">\n\n        <EditText\n            android:id=\"@+id/filepath\"\n            android:layout_width=\"wrap_content\"\n            android:layout_height=\"wrap_content\"\n            android:layout_weight=\"1\"/>\n        <Button\n            android:id=\"@+id/start\"\n            android:layout_width=\"wrap_content\"\n            android:layout_height=\"wrap_content\"\n            android:text=\"@string/start\"\n            android:textColor=\"#0000FF\"/>\n    </LinearLayout>\n\n    <SurfaceView\n        android:id=\"@+id/surface_view\"\n        android:layout_width=\"0px\"\n        android:layout_height=\"0px\"\n        app:layout_heightPercent=\"40%\"\n        app:layout_widthPercent=\"100%\"\n        android:layout_gravity=\"center\"\n        android:paddingTop=\"20px\"\n        android:layout_below=\"@id/path\"/>\n\n    <TextView\n        android:id=\"@+id/info_txt\"\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"wrap_content\"\n        android:paddingTop=\"20px\"\n        android:layout_gravity=\"bottom\"\n        android:scrollbars=\"vertical\"\n        android:layout_below=\"@id/surface_view\"/>\n</android.support.percent.PercentRelativeLayout>"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/res/layout/help.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<LinearLayout xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    android:orientation=\"vertical\" android:layout_width=\"match_parent\"\n    android:layout_height=\"match_parent\">\n\n    <TextView\n        android:id=\"@+id/help_title\"\n        android:layout_width=\"wrap_content\"\n        android:layout_height=\"wrap_content\"\n        android:paddingTop=\"20px\"\n        android:paddingBottom=\"20px\"\n        android:layout_gravity=\"center\"\n        android:text=\"帮助\"/>\n\n    <TextView\n        android:id=\"@+id/help_info\"\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"wrap_content\"\n        android:paddingTop=\"20px\"\n        android:paddingBottom=\"20px\"/>\n\n    <!--<TextView-->\n        <!--android:id=\"@+id/help_maip_page\"-->\n        <!--android:layout_width=\"match_parent\"-->\n        <!--android:layout_height=\"wrap_content\"-->\n        <!--android:paddingTop=\"20px\"-->\n        <!--android:paddingBottom=\"20px\"-->\n        <!--android:text=\"官方主页： \"/>-->\n\n    <!--<TextView-->\n        <!--android:id=\"@+id/help_github_txt\"-->\n        <!--android:layout_width=\"match_parent\"-->\n        <!--android:layout_height=\"wrap_content\"-->\n        <!--android:paddingTop=\"20px\"-->\n        <!--android:text=\"github地址:\"/>-->\n\n    <TextView\n        android:id=\"@+id/help_github\"\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"wrap_content\"\n        android:paddingTop=\"10px\"\n        android:paddingBottom=\"20px\"\n        android:text=\"https://github.com/ksvc/ks265codec \"/>\n\n    <!--<TextView-->\n        <!--android:id=\"@+id/help_qq\"-->\n        <!--android:layout_width=\"match_parent\"-->\n        <!--android:layout_height=\"wrap_content\"-->\n        <!--android:paddingTop=\"20px\"-->\n        <!--android:paddingBottom=\"20px\"-->\n        <!--android:text=\"qq群： \"/>-->\n</LinearLayout>"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/res/layout/main_tab_layout.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<RelativeLayout xmlns:android=\"http://schemas.android.com/apk/res/android\"\n    xmlns:tools=\"http://schemas.android.com/tools\"\n    android:layout_width=\"fill_parent\"\n    android:layout_height=\"fill_parent\"\n    android:orientation=\"vertical\" >\n\n    <android.support.v4.view.ViewPager\n        android:id=\"@+id/pager\"\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"0dp\"\n        android:layout_weight=\"1\" /><!--装4个Fragment-->\n\n    <FrameLayout\n        android:visibility=\"gone\"\n        android:layout_width=\"match_parent\"\n        android:layout_height=\"0dp\"\n        android:layout_weight=\"1\" />\n\n    <!--定义FragmentTabHost控件-->\n    <android.support.v4.app.FragmentTabHost\n        android:id=\"@android:id/tabhost\"\n        android:layout_width=\"fill_parent\"\n        android:layout_height=\"wrap_content\"\n        android:background=\"@android:color/black\" ><!--装4个Fragment-->\n\n        <FrameLayout\n            android:id=\"@android:id/tabcontent\"\n            android:layout_width=\"0dp\"\n            android:layout_height=\"0dp\"\n            android:layout_weight=\"0\" /><!--装Tab的内容-->\n    </android.support.v4.app.FragmentTabHost>\n</RelativeLayout>"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/res/layout/tab_content.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<LinearLayout xmlns:android=\"http://schemas.android.com/apk/res/android\"\n\n    android:layout_width=\"match_parent\"\n    android:layout_height=\"match_parent\"\n    android:gravity=\"center\"\n    android:orientation=\"vertical\"\n    android:background=\"#ffffff\">\n\n    <ImageView\n        android:id=\"@+id/tab_imageview\"\n        android:layout_width=\"wrap_content\"\n        android:layout_height=\"wrap_content\"\n        />\n\n    <TextView\n        android:id=\"@+id/tab_textview\"\n        android:layout_width=\"wrap_content\"\n        android:layout_height=\"wrap_content\"\n        android:text=\"\"\n        android:textColor=\"#000000\" />\n\n</LinearLayout>\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/res/values/bool.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<resources>\n\n    <bool name=\"use_activity\">true</bool>\n    <bool name=\"use_provider\">false</bool>\n\n</resources>"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/res/values/colors.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<resources>\n    <color name=\"colorPrimary\">#3F51B5</color>\n    <color name=\"colorPrimaryDark\">#303F9F</color>\n    <color name=\"colorAccent\">#FF4081</color>\n</resources>\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/res/values/dimens.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<!--\n * Copyright (C) 2012 Paul Burke\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *      http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n-->\n<resources xmlns:android=\"http://schemas.android.com/apk/res/android\">\n\t<dimen name=\"list_padding\">0dp</dimen>\n\t<dimen name=\"list_item_padding\">16dp</dimen>\n</resources>"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/res/values/strings.xml",
    "content": "<resources>\n    <string name=\"app_name\">KSY265CodecDemo</string>\n    <string name=\"settings\">设置</string>\n    <string name=\"help\">帮助</string>\n    <string name=\"navigate\">浏览文件</string>\n    <string name=\"start\">确认</string>\n    <string name=\"encode_help_info\">编码器使用说明： 先设置编码器参数，然后选择文件，最后确定即开始编码</string>\n    <string name=\"decode_help_info\">解码器使用说明： 先设置解码器参数(其中选择渲染帧率为-1(off)时，是关闭渲染功能)，然后选择文件，最后确定即开始解码</string>\n\n    <!--a file choose-->\n    <string name=\"empty_directory\">Empty Directory</string>\n    <string name=\"storage_removed\">Storage was removed or unmounted.</string>\n    <string name=\"choose_file\">Select a file</string>\n    <string name=\"error_selecting_file\">Error selecting File</string>\n    <string name=\"internal_storage\">Internal storage</string>\n\n    <string name=\"chooser_title\">KSY Yun</string>\n</resources>\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/res/values/styles.xml",
    "content": "<resources xmlns:android=\"http://schemas.android.com/apk/res/android\">\n\n    <!-- Base application theme. -->\n    <style name=\"AppTheme\" parent=\"Theme.AppCompat.Light.DarkActionBar\">\n        <!-- Customize your theme here. -->\n        <item name=\"colorPrimary\">@color/colorPrimary</item>\n        <item name=\"colorPrimaryDark\">@color/colorPrimaryDark</item>\n        <item name=\"colorAccent\">@color/colorAccent</item>\n    </style>\n\n    <!--file choose-->\n    <style name=\"fileChooserName\">\n        <item name=\"android:drawablePadding\">@dimen/list_item_padding</item>\n        <item name=\"android:ellipsize\">end</item>\n        <item name=\"android:gravity\">center_vertical</item>\n        <item name=\"android:maxLines\">2</item>\n        <item name=\"android:paddingLeft\">@dimen/list_item_padding</item>\n        <item name=\"android:paddingRight\">@dimen/list_item_padding</item>\n        <item name=\"android:textAppearance\">?android:attr/textAppearanceMedium</item>\n    </style>\n</resources>\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/res/values/themes.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<!-- \n * Copyright (C) 2011 Paul Burke\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *      http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n -->\n <resources xmlns:android=\"http://schemas.android.com/apk/res/android\">\n\n    <style name=\"AppTheme1\" parent=\"android:Theme.Light\" />\n    <style name=\"ChooserTheme1\" parent=\"android:Theme.Light\" />\n\n</resources>"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/app/src/main/res/xml/mimetypes.xml",
    "content": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\r\n<!-- \r\n * Copyright (C) 2007-2008 OpenIntents.org\r\n *\r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n *\r\n *      http://www.apache.org/licenses/LICENSE-2.0\r\n *\r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n -->\r\n<MimeTypes>\r\n\t<!-- Image types -->\r\n\t<type extension=\".png\" mimetype=\"image/png\" />\r\n\t<type extension=\".gif\" mimetype=\"image/gif\" />\r\n\t<type extension=\".jpg\" mimetype=\"image/jpeg\" />\r\n\t<type extension=\".jpeg\" mimetype=\"image/jpeg\" />\r\n\t<type extension=\".bmp\" mimetype=\"image/bmp\" />\r\n\t<type extension=\".tiff\" mimetype=\"image/tiff\" />\r\n\t<type extension=\".tif\" mimetype=\"image/tiff\" />\r\n\t<type extension=\".icon\" mimetype=\"image/x-icon\" />\r\n\t    \r\n\t<!-- Audio types -->\r\n\t<type extension=\".mp3\" mimetype=\"audio/mpeg\" />\r\n\t<type extension=\".mp2\" mimetype=\"audio/mpeg\" />\r\n\t<type extension=\".mpga\" mimetype=\"audio/mpeg\" />\r\n\t<type extension=\".m4a\" mimetype=\"audio/mp4a-latm\" />\r\n\t<type extension=\".m4p\" mimetype=\"audio/mp4a-latm\" />\r\n\t<type extension=\".wav\" mimetype=\"audio/wav\" />\r\n\t<type extension=\".ogg\" mimetype=\"audio/x-ogg\" />\r\n\t<type extension=\".mid\" mimetype=\"audio/mid\" />\r\n\t<type extension=\".midi\" mimetype=\"audio/midi\" />\r\n\t<type extension=\".amr\" mimetype=\"audio/AMR\" />\r\n\t<type extension=\".aac\" mimetype=\"audio/x-aac\"/>\r\n\t<type extension=\".m3u\" mimetype=\"audio/x-mpegurl\"/>\r\n\t<type extension=\".ram\" 
mimetype=\"audio/x-pn-realaudio\"/>\r\n\t<type extension=\".ra\" mimetype=\"audio/x-pn-realaudio\"/>\r\n\t<type extension=\".aif\" mimetype=\"audio/x-aiff\"/>\r\n\t<type extension=\".aiff\" mimetype=\"audio/x-aiff\"/>\r\n\t<type extension=\".aifc\" mimetype=\"audio/x-aiff\"/>\r\n\t\t\t\r\n\t<!-- Video types -->\r\n\t<type extension=\".mpeg\" mimetype=\"video/mpeg\" />\r\n\t<type extension=\".mpg\" mimetype=\"video/mpeg\" />\t\r\n\t<type extension=\".mpe\" mimetype=\"video/mpeg\" />\r\n\t<type extension=\".mov\" mimetype=\"video/quicktime\" />\r\n\t<type extension=\".qt\" mimetype=\"video/quicktime\" />\r\n\t<type extension=\".mp4\" mimetype=\"video/mpeg\" />\r\n\t<type extension=\".3gp\" mimetype=\"video/3gpp\" />\r\n\t<type extension=\".3gpp\" mimetype=\"video/3gpp\" />\r\n\t<type extension=\".m4u\" mimetype=\"video/vnd.mpegurl\" />\r\n\t<type extension=\".mxu\" mimetype=\"video/vnd.mpegurl\" />\r\n\t<type extension=\".flv\" mimetype=\"video/x-flv\" />\r\n\t<type extension=\".wmx\" mimetype=\"video/x-ms-wmv\" />\r\n\t<type extension=\".avi\" mimetype=\"video/x-msvideo\" />\r\n\t\t\t\r\n\t<!-- Package types -->\r\n\t<type extension=\".jar\" mimetype=\"application/java-archive\" />\r\n\t<type extension=\".zip\" mimetype=\"application/zip\" />\r\n\t<type extension=\".rar\" mimetype=\"application/x-rar-compressed\" />\r\n\t<type extension=\".gz\" mimetype=\"application/gzip\" />\r\n\t\r\n\t<!-- Web browser types -->\r\n\t<type extension=\".htm\" mimetype=\"text/html\" />\r\n\t<type extension=\".html\" mimetype=\"text/html\" />\r\n\t<type extension=\".php\" mimetype=\"text/php\" />\r\n\t\r\n\t<!-- Doc types -->\r\n\t<type extension=\".txt\" mimetype=\"text/plain\" />\r\n\t<type extension=\".rtf\" mimetype=\"text/rtf\" />\r\n\t<type extension=\".csv\" mimetype=\"text/csv\" />\r\n\t<type extension=\".xml\" mimetype=\"text/xml\" />\r\n\t<type extension=\".css\" mimetype=\"text/css\" />\r\n\t<type extension=\".doc\"
 mimetype=\"application/msword\" />\r\n\t<type extension=\".docx\" mimetype=\"application/msword\" />\r\n\t<type extension=\".ppt\" mimetype=\"application/vnd.ms-powerpoint\" />\r\n\t<type extension=\".pdf\" mimetype=\"application/pdf\" />\r\n\t<type extension=\".xls\" mimetype=\"application/vnd.ms-excel\" />\r\n\t\r\n\t<!-- Android specific -->\r\n\t<type extension=\".apk\" mimetype=\"application/vnd.android.package-archive\" />\r\n\t\r\n</MimeTypes>"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/build.gradle",
    "content": "// Top-level build file where you can add configuration options common to all sub-projects/modules.\n\nbuildscript {\n    repositories {\n        jcenter()\n        maven {\n            url 'https://maven.google.com/'\n            name 'Google'\n        }\n    }\n    dependencies {\n        classpath 'com.android.tools.build:gradle:2.3.0'\n\n        // NOTE: Do not place your application dependencies here; they belong\n        // in the individual module build.gradle files\n    }\n}\n\nallprojects {\n    repositories {\n        jcenter()\n        maven {\n            url 'https://maven.google.com/'\n            name 'Google'\n        }\n    }\n}\n\ntask clean(type: Delete) {\n    delete rootProject.buildDir\n}\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/gradle/wrapper/gradle-wrapper.properties",
    "content": "#Mon Mar 27 15:56:29 CST 2017\ndistributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\nzipStoreBase=GRADLE_USER_HOME\nzipStorePath=wrapper/dists\ndistributionUrl=https\\://services.gradle.org/distributions/gradle-3.3-all.zip\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/gradle.properties",
    "content": "# Project-wide Gradle settings.\n\n# IDE (e.g. Android Studio) users:\n# Gradle settings configured through the IDE *will override*\n# any settings specified in this file.\n\n# For more details on how to configure your build environment visit\n# http://www.gradle.org/docs/current/userguide/build_environment.html\n\n# Specifies the JVM arguments used for the daemon process.\n# The setting is particularly useful for tweaking memory settings.\norg.gradle.jvmargs=-Xmx1536m\n\n# When configured, Gradle will run in incubating parallel mode.\n# This option should only be used with decoupled projects. More details, visit\n# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects\n# org.gradle.parallel=true\nsystemProp.http.proxyHost=120.92.42.188\nsystemProp.http.proxyPort=52179"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/gradlew",
    "content": "#!/usr/bin/env bash\n\n##############################################################################\n##\n##  Gradle start up script for UN*X\n##\n##############################################################################\n\n# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\nDEFAULT_JVM_OPTS=\"\"\n\nAPP_NAME=\"Gradle\"\nAPP_BASE_NAME=`basename \"$0\"`\n\n# Use the maximum available, or set MAX_FD != -1 to use that value.\nMAX_FD=\"maximum\"\n\nwarn ( ) {\n    echo \"$*\"\n}\n\ndie ( ) {\n    echo\n    echo \"$*\"\n    echo\n    exit 1\n}\n\n# OS specific support (must be 'true' or 'false').\ncygwin=false\nmsys=false\ndarwin=false\ncase \"`uname`\" in\n  CYGWIN* )\n    cygwin=true\n    ;;\n  Darwin* )\n    darwin=true\n    ;;\n  MINGW* )\n    msys=true\n    ;;\nesac\n\n# Attempt to set APP_HOME\n# Resolve links: $0 may be a link\nPRG=\"$0\"\n# Need this for relative symlinks.\nwhile [ -h \"$PRG\" ] ; do\n    ls=`ls -ld \"$PRG\"`\n    link=`expr \"$ls\" : '.*-> \\(.*\\)$'`\n    if expr \"$link\" : '/.*' > /dev/null; then\n        PRG=\"$link\"\n    else\n        PRG=`dirname \"$PRG\"`\"/$link\"\n    fi\ndone\nSAVED=\"`pwd`\"\ncd \"`dirname \\\"$PRG\\\"`/\" >/dev/null\nAPP_HOME=\"`pwd -P`\"\ncd \"$SAVED\" >/dev/null\n\nCLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar\n\n# Determine the Java command to use to start the JVM.\nif [ -n \"$JAVA_HOME\" ] ; then\n    if [ -x \"$JAVA_HOME/jre/sh/java\" ] ; then\n        # IBM's JDK on AIX uses strange locations for the executables\n        JAVACMD=\"$JAVA_HOME/jre/sh/java\"\n    else\n        JAVACMD=\"$JAVA_HOME/bin/java\"\n    fi\n    if [ ! 
-x \"$JAVACMD\" ] ; then\n        die \"ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\n    fi\nelse\n    JAVACMD=\"java\"\n    which java >/dev/null 2>&1 || die \"ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\n\nPlease set the JAVA_HOME variable in your environment to match the\nlocation of your Java installation.\"\nfi\n\n# Increase the maximum file descriptors if we can.\nif [ \"$cygwin\" = \"false\" -a \"$darwin\" = \"false\" ] ; then\n    MAX_FD_LIMIT=`ulimit -H -n`\n    if [ $? -eq 0 ] ; then\n        if [ \"$MAX_FD\" = \"maximum\" -o \"$MAX_FD\" = \"max\" ] ; then\n            MAX_FD=\"$MAX_FD_LIMIT\"\n        fi\n        ulimit -n $MAX_FD\n        if [ $? -ne 0 ] ; then\n            warn \"Could not set maximum file descriptor limit: $MAX_FD\"\n        fi\n    else\n        warn \"Could not query maximum file descriptor limit: $MAX_FD_LIMIT\"\n    fi\nfi\n\n# For Darwin, add options to specify how the application appears in the dock\nif $darwin; then\n    GRADLE_OPTS=\"$GRADLE_OPTS \\\"-Xdock:name=$APP_NAME\\\" \\\"-Xdock:icon=$APP_HOME/media/gradle.icns\\\"\"\nfi\n\n# For Cygwin, switch paths to Windows format before running java\nif $cygwin ; then\n    APP_HOME=`cygpath --path --mixed \"$APP_HOME\"`\n    CLASSPATH=`cygpath --path --mixed \"$CLASSPATH\"`\n    JAVACMD=`cygpath --unix \"$JAVACMD\"`\n\n    # We build the pattern for arguments to be converted via cygpath\n    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`\n    SEP=\"\"\n    for dir in $ROOTDIRSRAW ; do\n        ROOTDIRS=\"$ROOTDIRS$SEP$dir\"\n        SEP=\"|\"\n    done\n    OURCYGPATTERN=\"(^($ROOTDIRS))\"\n    # Add a user-defined pattern to the cygpath arguments\n    if [ \"$GRADLE_CYGPATTERN\" != \"\" ] ; then\n        OURCYGPATTERN=\"$OURCYGPATTERN|($GRADLE_CYGPATTERN)\"\n    fi\n    # Now convert the arguments - 
kludge to limit ourselves to /bin/sh\n    i=0\n    for arg in \"$@\" ; do\n        CHECK=`echo \"$arg\"|egrep -c \"$OURCYGPATTERN\" -`\n        CHECK2=`echo \"$arg\"|egrep -c \"^-\"`                                 ### Determine if an option\n\n        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition\n            eval `echo args$i`=`cygpath --path --ignore --mixed \"$arg\"`\n        else\n            eval `echo args$i`=\"\\\"$arg\\\"\"\n        fi\n        i=$((i+1))\n    done\n    case $i in\n        (0) set -- ;;\n        (1) set -- \"$args0\" ;;\n        (2) set -- \"$args0\" \"$args1\" ;;\n        (3) set -- \"$args0\" \"$args1\" \"$args2\" ;;\n        (4) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" ;;\n        (5) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" ;;\n        (6) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" ;;\n        (7) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" ;;\n        (8) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" ;;\n        (9) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" \"$args8\" ;;\n    esac\nfi\n\n# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules\nfunction splitJvmOpts() {\n    JVM_OPTS=(\"$@\")\n}\neval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS\nJVM_OPTS[${#JVM_OPTS[*]}]=\"-Dorg.gradle.appname=$APP_BASE_NAME\"\n\nexec \"$JAVACMD\" \"${JVM_OPTS[@]}\" -classpath \"$CLASSPATH\" org.gradle.wrapper.GradleWrapperMain \"$@\"\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/gradlew.bat",
    "content": "@if \"%DEBUG%\" == \"\" @echo off\r\n@rem ##########################################################################\r\n@rem\r\n@rem  Gradle startup script for Windows\r\n@rem\r\n@rem ##########################################################################\r\n\r\n@rem Set local scope for the variables with windows NT shell\r\nif \"%OS%\"==\"Windows_NT\" setlocal\r\n\r\n@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\r\nset DEFAULT_JVM_OPTS=\r\n\r\nset DIRNAME=%~dp0\r\nif \"%DIRNAME%\" == \"\" set DIRNAME=.\r\nset APP_BASE_NAME=%~n0\r\nset APP_HOME=%DIRNAME%\r\n\r\n@rem Find java.exe\r\nif defined JAVA_HOME goto findJavaFromJavaHome\r\n\r\nset JAVA_EXE=java.exe\r\n%JAVA_EXE% -version >NUL 2>&1\r\nif \"%ERRORLEVEL%\" == \"0\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:findJavaFromJavaHome\r\nset JAVA_HOME=%JAVA_HOME:\"=%\r\nset JAVA_EXE=%JAVA_HOME%/bin/java.exe\r\n\r\nif exist \"%JAVA_EXE%\" goto init\r\n\r\necho.\r\necho ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%\r\necho.\r\necho Please set the JAVA_HOME variable in your environment to match the\r\necho location of your Java installation.\r\n\r\ngoto fail\r\n\r\n:init\r\n@rem Get command-line arguments, handling Windowz variants\r\n\r\nif not \"%OS%\" == \"Windows_NT\" goto win9xME_args\r\nif \"%@eval[2+2]\" == \"4\" goto 4NT_args\r\n\r\n:win9xME_args\r\n@rem Slurp the command line arguments.\r\nset CMD_LINE_ARGS=\r\nset _SKIP=2\r\n\r\n:win9xME_args_slurp\r\nif \"x%~1\" == \"x\" goto execute\r\n\r\nset CMD_LINE_ARGS=%*\r\ngoto execute\r\n\r\n:4NT_args\r\n@rem Get arguments from the 4NT Shell from JP Software\r\nset CMD_LINE_ARGS=%$\r\n\r\n:execute\r\n@rem Setup the command line\r\n\r\nset 
CLASSPATH=%APP_HOME%\\gradle\\wrapper\\gradle-wrapper.jar\r\n\r\n@rem Execute Gradle\r\n\"%JAVA_EXE%\" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% \"-Dorg.gradle.appname=%APP_BASE_NAME%\" -classpath \"%CLASSPATH%\" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%\r\n\r\n:end\r\n@rem End local scope for the variables with windows NT shell\r\nif \"%ERRORLEVEL%\"==\"0\" goto mainEnd\r\n\r\n:fail\r\nrem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of\r\nrem the _cmd.exe /c_ return code!\r\nif  not \"\" == \"%GRADLE_EXIT_CONSOLE%\" exit 1\r\nexit /b 1\r\n\r\n:mainEnd\r\nif \"%OS%\"==\"Windows_NT\" endlocal\r\n\r\n:omega\r\n"
  },
  {
    "path": "Android_demo/KSY265CodecDemo/settings.gradle",
    "content": "include ':app'\n"
  },
  {
    "path": "Android_demo/README.md",
    "content": "KSY 265 Demo"
  },
  {
    "path": "Android_demo/prebuilt/include/lenthevcdec.h",
    "content": "#ifndef __LENTHEVCDEC_H__\r\n#define __LENTHEVCDEC_H__\r\n\r\n\r\n#ifdef __cplusplus\r\nextern \"C\" {\r\n#endif\r\n\r\n#include <stdint.h>\r\n\r\n#if defined(_WIN32) || defined(WIN32)\r\n\t#define LENTAPI __stdcall\r\n#else\r\n\t#define LENTAPI\r\n#endif\r\n\r\n\ttypedef void* lenthevcdec_ctx;\r\n\r\n\tint             LENTAPI lenthevcdec_version(void);\r\n\r\n\tlenthevcdec_ctx LENTAPI lenthevcdec_create(int threads, int compatibility, void* reserved);\r\n\r\n\tvoid            LENTAPI lenthevcdec_destroy(lenthevcdec_ctx ctx);\r\n\r\n\tvoid            LENTAPI lenthevcdec_flush(lenthevcdec_ctx ctx);\r\n\r\n\t/* bs & bs_len: intput bitstream\r\n\t * pts: input play timestamp\r\n\t * got_frame: return 1 if we got frame, then the pixels & line_stride & got_pts is valid\r\n\t * width & height: picture size\r\n\t * line_stride & pixels: output picture pixel data\r\n\t * got_pts: pts of output frame\r\n\t * return: byte count used by decoder, or negative number for error\r\n\t */\r\n\tint             LENTAPI lenthevcdec_decode_frame(lenthevcdec_ctx ctx,\r\n\t\t\t\t\t\t\t const void* bs, int bs_len,\r\n\t\t\t\t\t\t\t int64_t pts,\r\n\t\t\t\t\t\t\t int* got_frame,\r\n\t\t\t\t\t\t\t int* width, int* height,\r\n\t\t\t\t\t\t\t int line_stride[3], \r\n\t\t\t\t\t\t\t void* pixels[3], \r\n\t\t\t\t\t\t\t int64_t* got_pts);\r\n\r\n#ifdef __cplusplus\r\n}\r\n#endif\r\n\r\n#endif/*__LENTHEVCDEC_H__*/\r\n"
  },
  {
    "path": "Android_demo/prebuilt/include/qy265dec.h",
    "content": "///////////////////////////////////////////////////\n//\n//         KingSoft H265 Codec Library\n//\n//  Copyright(c) 2013-2014 KingSoft, Inc.\n//              www.KingSoft.cn\n//\n///////////////////////////////////////////////////\n/************************************************************************************\n* decInf.h: interface of decoder for user\n*\n* \\date     2013-09-28: first version\n*\n************************************************************************************/\n#ifndef _QY265_DECODER_INTERFACE_H_\n#define  _QY265_DECODER_INTERFACE_H_\n\n#include \"qy265def.h\"\n\n// config parameters for Decoder\ntypedef struct QY265DecConfig {\n    void* pAuth;                //QYAuth, invalid if don't need aksk auth\n    int threads;               // number of threads used in decoding (0: auto)\n    int bEnableOutputRecToFile;  // For debug: write reconstruct YUV to File\n    char* strRecYuvFileName;      // For debug: file name of YUV\n                                  // when bEnableOutputRecToFile = 1\n    int logLevel;               //For debug: log level\n}QY265DecConfig;\n\n// information of decoded frame\ntypedef struct QY265FrameInfo {\n    int nWidth;     // frame width\n    int nHeight;    // frame height\n    long long pts;  // time stamp\n    int bIllegalStream; // input bit stream is illegal\n    int poc;\n}QY265FrameInfo;\n\n// decoded frame with data and information\ntypedef struct QY265Frame {\n    int  bValid; //if == 0, no more valid output frame\n    unsigned char* pData[3]; // Y U V\n    short iStride[3];        // stride for each component\n    QY265FrameInfo frameinfo;\n#ifdef EMSCRIPTEN//TEST_YUVPLANE\n    unsigned char* pYUVPlane; //liner buffer for yuv 420p \n#endif\n}QY265Frame;\n\n\n#if defined(__cplusplus)\nextern \"C\" {\n#endif//__cplusplus\n\n/************************************************************************************\n* I/F for all 
usrs\n************************************************************************************/\n// create decoder, return  handle of decoder\n_h_dll_export void* QY265DecoderCreate(QY265DecConfig* pDecConfig, int * pStat);\n// destroy decoder with specific handle\n_h_dll_export void QY265DecoderDestroy(void* pDecoder);\n// set config to specific decoder\n_h_dll_export void QY265DecoderSetDecConfig(void *pDecoder, QY265DecConfig* pDecConfig, int * pStat);\n//the input of this function should be one or more NALs;\n//if only one NAL, with or without start bytes are both OK\n_h_dll_export void QY265DecodeFrame(void *pDecoder, unsigned char* pData, int iLen, int * pStat, const long long pts);\n// bSkip = false : same as QY265DecodeFrame\n// bSkip = true : only decode slice headers in pData, slice data skipped\n_h_dll_export void QY265DecodeFrameEnSkip(void *pDecoder, unsigned char* pData, int iLen, int * pStat, const long long pts, int bSkip);\n//flush decoding, called at end\n_h_dll_export void QY265DecodeFlush(void *pDecoder, int bClearCachedPics, int * pStat);\n// retrieve the output, the function are used for synchronized output, this function need to call several time until get NULL\n// if bForceLogo == true, only one frame buffer inside, need  return before get next output\n_h_dll_export void QY265DecoderGetDecodedFrame(void *pDecoder, QY265Frame* pFrame, int * pStat, int bForceLogo);\n// return the frame buffer which QY265DecoderGetOutput get from decoder, each valid QY265DecoderGetOutput should match with a ReturnFrame\n_h_dll_export void QY265DecoderReturnDecodedFrame( void *pDecoder, QY265Frame* pFrame);\n\n/**\n * dump latest decoded VUI parameters\n * @param_input pDecoder:   decoder instance\n * @param_output vui:       fill with decoded vui parameters\n * @param_output bValid: =0 if no valid vui parameters decoded,\n *                      otherwise =1\n */\n_h_dll_export void QY265DumpVUIParameters(void* pDecoder, vui_parameters* vui, int* bValid);\n\n#if 
defined(__cplusplus)\n}\n#endif//__cplusplus\n\n#endif//header\n"
  },
  {
    "path": "Android_demo/prebuilt/include/qy265def.h",
    "content": "#ifndef _QY265_DEF_H_\n#define  _QY265_DEF_H_\n\n// ****************************************\n// error type\n// ****************************************\nenum\n{\n    QY_OK = (0x00000000),          // Success codes\n    QY_FAIL = (0x80000001),        //  Unspecified error\n    QY_OUTOFMEMORY = (0x80000002), //  Ran out of memory\n    QY_POINTER = (0x80000003),     //  Invalid pointer\n    QY_NOTSUPPORTED = (0x80000004),//  NOT support feature encountered\n    QY_AUTH_INVALID = (0x80000005), //  authentication invalid\n    QY_SEARCHING_ACCESS_POINT = (0x00000001), // in process of searching first access point\n    QY_REF_PIC_NOT_FOUND = (0x00000007), // reference picture not found, can be ignored\n#if defined(EMSCRIPTEN)||defined(_TEST_FOR_EMSCRIPTEN)\n    QY_NEED_MORE_DATA = (0x00000008), //need push more data\n#endif\n    QY_BITSTREAM_ERROR = (0x00000009), // detecting bitstream error, can be ignored\n    QY_CTU_REENCODE = (0x81000000), //ctu reencode\n};\n\nenum NAL_UNIT_TYPE{\n    NAL_UNIT_TYPE_TRAIL_N = 0,\n    NAL_UNIT_TYPE_TRAIL_R = 1,\n\n    NAL_UNIT_TYPE_TSA_N = 2,\n    NAL_UNIT_TYPE_TSA_R = 3,\n\n    NAL_UNIT_TYPE_STSA_N = 4,\n    NAL_UNIT_TYPE_STSA_R = 5,\n\n    NAL_UNIT_TYPE_RADL_N = 6,\n    NAL_UNIT_TYPE_RADL_R = 7,\n\n    NAL_UNIT_TYPE_RASL_N = 8,\n    NAL_UNIT_TYPE_RASL_R = 9,\n\n    //reserved\n    NAL_UNIT_TYPE_RSV_VCL_N10 = 10,\n    NAL_UNIT_TYPE_RSV_VCL_N12 = 12,\n    NAL_UNIT_TYPE_RSV_VCL_N14 = 14,\n    NAL_UNIT_TYPE_RSV_VCL_R11 = 11,\n    NAL_UNIT_TYPE_RSV_VCL_R13 = 13,\n    NAL_UNIT_TYPE_RSV_VCL_R15 = 15,\n\n    NAL_UNIT_TYPE_BLA_W_LP = 16,\n    NAL_UNIT_TYPE_BLA_W_RADL = 17,\n    NAL_UNIT_TYPE_BLA_N_LP = 18,\n\n    NAL_UNIT_TYPE_IDR_W_RADL = 19,\n    NAL_UNIT_TYPE_IDR_N_LP = 20,\n\n    NAL_UNIT_TYPE_CRA_NUT = 21,\n\n    NAL_UNIT_TYPE_RSV_IRAP_VCL22 = 22,\n    NAL_UNIT_TYPE_RSV_IRAP_VCL23 = 23,\n\n    NAL_UNIT_TYPE_RSV_VCL24 = 24,\n    NAL_UNIT_TYPE_RSV_VCL25 = 25,\n    NAL_UNIT_TYPE_RSV_VCL26 = 26,\n    NAL_UNIT_TYPE_RSV_VCL27 
= 27,\n    NAL_UNIT_TYPE_RSV_VCL28 = 28,\n    NAL_UNIT_TYPE_RSV_VCL29 = 29,\n    NAL_UNIT_TYPE_RSV_VCL30 = 30,\n    NAL_UNIT_TYPE_RSV_VCL31 = 31,\n\n    NAL_UNIT_TYPE_VPS_NUT = 32,\n    NAL_UNIT_TYPE_SPS_NUT = 33,\n    NAL_UNIT_TYPE_PPS_NUT = 34,\n    NAL_UNIT_TYPE_AUD_NUT = 35,\n    NAL_UNIT_TYPE_EOS_NUT = 36,\n    NAL_UNIT_TYPE_EOB_NUT = 37,\n    NAL_UNIT_TYPE_FD_NUT = 38,\n\n    NAL_UNIT_TYPE_PREFIX_SEI_NUT = 39,\n    NAL_UNIT_TYPE_SUFFIX_SEI_NUT = 40,\n\n    NAL_UNIT_TYPE_RSV_NVCL41 = 41,\n    NAL_UNIT_TYPE_RSV_NVCL42 = 42,\n    NAL_UNIT_TYPE_RSV_NVCL43 = 43,\n    NAL_UNIT_TYPE_RSV_NVCL44 = 44,\n    NAL_UNIT_TYPE_RSV_NVCL45 = 45,\n    NAL_UNIT_TYPE_RSV_NVCL46 = 46,\n    NAL_UNIT_TYPE_RSV_NVCL47 = 47,\n\n    NAL_UNIT_TYPE_UNSPEC48 = 48,\n    NAL_UNIT_TYPE_UNSPEC49 = 49,\n    NAL_UNIT_TYPE_UNSPEC50 = 50,\n    NAL_UNIT_TYPE_UNSPEC51 = 51,\n    NAL_UNIT_TYPE_UNSPEC52 = 52,\n    NAL_UNIT_TYPE_UNSPEC53 = 53,\n    NAL_UNIT_TYPE_UNSPEC54 = 54,\n    NAL_UNIT_TYPE_UNSPEC55 = 55,\n    NAL_UNIT_TYPE_UNSPEC56 = 56,\n    NAL_UNIT_TYPE_UNSPEC57 = 57,\n    NAL_UNIT_TYPE_UNSPEC58 = 58,\n    NAL_UNIT_TYPE_UNSPEC59 = 59,\n    NAL_UNIT_TYPE_UNSPEC60 = 60,\n    NAL_UNIT_TYPE_UNSPEC61 = 61,\n    NAL_UNIT_TYPE_UNSPEC62 = 62,\n    NAL_UNIT_TYPE_UNSPEC63 = 63,\n};\n\n// ****************************************\n// VUI\n// ****************************************\ntypedef struct vui_parameters{\n        // --- sample aspect ratio (SAR) ---\n    unsigned char     aspect_ratio_info_present_flag;\n    unsigned short sar_width;  // sar_width and sar_height are zero if unspecified\n    unsigned short sar_height;\n\n    // --- overscan ---\n    unsigned char     overscan_info_present_flag;\n    unsigned char     overscan_appropriate_flag;\n\n    // --- video signal type ---\n    unsigned char   video_signal_type_present_flag;\n    unsigned char   video_format;\n    unsigned char   video_full_range_flag;\n    unsigned char   colour_description_present_flag;\n    unsigned char   
colour_primaries;\n    unsigned char   transfer_characteristics;\n    unsigned char   matrix_coeffs;\n\n    // --- chroma / interlaced ---\n    unsigned char     chroma_loc_info_present_flag;\n    unsigned char  chroma_sample_loc_type_top_field;\n    unsigned char  chroma_sample_loc_type_bottom_field;\n    unsigned char     neutral_chroma_indication_flag;\n    unsigned char     field_seq_flag;\n    unsigned char     frame_field_info_present_flag;\n\n    // --- default display window ---\n    unsigned char     default_display_window_flag;\n    unsigned int def_disp_win_left_offset;\n    unsigned int def_disp_win_right_offset;\n    unsigned int def_disp_win_top_offset;\n    unsigned int def_disp_win_bottom_offset;\n\n    // --- timing ---\n    unsigned char     vui_timing_info_present_flag;\n    unsigned int vui_num_units_in_tick;\n    unsigned int vui_time_scale;\n\n    unsigned char     vui_poc_proportional_to_timing_flag;\n    unsigned int vui_num_ticks_poc_diff_one;\n\n    // --- hrd parameters ---\n    unsigned char     vui_hrd_parameters_present_flag;\n    //hrd_parameters vui_hrd_parameters;\n\n    // --- bitstream restriction ---\n    unsigned char bitstream_restriction_flag;\n    unsigned char tiles_fixed_structure_flag;\n    unsigned char motion_vectors_over_pic_boundaries_flag;\n    unsigned char restricted_ref_pic_lists_flag;\n    unsigned short min_spatial_segmentation_idc;\n    unsigned char  max_bytes_per_pic_denom;\n    unsigned char  max_bits_per_min_cu_denom;\n    unsigned char  log2_max_mv_length_horizontal;\n    unsigned char  log2_max_mv_length_vertical;\n}vui_parameters;\n\n#if defined(SWIG) || defined(__AVM2__)\n#define _h_dll_export\n#else\n\n#ifdef WIN32\n#define _h_dll_export   __declspec(dllexport)\n#else // for GCC\n#define _h_dll_export __attribute__ ((visibility(\"default\")))\n#endif\n\n#endif  //SWIG\n\ntypedef void  (*QYLogPrintf)(const char* msg);\ntypedef void  (*QYAuthWarning)();\n\n#if defined(__cplusplus)\nextern \"C\" 
{\n#endif//__cplusplus\n\n// log output callback func pointer \n// if  pFuncCB == NULL, use the default printf\n_h_dll_export void QY265SetLogPrintf ( QYLogPrintf pFuncCB);\n\n// auth trouble warning callback func pointer\n_h_dll_export void QY265SetAuthWarning ( QYAuthWarning pFuncCB);\n\n#if defined(__cplusplus)\n}\n#endif//__cplusplus\n\n//libqy265 version number string\n_h_dll_export extern const char strLibQy265Version[];\n\n#endif\n"
  },
  {
    "path": "Android_demo/prebuilt/include/qy265enc.h",
    "content": "///////////////////////////////////////////////////\n//\n//         Kingsoft H265 Codec Library \n//\n//  Copyright(c) Kingsoft cloud Inc.\n//              http://www.ksyun.com/\n//\n///////////////////////////////////////////////////\n/************************************************************************************\n* encInf.h: interface of encoder for user\n*\n* \\date     2013-09-28: first version\n*    \n************************************************************************************/\n#ifndef   _QY265_ENCODER_INTERFACE_H_\n#define   _QY265_ENCODER_INTERFACE_H_\n\n#include \"qy265def.h\"\n// ****************************************\n// base configuration \n// ****************************************\n//app type\ntypedef enum QY265Tune_tag{\n    QY265TUNE_DEFAULT = 0,\n    QY265TUNE_SELFSHOW = 1,\n    QY265TUNE_GAME = 2,\n    QY265TUNE_MOVIE = 3,\n    QY265TUNE_SCREEN = 4\n}QY265Tune;\n\ntypedef enum QY265Preset_tag{\n    QY265PRESET_ULTRAFAST = 0,\n    QY265PRESET_SUPERFAST = 1,\n    QY265PRESET_VERYFAST = 2,\n    QY265PRESET_FAST = 3,\n    QY265PRESET_MEDIUM = 4,\n    QY265PRESET_SLOW = 5,\n    QY265PRESET_SLOWER = 6,\n    QY265PRESET_VERYSLOW = 7,\n    QY265PRESET_PLACEBO = 8,\n}QY265Preset;\n\ntypedef enum QY265Latency_tag{\n    QY265LATENCY_ZERO = 0,\n    QY265LATENCY_LOWDELAY = 1,\n    QY265LATENCY_LIVESTREMING = 2,\n    QY265LATENCY_DEFAULT = 3,\n}QY265Latency;\n\n//base configuration\ntypedef struct QY265EncConfig{\n    void* pAuth;        //QYAuth, invalid if don't need aksk auth\n    QY265Tune tune;    //\n    QY265Preset preset;\n    QY265Latency latency;\n    int profileId;         //currently, support 1 and 3 separately for main and main still profile\n    int bHeaderBeforeKeyframe; //whether output vps,sps,pps before key frame, default 1. 
dis/enable 0/1\n    int picWidth;          // input frame width\n    int picHeight;         // input frame height\n    double frameRate;      // input frame rate\n    int bframes;           // num of bi-pred frames, -1: using default\n    int temporalLayer;     // works with QY265LATENCY_ZERO, separate P frames into temporal layers, 0 or 1\n    \n    int vpp_denoise;       // vpp denoise 0 disable, 1 gentle, 2 medium, 3 aggressive\n    int vpp_edge;          // vpp edge enhance 0 disable, 1 gentle, 2 medium, 3 aggressive\n    int vpp_color;         // vpp color enhance 0 disable, 1 gentle, 2 medium, 3 aggressive\n    int vpp_hdr;           // vpp HDR enhance 0 disable, 1 enable\n    double vpp_hdr_strength;  // vpp HDR strength, 0~5\n    int vpp_hdr_iter;      // vpp HDR iteration, 2 or 3\n    double vpp_hdr_sigma_s; // vpp HDR parameter 0~100\n    double vpp_hdr_sigma_r; // vpp HDR parameter 0~100\n    double vpp_recur_filter;    // vpp Recursive Filter 0~30\n\n    int rc;                // rc type 0 disable,1 cbr,2 abr,3 crf,4 cvbr,5 cvq, default 2\n    int bitrateInkbps;     // target bit rate in kbps, valid when rctype is cbr and vbr\n    int vbv_buffer_size;   // buf size of vbv\n    int vbv_max_rate;      // max rate of vbv\n    int vbv_min_rate;      // min rate of vbv\n    int qp;                // valid when rctype is disable, default 26\n    int crf;               // valid when rctype is crf,default 24\n    int visual_quality;    // valid when rctype is cvq,[70-100], default 95\n    int iIntraPeriod;      // I-Frame period, -1 = only first\n    int qpmin;              //minimal qp, valid when rc != 0, 0~51\n    int qpmax;              //maximal qp, valid when rc != 0, 1~51, qpmax = 0 means 51\n    int enFrameSkip;        //1: enable frame skip for ratecontrol, default 0\n    //* Execute Properties \n    int enWavefront;       //enable wave front parallel\n    int enFrameParallel;   //enable frame parallel\n    int threads;           // number of threads 
used in encoding ( for wavefront, frame parallel, or enable both )\n    //* vui_parameters\n    //vui_parameters_present_flag equal to 1 specifies that the vui_parameters() syntax in struct vui should set by usr\n    int vui_parameters_present_flag;\n    struct{\n        /* video_signal_type_present_flag.  If this is set then\n         * video_format, video_full_range_flag and colour_description_present_flag\n         * will be added to the VUI. The default is false */\n        int video_signal_type_present_flag;\n        /* Video format of the source video.  0 = component, 1 = PAL, 2 = NTSC,\n         * 3 = SECAM, 4 = MAC, 5 = unspecified video format is the default */\n        int video_format;\n        /* video_full_range_flag indicates the black level and range of the luma\n         * and chroma signals as derived from E'Y, E'PB, and E'PR or E'R, E'G,\n         * and E'B real-valued component signals. The default is false */\n        int video_full_range_flag;\n        /* colour_description_present_flag in the VUI. If this is set then\n         * color_primaries, transfer_characteristics and matrix_coeffs are to be\n         * added to the VUI. The default is false */\n        int colour_description_present_flag;\n        /* colour_primaries holds the chromacity coordinates of the source\n         * primaries. The default is 2 */\n        int colour_primaries;\n        /* transfer_characteristics indicates the opto-electronic transfer\n         * characteristic of the source picture. The default is 2 */\n        int transfer_characteristics;\n        /* matrix_coeffs used to derive the luma and chroma signals from\n         * the red, blue and green primaries. 
The default is 2 */\n        int matrix_coeffs;\n    }vui;\n    //* tool list\n    int logLevel;          //log level (-1: dbg; 0: info; 1:warn; 2:err; 3:fatal)\n    int lookahead;         // rc lookahead settings\n    int calcPsnr;          //0:not calc psnr; 1: print total psnr; 2: print each frame\n    int calcSsim;          //0:not calc ssim; 1: print total ssim; 2: print each frame\n    int shortLoadingForPlayer;  //reduce b frames after I frame, for shorting the loading time of VOD for some players\n    //ZEL_2PASS:parameters for 2pass\n    int  iPass; //Multi pass rate control,0,disable 2pass encode method; 1: first pass; 2: second pass;\n    char statFileName[256]; //log file produced from first pass, seet by user\n    double      fRateTolerance;//default 2.0f,0.5 is suitable to reduce the largest bitrate, and 0.1 is to make the bitrate stable\n    int  rdoq;//1:enabling rdoq\n    int  me;//0: DIA, 1: HEX, 2: UMH, 3:EPZS,\n    int  part;//enabling 2nxn, nx2n pu\n    int  do64;//1:enabling 64x64 cu\n    int  tuInter;//inter RQT tu depth, 0~3, -1 means auto\n    int  tuIntra;//intra RQT tu depth, 0~3, -1 means auto\n    int  smooth;//1: enabling strong intra smoothing\n    int  transskip;//1: enabling transform skip\n    int  subme;// 0 : disable 1 : fast, 2 : square full\n    int  satdInter;//1:enabling hardmad sad\n    int  satdIntra;//1:enabling hardmad sad\n    int  searchrange;//search range\n    int  refnum;// reference number \n    int  ref0;//add interface for ref0\n    int  sao;//sao enabling, 0: disable; 1:faster; 2: faster; 3: usual; 4:complex\n    int  longTermRef;//0:disabling longterm reference 1:enable;\n    int  iAqMode;// adaptive quantization 0~3, 0: disable\n    double fAqStrength;//strength of adaptive quantizaiton, 0~3.0, default 1.0\n    int  rasl; // enable RASL NAL for CRA,default 1, if not enable RASL, then CRA is act like IDR\n}QY265EncConfig;\n\n// ****************************************\n// callback functions\n// 
****************************************\n//the encoder works in asynchronous mode (for supports of B frames)\n//once calling on EncodeFrame not corresponds to one Frame's bitstream output\n//thus, use callback function on Frame Encoded\n//also, buffer of srcYUV should be reserved for encoder, until it's done\n// CALLBACK method to feed the encoded bit stream\n\n// input frame data and info\ntypedef struct QY265YUV{\n    int iWidth;                 // input frame width\n    int iHeight;                // input frame height\n    unsigned char* pData[3];    // input frame Y U V\n    int iStride[3];             // stride for Y U V\n}QY265YUV;\n\n// input frame data and info\ntypedef struct QY265Picture{\n    int iSliceType; // specified by output pictures\n    int poc;        // ignored on input\n    long long pts;\n    long long dts;\n    QY265YUV* yuv;\n}QY265Picture;\n\n\ntypedef struct QY265Nal\n{\n    int naltype;\n    int tid;\n    int iSize;\n    long long pts;\n    unsigned char* pPayload;\n}QY265Nal;\n\n\n#if defined(__cplusplus)\nextern \"C\" {\n#endif//__cplusplus\n/**\n* create encoder\n* @param pCfg : base config of encoder\n* @param errorCode: error code\n* @return encoder handle\n*/\n_h_dll_export void* QY265EncoderOpen(QY265EncConfig* pCfg, int *errorCode);\n// destroy encoder \n_h_dll_export void QY265EncoderClose(void* pEncoder);\n// reconfig encoder\n_h_dll_export void QY265EncoderReconfig(void* pEncoder,QY265EncConfig* pCfg);\n// return the VPS, SPS and PPS that will be used for the whole stream.\n_h_dll_export int QY265EncoderEncodeHeaders(void* pEncoder,QY265Nal** pNals,int* iNalCount);\n\n/**\n* Encode one frame add logo or not\n*\n* @param pEncoder   handle of encoder\n* @param pNals      pointer array of output NAL units\n* @param iNalCount  output NAL unit count\n* @param pInPic     input frame\n* @param pOutPic    output frame\n* @param bForceLogo add logo on the input frame ( when auth failed)\n* @return if succeed, return 0; if failed, 
return the error code\n*/\n_h_dll_export int QY265EncoderEncodeFrame(void* pEncoder, QY265Nal** pNals, int* iNalCount, QY265Picture* pInpic, QY265Picture* pOutpic, int bForceLogo);\n\n// Request encoder to encode a Key Frame\n_h_dll_export void QY265EncoderKeyFrameRequest(void* pEncoder);\n// current buffered frames \n_h_dll_export int QY265EncoderDelayedFrames(void* pEncoder);\n\nstatic const char* const  qy265_preset_names[] = { \"ultrafast\", \"superfast\", \"veryfast\", \"fast\", \"medium\", \"slow\", \"slower\", \"veryslow\", \"placebo\", 0 };\nstatic const char* const  qy265_tunes_names[] = { \"default\", \"selfshow\", \"game\", \"movie\", \"screen\", 0 };\nstatic const char* const  qy265_latency_names[] = { \"zerolatency\", \"lowdelay\", \"livestreaming\", \"default\", 0 };\n// get default config values by preset, tune and latency. enum format\n_h_dll_export int QY265ConfigDefault(QY265EncConfig* pConfig, QY265Preset preset, QY265Tune tune, QY265Latency latency);\n\n// get default config values by preset, tune and latency. string format\n_h_dll_export int QY265ConfigDefaultPreset(QY265EncConfig* pConfig, char* preset, char* tune, char* latency);\n\n#define QY265_PARAM_BAD_NAME  (-1)\n#define QY265_PARAM_BAD_VALUE (-2)\n_h_dll_export int QY265ConfigParse(QY265EncConfig *p, const char *name, const char *value);\n#if defined(__cplusplus)\n}\n#endif//__cplusplus\n\n#endif\n"
  },
  {
    "path": "Android_demo/prebuilt/include/qyauth_env.h",
    "content": "\n#ifndef _KS_AUTH_ENV_H_\n#define _KS_AUTH_ENV_H_\n\n#if !defined(WIN32)\n#include <stdint.h>\n#endif\n\n#define MAX_URL_LEN 2048\n#define MAX_LEN 512\n#define AUTH_ADDR_NUM 3\n//***********************************\n//* KSPrivateAuthEnv used as AUTH struct\n//* when private server auth method is adopted\n//***********************************\n#ifndef KSAUTH_PRIVATE_AUTH\n#define KSAUTH_PRIVATE_AUTH 0\n#endif\n\n//***********************************\n//for Android, TCounterEnv used as AUTH struct\n//when adopt Count auth method\n#ifndef __PLATFORM_COUNTER_ENV__\n#define __PLATFORM_COUNTER_ENV__\n\n#ifdef ANDROID\n#include <jni.h>\ntypedef struct _TCounterEnv\n{\n    JavaVM *jvm;\n    jobject context;\n}TCounterEnv;\n#endif\n\n#endif\n\n#ifdef WIN32\n#define _ks_dll_export   __declspec(dllexport)\n#else // for GCC\n#define _ks_dll_export __attribute__ ((visibility(\"default\")))\n#endif\n_ks_dll_export extern const char strKsc265AuthVersion[];\n\n#endif //header\n"
  },
  {
    "path": "Android_demo/prebuilt/include/x264.h",
    "content": "/*****************************************************************************\n * x264.h: x264 public header\n *****************************************************************************\n * Copyright (C) 2003-2016 x264 project\n *\n * Authors: Laurent Aimar <fenrir@via.ecp.fr>\n *          Loren Merritt <lorenm@u.washington.edu>\n *          Fiona Glaser <fiona@x264.com>\n *\n * This program is free software; you can redistribute it and/or modify\n * it under the terms of the GNU General Public License as published by\n * the Free Software Foundation; either version 2 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU General Public License for more details.\n *\n * You should have received a copy of the GNU General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02111, USA.\n *\n * This program is also available under a commercial proprietary license.\n * For more information, contact us at licensing@x264.com.\n *****************************************************************************/\n\n#ifndef X264_X264_H\n#define X264_X264_H\n\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n#if !defined(_STDINT_H) && !defined(_STDINT_H_) && !defined(_STDINT_H_INCLUDED) && !defined(_STDINT) &&\\\n    !defined(_SYS_STDINT_H_) && !defined(_INTTYPES_H) && !defined(_INTTYPES_H_) && !defined(_INTTYPES)\n# ifdef _MSC_VER\n#  pragma message(\"You must include stdint.h or inttypes.h before x264.h\")\n# else\n#  warning You must include stdint.h or inttypes.h before x264.h\n# endif\n#endif\n\n#include <stdarg.h>\n\n#include \"x264_config.h\"\n\n#define X264_BUILD 148\n\n/* Application developers planning to link against a shared library version of\n * 
libx264 from a Microsoft Visual Studio or similar development environment\n * will need to define X264_API_IMPORTS before including this header.\n * This clause does not apply to MinGW, similar development environments, or non\n * Windows platforms. */\n#ifdef X264_API_IMPORTS\n#define X264_API __declspec(dllimport)\n#else\n#define X264_API\n#endif\n\n/* x264_t:\n *      opaque handler for encoder */\ntypedef struct x264_t x264_t;\n\n/****************************************************************************\n * NAL structure and functions\n ****************************************************************************/\n\nenum nal_unit_type_e\n{\n    NAL_UNKNOWN     = 0,\n    NAL_SLICE       = 1,\n    NAL_SLICE_DPA   = 2,\n    NAL_SLICE_DPB   = 3,\n    NAL_SLICE_DPC   = 4,\n    NAL_SLICE_IDR   = 5,    /* ref_idc != 0 */\n    NAL_SEI         = 6,    /* ref_idc == 0 */\n    NAL_SPS         = 7,\n    NAL_PPS         = 8,\n    NAL_AUD         = 9,\n    NAL_FILLER      = 12,\n    /* ref_idc == 0 for 6,9,10,11,12 */\n};\nenum nal_priority_e\n{\n    NAL_PRIORITY_DISPOSABLE = 0,\n    NAL_PRIORITY_LOW        = 1,\n    NAL_PRIORITY_HIGH       = 2,\n    NAL_PRIORITY_HIGHEST    = 3,\n};\n\n/* The data within the payload is already NAL-encapsulated; the ref_idc and type\n * are merely in the struct for easy access by the calling application.\n * All data returned in an x264_nal_t, including the data in p_payload, is no longer\n * valid after the next call to x264_encoder_encode.  Thus it must be used or copied\n * before calling x264_encoder_encode or x264_encoder_headers again. */\ntypedef struct x264_nal_t\n{\n    int i_ref_idc;  /* nal_priority_e */\n    int i_type;     /* nal_unit_type_e */\n    int b_long_startcode;\n    int i_first_mb; /* If this NAL is a slice, the index of the first MB in the slice. */\n    int i_last_mb;  /* If this NAL is a slice, the index of the last MB in the slice. */\n\n    /* Size of payload (including any padding) in bytes. 
*/\n    int     i_payload;\n    /* If param->b_annexb is set, Annex-B bytestream with startcode.\n     * Otherwise, startcode is replaced with a 4-byte size.\n     * This size is the size used in mp4/similar muxing; it is equal to i_payload-4 */\n    uint8_t *p_payload;\n\n    /* Size of padding in bytes. */\n    int i_padding;\n} x264_nal_t;\n\n/****************************************************************************\n * Encoder parameters\n ****************************************************************************/\n/* CPU flags */\n\n/* x86 */\n#define X264_CPU_CMOV            0x0000001\n#define X264_CPU_MMX             0x0000002\n#define X264_CPU_MMX2            0x0000004  /* MMX2 aka MMXEXT aka ISSE */\n#define X264_CPU_MMXEXT          X264_CPU_MMX2\n#define X264_CPU_SSE             0x0000008\n#define X264_CPU_SSE2            0x0000010\n#define X264_CPU_SSE3            0x0000020\n#define X264_CPU_SSSE3           0x0000040\n#define X264_CPU_SSE4            0x0000080  /* SSE4.1 */\n#define X264_CPU_SSE42           0x0000100  /* SSE4.2 */\n#define X264_CPU_LZCNT           0x0000200  /* Phenom support for \"leading zero count\" instruction. */\n#define X264_CPU_AVX             0x0000400  /* AVX support: requires OS support even if YMM registers aren't used. 
*/\n#define X264_CPU_XOP             0x0000800  /* AMD XOP */\n#define X264_CPU_FMA4            0x0001000  /* AMD FMA4 */\n#define X264_CPU_FMA3            0x0002000  /* FMA3 */\n#define X264_CPU_AVX2            0x0004000  /* AVX2 */\n#define X264_CPU_BMI1            0x0008000  /* BMI1 */\n#define X264_CPU_BMI2            0x0010000  /* BMI2 */\n/* x86 modifiers */\n#define X264_CPU_CACHELINE_32    0x0020000  /* avoid memory loads that span the border between two cachelines */\n#define X264_CPU_CACHELINE_64    0x0040000  /* 32/64 is the size of a cacheline in bytes */\n#define X264_CPU_SSE2_IS_SLOW    0x0080000  /* avoid most SSE2 functions on Athlon64 */\n#define X264_CPU_SSE2_IS_FAST    0x0100000  /* a few functions are only faster on Core2 and Phenom */\n#define X264_CPU_SLOW_SHUFFLE    0x0200000  /* The Conroe has a slow shuffle unit (relative to overall SSE performance) */\n#define X264_CPU_STACK_MOD4      0x0400000  /* if stack is only mod4 and not mod16 */\n#define X264_CPU_SLOW_CTZ        0x0800000  /* BSR/BSF x86 instructions are really slow on some CPUs */\n#define X264_CPU_SLOW_ATOM       0x1000000  /* The Atom is terrible: slow SSE unaligned loads, slow\n                                             * SIMD multiplies, slow SIMD variable shifts, slow pshufb,\n                                             * cacheline split penalties -- gather everything here that\n                                             * isn't shared by other CPUs to avoid making half a dozen\n                                             * new SLOW flags. 
*/\n#define X264_CPU_SLOW_PSHUFB     0x2000000  /* such as on the Intel Atom */\n#define X264_CPU_SLOW_PALIGNR    0x4000000  /* such as on the AMD Bobcat */\n\n/* PowerPC */\n#define X264_CPU_ALTIVEC         0x0000001\n\n/* ARM and AArch64 */\n#define X264_CPU_ARMV6           0x0000001\n#define X264_CPU_NEON            0x0000002  /* ARM NEON */\n#define X264_CPU_FAST_NEON_MRC   0x0000004  /* Transfer from NEON to ARM register is fast (Cortex-A9) */\n#define X264_CPU_ARMV8           0x0000008\n\n/* MIPS */\n#define X264_CPU_MSA             0x0000001  /* MIPS MSA */\n\n/* Analyse flags */\n#define X264_ANALYSE_I4x4       0x0001  /* Analyse i4x4 */\n#define X264_ANALYSE_I8x8       0x0002  /* Analyse i8x8 (requires 8x8 transform) */\n#define X264_ANALYSE_PSUB16x16  0x0010  /* Analyse p16x8, p8x16 and p8x8 */\n#define X264_ANALYSE_PSUB8x8    0x0020  /* Analyse p8x4, p4x8, p4x4 */\n#define X264_ANALYSE_BSUB16x16  0x0100  /* Analyse b16x8, b8x16 and b8x8 */\n#define X264_DIRECT_PRED_NONE        0\n#define X264_DIRECT_PRED_SPATIAL     1\n#define X264_DIRECT_PRED_TEMPORAL    2\n#define X264_DIRECT_PRED_AUTO        3\n#define X264_ME_DIA                  0\n#define X264_ME_HEX                  1\n#define X264_ME_UMH                  2\n#define X264_ME_ESA                  3\n#define X264_ME_TESA                 4\n#define X264_CQM_FLAT                0\n#define X264_CQM_JVT                 1\n#define X264_CQM_CUSTOM              2\n#define X264_RC_CQP                  0\n#define X264_RC_CRF                  1\n#define X264_RC_ABR                  2\n#define X264_QP_AUTO                 0\n#define X264_AQ_NONE                 0\n#define X264_AQ_VARIANCE             1\n#define X264_AQ_AUTOVARIANCE         2\n#define X264_AQ_AUTOVARIANCE_BIASED  3\n#define X264_B_ADAPT_NONE            0\n#define X264_B_ADAPT_FAST            1\n#define X264_B_ADAPT_TRELLIS         2\n#define X264_WEIGHTP_NONE            0\n#define X264_WEIGHTP_SIMPLE          1\n#define X264_WEIGHTP_SMART        
   2\n#define X264_B_PYRAMID_NONE          0\n#define X264_B_PYRAMID_STRICT        1\n#define X264_B_PYRAMID_NORMAL        2\n#define X264_KEYINT_MIN_AUTO         0\n#define X264_KEYINT_MAX_INFINITE     (1<<30)\n\nstatic const char * const x264_direct_pred_names[] = { \"none\", \"spatial\", \"temporal\", \"auto\", 0 };\nstatic const char * const x264_motion_est_names[] = { \"dia\", \"hex\", \"umh\", \"esa\", \"tesa\", 0 };\nstatic const char * const x264_b_pyramid_names[] = { \"none\", \"strict\", \"normal\", 0 };\nstatic const char * const x264_overscan_names[] = { \"undef\", \"show\", \"crop\", 0 };\nstatic const char * const x264_vidformat_names[] = { \"component\", \"pal\", \"ntsc\", \"secam\", \"mac\", \"undef\", 0 };\nstatic const char * const x264_fullrange_names[] = { \"off\", \"on\", 0 };\nstatic const char * const x264_colorprim_names[] = { \"\", \"bt709\", \"undef\", \"\", \"bt470m\", \"bt470bg\", \"smpte170m\", \"smpte240m\", \"film\", \"bt2020\", \"smpte428\",\n                                                     \"smpte431\", \"smpte432\", 0 };\nstatic const char * const x264_transfer_names[] = { \"\", \"bt709\", \"undef\", \"\", \"bt470m\", \"bt470bg\", \"smpte170m\", \"smpte240m\", \"linear\", \"log100\", \"log316\",\n                                                    \"iec61966-2-4\", \"bt1361e\", \"iec61966-2-1\", \"bt2020-10\", \"bt2020-12\", \"smpte2084\", \"smpte428\", 0 };\nstatic const char * const x264_colmatrix_names[] = { \"GBR\", \"bt709\", \"undef\", \"\", \"fcc\", \"bt470bg\", \"smpte170m\", \"smpte240m\", \"YCgCo\", \"bt2020nc\", \"bt2020c\",\n                                                     \"smpte2085\", 0 };\nstatic const char * const x264_nal_hrd_names[] = { \"none\", \"vbr\", \"cbr\", 0 };\n\n/* Colorspace type */\n#define X264_CSP_MASK           0x00ff  /* */\n#define X264_CSP_NONE           0x0000  /* Invalid mode     */\n#define X264_CSP_I420           0x0001  /* yuv 4:2:0 planar */\n#define X264_CSP_YV12           0x0002  
/* yvu 4:2:0 planar */\n#define X264_CSP_NV12           0x0003  /* yuv 4:2:0, with one y plane and one packed u+v */\n#define X264_CSP_NV21           0x0004  /* yuv 4:2:0, with one y plane and one packed v+u */\n#define X264_CSP_I422           0x0005  /* yuv 4:2:2 planar */\n#define X264_CSP_YV16           0x0006  /* yvu 4:2:2 planar */\n#define X264_CSP_NV16           0x0007  /* yuv 4:2:2, with one y plane and one packed u+v */\n#define X264_CSP_V210           0x0008  /* 10-bit yuv 4:2:2 packed in 32 */\n#define X264_CSP_I444           0x0009  /* yuv 4:4:4 planar */\n#define X264_CSP_YV24           0x000a  /* yvu 4:4:4 planar */\n#define X264_CSP_BGR            0x000b  /* packed bgr 24bits   */\n#define X264_CSP_BGRA           0x000c  /* packed bgr 32bits   */\n#define X264_CSP_RGB            0x000d  /* packed rgb 24bits   */\n#define X264_CSP_MAX            0x000e  /* end of list */\n#define X264_CSP_VFLIP          0x1000  /* the csp is vertically flipped */\n#define X264_CSP_HIGH_DEPTH     0x2000  /* the csp has a depth of 16 bits per pixel component */\n\n/* Slice type */\n#define X264_TYPE_AUTO          0x0000  /* Let x264 choose the right type */\n#define X264_TYPE_IDR           0x0001\n#define X264_TYPE_I             0x0002\n#define X264_TYPE_P             0x0003\n#define X264_TYPE_BREF          0x0004  /* Non-disposable B-frame */\n#define X264_TYPE_B             0x0005\n#define X264_TYPE_KEYFRAME      0x0006  /* IDR or I depending on b_open_gop option */\n#define IS_X264_TYPE_I(x) ((x)==X264_TYPE_I || (x)==X264_TYPE_IDR || (x)==X264_TYPE_KEYFRAME)\n#define IS_X264_TYPE_B(x) ((x)==X264_TYPE_B || (x)==X264_TYPE_BREF)\n\n/* Log level */\n#define X264_LOG_NONE          (-1)\n#define X264_LOG_ERROR          0\n#define X264_LOG_WARNING        1\n#define X264_LOG_INFO           2\n#define X264_LOG_DEBUG          3\n\n/* Threading */\n#define X264_THREADS_AUTO 0 /* Automatically select optimal number of threads */\n#define X264_SYNC_LOOKAHEAD_AUTO (-1) /* 
Automatically select optimal lookahead thread buffer size */\n\n/* HRD */\n#define X264_NAL_HRD_NONE            0\n#define X264_NAL_HRD_VBR             1\n#define X264_NAL_HRD_CBR             2\n\n/* Zones: override ratecontrol or other options for specific sections of the video.\n * See x264_encoder_reconfig() for which options can be changed.\n * If zones overlap, whichever comes later in the list takes precedence. */\ntypedef struct x264_zone_t\n{\n    int i_start, i_end; /* range of frame numbers */\n    int b_force_qp; /* whether to use qp vs bitrate factor */\n    int i_qp;\n    float f_bitrate_factor;\n    struct x264_param_t *param;\n} x264_zone_t;\n\ntypedef struct x264_param_t\n{\n    /* CPU flags */\n    unsigned int cpu;\n    int         i_threads;           /* encode multiple frames in parallel */\n    int         i_lookahead_threads; /* multiple threads for lookahead analysis */\n    int         b_sliced_threads;  /* Whether to use slice-based threading. */\n    int         b_deterministic; /* whether to allow non-deterministic optimizations when threaded */\n    int         b_cpu_independent; /* force canonical behavior rather than cpu-dependent optimal algorithms */\n    int         i_sync_lookahead; /* threaded lookahead buffer */\n\n    /* Video Properties */\n    int         i_width;\n    int         i_height;\n    int         i_csp;         /* CSP of encoded bitstream */\n    int         i_level_idc;\n    int         i_frame_total; /* number of frames to encode if known, else 0 */\n\n    /* NAL HRD\n     * Uses Buffering and Picture Timing SEIs to signal HRD\n     * The HRD in H.264 was not designed with VFR in mind.\n     * It is therefore not recommendeded to use NAL HRD with VFR.\n     * Furthermore, reconfiguring the VBV (via x264_encoder_reconfig)\n     * will currently generate invalid HRD. 
*/\n    int         i_nal_hrd;\n\n    struct\n    {\n        /* they will be reduced to be 0 < x <= 65535 and prime */\n        int         i_sar_height;\n        int         i_sar_width;\n\n        int         i_overscan;    /* 0=undef, 1=no overscan, 2=overscan */\n\n        /* see h264 annex E for the values of the following */\n        int         i_vidformat;\n        int         b_fullrange;\n        int         i_colorprim;\n        int         i_transfer;\n        int         i_colmatrix;\n        int         i_chroma_loc;    /* both top & bottom */\n    } vui;\n\n    /* Bitstream parameters */\n    int         i_frame_reference;  /* Maximum number of reference frames */\n    int         i_dpb_size;         /* Force a DPB size larger than that implied by B-frames and reference frames.\n                                     * Useful in combination with interactive error resilience. */\n    int         i_keyint_max;       /* Force an IDR keyframe at this interval */\n    int         i_keyint_min;       /* Scenecuts closer together than this are coded as I, not IDR. */\n    int         i_scenecut_threshold; /* how aggressively to insert extra I frames */\n    int         b_intra_refresh;    /* Whether or not to use periodic intra refresh instead of IDR frames. 
*/\n\n    int         i_bframe;   /* how many b-frame between 2 references pictures */\n    int         i_bframe_adaptive;\n    int         i_bframe_bias;\n    int         i_bframe_pyramid;   /* Keep some B-frames as references: 0=off, 1=strict hierarchical, 2=normal */\n    int         b_open_gop;\n    int         b_bluray_compat;\n    int         i_avcintra_class;\n\n    int         b_deblocking_filter;\n    int         i_deblocking_filter_alphac0;    /* [-6, 6] -6 light filter, 6 strong */\n    int         i_deblocking_filter_beta;       /* [-6, 6]  idem */\n\n    int         b_cabac;\n    int         i_cabac_init_idc;\n\n    int         b_interlaced;\n    int         b_constrained_intra;\n\n    int         i_cqm_preset;\n    char        *psz_cqm_file;      /* filename (in UTF-8) of CQM file, JM format */\n    uint8_t     cqm_4iy[16];        /* used only if i_cqm_preset == X264_CQM_CUSTOM */\n    uint8_t     cqm_4py[16];\n    uint8_t     cqm_4ic[16];\n    uint8_t     cqm_4pc[16];\n    uint8_t     cqm_8iy[64];\n    uint8_t     cqm_8py[64];\n    uint8_t     cqm_8ic[64];\n    uint8_t     cqm_8pc[64];\n\n    /* Log */\n    void        (*pf_log)( void *, int i_level, const char *psz, va_list );\n    void        *p_log_private;\n    int         i_log_level;\n    int         b_full_recon;   /* fully reconstruct frames, even when not necessary for encoding.  
Implied by psz_dump_yuv */\n    char        *psz_dump_yuv;  /* filename (in UTF-8) for reconstructed frames */\n\n    /* Encoder analyser parameters */\n    struct\n    {\n        unsigned int intra;     /* intra partitions */\n        unsigned int inter;     /* inter partitions */\n\n        int          b_transform_8x8;\n        int          i_weighted_pred; /* weighting for P-frames */\n        int          b_weighted_bipred; /* implicit weighting for B-frames */\n        int          i_direct_mv_pred; /* spatial vs temporal mv prediction */\n        int          i_chroma_qp_offset;\n\n        int          i_me_method; /* motion estimation algorithm to use (X264_ME_*) */\n        int          i_me_range; /* integer pixel motion estimation search range (from predicted mv) */\n        int          i_mv_range; /* maximum length of a mv (in pixels). -1 = auto, based on level */\n        int          i_mv_range_thread; /* minimum space between threads. -1 = auto, based on number of threads. */\n        int          i_subpel_refine; /* subpixel motion estimation quality */\n        int          b_chroma_me; /* chroma ME for subpel and mode decision in P-frames */\n        int          b_mixed_references; /* allow each mb partition to have its own reference number */\n        int          i_trellis;  /* trellis RD quantization */\n        int          b_fast_pskip; /* early SKIP detection on P-frames */\n        int          b_dct_decimate; /* transform coefficient thresholding on P-frames */\n        int          i_noise_reduction; /* adaptive pseudo-deadzone */\n        float        f_psy_rd; /* Psy RD strength */\n        float        f_psy_trellis; /* Psy trellis strength */\n        int          b_psy; /* Toggle all psy optimizations */\n\n        int          b_mb_info;            /* Use input mb_info data in x264_picture_t */\n        int          b_mb_info_update; /* Update the values in mb_info according to the results of encoding. 
*/\n\n        /* the deadzone size that will be used in luma quantization */\n        int          i_luma_deadzone[2]; /* {inter, intra} */\n\n        int          b_psnr;    /* compute and print PSNR stats */\n        int          b_ssim;    /* compute and print SSIM stats */\n    } analyse;\n\n    /* Rate control parameters */\n    struct\n    {\n        int         i_rc_method;    /* X264_RC_* */\n\n        int         i_qp_constant;  /* 0 to (51 + 6*(x264_bit_depth-8)). 0=lossless */\n        int         i_qp_min;       /* min allowed QP value */\n        int         i_qp_max;       /* max allowed QP value */\n        int         i_qp_step;      /* max QP step between frames */\n\n        int         i_bitrate;\n        float       f_rf_constant;  /* 1pass VBR, nominal QP */\n        float       f_rf_constant_max;  /* In CRF mode, maximum CRF as caused by VBV */\n        float       f_rate_tolerance;\n        int         i_vbv_max_bitrate;\n        int         i_vbv_buffer_size;\n        float       f_vbv_buffer_init; /* <=1: fraction of buffer_size. >1: kbit */\n        float       f_ip_factor;\n        float       f_pb_factor;\n\n        /* VBV filler: force CBR VBV and use filler bytes to ensure hard-CBR.\n         * Implied by NAL-HRD CBR. */\n        int         b_filler;\n\n        int         i_aq_mode;      /* psy adaptive QP. (X264_AQ_*) */\n        float       f_aq_strength;\n        int         b_mb_tree;      /* Macroblock-tree ratecontrol. 
*/\n        int         i_lookahead;\n\n        /* 2pass */\n        int         b_stat_write;   /* Enable stat writing in psz_stat_out */\n        char        *psz_stat_out;  /* output filename (in UTF-8) of the 2pass stats file */\n        int         b_stat_read;    /* Read stat from psz_stat_in and use it */\n        char        *psz_stat_in;   /* input filename (in UTF-8) of the 2pass stats file */\n\n        /* 2pass params (same as ffmpeg ones) */\n        float       f_qcompress;    /* 0.0 => cbr, 1.0 => constant qp */\n        float       f_qblur;        /* temporally blur quants */\n        float       f_complexity_blur; /* temporally blur complexity */\n        x264_zone_t *zones;         /* ratecontrol overrides */\n        int         i_zones;        /* number of zone_t's */\n        char        *psz_zones;     /* alternate method of specifying zones */\n    } rc;\n\n    /* Cropping Rectangle parameters: added to those implicitly defined by\n       non-mod16 video resolutions. */\n    struct\n    {\n        unsigned int i_left;\n        unsigned int i_top;\n        unsigned int i_right;\n        unsigned int i_bottom;\n    } crop_rect;\n\n    /* frame packing arrangement flag */\n    int i_frame_packing;\n\n    /* Muxing parameters */\n    int b_aud;                  /* generate access unit delimiters */\n    int b_repeat_headers;       /* put SPS/PPS before each keyframe */\n    int b_annexb;               /* if set, place start codes (4 bytes) before NAL units,\n                                 * otherwise place size (4 bytes) before NAL units. */\n    int i_sps_id;               /* SPS and PPS id number */\n    int b_vfr_input;            /* VFR input.  If 1, use timebase and timestamps for ratecontrol purposes.\n                                 * If 0, use fps only. 
*/\n    int b_pulldown;             /* use explicitly set timebase for CFR */\n    uint32_t i_fps_num;\n    uint32_t i_fps_den;\n    uint32_t i_timebase_num;    /* Timebase numerator */\n    uint32_t i_timebase_den;    /* Timebase denominator */\n\n    int b_tff;\n\n    /* Pulldown:\n     * The correct pic_struct must be passed with each input frame.\n     * The input timebase should be the timebase corresponding to the output framerate. This should be constant.\n     * e.g. for 3:2 pulldown timebase should be 1001/30000\n     * The PTS passed with each frame must be the PTS of the frame after pulldown is applied.\n     * Frame doubling and tripling require b_vfr_input set to zero (see H.264 Table D-1)\n     *\n     * Pulldown changes are not clearly defined in H.264. Therefore, it is the calling app's responsibility to manage this.\n     */\n\n    int b_pic_struct;\n\n    /* Fake Interlaced.\n     *\n     * Used only when b_interlaced=0. Setting this flag makes it possible to flag the stream as PAFF interlaced yet\n     * encode all frames progressively. It is useful for encoding 25p and 30p Blu-Ray streams.\n     */\n\n    int b_fake_interlaced;\n\n    /* Don't optimize header parameters based on video content, e.g. ensure that splitting an input video, compressing\n     * each part, and stitching them back together will result in identical SPS/PPS. This is necessary for stitching\n     * with container formats that don't allow multiple SPS/PPS. */\n    int b_stitchable;\n\n    int b_opencl;            /* use OpenCL when available */\n    int i_opencl_device;     /* specify count of GPU devices to skip, for CLI users */\n    void *opencl_device_id;  /* pass explicit cl_device_id as void*, for API users */\n    char *psz_clbin_file;    /* filename (in UTF-8) of the compiled OpenCL kernel cache file */\n\n    /* Slicing parameters */\n    int i_slice_max_size;    /* Max size per slice in bytes; includes estimated NAL overhead. 
*/\n    int i_slice_max_mbs;     /* Max number of MBs per slice; overrides i_slice_count. */\n    int i_slice_min_mbs;     /* Min number of MBs per slice */\n    int i_slice_count;       /* Number of slices per frame: forces rectangular slices. */\n    int i_slice_count_max;   /* Absolute cap on slices per frame; stops applying slice-max-size\n                              * and slice-max-mbs if this is reached. */\n\n    /* Optional callback for freeing this x264_param_t when it is done being used.\n     * Only used when the x264_param_t sits in memory for an indefinite period of time,\n     * i.e. when an x264_param_t is passed to x264_t in an x264_picture_t or in zones.\n     * Not used when x264_encoder_reconfig is called directly. */\n    void (*param_free)( void* );\n\n    /* Optional low-level callback for low-latency encoding.  Called for each output NAL unit\n     * immediately after the NAL unit is finished encoding.  This allows the calling application\n     * to begin processing video data (e.g. by sending packets over a network) before the frame\n     * is done encoding.\n     *\n     * This callback MUST do the following in order to work correctly:\n     * 1) Have available an output buffer of at least size nal->i_payload*3/2 + 5 + 64.\n     * 2) Call x264_nal_encode( h, dst, nal ), where dst is the output buffer.\n     * After these steps, the content of nal is valid and can be used in the same way as if\n     * the NAL unit were output by x264_encoder_encode.\n     *\n     * This does not need to be synchronous with the encoding process: the data pointed to\n     * by nal (both before and after x264_nal_encode) will remain valid until the next\n     * x264_encoder_encode call.  The callback must be re-entrant.\n     *\n     * This callback does not work with frame-based threads; threads must be disabled\n     * or sliced-threads enabled.  
This callback also does not work as one would expect\n     * with HRD -- since the buffering period SEI cannot be calculated until the frame\n     * is finished encoding, it will not be sent via this callback.\n     *\n     * Note also that the NALs are not necessarily returned in order when sliced threads is\n     * enabled.  Accordingly, the variable i_first_mb and i_last_mb are available in\n     * x264_nal_t to help the calling application reorder the slices if necessary.\n     *\n     * When this callback is enabled, x264_encoder_encode does not return valid NALs;\n     * the calling application is expected to acquire all output NALs through the callback.\n     *\n     * It is generally sensible to combine this callback with a use of slice-max-mbs or\n     * slice-max-size.\n     *\n     * The opaque pointer is the opaque pointer from the input frame associated with this\n     * NAL unit. This helps distinguish between nalu_process calls from different sources,\n     * e.g. if doing multiple encodes in one process.\n     */\n    void (*nalu_process)( x264_t *h, x264_nal_t *nal, void *opaque );\n} x264_param_t;\n\nvoid x264_nal_encode( x264_t *h, uint8_t *dst, x264_nal_t *nal );\n\n/****************************************************************************\n * H.264 level restriction information\n ****************************************************************************/\n\ntypedef struct x264_level_t\n{\n    int level_idc;\n    int mbps;        /* max macroblock processing rate (macroblocks/sec) */\n    int frame_size;  /* max frame size (macroblocks) */\n    int dpb;         /* max decoded picture buffer (mbs) */\n    int bitrate;     /* max bitrate (kbit/sec) */\n    int cpb;         /* max vbv buffer (kbit) */\n    int mv_range;    /* max vertical mv component range (pixels) */\n    int mvs_per_2mb; /* max mvs per 2 consecutive mbs. */\n    int slice_rate;  /* ?? 
*/\n    int mincr;       /* min compression ratio */\n    int bipred8x8;   /* limit bipred to >=8x8 */\n    int direct8x8;   /* limit b_direct to >=8x8 */\n    int frame_only;  /* forbid interlacing */\n} x264_level_t;\n\n/* all of the levels defined in the standard, terminated by .level_idc=0 */\nX264_API extern const x264_level_t x264_levels[];\n\n/****************************************************************************\n * Basic parameter handling functions\n ****************************************************************************/\n\n/* x264_param_default:\n *      fill x264_param_t with default values and do CPU detection */\nvoid    x264_param_default( x264_param_t * );\n\n/* x264_param_parse:\n *  set one parameter by name.\n *  returns 0 on success, or returns one of the following errors.\n *  note: BAD_VALUE occurs only if it can't even parse the value,\n *  numerical range is not checked until x264_encoder_open() or\n *  x264_encoder_reconfig().\n *  value=NULL means \"true\" for boolean options, but is a BAD_VALUE for non-booleans. */\n#define X264_PARAM_BAD_NAME  (-1)\n#define X264_PARAM_BAD_VALUE (-2)\nint x264_param_parse( x264_param_t *, const char *name, const char *value );\n\n/****************************************************************************\n * Advanced parameter handling functions\n ****************************************************************************/\n\n/* These functions expose the full power of x264's preset-tune-profile system for\n * easy adjustment of large numbers of internal parameters.\n *\n * In order to replicate x264CLI's option handling, these functions MUST be called\n * in the following order:\n * 1) x264_param_default_preset\n * 2) Custom user options (via param_parse or directly assigned variables)\n * 3) x264_param_apply_fastfirstpass\n * 4) x264_param_apply_profile\n *\n * Additionally, x264CLI does not apply step 3 if the preset chosen is \"placebo\"\n * or --slow-firstpass is set. 
*/\n\n/* x264_param_default_preset:\n *      The same as x264_param_default, but also use the passed preset and tune\n *      to modify the default settings.\n *      (either can be NULL, which implies no preset or no tune, respectively)\n *\n *      Currently available presets are, ordered from fastest to slowest: */\nstatic const char * const x264_preset_names[] = { \"ultrafast\", \"superfast\", \"veryfast\", \"faster\", \"fast\", \"medium\", \"slow\", \"slower\", \"veryslow\", \"placebo\", 0 };\n\n/*      The presets can also be indexed numerically, as in:\n *      x264_param_default_preset( &param, \"3\", ... )\n *      with ultrafast mapping to \"0\" and placebo mapping to \"9\".  This mapping may\n *      of course change if new presets are added in between, but will always be\n *      ordered from fastest to slowest.\n *\n *      Warning: the speed of these presets scales dramatically.  Ultrafast is a full\n *      100 times faster than placebo!\n *\n *      Currently available tunings are: */\nstatic const char * const x264_tune_names[] = { \"film\", \"animation\", \"grain\", \"stillimage\", \"psnr\", \"ssim\", \"fastdecode\", \"zerolatency\", 0 };\n\n/*      Multiple tunings can be used if separated by a delimiter in \",./-+\",\n *      however multiple psy tunings cannot be used.\n *      film, animation, grain, stillimage, psnr, and ssim are psy tunings.\n *\n *      returns 0 on success, negative on failure (e.g. invalid preset/tune name). */\nint     x264_param_default_preset( x264_param_t *, const char *preset, const char *tune );\n\n/* x264_param_apply_fastfirstpass:\n *      If first-pass mode is set (rc.b_stat_read == 0, rc.b_stat_write == 1),\n *      modify the encoder settings to disable options generally not useful on\n *      the first pass. 
*/\nvoid    x264_param_apply_fastfirstpass( x264_param_t * );\n\n/* x264_param_apply_profile:\n *      Applies the restrictions of the given profile.\n *      Currently available profiles are, from most to least restrictive: */\nstatic const char * const x264_profile_names[] = { \"baseline\", \"main\", \"high\", \"high10\", \"high422\", \"high444\", 0 };\n\n/*      (can be NULL, in which case the function will do nothing)\n *\n *      Does NOT guarantee that the given profile will be used: if the restrictions\n *      of \"High\" are applied to settings that are already Baseline-compatible, the\n *      stream will remain baseline.  In short, it does not increase settings, only\n *      decrease them.\n *\n *      returns 0 on success, negative on failure (e.g. invalid profile name). */\nint     x264_param_apply_profile( x264_param_t *, const char *profile );\n\n/****************************************************************************\n * Picture structures and functions\n ****************************************************************************/\n\n/* x264_bit_depth:\n *      Specifies the number of bits per pixel that x264 uses. This is also the\n *      bit depth that x264 encodes in. If this value is > 8, x264 will read\n *      two bytes of input data for each pixel sample, and expect the upper\n *      (16-x264_bit_depth) bits to be zero.\n *      Note: The flag X264_CSP_HIGH_DEPTH must be used to specify the\n *      colorspace depth as well. */\nX264_API extern const int x264_bit_depth;\n\n/* x264_chroma_format:\n *      Specifies the chroma formats that x264 supports encoding. When this\n *      value is non-zero, then it represents a X264_CSP_* that is the only\n *      chroma format that x264 supports encoding. If the value is 0 then\n *      there are no restrictions. 
*/\nX264_API extern const int x264_chroma_format;\n\nenum pic_struct_e\n{\n    PIC_STRUCT_AUTO              = 0, // automatically decide (default)\n    PIC_STRUCT_PROGRESSIVE       = 1, // progressive frame\n    // \"TOP\" and \"BOTTOM\" are not supported in x264 (PAFF only)\n    PIC_STRUCT_TOP_BOTTOM        = 4, // top field followed by bottom\n    PIC_STRUCT_BOTTOM_TOP        = 5, // bottom field followed by top\n    PIC_STRUCT_TOP_BOTTOM_TOP    = 6, // top field, bottom field, top field repeated\n    PIC_STRUCT_BOTTOM_TOP_BOTTOM = 7, // bottom field, top field, bottom field repeated\n    PIC_STRUCT_DOUBLE            = 8, // double frame\n    PIC_STRUCT_TRIPLE            = 9, // triple frame\n};\n\ntypedef struct x264_hrd_t\n{\n    double cpb_initial_arrival_time;\n    double cpb_final_arrival_time;\n    double cpb_removal_time;\n\n    double dpb_output_time;\n} x264_hrd_t;\n\n/* Arbitrary user SEI:\n * Payload size is in bytes and the payload pointer must be valid.\n * Payload types and syntax can be found in Annex D of the H.264 Specification.\n * SEI payload alignment bits as described in Annex D must be included at the\n * end of the payload if needed.\n * The payload should not be NAL-encapsulated.\n * Payloads are written first in order of input, apart from in the case when HRD\n * is enabled where payloads are written after the Buffering Period SEI. */\n\ntypedef struct x264_sei_payload_t\n{\n    int payload_size;\n    int payload_type;\n    uint8_t *payload;\n} x264_sei_payload_t;\n\ntypedef struct x264_sei_t\n{\n    int num_payloads;\n    x264_sei_payload_t *payloads;\n    /* In: optional callback to free each payload AND x264_sei_payload_t when used. 
*/\n    void (*sei_free)( void* );\n} x264_sei_t;\n\ntypedef struct x264_image_t\n{\n    int     i_csp;       /* Colorspace */\n    int     i_plane;     /* Number of image planes */\n    int     i_stride[4]; /* Strides for each plane */\n    uint8_t *plane[4];   /* Pointers to each plane */\n} x264_image_t;\n\ntypedef struct x264_image_properties_t\n{\n    /* All arrays of data here are ordered as follows:\n     * each array contains one offset per macroblock, in raster scan order.  In interlaced\n     * mode, top-field MBs and bottom-field MBs are interleaved at the row level.\n     * Macroblocks are 16x16 blocks of pixels (with respect to the luma plane).  For the\n     * purposes of calculating the number of macroblocks, width and height are rounded up to\n     * the nearest 16.  If in interlaced mode, height is rounded up to the nearest 32 instead. */\n\n    /* In: an array of quantizer offsets to be applied to this image during encoding.\n     *     These are added on top of the decisions made by x264.\n     *     Offsets can be fractional; they are added before QPs are rounded to integer.\n     *     Adaptive quantization must be enabled to use this feature.  Behavior if quant\n     *     offsets differ between encoding passes is undefined. */\n    float *quant_offsets;\n    /* In: optional callback to free quant_offsets when used.\n     *     Useful if one wants to use a different quant_offset array for each frame. */\n    void (*quant_offsets_free)( void* );\n\n    /* In: optional array of flags for each macroblock.\n     *     Allows specifying additional information for the encoder such as which macroblocks\n     *     remain unchanged.  
Usable flags are listed below.\n     *     x264_param_t.analyse.b_mb_info must be set to use this, since x264 needs to track\n     *     extra data internally to make full use of this information.\n     *\n     * Out: if b_mb_info_update is set, x264 will update this array as a result of encoding.\n     *\n     *      For \"MBINFO_CONSTANT\", it will remove this flag on any macroblock whose decoded\n     *      pixels have changed.  This can be useful for e.g. noting which areas of the\n     *      frame need to actually be blitted. Note: this intentionally ignores the effects\n     *      of deblocking for the current frame, which should be fine unless one needs exact\n     *      pixel-perfect accuracy.\n     *\n     *      Results for MBINFO_CONSTANT are currently only set for P-frames, and are not\n     *      guaranteed to enumerate all blocks which haven't changed.  (There may be false\n     *      negatives, but no false positives.)\n     */\n    uint8_t *mb_info;\n    /* In: optional callback to free mb_info when used. */\n    void (*mb_info_free)( void* );\n\n    /* The macroblock is constant and remains unchanged from the previous frame. */\n    #define X264_MBINFO_CONSTANT   (1<<0)\n    /* More flags may be added in the future. 
*/\n\n    /* Out: SSIM of the frame luma (if x264_param_t.b_ssim is set) */\n    double f_ssim;\n    /* Out: Average PSNR of the frame (if x264_param_t.b_psnr is set) */\n    double f_psnr_avg;\n    /* Out: PSNR of Y, U, and V (if x264_param_t.b_psnr is set) */\n    double f_psnr[3];\n\n    /* Out: Average effective CRF of the encoded frame */\n    double f_crf_avg;\n} x264_image_properties_t;\n\ntypedef struct x264_picture_t\n{\n    /* In: force picture type (if not auto)\n     *     If x264 encoding parameters are violated in the forcing of picture types,\n     *     x264 will correct the input picture type and log a warning.\n     * Out: type of the picture encoded */\n    int     i_type;\n    /* In: force quantizer for != X264_QP_AUTO */\n    int     i_qpplus1;\n    /* In: pic_struct, for pulldown/doubling/etc...used only if b_pic_struct=1.\n     *     use pic_struct_e for pic_struct inputs\n     * Out: pic_struct element associated with frame */\n    int     i_pic_struct;\n    /* Out: whether this frame is a keyframe.  Important when using modes that result in\n     * SEI recovery points being used instead of IDR frames. */\n    int     b_keyframe;\n    /* In: user pts, Out: pts of encoded picture (user)*/\n    int64_t i_pts;\n    /* Out: frame dts. When the pts of the first frame is close to zero,\n     *      initial frames may have a negative dts which must be dealt with by any muxer */\n    int64_t i_dts;\n    /* In: custom encoding parameters to be set from this frame forwards\n           (in coded order, not display order). If NULL, continue using\n           parameters from the previous frame.  Some parameters, such as\n           aspect ratio, can only be changed per-GOP due to the limitations\n           of H.264 itself; in this case, the caller must force an IDR frame\n           if it needs the changed parameter to apply immediately. */\n    x264_param_t *param;\n    /* In: raw image data */\n    /* Out: reconstructed image data.  
x264 may skip part of the reconstruction process,\n            e.g. deblocking, in frames where it isn't necessary.  To force complete\n            reconstruction, at a small speed cost, set b_full_recon. */\n    x264_image_t img;\n    /* In: optional information to modify encoder decisions for this frame\n     * Out: information about the encoded frame */\n    x264_image_properties_t prop;\n    /* Out: HRD timing information. Output only when i_nal_hrd is set. */\n    x264_hrd_t hrd_timing;\n    /* In: arbitrary user SEI (e.g subtitles, AFDs) */\n    x264_sei_t extra_sei;\n    /* private user data. copied from input to output frames. */\n    void *opaque;\n} x264_picture_t;\n\n/* x264_picture_init:\n *  initialize an x264_picture_t.  Needs to be done if the calling application\n *  allocates its own x264_picture_t as opposed to using x264_picture_alloc. */\nvoid x264_picture_init( x264_picture_t *pic );\n\n/* x264_picture_alloc:\n *  alloc data for a picture. You must call x264_picture_clean on it.\n *  returns 0 on success, or -1 on malloc failure or invalid colorspace. */\nint x264_picture_alloc( x264_picture_t *pic, int i_csp, int i_width, int i_height );\n\n/* x264_picture_clean:\n *  free associated resource for a x264_picture_t allocated with\n *  x264_picture_alloc ONLY */\nvoid x264_picture_clean( x264_picture_t *pic );\n\n/****************************************************************************\n * Encoder functions\n ****************************************************************************/\n\n/* Force a link error in the case of linking against an incompatible API version.\n * Glue #defines exist to force correct macro expansion; the final output of the macro\n * is x264_encoder_open_##X264_BUILD (for purposes of dlopen). 
*/\n#define x264_encoder_glue1(x,y) x##y\n#define x264_encoder_glue2(x,y) x264_encoder_glue1(x,y)\n#define x264_encoder_open x264_encoder_glue2(x264_encoder_open_,X264_BUILD)\n\n/* x264_encoder_open:\n *      create a new encoder handler, all parameters from x264_param_t are copied */\nx264_t *x264_encoder_open( x264_param_t * );\n\n/* x264_encoder_reconfig:\n *      various parameters from x264_param_t are copied.\n *      this takes effect immediately, on whichever frame is encoded next;\n *      due to delay, this may not be the next frame passed to encoder_encode.\n *      if the change should apply to some particular frame, use x264_picture_t->param instead.\n *      returns 0 on success, negative on parameter validation error.\n *      not all parameters can be changed; see the actual function for a detailed breakdown.\n *\n *      since not all parameters can be changed, moving from preset to preset may not always\n *      fully copy all relevant parameters, but should still work usably in practice. however,\n *      more so than for other presets, many of the speed shortcuts used in ultrafast cannot be\n *      switched out of; using reconfig to switch between ultrafast and other presets is not\n *      recommended without a more fine-grained breakdown of parameters to take this into account. */\nint     x264_encoder_reconfig( x264_t *, x264_param_t * );\n/* x264_encoder_parameters:\n *      copies the current internal set of parameters to the pointer provided\n *      by the caller.  useful when the calling application needs to know\n *      how x264_encoder_open has changed the parameters, or the current state\n *      of the encoder after multiple x264_encoder_reconfig calls.\n *      note that the data accessible through pointers in the returned param struct\n *      (e.g. filenames) should not be modified by the calling application. 
*/\nvoid    x264_encoder_parameters( x264_t *, x264_param_t * );\n/* x264_encoder_headers:\n *      return the SPS and PPS that will be used for the whole stream.\n *      *pi_nal is the number of NAL units outputted in pp_nal.\n *      returns the number of bytes in the returned NALs.\n *      returns negative on error.\n *      the payloads of all output NALs are guaranteed to be sequential in memory. */\nint     x264_encoder_headers( x264_t *, x264_nal_t **pp_nal, int *pi_nal );\n/* x264_encoder_encode:\n *      encode one picture.\n *      *pi_nal is the number of NAL units outputted in pp_nal.\n *      returns the number of bytes in the returned NALs.\n *      returns negative on error and zero if no NAL units returned.\n *      the payloads of all output NALs are guaranteed to be sequential in memory. */\nint     x264_encoder_encode( x264_t *, x264_nal_t **pp_nal, int *pi_nal, x264_picture_t *pic_in, x264_picture_t *pic_out );\n/* x264_encoder_close:\n *      close an encoder handler */\nvoid    x264_encoder_close( x264_t * );\n/* x264_encoder_delayed_frames:\n *      return the number of currently delayed (buffered) frames\n *      this should be used at the end of the stream, to know when you have all the encoded frames. */\nint     x264_encoder_delayed_frames( x264_t * );\n/* x264_encoder_maximum_delayed_frames( x264_t *h ):\n *      return the maximum number of delayed (buffered) frames that can occur with the current\n *      parameters. */\nint     x264_encoder_maximum_delayed_frames( x264_t *h );\n/* x264_encoder_intra_refresh:\n *      If an intra refresh is not in progress, begin one with the next P-frame.\n *      If an intra refresh is in progress, begin one as soon as the current one finishes.\n *      Requires that b_intra_refresh be set.\n *\n *      Useful for interactive streaming where the client can tell the server that packet loss has\n *      occurred.  
In this case, keyint can be set to an extremely high value so that intra refreshes\n *      only occur when calling x264_encoder_intra_refresh.\n *\n *      In multi-pass encoding, if x264_encoder_intra_refresh is called differently in each pass,\n *      behavior is undefined.\n *\n *      Should not be called during an x264_encoder_encode. */\nvoid    x264_encoder_intra_refresh( x264_t * );\n/* x264_encoder_invalidate_reference:\n *      An interactive error resilience tool, designed for use in a low-latency one-encoder-few-clients\n *      system.  When the client has packet loss or otherwise incorrectly decodes a frame, the encoder\n *      can be told with this command to \"forget\" the frame and all frames that depend on it, referencing\n *      only frames that occurred before the loss.  This will force a keyframe if no frames are left to\n *      reference after the aforementioned \"forgetting\".\n *\n *      It is strongly recommended to use a large i_dpb_size in this case, which allows the encoder to\n *      keep around extra, older frames to fall back on in case more recent frames are all invalidated.\n *      Unlike increasing i_frame_reference, this does not increase the number of frames used for motion\n *      estimation and thus has no speed impact.  It is also recommended to set a very large keyframe\n *      interval, so that keyframes are not used except as necessary for error recovery.\n *\n *      x264_encoder_invalidate_reference is not currently compatible with the use of B-frames or intra\n *      refresh.\n *\n *      In multi-pass encoding, if x264_encoder_invalidate_reference is called differently in each pass,\n *      behavior is undefined.\n *\n *      Should not be called during an x264_encoder_encode, but multiple calls can be made simultaneously.\n *\n *      Returns 0 on success, negative on failure. */\nint x264_encoder_invalidate_reference( x264_t *, int64_t pts );\n\n#ifdef __cplusplus\n}\n#endif\n\n#endif\n"
  },
  {
    "path": "Android_demo/prebuilt/include/x264_config.h",
    "content": "#define X264_BIT_DEPTH     8\n#define X264_GPL           1\n#define X264_INTERLACED    1\n#define X264_CHROMA_FORMAT 0\n#define X264_REV 2744\n#define X264_REV_DIFF 4\n#define X264_VERSION \" r2744+4M e192189\"\n#define X264_POINTVER \"0.148.2744+4M e192189\"\n"
  },
  {
    "path": "README.md",
    "content": "# *KSC265codec(v2.6.1.3)*\n\nIt should be noted that, the libraries used in our ios/android demos have expiration time.\n\n## ENCODER\nUsage: command line examples    \n```\nAppEncoder_x64 -i p_3840x2160_50.yuv -preset ultrafast/veryfast/slow/veryslow -latency offline -wdt 3840 -hgt 2160 -fr 50 -rc 1 -br 20000 -iper 128 -b test.265\nAppEncoder_x64 -i p_3840x2160_50.yuv -preset ultrafast/veryfast/slow/veryslow -latency offline -wdt 3840 -hgt 2160 -fr 50 -rc 0 -qp 27 -iper 128 -b test.265\nAppEncoder_x64 -i p_3840x2160_50.yuv -preset ultrafast/veryfast/slow/veryslow -latency offline -wdt 3840 -hgt 2160 -fr 50 -rc 3 -crf 24 -iper 128 -b test.265\n```\n\n\n### Basic parameters:\n\n-preset [preset_value], \n\nwhich specifies the encoding speed by the character string [preset_value], among strings of \"superfast\", \"veryfast\", \"fast\", \"medium\", \"slow\", \"veryslow\" and \"placebo\".\n\n-latency [latency_value],\n\nwhich specifies the encoding latency by the character string [latency_value], among strings of \"zerolatency\", \"livestreaming\", \"offline\". Note that, in the presets of ultrafast~veryfast, the latency under \"livestreaming\" and \"offline\" are the same.\n\n-i [input_filename], \n\nwhich specifies the address of the input YUV file in 4:2:0 sampling format by a character string [input_filename].\n\n-wdt [width], \n\nwhich specifies the image width of the input video by a positive integer value [width]. \n\n-hgt [height], \n\nwhich specifies the image height of the input video by a positive integer value [height].\n\n-fr [framerate], \n\nwhich specifies the frame rate of the input video by a positive integer value [framerate].\n\n-iper [intraperiod], \n\nwhich specifies the maximum distances between consecutive I pictures by a positive integer value [intraperiod].\n\n-rc [rctype], \n\nwhich specifies the rate control type by the positive integer value [rctype] valuing among values 0(fixed qp), 1(cbr), 2(abr) and 3(crf). 
There are four cases:\n* -br [bitrate] should be followed. If [rctype] equals to 1 or 2, a parameter -br [bitrate] should be followed and specifies the target encoding bit-rate by the positive value [bitrate] (kbps,kilo bit rate per second). \n* -qp [qp_value] should be followed. If [rctype] equals to 0, a parameter -qp [qp_value] should be followed and specifies the target encoding quantization parameter by the positive value [qp_value] ranging from 0 to 51. \n* -crf [crf_value] should be followed. If [rctype] equals to 3, a parameter -crf [crf_value] should be followed and specifies the target crf parameter by the positive value [crf_value] ranging from 0 to 51. \n\n-b [stream_filename], \n\nwhich specifies the address of the output stream file in HEVC/H.265 format by a character string [stream_filename]. Default: no stream is output.\n\n### Optional parameters:\n\n-v or -V [version],\n\nwhich is utilized to print the version and copyright of the encoder.\n\n-psnr [psnrcalc],\n\nwhich specifies psnr calculation method by a non-negative value [psnrcalc], and\n* 0 (as a default value) means disabling psnr calculation,\n* 1 means enabling psnr calculation and outputting the overall psnr result. \n* 2 means enabling psnr calculation and outputting psnr info for each frame.\n\n-o [reconstructYUV], \n\nwhich specifies the address of the reconstructed yuv file in 4:2:0 format by a character string [reconstructYUV]. Default: no reconstructed YUV file is output.\n\n-frms [frame_no], \n\nwhich specifies the number of frames to be encoded for the input video by a positive integer value [frame_no]. Default: [frame_no] = -1, when all input frames are encoded.\n\n-threads [thread_no], \n\nwhich specifies the number of threads used to encode the input video by a non-negative value [thread_no]. 
Default: [thread_no] = 0, when all available threads can be utilized.\n\n-bframes[value1], -vbv-maxrate [value2] , -vbv-bufsize[value3],\n\nwhich specifies similar meanings as similar values defined in x264\n\n\n\n## DECODER\nUsage: command line examples    \n```\nAppDecoder_x64.exe -b test.265 -o test.yuv -threads 2\n```\n\n### Basic parameters:\n\n-v or -V [version]\n\nwhich specifies the decoder version and copyright.\n\n-b [bitstream],\n\nwhich specifies input bit-stream file by a character string [bitstream].\n\n### Optional parameters:\n\n-o [output],\n\nwhich specifies the decoded yuv file name by a character string [output].\n\n-threads [threadnum],\n\nwhich specifies the number of threads used for decoding process by a non-negative value [threadnum]. Default: [threadnum] = 0, when all available threads can be utilized.\n\n## Performance of decoder\n\nKSC265 decoder is compared with openHEVC in ffmpeg on ARM64@Android, ARM64@iOS and x86 platforms.\n\n| decoding  speed <br> ( ksc265inFFmpeg / openHEVCInFFmpeg) | iOS<br>(ipad mini2) | Android<br>(VIVOxplay5a) | PC<br>(E5-2690 v3  @ 2.60GHz) |\n| ---------------------------------------- | ------------------- | ------------------------ | ----------------------------- |\n| 1 thread                                 | 2.90                | 2.85                     | 2.11                          |\n| full threads                             | 2.69                | 2.99                     | 3.89                          |\n\nOn average, as above table shows, KSC265 decoder can achieve more than 2/2.5 times the speed of openHEVC in ffmpeg on x86/ARM, and details can be found in the excels for decoding performance. 
Moreover, as following table shows, the decoding speed of KSC265 now can well support the 1080p@25fps applications.\n\n| decoding  speed of ksc265inFFmpeg <br> (in frames per second) | iOS<br>(ipad mini2) | Android<br>(VIVOxplay5a) | PC<br>(E5-2690 v3  @ 2.60GHz) |\n| ---------------------------------------- | ------------------- | ------------------------ | ----------------------------- |\n| 1920x1080 @  1thread                     | 32.06               | 32.94                    | 177.19                        |\n| 1280x720 @  1thread                      | 77.88               | 89.60                    | 346.24                        |\n| 1920x1080 @  full threads                | 51.13               | 90.44                    | 939.25                        |\n| 1280x720 @  full threads                 | 120.20              | 187.16                   | 1976.24                       |\n\n\n\n## Performance of encoder\n\nKSC265 encoder is firstly compared with X265-v2.4,  X264 and vp9 on Win7@i7-4790@4threads using following parameters:\n\n```\nx264.exe -o out.264 BQSquare_416x240_60.yuv --input-res 416x240 --preset [superfast|veryfast|slow|placebo] --fps [framerate] --profile high --aq-mode 0 --no-psy --psnr  --bitrate [btrNumber] --threads 1/0 --keyint [framerate * 10] --frames 1000000\nAppEncoder_x64.exe -b out.265 -i BQSquare_416x240_60.yuv -preset [veryfast|slow|veryslow] -threads 1/0 -psnr 2 -rc 1 -br [btrNumber] -frms 1000000 -iper [framerate * 10]\nx265.exe -o out.265 --input BQSquare_416x240_60.yuv --input-res 416x240 --preset [ultrafast|ultrafast|slow|veryslow] --fps [framerate] --aq-mode 0 --no-psy-rd --no-psy-rdoq  --psnr  --bitrate [btrNumber] --frame-threads [1|0] --no-wpp/--wpp --keyint [framerate * 10] --frames 1000000\nvpxenc.exe --codec=vp9 --passes=1 --[rt|goog|best] --fps=[framerate]/1 --i420 --end-usage=vbr --target-bitrate=[btrNumber] --kf-max-dist=[framerate * 10] --cpu-used=8 --threads=[1|4] --psnr -w 416 -h 240 -o out.vp9 
BQSquare_416x240_60.yuv --frame-parallel=0\n```\n\nThen on test sequences of JCTVC CLASS-A ~ CLASS-E, and one class of game videos@30fps, compared to x264(20161020), x265-v2.4 and vp9 in the speed form of encoded frames per second (fps), the average performance of KS265 can be summarized by the follows. \n\n|                          | KSC265  vs. X264        | KSC265 vs. X264        | KSC265 vs. X265         | KSC265 vs. X265        | KSC265 vs. vp9          | KSC265 vs. vp9        |\n| ------------------------ | ----------------------- | ---------------------- | ----------------------- | ---------------------- | ----------------------- | --------------------- |\n| full-thread  comparisons | Bitsaving @same quality | Speedup @same  quality | Bitsaving @same quality | Speedup @same  bitrate | Bitsaving @same quality | Speedup @same bitrate |\n| RealTime+                | 43.7%                   | -5.4%                  | 26.0%                   | 212.6%                 | 30.5%                   | 147.9%                |\n| RealTime                 | 41.2%                   | 8.9%                   | 34.5%                   | 123.2%                 | 38.0%                   | 75.5%                 |\n| Transcode                | 36.2%                   | -5.1%                  | 23.7%                   | 199.1%                 | 32.6%                   | 179.0%                |\n| Best Ratio               | 35.3%                   | 83.3%                  | 10.9%                   | 84.1%                  | 23.1%                   | 778.0%                |\n\nSecondly, then on test sequences of JCTVC CLASS-A ~ CLASS-E, and one class of showself videos@15fps, compared to x264(20161020), x265-v2.4 and vp9 in the speed form of encoded frames per second (fps), the average performance of KS265 on OPPOR9s@1thread and ipad mini2 @1thread can be summarized by the follows. 
\n\n|                          | Android&iOS             | Android                | iOS                    |\n| ------------------------ | ----------------------- | ---------------------- | ---------------------- |\n| full-thread  comparisons | Bitsaving @same quality | Speedup @same  quality | Speedup @same  quality |\n| superfast                | 43.9%                   | -4.1%                  | 6.3%                   |\n| veryfast                 | 43.4%                   | -2.2%                  | 1.2%                   |\n| fast                     | 38.0%                   | 7.3%                   | 4.7%                   |\n| medium                   | 36.2%                   | 4.8%                   | 7.1%                   |\n\nThe details are described in the excel document."
  },
  {
    "path": "centos_x64/READme.txt",
    "content": "CentOS release 6.5\ngcc 4.8.2-15"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/AppDelegate.h",
    "content": "//\n//  AppDelegate.h\n//  KSY265CodecDemo_iOS\n//\n//  Created by 江东 on 17/3/17.\n//  Copyright © 2017年 江东. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n\n@interface AppDelegate : UIResponder <UIApplicationDelegate>\n\n@property (strong, nonatomic) UIWindow *window;\n\n\n@end\n\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/AppDelegate.m",
    "content": "//\n//  AppDelegate.m\n//  KSY265CodecDemo_iOS\n//\n//  Created by 江东 on 17/3/17.\n//  Copyright © 2017年 江东. All rights reserved.\n//\n\n#import \"AppDelegate.h\"\n\n@interface AppDelegate ()\n\n@end\n\n@implementation AppDelegate\n\n\n- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {\n    // Override point for customization after application launch.\n    return YES;\n}\n\n\n- (void)applicationWillResignActive:(UIApplication *)application {\n    // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.\n    // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game.\n}\n\n\n- (void)applicationDidEnterBackground:(UIApplication *)application {\n    // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.\n    // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.\n}\n\n\n- (void)applicationWillEnterForeground:(UIApplication *)application {\n    // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background.\n}\n\n\n- (void)applicationDidBecomeActive:(UIApplication *)application {\n    // Restart any tasks that were paused (or not yet started) while the application was inactive. 
If the application was previously in the background, optionally refresh the user interface.\n}\n\n\n- (void)applicationWillTerminate:(UIApplication *)application {\n    // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.\n}\n\n\n@end\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/Assets.xcassets/AppIcon.appiconset/Contents.json",
    "content": "{\n  \"images\" : [\n    {\n      \"idiom\" : \"iphone\",\n      \"size\" : \"20x20\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"idiom\" : \"iphone\",\n      \"size\" : \"20x20\",\n      \"scale\" : \"3x\"\n    },\n    {\n      \"idiom\" : \"iphone\",\n      \"size\" : \"29x29\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"idiom\" : \"iphone\",\n      \"size\" : \"29x29\",\n      \"scale\" : \"3x\"\n    },\n    {\n      \"idiom\" : \"iphone\",\n      \"size\" : \"40x40\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"idiom\" : \"iphone\",\n      \"size\" : \"40x40\",\n      \"scale\" : \"3x\"\n    },\n    {\n      \"idiom\" : \"iphone\",\n      \"size\" : \"60x60\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"idiom\" : \"iphone\",\n      \"size\" : \"60x60\",\n      \"scale\" : \"3x\"\n    },\n    {\n      \"idiom\" : \"ipad\",\n      \"size\" : \"20x20\",\n      \"scale\" : \"1x\"\n    },\n    {\n      \"idiom\" : \"ipad\",\n      \"size\" : \"20x20\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"idiom\" : \"ipad\",\n      \"size\" : \"29x29\",\n      \"scale\" : \"1x\"\n    },\n    {\n      \"idiom\" : \"ipad\",\n      \"size\" : \"29x29\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"idiom\" : \"ipad\",\n      \"size\" : \"40x40\",\n      \"scale\" : \"1x\"\n    },\n    {\n      \"idiom\" : \"ipad\",\n      \"size\" : \"40x40\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"idiom\" : \"ipad\",\n      \"size\" : \"76x76\",\n      \"scale\" : \"1x\"\n    },\n    {\n      \"idiom\" : \"ipad\",\n      \"size\" : \"76x76\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"idiom\" : \"ipad\",\n      \"size\" : \"83.5x83.5\",\n      \"scale\" : \"2x\"\n    }\n  ],\n  \"info\" : {\n    \"version\" : 1,\n    \"author\" : \"xcode\"\n  }\n}"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/Assets.xcassets/Contents.json",
    "content": "{\n  \"info\" : {\n    \"version\" : 1,\n    \"author\" : \"xcode\"\n  }\n}"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/Assets.xcassets/first.imageset/Contents.json",
    "content": "{\n  \"images\" : [\n    {\n      \"idiom\" : \"universal\",\n      \"filename\" : \"first.pdf\"\n    }\n  ],\n  \"info\" : {\n    \"version\" : 1,\n    \"author\" : \"xcode\"\n  }\n}"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/Assets.xcassets/recycle-bin.imageset/Contents.json",
    "content": "{\n  \"images\" : [\n    {\n      \"idiom\" : \"universal\",\n      \"filename\" : \"recycle-bin.png\",\n      \"scale\" : \"1x\"\n    },\n    {\n      \"idiom\" : \"universal\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"idiom\" : \"universal\",\n      \"scale\" : \"3x\"\n    }\n  ],\n  \"info\" : {\n    \"version\" : 1,\n    \"author\" : \"xcode\"\n  }\n}"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/Assets.xcassets/second.imageset/Contents.json",
    "content": "{\n  \"images\" : [\n    {\n      \"idiom\" : \"universal\",\n      \"filename\" : \"second.pdf\"\n    }\n  ],\n  \"info\" : {\n    \"version\" : 1,\n    \"author\" : \"xcode\"\n  }\n}"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/Base.lproj/LaunchScreen.storyboard",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n<document type=\"com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB\" version=\"3.0\" toolsVersion=\"11134\" systemVersion=\"15F34\" targetRuntime=\"iOS.CocoaTouch\" propertyAccessControl=\"none\" useAutolayout=\"YES\" launchScreen=\"YES\" useTraitCollections=\"YES\" colorMatched=\"YES\" initialViewController=\"01J-lp-oVM\">\n    <dependencies>\n        <plugIn identifier=\"com.apple.InterfaceBuilder.IBCocoaTouchPlugin\" version=\"11106\"/>\n        <capability name=\"documents saved in the Xcode 8 format\" minToolsVersion=\"8.0\"/>\n    </dependencies>\n    <scenes>\n        <!--View Controller-->\n        <scene sceneID=\"EHf-IW-A2E\">\n            <objects>\n                <viewController id=\"01J-lp-oVM\" sceneMemberID=\"viewController\">\n                    <layoutGuides>\n                        <viewControllerLayoutGuide type=\"top\" id=\"Llm-lL-Icb\"/>\n                        <viewControllerLayoutGuide type=\"bottom\" id=\"xb3-aO-Qok\"/>\n                    </layoutGuides>\n                    <view key=\"view\" contentMode=\"scaleToFill\" id=\"Ze5-6b-2t3\">\n                        <rect key=\"frame\" x=\"0.0\" y=\"0.0\" width=\"375\" height=\"667\"/>\n                        <autoresizingMask key=\"autoresizingMask\" widthSizable=\"YES\" heightSizable=\"YES\"/>\n                        <color key=\"backgroundColor\" red=\"1\" green=\"1\" blue=\"1\" alpha=\"1\" colorSpace=\"custom\" customColorSpace=\"sRGB\"/>\n                    </view>\n                </viewController>\n                <placeholder placeholderIdentifier=\"IBFirstResponder\" id=\"iYj-Kq-Ea1\" userLabel=\"First Responder\" sceneMemberID=\"firstResponder\"/>\n            </objects>\n            <point key=\"canvasLocation\" x=\"53\" y=\"375\"/>\n        </scene>\n    </scenes>\n</document>\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/Base.lproj/Main.storyboard",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n<document type=\"com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB\" version=\"3.0\" toolsVersion=\"11762\" systemVersion=\"15G31\" targetRuntime=\"iOS.CocoaTouch\" propertyAccessControl=\"none\" useAutolayout=\"YES\" useTraitCollections=\"YES\" colorMatched=\"YES\" initialViewController=\"49e-Tb-3d3\">\n    <device id=\"retina4_7\" orientation=\"portrait\">\n        <adaptation id=\"fullscreen\"/>\n    </device>\n    <dependencies>\n        <deployment identifier=\"iOS\"/>\n        <plugIn identifier=\"com.apple.InterfaceBuilder.IBCocoaTouchPlugin\" version=\"11757\"/>\n        <capability name=\"documents saved in the Xcode 8 format\" minToolsVersion=\"8.0\"/>\n    </dependencies>\n    <scenes>\n        <!--编码-->\n        <scene sceneID=\"hNz-n2-bh7\">\n            <objects>\n                <viewController id=\"9pv-A4-QxB\" customClass=\"FirstViewController\" sceneMemberID=\"viewController\">\n                    <layoutGuides>\n                        <viewControllerLayoutGuide type=\"top\" id=\"Ia1-K6-d13\"/>\n                        <viewControllerLayoutGuide type=\"bottom\" id=\"4ug-Mw-9AY\"/>\n                    </layoutGuides>\n                    <view key=\"view\" contentMode=\"scaleToFill\" id=\"tsR-hK-woN\">\n                        <rect key=\"frame\" x=\"0.0\" y=\"0.0\" width=\"375\" height=\"667\"/>\n                        <autoresizingMask key=\"autoresizingMask\" widthSizable=\"YES\" heightSizable=\"YES\"/>\n                        <color key=\"backgroundColor\" red=\"1\" green=\"1\" blue=\"1\" alpha=\"1\" colorSpace=\"custom\" customColorSpace=\"sRGB\"/>\n                    </view>\n                    <tabBarItem key=\"tabBarItem\" title=\"编码\" image=\"first\" id=\"acW-dT-cKf\"/>\n                </viewController>\n                <placeholder placeholderIdentifier=\"IBFirstResponder\" id=\"W5J-7L-Pyd\" sceneMemberID=\"firstResponder\"/>\n            
</objects>\n            <point key=\"canvasLocation\" x=\"750\" y=\"-320\"/>\n        </scene>\n        <!--解码-->\n        <scene sceneID=\"wg7-f3-ORb\">\n            <objects>\n                <viewController id=\"8rJ-Kc-sve\" customClass=\"SecondViewController\" sceneMemberID=\"viewController\">\n                    <layoutGuides>\n                        <viewControllerLayoutGuide type=\"top\" id=\"L7p-HK-0SC\"/>\n                        <viewControllerLayoutGuide type=\"bottom\" id=\"Djb-ko-YwX\"/>\n                    </layoutGuides>\n                    <view key=\"view\" contentMode=\"scaleToFill\" id=\"QS5-Rx-YEW\">\n                        <rect key=\"frame\" x=\"0.0\" y=\"0.0\" width=\"375\" height=\"667\"/>\n                        <autoresizingMask key=\"autoresizingMask\" widthSizable=\"YES\" heightSizable=\"YES\"/>\n                        <subviews>\n                            <view contentMode=\"scaleToFill\" fixedFrame=\"YES\" translatesAutoresizingMaskIntoConstraints=\"NO\" id=\"abv-n4-IQk\" customClass=\"GLView\">\n                                <rect key=\"frame\" x=\"55\" y=\"194\" width=\"240\" height=\"128\"/>\n                                <autoresizingMask key=\"autoresizingMask\" flexibleMaxX=\"YES\" flexibleMaxY=\"YES\"/>\n                                <color key=\"backgroundColor\" white=\"1\" alpha=\"1\" colorSpace=\"calibratedWhite\"/>\n                            </view>\n                        </subviews>\n                        <color key=\"backgroundColor\" red=\"1\" green=\"1\" blue=\"1\" alpha=\"1\" colorSpace=\"custom\" customColorSpace=\"sRGB\"/>\n                    </view>\n                    <tabBarItem key=\"tabBarItem\" title=\"解码\" image=\"second\" id=\"cPa-gy-q4n\"/>\n                    <connections>\n                        <outlet property=\"playView\" destination=\"abv-n4-IQk\" id=\"7Tb-ix-HfO\"/>\n                        <outlet property=\"playerView\" destination=\"QS5-Rx-YEW\" id=\"MGp-CK-dmy\"/>\n        
            </connections>\n                </viewController>\n                <placeholder placeholderIdentifier=\"IBFirstResponder\" id=\"4Nw-L8-lE0\" sceneMemberID=\"firstResponder\"/>\n            </objects>\n            <point key=\"canvasLocation\" x=\"749.60000000000002\" y=\"359.37031484257875\"/>\n        </scene>\n        <!--Tab Bar Controller-->\n        <scene sceneID=\"yl2-sM-qoP\">\n            <objects>\n                <tabBarController id=\"49e-Tb-3d3\" sceneMemberID=\"viewController\">\n                    <nil key=\"simulatedBottomBarMetrics\"/>\n                    <tabBar key=\"tabBar\" contentMode=\"scaleToFill\" id=\"W28-zg-YXA\">\n                        <rect key=\"frame\" x=\"0.0\" y=\"975\" width=\"768\" height=\"49\"/>\n                        <autoresizingMask key=\"autoresizingMask\" widthSizable=\"YES\" flexibleMinY=\"YES\"/>\n                        <color key=\"backgroundColor\" red=\"0.0\" green=\"0.0\" blue=\"0.0\" alpha=\"0.0\" colorSpace=\"custom\" customColorSpace=\"sRGB\"/>\n                    </tabBar>\n                    <connections>\n                        <segue destination=\"9pv-A4-QxB\" kind=\"relationship\" relationship=\"viewControllers\" id=\"u7Y-xg-7CH\"/>\n                        <segue destination=\"8rJ-Kc-sve\" kind=\"relationship\" relationship=\"viewControllers\" id=\"lzU-1b-eKA\"/>\n                    </connections>\n                </tabBarController>\n                <placeholder placeholderIdentifier=\"IBFirstResponder\" id=\"HuB-VB-40B\" sceneMemberID=\"firstResponder\"/>\n            </objects>\n            <point key=\"canvasLocation\" x=\"0.0\" y=\"0.0\"/>\n        </scene>\n    </scenes>\n    <resources>\n        <image name=\"first\" width=\"30\" height=\"30\"/>\n        <image name=\"second\" width=\"30\" height=\"30\"/>\n    </resources>\n</document>\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/BaseViewController.h",
    "content": "//\n//  BaseViewController.h\n//  KSYVideoClipsDemo\n//\n//  Created by iVermisseDich on 2017/2/15.\n//  Copyright © 2017年 com.ksyun. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n\n@interface BaseViewController : UIViewController\n\n- (UISegmentedControl *)segmentedControlWithItems:(NSArray<__kindof NSString *> *) items;\n\n- (UIButton *)buttonWithTitle:(NSString *)title\n                       action:(SEL)action;\n\n- (UIButton *)addButtonWithTitle:(NSString *)title action:(SEL)action;\n\n- (void)addViews:(NSArray<__kindof UIView *> *)btns\n       withFrame:(CGRect)frame;\n\n- (void)addViews2:(NSArray<__kindof UIView *> *)btns\n       withFrame:(CGRect)frame;\n\n- (void)addViews3:(NSArray<__kindof UIView *> *)btns\n        withFrame:(CGRect)frame;\n\n- (void)addViews4:(NSArray<__kindof UIView *> *)btns\n        withFrame:(CGRect)frame;\n\n- (UILabel *)addLable:(NSString*)title;\n\n- (UITextField *)addTextField: (NSString*)text;\n\n- (void) toast:(NSString*)message;\n@end\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/BaseViewController.m",
    "content": "//\n//  BaseViewController.m\n//  KSYVideoClipsDemo\n//\n//  Created by iVermisseDich on 2017/2/15.\n//  Copyright © 2017年 com.ksyun. All rights reserved.\n//\n\n#import \"BaseViewController.h\"\n\n#define kSpace 20\n\n@interface BaseViewController ()\n\n@end\n\n@implementation BaseViewController\n\n- (void)viewDidLoad {\n    [super viewDidLoad];\n    // Do any additional setup after loading the view.\n}\n\n#pragma mark - tool funcs\n\n- (UIButton *)addButtonWithTitle:(NSString *)title action:(SEL)action{\n    UIButton *button = [UIButton buttonWithType:UIButtonTypeRoundedRect];\n    [button setTitle:title forState: UIControlStateNormal];\n    button.backgroundColor = [UIColor lightGrayColor];\n    [button addTarget:self action:action forControlEvents:UIControlEventTouchUpInside];\n    button.layer.masksToBounds  = YES;\n    button.layer.cornerRadius   = 5;\n    button.layer.borderColor    = [UIColor blackColor].CGColor;\n    button.layer.borderWidth    = 1;\n    [self.view addSubview:button];\n    return button;\n}\n\n// custom segmentedContrl\n- (UISegmentedControl *)segmentedControlWithItems: (NSArray<__kindof NSString *> *) items {\n    UISegmentedControl * segC;\n    segC = [[UISegmentedControl alloc] initWithItems:items];\n    segC.selectedSegmentIndex = 0;\n    segC.layer.cornerRadius = 5;\n    segC.backgroundColor = [UIColor lightGrayColor];\n    [segC addTarget:self\n             action:@selector(didSegCtrlValueChanged:)\n   forControlEvents:UIControlEventValueChanged];\n    return segC;\n}\n\n// custom button\n- (void)addViews:(NSArray<__kindof UIView *> *)btns withFrame:(CGRect)frame{\n    CGFloat width = (frame.size.width - (btns.count + 1) * 5) / btns.count;\n    CGFloat height = frame.size.height;\n    CGFloat xPos = frame.origin.x+5;\n    CGFloat yPos = frame.origin.y;\n    \n    for (UIView *view in btns) {\n        view.frame = CGRectMake(xPos, yPos, width, height);\n        [self.view addSubview:view];\n        xPos += width + 5;\n 
   }\n}\n\n- (void)addViews2:(NSArray<__kindof UIView *> *)btns withFrame:(CGRect)frame{\n    CGFloat width  = frame.size.width;\n    CGFloat height = frame.size.height;\n    CGFloat xPos = frame.origin.x+5;\n    CGFloat yPos = frame.origin.y;\n    \n    btns[0].frame = CGRectMake(xPos, yPos, width*2/3-10, height);\n    [self.view addSubview:btns[0]];\n    xPos += width*2/3;\n    btns[1].frame = CGRectMake(xPos, yPos, width/3-10, height);\n    [self.view addSubview:btns[1]];\n}\n\n- (void)addViews3:(NSArray<__kindof UIView *> *)btns withFrame:(CGRect)frame{\n    CGFloat width  = frame.size.width;\n    CGFloat height = frame.size.height;\n    CGFloat xPos = frame.origin.x+5;\n    CGFloat yPos = frame.origin.y;\n    \n    btns[0].frame = CGRectMake(xPos, yPos, width*1/3-10, height);\n    [self.view addSubview:btns[0]];\n    xPos += width*1/3;\n    btns[1].frame = CGRectMake(xPos, yPos, width*2/3-10, height);\n    [self.view addSubview:btns[1]];\n}\n\n- (void)addViews4:(NSArray<__kindof UIView *> *)btns withFrame:(CGRect)frame{\n    CGFloat width  = frame.size.width;\n    CGFloat height = frame.size.height;\n    CGFloat xPos = frame.origin.x+5;\n    CGFloat yPos = frame.origin.y;\n    \n    btns[0].frame = CGRectMake(xPos, yPos, width*1/3-10, height);\n    [self.view addSubview:btns[0]];\n    xPos += width*1/3;\n    btns[1].frame = CGRectMake(xPos, yPos, width*1/2-10, height);\n    [self.view addSubview:btns[1]];\n    xPos += width*1/2;\n    btns[2].frame = CGRectMake(xPos, yPos, width*1/6-10, height);\n    [self.view addSubview:btns[2]];\n}\n/*\n- (void)addMidViews:(NSArray<__kindof UIView *> *)btns withFrame:(CGRect)frame{\n    CGFloat width  = frame.size.width;\n    CGFloat height = frame.size.height;\n    CGFloat xPos = 5;\n    CGFloat yPos = frame.origin.y;\n    \n    btns[0].frame = CGRectMake(xPos, yPos, width*2/3-10, height);\n    [self.view addSubview:btns[0]];\n}*/\n\n- (UILabel *)addLable:(NSString*)title{\n    UILabel *  lbl = [[UILabel alloc] init];\n    
lbl.text = title;\n    lbl.textAlignment = NSTextAlignmentCenter;\n    [self.view addSubview:lbl];\n    lbl.backgroundColor = [UIColor colorWithWhite:0.8 alpha:0.3];\n    return lbl;\n}\n\n- (UITextField *)addTextField: (NSString*)text{\n    UITextField * textF;\n    textF = [[UITextField alloc] init];\n    textF.text =  text;\n    textF.borderStyle = UITextBorderStyleRoundedRect;\n    [self.view addSubview:textF];\n    return textF;\n}\n\n- (UIButton *)buttonWithTitle:(NSString *)title action:(SEL)action{\n    UIButton *button = [UIButton buttonWithType:UIButtonTypeRoundedRect];\n    [button setTitle:title forState:UIControlStateNormal];\n    button.backgroundColor = [UIColor lightGrayColor];\n    button.alpha = 0.9;\n    button.layer.cornerRadius = 10;\n    button.clipsToBounds = YES;\n    [button addTarget:self\n               action:action\n     forControlEvents:UIControlEventTouchUpInside];\n    \n    return button;\n}\n\n- (void) toast:(NSString*)message{\n    UIAlertView *toast = [[UIAlertView alloc] initWithTitle:nil\n                                                    message:message\n                                                   delegate:nil\n                                          cancelButtonTitle:nil\n                                          otherButtonTitles:nil, nil];\n    [toast show];\n    \n    double duration = 0.5; // duration in seconds\n    \n    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(duration * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{\n        [toast dismissWithClickedButtonIndex:0 animated:YES];\n    });\n}\n\n@end\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/DecoderHelperViewController.h",
    "content": "//\n//  SettingsViewController.h\n//  IPGateway\n//\n//  Created by Meng Shengbin on 2/1/12.\n//  Copyright (c) 2012 Peking University. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import \"BaseViewController.h\"\n\n@interface DecoderHelperViewController : BaseViewController\n\n@end\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/DecoderHelperViewController.m",
    "content": "//\n//  SettingsViewController.m\n//  IPGateway\n//\n//  Created by Meng Shengbin on 2/1/12.\n//  Copyright (c) 2012 Peking University. All rights reserved.\n//\n\n#import \"DecoderHelperViewController.h\"\n\n@implementation DecoderHelperViewController {\n}\n\n- (void)viewDidLoad {\n    [super viewDidLoad];\n    \n    [self setupUI];\n}\n\n\n- (void)setupUI{\n    self.view.backgroundColor = [UIColor whiteColor];\n\n    //add helper title text\n    UILabel  *lblSetting =  [self addLable:@\"帮助\"];\n    [self addViews:@[lblSetting] withFrame:CGRectMake(self.view.frame.size.width/3, 40, self.view.frame.size.width/3, 40)];\n    \n    //编码器使用说明\n    UILabel *encoderInstructions = [self addLable:@\"解码器使用说明：先设置解码器参数(其中选择渲染频率为-1(off)时,是关闭渲染功能)，然后选择文件，最后确定即开始解码\" ];\n    encoderInstructions.numberOfLines = 0;\n    encoderInstructions.textAlignment = NSTextAlignmentLeft;\n    [self addViews:@[encoderInstructions] withFrame:CGRectMake(0, 100, self.view.frame.size.width, 40*4)];\n    \n    //github地址\n    UILabel *gitHubSite = [self addLable:@\"github: https://github.com/ksvc/ks265codec\" ];\n    gitHubSite.numberOfLines = 0;\n    gitHubSite.textAlignment = NSTextAlignmentLeft;\n    [self addViews:@[gitHubSite] withFrame:CGRectMake(0, 300, self.view.frame.size.width, 40*2)];\n\n    UIButton *btnBack = [self addButtonWithTitle:@\"返回\" action:@selector(onDone:)];\n    [self addViews:@[btnBack] withFrame:CGRectMake(self.view.frame.size.width*2/3, 450, self.view.frame.size.width/3, 40)];\n}\n\n#pragma mark - actions\n- (void)onDone:(UIButton *)btn {\n    [self dismissViewControllerAnimated:FALSE completion:nil];\n}\n\n- (void)didReceiveMemoryWarning {\n    [super didReceiveMemoryWarning];\n    // Dispose of any resources that can be recreated.\n}\n\n@end\n\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/EncoderHelperViewController.h",
    "content": "//\n//  EncoderHelperViewController.h\n//  IPGateway\n//\n//  Created by Meng Shengbin on 2/1/12.\n//  Copyright (c) 2012 Peking University. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import \"BaseViewController.h\"\n\n@interface EncoderHelperViewController : BaseViewController\n\n@end\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/EncoderHelperViewController.m",
    "content": "//\n//  SettingsViewController.m\n//  IPGateway\n//\n//  Created by Meng Shengbin on 2/1/12.\n//  Copyright (c) 2012 Peking University. All rights reserved.\n//\n\n#import \"EncoderHelperViewController.h\"\n\n@implementation EncoderHelperViewController {\n}\n\n- (void)viewDidLoad {\n    [super viewDidLoad];\n    \n    [self setupUI];\n}\n\n\n- (void)setupUI{\n    self.view.backgroundColor = [UIColor whiteColor];\n\n    //add helper title text\n    UILabel  *lblSetting =  [self addLable:@\"帮助\"];\n    [self addViews:@[lblSetting] withFrame:CGRectMake(self.view.frame.size.width/3, 40, self.view.frame.size.width/3, 40)];\n    \n    //编码器使用说明\n    UILabel *encoderInstructions = [self addLable:@\"编码器使用说明：先设置编码器参数，然后选择文件，最后确定即开始编码\" ];\n    encoderInstructions.numberOfLines = 0;\n    encoderInstructions.textAlignment = NSTextAlignmentLeft;\n    [self addViews:@[encoderInstructions] withFrame:CGRectMake(0, 100, self.view.frame.size.width, 40*4)];\n\n    //github地址\n    UILabel *gitHubSite = [self addLable:@\"github: https://github.com/ksvc/ks265codec\" ];\n    gitHubSite.numberOfLines = 0;\n    gitHubSite.textAlignment = NSTextAlignmentLeft;\n    [self addViews:@[gitHubSite] withFrame:CGRectMake(0, 300, self.view.frame.size.width, 40*2)];\n\n    //add back button\n    UIButton *btnBack = [self addButtonWithTitle:@\"返回\" action:@selector(onDone:)];\n    [self addViews:@[btnBack] withFrame:CGRectMake(self.view.frame.size.width*2/3, 450, self.view.frame.size.width/3, 40)];\n}\n\n#pragma mark - actions\n- (void)onDone:(UIButton *)btn {\n    [self dismissViewControllerAnimated:FALSE completion:nil];\n}\n\n- (void)didReceiveMemoryWarning {\n    [super didReceiveMemoryWarning];\n    // Dispose of any resources that can be recreated.\n}\n\n@end\n\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/FirstViewController.h",
    "content": "//\n//  FirstViewController.h\n//  KSY265CodecDemo_iOS\n//\n//  Created by 江东 on 17/3/17.\n//  Copyright © 2017年 江东. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import \"BaseViewController.h\"\n\n@interface FirstViewController : BaseViewController\n\n\n@end\n\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/FirstViewController.m",
    "content": "//\n//  FirstViewController.m\n//  KSY265CodecDemo_iOS\n//\n//  Created by 江东 on 17/3/17.\n//  Copyright © 2017年 江东. All rights reserved.\n//\n\n#import \"FirstViewController.h\"\n#import \"SettingsEncoderViewController.h\"\n#import \"EncoderHelperViewController.h\"\n#import \"MoviesViewController.h\"\n#import \"MovieEncoder.h\"\n#import \"KSYMovieEncoder.h\"\n#import \"qy265enc.h\"\n#import \"x264.h\"\n\n@interface FirstViewController (){\n    UITextField *encoderFile;\n    UITextView *infoView;\n    SettingsEncoderViewController *setEncoderVC;\n    MoviesViewController *listVC;\n}\n\n@property (nonatomic, retain) MovieEncoder *enc;\n\n@end\n\n@implementation FirstViewController\n\n- (void)viewDidLoad {\n    [super viewDidLoad];\n    \n    [self setupUI];\n    \n    self.enc = nil;\n    \n    setEncoderVC = [[SettingsEncoderViewController alloc] initDefaultCfg];\n\n    listVC = [[MoviesViewController alloc] initWithSuffix:@\".yuv\"];\n    listVC.tableBlock = ^(NSString* filePath){\n        NSLog(@\"%@\", filePath);\n        encoderFile.text = filePath;\n    };\n}\n\n-(void)viewDidAppear:(BOOL)animated\n{\n    [super viewDidAppear:animated];\n    [self copyFile2Documents:@\"960x540_15\"];\n    [self copyFile2Documents:@\"1280x720_15\"];\n    [self copyFile2Documents:@\"640x480_15\"];\n}\n\n- (void)setupUI{\n    self.view.backgroundColor = [UIColor whiteColor];\n    \n    //add set button\n    UIButton *btnSet = [self addButtonWithTitle:@\"设置\" action:@selector(onSetEncoder:)];\n    //add help button\n    UIButton *btnHelp = [self addButtonWithTitle:@\"帮助\" action:@selector(onHelp:)];\n    //add encoder text\n    UILabel  *lblEncoder =  [self addLable:@\"KSC265编码器\"];\n    [self addViews:@[btnSet, lblEncoder, btnHelp] withFrame:CGRectMake(0, 40, self.view.frame.size.width, 40)];\n    //add browse file button\n    UIButton *selectBtn = [self addButtonWithTitle:@\"浏览(.yuv)文件\" action:@selector(didClickSelectBtn:)];\n    [self addViews:@[selectBtn] 
withFrame:CGRectMake(0, 120, self.view.frame.size.width/3, 40)];\n    //input encoder file\n    encoderFile = [self addTextField:NULL ];\n    UIButton *doneBtn =  [self addButtonWithTitle:@\"确定\" action:@selector(onDone:)];\n    [self addViews2:@[encoderFile,doneBtn] withFrame:CGRectMake(0, 180, self.view.frame.size.width, 40)];\n\n    // info\n    infoView = [[UITextView alloc] init];\n    infoView.editable = NO;\n    infoView.textAlignment = NSTextAlignmentLeft;\n    infoView.backgroundColor = [UIColor colorWithWhite:0.8 alpha:0.3];\n    infoView.font = [UIFont systemFontOfSize:13];\n    infoView.layer.cornerRadius = 2;\n    infoView.clipsToBounds = YES;\n    infoView.layoutManager.allowsNonContiguousLayout = NO;\n    [self addViews:@[infoView] withFrame:CGRectMake(0,  260, self.view.frame.size.width, self.view.frame.size.height- 260 - 20)];\n}\n\n- (void)startEncoder:(NSString *) filePath\n{\n    NSString *encoder = [[NSUserDefaults standardUserDefaults] valueForKey:@\"encoder\"];\n    if (self.enc == nil) {\n        if ([encoder isEqualToString:@\"x264\"]) {\n            self.enc = [[MovieEncoder alloc] init];\n            NSString* string = [NSString stringWithFormat:@\"%s\" ,X264_POINTVER];\n            [[NSUserDefaults standardUserDefaults] setValue:string  forKey:@\"version\"];\n        }\n        else {\n            self.enc = [[KSYMovieEncoder alloc] init];\n            NSString* string = [NSString stringWithFormat:@\"%s\" , strLibQy265Version];\n            [[NSUserDefaults standardUserDefaults] setValue:string forKey:@\"version\"];\n        }\n    }\n    \n    int ret = [self.enc openMovie:filePath];\n    if(ret != 0) {\n        UIAlertView * alert = [[UIAlertView alloc] initWithTitle:@\"Message\" message:@\"Get movie data failed! 
Please check your source or try again.\" delegate:nil cancelButtonTitle:@\"OK\" otherButtonTitles:nil];\n        [alert show];\n        return ;\n    } else {\n        int ret = [self.enc encoder];\n        if(ret != 0) {\n            UIAlertView * alert = [[UIAlertView alloc] initWithTitle:@\"Message\" message:@\"Can't encode this yuv! Please check its format.\" delegate:nil cancelButtonTitle:@\"OK\" otherButtonTitles:nil];\n            [alert show];\n            return ;\n        }\n        \n        NSString *encoder = [[NSUserDefaults standardUserDefaults] valueForKey:@\"encoder\"];\n        NSString *fps = [[NSUserDefaults standardUserDefaults] valueForKey:@\"fps\"];\n        NSString *bitRate = [[NSUserDefaults standardUserDefaults] valueForKey:@\"bitRate\"];\n        NSString *threads = [[NSUserDefaults standardUserDefaults] valueForKey:@\"threads\"];\n        NSString *profile = [[NSUserDefaults standardUserDefaults] valueForKey:@\"profile\"];\n        NSString *delayed = [[NSUserDefaults standardUserDefaults] valueForKey:@\"delayed\"];\n        NSString *version = [[NSUserDefaults standardUserDefaults] valueForKey:@\"version\"];\n        NSString *psnr = [[NSUserDefaults standardUserDefaults] valueForKey:@\"psnr\"];\n        \n        NSFileManager *manager = [NSFileManager defaultManager];\n        NSDictionary *outDic = [manager attributesOfItemAtPath:self.enc.out_file_string error:nil];\n        NSDictionary *inDic = [manager attributesOfItemAtPath:filePath error:nil];\n        unsigned long long outLength = outDic.fileSize;\n        unsigned long long inLength = inDic.fileSize;\n        \n        if ([encoder isEqualToString:@\"x264\"]) {\n            NSString *delayShow;\n            if ([delayed isEqualToString:@\"zerolatency\"]) {\n                delayShow = @\"--bframes 0 --tune zerolatency\";\n            }\n            else if([delayed isEqualToString:@\"livestreaming\"]){\n                delayShow = @\"--bframes 3\";\n            }\n           
 else{\n                delayShow = @\"--bframes 7\";\n            }\n            \n            infoView.text = [NSString stringWithFormat:@\"%@\\n编码器版本:%@\\n编码参数:%@ --preset %@ %@ --input-res %ldx%ld --fps %@ --threads %@ --bitrate %@ -o %@ %@\\n\\n编码时间:%.2lf s\\n编码帧数:%ld\\n编码速度:%.2lf f/s\\n压缩比:%llu\\nPSNR:%.2lf\\n\\n视频信息\\n码率:%.2lf kbps\\n分辨率:%@\\n帧率:%@\\n文件总时长:%.2lf s\\n\\n\\n\",\n                             infoView.text,\n                             version,\n                             encoder,\n                             profile,\n                             delayShow,\n                             self.enc.width,\n                             self.enc.height,\n                             fps,\n                             threads,\n                             bitRate,\n                             [self.enc.out_file_string lastPathComponent],\n                             encoderFile.text,\n                             self.enc.real_time,\n                             self.enc.frameNum,\n                             self.enc.realFPS,\n                             inLength/outLength,\n                             self.enc.avg_psnr,\n                             outLength*8.0/(1000.0*(self.enc.frameNum/[fps floatValue])),\n                             NSStringFromCGSize(CGSizeMake(self.enc.width, self.enc.height)),\n                             fps,\n                             self.enc.frameNum/[fps floatValue]];\n        }\n        else{\n            infoView.text = [NSString stringWithFormat:@\"%@\\n编码器版本:%@\\n编码参数:%@ -i %@ -preset %@ -latency %@ -wdt %ld -hgt %ld -fr %@ -threads %@ -br %@ -b %@\\n\\n编码时间:%.2lf s\\n编码帧数:%ld\\n编码速度:%.2lf f/s\\n压缩比:%llu\\nPSNR:%@\\n\\n视频信息\\n码率:%.2lf kbps\\n分辨率:%@\\n帧率:%@\\n文件总时长:%.2lf s\\n\\n\\n\",\n                             infoView.text,\n                             version,\n                             encoder,\n                             encoderFile.text,\n                             profile,\n          
                   delayed,\n                             self.enc.width,\n                             self.enc.height,\n                             fps,\n                             threads,\n                             bitRate,\n                             [self.enc.out_file_string lastPathComponent],\n                             self.enc.real_time,\n                             self.enc.frameNum,\n                             self.enc.realFPS,\n                             inLength/outLength,\n                             psnr,\n                             outLength*8.0/(1000.0*(self.enc.frameNum/[fps floatValue])),\n                             NSStringFromCGSize(CGSizeMake(self.enc.width, self.enc.height)),\n                             fps,\n                             self.enc.frameNum/[fps floatValue]];\n            \n        }\n        [infoView scrollRangeToVisible:NSMakeRange(infoView.text.length, 1)];\n    }\n    self.enc = nil;\n}\n\n#pragma mark - actions\n- (void)onSetEncoder:(UIButton *)btn {\n    [self presentViewController:setEncoderVC animated:true completion:nil];\n}\n- (void)onHelp:(UIButton *)btn {\n    EncoderHelperViewController *encoderHelperVC = [[EncoderHelperViewController alloc] init];\n    [self presentViewController:encoderHelperVC animated:true completion:nil];\n}\n- (void)didClickSelectBtn:(UIButton *)btn{\n    UINavigationController *naVC = [[UINavigationController alloc]initWithRootViewController: listVC];\n    [self presentViewController:naVC animated:YES completion:nil];\n}\n- (void)onDone:(UIButton *)btn {\n    [encoderFile resignFirstResponder];\n    NSString *dir = [NSHomeDirectory() stringByAppendingString:@\"/Documents/\"];\n    NSString *encFile = [dir stringByAppendingPathComponent:encoderFile.text];\n    [self startEncoder:encFile];\n}\n\n- (void)didReceiveMemoryWarning {\n    [super didReceiveMemoryWarning];\n    // Dispose of any resources that can be recreated.\n}\n\n-(NSString*) 
copyFile2Documents:(NSString*)fileName\n{\n    NSFileManager*fileManager =[NSFileManager defaultManager];\n    NSError*error;\n    NSArray*paths =NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,NSUserDomainMask, YES);\n    NSString*documentsDirectory =[paths objectAtIndex:0];\n    \n    NSString*destPath =[documentsDirectory stringByAppendingPathComponent:fileName];\n    destPath = [destPath stringByAppendingString:@\".yuv\"];\n    \n    //  如果目标目录也就是(Documents)目录没有数据库文件的时候，才会复制一份，否则不复制\n    if(![fileManager fileExistsAtPath:destPath]){\n        NSString* sourcePath =[[NSBundle mainBundle] pathForResource:fileName ofType:@\"yuv\"];\n        [fileManager copyItemAtPath:sourcePath toPath:destPath error:&error];\n    }\n    return destPath;\n}\n\n@end\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/GLRenderer.h",
    "content": "//\n//  GLRenderer.h\n//  HEVDecoder\n//\n//  Created by Shengbin Meng on 11/21/13.\n//  Copyright (c) 2013 Peking University. All rights reserved.\n//\n\n#import <Foundation/Foundation.h>\n#import <QuartzCore/QuartzCore.h>\n\n@protocol RenderStateListener\n- (void) bufferDone;\n@end\n\n@interface GLRenderer : NSObject\n\n- (void) setRenderStateListener:(id<RenderStateListener>) lis;\n\n- (int) resizeFromLayer:(CAEAGLLayer *)layer;\n\n- (void) render: (void*) data;\n\n@end\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/GLRenderer.m",
    "content": "//\n//  GLRenderer.m\n//  HEVDecoder\n//\n//  Created by Shengbin Meng on 11/21/13.\n//  Copyright (c) 2013 Peking University. All rights reserved.\n//\n\n#import \"GLRenderer.h\"\n#import \"MoviePlayer.h\"\n#import <GLKit/GLKit.h>\n\n#define ENABLE_LOGD 0\n#if ENABLE_LOGD\n#define LOGD(...)  printf(__VA_ARGS__)\n#else\n#define LOGD(...)\n#endif\n#define LOGI LOGD\n#define LOGE LOGD\n\nstatic const char gVertexShader[] =\n\"attribute vec4 a_position;\\n\"\n\"attribute vec2 a_texCoord;\\n\"\n\"varying vec2 v_tc;\\n\"\n\"void main()\\n\"\n\"{\\n\"\n\"\tgl_Position = a_position;\\n\"\n\"\tv_tc = a_texCoord;\\n\"\n\"}\\n\";\n\nstatic const char gFragmentShader[] =\n\"varying lowp vec2 v_tc;\\n\"\n\"uniform sampler2D u_texY;\\n\"\n\"uniform sampler2D u_texU;\\n\"\n\"uniform sampler2D u_texV;\\n\"\n\"void main(void)\\n\"\n\"{\\n\"\n\"mediump vec3 yuv;\\n\"\n\"lowp vec3 rgb;\\n\"\n\"yuv.x = texture2D(u_texY, v_tc).r;\\n\"\n\"yuv.y = texture2D(u_texU, v_tc).r - 0.5;\\n\"\n\"yuv.z = texture2D(u_texV, v_tc).r - 0.5;\\n\"\n\"rgb = mat3( 1,   1,   1,\\n\"\n\"0,       -0.39465,  2.03211,\\n\"\n\"1.13983,   -0.58060,  0) * yuv;\\n\"\n\"gl_FragColor = vec4(rgb, 1);\\n\"\n\"}\\n\";\n\nstatic void printGLString(const char *name, GLenum s) {\n    LOGI(\"GL %s = %s\\n\", name, glGetString(s););\n}\n\nstatic GLuint loadShader(GLenum shaderType, const char* pSource) {\n    GLuint shader = glCreateShader(shaderType);\n    if (shader) {\n        glShaderSource(shader, 1, &pSource, NULL);\n        glCompileShader(shader);\n        GLint compiled = 0;\n        glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);\n        if (!compiled) {\n            GLint infoLen = 0;\n            glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);\n            if (infoLen) {\n                char* buf = (char*) malloc(infoLen);\n                if (buf) {\n                    glGetShaderInfoLog(shader, infoLen, NULL, buf);\n                    LOGE(\"Could not compile shader 
%d:\\n%s\\n\",\n                         shaderType, buf);\n                    free(buf);\n                }\n                glDeleteShader(shader);\n                shader = 0;\n            }\n        }\n    }\n    return shader;\n}\n\nstatic GLuint createProgram(const char* pVertexSource, const char* pFragmentSource) {\n    GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource);\n    if (!vertexShader) {\n        return 0;\n    }\n    \n    GLuint fragmentShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource);\n    if (!fragmentShader) {\n        return 0;\n    }\n    \n    GLuint program = glCreateProgram();\n    if (program) {\n        glAttachShader(program, vertexShader);\n        glAttachShader(program, fragmentShader);\n        glLinkProgram(program);\n        GLint linkStatus = GL_FALSE;\n        glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);\n        if (linkStatus != GL_TRUE) {\n            GLint bufLength = 0;\n            glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);\n            if (bufLength) {\n                char* buf = (char*) malloc(bufLength);\n                if (buf) {\n                    glGetProgramInfoLog(program, bufLength, NULL, buf);\n                    LOGE(\"Could not link program:\\n%s\\n\", buf);\n                    free(buf);\n                }\n            }\n            glDeleteProgram(program);\n            program = 0;\n        }\n    }\n    return program;\n}\n\nstatic GLfloat vertexPositions[] = {\n\t-1.0, -1.0, 0.0,\n    1.0, -1.0, 0.0,\n\t-1.0,  1.0, 0.0,\n    1.0,  1.0, 0.0\n};\n\nstatic GLfloat textureCoords[] = {\n\t0.0, 1.0,\n\t1.0, 1.0,\n\t0.0, 0.0,\n\t1.0, 0.0\n};\n\n\n@implementation GLRenderer\n\n{\n    EAGLContext *context;\n    \n    GLint backingWidth, backingHeight;\n    \n    GLuint defaultFramebuffer, colorRenderbuffer;\n    \n    GLuint gProgram;\n    GLuint gTexIds[3];\n    GLuint gAttribPosition;\n    GLuint gAttribTexCoord;\n    GLuint gUniformTexY;\n    GLuint 
gUniformTexU;\n    GLuint gUniformTexV;\n    \n    id<RenderStateListener> listener;\n    \n    int needSetup;\n}\n\n- (id)init\n{\n    self = [super init];\n    if (self == nil) {\n        return nil;\n    }\n    \n    context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];\n    \n    if (!context || ![EAGLContext setCurrentContext:context]) {\n        return nil;\n    }\n    \n    printGLString(\"Version\", GL_VERSION);\n\tprintGLString(\"Vendor\", GL_VENDOR);\n\tprintGLString(\"Renderer\", GL_RENDERER);\n\tprintGLString(\"Extensions\", GL_EXTENSIONS);\n    \n\t// create and use our program\n\tgProgram = createProgram(gVertexShader, gFragmentShader);\n\tif (!gProgram) {\n\t\tLOGE(\"Could not create program. \\n\");\n\t\treturn nil;\n\t}\n    glUseProgram(gProgram);\n    \n    // get the location of attributes in our shader\n\tgAttribPosition = glGetAttribLocation(gProgram, \"a_position\");\n\tgAttribTexCoord = glGetAttribLocation(gProgram, \"a_texCoord\");\n    \n    // get the location of uniforms in our shader\n    gUniformTexY = glGetUniformLocation(gProgram, \"u_texY\");\n\tgUniformTexU = glGetUniformLocation(gProgram, \"u_texU\");\n\tgUniformTexV = glGetUniformLocation(gProgram, \"u_texV\");\n    \n\t// can enable only once\n\tglEnableVertexAttribArray(gAttribPosition);\n\tglEnableVertexAttribArray(gAttribTexCoord);\n    \n\t// set the value of uniforms (uniforms all have constant value)\n\tglUniform1i(gUniformTexY, 0);\n\tglUniform1i(gUniformTexU, 1);\n\tglUniform1i(gUniformTexV, 2);\n    \n\t// generate and set parameters for the textures\n    glEnable(GL_TEXTURE_2D);\n    glGenTextures(3, gTexIds);\n    for (int i = 0; i < 3; i++) {\n    \tglActiveTexture(GL_TEXTURE0 + i);\n    \tglBindTexture ( GL_TEXTURE_2D, gTexIds[i] );\n    \tglTexParameteri ( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );\n\t\tglTexParameteri ( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );\n\t\tglTexParameteri ( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, 
GL_CLAMP_TO_EDGE );\n\t\tglTexParameteri ( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE );\n    }\n    \n    // genarate frame and render buffers\n    glGenFramebuffers(1, &defaultFramebuffer);\n    glGenRenderbuffers(1, &colorRenderbuffer);\n    glBindFramebuffer(GL_FRAMEBUFFER, defaultFramebuffer);\n    glBindRenderbuffer(GL_RENDERBUFFER, colorRenderbuffer);\n    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, colorRenderbuffer);\n    \n    needSetup = 0;\n    \n    return self;\n}\n\n- (void) setRenderStateListener:(id<RenderStateListener>) lis\n{\n    listener = lis;\n}\n\n- (int) resizeFromLayer:(CAEAGLLayer *)layer\n{\n    // Allocate color buffer backing based on the current layer size\n    glBindRenderbuffer(GL_RENDERBUFFER, colorRenderbuffer);\n    [context renderbufferStorage:GL_RENDERBUFFER fromDrawable:layer];\n    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &backingWidth);\n    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &backingHeight);\n    \n    if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {\n        LOGI(\"failed to make complete framebuffer object %x \\n\", glCheckFramebufferStatus(GL_FRAMEBUFFER));\n        return -1;\n    }\n    \n    needSetup = 1;\n    \n    return 0;\n}\n\n- (void) render: (void*) data {\n    \n    struct VideoFrame *gVF = (struct VideoFrame*)data;\n    \n    if (needSetup) {\n        \n        [EAGLContext setCurrentContext:context];\n        \n\t\tGLuint width = gVF->width;\n\t\tGLuint height = gVF->height;\n        \n\t\tfloat aspect = (float)width / (float)height;\n        \n\t\tif(aspect >= (float)backingWidth/(float)backingHeight) {\n\t\t\t// fill screen in width, and leave space in Y\n\t\t\tfloat scale = (float)backingWidth / (float) width;\n\t\t\tfloat maxY = ((float)height * scale) / (float) backingHeight;\n\t\t\tvertexPositions[1] = vertexPositions[4] = - maxY;\n\t\t\tvertexPositions[7] = 
vertexPositions[10] = maxY;\n            \n\t\t} else {\n\t\t\t// fill screen in height, and leave space in X\n\t\t\tfloat scale = (float) backingHeight / (float) height;\n\t\t\tfloat maxX = ((float) width * scale) / (float) backingWidth;\n\t\t\tvertexPositions[0] = vertexPositions[6] = - maxX;\n\t\t\tvertexPositions[3] = vertexPositions[9] = maxX;\n\t\t}\n        \n\t\t// modify the texture coordinates\n\t\tfloat texCoord = ((float)width) / gVF->linesize_y;\n\t\ttextureCoords[2] = textureCoords[6] = texCoord;\n        \n\t\t// set the value of attributes\n\t\tglVertexAttribPointer(gAttribPosition, 3, GL_FLOAT, 0, 0, vertexPositions);\n\t\tglVertexAttribPointer(gAttribTexCoord, 2, GL_FLOAT, 0, 0, textureCoords);\n        \n\t\tglViewport(0, 0, backingWidth, backingHeight);\n        \n\t\tLOGI(\"setup finished\\n\");\n        \n\t\tneedSetup = 0;\n\t}\n\n    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);\n\tglClear(GL_COLOR_BUFFER_BIT);\n    \n    LOGD(\"before upload: %u (%f) \\n\", getms(), gVF->pts);\n    \n\t// upload textures\n\tglActiveTexture(GL_TEXTURE0 + 0);\n\tglTexImage2D ( GL_TEXTURE_2D, 0, GL_LUMINANCE, gVF->linesize_y, gVF->height, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, gVF->yuv_data[0]);\n\tglActiveTexture(GL_TEXTURE0 + 1);\n\tglTexImage2D ( GL_TEXTURE_2D, 0, GL_LUMINANCE, gVF->linesize_uv, gVF->height/2, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, gVF->yuv_data[1]);\n\tglActiveTexture(GL_TEXTURE0 + 2);\n\tglTexImage2D ( GL_TEXTURE_2D, 0, GL_LUMINANCE, gVF->linesize_uv, gVF->height/2, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, gVF->yuv_data[2]);\n    \n    [listener bufferDone];\n    \n    LOGD(\"after upload: %u (%f) \\n\", getms(), gVF->pts);\n    \n\tglDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\n    \n    LOGD(\"after glDrawArrays: %u (%f) \\n\", getms(), gVF->pts);\n    \n    [context presentRenderbuffer:GL_RENDERBUFFER];\n    \n}\n\n\n\n- (void)dealloc\n{\n    // delete buffers\n    if (defaultFramebuffer) {\n        glDeleteFramebuffers(1, &defaultFramebuffer);\n        
defaultFramebuffer = 0;\n    }\n    if (colorRenderbuffer) {\n        glDeleteRenderbuffers(1, &colorRenderbuffer);\n        colorRenderbuffer = 0;\n    }\n    \n    // tear down context\n    if ([EAGLContext currentContext] == context) {\n        [EAGLContext setCurrentContext:nil];\n    }\n    \n    context = nil;\n}\n\n@end\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/GLView.h",
    "content": "//\n//  GLView.h\n//  HEVDecoder\n//\n//  Created by Shengbin Meng on 11/21/13.\n//  Copyright (c) 2013 Peking University. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import \"GLRenderer.h\"\n\n@interface GLView : UIView\n\n@property (nonatomic, retain) GLRenderer *renderer;\n\n@end\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/GLView.m",
    "content": "//\n//  GLView.m\n//  HEVDecoder\n//\n//  Created by Shengbin Meng on 11/21/13.\n//  Copyright (c) 2013 Peking University. All rights reserved.\n//\n\n#import \"GLView.h\"\n\n@implementation GLView\n\n+ (Class)layerClass\n{\n    return [CAEAGLLayer class];\n}\n\n// this is called when the view is loaded from xib files\n- (id)initWithCoder:(NSCoder *)coder\n{\n    self = [super initWithCoder:coder];\n    if (self) {\n        // configure the properties of the layer\n        CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;\n        eaglLayer.opaque = TRUE;\n        eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithBool:FALSE], kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil];\n        \n        self.renderer = [[GLRenderer alloc] init];\n        if (self.renderer == nil) {\n            return nil;\n        }\n    }\n    return self;\n}\n\n\n- (void)layoutSubviews\n{\n    [self.renderer resizeFromLayer:(CAEAGLLayer*)self.layer];\n    [super layoutSubviews];\n}\n\n\n@end\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/Info.plist",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n<plist version=\"1.0\">\n<dict>\n\t<key>CFBundleDevelopmentRegion</key>\n\t<string>en</string>\n\t<key>CFBundleExecutable</key>\n\t<string>$(EXECUTABLE_NAME)</string>\n\t<key>CFBundleIdentifier</key>\n\t<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>\n\t<key>CFBundleInfoDictionaryVersion</key>\n\t<string>6.0</string>\n\t<key>CFBundleName</key>\n\t<string>$(PRODUCT_NAME)</string>\n\t<key>CFBundlePackageType</key>\n\t<string>APPL</string>\n\t<key>CFBundleShortVersionString</key>\n\t<string>1.0</string>\n\t<key>CFBundleVersion</key>\n\t<string>1</string>\n\t<key>LSRequiresIPhoneOS</key>\n\t<true/>\n\t<key>UIFileSharingEnabled</key>\n\t<true/>\n\t<key>UILaunchStoryboardName</key>\n\t<string>LaunchScreen</string>\n\t<key>UIMainStoryboardFile</key>\n\t<string>Main</string>\n\t<key>UIRequiredDeviceCapabilities</key>\n\t<array>\n\t\t<string>armv7</string>\n\t</array>\n\t<key>UIStatusBarTintParameters</key>\n\t<dict>\n\t\t<key>UINavigationBar</key>\n\t\t<dict>\n\t\t\t<key>Style</key>\n\t\t\t<string>UIBarStyleDefault</string>\n\t\t\t<key>Translucent</key>\n\t\t\t<false/>\n\t\t</dict>\n\t</dict>\n\t<key>UISupportedInterfaceOrientations</key>\n\t<array>\n\t\t<string>UIInterfaceOrientationPortrait</string>\n\t\t<string>UIInterfaceOrientationLandscapeLeft</string>\n\t\t<string>UIInterfaceOrientationLandscapeRight</string>\n\t</array>\n\t<key>UISupportedInterfaceOrientations~ipad</key>\n\t<array>\n\t\t<string>UIInterfaceOrientationPortrait</string>\n\t\t<string>UIInterfaceOrientationPortraitUpsideDown</string>\n\t\t<string>UIInterfaceOrientationLandscapeLeft</string>\n\t\t<string>UIInterfaceOrientationLandscapeRight</string>\n\t</array>\n</dict>\n</plist>\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/KSYMovieEncoder.h",
    "content": "//\n//  KSYMovieEncoder.h\n//  HEVDecoder\n//\n//  Created by Shengbin Meng on 13-2-25.\n//  Copyright (c) 2013 Peking University. All rights reserved.\n//\n\n#import <Foundation/Foundation.h>\n\n@interface KSYMovieEncoder : NSObject\n@property NSInteger width;\n@property NSInteger height;\n@property NSInteger frameNum;\n@property float realFPS;\n@property float real_time;\n@property NSString *out_file_string;\n\n- (int) openMovie:(NSString*) path;\n\n- (int) encoder;\n\n@end\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/KSYMovieEncoder.m",
    "content": "//\n//  MoviePlayer.m\n//  HEVDecoder\n//\n//  Created by Shengbin Meng on 13-2-25.\n//  Copyright (c) 2013 Peking University. All rights reserved.\n//\n\n#import \"KSYMovieEncoder.h\"\n#include <stdint.h>\n#include <stdio.h>\n#include <qy265enc.h>\n#include <sys/time.h>\n#include \"qy265def.h\"\n\nvoid logPrint(const char* msg){\n    if(strncmp(msg, \"\\n\", sizeof(\"\\n\"))){\n        NSString * message = [[NSString alloc]initWithUTF8String:msg];\n        NSLog(@\"message:%@\",message);\n        NSString *regulaStr = @\"\\\\d+\\\\.\\\\d+\";\n        NSRegularExpression *regex = [NSRegularExpression regularExpressionWithPattern:regulaStr\n                                                                               options:NSRegularExpressionCaseInsensitive\n                                                                                 error:nil];\n        NSArray *arrayOfAllMatches = [regex matchesInString:message options:0 range:NSMakeRange(0, [message length])];\n        if(arrayOfAllMatches.count >= 4){\n            float Y_PSNR = [[message substringWithRange:((NSTextCheckingResult *)arrayOfAllMatches[1]).range] floatValue];\n            float U_PSNR = [[message substringWithRange:((NSTextCheckingResult *)arrayOfAllMatches[2]).range] floatValue];\n            float V_PSNR = [[message substringWithRange:((NSTextCheckingResult *)arrayOfAllMatches[3]).range] floatValue];\n            float PSNR = (6* Y_PSNR + U_PSNR + V_PSNR)/8;\n            NSString *stringPSNR = [NSString stringWithFormat:@\"%.2f\",PSNR];\n            [[NSUserDefaults standardUserDefaults] setValue:stringPSNR forKey:@\"psnr\"];\n        }\n    \n    }\n    return;\n};\n\n\n@implementation KSYMovieEncoder\n\n{\n    NSString *moviePath;\n    FILE *in_file;\n}\n\n- (id) init\n{\n    self = [super init];\n\n    QY265SetLogPrintf(logPrint);\n    return self;\n}\n\n- (int) openMovie:(NSString*) path\n{\n    moviePath = path;\n    in_file = fopen([moviePath UTF8String], 
\"rb\");\n\tif(NULL == in_file) {\n\t\tprintf(\"can not open input file '%s'!\\n\", [moviePath UTF8String]);\n        return -1;\n\t}\n    \n    return 0;\n}\n\n- (int) encoder\n{\n    QY265EncConfig param;\n    QY265YUV yuv;\n    QY265Picture pic;\n    QY265Picture pic_out;\n    QY265Nal *nal;\n    void *h;\n    int i_frame = 0;\n    int i_frame_size;\n    int i_nal;\n    clock_t clock_start, clock_end, clock_used;\n    struct timeval tv_start, tv_end;\n    double real_time;\n    int64_t ms_used;\n    FILE *out_file;\n    int errorCode;\n\n    _out_file_string = [NSString stringWithFormat:@\"%@.265\", moviePath];\n    if ( NULL != _out_file_string ) {\n        out_file = fopen([_out_file_string UTF8String], \"wb\");\n        if ( NULL == out_file ) {\n            perror(\"open output file\");\n            fclose(in_file);\n            return -1;\n        }\n    }\n    \n    NSString *resolution = [[NSUserDefaults standardUserDefaults] valueForKey:@\"resolution\"];\n    NSArray *arrayofRes = [resolution componentsSeparatedByString:@\"*\"];\n    NSString *fps = [[NSUserDefaults standardUserDefaults] valueForKey:@\"fps\"];\n    NSString *bitRate = [[NSUserDefaults standardUserDefaults] valueForKey:@\"bitRate\"];\n    NSString *threads = [[NSUserDefaults standardUserDefaults] valueForKey:@\"threads\"];\n    NSString *profile = [[NSUserDefaults standardUserDefaults] valueForKey:@\"profile\"];\n    NSString *delayed = [[NSUserDefaults standardUserDefaults] valueForKey:@\"delayed\"];\n    \n    /* Get default params for preset/tuning */\n    if( QY265ConfigDefaultPreset( &param, [profile UTF8String], NULL, [delayed UTF8String]) < 0 )\n        goto fail;\n    \n    param.picWidth = [arrayofRes[0] intValue];\n    param.picHeight = [arrayofRes[1] intValue];\n    param.threads = [threads intValue];\n    param.frameRate = [fps floatValue];\n    if([bitRate intValue])\n        param.bitrateInkbps = [bitRate intValue];\n    param.calcPsnr = 1;\n\n    yuv.pData[0] = (unsigned 
char *)malloc(param.picWidth * param.picHeight * 3/2);\n    yuv.pData[1] = yuv.pData[0] + param.picWidth * param.picHeight;\n    yuv.pData[2] = yuv.pData[0] + param.picWidth * param.picHeight * 5/4;\n    yuv.iWidth = param.picWidth;\n    yuv.iHeight = param.picHeight;\n    yuv.iStride[0] = yuv.iWidth;\n    yuv.iStride[1] = yuv.iStride[2] = yuv.iWidth/2;\n    \n    h = QY265EncoderOpen( &param, &errorCode );\n    if( !h )\n        goto fail;\n    \n    pic.yuv = &yuv;\n    memset(&pic_out,0,sizeof(pic_out));\n    \n    int luma_size = param.picWidth * param.picHeight;\n    int chroma_size = luma_size / 4;\n    gettimeofday(&tv_start, NULL);\n    clock_start = clock();\n    /* Encode frames */\n    for( ;; i_frame++ )\n    {\n        /* Read input frame */\n        if( fread( pic.yuv->pData[0], 1, luma_size, in_file ) != luma_size )\n            break;\n        if( fread( pic.yuv->pData[1], 1, chroma_size, in_file ) != chroma_size )\n            break;\n        if( fread( pic.yuv->pData[2], 1, chroma_size, in_file ) != chroma_size )\n            break;\n        \n        pic.pts = i_frame;\n        i_frame_size = QY265EncoderEncodeFrame( h, &nal, &i_nal, &pic, &pic_out, 0 );\n        if( i_frame_size < 0 )\n            goto fail;\n        \n        for(int i = 0; i < i_nal; i++){\n            if( !fwrite(  nal[i].pPayload, nal[i].iSize, 1, out_file ) )\n                goto fail;\n        }\n    }\n    /* Flush delayed frames */\n    while( QY265EncoderDelayedFrames( h ) )\n    {\n        i_frame_size = QY265EncoderEncodeFrame( h, &nal, &i_nal, NULL, &pic_out, 0 );\n        if( i_frame_size < 0 )\n            goto fail;\n        \n        for(int i = 0; i < i_nal; i++){\n            if( !fwrite(  nal[i].pPayload, nal[i].iSize, 1, out_file ) )\n                goto fail;\n        }\n    }\n    clock_end = clock();\n    gettimeofday(&tv_end, NULL);\n    clock_used = clock_end - clock_start;\n    ms_used = (int64_t)(clock_used * 1000.0 / CLOCKS_PER_SEC);\n    real_time 
= (tv_end.tv_sec + (tv_end.tv_usec / 1000000.0)) - (tv_start.tv_sec + (tv_start.tv_usec / 1000000.0));\n    float realFPS = i_frame / real_time;\n    printf(\"%d frame encoded\\n\"\n           \"\\ttime\\tfps\\n\"\n           \"CPU\\t%lldms\\t%.2f\\n\"\n           \"Real\\t%.3fs\\t%.2f.\\n\",\n           i_frame,\n           ms_used, i_frame * 1000.0 / ms_used,\n           real_time, realFPS);\n\n    self.width = param.picWidth;\n    self.height = param.picHeight;\n    self.frameNum = i_frame;\n    self.realFPS = realFPS;\n    self.real_time = real_time;\n    \n    QY265EncoderClose( h );\n    \n    free(yuv.pData[0]);\n    fclose(in_file);\n    fclose(out_file);\n    return 0;\n    \nfail:\n    fclose(in_file);\n    fclose(out_file);\n    return -1;\n}\n\n@end\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/KSYMoviePlayer.h",
    "content": "//\n//  MoviePlayer.h\n//  HEVDecoder\n//\n//  Created by Shengbin Meng on 13-2-25.\n//  Copyright (c) 2013 Peking University. All rights reserved.\n//\n\n#import <Foundation/Foundation.h>\n#import \"GLRenderer.h\"\n#import \"GLView.h\"\n#import \"MoviePlayer.h\"\n@interface KSYMoviePlayer : NSObject <RenderStateListener>\n\n@property (nonatomic, retain) GLRenderer *renderer;\n@property NSInteger width;\n@property NSInteger height;\n@property NSInteger frameNum;\n@property float realFPS;\n@property float real_time;\n@property bool decodeEnd;\n@property NSString *out_file_string;\n\n- (int) openMovie:(NSString*) path;\n\n- (int) play;\n\n- (int) stop;\n\n- (int)test:(int) thread_num;\n\n@end\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/KSYMoviePlayer.m",
    "content": "//\n//  MoviePlayer.m\n//  HEVDecoder\n//\n//  Created by Shengbin Meng on 13-2-25.\n//  Copyright (c) 2013 Peking University. All rights reserved.\n//\n\n#import \"KSYMoviePlayer.h\"\n#import \"GLRenderer.h\"\n#include \"lenthevcdec.h\"\n#include <sys/sysctl.h>\n#include <sys/time.h>\n#include \"qy265dec.h\"\n\n#define AU_COUNT_MAX (1024 * 256)\n#define AU_BUF_SIZE_MAX (1024 * 1024 * 128)\n\nstatic inline int next_p2(int a) {\n    int rval=1;\n    while(rval<a) rval<<=1;\n    return rval;\n}\n\n@interface KSYMoviePlayer() {\n    void * _ksydec;\n    BOOL  _skipRender;\n}\n\n@end\n\n@implementation KSYMoviePlayer \n{\n    NSString *moviePath;\n    NSThread *decodeThread;\n    BOOL isBusy, stopRender;\n    unsigned char * pData[3];\n    int exit_decode_thread;\n    uint32_t au_pos[AU_COUNT_MAX];\n    uint32_t au_count, au_buf_size;\n    uint8_t *au_buf;\n    struct VideoFrame frame;\n    QY265Frame decframe;\n    int frames;\n    int frames_sum;\n    double tstart, tlast;\n    uint64_t renderInterval;\n    struct timeval timeStart;\n    FILE *out_file;\n}\n\n@synthesize renderer;\n\n- (id) init\n{\n    self = [super init];\n    \n    exit_decode_thread = 0;\n    frames_sum = 0;\n    tstart = 0;\n    frames = 0;\n    tlast = 0;\n    renderInterval = 0;\n    \n    isBusy = NO;\n    stopRender = NO;\n    _skipRender = NO;\n    self.decodeEnd = 0;\n\n    return self;\n}\n\n- (void) setupRenderer {\n    [self.renderer setRenderStateListener:self];\n}\n\n- (void) bufferDone {\n    isBusy = NO;\n}\n\n- (void) renderFrame:(struct VideoFrame *) vf\n{\n    if (_skipRender) {\n        return;\n    }\n    //int frames = 0;\n    uint8_t *dst[3] = {frame.yuv_data[0], frame.yuv_data[1], frame.yuv_data[2]};\n    uint8_t *src[3] = {decframe.pData[0], decframe.pData[1], decframe.pData[2]};\n    for (int j = 0; j < frame.height/2; ++j) {\n        memcpy(dst[0], src[0], frame.linesize_y);\n        dst[0] += frame.linesize_y;\n        src[0] += decframe.iStride[0];\n     
   memcpy(dst[0], src[0], frame.linesize_y);\n        dst[0] += frame.linesize_y;\n        src[0] += decframe.iStride[0];\n        memcpy(dst[1], src[1], frame.linesize_uv);\n        dst[1] += frame.linesize_uv;\n        src[1] += decframe.iStride[1];\n        memcpy(dst[2], src[2], frame.linesize_uv);\n        dst[2] += frame.linesize_uv;\n        src[2] += decframe.iStride[2];\n    }\n    vf = &frame;\n\tstruct timeval timeNow;\n\tgettimeofday(&timeNow, NULL);\n\tint64_t timePassed = ((int64_t)(timeNow.tv_sec - timeStart.tv_sec))*1000000 + (timeNow.tv_usec - timeStart.tv_usec);\n\tint64_t delay = vf->pts - timePassed;\n\tif (delay > 0) {\n\t\tusleep(delay);\n\t}\n    \n\tgettimeofday(&timeNow, NULL);\n\tdouble tnow = timeNow.tv_sec + (timeNow.tv_usec / 1000000.0);\n\tif (tlast == 0) tlast = tnow;\n\tif (tstart == 0) tstart = tnow;\n\tif (tnow > tlast + 1) {\n\t\tdouble avg_fps;\n\t\tprintf(\"Video Display FPS:%i\\n\", (int)frames);\n\t\tframes_sum += frames;\n\t\tavg_fps = frames_sum / (tnow - tstart);\n\t\tprintf(\"Video AVG FPS:%.2lf\\n\", avg_fps);\n        \n        //self.infoString = [NSString stringWithFormat:@\"size:%dx%d, fps:%d\", vf->width, vf->height, frames];\n        \n\t\ttlast = tlast + 1;\n\t\tframes = 0;\n\t}\n\tframes++;\n    while(isBusy && !stopRender) usleep(50);\n    isBusy = YES;\n    [renderer render:vf];\n}\n\nstatic int lent_hevc_get_sps(uint8_t* buf, int size, uint8_t** sps_ptr)\n{\n    int i, nal_type, sps_pos;\n    sps_pos = -1;\n    for ( i = 0; i < (size - 4); i++ ) {\n        if ( 0 == buf[i] && 0 == buf[i+1] && 1 == buf[i+2] ) {\n            nal_type = (buf[i+3] & 0x7E) >> 1;\n            if ( 33 != nal_type && sps_pos >= 0 ) {\n                break;\n            }\n            if ( 33 == nal_type ) { // sps\n                sps_pos = i;\n            }\n            i += 2;\n        }\n    }\n    if ( sps_pos < 0 )\n        return 0;\n    if ( i == (size - 4) )\n        i = size;\n    *sps_ptr = buf + sps_pos;\n    return i - 
sps_pos;\n}\n\nstatic int lent_hevc_get_frame(uint8_t* buf, int size, int *is_idr)\n{\n\tstatic int seq_hdr = 0;\n\tint i, nal_type, idr = 0;\n\tfor ( i = 0; i < (size - 6); i++ ) {\n\t\tif ( 0 == buf[i] && 0 == buf[i+1] && 1 == buf[i+2] ) {\n\t\t\tnal_type = (buf[i+3] & 0x7E) >> 1;\n\t\t\tif ( nal_type <= 21 ) {\n\t\t\t\tif ( buf[i+5] & 0x80 ) { /* first slice in pic */\n\t\t\t\t\tif ( !seq_hdr )\n\t\t\t\t\t\tbreak;\n\t\t\t\t\telse\n\t\t\t\t\t\tseq_hdr = 0;\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ( nal_type >= 32 && nal_type <= 34 ) {\n\t\t\t\tif ( !seq_hdr ) {\n\t\t\t\t\tseq_hdr = 1;\n\t\t\t\t\tidr = 1;\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t\tseq_hdr = 1;\n\t\t\t}\n\t\t\ti += 2;\n\t\t}\n\t}\n\tif ( i == (size - 6) )\n\t\ti = size;\n\tif ( NULL != is_idr )\n\t\t*is_idr = idr;\n\treturn i;\n}\n\n- (int) ksy_hevc_prepare:(int) thread_num\n{\n    // open hevc decoder\n    int hr = QY_OK;\n    QY265DecConfig ksycfg ={0};\n    ksycfg.threads = thread_num;\n    _ksydec = QY265DecoderCreate(&ksycfg, &hr );\n    if ( NULL == _ksydec ) {\n        fprintf(stderr, \"call QY265DecoderCreate failed!\\n\");\n        return -1;\n    }\n    fflush(stdout);\n    FILE *in_file = fopen([moviePath UTF8String], \"rb\");\n    if ( NULL == in_file ) {\n        fprintf(stderr, \" failed! can not open input file '%s'!\\n\",\n                [moviePath UTF8String]);\n        return -1;\n    }\n    fseek(in_file, 0, SEEK_END);\n    au_buf_size = ftell(in_file);\n    fseek(in_file, 0, SEEK_SET);\n    printf(\"(%d bytes) ... \", au_buf_size);\n    if ( au_buf_size > AU_BUF_SIZE_MAX )\n        au_buf_size = AU_BUF_SIZE_MAX;\n    au_buf = (uint8_t*)malloc(au_buf_size);\n    if ( NULL == au_buf ) {\n        perror(\"allocate AU buffer\");\n        fclose(in_file);\n        return -1;\n    }\n    if ( fread(au_buf, 1, au_buf_size, in_file) != au_buf_size ) {\n        perror(\"read intput file failed\");\n        fclose(in_file);\n        return -1;\n    }\n    fclose(in_file);\n    printf(\"done. 
%d bytes read.\\n\", au_buf_size);\n    \n    // find all AUs\n\tau_count = 0;\n\tfor (int i = 0; i < au_buf_size && au_count < (AU_COUNT_MAX - 1); i+=3 ) {\n\t\ti += lent_hevc_get_frame(au_buf + i, au_buf_size - i, NULL);\n\t\tau_pos[au_count++] = i;\n\t}\n\tau_pos[au_count] = au_buf_size; // include last AU\n    printf(\"found %d AUs\\n\", au_count);\n    \n    int ret;\n    uint8_t *sps;\n    int sps_len = lent_hevc_get_sps(au_buf, au_buf_size, &sps);\n    if ( sps_len > 0 ) {\n        lenthevcdec_ctx one_thread_ctx = lenthevcdec_create(1, INT32_MAX, NULL);\n        lenthevcdec_frame out_frame;\n        memset(&out_frame, 0, sizeof(lenthevcdec_frame));\n        out_frame.size = sizeof(lenthevcdec_frame);\n        ret = lenthevcdec_decode_frame(one_thread_ctx, sps, sps_len, 0, &out_frame);\n        if ( 0 != out_frame.width && 0 != out_frame.height ) {\n            //printf(\"Video dimensions is %dx%d\\n\", out_frame.width, out_frame.height);\n            // initialization that depends on width and heigt\n            //frame.width = out_frame.width;\n            //frame.height = out_frame.height;\n        }\n        lenthevcdec_destroy(one_thread_ctx);\n        frame.linesize_y = next_p2(out_frame.width);\n        frame.linesize_uv = next_p2(out_frame.width/2);\n        frame.yuv_data[0] = malloc( frame.linesize_y * out_frame.height);\n        frame.yuv_data[1] = malloc( frame.linesize_uv * out_frame.height/2);\n        frame.yuv_data[2] = malloc( frame.linesize_uv * out_frame.height/2);\n    }\n    return 0;\n}\n\n- (int) openMovie:(NSString*) path\n{\n    moviePath = path;\n\tif(!fopen([moviePath UTF8String], \"rb\")) {\n\t\tprintf(\"can not open input file '%s'!\\n\", [moviePath UTF8String]);\n        return -1;\n\t}\n    \n    return 0;\n}\n\n- (int) play\n{\n    // prepare decoder\n    float renderFPS = 0;\n    NSString *num = [[NSUserDefaults standardUserDefaults] valueForKey:@\"threadNum\"];\n    int thread_num = [num integerValue];\n    \n    NSString 
*fps = [[NSUserDefaults standardUserDefaults] valueForKey:@\"renderFPS\"];\n    renderFPS = [fps floatValue];\n    if ([fps isEqualToString:@\"-1 (off)\"]) {\n        _skipRender = YES;\n    }\n    if (renderFPS == 0) {\n        renderInterval = 1;\n    }\n\telse {\n\t\trenderInterval = 1.0 / renderFPS * 1000000; // us\n\t}\n    \n    printf(\"will play with decoding thread number: %d, and FPS: %.2f\", thread_num, renderFPS);\n    \n    /* open output file */\n    out_file = NULL;\n    _out_file_string = NULL;\n    NSString *flag = [[NSUserDefaults standardUserDefaults] valueForKey:@\"outputFlag\"];\n    if ([flag isEqualToString:@\"YES\"]) {\n        _out_file_string = [NSString stringWithFormat:@\"%@.ksc.yuv\", moviePath];\n        if ( NULL != _out_file_string ) {\n            out_file = fopen([_out_file_string UTF8String], \"wb\");\n            if ( NULL == out_file ) {\n                perror(\"open output file\");\n                return -1;\n            }\n        }\n    }\n\n    int ret = [self ksy_hevc_prepare:thread_num];\n    if (ret < 0) {\n        if (au_buf != NULL) {\n            free(au_buf);\n        }\n        if (_ksydec != NULL) {\n            QY265DecoderDestroy(_ksydec);\n            _ksydec = NULL;\n        }\n        return ret;\n    }\n    decodeThread = [[NSThread alloc] initWithTarget:self selector:@selector(ksydecodeVideo) object:nil];\n    [decodeThread start];\n    return 0;\n}\n\n- (int) stop {\n\texit_decode_thread = 1;\n    stopRender = YES;\n    return 0;\n}\n\n- (void) ksydecodeVideo {\n    exit_decode_thread = 0;\n    [self setupRenderer];\n    // decode video\n    int64_t pts, ms_used;\n    clock_t clock_start, clock_end, clock_used;\n    struct timeval tv_start, tv_end;\n    double real_time;\n    int ret;\n    int frame_count = 0;\n    gettimeofday(&tv_start, NULL);\n    clock_start = clock();\n    for (int i = 0; i < au_count; i++ ) {\n        if (exit_decode_thread) {\n            break;\n        }\n        pts = i * 40;\n   
     unsigned char*  pD = au_buf + au_pos[i];\n        int len = au_pos[i + 1] - au_pos[i];\n        if (len > 0) {\n            QY265DecodeFrame(_ksydec, pD, len, &ret, 0);\n            if ( ret < 0 ) {\n                fprintf(stderr, \"decode_frame failed[%d]\\n\", ret);\n                return ;\n            }\n        }\n        QY265DecoderGetDecodedFrame(_ksydec, &decframe, &ret, 0);\n        if (ret == 0 && decframe.bValid) {\n            frame.width = decframe.frameinfo.nWidth;\n            frame.height = decframe.frameinfo.nHeight;\n            frame.pts = frame_count * renderInterval;\n            if (out_file){\n                ret = write_pic_yv12(decframe.frameinfo.nWidth, decframe.frameinfo.nHeight, (UInt8 **)decframe.pData, decframe.iStride, out_file);\n                if ( ret < 0 ) {\n                    perror(\"write output file\");\n                    return;\n                }\n            }\n            if (frame_count == 0) {\n                gettimeofday(&timeStart, NULL);\n            }\n            frame_count++;\n            [self renderFrame:&frame];\n            QY265DecoderReturnDecodedFrame(_ksydec, &decframe);\n        }\n    }\n    printf(\"========== %d ========\\n\", frame_count);\n    // flush decoder\n    while (1){\n        if (exit_decode_thread) {\n            break;\n        }\n        QY265DecoderGetDecodedFrame(_ksydec, &decframe, &ret, 0);\n        if (ret == 0 && frame_count < au_count - 1) {\n            if (decframe.bValid){\n                frame.pts = frame_count * renderInterval;\n                if (out_file){\n                    ret = write_pic_yv12(decframe.frameinfo.nWidth, decframe.frameinfo.nHeight, (UInt8 **)decframe.pData, decframe.iStride, out_file);\n                    if ( ret < 0 ) {\n                        perror(\"write output file\");\n                        return;\n                    }\n                }\n                frame_count++;\n                [self renderFrame:&frame];\n             
   QY265DecoderReturnDecodedFrame(_ksydec, &decframe);\n            }\n        }\n        else {\n            if (out_file){\n                ret = write_pic_yv12(decframe.frameinfo.nWidth, decframe.frameinfo.nHeight, (UInt8 **)decframe.pData, decframe.iStride, out_file);\n                if ( ret < 0 ) {\n                    perror(\"write output file\");\n                    return;\n                }\n            }\n            break;\n        }\n    }\n    \n    \n    clock_end = clock();\n    gettimeofday(&tv_end, NULL);\n    clock_used = clock_end - clock_start;\n    ms_used = (int64_t)(clock_used * 1000.0 / CLOCKS_PER_SEC);\n    real_time = (tv_end.tv_sec + (tv_end.tv_usec / 1000000.0)) - (tv_start.tv_sec + (tv_start.tv_usec / 1000000.0));\n   \n    float realFPS = frame_count / real_time;\n    printf(\"%d frame decoded\\n\"\n           \"\\ttime\\tfps\\n\"\n           \"CPU\\t%lldms\\t%.2f\\n\"\n           \"Real\\t%.3fs\\t%.2f.\\n\",\n           frame_count,\n           ms_used, frame_count * 1000.0 / ms_used,\n           real_time, realFPS);\n    self.width = frame.width;\n    self.height = frame.height;\n    self.frameNum = frame_count;\n    self.realFPS = realFPS;\n    self.real_time = real_time;\n\n    self.decodeEnd = 1;\n    free(au_buf);\n    au_buf = NULL;\n    free(frame.yuv_data[0]);\n    free(frame.yuv_data[1]);\n    free(frame.yuv_data[2]);\n    if (_ksydec) {\n        QY265DecoderDestroy(_ksydec);\n        _ksydec = NULL;\n    }\n    if (out_file)\n        fclose(out_file);\n    exit_decode_thread = 0;\n}\n\nstatic int write_pic_yv12(int w, int h, uint8_t* buf[3], short stride[3], FILE *fp)\n{\n    uint8_t *line;\n    int line_len, line_count, i, j, pitch;\n    for ( i = 0; i < 3; i++ ) {\n        line = buf[i];\n        pitch = stride[i];\n        line_len = (0 == i) ? w : (w / 2);\n        line_count = (0 == i) ? 
h : (h / 2);\n        for ( j = 0; j < line_count; j++ ) {\n            if ( fwrite(line, 1, line_len, fp) != line_len )\n                return -1;\n            line += pitch;\n        }\n    }\n    return 0;\n}\n\n- (int)test:(int) thread_num{\n    return 0;\n}\n\n@end\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/MovieEncoder.h",
    "content": "//\n//  MoviePlayer.h\n//  HEVDecoder\n//\n//  Created by Shengbin Meng on 13-2-25.\n//  Copyright (c) 2013 Peking University. All rights reserved.\n//\n\n#import <Foundation/Foundation.h>\n\n@interface MovieEncoder : NSObject\n@property NSInteger width;\n@property NSInteger height;\n@property NSInteger frameNum;\n@property float realFPS;\n@property float real_time;\n@property double avg_psnr;\n@property NSString *out_file_string;\n\n- (int) openMovie:(NSString*) path;\n\n- (int) encoder;\n\n@end\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/MovieEncoder.m",
    "content": "//\n//  MoviePlayer.m\n//  HEVDecoder\n//\n//  Created by Shengbin Meng on 13-2-25.\n//  Copyright (c) 2013 Peking University. All rights reserved.\n//\n\n#import \"MovieEncoder.h\"\n#include <stdint.h>\n#include <stdio.h>\n#include <x264.h>\n#include <sys/time.h>\n\n@implementation MovieEncoder\n\n{\n    NSString *moviePath;\n    FILE *in_file;\n}\n\n- (id) init\n{\n    self = [super init];\n\n    return self;\n}\n\n- (int) openMovie:(NSString*) path\n{\n    moviePath = path;\n    in_file = fopen([moviePath UTF8String], \"rb\");\n\tif(NULL == in_file) {\n\t\tprintf(\"can not open input file '%s'!\\n\", [moviePath UTF8String]);\n        return -1;\n\t}\n    \n    return 0;\n}\n\n- (int) encoder\n{\n    x264_param_t param;\n    x264_picture_t pic;\n    x264_picture_t pic_out;\n    x264_t *h;\n    int i_frame = 0;\n    int i_frame_size;\n    x264_nal_t *nal;\n    int i_nal;\n    clock_t clock_start, clock_end, clock_used;\n    struct timeval tv_start, tv_end;\n    double real_time;\n    int64_t ms_used;\n    FILE *out_file;\n    double sum_psnr_y = 0.0;\n    double sum_psnr_u = 0.0;\n    double sum_psnr_v = 0.0;\n\n    _out_file_string = [NSString stringWithFormat:@\"%@.264\", moviePath];\n    if ( NULL != _out_file_string ) {\n        out_file = fopen([_out_file_string UTF8String], \"wb\");\n        if ( NULL == out_file ) {\n            perror(\"open output file\");\n            fclose(in_file);\n            return -1;\n        }\n    }\n    \n    NSString *resolution = [[NSUserDefaults standardUserDefaults] valueForKey:@\"resolution\"];\n    NSArray *arrayofRes = [resolution componentsSeparatedByString:@\"*\"];\n    NSString *fps = [[NSUserDefaults standardUserDefaults] valueForKey:@\"fps\"];\n    NSString *bitRate = [[NSUserDefaults standardUserDefaults] valueForKey:@\"bitRate\"];\n    NSString *threads = [[NSUserDefaults standardUserDefaults] valueForKey:@\"threads\"];\n    NSString *profile = [[NSUserDefaults standardUserDefaults] 
valueForKey:@\"profile\"];\n    NSString *delayed = [[NSUserDefaults standardUserDefaults] valueForKey:@\"delayed\"];\n    \n    /* Get default params for preset/tuning */\n    if ([delayed isEqualToString:@\"zerolatency\"]) {\n        if( x264_param_default_preset( &param, [profile UTF8String], \"zerolatency\" ) < 0 )\n            goto fail;\n    }\n    else {\n        if( x264_param_default_preset( &param, [profile UTF8String], NULL ) < 0 )\n            goto fail;\n    }\n    \n    /* Configure non-default params */\n    param.i_csp = X264_CSP_I420;\n    param.i_width  = [arrayofRes[0] intValue];\n    param.i_height = [arrayofRes[1] intValue];\n    param.b_vfr_input = 0;\n    param.b_repeat_headers = 1;\n    param.b_annexb = 1;\n    \n    if([bitRate intValue]){\n        param.rc.i_bitrate = [bitRate intValue];\n        param.rc.i_rc_method = X264_RC_ABR;\n    }\n    \n    if ([delayed isEqualToString:@\"zerolatency\"]) {\n        param.i_bframe = 0;\n    }\n    else if([delayed isEqualToString:@\"livestreaming\"]){\n        param.i_bframe = 3;\n    }\n    else{\n        param.i_bframe = 7;\n    }\n    \n    param.i_threads = [threads intValue];\n    param.i_fps_num = [fps floatValue];\n    param.i_fps_den = 1;\n    \n    param.analyse.b_psnr = 1;\n    \n    /* Apply profile restrictions. 
*/\n    if( x264_param_apply_profile( &param, \"high\" ) < 0 )\n        goto fail;\n    \n    if( x264_picture_alloc( &pic, param.i_csp, param.i_width, param.i_height ) < 0 )\n        goto fail;\n\n    h = x264_encoder_open( &param );\n    if( !h )\n        goto fail;\n    \n    int luma_size = param.i_width * param.i_height;\n    int chroma_size = luma_size / 4;\n    gettimeofday(&tv_start, NULL);\n    clock_start = clock();\n    /* Encode frames */\n    for( ;; i_frame++ )\n    {\n        /* Read input frame */\n        if( fread( pic.img.plane[0], 1, luma_size, in_file ) != luma_size )\n            break;\n        if( fread( pic.img.plane[1], 1, chroma_size, in_file ) != chroma_size )\n            break;\n        if( fread( pic.img.plane[2], 1, chroma_size, in_file ) != chroma_size )\n            break;\n        \n        pic.i_pts = i_frame;\n        i_frame_size = x264_encoder_encode( h, &nal, &i_nal, &pic, &pic_out );\n        if( i_frame_size < 0 )\n            goto fail;\n        else if( i_frame_size )\n        {\n            if (param.analyse.b_psnr){\n                sum_psnr_y += pic_out.prop.f_psnr[0];\n                sum_psnr_u += pic_out.prop.f_psnr[1];\n                sum_psnr_v += pic_out.prop.f_psnr[2];\n            }\n            if( !fwrite( nal->p_payload, i_frame_size, 1, out_file ) )\n                goto fail;\n        }\n    }\n    /* Flush delayed frames */\n    while( x264_encoder_delayed_frames( h ) )\n    {\n        i_frame_size = x264_encoder_encode( h, &nal, &i_nal, NULL, &pic_out );\n        if( i_frame_size < 0 )\n            goto fail;\n        else if( i_frame_size )\n        {\n            if (param.analyse.b_psnr){\n                sum_psnr_y += pic_out.prop.f_psnr[0];\n                sum_psnr_u += pic_out.prop.f_psnr[1];\n                sum_psnr_v += pic_out.prop.f_psnr[2];\n            }\n            if( !fwrite( nal->p_payload, i_frame_size, 1, out_file ) )\n                goto fail;\n        }\n    }\n    clock_end = 
clock();\n    gettimeofday(&tv_end, NULL);\n    clock_used = clock_end - clock_start;\n    ms_used = (int64_t)(clock_used * 1000.0 / CLOCKS_PER_SEC);\n    real_time = (tv_end.tv_sec + (tv_end.tv_usec / 1000000.0)) - (tv_start.tv_sec + (tv_start.tv_usec / 1000000.0));\n    float realFPS = i_frame / real_time;\n    double avg_psnr = (6*sum_psnr_y+sum_psnr_u+sum_psnr_v)/(8*i_frame);\n    printf(\"%d frame encoded\\n\"\n           \"\\ttime\\tfps\\n\"\n           \"CPU\\t%lldms\\t%.2f\\n\"\n           \"Real\\t%.3fs\\t%.2f.\\n\"\n           \"PSNR\\t%.2f\\n\",\n           i_frame,\n           ms_used, i_frame * 1000.0 / ms_used,\n           real_time, realFPS, avg_psnr);\n    \n    self.width = param.i_width;\n    self.height = param.i_height;\n    self.frameNum = i_frame;\n    self.realFPS = realFPS;\n    self.real_time = real_time;\n    self.avg_psnr = avg_psnr;\n    \n    x264_encoder_close( h );\n    x264_picture_clean( &pic );\n    fclose(in_file);\n    fclose(out_file);\n    return 0;\n    \nfail:\n    fclose(in_file);\n    fclose(out_file);\n    return -1;\n}\n\n@end\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/MoviePlayer.h",
    "content": "//\n//  MoviePlayer.h\n//  HEVDecoder\n//\n//  Created by Shengbin Meng on 13-2-25.\n//  Copyright (c) 2013 Peking University. All rights reserved.\n//\n\n#import <Foundation/Foundation.h>\n#import \"GLRenderer.h\"\n#import \"GLView.h\"\n\nstruct VideoFrame\n{\n\tint width;\n\tint height;\n\tint linesize_y;\n\tint linesize_uv;\n\tdouble pts;\n\tuint8_t *yuv_data[3];\n};\n\nuint32_t getms();\n\n@interface MoviePlayer : NSObject <RenderStateListener>\n\n@property (nonatomic, retain) GLRenderer *renderer;\n@property NSInteger width;\n@property NSInteger height;\n@property NSInteger frameNum;\n@property float realFPS;\n@property float real_time;\n@property bool decodeEnd;\n@property NSString *out_file_string;\n\n- (int) openMovie:(NSString*) path;\n\n- (int) play;\n\n- (int) stop;\n\n- (int)test:(int) thread_num;\n\n@end\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/MoviePlayer.m",
    "content": "//\n//  MoviePlayer.m\n//  HEVDecoder\n//\n//  Created by Shengbin Meng on 13-2-25.\n//  Copyright (c) 2013 Peking University. All rights reserved.\n//\n\n#import \"MoviePlayer.h\"\n#import \"GLRenderer.h\"\n#include \"lenthevcdec.h\"\n#include <sys/sysctl.h>\n#include <sys/time.h>\n\n#define AU_COUNT_MAX (1024 * 256)\n#define AU_BUF_SIZE_MAX (1024 * 1024 * 128)\n\nstatic unsigned int count_cores()\n{\n    size_t len;\n    unsigned int ncpu = 0;\n    \n    len = sizeof(ncpu);\n    sysctlbyname (\"hw.ncpu\", &ncpu, &len, NULL, 0);\n    return ncpu;\n}\n\nuint32_t getms()\n{\n\tstruct timeval t;\n\tgettimeofday(&t, NULL);\n\treturn (t.tv_sec * 1000) + (t.tv_usec / 1000);\n}\n\n\n@implementation MoviePlayer\n\n{\n    NSString *moviePath;\n    NSThread *decodeThread;\n    BOOL isBusy, stopRender;\n    BOOL _bSkipRender;\n    int exit_decode_thread;\n    uint32_t au_pos[AU_COUNT_MAX];\n    uint32_t au_count, au_buf_size;\n    uint8_t *au_buf;\n    lenthevcdec_ctx ctx;\n    struct VideoFrame frame;\n    int frames;\n    int frames_sum;\n    double tstart, tlast;\n    uint64_t renderInterval;\n    struct timeval timeStart;\n    FILE *out_file;\n}\n\n@synthesize renderer;\n\n- (id) init\n{\n    self = [super init];\n    \n    exit_decode_thread = 0;\n    ctx = NULL;\n    frames_sum = 0;\n    tstart = 0;\n    frames = 0;\n    tlast = 0;\n    renderInterval = 0;\n\n    isBusy = NO;\n    stopRender = NO;\n    _bSkipRender = NO;\n    self.decodeEnd = 0;\n\n    return self;\n}\n\n- (void) setupRenderer\n{\n    [self.renderer setRenderStateListener:self];\n}\n\n- (void) bufferDone {\n    isBusy = NO;\n}\n\n- (void) renderFrame:(struct VideoFrame *) vf\n{\n    vf = &frame;\n    if (_bSkipRender) {\n        return;\n    }\n    \n\tstruct timeval timeNow;\n\tgettimeofday(&timeNow, NULL);\n\tint64_t timePassed = ((int64_t)(timeNow.tv_sec - timeStart.tv_sec))*1000000 + (timeNow.tv_usec - timeStart.tv_usec);\n\tint64_t delay = vf->pts - timePassed;\n\tif (delay > 0) 
{\n\t\tusleep(delay);\n\t}\n    \n\tgettimeofday(&timeNow, NULL);\n\tdouble tnow = timeNow.tv_sec + (timeNow.tv_usec / 1000000.0);\n\tif (tlast == 0) tlast = tnow;\n\tif (tstart == 0) tstart = tnow;\n\tif (tnow > tlast + 1) {\n\t\tdouble avg_fps;\n        \n\t\tprintf(\"Video Display FPS:%i\\n\", (int)frames);\n\t\tframes_sum += frames;\n\t\tavg_fps = frames_sum / (tnow - tstart);\n\t\tprintf(\"Video AVG FPS:%.2lf\\n\", avg_fps);\n        \n        //self.infoString = [NSString stringWithFormat:@\"size:%dx%d, fps:%d\", vf->width, vf->height, frames];\n        \n\t\ttlast = tlast + 1;\n\t\tframes = 0;\n\t}\n\tframes++;\n    \n    \n    while(isBusy && !stopRender) usleep(50);\n    isBusy = YES;\n    [renderer render:vf];\n}\n\nstatic int lent_hevc_get_sps(uint8_t* buf, int size, uint8_t** sps_ptr)\n{\n    int i, nal_type, sps_pos;\n    sps_pos = -1;\n    for ( i = 0; i < (size - 4); i++ ) {\n        if ( 0 == buf[i] && 0 == buf[i+1] && 1 == buf[i+2] ) {\n            nal_type = (buf[i+3] & 0x7E) >> 1;\n            if ( 33 != nal_type && sps_pos >= 0 ) {\n                break;\n            }\n            if ( 33 == nal_type ) { // sps\n                sps_pos = i;\n            }\n            i += 2;\n        }\n    }\n    if ( sps_pos < 0 )\n        return 0;\n    if ( i == (size - 4) )\n        i = size;\n    *sps_ptr = buf + sps_pos;\n    return i - sps_pos;\n}\n\nstatic int lent_hevc_get_frame(uint8_t* buf, int size, int *is_idr)\n{\n\tstatic int seq_hdr = 0;\n\tint i, nal_type, idr = 0;\n\tfor ( i = 0; i < (size - 6); i++ ) {\n\t\tif ( 0 == buf[i] && 0 == buf[i+1] && 1 == buf[i+2] ) {\n\t\t\tnal_type = (buf[i+3] & 0x7E) >> 1;\n\t\t\tif ( nal_type <= 21 ) {\n\t\t\t\tif ( buf[i+5] & 0x80 ) { /* first slice in pic */\n\t\t\t\t\tif ( !seq_hdr )\n\t\t\t\t\t\tbreak;\n\t\t\t\t\telse\n\t\t\t\t\t\tseq_hdr = 0;\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ( nal_type >= 32 && nal_type <= 34 ) {\n\t\t\t\tif ( !seq_hdr ) {\n\t\t\t\t\tseq_hdr = 1;\n\t\t\t\t\tidr = 
1;\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t\tseq_hdr = 1;\n\t\t\t}\n\t\t\ti += 2;\n\t\t}\n\t}\n\tif ( i == (size - 6) )\n\t\ti = size;\n\tif ( NULL != is_idr )\n\t\t*is_idr = idr;\n\treturn i;\n}\n\n- (int) lent_hevc_prepare:(int) thread_num\n{\n    // open hevc decoder\n    int compatibility = INT32_MAX;\n    if ([[moviePath pathExtension] isEqualToString:@\"hm91\"]) {\n        compatibility = 91;\n    } else if ([[moviePath pathExtension] isEqualToString:@\"hm10\"]) {\n        compatibility = 100;\n    }\n    if (thread_num == 0) {\n        thread_num = count_cores();\n    }\n    ctx = lenthevcdec_create(thread_num, compatibility, NULL);\n    if ( NULL == ctx ) {\n        fprintf(stderr, \"call lenthevcdec_create failed!\\n\");\n        return -1;\n    }\n    printf(\"raw bitstream, compatibility: %s\\n\",\n           (91 == compatibility) ? \"HM9.1\" : ((100 == compatibility) ? \"HM10.0\" : \"Unknown(Last)\"));\n    \n    // read intput file\n    printf(\"read input file \");\n    fflush(stdout);\n    FILE *in_file = fopen([moviePath UTF8String], \"rb\");\n    if ( NULL == in_file ) {\n        fprintf(stderr, \" failed! can not open input file '%s'!\\n\",\n                [moviePath UTF8String]);\n        return -1;\n    }\n    \n    fseek(in_file, 0, SEEK_END);\n    au_buf_size = ftell(in_file);\n    fseek(in_file, 0, SEEK_SET);\n    printf(\"(%d bytes) ... \", au_buf_size);\n    if ( au_buf_size > AU_BUF_SIZE_MAX )\n        au_buf_size = AU_BUF_SIZE_MAX;\n    au_buf = (uint8_t*)malloc(au_buf_size);\n    if ( NULL == au_buf ) {\n        perror(\"allocate AU buffer\");\n        fclose(in_file);\n        return -1;\n    }\n    if ( fread(au_buf, 1, au_buf_size, in_file) != au_buf_size ) {\n        perror(\"read intput file failed\");\n        fclose(in_file);\n        return -1;\n    }\n    fclose(in_file);\n    printf(\"done. 
%d bytes read.\\n\", au_buf_size);\n    \n    // find all AUs\n\tau_count = 0;\n\tfor (int i = 0; i < au_buf_size && au_count < (AU_COUNT_MAX - 1); i+=3 ) {\n\t\ti += lent_hevc_get_frame(au_buf + i, au_buf_size - i, NULL);\n\t\tau_pos[au_count++] = i;\n\t}\n\tau_pos[au_count] = au_buf_size; // include last AU\n    printf(\"found %d AUs\\n\", au_count);\n    \n    int ret;\n    uint8_t *sps;\n    int sps_len = lent_hevc_get_sps(au_buf, au_buf_size, &sps);\n    if ( sps_len > 0 ) {\n        lenthevcdec_ctx one_thread_ctx = lenthevcdec_create(1, compatibility, NULL);\n        lenthevcdec_frame out_frame;\n        memset(&out_frame, 0, sizeof(lenthevcdec_frame));\n        out_frame.size = sizeof(lenthevcdec_frame);\n        ret = lenthevcdec_decode_frame(one_thread_ctx, sps, sps_len, 0, &out_frame);\n        if ( 0 != out_frame.width && 0 != out_frame.height ) {\n            //printf(\"Video dimensions is %dx%d\\n\", out_frame.width, out_frame.height);\n            // initialization that depends on width and heigt\n        }\n        lenthevcdec_destroy(one_thread_ctx);\n        \n    }\n    \n    return 0;\n}\n\n- (int) openMovie:(NSString*) path\n{\n    moviePath = path;\n\tif(!fopen([moviePath UTF8String], \"rb\")) {\n\t\tprintf(\"can not open input file '%s'!\\n\", [moviePath UTF8String]);\n        return -1;\n\t}\n    \n    return 0;\n}\n\n- (int) play\n{\n    // prepare decoder\n    float renderFPS = 0;\n    NSString *num = [[NSUserDefaults standardUserDefaults] valueForKey:@\"threadNum\"];\n    int thread_num = [num integerValue];\n    \n    NSString *fps = [[NSUserDefaults standardUserDefaults] valueForKey:@\"renderFPS\"];\n    renderFPS = [fps floatValue];\n    if ([fps isEqualToString:@\"-1 (off)\"]) {\n        _bSkipRender = YES;\n    }\n\tif (renderFPS == 0) renderInterval = 1;\n\telse {\n\t\trenderInterval = 1.0 / renderFPS * 1000000; // us\n\t}\n    printf(\"will play with decoding thread number: %d, FPS: %.2f\", thread_num, renderFPS);\n    \n    /* open 
output file */\n    out_file = NULL;\n    _out_file_string = NULL;\n    NSString *flag = [[NSUserDefaults standardUserDefaults] valueForKey:@\"outputFlag\"];\n    if ([flag isEqualToString:@\"YES\"]) {\n        _out_file_string = [NSString stringWithFormat:@\"%@.lent.yuv\", moviePath];\n        if ( NULL != _out_file_string ) {\n            out_file = fopen([_out_file_string UTF8String], \"wb\");\n            if ( NULL == out_file ) {\n                perror(\"open output file\");\n                return -1;\n            }\n        }\n    }\n    \n    int ret = [self lent_hevc_prepare:thread_num];\n    if (ret < 0) {\n        if (au_buf != NULL) free(au_buf);\n        if (ctx != NULL) lenthevcdec_destroy(ctx);\n            return ret;\n    }\n    \n    decodeThread = [[NSThread alloc] initWithTarget:self selector:@selector(decodeVideo) object:nil];\n    [decodeThread start];\n    \n    return 0;\n}\n\n- (int) stop\n{\n\texit_decode_thread = 1;\n    stopRender = YES;\n    return 0;\n}\n\n- (void) decodeVideo\n{\n    exit_decode_thread = 0;\n    \n    [self setupRenderer];\n    \n    // decode video\n    int64_t pts, ms_used;\n    clock_t clock_start, clock_end, clock_used;\n    struct timeval tv_start, tv_end;\n    double real_time;\n    int ret;\n    int frame_count = 0;\n    lenthevcdec_frame out_frame;\n    \n    gettimeofday(&tv_start, NULL);\n    clock_start = clock();\n    for (int i = 0; i < au_count; i++ ) {\n        if (exit_decode_thread) {\n            break;\n        }\n        pts = i * 40;\n        out_frame.got_frame = 0;\n        ret = lenthevcdec_decode_frame(ctx, au_buf + au_pos[i], au_pos[i + 1] - au_pos[i], pts, &out_frame);\n        if ( ret < 0 ) {\n            fprintf(stderr, \"lenthevcdec_decode_frame failed! 
ret=%d\\n\", ret);\n            return ;\n        }\n        if ( out_frame.got_frame > 0 ) {\n            \n            // draw frame to screen\n            frame.yuv_data[0] = out_frame.pixels[0];\n            frame.yuv_data[1] = out_frame.pixels[1];\n            frame.yuv_data[2] = out_frame.pixels[2];\n            frame.linesize_y = out_frame.line_stride[0];\n            frame.linesize_uv = out_frame.line_stride[1];\n            frame.pts = frame_count * renderInterval;\n            frame.width = out_frame.width;\n            frame.height = out_frame.height;\n            \n//            printf(\"decode frame %d, %dx%d, pts is %\" PRId64 \"\\n\",\n//                   frame_count, width, height, got_pts);\n//\n            if (out_file){\n                ret = write_pic_yv12(out_frame.width, out_frame.height, (UInt8 **)out_frame.pixels, out_frame.line_stride, out_file);\n                if ( ret < 0 ) {\n                    perror(\"write output file\");\n                    return;\n                }\n            }\n            \n            \n            if (frame_count == 0) {\n                gettimeofday(&timeStart, NULL);\n            }\n            frame_count++;\n            \n            [self renderFrame:&frame];\n            \n        }\n    }\n    \n    // flush decoder\n    while (1) {\n        if (exit_decode_thread) {\n            break;\n        }\n        out_frame.got_frame = 0;\n        ret = lenthevcdec_decode_frame(ctx, NULL, 0, pts, &out_frame);\n        if ( ret == 0 && out_frame.got_frame > 0 ) {\n            // draw frame to screen\n            frame.yuv_data[0] = out_frame.pixels[0];\n            frame.yuv_data[1] = out_frame.pixels[1];\n            frame.yuv_data[2] = out_frame.pixels[2];\n            frame.linesize_y = out_frame.line_stride[0];\n            frame.linesize_uv = out_frame.line_stride[1];\n            frame.pts = frame_count * renderInterval;\n            \n            printf(\"decode frame %d, %dx%d, pts is %\" PRId64 
\"\\n\",\n                   frame_count, out_frame.width, out_frame.height, out_frame.got_pts);\n            \n            if (out_file){\n                ret = write_pic_yv12(out_frame.width, out_frame.height, (UInt8 **)out_frame.pixels, out_frame.line_stride, out_file);\n                if ( ret < 0 ) {\n                    perror(\"write output file\");\n                    return;\n                }\n            }\n            \n            if (frame_count == 0) {\n                gettimeofday(&timeStart, NULL);\n            }\n            frame_count++;\n            \n            [self renderFrame:&frame];\n        }\n        else{\n            break;\n        }\n    }\n    \n    clock_end = clock();\n    gettimeofday(&tv_end, NULL);\n    clock_used = clock_end - clock_start;\n    ms_used = (int64_t)(clock_used * 1000.0 / CLOCKS_PER_SEC);\n    real_time = (tv_end.tv_sec + (tv_end.tv_usec / 1000000.0)) - (tv_start.tv_sec + (tv_start.tv_usec / 1000000.0));\n    float realFPS = frame_count / real_time;\n    printf(\"%d frame decoded\\n\"\n           \"\\ttime\\tfps\\n\"\n           \"CPU\\t%lldms\\t%.2f\\n\"\n           \"Real\\t%.3fs\\t%.2f.\\n\",\n           frame_count,\n           ms_used, frame_count * 1000.0 / ms_used,\n           real_time, realFPS);\n    self.width = frame.width;\n    self.height = frame.height;\n    self.frameNum = frame_count;\n    self.realFPS = realFPS;\n    self.real_time = real_time;\n    self.decodeEnd = 1;\n    free(au_buf);\n    au_buf = NULL;\n    lenthevcdec_destroy(ctx);\n    if (out_file)\n        fclose(out_file);\n\n    exit_decode_thread = 0;\n\n}\n\nstatic int write_pic_yv12(int w, int h, uint8_t* buf[3], int stride[3], FILE *fp)\n{\n    uint8_t *line;\n    int line_len, line_count, i, j, pitch;\n    for ( i = 0; i < 3; i++ ) {\n        line = buf[i];\n        pitch = stride[i];\n        line_len = (0 == i) ? w : (w / 2);\n        line_count = (0 == i) ? 
h : (h / 2);\n        for ( j = 0; j < line_count; j++ ) {\n            if ( fwrite(line, 1, line_len, fp) != line_len )\n                return -1;\n            line += pitch;\n        }\n    }\n    return 0;\n}\n\n- (int)test:(int) thread_num\n{\n    int64_t pts;\n    int ret;\n    int frame_count = 0;\n    lenthevcdec_frame out_frame;\n    \n    printf(\"%s\\n threads:%d\\n\", [moviePath UTF8String],thread_num);\n    \n    int compatibility = INT32_MAX;\n    if ([[moviePath pathExtension] isEqualToString:@\"hm91\"]) {\n        compatibility = 91;\n    } else if ([[moviePath pathExtension] isEqualToString:@\"hm10\"]) {\n        compatibility = 100;\n    }\n    \n    ctx = lenthevcdec_create(thread_num, compatibility, NULL);\n    if ( NULL == ctx ) {\n        fprintf(stderr, \"call lenthevcdec_create failed!\\n\");\n        return -1;\n    }\n    \n    printf(\"raw bitstream, compatibility: %s\\n\",\n           (91 == compatibility) ? \"HM9.1\" : ((100 == compatibility) ? \"HM10.0\" : \"Unknown(Last)\"));\n    \n    // read intput file\n    printf(\"read input file \");\n    fflush(stdout);\n    FILE *in_file = fopen([moviePath UTF8String], \"rb\");\n    if ( NULL == in_file ) {\n        fprintf(stderr, \" failed! can not open input file '%s'!\\n\",\n                [moviePath UTF8String]);\n        return -1;\n    }\n    \n    fseek(in_file, 0, SEEK_END);\n    au_buf_size = ftell(in_file);\n    fseek(in_file, 0, SEEK_SET);\n    printf(\"(%d bytes) ... \", au_buf_size);\n    if ( au_buf_size > AU_BUF_SIZE_MAX )\n        au_buf_size = AU_BUF_SIZE_MAX;\n    au_buf = (uint8_t*)malloc(au_buf_size);\n    if ( NULL == au_buf ) {\n        perror(\"allocate AU buffer\");\n        fclose(in_file);\n        return -1;\n    }\n    if ( fread(au_buf, 1, au_buf_size, in_file) != au_buf_size ) {\n        perror(\"read intput file failed\");\n        fclose(in_file);\n        return -1;\n    }\n    fclose(in_file);\n    printf(\"done. 
%d bytes read.\\n\", au_buf_size);\n    \n    // find all AUs\n    au_count = 0;\n    for (int i = 0; i < au_buf_size && au_count < (AU_COUNT_MAX - 1); i+=3 ) {\n        i += lent_hevc_get_frame(au_buf + i, au_buf_size - i, NULL);\n        if ( i < au_buf_size )\n            au_pos[au_count++] = i;\n    }\n    au_pos[au_count] = au_buf_size; // include last AU\n    printf(\"found %d AUs\\n\", au_count);\n    \n    /* open output file */\n    FILE *out_file = NULL;\n    NSString *out_file_string = [NSString stringWithFormat:@\"%@.%d.yuv\", moviePath, thread_num];\n    if ( NULL != out_file_string ) {\n        out_file = fopen([out_file_string UTF8String], \"wb\");\n        if ( NULL == out_file ) {\n            perror(\"open output file\");\n            return 6;\n        }\n    }\n    \n    \n    for (int i = 0; i < au_count; i++ ) {\n        pts = i * 40;\n        out_frame.got_frame = 0;\n        ret = lenthevcdec_decode_frame(ctx, au_buf + au_pos[i], au_pos[i + 1] - au_pos[i], pts, &out_frame);\n        if ( ret < 0 ) {\n            fprintf(stderr, \"lenthevcdec_decode_frame failed! 
ret=%d\\n\", ret);\n            return -1;\n        }\n        if ( out_frame.got_frame > 0 ) {\n            printf(\"decode frame %d, %dx%d, pts is %\" PRId64 \"\\n\",\n                   frame_count, out_frame.width, out_frame.height, out_frame.got_pts);\n            ret = write_pic_yv12(out_frame.width, out_frame.height, (UInt8 **)out_frame.pixels, out_frame.line_stride, out_file);\n            if ( ret < 0 ) {\n                perror(\"write output file\");\n                return 10;\n            }\n            \n            frame_count++;\n        }\n    }\n    \n    // flush decoder\n    while (1) {\n        out_frame.got_frame = 0;\n        ret = lenthevcdec_decode_frame(ctx, NULL, 0, pts, &out_frame);\n        if ( ret < 0 || out_frame.got_frame <= 0) {\n            break;\n        }\n        if ( out_frame.got_frame > 0 ) {\n            printf(\"decode frame %d, %dx%d, pts is %\" PRId64 \"\\n\",\n                   frame_count, out_frame.width, out_frame.height, out_frame.got_pts);\n            ret = write_pic_yv12(out_frame.width, out_frame.height, (UInt8 **)out_frame.pixels, out_frame.line_stride, out_file);\n            if ( ret < 0 ) {\n                perror(\"write output file\");\n                return 10;\n            }\n            \n            frame_count++;\n            \n        }\n    }\n    \n    printf(\"%d frame decoded\\n\",\n           frame_count);\n\n    \n    fclose(out_file);\n    free(au_buf);\n    au_buf = NULL;\n    lenthevcdec_destroy(ctx);\n    \n    return 0;\n}\n\n@end\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/MoviesViewController.h",
    "content": "//\n//  MoviesViewController.h\n//  HEVDecoder\n//\n//  Created by Shengbin Meng on 13-2-25.\n//  Copyright (c) 2013 Peking University. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n\n@interface MoviesViewController : UITableViewController\n\n-(id)initWithSuffix:(NSString *)suffix;\n\n@property (nonatomic, retain) NSMutableArray *movieList;\n\n@property (nonatomic, copy)void (^tableBlock)(NSString *fileName);\n\n@end\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/MoviesViewController.m",
    "content": "//\n//  MoviesViewController.m\n//  HEVDecoder\n//\n//  Created by Shengbin Meng on 13-2-25.\n//  Copyright (c) 2013 Peking University. All rights reserved.\n//\n\n#import \"MoviesViewController.h\"\n//#import \"PlayViewController.h\"\n//#import \"SettingsViewController.h\"\n//#import \"TestDecoderViewController.h\"\n\n@interface MoviesViewController (){\n    NSString * _suffix;\n}\n\n@end\n\n@implementation MoviesViewController\n\n- (id)initWithStyle:(UITableViewStyle)style\n{\n    self = [super initWithStyle:style];\n    if (self) {\n        // Custom initialization\n        self.title = @\"Movies\";\n    }\n    _tableBlock = nil;\n    return self;\n}\n\n-(id)initWithSuffix:(NSString *)suffix\n{\n    self = [super init];\n    _suffix = suffix;\n    return self;\n}\n\n-(id)init{\n    self = [super init];\n    _suffix = @\"\";\n    return self;\n}\n\n- (void)viewDidLoad\n{\n    [super viewDidLoad];\n    \n    if ([self.tableView respondsToSelector:@selector(registerClass:forCellReuseIdentifier:)]) {\n        // this is iOS 6.0 above\n        [self.tableView registerClass:[UITableViewCell class] forCellReuseIdentifier:@\"Cell\"];\n    }\n    \n    self.navigationItem.leftBarButtonItem = [[UIBarButtonItem alloc] initWithBarButtonSystemItem:UIBarButtonSystemItemCancel target:self action:@selector(backAction)];\n}\n\n- (void)backAction{\n    [self dismissViewControllerAnimated:FALSE completion:nil];\n}\n\n- (void) viewWillAppear:(BOOL)animated\n{\n    self.movieList = [[NSMutableArray alloc] init];\n    \n    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);\n    NSString *documentsDirectory = [paths objectAtIndex:0];\n    NSFileManager *manager = [NSFileManager defaultManager];\n    NSArray *fileList = [manager contentsOfDirectoryAtPath:documentsDirectory error:nil];\n    for (NSString *filename in fileList){\n        if([filename hasSuffix:_suffix]){\n            NSMutableDictionary *movie = 
[[NSMutableDictionary alloc] init];\n            [movie setObject:filename forKey:@\"Filename\"];\n            [movie setObject:[documentsDirectory stringByAppendingString:[@\"/\" stringByAppendingString:filename]] forKey:@\"Path\"];\n            [self.movieList addObject:movie];\n        }\n    }\n    \n    [self.tableView reloadData];\n}\n\n- (void)didReceiveMemoryWarning\n{\n    [super didReceiveMemoryWarning];\n    // Dispose of any resources that can be recreated.\n}\n\n#pragma mark - Table view data source\n\n- (NSInteger)numberOfSectionsInTableView:(UITableView *)tableView\n{\n    // Return the number of sections.\n    return 1;\n}\n\n- (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section\n{\n    // Return the number of rows in the section.\n    return self.movieList.count;\n}\n\n- (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath\n{\n    static NSString *CellIdentifier = @\"Cell\";\n    UITableViewCell *cell;\n    if ([tableView respondsToSelector:@selector(dequeueReusableCellWithIdentifier:forIndexPath:)]) {\n        cell = [tableView dequeueReusableCellWithIdentifier:CellIdentifier forIndexPath:indexPath];\n    } else {\n        cell = [tableView dequeueReusableCellWithIdentifier:CellIdentifier];\n        if (cell == nil) {\n            cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault reuseIdentifier:CellIdentifier];\n        }\n\n    }\n\n    \n    // Configure the cell...\n    NSUInteger row = [indexPath row];\n    NSDictionary *movie = [self.movieList objectAtIndex:row];\n    cell.textLabel.text = [movie objectForKey:@\"Filename\"];\n    \n    return cell;\n}\n\n// Override to support editing the table view.\n- (void)tableView:(UITableView *)tableView commitEditingStyle:(UITableViewCellEditingStyle)editingStyle forRowAtIndexPath:(NSIndexPath *)indexPath\n{\n    if (editingStyle == UITableViewCellEditingStyleDelete) {\n        // Delete the row 
from the data source\n        int index = [indexPath row];\n        NSFileManager *manager = [NSFileManager defaultManager];\n        [manager removeItemAtPath:[[self.movieList objectAtIndex:index] valueForKey:@\"Path\"] error:nil];\n        [self.movieList removeObjectAtIndex:index];\n        [tableView deleteRowsAtIndexPaths:@[indexPath] withRowAnimation:UITableViewRowAnimationFade];\n    }   \n    else if (editingStyle == UITableViewCellEditingStyleInsert) {\n        // Create a new instance of the appropriate class, insert it into the array, and add a new row to the table view\n    }   \n}\n\n#pragma mark - Table view delegate\n\n- (void)tableView:(UITableView *)  tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath\n{\n    NSInteger row = [indexPath row];\n    NSDictionary *movie = [self.movieList objectAtIndex:row];\n    [[NSUserDefaults standardUserDefaults] setValue:[movie objectForKey:@\"Path\"] forKey:@\"videoPath\"];\n    NSString * path = [movie objectForKey:@\"Filename\"];\n    [tableView deselectRowAtIndexPath:indexPath animated:NO];\n    [self dismissViewControllerAnimated:FALSE completion:nil];\n    if (_tableBlock){\n        _tableBlock(path);\n    }\n}\n\n@end\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/PlayViewController.h",
    "content": "//\n//  PlayViewController.h\n//  HEVDecoder\n//\n//  Created by Shengbin Meng on 13-2-25.\n//  Copyright (c) 2013 Peking University. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import \"MoviePlayer.h\"\n#import \"KSYMoviePlayer.h\"\n\n@interface PlayViewController : UIViewController\n\n@property (nonatomic, retain) IBOutlet UILabel *infoLabel;\n@property (nonatomic, retain) IBOutlet UIButton *doneButton;\n@property (nonatomic, retain) KSYMoviePlayer *player;\n- (IBAction)doneButtonPressed:(id)sender;\n\n@end\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/PlayViewController.m",
    "content": "//\n//  PlayViewController.m\n//  HEVDecoder\n//\n//  Created by Shengbin Meng on 13-2-25.\n//  Copyright (c) 2013 Peking University. All rights reserved.\n//\n\n#import \"PlayViewController.h\"\n#import \"GLView.h\"\n\n@implementation PlayViewController\n\n{\n    bool isPlaying;\n}\n\n- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil\n{\n    self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];\n    if (self) {\n        // Custom initialization\n    }\n    return self;\n}\n\n- (void) monitorPlaybackTime\n{\n    if (!isPlaying) {\n        return;\n    }\n\n    [self.infoLabel setText:self.player.infoString];\n    [self performSelector:@selector(monitorPlaybackTime) withObject:nil afterDelay:1.0];\n}\n\n- (void)viewDidLoad\n{\n    [super viewDidLoad];\n    \n    NSString *decoder = [[NSUserDefaults standardUserDefaults] valueForKey:@\"codec\"];\n    // Do any additional setup after loading the view from its nib.\n    if (self.player == nil) {\n        if ([decoder isEqualToString:@\"lenthevcdec\"]) {\n            self.player = [[MoviePlayer alloc] init];\n            self.player.infoString = @\"lenthevc decoding\";\n        }\n        else {\n            self.player = [[KSYMoviePlayer alloc] init];\n            self.player.infoString = @\"ksc265 decoding\";\n        }\n    }\n    \n    NSString * path = [[NSUserDefaults standardUserDefaults] valueForKey:@\"videoPath\"];\n    int ret = [self.player openMovie:path];\n    if(ret != 0) {\n        UIAlertView * alert = [[UIAlertView alloc] initWithTitle:@\"Message\" message:@\"Get movie data failed! 
Please check your source or try again.\" delegate:nil cancelButtonTitle:@\"OK\" otherButtonTitles:nil];\n        [alert show];\n        return ;\n    } else {\n        self.player.renderer = ((GLView*)self.view).renderer;\n        [self.player setOutputViews:nil:self.infoLabel];\n\n        int ret = [self.player play];\n        if(ret != 0) {\n            UIAlertView * alert = [[UIAlertView alloc] initWithTitle:@\"Message\" message:@\"Can't play this movie! Please check its format.\" delegate:nil cancelButtonTitle:@\"OK\" otherButtonTitles:nil];\n            [alert show];\n            return ;\n        }\n        isPlaying = YES;\n        [self monitorPlaybackTime];\n    }\n}\n\n- (void) viewWillAppear:(BOOL)animated\n{\n    [super viewWillAppear:animated];\n    [[self.navigationController navigationBar] setHidden:YES];\n}\n\n- (void) viewWillDisappear:(BOOL)animated\n{\n    [super viewWillDisappear:animated];\n    [[self.navigationController navigationBar] setHidden:NO];\n}\n\n- (void)didReceiveMemoryWarning\n{\n    [super didReceiveMemoryWarning];\n    // Dispose of any resources that can be recreated.\n}\n\n- (NSUInteger)supportedInterfaceOrientations\n{\n    return UIInterfaceOrientationMaskLandscapeRight;\n}\n\n-(BOOL)shouldAutorotate\n{\n    return YES;\n}\n\n- (UIInterfaceOrientation)preferredInterfaceOrientationForPresentation\n{\n    return UIInterfaceOrientationLandscapeRight;\n}\n\n\n- (IBAction)doneButtonPressed:(id)sender\n{\n    isPlaying = NO;\n    [self.player stop];\n    [[UIApplication sharedApplication] setIdleTimerDisabled:NO];\n    [self.navigationController popViewControllerAnimated:YES];\n}\n\n@end\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/PlayViewController.xib",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n<document type=\"com.apple.InterfaceBuilder3.CocoaTouch.XIB\" version=\"3.0\" toolsVersion=\"4514\" systemVersion=\"13A603\" targetRuntime=\"iOS.CocoaTouch\" propertyAccessControl=\"none\">\n    <dependencies>\n        <deployment defaultVersion=\"1536\" identifier=\"iOS\"/>\n        <plugIn identifier=\"com.apple.InterfaceBuilder.IBCocoaTouchPlugin\" version=\"3747\"/>\n    </dependencies>\n    <objects>\n        <placeholder placeholderIdentifier=\"IBFilesOwner\" id=\"-1\" userLabel=\"File's Owner\" customClass=\"PlayViewController\">\n            <connections>\n                <outlet property=\"doneButton\" destination=\"21\" id=\"31\"/>\n                <outlet property=\"infoLabel\" destination=\"5\" id=\"29\"/>\n                <outlet property=\"view\" destination=\"1\" id=\"3\"/>\n            </connections>\n        </placeholder>\n        <placeholder placeholderIdentifier=\"IBFirstResponder\" id=\"-2\" customClass=\"UIResponder\"/>\n        <view contentMode=\"scaleToFill\" id=\"1\" customClass=\"GLView\">\n            <rect key=\"frame\" x=\"0.0\" y=\"0.0\" width=\"480\" height=\"320\"/>\n            <autoresizingMask key=\"autoresizingMask\" widthSizable=\"YES\" flexibleMinY=\"YES\" heightSizable=\"YES\" flexibleMaxY=\"YES\"/>\n            <subviews>\n                <label opaque=\"NO\" clipsSubviews=\"YES\" userInteractionEnabled=\"NO\" contentMode=\"left\" textAlignment=\"center\" lineBreakMode=\"tailTruncation\" numberOfLines=\"2\" minimumFontSize=\"10\" id=\"5\">\n                    <rect key=\"frame\" x=\"71\" y=\"6\" width=\"339\" height=\"50\"/>\n                    <autoresizingMask key=\"autoresizingMask\" flexibleMinX=\"YES\" widthSizable=\"YES\" flexibleMaxX=\"YES\" flexibleMaxY=\"YES\"/>\n                    <string key=\"text\">size: 0x0 display fps: 0  decode fps: 0\n</string>\n                    <fontDescription key=\"fontDescription\" type=\"system\" 
pointSize=\"17\"/>\n                    <color key=\"textColor\" red=\"0.26317909569999998\" green=\"0.27905806509999997\" blue=\"1\" alpha=\"1\" colorSpace=\"calibratedRGB\"/>\n                    <nil key=\"highlightedColor\"/>\n                </label>\n                <button opaque=\"NO\" alpha=\"0.80000001192092896\" contentMode=\"scaleToFill\" contentHorizontalAlignment=\"center\" contentVerticalAlignment=\"center\" buttonType=\"roundedRect\" lineBreakMode=\"middleTruncation\" id=\"21\">\n                    <rect key=\"frame\" x=\"192\" y=\"277\" width=\"97\" height=\"30\"/>\n                    <autoresizingMask key=\"autoresizingMask\" flexibleMinX=\"YES\" widthSizable=\"YES\" flexibleMaxX=\"YES\" flexibleMinY=\"YES\"/>\n                    <fontDescription key=\"fontDescription\" type=\"boldSystem\" pointSize=\"12\"/>\n                    <state key=\"normal\" title=\"Done\">\n                        <color key=\"titleColor\" red=\"0.0\" green=\"0.47843137250000001\" blue=\"1\" alpha=\"1\" colorSpace=\"calibratedRGB\"/>\n                    </state>\n                    <connections>\n                        <action selector=\"doneButtonPressed:\" destination=\"-1\" eventType=\"touchUpInside\" id=\"33\"/>\n                    </connections>\n                </button>\n            </subviews>\n            <color key=\"backgroundColor\" white=\"0.0\" alpha=\"1\" colorSpace=\"calibratedWhite\"/>\n            <simulatedOrientationMetrics key=\"simulatedOrientationMetrics\" orientation=\"landscapeRight\"/>\n            <simulatedScreenMetrics key=\"simulatedDestinationMetrics\"/>\n        </view>\n    </objects>\n</document>"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/SecondViewController.h",
    "content": "//\n//  SecondViewController.h\n//  KSY265CodecDemo_iOS\n//\n//  Created by 江东 on 17/3/17.\n//  Copyright © 2017年 江东. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import \"BaseViewController.h\"\n\n@interface SecondViewController : BaseViewController\n\n@property (nonatomic, retain) NSMutableArray *movieList;\n\n@end\n\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/SecondViewController.m",
    "content": "//\n//  SecondViewController.m\n//  KSY265CodecDemo_iOS\n//\n//  Created by 江东 on 17/3/17.\n//  Copyright © 2017年 江东. All rights reserved.\n//\n\n#import \"SecondViewController.h\"\n#import \"SettingsDecoderViewController.h\"\n#import \"DecoderHelperViewController.h\"\n#import \"MoviesViewController.h\"\n#import \"MoviePlayer.h\"\n#import \"KSYMoviePlayer.h\"\n#import \"GLView.h\"\n#import \"qy265dec.h\"\n#include \"lenthevcdec.h\"\n\n@interface SecondViewController ()<UINavigationControllerDelegate, UIImagePickerControllerDelegate, UIAlertViewDelegate>\n{\n    UILabel  *lblDecoder;\n    UITextView *infoView;\n    UITextField *decoderFile;\n    UIButton *doneBtn;\n    UIButton *btnSet;\n    UIButton *btnHelp;\n    UIButton *selectBtn;\n    SettingsDecoderViewController *setDecoderVC;\n    MoviesViewController *listVC;\n    NSString *outputFlag;\n}\n@property (strong, nonatomic) IBOutlet UIView *playerView;\n@property (weak, nonatomic) IBOutlet GLView *playView;\n@property (nonatomic, retain) KSYMoviePlayer *player;\n\n@end\n\n@implementation SecondViewController\n\n- (void)viewDidLoad {\n    [super viewDidLoad];\n    [self setupUI];\n    self.player = nil;\n    setDecoderVC = [[SettingsDecoderViewController alloc] initDefaultCfg];\n    listVC = [[MoviesViewController alloc] initWithSuffix:@\".265\"];\n    \n    //__weak SecondViewController *weakself = self;\n    listVC.tableBlock = ^(NSString* filePath){\n        NSLog(@\"%@\", filePath);\n        decoderFile.text = filePath;\n    };\n\n}\n\n- (void)setupUI{\n    self.view.backgroundColor = [UIColor whiteColor];\n    \n    //add set button\n    btnSet = [self addButtonWithTitle:@\"设置\" action:@selector(onSetDecoder:)];\n    //add help button\n    btnHelp = [self addButtonWithTitle:@\"帮助\" action:@selector(onHelp:)];\n    //add decoder text\n    lblDecoder =  [self addLable:@\"KSC265解码器\"];\n    [self addViews:@[btnSet, lblDecoder, btnHelp] withFrame:CGRectMake(0, 40, self.view.frame.size.width, 
40)];\n    //add browse file button\n    selectBtn = [self addButtonWithTitle:@\"浏览(.265)文件\" action:@selector(didClickSelectBtn:)];\n    [self addViews:@[selectBtn] withFrame:CGRectMake(0, 120, self.view.frame.size.width/3, 40)];\n    //input decoder file\n    decoderFile = [self addTextField:NULL ];\n    doneBtn =  [self addButtonWithTitle:@\"确定\" action:@selector(onDone:)];\n    [self addViews2:@[decoderFile,doneBtn] withFrame:CGRectMake(0, 180, self.view.frame.size.width, 40)];\n    _playView.frame = CGRectMake(0, 240, self.view.frame.size.width, self.view.frame.size.height/4);\n    // info\n    infoView = [[UITextView alloc] init];\n    infoView.editable = NO;\n    infoView.textAlignment = NSTextAlignmentLeft;\n    infoView.backgroundColor = [UIColor colorWithWhite:0.8 alpha:0.3];\n    infoView.font = [UIFont systemFontOfSize:13];\n    infoView.layer.cornerRadius = 2;\n    infoView.clipsToBounds = YES;\n    infoView.layoutManager.allowsNonContiguousLayout = NO;\n    [self addViews:@[infoView] withFrame:CGRectMake(0,  self.view.frame.size.height/4 + 280, self.view.frame.size.width, self.view.frame.size.height- (self.view.frame.size.height/4 + 280) - 20)];\n}\n\n- (void) monitorPlaybackTime\n{\n    if (self.player.decodeEnd) {\n        [self stopPlay];\n        return;\n    }\n    \n    [self performSelector:@selector(monitorPlaybackTime) withObject:nil afterDelay:1.0];\n}\n\n- (void)startPlay:(NSString *) filePath\n{\n    NSString *decoder = [[NSUserDefaults standardUserDefaults] valueForKey:@\"codec\"];\n    if (self.player == nil) {\n        if ([decoder isEqualToString:@\"lenthevcdec\"]) {\n            self.player = [[MoviePlayer alloc] init];\n            NSString* string = [NSString stringWithFormat:@\"%d\" , lenthevcdec_version()];\n            [[NSUserDefaults standardUserDefaults] setValue:string forKey:@\"version\"];\n        }\n        else {\n            self.player = [[KSYMoviePlayer alloc] init];\n            NSString* string = [NSString 
stringWithFormat:@\"%s\" , strLibQy265Version];\n            [[NSUserDefaults standardUserDefaults] setValue:string forKey:@\"version\"];\n        }\n    }\n    \n    int ret = [self.player openMovie:filePath];\n    if(ret != 0) {\n        UIAlertView * alert = [[UIAlertView alloc] initWithTitle:@\"Message\" message:@\"Get movie data failed! Please check your source or try again.\" delegate:nil cancelButtonTitle:@\"OK\" otherButtonTitles:nil];\n        [alert show];\n        doneBtn.enabled = YES;\n        btnSet.enabled = YES;\n        selectBtn.enabled = YES;\n        return ;\n    } else {\n        NSString *fps = [[NSUserDefaults standardUserDefaults] valueForKey:@\"renderFPS\"];\n        if ([fps isEqualToString:@\"-1 (off)\"]) {\n            self.playView.hidden = YES;\n        }\n        else{\n            self.playView.hidden = NO;\n            [_playView.renderer resizeFromLayer:(CAEAGLLayer*)self.playView.layer];\n        }\n        self.player.renderer = _playView.renderer;\n\n        int ret = [self.player play];\n        if(ret != 0) {\n            UIAlertView * alert = [[UIAlertView alloc] initWithTitle:@\"Message\" message:@\"Can't play this movie! 
Please check its format.\" delegate:nil cancelButtonTitle:@\"OK\" otherButtonTitles:nil];\n            [alert show];\n            doneBtn.enabled = YES;\n            btnSet.enabled = YES;\n            selectBtn.enabled = YES;\n            return ;\n        }\n        [self monitorPlaybackTime];\n    }\n}\n\n- (void)stopPlay{\n    NSString *decoder = [[NSUserDefaults standardUserDefaults] valueForKey:@\"codec\"];\n    NSString *threadNum = [[NSUserDefaults standardUserDefaults] valueForKey:@\"threadNum\"];\n    NSString *renderFPS = [[NSUserDefaults standardUserDefaults] valueForKey:@\"renderFPS\"];\n    NSString *version = [[NSUserDefaults standardUserDefaults] valueForKey:@\"version\"];\n    \n    NSUInteger threads = [threadNum intValue];\n    if (self.player.out_file_string){\n        infoView.text = [NSString stringWithFormat:@\"%@\\n解码器版本:%@\\n解码参数:%@ -b %@ -o %@ -threads %ld\\n\\n分辨率:%@\\n渲染帧率:%@\\n线程数:%@\\n解码时间:%.2lf s\\n解码帧数:%ld\\n解码速度:%.2lf f/s\\n\\n\",\n                         infoView.text,\n                         version,\n                         decoder,\n                         decoderFile.text,\n                         [self.player.out_file_string lastPathComponent],\n                         threads,\n                         NSStringFromCGSize(CGSizeMake(self.player.width, self.player.height)),\n                         renderFPS,\n                         threadNum,\n                         self.player.real_time,\n                         self.player.frameNum,\n                         self.player.realFPS];\n        \n    }else{\n        infoView.text = [NSString stringWithFormat:@\"%@\\n解码器版本:%@\\n解码参数:%@ -b %@ -threads %ld\\n\\n分辨率:%@\\n渲染帧率:%@\\n线程数:%@\\n解码时间:%.2lf s\\n解码帧数:%ld\\n解码速度:%.2lf f/s\\n\\n\",\n                         infoView.text,\n                         version,\n                         decoder,\n                         decoderFile.text,\n                         threads,\n                         
NSStringFromCGSize(CGSizeMake(self.player.width, self.player.height)),\n                         renderFPS,\n                         threadNum,\n                         self.player.real_time,\n                         self.player.frameNum,\n                         self.player.realFPS];\n    }\n\n    [infoView scrollRangeToVisible:NSMakeRange(infoView.text.length, 1)];\n    [self.player stop];\n    self.player = nil;\n    doneBtn.enabled = YES;\n    btnSet.enabled = YES;\n    selectBtn.enabled = YES;\n}\n\n#pragma mark - actions\n- (void)onSetDecoder:(UIButton *)btn {\n    [self presentViewController:setDecoderVC animated:true completion:nil];\n}\n- (void)onHelp:(UIButton *)btn {\n    DecoderHelperViewController *decoderHelperVC = [[DecoderHelperViewController alloc] init];\n    [self presentViewController:decoderHelperVC animated:true completion:nil];\n}\n- (void)didClickSelectBtn:(UIButton *)send{\n    UINavigationController *naVC = [[UINavigationController alloc]initWithRootViewController: listVC];\n    [self presentViewController:naVC animated:YES completion:nil];\n}\n- (void)onDone:(UIButton *)btn {\n    btn.enabled = NO;\n    btnSet.enabled = NO;\n    selectBtn.enabled = NO;\n    [decoderFile resignFirstResponder];\n    NSString *dir = [NSHomeDirectory() stringByAppendingString:@\"/Documents/\"];\n    NSString *decFile = [dir stringByAppendingPathComponent:decoderFile.text];\n    [self startPlay:decFile];\n}\n\n- (void)didReceiveMemoryWarning {\n    [super didReceiveMemoryWarning];\n    // Dispose of any resources that can be recreated.\n}\n\n\n@end\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/SettingsDecoderViewController.h",
    "content": "//\n//  SettingsViewController.h\n//  IPGateway\n//\n//  Created by Meng Shengbin on 2/1/12.\n//  Copyright (c) 2012 Peking University. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import \"BaseViewController.h\"\n\n@interface SettingsDecoderViewController : BaseViewController\n\n@property UILabel            *lblVideoDecoderUI;\n@property UILabel            *lblDecoderThreadNumUI;\n@property UILabel            *lblRenderFpsUI;\n@property UILabel            *lblOutputFlagUI;\n@property UISegmentedControl *videoDecoderUI; //\n@property UISegmentedControl *decoderThreadNumUI; //\n@property UISegmentedControl *renderFpsUI; //\n@property UISegmentedControl *outputFlagUI; //\n\n//默认解码器配置\n- (id)initDefaultCfg;\n\n@end\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/SettingsDecoderViewController.m",
    "content": "//\n//  SettingsViewController.m\n//  IPGateway\n//\n//  Created by Meng Shengbin on 2/1/12.\n//  Copyright (c) 2012 Peking University. All rights reserved.\n//\n\n#import \"SettingsDecoderViewController.h\"\n\n@implementation SettingsDecoderViewController {\n    //Say you have an array of strings you want to present in the pickerview like this\n    UILabel  *lblSetting;\n    UIButton *btnDone;\n    NSArray *arrayOfStrings;\n    NSArray *arrayOfStringsFPS;\n    NSArray *arrayOfStringsOutputFlag;\n    NSArray *decStrings;\n}\n\n- (id)initDefaultCfg {\n    self = [super init];\n    decStrings = [NSArray arrayWithObjects:@\"ksc265dec\", @\"lenthevcdec\", nil];\n    arrayOfStrings = [NSArray arrayWithObjects:@\"0 (auto)\", @\"1\", @\"2\", @\"4\", nil];\n    arrayOfStringsFPS = [NSArray arrayWithObjects:@\"0 (full speed)\", @\"24\", @\"-1 (off)\", nil];\n    arrayOfStringsOutputFlag = [NSArray arrayWithObjects:@\"NO\", @\"YES\", nil];\n    [[NSUserDefaults standardUserDefaults] setValue:[decStrings objectAtIndex:0] forKey:@\"codec\"];\n    [[NSUserDefaults standardUserDefaults] setValue:[arrayOfStrings objectAtIndex:0] forKey:@\"threadNum\"];\n    [[NSUserDefaults standardUserDefaults] setValue:[arrayOfStringsFPS objectAtIndex:0] forKey:@\"renderFPS\"];\n    [[NSUserDefaults standardUserDefaults] setValue:[arrayOfStringsOutputFlag objectAtIndex:0] forKey:@\"outputFlag\"];\n    \n    return self;\n}\n\n- (void)viewDidLoad {\n    [super viewDidLoad];\n    [self setupUI];\n}\n\n- (void)setupUI{\n    self.view.backgroundColor = [UIColor whiteColor];\n\n    //add set title text\n    lblSetting =  [self addLable:@\"设置\"];\n    [self addViews:@[lblSetting] withFrame:CGRectMake(self.view.frame.size.width/3, 40, self.view.frame.size.width/3, 40)];\n    \n    //decoder\n    _lblVideoDecoderUI = [self addLable:@\"视频解码器\"];\n    _videoDecoderUI = [self addSegCtrlWithItems:decStrings];\n    [self addViews3:@[_lblVideoDecoderUI, _videoDecoderUI] 
withFrame:CGRectMake(0, 120, self.view.frame.size.width, 40)];\n\n    //decoder threads\n    _lblDecoderThreadNumUI = [self addLable:@\"解码线程数\"];\n    _decoderThreadNumUI = [self addSegCtrlWithItems:arrayOfStrings];\n    [self addViews3:@[_lblDecoderThreadNumUI, _decoderThreadNumUI] withFrame:CGRectMake(0, 200, self.view.frame.size.width, 40)];\n\n    //render fps\n    _lblRenderFpsUI = [self addLable:@\"渲染帧率\"];\n    _renderFpsUI = [self addSegCtrlWithItems:arrayOfStringsFPS];\n    [self addViews3:@[_lblRenderFpsUI, _renderFpsUI] withFrame:CGRectMake(0, 280, self.view.frame.size.width, 40)];\n    \n    //output yuv settings\n    _lblOutputFlagUI = [self addLable:@\"输出yuv\"];\n    _outputFlagUI = [self addSegCtrlWithItems:arrayOfStringsOutputFlag];\n    [self addViews3:@[_lblOutputFlagUI, _outputFlagUI] withFrame:CGRectMake(0, 360, self.view.frame.size.width, 40)];\n    \n    //add done button\n    btnDone = [self addButtonWithTitle:@\"确定\" action:@selector(onDone:)];\n    [self addViews:@[btnDone] withFrame:CGRectMake(self.view.frame.size.width*2/3, 440, self.view.frame.size.width/3, 40)];\n}\n\n#pragma mark - actions\n- (void)onDone:(UIButton *)btn {\n    [[NSUserDefaults standardUserDefaults] setValue:[decStrings objectAtIndex:_videoDecoderUI.selectedSegmentIndex] forKey:@\"codec\"];\n    [[NSUserDefaults standardUserDefaults] setValue:[arrayOfStrings objectAtIndex:_decoderThreadNumUI.selectedSegmentIndex] forKey:@\"threadNum\"];\n    [[NSUserDefaults standardUserDefaults] setValue:[arrayOfStringsFPS objectAtIndex:_renderFpsUI.selectedSegmentIndex] forKey:@\"renderFPS\"];\n    [[NSUserDefaults standardUserDefaults] setValue:[arrayOfStringsOutputFlag objectAtIndex:_outputFlagUI.selectedSegmentIndex] forKey:@\"outputFlag\"];\n    \n    /*\n    NSString *decoder = [[NSUserDefaults standardUserDefaults] valueForKey:@\"codec\"];\n    NSString *threadNum = [[NSUserDefaults standardUserDefaults] valueForKey:@\"threadNum\"];\n    NSString *renderFPS = [[NSUserDefaults 
standardUserDefaults] valueForKey:@\"renderFPS\"];\n    \n    NSLog(@\"set cfg:\\n codec %@, threadNum %@, renderFPS %@\", decoder, threadNum, renderFPS);\n     */\n    \n    [self dismissViewControllerAnimated:FALSE completion:nil];\n}\n\n#pragma mark - tool funcs\n- (UISegmentedControl *)addSegCtrlWithItems: (NSArray *) items {\n    UISegmentedControl * segC;\n    segC = [[UISegmentedControl alloc] initWithItems:items];\n    segC.selectedSegmentIndex = 0;\n    segC.layer.cornerRadius = 5;\n    segC.backgroundColor = [UIColor lightGrayColor];\n    [self.view addSubview:segC];\n    return segC;\n}\n\n\n- (void)didReceiveMemoryWarning {\n    [super didReceiveMemoryWarning];\n    // Dispose of any resources that can be recreated.\n}\n\n@end\n\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/SettingsEncoderViewController.h",
    "content": "//\n//  SettingsViewController.h\n//  IPGateway\n//\n//  Created by Meng Shengbin on 2/1/12.\n//  Copyright (c) 2012 Peking University. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import \"BaseViewController.h\"\n#import \"AYHCustomComboBox.h\"\n\n@interface SettingsEncoderViewController : BaseViewController<AYHCustomComboBoxDelegate,UIGestureRecognizerDelegate,UITextFieldDelegate>\n\n@property UILabel            *lblVideoEncoderUI;\n@property UILabel            *lblEncoderProfileUI;\n@property UILabel            *lblEncoderDelayedUI;\n@property UILabel            *lblResolutionUI;\n@property UILabel            *lblFpsUI;\n@property UILabel            *lblBitRateUI;\n@property UILabel            *lblTheadNumUI;\n@property UISegmentedControl *videoEncoderUI; //\n@property UISegmentedControl *encoderDelayedUI; //\n@property UITextField *fps;\n@property UITextField *theadNum;\n@property UITextField *bitRate;\n@property UITextField *resolutionText;\n@property UIButton * resolutionButton;\n@property AYHCustomComboBox* resolutionComboBox;\n@property UIButton * profileButton;\n@property AYHCustomComboBox* profileComboBox;\n\n//默认编码器配置\n- (id)initDefaultCfg;\n\n@end\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/SettingsEncoderViewController.m",
    "content": "//\n//  SettingsViewController.m\n//  IPGateway\n//\n//  Created by Meng Shengbin on 2/1/12.\n//  Copyright (c) 2012 Peking University. All rights reserved.\n//\n\n#import \"SettingsEncoderViewController.h\"\n\n#define KINTEVAL 30\n\n@implementation SettingsEncoderViewController {\n    NSArray *arrayOfStringsEnc;\n    NSArray *arrayOfStringsProfile;\n    NSArray *arrayOfStringsDelay;\n    BOOL isVisibleResolution;\n    BOOL isVisibleProfile;\n}\n\n- (id)initDefaultCfg {\n    self = [super init];\n    arrayOfStringsEnc = [NSArray arrayWithObjects:@\"ksc265enc\", @\"x264\", nil];\n    arrayOfStringsProfile = [NSArray arrayWithObjects:@\"superfast\",@\"veryfast\",@\"fast\",@\"medium\",@\"slow\",@\"veryslow\",@\"placebo\", nil];\n    arrayOfStringsDelay = [NSArray arrayWithObjects:@\"zerolatency\",@\"livestreaming\",@\"offline\", nil];\n    [[NSUserDefaults standardUserDefaults] setValue:[arrayOfStringsEnc objectAtIndex:0] forKey:@\"encoder\"];\n    [[NSUserDefaults standardUserDefaults] setValue:@\"1280*720\" forKey:@\"resolution\"];\n    [[NSUserDefaults standardUserDefaults] setValue:@\"15\" forKey:@\"fps\"];\n    [[NSUserDefaults standardUserDefaults] setValue:@\"0\" forKey:@\"threads\"];\n    [[NSUserDefaults standardUserDefaults] setValue:[arrayOfStringsProfile objectAtIndex:1] forKey:@\"profile\"];\n    [[NSUserDefaults standardUserDefaults] setValue:[arrayOfStringsDelay objectAtIndex:2] forKey:@\"delayed\"];\n    \n    isVisibleResolution = NO;\n    isVisibleProfile = NO;\n    return self;\n}\n\n- (void)viewDidLoad {\n    [super viewDidLoad];\n    \n    [self setupUI];\n}\n\n- (void)setupUI{\n    self.view.backgroundColor = [UIColor whiteColor];\n\n    //add set title text\n    UILabel  *lblSetting =  [self addLable:@\"设置\"];\n    [self addViews:@[lblSetting] withFrame:CGRectMake(self.view.frame.size.width/3, KINTEVAL, self.view.frame.size.width/3, KINTEVAL)];\n    //encoder\n    _lblVideoEncoderUI = [self addLable:@\"视频编码器\"];\n    
_videoEncoderUI = [self addSegCtrlWithItems:arrayOfStringsEnc];\n    [self addViews3:@[_lblVideoEncoderUI, _videoEncoderUI] withFrame:CGRectMake(0, KINTEVAL*3, self.view.frame.size.width, KINTEVAL)];\n    //Resolution\n    _lblResolutionUI = [self addLable:@\"分辨率\"];\n    _resolutionButton = [self addButtonWithTitle:@\"1280*720\" action:@selector(buttonclick:)];\n    _resolutionButton.tag = 100;\n    [self addViews3:@[_lblResolutionUI, _resolutionButton] withFrame:CGRectMake(0, KINTEVAL*5, self.view.frame.size.width, KINTEVAL)];\n    //Resolution 自定义\n    _resolutionText = [self addTextField:@\"\"];\n    _resolutionText.delegate = self;\n    [_resolutionText removeFromSuperview];\n\n    _resolutionComboBox = [[AYHCustomComboBox alloc] initWithFrame:CGRectMake(_resolutionButton.frame.origin.x, _resolutionButton.frame.origin.y+_resolutionButton.frame.size.height, _resolutionButton.frame.size.width, 100) DataCount:4 NotificationName:@\"AYHComboBoxNationChanged\"];\n    [_resolutionComboBox setTag:200];\n    [_resolutionComboBox setDelegate:self];\n    [_resolutionComboBox addItemsData: [[NSArray alloc] initWithObjects:@\"1280*720\",@\"960*540\",@\"640*360\",@\"640*480\",@\"自定义\",nil]];\n    [_resolutionComboBox flushData];\n    \n    //fps\n    _lblFpsUI = [self addLable:@\"帧率\"];\n    _fps = [self addTextField:@\"15\"];\n    [self addViews3:@[_lblFpsUI, _fps] withFrame:CGRectMake(0, KINTEVAL*7, self.view.frame.size.width, KINTEVAL)];\n    \n    //bitrate\n    _lblBitRateUI = [self addLable:@\"码率(kbps)\"];\n    _bitRate =[self addTextField:@\"800\"];\n    [self addViews3:@[_lblBitRateUI, _bitRate] withFrame:CGRectMake(0, KINTEVAL*9, self.view.frame.size.width, KINTEVAL)];\n\n    //encoder threads\n    _lblTheadNumUI = [self addLable:@\"编码线程\"];\n    _theadNum = [self addTextField:@\"0\" ];\n    [self addViews3:@[_lblTheadNumUI, _theadNum] withFrame:CGRectMake(0, KINTEVAL*11, self.view.frame.size.width, KINTEVAL)];\n    //encoder profile\n    _lblEncoderProfileUI = 
[self addLable:@\"编码档次\"];\n    _profileButton = [self addButtonWithTitle:@\"veryfast\" action:@selector(buttonclick:)];\n    _profileButton.tag = 101;\n    [self addViews3:@[_lblEncoderProfileUI, _profileButton] withFrame:CGRectMake(0, KINTEVAL*13, self.view.frame.size.width, KINTEVAL)];\n    \n    _profileComboBox = [[AYHCustomComboBox alloc] initWithFrame:CGRectMake(_profileButton.frame.origin.x, _profileButton.frame.origin.y+_profileButton.frame.size.height, _profileButton.frame.size.width, 100) DataCount:4 NotificationName:@\"AYHComboBoxNationChanged\"];\n    [_profileComboBox setTag:201];\n    [_profileComboBox setDelegate:self];\n    [_profileComboBox addItemsData:arrayOfStringsProfile];\n    [_profileComboBox flushData];\n    \n    //encoder delayed\n    _lblEncoderDelayedUI = [self addLable:@\"延时\"];\n    _encoderDelayedUI = [self addSegCtrlWithItems:arrayOfStringsDelay];\n    _encoderDelayedUI.selectedSegmentIndex = 2;\n\n    [self addViews3:@[_lblEncoderDelayedUI, _encoderDelayedUI] withFrame:CGRectMake(0, KINTEVAL*15, self.view.frame.size.width, KINTEVAL)];\n    //add done button\n    UIButton *btnDone = [self addButtonWithTitle:@\"确定\" action:@selector(onDone:)];\n    [self addViews:@[btnDone] withFrame:CGRectMake(self.view.frame.size.width*2/3, KINTEVAL*17, self.view.frame.size.width/3, KINTEVAL)];\n    \n    UITapGestureRecognizer *tapGes = [[UITapGestureRecognizer alloc] initWithTarget:self.view action:@selector(endEditing:)];\n    tapGes.delegate = self;\n    [self.view addGestureRecognizer:tapGes];\n}\n\n#pragma mark AYHCustomComboBoxDelegate\n- (void) CustomComboBoxChanged:(id) sender SelectedItem:(NSString *)selectedItem\n{\n    AYHCustomComboBox* ccb = (AYHCustomComboBox*) sender;\n    if ([ccb tag]==200)\n    {\n        if([selectedItem isEqualToString:@\"自定义\"]){\n            [_resolutionButton removeFromSuperview];\n            [_resolutionComboBox removeFromSuperview];\n            [self addViews3:@[_lblResolutionUI, _resolutionText] 
withFrame:CGRectMake(0, KINTEVAL*5, self.view.frame.size.width, KINTEVAL)];\n        }else{\n            [_resolutionButton setTitle:selectedItem forState:UIControlStateNormal];\n            [_resolutionComboBox removeFromSuperview];\n        }\n        isVisibleResolution = NO;\n    }\n    else if([ccb tag]==201)\n    {\n        [_profileButton setTitle:selectedItem forState:UIControlStateNormal];\n        [_profileComboBox removeFromSuperview];\n        isVisibleProfile = NO;\n    }\n}\n\n#pragma mark - actions\n- (void)onDone:(UIButton *)btn {\n    [[NSUserDefaults standardUserDefaults] setValue:[arrayOfStringsEnc objectAtIndex:_videoEncoderUI.selectedSegmentIndex] forKey:@\"encoder\"];\n    [[NSUserDefaults standardUserDefaults] setValue:_fps.text forKey:@\"fps\"];\n    [[NSUserDefaults standardUserDefaults] setValue:_theadNum.text forKey:@\"threads\"];\n    [[NSUserDefaults standardUserDefaults] setValue:_bitRate.text forKey:@\"bitRate\"];\n    [[NSUserDefaults standardUserDefaults] setValue:_profileButton.titleLabel.text forKey:@\"profile\"];\n    [[NSUserDefaults standardUserDefaults] setValue:[arrayOfStringsDelay objectAtIndex:_encoderDelayedUI.selectedSegmentIndex] forKey:@\"delayed\"];\n\n    if([self.view.subviews containsObject:_resolutionText])\n    {\n        [_resolutionText removeFromSuperview];\n        [self addViews3:@[_lblResolutionUI, _resolutionButton] withFrame:CGRectMake(0, KINTEVAL*5, self.view.frame.size.width, KINTEVAL)];\n        if(_resolutionText.text.length)\n        {\n            [[NSUserDefaults standardUserDefaults] setValue:_resolutionText.text forKey:@\"resolution\"];\n        }\n    }\n    else{\n        [[NSUserDefaults standardUserDefaults] setValue:_resolutionButton.titleLabel.text forKey:@\"resolution\"];\n    }\n\n    [self dismissViewControllerAnimated:FALSE completion:nil];\n}\n\n-(void)buttonclick:(UIButton *)sender {\n    UIButton* button = (UIButton*) sender;\n    if ([button tag]==100)\n    {\n        if 
(isVisibleResolution==NO)\n        {\n            [self.view addSubview:_resolutionComboBox];\n            isVisibleResolution = YES;\n        }\n    }\n    else if ([button tag]==101)\n    {\n        if (isVisibleProfile==NO)\n        {\n            [self.view addSubview:_profileComboBox];\n            isVisibleProfile = YES;\n        }\n    }\n}\n\n#pragma mark - tool funcs\n- (UISegmentedControl *)addSegCtrlWithItems: (NSArray *) items {\n    UISegmentedControl * segC;\n    segC = [[UISegmentedControl alloc] initWithItems:items];\n    segC.selectedSegmentIndex = 0;\n    segC.layer.cornerRadius = 5;\n    segC.backgroundColor = [UIColor lightGrayColor];\n    [self.view addSubview:segC];\n    return segC;\n}\n\n\n- (void)didReceiveMemoryWarning {\n    [super didReceiveMemoryWarning];\n    // Dispose of any resources that can be recreated.\n}\n\n- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer shouldReceiveTouch:(UITouch *)touch\n{\n    if ([NSStringFromClass([touch.view class]) isEqualToString:@\"UITableViewCellContentView\"]) {\n        return NO;\n    }\n    return  YES;\n}\n\n- (void)textFieldDidEndEditing:(UITextField *)textField\n{\n    if(textField == _resolutionText)\n    {\n        [[NSUserDefaults standardUserDefaults] setValue:_resolutionText.text forKey:@\"resolution\"];\n    }\n}\n\n@end\n\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS/main.m",
    "content": "//\n//  main.m\n//  KSY265CodecDemo_iOS\n//\n//  Created by 江东 on 17/3/17.\n//  Copyright © 2017年 江东. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import \"AppDelegate.h\"\n\nint main(int argc, char * argv[]) {\n    @autoreleasepool {\n        return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));\n    }\n}\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS.xcodeproj/project.pbxproj",
    "content": "// !$*UTF8*$!\n{\n\tarchiveVersion = 1;\n\tclasses = {\n\t};\n\tobjectVersion = 46;\n\tobjects = {\n\n/* Begin PBXBuildFile section */\n\t\t055AEDF41E7BD4280006FE5D /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 055AEDF31E7BD4280006FE5D /* main.m */; };\n\t\t055AEDF71E7BD4280006FE5D /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 055AEDF61E7BD4280006FE5D /* AppDelegate.m */; };\n\t\t055AEDFA1E7BD4280006FE5D /* FirstViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 055AEDF91E7BD4280006FE5D /* FirstViewController.m */; };\n\t\t055AEDFD1E7BD4280006FE5D /* SecondViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 055AEDFC1E7BD4280006FE5D /* SecondViewController.m */; };\n\t\t055AEE001E7BD4280006FE5D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 055AEDFE1E7BD4280006FE5D /* Main.storyboard */; };\n\t\t055AEE021E7BD4280006FE5D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 055AEE011E7BD4280006FE5D /* Assets.xcassets */; };\n\t\t055AEE051E7BD4280006FE5D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 055AEE031E7BD4280006FE5D /* LaunchScreen.storyboard */; };\n\t\t055AEE2D1E7BE7350006FE5D /* BaseViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 055AEE2C1E7BE7350006FE5D /* BaseViewController.m */; };\n\t\t058715AE1E836C3C008D8860 /* libx264.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 058715AD1E836C3C008D8860 /* libx264.a */; };\n\t\t058715B11E836FE8008D8860 /* MovieEncoder.m in Sources */ = {isa = PBXBuildFile; fileRef = 058715B01E836FE8008D8860 /* MovieEncoder.m */; };\n\t\t058715B61E83BDB9008D8860 /* libqydecoder.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 058715B31E83BDB9008D8860 /* libqydecoder.a */; };\n\t\t058715B71E83BDB9008D8860 /* libqyencoder.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 058715B41E83BDB9008D8860 /* libqyencoder.a */; };\n\t\t058715BA1E83BEED008D8860 /* KSYMovieEncoder.m in 
Sources */ = {isa = PBXBuildFile; fileRef = 058715B91E83BEED008D8860 /* KSYMovieEncoder.m */; };\n\t\t05C45C861E8E02C60045FE79 /* libqycommon.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 05C45C851E8E02C60045FE79 /* libqycommon.a */; };\n\t\t05C45C8C1E8E06A10045FE79 /* libz.tbd in Frameworks */ = {isa = PBXBuildFile; fileRef = 05C45C8B1E8E06A10045FE79 /* libz.tbd */; };\n\t\t05F746781E7E78820076E6EB /* SettingsDecoderViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 05F746751E7E78820076E6EB /* SettingsDecoderViewController.m */; };\n\t\t05F746791E7E78820076E6EB /* SettingsEncoderViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 05F746771E7E78820076E6EB /* SettingsEncoderViewController.m */; };\n\t\t05F7467C1E7E81E60076E6EB /* EncoderHelperViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 05F7467B1E7E81E60076E6EB /* EncoderHelperViewController.m */; };\n\t\t05F7467F1E7E87D10076E6EB /* DecoderHelperViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 05F7467E1E7E87D10076E6EB /* DecoderHelperViewController.m */; };\n\t\t05F746851E7FB53C0076E6EB /* MoviesViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 05F746841E7FB53C0076E6EB /* MoviesViewController.m */; };\n\t\t05F7468B1E7FBDBA0076E6EB /* KSYMoviePlayer.m in Sources */ = {isa = PBXBuildFile; fileRef = 05F7468A1E7FBDBA0076E6EB /* KSYMoviePlayer.m */; };\n\t\t05F7468E1E7FBDF70076E6EB /* MoviePlayer.m in Sources */ = {isa = PBXBuildFile; fileRef = 05F7468D1E7FBDF70076E6EB /* MoviePlayer.m */; };\n\t\t05F746971E7FBE8A0076E6EB /* OpenGLES.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 05F746961E7FBE8A0076E6EB /* OpenGLES.framework */; };\n\t\t05F746991E7FBECB0076E6EB /* liblenthevcdec.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 05F746981E7FBECB0076E6EB /* liblenthevcdec.a */; };\n\t\t05F7469B1E7FBF230076E6EB /* QuartzCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 05F7469A1E7FBF230076E6EB /* 
QuartzCore.framework */; };\n\t\t05F746A91E7FC1520076E6EB /* UIKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 05F746A81E7FC1520076E6EB /* UIKit.framework */; };\n\t\t05F746AB1E7FC15B0076E6EB /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 05F746AA1E7FC15B0076E6EB /* Foundation.framework */; };\n\t\t05F746AD1E7FC1680076E6EB /* CoreGraphics.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 05F746AC1E7FC1680076E6EB /* CoreGraphics.framework */; };\n\t\t05F746BE1E7FCC890076E6EB /* GLRenderer.m in Sources */ = {isa = PBXBuildFile; fileRef = 05F746BB1E7FCC890076E6EB /* GLRenderer.m */; };\n\t\t05F746BF1E7FCC890076E6EB /* GLView.m in Sources */ = {isa = PBXBuildFile; fileRef = 05F746BD1E7FCC890076E6EB /* GLView.m */; };\n\t\t05F746C11E7FCDFE0076E6EB /* libc++.tbd in Frameworks */ = {isa = PBXBuildFile; fileRef = 05F746C01E7FCDFE0076E6EB /* libc++.tbd */; };\n\t\tF28F15871E88EB6600E1A739 /* 1280x720_15.yuv in Resources */ = {isa = PBXBuildFile; fileRef = F28F15851E88EB6600E1A739 /* 1280x720_15.yuv */; };\n\t\tF28F15881E88EB6600E1A739 /* 640x480_15.yuv in Resources */ = {isa = PBXBuildFile; fileRef = F28F15861E88EB6600E1A739 /* 640x480_15.yuv */; };\n\t\tF28F158A1E88EFF700E1A739 /* 960x540_15.yuv in Resources */ = {isa = PBXBuildFile; fileRef = F28F15891E88EFF700E1A739 /* 960x540_15.yuv */; };\n\t\tF28F159E1E890F7B00E1A739 /* AYHCustomComboBox.m in Sources */ = {isa = PBXBuildFile; fileRef = F28F159C1E890F7B00E1A739 /* AYHCustomComboBox.m */; };\n/* End PBXBuildFile section */\n\n/* Begin PBXContainerItemProxy section */\n\t\t055AEE0C1E7BD4280006FE5D /* PBXContainerItemProxy */ = {\n\t\t\tisa = PBXContainerItemProxy;\n\t\t\tcontainerPortal = 055AEDE71E7BD4280006FE5D /* Project object */;\n\t\t\tproxyType = 1;\n\t\t\tremoteGlobalIDString = 055AEDEE1E7BD4280006FE5D;\n\t\t\tremoteInfo = KSY265CodecDemo_iOS;\n\t\t};\n\t\t055AEE171E7BD4290006FE5D /* PBXContainerItemProxy */ = {\n\t\t\tisa = 
PBXContainerItemProxy;\n\t\t\tcontainerPortal = 055AEDE71E7BD4280006FE5D /* Project object */;\n\t\t\tproxyType = 1;\n\t\t\tremoteGlobalIDString = 055AEDEE1E7BD4280006FE5D;\n\t\t\tremoteInfo = KSY265CodecDemo_iOS;\n\t\t};\n/* End PBXContainerItemProxy section */\n\n/* Begin PBXFileReference section */\n\t\t055AEDEF1E7BD4280006FE5D /* KSY265CodecDemo_iOS.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = KSY265CodecDemo_iOS.app; sourceTree = BUILT_PRODUCTS_DIR; };\n\t\t055AEDF31E7BD4280006FE5D /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = \"<group>\"; };\n\t\t055AEDF51E7BD4280006FE5D /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = \"<group>\"; };\n\t\t055AEDF61E7BD4280006FE5D /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = \"<group>\"; };\n\t\t055AEDF81E7BD4280006FE5D /* FirstViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FirstViewController.h; sourceTree = \"<group>\"; };\n\t\t055AEDF91E7BD4280006FE5D /* FirstViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FirstViewController.m; sourceTree = \"<group>\"; };\n\t\t055AEDFB1E7BD4280006FE5D /* SecondViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = SecondViewController.h; sourceTree = \"<group>\"; };\n\t\t055AEDFC1E7BD4280006FE5D /* SecondViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = SecondViewController.m; sourceTree = \"<group>\"; };\n\t\t055AEDFF1E7BD4280006FE5D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = \"<group>\"; };\n\t\t055AEE011E7BD4280006FE5D /* Assets.xcassets */ = {isa = PBXFileReference; 
lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = \"<group>\"; };\n\t\t055AEE041E7BD4280006FE5D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = \"<group>\"; };\n\t\t055AEE061E7BD4280006FE5D /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = \"<group>\"; };\n\t\t055AEE0B1E7BD4280006FE5D /* KSY265CodecDemo_iOSTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = KSY265CodecDemo_iOSTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };\n\t\t055AEE161E7BD4290006FE5D /* KSY265CodecDemo_iOSUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = KSY265CodecDemo_iOSUITests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };\n\t\t055AEE2B1E7BE7350006FE5D /* BaseViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = BaseViewController.h; sourceTree = \"<group>\"; };\n\t\t055AEE2C1E7BE7350006FE5D /* BaseViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = BaseViewController.m; sourceTree = \"<group>\"; };\n\t\t058715AD1E836C3C008D8860 /* libx264.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libx264.a; path = x264/libx264.a; sourceTree = \"<group>\"; };\n\t\t058715AF1E836FE8008D8860 /* MovieEncoder.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = MovieEncoder.h; path = KSY265CodecDemo_iOS/MovieEncoder.h; sourceTree = \"<group>\"; };\n\t\t058715B01E836FE8008D8860 /* MovieEncoder.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = MovieEncoder.m; path = KSY265CodecDemo_iOS/MovieEncoder.m; sourceTree = \"<group>\"; };\n\t\t058715B31E83BDB9008D8860 /* libqydecoder.a */ = {isa = 
PBXFileReference; lastKnownFileType = archive.ar; name = libqydecoder.a; path = ksy265codec/libqydecoder.a; sourceTree = \"<group>\"; };\n\t\t058715B41E83BDB9008D8860 /* libqyencoder.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libqyencoder.a; path = ksy265codec/libqyencoder.a; sourceTree = \"<group>\"; };\n\t\t058715B81E83BEED008D8860 /* KSYMovieEncoder.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = KSYMovieEncoder.h; path = KSY265CodecDemo_iOS/KSYMovieEncoder.h; sourceTree = \"<group>\"; };\n\t\t058715B91E83BEED008D8860 /* KSYMovieEncoder.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = KSYMovieEncoder.m; path = KSY265CodecDemo_iOS/KSYMovieEncoder.m; sourceTree = \"<group>\"; };\n\t\t05C45C851E8E02C60045FE79 /* libqycommon.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libqycommon.a; path = ksy265codec/libqycommon.a; sourceTree = \"<group>\"; };\n\t\t05C45C8B1E8E06A10045FE79 /* libz.tbd */ = {isa = PBXFileReference; lastKnownFileType = \"sourcecode.text-based-dylib-definition\"; name = libz.tbd; path = usr/lib/libz.tbd; sourceTree = SDKROOT; };\n\t\t05F746741E7E78820076E6EB /* SettingsDecoderViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SettingsDecoderViewController.h; sourceTree = \"<group>\"; };\n\t\t05F746751E7E78820076E6EB /* SettingsDecoderViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = SettingsDecoderViewController.m; sourceTree = \"<group>\"; };\n\t\t05F746761E7E78820076E6EB /* SettingsEncoderViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SettingsEncoderViewController.h; sourceTree = \"<group>\"; };\n\t\t05F746771E7E78820076E6EB /* SettingsEncoderViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = 
sourcecode.c.objc; path = SettingsEncoderViewController.m; sourceTree = \"<group>\"; };\n\t\t05F7467A1E7E81E60076E6EB /* EncoderHelperViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = EncoderHelperViewController.h; sourceTree = \"<group>\"; };\n\t\t05F7467B1E7E81E60076E6EB /* EncoderHelperViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = EncoderHelperViewController.m; sourceTree = \"<group>\"; };\n\t\t05F7467D1E7E87D10076E6EB /* DecoderHelperViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = DecoderHelperViewController.h; sourceTree = \"<group>\"; };\n\t\t05F7467E1E7E87D10076E6EB /* DecoderHelperViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = DecoderHelperViewController.m; sourceTree = \"<group>\"; };\n\t\t05F746831E7FB53C0076E6EB /* MoviesViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MoviesViewController.h; sourceTree = \"<group>\"; };\n\t\t05F746841E7FB53C0076E6EB /* MoviesViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = MoviesViewController.m; sourceTree = \"<group>\"; };\n\t\t05F746891E7FBDBA0076E6EB /* KSYMoviePlayer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = KSYMoviePlayer.h; path = KSY265CodecDemo_iOS/KSYMoviePlayer.h; sourceTree = \"<group>\"; };\n\t\t05F7468A1E7FBDBA0076E6EB /* KSYMoviePlayer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = KSYMoviePlayer.m; path = KSY265CodecDemo_iOS/KSYMoviePlayer.m; sourceTree = \"<group>\"; };\n\t\t05F7468C1E7FBDF70076E6EB /* MoviePlayer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = MoviePlayer.h; path = 
KSY265CodecDemo_iOS/MoviePlayer.h; sourceTree = \"<group>\"; };\n\t\t05F7468D1E7FBDF70076E6EB /* MoviePlayer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = MoviePlayer.m; path = KSY265CodecDemo_iOS/MoviePlayer.m; sourceTree = \"<group>\"; };\n\t\t05F746961E7FBE8A0076E6EB /* OpenGLES.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = OpenGLES.framework; path = System/Library/Frameworks/OpenGLES.framework; sourceTree = SDKROOT; };\n\t\t05F746981E7FBECB0076E6EB /* liblenthevcdec.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = liblenthevcdec.a; path = lenthevcdec/liblenthevcdec.a; sourceTree = \"<group>\"; };\n\t\t05F7469A1E7FBF230076E6EB /* QuartzCore.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = QuartzCore.framework; path = System/Library/Frameworks/QuartzCore.framework; sourceTree = SDKROOT; };\n\t\t05F746A81E7FC1520076E6EB /* UIKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = UIKit.framework; path = System/Library/Frameworks/UIKit.framework; sourceTree = SDKROOT; };\n\t\t05F746AA1E7FC15B0076E6EB /* Foundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Foundation.framework; path = System/Library/Frameworks/Foundation.framework; sourceTree = SDKROOT; };\n\t\t05F746AC1E7FC1680076E6EB /* CoreGraphics.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreGraphics.framework; path = System/Library/Frameworks/CoreGraphics.framework; sourceTree = SDKROOT; };\n\t\t05F746BA1E7FCC890076E6EB /* GLRenderer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GLRenderer.h; path = KSY265CodecDemo_iOS/GLRenderer.h; sourceTree = \"<group>\"; };\n\t\t05F746BB1E7FCC890076E6EB /* GLRenderer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = 
GLRenderer.m; path = KSY265CodecDemo_iOS/GLRenderer.m; sourceTree = \"<group>\"; };\n\t\t05F746BC1E7FCC890076E6EB /* GLView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GLView.h; path = KSY265CodecDemo_iOS/GLView.h; sourceTree = \"<group>\"; };\n\t\t05F746BD1E7FCC890076E6EB /* GLView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GLView.m; path = KSY265CodecDemo_iOS/GLView.m; sourceTree = \"<group>\"; };\n\t\t05F746C01E7FCDFE0076E6EB /* libc++.tbd */ = {isa = PBXFileReference; lastKnownFileType = \"sourcecode.text-based-dylib-definition\"; name = \"libc++.tbd\"; path = \"usr/lib/libc++.tbd\"; sourceTree = SDKROOT; };\n\t\tF28F15851E88EB6600E1A739 /* 1280x720_15.yuv */ = {isa = PBXFileReference; lastKnownFileType = file; path = 1280x720_15.yuv; sourceTree = \"<group>\"; };\n\t\tF28F15861E88EB6600E1A739 /* 640x480_15.yuv */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = 640x480_15.yuv; sourceTree = \"<group>\"; };\n\t\tF28F15891E88EFF700E1A739 /* 960x540_15.yuv */ = {isa = PBXFileReference; lastKnownFileType = file; path = 960x540_15.yuv; sourceTree = \"<group>\"; };\n\t\tF28F159B1E890F7B00E1A739 /* AYHCustomComboBox.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AYHCustomComboBox.h; sourceTree = \"<group>\"; };\n\t\tF28F159C1E890F7B00E1A739 /* AYHCustomComboBox.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AYHCustomComboBox.m; sourceTree = \"<group>\"; };\n\t\tF28F159D1E890F7B00E1A739 /* AYHCustomComboBoxDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AYHCustomComboBoxDelegate.h; sourceTree = \"<group>\"; };\n/* End PBXFileReference section */\n\n/* Begin PBXFrameworksBuildPhase section */\n\t\t055AEDEC1E7BD4280006FE5D /* Frameworks */ = {\n\t\t\tisa = 
PBXFrameworksBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = (\n\t\t\t\t05C45C8C1E8E06A10045FE79 /* libz.tbd in Frameworks */,\n\t\t\t\t05F746C11E7FCDFE0076E6EB /* libc++.tbd in Frameworks */,\n\t\t\t\t05F746AD1E7FC1680076E6EB /* CoreGraphics.framework in Frameworks */,\n\t\t\t\t05F746AB1E7FC15B0076E6EB /* Foundation.framework in Frameworks */,\n\t\t\t\t05F746A91E7FC1520076E6EB /* UIKit.framework in Frameworks */,\n\t\t\t\t05F7469B1E7FBF230076E6EB /* QuartzCore.framework in Frameworks */,\n\t\t\t\t05F746971E7FBE8A0076E6EB /* OpenGLES.framework in Frameworks */,\n\t\t\t\t058715B61E83BDB9008D8860 /* libqydecoder.a in Frameworks */,\n\t\t\t\t058715B71E83BDB9008D8860 /* libqyencoder.a in Frameworks */,\n\t\t\t\t05C45C861E8E02C60045FE79 /* libqycommon.a in Frameworks */,\n\t\t\t\t05F746991E7FBECB0076E6EB /* liblenthevcdec.a in Frameworks */,\n\t\t\t\t058715AE1E836C3C008D8860 /* libx264.a in Frameworks */,\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n\t\t055AEE081E7BD4280006FE5D /* Frameworks */ = {\n\t\t\tisa = PBXFrameworksBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = (\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n\t\t055AEE131E7BD4290006FE5D /* Frameworks */ = {\n\t\t\tisa = PBXFrameworksBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = (\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n/* End PBXFrameworksBuildPhase section */\n\n/* Begin PBXGroup section */\n\t\t055AEDE61E7BD4280006FE5D = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\t05F746A11E7FC1000076E6EB /* Prebuilt */,\n\t\t\t\t058715AA1E82B0DA008D8860 /* KSY265Encoder */,\n\t\t\t\t05F7469C1E7FC0320076E6EB /* KSY265Decoder */,\n\t\t\t\t055AEDF11E7BD4280006FE5D /* KSY265CodecDemo_iOS */,\n\t\t\t\tF28F159A1E890F7B00E1A739 /* combox */,\n\t\t\t\tF28F15841E88EB6600E1A739 /* resource */,\n\t\t\t\t055AEDF01E7BD4280006FE5D /* Products */,\n\t\t\t\t05F746951E7FBE890076E6EB /* Frameworks 
*/,\n\t\t\t);\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\t055AEDF01E7BD4280006FE5D /* Products */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\t055AEDEF1E7BD4280006FE5D /* KSY265CodecDemo_iOS.app */,\n\t\t\t\t055AEE0B1E7BD4280006FE5D /* KSY265CodecDemo_iOSTests.xctest */,\n\t\t\t\t055AEE161E7BD4290006FE5D /* KSY265CodecDemo_iOSUITests.xctest */,\n\t\t\t);\n\t\t\tname = Products;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\t055AEDF11E7BD4280006FE5D /* KSY265CodecDemo_iOS */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\t055AEDF51E7BD4280006FE5D /* AppDelegate.h */,\n\t\t\t\t055AEDF61E7BD4280006FE5D /* AppDelegate.m */,\n\t\t\t\t055AEDF81E7BD4280006FE5D /* FirstViewController.h */,\n\t\t\t\t055AEDF91E7BD4280006FE5D /* FirstViewController.m */,\n\t\t\t\t055AEDFB1E7BD4280006FE5D /* SecondViewController.h */,\n\t\t\t\t055AEDFC1E7BD4280006FE5D /* SecondViewController.m */,\n\t\t\t\t055AEE2B1E7BE7350006FE5D /* BaseViewController.h */,\n\t\t\t\t055AEE2C1E7BE7350006FE5D /* BaseViewController.m */,\n\t\t\t\t05F746741E7E78820076E6EB /* SettingsDecoderViewController.h */,\n\t\t\t\t05F746751E7E78820076E6EB /* SettingsDecoderViewController.m */,\n\t\t\t\t05F746761E7E78820076E6EB /* SettingsEncoderViewController.h */,\n\t\t\t\t05F746771E7E78820076E6EB /* SettingsEncoderViewController.m */,\n\t\t\t\t05F7467A1E7E81E60076E6EB /* EncoderHelperViewController.h */,\n\t\t\t\t05F7467B1E7E81E60076E6EB /* EncoderHelperViewController.m */,\n\t\t\t\t05F7467D1E7E87D10076E6EB /* DecoderHelperViewController.h */,\n\t\t\t\t05F7467E1E7E87D10076E6EB /* DecoderHelperViewController.m */,\n\t\t\t\t05F746831E7FB53C0076E6EB /* MoviesViewController.h */,\n\t\t\t\t05F746841E7FB53C0076E6EB /* MoviesViewController.m */,\n\t\t\t\t055AEDFE1E7BD4280006FE5D /* Main.storyboard */,\n\t\t\t\t055AEE011E7BD4280006FE5D /* Assets.xcassets */,\n\t\t\t\t055AEE031E7BD4280006FE5D /* LaunchScreen.storyboard */,\n\t\t\t\t055AEE061E7BD4280006FE5D /* Info.plist */,\n\t\t\t\t055AEDF21E7BD4280006FE5D /* 
Supporting Files */,\n\t\t\t);\n\t\t\tpath = KSY265CodecDemo_iOS;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\t055AEDF21E7BD4280006FE5D /* Supporting Files */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\t055AEDF31E7BD4280006FE5D /* main.m */,\n\t\t\t);\n\t\t\tname = \"Supporting Files\";\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\t058715AA1E82B0DA008D8860 /* KSY265Encoder */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\t058715AF1E836FE8008D8860 /* MovieEncoder.h */,\n\t\t\t\t058715B01E836FE8008D8860 /* MovieEncoder.m */,\n\t\t\t\t058715B81E83BEED008D8860 /* KSYMovieEncoder.h */,\n\t\t\t\t058715B91E83BEED008D8860 /* KSYMovieEncoder.m */,\n\t\t\t);\n\t\t\tname = KSY265Encoder;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\t05F746951E7FBE890076E6EB /* Frameworks */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\t05C45C8B1E8E06A10045FE79 /* libz.tbd */,\n\t\t\t\t05C45C851E8E02C60045FE79 /* libqycommon.a */,\n\t\t\t\t058715B31E83BDB9008D8860 /* libqydecoder.a */,\n\t\t\t\t058715B41E83BDB9008D8860 /* libqyencoder.a */,\n\t\t\t\t058715AD1E836C3C008D8860 /* libx264.a */,\n\t\t\t\t05F746C01E7FCDFE0076E6EB /* libc++.tbd */,\n\t\t\t\t05F7469A1E7FBF230076E6EB /* QuartzCore.framework */,\n\t\t\t\t05F746961E7FBE8A0076E6EB /* OpenGLES.framework */,\n\t\t\t\t05F746A81E7FC1520076E6EB /* UIKit.framework */,\n\t\t\t\t05F746AA1E7FC15B0076E6EB /* Foundation.framework */,\n\t\t\t\t05F746AC1E7FC1680076E6EB /* CoreGraphics.framework */,\n\t\t\t);\n\t\t\tname = Frameworks;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\t05F7469C1E7FC0320076E6EB /* KSY265Decoder */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\t05F746891E7FBDBA0076E6EB /* KSYMoviePlayer.h */,\n\t\t\t\t05F7468A1E7FBDBA0076E6EB /* KSYMoviePlayer.m */,\n\t\t\t\t05F7468C1E7FBDF70076E6EB /* MoviePlayer.h */,\n\t\t\t\t05F7468D1E7FBDF70076E6EB /* MoviePlayer.m */,\n\t\t\t\t05F746BA1E7FCC890076E6EB /* GLRenderer.h */,\n\t\t\t\t05F746BB1E7FCC890076E6EB /* GLRenderer.m 
*/,\n\t\t\t\t05F746BC1E7FCC890076E6EB /* GLView.h */,\n\t\t\t\t05F746BD1E7FCC890076E6EB /* GLView.m */,\n\t\t\t);\n\t\t\tname = KSY265Decoder;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\t05F746A11E7FC1000076E6EB /* Prebuilt */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\t05F746981E7FBECB0076E6EB /* liblenthevcdec.a */,\n\t\t\t);\n\t\t\tname = Prebuilt;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tF28F15841E88EB6600E1A739 /* resource */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tF28F15891E88EFF700E1A739 /* 960x540_15.yuv */,\n\t\t\t\tF28F15851E88EB6600E1A739 /* 1280x720_15.yuv */,\n\t\t\t\tF28F15861E88EB6600E1A739 /* 640x480_15.yuv */,\n\t\t\t);\n\t\t\tpath = resource;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tF28F159A1E890F7B00E1A739 /* combox */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tF28F159B1E890F7B00E1A739 /* AYHCustomComboBox.h */,\n\t\t\t\tF28F159C1E890F7B00E1A739 /* AYHCustomComboBox.m */,\n\t\t\t\tF28F159D1E890F7B00E1A739 /* AYHCustomComboBoxDelegate.h */,\n\t\t\t);\n\t\t\tpath = combox;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n/* End PBXGroup section */\n\n/* Begin PBXNativeTarget section */\n\t\t055AEDEE1E7BD4280006FE5D /* KSY265CodecDemo_iOS */ = {\n\t\t\tisa = PBXNativeTarget;\n\t\t\tbuildConfigurationList = 055AEE1F1E7BD4290006FE5D /* Build configuration list for PBXNativeTarget \"KSY265CodecDemo_iOS\" */;\n\t\t\tbuildPhases = (\n\t\t\t\t055AEDEB1E7BD4280006FE5D /* Sources */,\n\t\t\t\t055AEDEC1E7BD4280006FE5D /* Frameworks */,\n\t\t\t\t055AEDED1E7BD4280006FE5D /* Resources */,\n\t\t\t);\n\t\t\tbuildRules = (\n\t\t\t);\n\t\t\tdependencies = (\n\t\t\t);\n\t\t\tname = KSY265CodecDemo_iOS;\n\t\t\tproductName = KSY265CodecDemo_iOS;\n\t\t\tproductReference = 055AEDEF1E7BD4280006FE5D /* KSY265CodecDemo_iOS.app */;\n\t\t\tproductType = \"com.apple.product-type.application\";\n\t\t};\n\t\t055AEE0A1E7BD4280006FE5D /* KSY265CodecDemo_iOSTests */ = {\n\t\t\tisa = PBXNativeTarget;\n\t\t\tbuildConfigurationList = 
055AEE221E7BD4290006FE5D /* Build configuration list for PBXNativeTarget \"KSY265CodecDemo_iOSTests\" */;\n\t\t\tbuildPhases = (\n\t\t\t\t055AEE071E7BD4280006FE5D /* Sources */,\n\t\t\t\t055AEE081E7BD4280006FE5D /* Frameworks */,\n\t\t\t\t055AEE091E7BD4280006FE5D /* Resources */,\n\t\t\t);\n\t\t\tbuildRules = (\n\t\t\t);\n\t\t\tdependencies = (\n\t\t\t\t055AEE0D1E7BD4280006FE5D /* PBXTargetDependency */,\n\t\t\t);\n\t\t\tname = KSY265CodecDemo_iOSTests;\n\t\t\tproductName = KSY265CodecDemo_iOSTests;\n\t\t\tproductReference = 055AEE0B1E7BD4280006FE5D /* KSY265CodecDemo_iOSTests.xctest */;\n\t\t\tproductType = \"com.apple.product-type.bundle.unit-test\";\n\t\t};\n\t\t055AEE151E7BD4290006FE5D /* KSY265CodecDemo_iOSUITests */ = {\n\t\t\tisa = PBXNativeTarget;\n\t\t\tbuildConfigurationList = 055AEE251E7BD4290006FE5D /* Build configuration list for PBXNativeTarget \"KSY265CodecDemo_iOSUITests\" */;\n\t\t\tbuildPhases = (\n\t\t\t\t055AEE121E7BD4290006FE5D /* Sources */,\n\t\t\t\t055AEE131E7BD4290006FE5D /* Frameworks */,\n\t\t\t\t055AEE141E7BD4290006FE5D /* Resources */,\n\t\t\t);\n\t\t\tbuildRules = (\n\t\t\t);\n\t\t\tdependencies = (\n\t\t\t\t055AEE181E7BD4290006FE5D /* PBXTargetDependency */,\n\t\t\t);\n\t\t\tname = KSY265CodecDemo_iOSUITests;\n\t\t\tproductName = KSY265CodecDemo_iOSUITests;\n\t\t\tproductReference = 055AEE161E7BD4290006FE5D /* KSY265CodecDemo_iOSUITests.xctest */;\n\t\t\tproductType = \"com.apple.product-type.bundle.ui-testing\";\n\t\t};\n/* End PBXNativeTarget section */\n\n/* Begin PBXProject section */\n\t\t055AEDE71E7BD4280006FE5D /* Project object */ = {\n\t\t\tisa = PBXProject;\n\t\t\tattributes = {\n\t\t\t\tLastUpgradeCheck = 0820;\n\t\t\t\tORGANIZATIONNAME = \"江东\";\n\t\t\t\tTargetAttributes = {\n\t\t\t\t\t055AEDEE1E7BD4280006FE5D = {\n\t\t\t\t\t\tCreatedOnToolsVersion = 8.2.1;\n\t\t\t\t\t\tDevelopmentTeam = ZGJ54Q7R45;\n\t\t\t\t\t\tProvisioningStyle = Automatic;\n\t\t\t\t\t};\n\t\t\t\t\t055AEE0A1E7BD4280006FE5D = 
{\n\t\t\t\t\t\tCreatedOnToolsVersion = 8.2.1;\n\t\t\t\t\t\tDevelopmentTeam = ZGJ54Q7R45;\n\t\t\t\t\t\tProvisioningStyle = Automatic;\n\t\t\t\t\t\tTestTargetID = 055AEDEE1E7BD4280006FE5D;\n\t\t\t\t\t};\n\t\t\t\t\t055AEE151E7BD4290006FE5D = {\n\t\t\t\t\t\tCreatedOnToolsVersion = 8.2.1;\n\t\t\t\t\t\tDevelopmentTeam = ZGJ54Q7R45;\n\t\t\t\t\t\tProvisioningStyle = Automatic;\n\t\t\t\t\t\tTestTargetID = 055AEDEE1E7BD4280006FE5D;\n\t\t\t\t\t};\n\t\t\t\t};\n\t\t\t};\n\t\t\tbuildConfigurationList = 055AEDEA1E7BD4280006FE5D /* Build configuration list for PBXProject \"KSY265CodecDemo_iOS\" */;\n\t\t\tcompatibilityVersion = \"Xcode 3.2\";\n\t\t\tdevelopmentRegion = English;\n\t\t\thasScannedForEncodings = 0;\n\t\t\tknownRegions = (\n\t\t\t\ten,\n\t\t\t\tBase,\n\t\t\t);\n\t\t\tmainGroup = 055AEDE61E7BD4280006FE5D;\n\t\t\tproductRefGroup = 055AEDF01E7BD4280006FE5D /* Products */;\n\t\t\tprojectDirPath = \"\";\n\t\t\tprojectRoot = \"\";\n\t\t\ttargets = (\n\t\t\t\t055AEDEE1E7BD4280006FE5D /* KSY265CodecDemo_iOS */,\n\t\t\t\t055AEE0A1E7BD4280006FE5D /* KSY265CodecDemo_iOSTests */,\n\t\t\t\t055AEE151E7BD4290006FE5D /* KSY265CodecDemo_iOSUITests */,\n\t\t\t);\n\t\t};\n/* End PBXProject section */\n\n/* Begin PBXResourcesBuildPhase section */\n\t\t055AEDED1E7BD4280006FE5D /* Resources */ = {\n\t\t\tisa = PBXResourcesBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = (\n\t\t\t\t055AEE051E7BD4280006FE5D /* LaunchScreen.storyboard in Resources */,\n\t\t\t\t055AEE021E7BD4280006FE5D /* Assets.xcassets in Resources */,\n\t\t\t\tF28F15881E88EB6600E1A739 /* 640x480_15.yuv in Resources */,\n\t\t\t\tF28F158A1E88EFF700E1A739 /* 960x540_15.yuv in Resources */,\n\t\t\t\tF28F15871E88EB6600E1A739 /* 1280x720_15.yuv in Resources */,\n\t\t\t\t055AEE001E7BD4280006FE5D /* Main.storyboard in Resources */,\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n\t\t055AEE091E7BD4280006FE5D /* Resources */ = {\n\t\t\tisa = PBXResourcesBuildPhase;\n\t\t\tbuildActionMask = 
2147483647;\n\t\t\tfiles = (\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n\t\t055AEE141E7BD4290006FE5D /* Resources */ = {\n\t\t\tisa = PBXResourcesBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = (\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n/* End PBXResourcesBuildPhase section */\n\n/* Begin PBXSourcesBuildPhase section */\n\t\t055AEDEB1E7BD4280006FE5D /* Sources */ = {\n\t\t\tisa = PBXSourcesBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = (\n\t\t\t\t05F746BF1E7FCC890076E6EB /* GLView.m in Sources */,\n\t\t\t\t05F746BE1E7FCC890076E6EB /* GLRenderer.m in Sources */,\n\t\t\t\t055AEDFD1E7BD4280006FE5D /* SecondViewController.m in Sources */,\n\t\t\t\t058715BA1E83BEED008D8860 /* KSYMovieEncoder.m in Sources */,\n\t\t\t\t055AEE2D1E7BE7350006FE5D /* BaseViewController.m in Sources */,\n\t\t\t\tF28F159E1E890F7B00E1A739 /* AYHCustomComboBox.m in Sources */,\n\t\t\t\t05F746851E7FB53C0076E6EB /* MoviesViewController.m in Sources */,\n\t\t\t\t05F746791E7E78820076E6EB /* SettingsEncoderViewController.m in Sources */,\n\t\t\t\t055AEDF71E7BD4280006FE5D /* AppDelegate.m in Sources */,\n\t\t\t\t058715B11E836FE8008D8860 /* MovieEncoder.m in Sources */,\n\t\t\t\t05F7467F1E7E87D10076E6EB /* DecoderHelperViewController.m in Sources */,\n\t\t\t\t055AEDFA1E7BD4280006FE5D /* FirstViewController.m in Sources */,\n\t\t\t\t05F746781E7E78820076E6EB /* SettingsDecoderViewController.m in Sources */,\n\t\t\t\t055AEDF41E7BD4280006FE5D /* main.m in Sources */,\n\t\t\t\t05F7467C1E7E81E60076E6EB /* EncoderHelperViewController.m in Sources */,\n\t\t\t\t05F7468E1E7FBDF70076E6EB /* MoviePlayer.m in Sources */,\n\t\t\t\t05F7468B1E7FBDBA0076E6EB /* KSYMoviePlayer.m in Sources */,\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n\t\t055AEE071E7BD4280006FE5D /* Sources */ = {\n\t\t\tisa = PBXSourcesBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = 
(\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n\t\t055AEE121E7BD4290006FE5D /* Sources */ = {\n\t\t\tisa = PBXSourcesBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = (\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n/* End PBXSourcesBuildPhase section */\n\n/* Begin PBXTargetDependency section */\n\t\t055AEE0D1E7BD4280006FE5D /* PBXTargetDependency */ = {\n\t\t\tisa = PBXTargetDependency;\n\t\t\ttarget = 055AEDEE1E7BD4280006FE5D /* KSY265CodecDemo_iOS */;\n\t\t\ttargetProxy = 055AEE0C1E7BD4280006FE5D /* PBXContainerItemProxy */;\n\t\t};\n\t\t055AEE181E7BD4290006FE5D /* PBXTargetDependency */ = {\n\t\t\tisa = PBXTargetDependency;\n\t\t\ttarget = 055AEDEE1E7BD4280006FE5D /* KSY265CodecDemo_iOS */;\n\t\t\ttargetProxy = 055AEE171E7BD4290006FE5D /* PBXContainerItemProxy */;\n\t\t};\n/* End PBXTargetDependency section */\n\n/* Begin PBXVariantGroup section */\n\t\t055AEDFE1E7BD4280006FE5D /* Main.storyboard */ = {\n\t\t\tisa = PBXVariantGroup;\n\t\t\tchildren = (\n\t\t\t\t055AEDFF1E7BD4280006FE5D /* Base */,\n\t\t\t);\n\t\t\tname = Main.storyboard;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\t055AEE031E7BD4280006FE5D /* LaunchScreen.storyboard */ = {\n\t\t\tisa = PBXVariantGroup;\n\t\t\tchildren = (\n\t\t\t\t055AEE041E7BD4280006FE5D /* Base */,\n\t\t\t);\n\t\t\tname = LaunchScreen.storyboard;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n/* End PBXVariantGroup section */\n\n/* Begin XCBuildConfiguration section */\n\t\t055AEE1D1E7BD4290006FE5D /* Debug */ = {\n\t\t\tisa = XCBuildConfiguration;\n\t\t\tbuildSettings = {\n\t\t\t\tALWAYS_SEARCH_USER_PATHS = NO;\n\t\t\t\tCLANG_ANALYZER_NONNULL = YES;\n\t\t\t\tCLANG_CXX_LANGUAGE_STANDARD = \"gnu++0x\";\n\t\t\t\tCLANG_CXX_LIBRARY = \"libc++\";\n\t\t\t\tCLANG_ENABLE_MODULES = YES;\n\t\t\t\tCLANG_ENABLE_OBJC_ARC = YES;\n\t\t\t\tCLANG_WARN_BOOL_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_CONSTANT_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_DIRECT_OBJC_ISA_USAGE = 
YES_ERROR;\n\t\t\t\tCLANG_WARN_DOCUMENTATION_COMMENTS = YES;\n\t\t\t\tCLANG_WARN_EMPTY_BODY = YES;\n\t\t\t\tCLANG_WARN_ENUM_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_INFINITE_RECURSION = YES;\n\t\t\t\tCLANG_WARN_INT_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;\n\t\t\t\tCLANG_WARN_SUSPICIOUS_MOVE = YES;\n\t\t\t\tCLANG_WARN_UNREACHABLE_CODE = YES;\n\t\t\t\tCLANG_WARN__DUPLICATE_METHOD_MATCH = YES;\n\t\t\t\t\"CODE_SIGN_IDENTITY[sdk=iphoneos*]\" = \"iPhone Developer\";\n\t\t\t\tCOPY_PHASE_STRIP = NO;\n\t\t\t\tDEBUG_INFORMATION_FORMAT = dwarf;\n\t\t\t\tENABLE_STRICT_OBJC_MSGSEND = YES;\n\t\t\t\tENABLE_TESTABILITY = YES;\n\t\t\t\tGCC_C_LANGUAGE_STANDARD = gnu99;\n\t\t\t\tGCC_DYNAMIC_NO_PIC = NO;\n\t\t\t\tGCC_NO_COMMON_BLOCKS = YES;\n\t\t\t\tGCC_OPTIMIZATION_LEVEL = 0;\n\t\t\t\tGCC_PREPROCESSOR_DEFINITIONS = (\n\t\t\t\t\t\"DEBUG=1\",\n\t\t\t\t\t\"$(inherited)\",\n\t\t\t\t);\n\t\t\t\tGCC_WARN_64_TO_32_BIT_CONVERSION = YES;\n\t\t\t\tGCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;\n\t\t\t\tGCC_WARN_UNDECLARED_SELECTOR = YES;\n\t\t\t\tGCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;\n\t\t\t\tGCC_WARN_UNUSED_FUNCTION = YES;\n\t\t\t\tGCC_WARN_UNUSED_VARIABLE = YES;\n\t\t\t\tHEADER_SEARCH_PATHS = (\n\t\t\t\t\tKSY265CodecDemo_iOS/GLRenderView,\n\t\t\t\t\tlenthevcdec,\n\t\t\t\t\tksy265dec,\n\t\t\t\t);\n\t\t\t\tIPHONEOS_DEPLOYMENT_TARGET = 8.0;\n\t\t\t\tMTL_ENABLE_DEBUG_INFO = YES;\n\t\t\t\tONLY_ACTIVE_ARCH = YES;\n\t\t\t\tSDKROOT = iphoneos;\n\t\t\t\tTARGETED_DEVICE_FAMILY = \"1,2\";\n\t\t\t};\n\t\t\tname = Debug;\n\t\t};\n\t\t055AEE1E1E7BD4290006FE5D /* Release */ = {\n\t\t\tisa = XCBuildConfiguration;\n\t\t\tbuildSettings = {\n\t\t\t\tALWAYS_SEARCH_USER_PATHS = NO;\n\t\t\t\tCLANG_ANALYZER_NONNULL = YES;\n\t\t\t\tCLANG_CXX_LANGUAGE_STANDARD = \"gnu++0x\";\n\t\t\t\tCLANG_CXX_LIBRARY = \"libc++\";\n\t\t\t\tCLANG_ENABLE_MODULES = YES;\n\t\t\t\tCLANG_ENABLE_OBJC_ARC = YES;\n\t\t\t\tCLANG_WARN_BOOL_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_CONSTANT_CONVERSION = 
YES;\n\t\t\t\tCLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;\n\t\t\t\tCLANG_WARN_DOCUMENTATION_COMMENTS = YES;\n\t\t\t\tCLANG_WARN_EMPTY_BODY = YES;\n\t\t\t\tCLANG_WARN_ENUM_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_INFINITE_RECURSION = YES;\n\t\t\t\tCLANG_WARN_INT_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;\n\t\t\t\tCLANG_WARN_SUSPICIOUS_MOVE = YES;\n\t\t\t\tCLANG_WARN_UNREACHABLE_CODE = YES;\n\t\t\t\tCLANG_WARN__DUPLICATE_METHOD_MATCH = YES;\n\t\t\t\t\"CODE_SIGN_IDENTITY[sdk=iphoneos*]\" = \"iPhone Developer\";\n\t\t\t\tCOPY_PHASE_STRIP = NO;\n\t\t\t\tDEBUG_INFORMATION_FORMAT = \"dwarf-with-dsym\";\n\t\t\t\tENABLE_NS_ASSERTIONS = NO;\n\t\t\t\tENABLE_STRICT_OBJC_MSGSEND = YES;\n\t\t\t\tGCC_C_LANGUAGE_STANDARD = gnu99;\n\t\t\t\tGCC_NO_COMMON_BLOCKS = YES;\n\t\t\t\tGCC_WARN_64_TO_32_BIT_CONVERSION = YES;\n\t\t\t\tGCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;\n\t\t\t\tGCC_WARN_UNDECLARED_SELECTOR = YES;\n\t\t\t\tGCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;\n\t\t\t\tGCC_WARN_UNUSED_FUNCTION = YES;\n\t\t\t\tGCC_WARN_UNUSED_VARIABLE = YES;\n\t\t\t\tHEADER_SEARCH_PATHS = (\n\t\t\t\t\tKSY265CodecDemo_iOS/GLRenderView,\n\t\t\t\t\tlenthevcdec,\n\t\t\t\t\tksy265dec,\n\t\t\t\t);\n\t\t\t\tIPHONEOS_DEPLOYMENT_TARGET = 8.0;\n\t\t\t\tMTL_ENABLE_DEBUG_INFO = NO;\n\t\t\t\tSDKROOT = iphoneos;\n\t\t\t\tTARGETED_DEVICE_FAMILY = \"1,2\";\n\t\t\t\tVALIDATE_PRODUCT = YES;\n\t\t\t};\n\t\t\tname = Release;\n\t\t};\n\t\t055AEE201E7BD4290006FE5D /* Debug */ = {\n\t\t\tisa = XCBuildConfiguration;\n\t\t\tbuildSettings = {\n\t\t\t\tALWAYS_SEARCH_USER_PATHS = NO;\n\t\t\t\tASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;\n\t\t\t\tDEVELOPMENT_TEAM = ZGJ54Q7R45;\n\t\t\t\tENABLE_BITCODE = NO;\n\t\t\t\tHEADER_SEARCH_PATHS = (\n\t\t\t\t\tKSY265CodecDemo_iOS/GLRenderView,\n\t\t\t\t\tlenthevcdec,\n\t\t\t\t\tksy265codec,\n\t\t\t\t\tx264,\n\t\t\t\t);\n\t\t\t\tINFOPLIST_FILE = KSY265CodecDemo_iOS/Info.plist;\n\t\t\t\tIPHONEOS_DEPLOYMENT_TARGET = 8.0;\n\t\t\t\tLD_RUNPATH_SEARCH_PATHS = 
\"$(inherited) @executable_path/Frameworks\";\n\t\t\t\tLIBRARY_SEARCH_PATHS = (\n\t\t\t\t\t\"$(inherited)\",\n\t\t\t\t\t\"$(PROJECT_DIR)/lenthevcdec\",\n\t\t\t\t\t\"$(PROJECT_DIR)/ksy265dec\",\n\t\t\t\t\t\"$(PROJECT_DIR)/x264\",\n\t\t\t\t\t\"$(PROJECT_DIR)/ksy265codec\",\n\t\t\t\t\t\"$(PROJECT_DIR)\",\n\t\t\t\t);\n\t\t\t\tPRODUCT_BUNDLE_IDENTIFIER = \"com.ksyun.ios.KSY265CodecDemo-iOS\";\n\t\t\t\tPRODUCT_NAME = \"$(TARGET_NAME)\";\n\t\t\t};\n\t\t\tname = Debug;\n\t\t};\n\t\t055AEE211E7BD4290006FE5D /* Release */ = {\n\t\t\tisa = XCBuildConfiguration;\n\t\t\tbuildSettings = {\n\t\t\t\tALWAYS_SEARCH_USER_PATHS = NO;\n\t\t\t\tASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;\n\t\t\t\tDEVELOPMENT_TEAM = ZGJ54Q7R45;\n\t\t\t\tENABLE_BITCODE = NO;\n\t\t\t\tHEADER_SEARCH_PATHS = (\n\t\t\t\t\tKSY265CodecDemo_iOS/GLRenderView,\n\t\t\t\t\tlenthevcdec,\n\t\t\t\t\tksy265codec,\n\t\t\t\t\tx264,\n\t\t\t\t);\n\t\t\t\tINFOPLIST_FILE = KSY265CodecDemo_iOS/Info.plist;\n\t\t\t\tIPHONEOS_DEPLOYMENT_TARGET = 8.0;\n\t\t\t\tLD_RUNPATH_SEARCH_PATHS = \"$(inherited) @executable_path/Frameworks\";\n\t\t\t\tLIBRARY_SEARCH_PATHS = (\n\t\t\t\t\t\"$(inherited)\",\n\t\t\t\t\t\"$(PROJECT_DIR)/lenthevcdec\",\n\t\t\t\t\t\"$(PROJECT_DIR)/ksy265dec\",\n\t\t\t\t\t\"$(PROJECT_DIR)/x264\",\n\t\t\t\t\t\"$(PROJECT_DIR)/ksy265codec\",\n\t\t\t\t\t\"$(PROJECT_DIR)\",\n\t\t\t\t);\n\t\t\t\tPRODUCT_BUNDLE_IDENTIFIER = \"com.ksyun.ios.KSY265CodecDemo-iOS\";\n\t\t\t\tPRODUCT_NAME = \"$(TARGET_NAME)\";\n\t\t\t};\n\t\t\tname = Release;\n\t\t};\n\t\t055AEE231E7BD4290006FE5D /* Debug */ = {\n\t\t\tisa = XCBuildConfiguration;\n\t\t\tbuildSettings = {\n\t\t\t\tBUNDLE_LOADER = \"$(TEST_HOST)\";\n\t\t\t\tDEVELOPMENT_TEAM = ZGJ54Q7R45;\n\t\t\t\tINFOPLIST_FILE = KSY265CodecDemo_iOSTests/Info.plist;\n\t\t\t\tLD_RUNPATH_SEARCH_PATHS = \"$(inherited) @executable_path/Frameworks @loader_path/Frameworks\";\n\t\t\t\tPRODUCT_BUNDLE_IDENTIFIER = \"ksy.KSY265CodecDemo-iOSTests\";\n\t\t\t\tPRODUCT_NAME = 
\"$(TARGET_NAME)\";\n\t\t\t\tTEST_HOST = \"$(BUILT_PRODUCTS_DIR)/KSY265CodecDemo_iOS.app/KSY265CodecDemo_iOS\";\n\t\t\t};\n\t\t\tname = Debug;\n\t\t};\n\t\t055AEE241E7BD4290006FE5D /* Release */ = {\n\t\t\tisa = XCBuildConfiguration;\n\t\t\tbuildSettings = {\n\t\t\t\tBUNDLE_LOADER = \"$(TEST_HOST)\";\n\t\t\t\tDEVELOPMENT_TEAM = ZGJ54Q7R45;\n\t\t\t\tINFOPLIST_FILE = KSY265CodecDemo_iOSTests/Info.plist;\n\t\t\t\tLD_RUNPATH_SEARCH_PATHS = \"$(inherited) @executable_path/Frameworks @loader_path/Frameworks\";\n\t\t\t\tPRODUCT_BUNDLE_IDENTIFIER = \"ksy.KSY265CodecDemo-iOSTests\";\n\t\t\t\tPRODUCT_NAME = \"$(TARGET_NAME)\";\n\t\t\t\tTEST_HOST = \"$(BUILT_PRODUCTS_DIR)/KSY265CodecDemo_iOS.app/KSY265CodecDemo_iOS\";\n\t\t\t};\n\t\t\tname = Release;\n\t\t};\n\t\t055AEE261E7BD4290006FE5D /* Debug */ = {\n\t\t\tisa = XCBuildConfiguration;\n\t\t\tbuildSettings = {\n\t\t\t\tDEVELOPMENT_TEAM = ZGJ54Q7R45;\n\t\t\t\tINFOPLIST_FILE = KSY265CodecDemo_iOSUITests/Info.plist;\n\t\t\t\tLD_RUNPATH_SEARCH_PATHS = \"$(inherited) @executable_path/Frameworks @loader_path/Frameworks\";\n\t\t\t\tPRODUCT_BUNDLE_IDENTIFIER = \"ksy.KSY265CodecDemo-iOSUITests\";\n\t\t\t\tPRODUCT_NAME = \"$(TARGET_NAME)\";\n\t\t\t\tTEST_TARGET_NAME = KSY265CodecDemo_iOS;\n\t\t\t};\n\t\t\tname = Debug;\n\t\t};\n\t\t055AEE271E7BD4290006FE5D /* Release */ = {\n\t\t\tisa = XCBuildConfiguration;\n\t\t\tbuildSettings = {\n\t\t\t\tDEVELOPMENT_TEAM = ZGJ54Q7R45;\n\t\t\t\tINFOPLIST_FILE = KSY265CodecDemo_iOSUITests/Info.plist;\n\t\t\t\tLD_RUNPATH_SEARCH_PATHS = \"$(inherited) @executable_path/Frameworks @loader_path/Frameworks\";\n\t\t\t\tPRODUCT_BUNDLE_IDENTIFIER = \"ksy.KSY265CodecDemo-iOSUITests\";\n\t\t\t\tPRODUCT_NAME = \"$(TARGET_NAME)\";\n\t\t\t\tTEST_TARGET_NAME = KSY265CodecDemo_iOS;\n\t\t\t};\n\t\t\tname = Release;\n\t\t};\n/* End XCBuildConfiguration section */\n\n/* Begin XCConfigurationList section */\n\t\t055AEDEA1E7BD4280006FE5D /* Build configuration list for PBXProject \"KSY265CodecDemo_iOS\" */ = 
{\n\t\t\tisa = XCConfigurationList;\n\t\t\tbuildConfigurations = (\n\t\t\t\t055AEE1D1E7BD4290006FE5D /* Debug */,\n\t\t\t\t055AEE1E1E7BD4290006FE5D /* Release */,\n\t\t\t);\n\t\t\tdefaultConfigurationIsVisible = 0;\n\t\t\tdefaultConfigurationName = Release;\n\t\t};\n\t\t055AEE1F1E7BD4290006FE5D /* Build configuration list for PBXNativeTarget \"KSY265CodecDemo_iOS\" */ = {\n\t\t\tisa = XCConfigurationList;\n\t\t\tbuildConfigurations = (\n\t\t\t\t055AEE201E7BD4290006FE5D /* Debug */,\n\t\t\t\t055AEE211E7BD4290006FE5D /* Release */,\n\t\t\t);\n\t\t\tdefaultConfigurationIsVisible = 0;\n\t\t\tdefaultConfigurationName = Release;\n\t\t};\n\t\t055AEE221E7BD4290006FE5D /* Build configuration list for PBXNativeTarget \"KSY265CodecDemo_iOSTests\" */ = {\n\t\t\tisa = XCConfigurationList;\n\t\t\tbuildConfigurations = (\n\t\t\t\t055AEE231E7BD4290006FE5D /* Debug */,\n\t\t\t\t055AEE241E7BD4290006FE5D /* Release */,\n\t\t\t);\n\t\t\tdefaultConfigurationIsVisible = 0;\n\t\t\tdefaultConfigurationName = Release;\n\t\t};\n\t\t055AEE251E7BD4290006FE5D /* Build configuration list for PBXNativeTarget \"KSY265CodecDemo_iOSUITests\" */ = {\n\t\t\tisa = XCConfigurationList;\n\t\t\tbuildConfigurations = (\n\t\t\t\t055AEE261E7BD4290006FE5D /* Debug */,\n\t\t\t\t055AEE271E7BD4290006FE5D /* Release */,\n\t\t\t);\n\t\t\tdefaultConfigurationIsVisible = 0;\n\t\t\tdefaultConfigurationName = Release;\n\t\t};\n/* End XCConfigurationList section */\n\t};\n\trootObject = 055AEDE71E7BD4280006FE5D /* Project object */;\n}\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS.xcodeproj/project.xcworkspace/contents.xcworkspacedata",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<Workspace\n   version = \"1.0\">\n   <FileRef\n      location = \"self:KSY265CodecDemo_iOS.xcodeproj\">\n   </FileRef>\n</Workspace>\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS.xcodeproj/xcuserdata/jiangdong.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<Bucket\n   type = \"1\"\n   version = \"2.0\">\n   <Breakpoints>\n      <BreakpointProxy\n         BreakpointExtensionID = \"Xcode.Breakpoint.FileBreakpoint\">\n         <BreakpointContent\n            shouldBeEnabled = \"No\"\n            ignoreCount = \"0\"\n            continueAfterRunningActions = \"No\"\n            filePath = \"KSY265CodecDemo_iOS/KSYMovieEncoder.m\"\n            timestampString = \"513056532.661268\"\n            startingColumnNumber = \"9223372036854775807\"\n            endingColumnNumber = \"9223372036854775807\"\n            startingLineNumber = \"77\"\n            endingLineNumber = \"77\"\n            landmarkName = \"-encoder\"\n            landmarkType = \"7\">\n         </BreakpointContent>\n      </BreakpointProxy>\n      <BreakpointProxy\n         BreakpointExtensionID = \"Xcode.Breakpoint.FileBreakpoint\">\n         <BreakpointContent\n            shouldBeEnabled = \"Yes\"\n            ignoreCount = \"0\"\n            continueAfterRunningActions = \"No\"\n            filePath = \"KSY265CodecDemo_iOS/KSYMovieEncoder.m\"\n            timestampString = \"513068459.825586\"\n            startingColumnNumber = \"9223372036854775807\"\n            endingColumnNumber = \"9223372036854775807\"\n            startingLineNumber = \"163\"\n            endingLineNumber = \"163\"\n            landmarkName = \"-encoder\"\n            landmarkType = \"7\">\n         </BreakpointContent>\n      </BreakpointProxy>\n      <BreakpointProxy\n         BreakpointExtensionID = \"Xcode.Breakpoint.FileBreakpoint\">\n         <BreakpointContent\n            shouldBeEnabled = \"No\"\n            ignoreCount = \"0\"\n            continueAfterRunningActions = \"No\"\n            filePath = \"KSY265CodecDemo_iOS/SettingsEncoderViewController.m\"\n            timestampString = \"513505595.907382\"\n            startingColumnNumber = \"9223372036854775807\"\n            endingColumnNumber = 
\"9223372036854775807\"\n            startingLineNumber = \"207\"\n            endingLineNumber = \"207\"\n            landmarkName = \"-textFieldDidEndEditing:\"\n            landmarkType = \"7\">\n         </BreakpointContent>\n      </BreakpointProxy>\n      <BreakpointProxy\n         BreakpointExtensionID = \"Xcode.Breakpoint.FileBreakpoint\">\n         <BreakpointContent\n            shouldBeEnabled = \"No\"\n            ignoreCount = \"0\"\n            continueAfterRunningActions = \"No\"\n            filePath = \"KSY265CodecDemo_iOS/MoviesViewController.m\"\n            timestampString = \"513584550.763971\"\n            startingColumnNumber = \"9223372036854775807\"\n            endingColumnNumber = \"9223372036854775807\"\n            startingLineNumber = \"99\"\n            endingLineNumber = \"99\"\n            landmarkName = \"-tableView:numberOfRowsInSection:\"\n            landmarkType = \"7\">\n         </BreakpointContent>\n      </BreakpointProxy>\n      <BreakpointProxy\n         BreakpointExtensionID = \"Xcode.Breakpoint.FileBreakpoint\">\n         <BreakpointContent\n            shouldBeEnabled = \"No\"\n            ignoreCount = \"0\"\n            continueAfterRunningActions = \"No\"\n            filePath = \"combox/AYHCustomComboBox.m\"\n            timestampString = \"513584558.960267\"\n            startingColumnNumber = \"9223372036854775807\"\n            endingColumnNumber = \"9223372036854775807\"\n            startingLineNumber = \"76\"\n            endingLineNumber = \"76\"\n            landmarkName = \"-tableView:numberOfRowsInSection:\"\n            landmarkType = \"7\">\n         </BreakpointContent>\n      </BreakpointProxy>\n   </Breakpoints>\n</Bucket>\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS.xcodeproj/xcuserdata/jiangdong.xcuserdatad/xcschemes/KSY265CodecDemo_iOS.xcscheme",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<Scheme\n   LastUpgradeVersion = \"0820\"\n   version = \"1.3\">\n   <BuildAction\n      parallelizeBuildables = \"YES\"\n      buildImplicitDependencies = \"YES\">\n      <BuildActionEntries>\n         <BuildActionEntry\n            buildForTesting = \"YES\"\n            buildForRunning = \"YES\"\n            buildForProfiling = \"YES\"\n            buildForArchiving = \"YES\"\n            buildForAnalyzing = \"YES\">\n            <BuildableReference\n               BuildableIdentifier = \"primary\"\n               BlueprintIdentifier = \"055AEDEE1E7BD4280006FE5D\"\n               BuildableName = \"KSY265CodecDemo_iOS.app\"\n               BlueprintName = \"KSY265CodecDemo_iOS\"\n               ReferencedContainer = \"container:KSY265CodecDemo_iOS.xcodeproj\">\n            </BuildableReference>\n         </BuildActionEntry>\n      </BuildActionEntries>\n   </BuildAction>\n   <TestAction\n      buildConfiguration = \"Debug\"\n      selectedDebuggerIdentifier = \"Xcode.DebuggerFoundation.Debugger.LLDB\"\n      selectedLauncherIdentifier = \"Xcode.DebuggerFoundation.Launcher.LLDB\"\n      shouldUseLaunchSchemeArgsEnv = \"YES\">\n      <Testables>\n         <TestableReference\n            skipped = \"NO\">\n            <BuildableReference\n               BuildableIdentifier = \"primary\"\n               BlueprintIdentifier = \"055AEE0A1E7BD4280006FE5D\"\n               BuildableName = \"KSY265CodecDemo_iOSTests.xctest\"\n               BlueprintName = \"KSY265CodecDemo_iOSTests\"\n               ReferencedContainer = \"container:KSY265CodecDemo_iOS.xcodeproj\">\n            </BuildableReference>\n         </TestableReference>\n         <TestableReference\n            skipped = \"NO\">\n            <BuildableReference\n               BuildableIdentifier = \"primary\"\n               BlueprintIdentifier = \"055AEE151E7BD4290006FE5D\"\n               BuildableName = \"KSY265CodecDemo_iOSUITests.xctest\"\n        
       BlueprintName = \"KSY265CodecDemo_iOSUITests\"\n               ReferencedContainer = \"container:KSY265CodecDemo_iOS.xcodeproj\">\n            </BuildableReference>\n         </TestableReference>\n      </Testables>\n      <MacroExpansion>\n         <BuildableReference\n            BuildableIdentifier = \"primary\"\n            BlueprintIdentifier = \"055AEDEE1E7BD4280006FE5D\"\n            BuildableName = \"KSY265CodecDemo_iOS.app\"\n            BlueprintName = \"KSY265CodecDemo_iOS\"\n            ReferencedContainer = \"container:KSY265CodecDemo_iOS.xcodeproj\">\n         </BuildableReference>\n      </MacroExpansion>\n      <AdditionalOptions>\n      </AdditionalOptions>\n   </TestAction>\n   <LaunchAction\n      buildConfiguration = \"Debug\"\n      selectedDebuggerIdentifier = \"Xcode.DebuggerFoundation.Debugger.LLDB\"\n      selectedLauncherIdentifier = \"Xcode.DebuggerFoundation.Launcher.LLDB\"\n      launchStyle = \"0\"\n      useCustomWorkingDirectory = \"NO\"\n      ignoresPersistentStateOnLaunch = \"NO\"\n      debugDocumentVersioning = \"YES\"\n      debugServiceExtension = \"internal\"\n      allowLocationSimulation = \"YES\">\n      <BuildableProductRunnable\n         runnableDebuggingMode = \"0\">\n         <BuildableReference\n            BuildableIdentifier = \"primary\"\n            BlueprintIdentifier = \"055AEDEE1E7BD4280006FE5D\"\n            BuildableName = \"KSY265CodecDemo_iOS.app\"\n            BlueprintName = \"KSY265CodecDemo_iOS\"\n            ReferencedContainer = \"container:KSY265CodecDemo_iOS.xcodeproj\">\n         </BuildableReference>\n      </BuildableProductRunnable>\n      <AdditionalOptions>\n      </AdditionalOptions>\n   </LaunchAction>\n   <ProfileAction\n      buildConfiguration = \"Release\"\n      shouldUseLaunchSchemeArgsEnv = \"YES\"\n      savedToolIdentifier = \"\"\n      useCustomWorkingDirectory = \"NO\"\n      debugDocumentVersioning = \"YES\">\n      <BuildableProductRunnable\n         runnableDebuggingMode 
= \"0\">\n         <BuildableReference\n            BuildableIdentifier = \"primary\"\n            BlueprintIdentifier = \"055AEDEE1E7BD4280006FE5D\"\n            BuildableName = \"KSY265CodecDemo_iOS.app\"\n            BlueprintName = \"KSY265CodecDemo_iOS\"\n            ReferencedContainer = \"container:KSY265CodecDemo_iOS.xcodeproj\">\n         </BuildableReference>\n      </BuildableProductRunnable>\n   </ProfileAction>\n   <AnalyzeAction\n      buildConfiguration = \"Debug\">\n   </AnalyzeAction>\n   <ArchiveAction\n      buildConfiguration = \"Release\"\n      revealArchiveInOrganizer = \"YES\">\n   </ArchiveAction>\n</Scheme>\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS.xcodeproj/xcuserdata/jiangdong.xcuserdatad/xcschemes/xcschememanagement.plist",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n<plist version=\"1.0\">\n<dict>\n\t<key>SuppressBuildableAutocreation</key>\n\t<dict>\n\t\t<key>055AEDEE1E7BD4280006FE5D</key>\n\t\t<dict>\n\t\t\t<key>primary</key>\n\t\t\t<true/>\n\t\t</dict>\n\t\t<key>055AEE0A1E7BD4280006FE5D</key>\n\t\t<dict>\n\t\t\t<key>primary</key>\n\t\t\t<true/>\n\t\t</dict>\n\t\t<key>055AEE151E7BD4290006FE5D</key>\n\t\t<dict>\n\t\t\t<key>primary</key>\n\t\t\t<true/>\n\t\t</dict>\n\t</dict>\n</dict>\n</plist>\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS.xcodeproj/xcuserdata/ksyun.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<Bucket\n   type = \"1\"\n   version = \"2.0\">\n</Bucket>\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS.xcodeproj/xcuserdata/ksyun.xcuserdatad/xcschemes/KSY265CodecDemo_iOS.xcscheme",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<Scheme\n   LastUpgradeVersion = \"0820\"\n   version = \"1.3\">\n   <BuildAction\n      parallelizeBuildables = \"YES\"\n      buildImplicitDependencies = \"YES\">\n      <BuildActionEntries>\n         <BuildActionEntry\n            buildForTesting = \"YES\"\n            buildForRunning = \"YES\"\n            buildForProfiling = \"YES\"\n            buildForArchiving = \"YES\"\n            buildForAnalyzing = \"YES\">\n            <BuildableReference\n               BuildableIdentifier = \"primary\"\n               BlueprintIdentifier = \"055AEDEE1E7BD4280006FE5D\"\n               BuildableName = \"KSY265CodecDemo_iOS.app\"\n               BlueprintName = \"KSY265CodecDemo_iOS\"\n               ReferencedContainer = \"container:KSY265CodecDemo_iOS.xcodeproj\">\n            </BuildableReference>\n         </BuildActionEntry>\n      </BuildActionEntries>\n   </BuildAction>\n   <TestAction\n      buildConfiguration = \"Debug\"\n      selectedDebuggerIdentifier = \"Xcode.DebuggerFoundation.Debugger.LLDB\"\n      selectedLauncherIdentifier = \"Xcode.DebuggerFoundation.Launcher.LLDB\"\n      shouldUseLaunchSchemeArgsEnv = \"YES\">\n      <Testables>\n         <TestableReference\n            skipped = \"NO\">\n            <BuildableReference\n               BuildableIdentifier = \"primary\"\n               BlueprintIdentifier = \"055AEE0A1E7BD4280006FE5D\"\n               BuildableName = \"KSY265CodecDemo_iOSTests.xctest\"\n               BlueprintName = \"KSY265CodecDemo_iOSTests\"\n               ReferencedContainer = \"container:KSY265CodecDemo_iOS.xcodeproj\">\n            </BuildableReference>\n         </TestableReference>\n         <TestableReference\n            skipped = \"NO\">\n            <BuildableReference\n               BuildableIdentifier = \"primary\"\n               BlueprintIdentifier = \"055AEE151E7BD4290006FE5D\"\n               BuildableName = \"KSY265CodecDemo_iOSUITests.xctest\"\n        
       BlueprintName = \"KSY265CodecDemo_iOSUITests\"\n               ReferencedContainer = \"container:KSY265CodecDemo_iOS.xcodeproj\">\n            </BuildableReference>\n         </TestableReference>\n      </Testables>\n      <MacroExpansion>\n         <BuildableReference\n            BuildableIdentifier = \"primary\"\n            BlueprintIdentifier = \"055AEDEE1E7BD4280006FE5D\"\n            BuildableName = \"KSY265CodecDemo_iOS.app\"\n            BlueprintName = \"KSY265CodecDemo_iOS\"\n            ReferencedContainer = \"container:KSY265CodecDemo_iOS.xcodeproj\">\n         </BuildableReference>\n      </MacroExpansion>\n      <AdditionalOptions>\n      </AdditionalOptions>\n   </TestAction>\n   <LaunchAction\n      buildConfiguration = \"Debug\"\n      selectedDebuggerIdentifier = \"Xcode.DebuggerFoundation.Debugger.LLDB\"\n      selectedLauncherIdentifier = \"Xcode.DebuggerFoundation.Launcher.LLDB\"\n      launchStyle = \"0\"\n      useCustomWorkingDirectory = \"NO\"\n      ignoresPersistentStateOnLaunch = \"NO\"\n      debugDocumentVersioning = \"YES\"\n      debugServiceExtension = \"internal\"\n      allowLocationSimulation = \"YES\">\n      <BuildableProductRunnable\n         runnableDebuggingMode = \"0\">\n         <BuildableReference\n            BuildableIdentifier = \"primary\"\n            BlueprintIdentifier = \"055AEDEE1E7BD4280006FE5D\"\n            BuildableName = \"KSY265CodecDemo_iOS.app\"\n            BlueprintName = \"KSY265CodecDemo_iOS\"\n            ReferencedContainer = \"container:KSY265CodecDemo_iOS.xcodeproj\">\n         </BuildableReference>\n      </BuildableProductRunnable>\n      <AdditionalOptions>\n      </AdditionalOptions>\n   </LaunchAction>\n   <ProfileAction\n      buildConfiguration = \"Release\"\n      shouldUseLaunchSchemeArgsEnv = \"YES\"\n      savedToolIdentifier = \"\"\n      useCustomWorkingDirectory = \"NO\"\n      debugDocumentVersioning = \"YES\">\n      <BuildableProductRunnable\n         runnableDebuggingMode 
= \"0\">\n         <BuildableReference\n            BuildableIdentifier = \"primary\"\n            BlueprintIdentifier = \"055AEDEE1E7BD4280006FE5D\"\n            BuildableName = \"KSY265CodecDemo_iOS.app\"\n            BlueprintName = \"KSY265CodecDemo_iOS\"\n            ReferencedContainer = \"container:KSY265CodecDemo_iOS.xcodeproj\">\n         </BuildableReference>\n      </BuildableProductRunnable>\n   </ProfileAction>\n   <AnalyzeAction\n      buildConfiguration = \"Debug\">\n   </AnalyzeAction>\n   <ArchiveAction\n      buildConfiguration = \"Release\"\n      revealArchiveInOrganizer = \"YES\">\n   </ArchiveAction>\n</Scheme>\n"
  },
  {
    "path": "iOS_demo/KSY265CodecDemo_iOS.xcodeproj/xcuserdata/ksyun.xcuserdatad/xcschemes/xcschememanagement.plist",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n<plist version=\"1.0\">\n<dict>\n\t<key>SchemeUserState</key>\n\t<dict>\n\t\t<key>KSY265CodecDemo_iOS.xcscheme</key>\n\t\t<dict>\n\t\t\t<key>orderHint</key>\n\t\t\t<integer>0</integer>\n\t\t</dict>\n\t</dict>\n\t<key>SuppressBuildableAutocreation</key>\n\t<dict>\n\t\t<key>055AEDEE1E7BD4280006FE5D</key>\n\t\t<dict>\n\t\t\t<key>primary</key>\n\t\t\t<true/>\n\t\t</dict>\n\t\t<key>055AEE0A1E7BD4280006FE5D</key>\n\t\t<dict>\n\t\t\t<key>primary</key>\n\t\t\t<true/>\n\t\t</dict>\n\t\t<key>055AEE151E7BD4290006FE5D</key>\n\t\t<dict>\n\t\t\t<key>primary</key>\n\t\t\t<true/>\n\t\t</dict>\n\t</dict>\n</dict>\n</plist>\n"
  },
  {
    "path": "iOS_demo/combox/AYHCustomComboBox.h",
    "content": "//\n//  AYHCustomComboBox.h\n//  TestCustomComboBox\n//\n//  Created by AlimysoYang on 12-4-25.\n//  Copyright (c) 2012年 __Alimyso Software Ltd__. All rights reserved.\n//\tQQ:86373007\n\n#import <UIKit/UIKit.h>\n#import <QuartzCore/QuartzCore.h>\n#import \"AYHCustomComboBoxDelegate.h\"\n\n#define kTableViewCellHeight 28.0f\n\n@interface AYHCustomComboBox : UIView<UITableViewDelegate, UITableViewDataSource>\n{\n    NSString* NotificationName;\n}\n\n@property (strong, nonatomic) UITableView* ccbtableView;\n@property (strong, nonatomic) NSMutableArray* ccbListData;\n@property (assign, nonatomic) id<AYHCustomComboBoxDelegate> delegate;\n\n//初始化\n- (id) initWithFrame:(CGRect)frame DataCount:(int) count NotificationName:(NSString*) notificationName;\n//添加一个数据\n- (void) addItemData:(NSString*) itemData;\n//添加一组数据\n- (void) addItemsData:(NSArray*) itemsData;\n- (NSString*) getItemData;\n//UITableView数据刷新\n- (void) flushData;\n\n@end\n"
  },
  {
    "path": "iOS_demo/combox/AYHCustomComboBox.m",
    "content": "//\n//  AYHCustomComboBox.m\n//  TestCustomComboBox\n//\n//  Created by AlimysoYang on 12-4-25.\n//  Copyright (c) 2012年 __Alimyso Software Ltd__. All rights reserved.\n//\tQQ:86373007\n\n#import \"AYHCustomComboBox.h\"\n\n@implementation AYHCustomComboBox\n\n@synthesize ccbtableView, ccbListData;//, ccbTitle;\n@synthesize delegate;\n\n- (id)initWithFrame:(CGRect)frame DataCount:(int)count NotificationName:(NSString *)notificationName\n{\n    self = [super initWithFrame:frame];\n    if (self) \n    {\n        NotificationName = [[NSString alloc] initWithString:notificationName];\n        ccbListData = [[NSMutableArray alloc] initWithCapacity:0];\n        //ccbTitle = [[NSString alloc] initWithString:@\"\"];\n        ccbtableView = [[UITableView alloc] initWithFrame:CGRectMake(0, 0, frame.size.width, frame.size.height)];\n        [ccbtableView setDelegate:self];\n        [ccbtableView setDataSource:self];\n        [ccbtableView setBackgroundColor:[UIColor grayColor]];\n        [self addSubview:ccbtableView];\n        [self setBackgroundColor:[UIColor grayColor]];\n        self.layer.cornerRadius = 5.0f;\n        self.layer.borderWidth = 1.0f;\n        self.layer.borderColor = [UIColor blackColor].CGColor;\n        self.layer.masksToBounds = YES;\n        self.layer.borderWidth = 1;\n    }\n    return self;\n}\n\n/*\n// Only override drawRect: if you perform custom drawing.\n// An empty implementation adversely affects performance during animation.\n- (void)drawRect:(CGRect)rect\n{\n    // Drawing code\n}\n*/\n\n- (void) addItemData:(NSString *)itemData\n{\n\t[ccbListData addObject:itemData];    \n}\n\n- (void) addItemsData:(NSArray *)itemsData\n{\n    [ccbListData addObjectsFromArray:itemsData];\n}\n\n- (NSString*) getItemData\n{\n    return @\"\";\n    //return ccbTitle;\n}\n\n- (void) flushData\n{\n    [self.ccbtableView reloadData];\n}\n\n- (NSInteger) numberOfSectionsInTableView:(UITableView *)tableView\n{\n    return 1;\n}\n\n- (NSInteger) 
tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section\n{\n    return [ccbListData count];\n}\n\n- (CGFloat) tableView:(UITableView *)tableView heightForRowAtIndexPath:(NSIndexPath *)indexPath\n{\n    return kTableViewCellHeight;\n}\n\n- (UITableViewCell*) tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath\n{\n    static NSString* CellIdentifier = @\"CustomComboBoxCell\";\n    UITableViewCell* cell = [tableView dequeueReusableCellWithIdentifier:CellIdentifier];\n    if (cell==nil)\n        cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault reuseIdentifier:CellIdentifier];\n    \n    cell.textLabel.font = [UIFont boldSystemFontOfSize:15.0f];\n    cell.textLabel.textAlignment = NSTextAlignmentCenter;\n    cell.textLabel.text = [ccbListData objectAtIndex:[indexPath row]];\n    return cell;\n}\n\n- (void) tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath\n{\n    NSString* selectItem = [ccbListData objectAtIndex:[indexPath row]];\n    \n    //协议执行\n    [delegate CustomComboBoxChanged:self SelectedItem:selectItem];\n    //通知消息返回\n    //[[NSNotificationCenter defaultCenter] postNotificationName:NotificationName object:nil];\n}\n\n- (void) dealloc\n{\n}\n@end\n"
  },
  {
    "path": "iOS_demo/combox/AYHCustomComboBoxDelegate.h",
    "content": "//\n//  AYHCustomComboBoxDelegate.h\n//  TestCustomComboBox\n//\n//  Created by AlimysoYang on 12-4-25.\n//  Copyright (c) 2012年 __Alimyso Software Ltd__. All rights reserved.\n//\tQQ:86373007\n\n#import <Foundation/Foundation.h>\n\n@protocol AYHCustomComboBoxDelegate <NSObject>\n\n- (void) CustomComboBoxChanged:(id) sender SelectedItem:(NSString*) selectedItem;\n\n@end\n"
  },
  {
    "path": "iOS_demo/ksy265codec/qy265dec.h",
    "content": "///////////////////////////////////////////////////\n//\n//         Qianyi H265 Codec Library\n//\n//  Copyright(c) 2013-2014 Qianyi, Inc.\n//              www.qyvideo.cn\n//\n///////////////////////////////////////////////////\n/************************************************************************************\n* decInf.h: interface of decoder for user\n*\n* \\date     2013-09-28: first version\n*\n************************************************************************************/\n#ifndef _QY265_DECODER_INTERFACE_H_\n#define  _QY265_DECODER_INTERFACE_H_\n\n#include \"qy265def.h\"\n\n// config parameters for Decoder\ntypedef struct QY265DecConfig {\n    void* pAuth;                //QYAuth, invalid if don't need aksk auth\n    int threads;               // number of threads used in decoding (0: auto)\n    int bEnableOutputRecToFile;  // For debug: write reconstruct YUV to File\n    char* strRecYuvFileName;      // For debug: file name of YUV\n                                  // when bEnableOutputRecToFile = 1\n    int logLevel;               //For debug: log level\n}QY265DecConfig;\n\n// information of decoded frame\ntypedef struct QY265FrameInfo {\n    int nWidth;     // frame width\n    int nHeight;    // frame height\n    long long pts;  // time stamp\n    int bIllegalStream; // input bit stream is illegal\n}QY265FrameInfo;\n\n// decoded frame with data and information\ntypedef struct QY265Frame {\n    int  bValid; //if == 0, no more valid output frame\n    unsigned char* pData[3]; // Y U V\n    short iStride[3];        // stride for each component\n    QY265FrameInfo frameinfo;\n}QY265Frame;\n\n\n#if defined(__cplusplus)\nextern \"C\" {\n#endif//__cplusplus\n\n/************************************************************************************\n* I/F for all usrs\n************************************************************************************/\n// create decoder, return  handle of decoder\n_h_dll_export void* 
QY265DecoderCreate(QY265DecConfig* pDecConfig, int * pStat);\n// destroy decoder with specific handle\n_h_dll_export void QY265DecoderDestroy(void* pDecoder);\n// set config to specific decoder\n_h_dll_export void QY265DecoderSetDecConfig(void *pDecoder, QY265DecConfig* pDecConfig, int * pStat);\n//the input of this function should be one or more NALs;\n//if only one NAL, with or without start bytes are both OK\n_h_dll_export void QY265DecodeFrame(void *pDecoder, unsigned char* pData, int iLen, int * pStat, const long long pts);\n// bSkip = false : same as QY265DecodeFrame\n// bSkip = true : only decode slice headers in pData, slice data skipped\n_h_dll_export void QY265DecodeFrameEnSkip(void *pDecoder, unsigned char* pData, int iLen, int * pStat, const long long pts, int bSkip);\n//flush decoding, called at end\n_h_dll_export void QY265DecodeFlush(void *pDecoder, int bClearCachedPics, int * pStat);\n// retrieve the output, the function are used for synchronized output, this function need to call several time until get NULL\n// if bForceLogo == true, only one frame buffer inside, need  return before get next output\n_h_dll_export void QY265DecoderGetDecodedFrame(void *pDecoder, QY265Frame* pFrame, int * pStat, int bForceLogo);\n// return the frame buffer which QY265DecoderGetOutput get from decoder, each valid QY265DecoderGetOutput should match with a ReturnFrame\n_h_dll_export void QY265DecoderReturnDecodedFrame( void *pDecoder, QY265Frame* pFrame);\n\n/**\n * dump latest decoded VUI parameters\n * @param_input pDecoder:   decoder instance\n * @param_output vui:       fill with decoded vui parameters\n * @param_output bValid: =0 if no valid vui parameters decoded,\n *                      otherwise =1\n */\n_h_dll_export void QY265DumpVUIParameters(void* pDecoder, vui_parameters* vui, int* bValid);\n\n#if defined(__cplusplus)\n}\n#endif//__cplusplus\n\n#endif//header\n"
  },
  {
    "path": "iOS_demo/ksy265codec/qy265def.h",
    "content": "#ifndef _QY265_DEF_H_\n#define  _QY265_DEF_H_\n\n// ****************************************\n// error type\n// ****************************************\nenum\n{\n    QY_OK = (0x00000000),          // Success codes\n    QY_FAIL = (0x80000001),        //  Unspecified error\n    QY_OUTOFMEMORY = (0x80000002), //  Ran out of memory\n    QY_POINTER = (0x80000003),     //  Invalid pointer\n    QY_NOTSUPPORTED = (0x80000004),//  NOT support feature encoutnered\n    QY_AUTH_INVALID = (0x80000005), //  authentication invalid\n    QY_SEARCHING_ACCESS_POINT = (0x00000001), // in process of searching first access point\n    QY_REF_PIC_NOT_FOUND = (0x00000007), // reference picture not found, can be ignored\n#if defined(EMSCRIPTEN)||defined(_TEST_FOR_EMSCRIPTEN)\n    QY_NEED_MORE_DATA = (0x00000008), //need push more data\n#endif\n    QY_BITSTREAM_ERROR = (0x00000009), // detecting bitstream error, can be ignored\n};\n\nenum NAL_UNIT_TYPE{\n    NAL_UNIT_TYPE_TRAIL_N = 0,\n    NAL_UNIT_TYPE_TRAIL_R = 1,\n\n    NAL_UNIT_TYPE_TSA_N = 2,\n    NAL_UNIT_TYPE_TSA_R = 3,\n\n    NAL_UNIT_TYPE_STSA_N = 4,\n    NAL_UNIT_TYPE_STSA_R = 5,\n\n    NAL_UNIT_TYPE_RADL_N = 6,\n    NAL_UNIT_TYPE_RADL_R = 7,\n\n    NAL_UNIT_TYPE_RASL_N = 8,\n    NAL_UNIT_TYPE_RASL_R = 9,\n\n    //reserved\n    NAL_UNIT_TYPE_RSV_VCL_N10 = 10,\n    NAL_UNIT_TYPE_RSV_VCL_N12 = 12,\n    NAL_UNIT_TYPE_RSV_VCL_N14 = 13,\n    NAL_UNIT_TYPE_RSV_VCL_R11 = 11,\n    NAL_UNIT_TYPE_RSV_VCL_R13 = 13,\n    NAL_UNIT_TYPE_RSV_VCL_R15 = 15,\n\n    NAL_UNIT_TYPE_BLA_W_LP = 16,\n    NAL_UNIT_TYPE_BLA_W_RADL = 17,\n    NAL_UNIT_TYPE_BLA_N_LP = 18,\n\n    NAL_UNIT_TYPE_IDR_W_RADL = 19,\n    NAL_UNIT_TYPE_IDR_N_LP = 20,\n\n    NAL_UNIT_TYPE_CRA_NUT = 21,\n\n    NAL_UNIT_TYPE_RSV_IRAP_VCL22 = 22,\n    NAL_UNIT_TYPE_RSV_IRAP_VCL23 = 23,\n\n    NAL_UNIT_TYPE_RSV_VCL24 = 24,\n    NAL_UNIT_TYPE_RSV_VCL25 = 25,\n    NAL_UNIT_TYPE_RSV_VCL26 = 26,\n    NAL_UNIT_TYPE_RSV_VCL27 = 27,\n    NAL_UNIT_TYPE_RSV_VCL28 = 28,\n    
NAL_UNIT_TYPE_RSV_VCL29 = 29,\n    NAL_UNIT_TYPE_RSV_VCL30 = 30,\n    NAL_UNIT_TYPE_RSV_VCL31 = 31,\n\n    NAL_UNIT_TYPE_VPS_NUT = 32,\n    NAL_UNIT_TYPE_SPS_NUT = 33,\n    NAL_UNIT_TYPE_PPS_NUT = 34,\n    NAL_UNIT_TYPE_AUD_NUT = 35,\n    NAL_UNIT_TYPE_EOS_NUT = 36,\n    NAL_UNIT_TYPE_EOB_NUT = 37,\n    NAL_UNIT_TYPE_FD_NUT = 38,\n\n    NAL_UNIT_TYPE_PREFIX_SEI_NUT = 39,\n    NAL_UNIT_TYPE_SUFFIX_SEI_NUT = 40,\n\n    NAL_UNIT_TYPE_RSV_NVCL41 = 41,\n    NAL_UNIT_TYPE_RSV_NVCL42 = 42,\n    NAL_UNIT_TYPE_RSV_NVCL43 = 43,\n    NAL_UNIT_TYPE_RSV_NVCL44 = 44,\n    NAL_UNIT_TYPE_RSV_NVCL45 = 45,\n    NAL_UNIT_TYPE_RSV_NVCL46 = 46,\n    NAL_UNIT_TYPE_RSV_NVCL47 = 47,\n\n    NAL_UNIT_TYPE_UNSPEC48 = 48,\n    NAL_UNIT_TYPE_UNSPEC49 = 49,\n    NAL_UNIT_TYPE_UNSPEC50 = 50,\n    NAL_UNIT_TYPE_UNSPEC51 = 51,\n    NAL_UNIT_TYPE_UNSPEC52 = 52,\n    NAL_UNIT_TYPE_UNSPEC53 = 53,\n    NAL_UNIT_TYPE_UNSPEC54 = 54,\n    NAL_UNIT_TYPE_UNSPEC55 = 55,\n    NAL_UNIT_TYPE_UNSPEC56 = 56,\n    NAL_UNIT_TYPE_UNSPEC57 = 57,\n    NAL_UNIT_TYPE_UNSPEC58 = 58,\n    NAL_UNIT_TYPE_UNSPEC59 = 59,\n    NAL_UNIT_TYPE_UNSPEC60 = 60,\n    NAL_UNIT_TYPE_UNSPEC61 = 61,\n    NAL_UNIT_TYPE_UNSPEC62 = 62,\n    NAL_UNIT_TYPE_UNSPEC63 = 63,\n};\n\n// ****************************************\n// VUI\n// ****************************************\ntypedef struct vui_parameters{\n        // --- sample aspect ratio (SAR) ---\n    unsigned char     aspect_ratio_info_present_flag;\n    unsigned short sar_width;  // sar_width and sar_height are zero if unspecified\n    unsigned short sar_height;\n\n    // --- overscan ---\n    unsigned char     overscan_info_present_flag;\n    unsigned char     overscan_appropriate_flag;\n\n    // --- video signal type ---\n    unsigned char   video_signal_type_present_flag;\n    unsigned char   video_format;\n    unsigned char   video_full_range_flag;\n    unsigned char   colour_description_present_flag;\n    unsigned char   colour_primaries;\n    unsigned char   
transfer_characteristics;\n    unsigned char   matrix_coeffs;\n\n    // --- chroma / interlaced ---\n    unsigned char     chroma_loc_info_present_flag;\n    unsigned char  chroma_sample_loc_type_top_field;\n    unsigned char  chroma_sample_loc_type_bottom_field;\n    unsigned char     neutral_chroma_indication_flag;\n    unsigned char     field_seq_flag;\n    unsigned char     frame_field_info_present_flag;\n\n    // --- default display window ---\n    unsigned char     default_display_window_flag;\n    unsigned int def_disp_win_left_offset;\n    unsigned int def_disp_win_right_offset;\n    unsigned int def_disp_win_top_offset;\n    unsigned int def_disp_win_bottom_offset;\n\n    // --- timing ---\n    unsigned char     vui_timing_info_present_flag;\n    unsigned int vui_num_units_in_tick;\n    unsigned int vui_time_scale;\n\n    unsigned char     vui_poc_proportional_to_timing_flag;\n    unsigned int vui_num_ticks_poc_diff_one;\n\n    // --- hrd parameters ---\n    unsigned char     vui_hrd_parameters_present_flag;\n    //hrd_parameters vui_hrd_parameters;\n\n    // --- bitstream restriction ---\n    unsigned char bitstream_restriction_flag;\n    unsigned char tiles_fixed_structure_flag;\n    unsigned char motion_vectors_over_pic_boundaries_flag;\n    unsigned char restricted_ref_pic_lists_flag;\n    unsigned short min_spatial_segmentation_idc;\n    unsigned char  max_bytes_per_pic_denom;\n    unsigned char  max_bits_per_min_cu_denom;\n    unsigned char  log2_max_mv_length_horizontal;\n    unsigned char  log2_max_mv_length_vertical;\n}vui_parameters;\n\n#if defined(SWIG) || defined(__AVM2__)\n#define _h_dll_export\n#else\n\n#ifdef WIN32\n#define _h_dll_export   __declspec(dllexport)\n#else // for GCC\n#define _h_dll_export __attribute__ ((visibility(\"default\")))\n#endif\n\n#endif  //SWIG\n\ntypedef void  (*QYLogPrintf)(const char* msg);\ntypedef void  (*QYAuthWarning)();\n\n#if defined(__cplusplus)\nextern \"C\" {\n#endif//__cplusplus\n\n// log output callback 
func pointer \n// if  pFuncCB == NULL, use the default printf\n_h_dll_export void QY265SetLogPrintf ( QYLogPrintf pFuncCB);\n\n// auth trouble warning callback func pointer\n_h_dll_export void QY265SetAuthWarning ( QYAuthWarning pFuncCB);\n\n#if defined(__cplusplus)\n}\n#endif//__cplusplus\n\n//libqy265 version number string\n_h_dll_export extern const char strLibQy265Version[];\n\n#endif\n"
  },
  {
    "path": "iOS_demo/ksy265codec/qy265enc.h",
    "content": "///////////////////////////////////////////////////\n//\n//         Kingsoft H265 Codec Library \n//\n//  Copyright(c) Kingsoft cloud Inc.\n//              http://www.ksyun.com/\n//\n///////////////////////////////////////////////////\n/************************************************************************************\n* encInf.h: interface of encoder for user\n*\n* \\date     2013-09-28: first version\n*    \n************************************************************************************/\n#ifndef   _QY265_ENCODER_INTERFACE_H_\n#define   _QY265_ENCODER_INTERFACE_H_\n\n#include \"qy265def.h\"\n// ****************************************\n// base configuration \n// ****************************************\n//app type\ntypedef enum QY265Tune_tag{\n    QY265TUNE_DEFAULT = 0,\n    QY265TUNE_SELFSHOW = 1,\n    QY265TUNE_GAME = 2,\n    QY265TUNE_MOVIE = 3,\n    QY265TUNE_SCREEN = 4\n}QY265Tune;\n\ntypedef enum QY265Preset_tag{\n    QY265PRESET_SUPERFAST = 0,\n    QY265PRESET_VERYFAST = 1,\n    QY265PRESET_FAST = 2,\n    QY265PRESET_MEDIUM = 3,\n    QY265PRESET_SLOW = 4,\n    QY265PRESET_VERYSLOW = 5,\n    QY265PRESET_PLACEBO = 6,\n}QY265Preset;\n\ntypedef enum QY265Latency_tag{\n    QY265LATENCY_ZERO = 0,\n    QY265LATENCY_LOWDELAY = 1,\n    QY265LATENCY_LIVESTREMING = 2,\n    QY265LATENCY_OFFLINE = 3,\n}QY265Latency;\n\n//base configuration\ntypedef struct QY265EncConfig{\n    void* pAuth;        //QYAuth, invalid if don't need aksk auth\n    QY265Tune tune;    //\n    QY265Preset preset;\n    QY265Latency latency;\n    int bHeaderBeforeKeyframe; //whether output vps,sps,pps before key frame, default 1. 
dis/enable 0/1\n    int picWidth;          // input frame width\n    int picHeight;         // input frame height\n    double frameRate;      // input frame rate\n    int bframes;           // num of bi-pred frames, -1: using default\n    int temporalLayer;     // works with QY265LATENCY_ZERO, separate P frames into temporal layers, 0 or 1\n\n    int rc;                // rc type 0 disable,1 cbr,2 abr,3 crf, default 2\n    int bitrateInkbps;     // target bit rate in kbps, valid when rctype is cbr and abr\n    int vbv_buffer_size;   // buf size of vbv\n    int vbv_max_rate;      // max rate of vbv\n    int qp;                // valid when rctype is disable, default 26\n    int crf;               // valid when rctype is crf,default 24\n    int iIntraPeriod;      // I-Frame period, -1 = only first\n    int qpmin;              //minimal qp, valid when rc != 0, 0~51\n    int qpmax;              //maximal qp, valid when rc != 0, 1~51, qpmax = 0 means 51\n    int enFrameSkip;        //1: enable frame skip for ratecontrol, default 0\n    //* Execute Properties \n    int enWavefront;       //enable wave front parallel\n    int enFrameParallel;   //enable frame parallel\n    int threads;           // number of threads used in encoding ( for wavefront, frame parallel, or enable both )\n    //* vui_parameters\n    //vui_parameters_present_flag equal to 1 specifies that the vui_parameters() syntax in struct vui should set by usr\n    int vui_parameters_present_flag;\n    struct{\n        /* video_signal_type_present_flag.  If this is set then\n         * video_format, video_full_range_flag and colour_description_present_flag\n         * will be added to the VUI. The default is false */\n        int video_signal_type_present_flag;\n        /* Video format of the source video.  
0 = component, 1 = PAL, 2 = NTSC,\n         * 3 = SECAM, 4 = MAC, 5 = unspecified video format is the default */\n        int video_format;\n        /* video_full_range_flag indicates the black level and range of the luma\n         * and chroma signals as derived from EY, EPB, and EPR or ER, EG,\n         * and EB real-valued component signals. The default is false */\n        int video_full_range_flag;\n        /* colour_description_present_flag in the VUI. If this is set then\n         * color_primaries, transfer_characteristics and matrix_coeffs are to be\n         * added to the VUI. The default is false */\n        int colour_description_present_flag;\n        /* colour_primaries holds the chromacity coordinates of the source\n         * primaries. The default is 2 */\n        int colour_primaries;\n        /* transfer_characteristics indicates the opto-electronic transfer\n         * characteristic of the source picture. The default is 2 */\n        int transfer_characteristics;\n        /* matrix_coeffs used to derive the luma and chroma signals from\n         * the red, blue and green primaries. 
The default is 2 */\n        int matrix_coeffs;\n    }vui;\n    //* debug\n    int logLevel;\n    int calcPsnr;          //0:not calc psnr; 1: print total psnr; 2: print each frame\n}QY265EncConfig;\n\n// ****************************************\n// callback functions\n// ****************************************\n//the encoder works in asynchronous mode (for supports of B frames)\n//once calling on EncodeFrame not corresponds to one Frame's bitstream output\n//thus, use callback function on Frame Encoded\n//also, buffer of srcYUV should be reserved for encoder, until it's done\n// CALLBACK method to feed the encoded bit stream\n\n// input frame data and info\ntypedef struct QY265YUV{\n    int iWidth;                 // input frame width\n    int iHeight;                // input frame height\n    unsigned char* pData[3];    // input frame Y U V\n    int iStride[3];             // stride for Y U V\n}QY265YUV;\n\n// input frame data and info\ntypedef struct QY265Picture{\n    int iSliceType; // specified by output pictures\n    int poc;        // ignored on input\n    long long pts;\n    long long dts;\n    QY265YUV* yuv;\n}QY265Picture;\n\n\ntypedef struct QY265Nal\n{\n    int naltype;\n    int tid;\n    int iSize;\n    long long pts;\n    unsigned char* pPayload;\n}QY265Nal;\n\n\n#if defined(__cplusplus)\nextern \"C\" {\n#endif//__cplusplus\n/**\n* create encoder\n* @param pCfg : base config of encoder\n* @param errorCode: error code\n* @return encoder handle\n*/\n_h_dll_export void* QY265EncoderOpen(QY265EncConfig* pCfg, int *errorCode);\n// destroy encoder \n_h_dll_export void QY265EncoderClose(void* pEncoder);\n// reconfig encoder\n_h_dll_export void QY265EncoderReconfig(void* pEncoder,QY265EncConfig* pCfg);\n// return the VPS, SPS and PPS that will be used for the whole stream.\n_h_dll_export int QY265EncoderEncodeHeaders(void* pEncoder,QY265Nal** pNals,int* iNalCount);\n\n/**\n* Encode one frame add logo or not\n*\n* @param pEncoder   handle of encoder\n* 
@param pNals      pointer array of output NAL units\n* @param iNalCount  output NAL unit count\n* @param pInPic     input frame\n* @param pOutPic    output frame\n* @param bForceLogo add logo on the input frame ( when auth failed)\n* @return if succeed, return the total bin size of output, if failed, return the error code\n*/\n_h_dll_export int QY265EncoderEncodeFrame(void* pEncoder, QY265Nal** pNals, int* iNalCount, QY265Picture* pInpic, QY265Picture* pOutpic, int bForceLogo);\n\n// Request encoder to encode a Key Frame\n_h_dll_export void QY265EncoderKeyFrameRequest(void* pEncoder);\n// current buffered frames \n_h_dll_export int QY265EncoderDelayedFrames(void* pEncoder);\n\nstatic const char* const  qy265_preset_names[] = { \"superfast\", \"veryfast\", \"fast\", \"medium\", \"slow\", \"veryslow\", \"placebo\", 0 };\nstatic const char* const  qy265_tunes_names[] = { \"default\", \"selfshow\", \"game\", \"movie\", \"screen\", 0 };\nstatic const char* const  qy265_latency_names[] = { \"zerolatency\", \"lowdelay\", \"livestreaming\", \"offline\", 0 };\n// get default config values by preset, tune and latency. enum format\n_h_dll_export int QY265ConfigDefault(QY265EncConfig* pConfig, QY265Preset preset, QY265Tune tune, QY265Latency latency);\n\n// get default config values by preset, tune and latency. string format\n_h_dll_export int QY265ConfigDefaultPreset(QY265EncConfig* pConfig, char* preset, char* tune, char* latency);\n\n#define QY265_PARAM_BAD_NAME  (-1)\n#define QY265_PARAM_BAD_VALUE (-2)\n_h_dll_export int QY265ConfigParse(QY265EncConfig *p, const char *name, const char *value);\n#if defined(__cplusplus)\n}\n#endif//__cplusplus\n\n#endif\n"
  },
  {
    "path": "iOS_demo/lenthevcdec/lenthevcdec.h",
    "content": "#ifndef __LENTHEVCDEC_H__\n#define __LENTHEVCDEC_H__\n\n\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n#include <stdint.h>\n\n#if defined(_WIN32) || defined(WIN32)\n\t#define LENTAPI __stdcall\n#else\n\t#define LENTAPI\n#endif\n\n\ttypedef struct lenthevcdec_frame {\n\t\t/* size in bytes of this struct, initialized by caller for expand */\n\t\tint32_t size;\n\t\t/* width & height: picture size */\n\t\tint32_t width;\n\t\tint32_t height;\n\t\t/* line_stride & pixels: output picture pixel data */\n\t\tint32_t line_stride[3];\n\t\tvoid* pixels[3];\n\t\t/* bit depth of output picture pixel */\n\t\tint32_t bit_depth;\n\t\t/* return 1 if we got frame, then the pixels & line_stride & got_pts is valid */\n\t\tint32_t got_frame;\n\t\t/* pts of output frame */\n\t\tint64_t got_pts;\n\t\t/* 0 progressive, 1 top, 2 bottom */\n\t\tint32_t pic_struct;\n\t\t/* Sample Aspect Ratio */\n\t\tint32_t sar_width;\n\t\tint32_t sar_height;\n\t} lenthevcdec_frame;\n\n\ttypedef void* lenthevcdec_ctx;\n\n\tint             LENTAPI lenthevcdec_version(void);\n\n\tlenthevcdec_ctx LENTAPI lenthevcdec_create(int threads, int compatibility, void* reserved);\n\n\tvoid            LENTAPI lenthevcdec_destroy(lenthevcdec_ctx ctx);\n\n\tvoid            LENTAPI lenthevcdec_flush(lenthevcdec_ctx ctx);\n\n\t/* bs & bs_len: input bitstream\n\t * pts: input play timestamp\n\t * out_frame: output picture wrapper\n\t * return: byte count used by decoder, or negative number for error\n\t */\n\tint             LENTAPI lenthevcdec_decode_frame(lenthevcdec_ctx ctx,\n\t\t\t\t\t\t\t const void* bs, int bs_len,\n\t\t\t\t\t\t\t int64_t pts,\n\t\t\t\t\t\t\t lenthevcdec_frame *out_frame);\n\n#ifdef __cplusplus\n}\n#endif\n\n#endif/*__LENTHEVCDEC_H__*/\n"
  },
  {
    "path": "iOS_demo/x264/x264.h",
    "content": "/*****************************************************************************\n * x264.h: x264 public header\n *****************************************************************************\n * Copyright (C) 2003-2016 x264 project\n *\n * Authors: Laurent Aimar <fenrir@via.ecp.fr>\n *          Loren Merritt <lorenm@u.washington.edu>\n *          Fiona Glaser <fiona@x264.com>\n *\n * This program is free software; you can redistribute it and/or modify\n * it under the terms of the GNU General Public License as published by\n * the Free Software Foundation; either version 2 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n * GNU General Public License for more details.\n *\n * You should have received a copy of the GNU General Public License\n * along with this program; if not, write to the Free Software\n * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02111, USA.\n *\n * This program is also available under a commercial proprietary license.\n * For more information, contact us at licensing@x264.com.\n *****************************************************************************/\n\n#ifndef X264_X264_H\n#define X264_X264_H\n\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n#if !defined(_STDINT_H) && !defined(_STDINT_H_) && !defined(_STDINT_H_INCLUDED) && !defined(_STDINT) &&\\\n    !defined(_SYS_STDINT_H_) && !defined(_INTTYPES_H) && !defined(_INTTYPES_H_) && !defined(_INTTYPES)\n# ifdef _MSC_VER\n#  pragma message(\"You must include stdint.h or inttypes.h before x264.h\")\n# else\n#  warning You must include stdint.h or inttypes.h before x264.h\n# endif\n#endif\n\n#include <stdarg.h>\n\n#include \"x264_config.h\"\n\n#define X264_BUILD 148\n\n/* Application developers planning to link against a shared library version of\n * 
libx264 from a Microsoft Visual Studio or similar development environment\n * will need to define X264_API_IMPORTS before including this header.\n * This clause does not apply to MinGW, similar development environments, or non\n * Windows platforms. */\n#ifdef X264_API_IMPORTS\n#define X264_API __declspec(dllimport)\n#else\n#define X264_API\n#endif\n\n/* x264_t:\n *      opaque handler for encoder */\ntypedef struct x264_t x264_t;\n\n/****************************************************************************\n * NAL structure and functions\n ****************************************************************************/\n\nenum nal_unit_type_e\n{\n    NAL_UNKNOWN     = 0,\n    NAL_SLICE       = 1,\n    NAL_SLICE_DPA   = 2,\n    NAL_SLICE_DPB   = 3,\n    NAL_SLICE_DPC   = 4,\n    NAL_SLICE_IDR   = 5,    /* ref_idc != 0 */\n    NAL_SEI         = 6,    /* ref_idc == 0 */\n    NAL_SPS         = 7,\n    NAL_PPS         = 8,\n    NAL_AUD         = 9,\n    NAL_FILLER      = 12,\n    /* ref_idc == 0 for 6,9,10,11,12 */\n};\nenum nal_priority_e\n{\n    NAL_PRIORITY_DISPOSABLE = 0,\n    NAL_PRIORITY_LOW        = 1,\n    NAL_PRIORITY_HIGH       = 2,\n    NAL_PRIORITY_HIGHEST    = 3,\n};\n\n/* The data within the payload is already NAL-encapsulated; the ref_idc and type\n * are merely in the struct for easy access by the calling application.\n * All data returned in an x264_nal_t, including the data in p_payload, is no longer\n * valid after the next call to x264_encoder_encode.  Thus it must be used or copied\n * before calling x264_encoder_encode or x264_encoder_headers again. */\ntypedef struct x264_nal_t\n{\n    int i_ref_idc;  /* nal_priority_e */\n    int i_type;     /* nal_unit_type_e */\n    int b_long_startcode;\n    int i_first_mb; /* If this NAL is a slice, the index of the first MB in the slice. */\n    int i_last_mb;  /* If this NAL is a slice, the index of the last MB in the slice. */\n\n    /* Size of payload (including any padding) in bytes. 
*/\n    int     i_payload;\n    /* If param->b_annexb is set, Annex-B bytestream with startcode.\n     * Otherwise, startcode is replaced with a 4-byte size.\n     * This size is the size used in mp4/similar muxing; it is equal to i_payload-4 */\n    uint8_t *p_payload;\n\n    /* Size of padding in bytes. */\n    int i_padding;\n} x264_nal_t;\n\n/****************************************************************************\n * Encoder parameters\n ****************************************************************************/\n/* CPU flags */\n\n/* x86 */\n#define X264_CPU_CMOV            0x0000001\n#define X264_CPU_MMX             0x0000002\n#define X264_CPU_MMX2            0x0000004  /* MMX2 aka MMXEXT aka ISSE */\n#define X264_CPU_MMXEXT          X264_CPU_MMX2\n#define X264_CPU_SSE             0x0000008\n#define X264_CPU_SSE2            0x0000010\n#define X264_CPU_SSE3            0x0000020\n#define X264_CPU_SSSE3           0x0000040\n#define X264_CPU_SSE4            0x0000080  /* SSE4.1 */\n#define X264_CPU_SSE42           0x0000100  /* SSE4.2 */\n#define X264_CPU_LZCNT           0x0000200  /* Phenom support for \"leading zero count\" instruction. */\n#define X264_CPU_AVX             0x0000400  /* AVX support: requires OS support even if YMM registers aren't used. 
*/\n#define X264_CPU_XOP             0x0000800  /* AMD XOP */\n#define X264_CPU_FMA4            0x0001000  /* AMD FMA4 */\n#define X264_CPU_FMA3            0x0002000  /* FMA3 */\n#define X264_CPU_AVX2            0x0004000  /* AVX2 */\n#define X264_CPU_BMI1            0x0008000  /* BMI1 */\n#define X264_CPU_BMI2            0x0010000  /* BMI2 */\n/* x86 modifiers */\n#define X264_CPU_CACHELINE_32    0x0020000  /* avoid memory loads that span the border between two cachelines */\n#define X264_CPU_CACHELINE_64    0x0040000  /* 32/64 is the size of a cacheline in bytes */\n#define X264_CPU_SSE2_IS_SLOW    0x0080000  /* avoid most SSE2 functions on Athlon64 */\n#define X264_CPU_SSE2_IS_FAST    0x0100000  /* a few functions are only faster on Core2 and Phenom */\n#define X264_CPU_SLOW_SHUFFLE    0x0200000  /* The Conroe has a slow shuffle unit (relative to overall SSE performance) */\n#define X264_CPU_STACK_MOD4      0x0400000  /* if stack is only mod4 and not mod16 */\n#define X264_CPU_SLOW_CTZ        0x0800000  /* BSR/BSF x86 instructions are really slow on some CPUs */\n#define X264_CPU_SLOW_ATOM       0x1000000  /* The Atom is terrible: slow SSE unaligned loads, slow\n                                             * SIMD multiplies, slow SIMD variable shifts, slow pshufb,\n                                             * cacheline split penalties -- gather everything here that\n                                             * isn't shared by other CPUs to avoid making half a dozen\n                                             * new SLOW flags. 
*/\n#define X264_CPU_SLOW_PSHUFB     0x2000000  /* such as on the Intel Atom */\n#define X264_CPU_SLOW_PALIGNR    0x4000000  /* such as on the AMD Bobcat */\n\n/* PowerPC */\n#define X264_CPU_ALTIVEC         0x0000001\n\n/* ARM and AArch64 */\n#define X264_CPU_ARMV6           0x0000001\n#define X264_CPU_NEON            0x0000002  /* ARM NEON */\n#define X264_CPU_FAST_NEON_MRC   0x0000004  /* Transfer from NEON to ARM register is fast (Cortex-A9) */\n#define X264_CPU_ARMV8           0x0000008\n\n/* MIPS */\n#define X264_CPU_MSA             0x0000001  /* MIPS MSA */\n\n/* Analyse flags */\n#define X264_ANALYSE_I4x4       0x0001  /* Analyse i4x4 */\n#define X264_ANALYSE_I8x8       0x0002  /* Analyse i8x8 (requires 8x8 transform) */\n#define X264_ANALYSE_PSUB16x16  0x0010  /* Analyse p16x8, p8x16 and p8x8 */\n#define X264_ANALYSE_PSUB8x8    0x0020  /* Analyse p8x4, p4x8, p4x4 */\n#define X264_ANALYSE_BSUB16x16  0x0100  /* Analyse b16x8, b8x16 and b8x8 */\n#define X264_DIRECT_PRED_NONE        0\n#define X264_DIRECT_PRED_SPATIAL     1\n#define X264_DIRECT_PRED_TEMPORAL    2\n#define X264_DIRECT_PRED_AUTO        3\n#define X264_ME_DIA                  0\n#define X264_ME_HEX                  1\n#define X264_ME_UMH                  2\n#define X264_ME_ESA                  3\n#define X264_ME_TESA                 4\n#define X264_CQM_FLAT                0\n#define X264_CQM_JVT                 1\n#define X264_CQM_CUSTOM              2\n#define X264_RC_CQP                  0\n#define X264_RC_CRF                  1\n#define X264_RC_ABR                  2\n#define X264_QP_AUTO                 0\n#define X264_AQ_NONE                 0\n#define X264_AQ_VARIANCE             1\n#define X264_AQ_AUTOVARIANCE         2\n#define X264_AQ_AUTOVARIANCE_BIASED  3\n#define X264_B_ADAPT_NONE            0\n#define X264_B_ADAPT_FAST            1\n#define X264_B_ADAPT_TRELLIS         2\n#define X264_WEIGHTP_NONE            0\n#define X264_WEIGHTP_SIMPLE          1\n#define X264_WEIGHTP_SMART        
   2\n#define X264_B_PYRAMID_NONE          0\n#define X264_B_PYRAMID_STRICT        1\n#define X264_B_PYRAMID_NORMAL        2\n#define X264_KEYINT_MIN_AUTO         0\n#define X264_KEYINT_MAX_INFINITE     (1<<30)\n\nstatic const char * const x264_direct_pred_names[] = { \"none\", \"spatial\", \"temporal\", \"auto\", 0 };\nstatic const char * const x264_motion_est_names[] = { \"dia\", \"hex\", \"umh\", \"esa\", \"tesa\", 0 };\nstatic const char * const x264_b_pyramid_names[] = { \"none\", \"strict\", \"normal\", 0 };\nstatic const char * const x264_overscan_names[] = { \"undef\", \"show\", \"crop\", 0 };\nstatic const char * const x264_vidformat_names[] = { \"component\", \"pal\", \"ntsc\", \"secam\", \"mac\", \"undef\", 0 };\nstatic const char * const x264_fullrange_names[] = { \"off\", \"on\", 0 };\nstatic const char * const x264_colorprim_names[] = { \"\", \"bt709\", \"undef\", \"\", \"bt470m\", \"bt470bg\", \"smpte170m\", \"smpte240m\", \"film\", \"bt2020\", \"smpte428\",\n                                                     \"smpte431\", \"smpte432\", 0 };\nstatic const char * const x264_transfer_names[] = { \"\", \"bt709\", \"undef\", \"\", \"bt470m\", \"bt470bg\", \"smpte170m\", \"smpte240m\", \"linear\", \"log100\", \"log316\",\n                                                    \"iec61966-2-4\", \"bt1361e\", \"iec61966-2-1\", \"bt2020-10\", \"bt2020-12\", \"smpte2084\", \"smpte428\", 0 };\nstatic const char * const x264_colmatrix_names[] = { \"GBR\", \"bt709\", \"undef\", \"\", \"fcc\", \"bt470bg\", \"smpte170m\", \"smpte240m\", \"YCgCo\", \"bt2020nc\", \"bt2020c\",\n                                                     \"smpte2085\", 0 };\nstatic const char * const x264_nal_hrd_names[] = { \"none\", \"vbr\", \"cbr\", 0 };\n\n/* Colorspace type */\n#define X264_CSP_MASK           0x00ff  /* */\n#define X264_CSP_NONE           0x0000  /* Invalid mode     */\n#define X264_CSP_I420           0x0001  /* yuv 4:2:0 planar */\n#define X264_CSP_YV12           0x0002  
/* yvu 4:2:0 planar */\n#define X264_CSP_NV12           0x0003  /* yuv 4:2:0, with one y plane and one packed u+v */\n#define X264_CSP_NV21           0x0004  /* yuv 4:2:0, with one y plane and one packed v+u */\n#define X264_CSP_I422           0x0005  /* yuv 4:2:2 planar */\n#define X264_CSP_YV16           0x0006  /* yvu 4:2:2 planar */\n#define X264_CSP_NV16           0x0007  /* yuv 4:2:2, with one y plane and one packed u+v */\n#define X264_CSP_V210           0x0008  /* 10-bit yuv 4:2:2 packed in 32 */\n#define X264_CSP_I444           0x0009  /* yuv 4:4:4 planar */\n#define X264_CSP_YV24           0x000a  /* yvu 4:4:4 planar */\n#define X264_CSP_BGR            0x000b  /* packed bgr 24bits   */\n#define X264_CSP_BGRA           0x000c  /* packed bgr 32bits   */\n#define X264_CSP_RGB            0x000d  /* packed rgb 24bits   */\n#define X264_CSP_MAX            0x000e  /* end of list */\n#define X264_CSP_VFLIP          0x1000  /* the csp is vertically flipped */\n#define X264_CSP_HIGH_DEPTH     0x2000  /* the csp has a depth of 16 bits per pixel component */\n\n/* Slice type */\n#define X264_TYPE_AUTO          0x0000  /* Let x264 choose the right type */\n#define X264_TYPE_IDR           0x0001\n#define X264_TYPE_I             0x0002\n#define X264_TYPE_P             0x0003\n#define X264_TYPE_BREF          0x0004  /* Non-disposable B-frame */\n#define X264_TYPE_B             0x0005\n#define X264_TYPE_KEYFRAME      0x0006  /* IDR or I depending on b_open_gop option */\n#define IS_X264_TYPE_I(x) ((x)==X264_TYPE_I || (x)==X264_TYPE_IDR || (x)==X264_TYPE_KEYFRAME)\n#define IS_X264_TYPE_B(x) ((x)==X264_TYPE_B || (x)==X264_TYPE_BREF)\n\n/* Log level */\n#define X264_LOG_NONE          (-1)\n#define X264_LOG_ERROR          0\n#define X264_LOG_WARNING        1\n#define X264_LOG_INFO           2\n#define X264_LOG_DEBUG          3\n\n/* Threading */\n#define X264_THREADS_AUTO 0 /* Automatically select optimal number of threads */\n#define X264_SYNC_LOOKAHEAD_AUTO (-1) /* 
Automatically select optimal lookahead thread buffer size */\n\n/* HRD */\n#define X264_NAL_HRD_NONE            0\n#define X264_NAL_HRD_VBR             1\n#define X264_NAL_HRD_CBR             2\n\n/* Zones: override ratecontrol or other options for specific sections of the video.\n * See x264_encoder_reconfig() for which options can be changed.\n * If zones overlap, whichever comes later in the list takes precedence. */\ntypedef struct x264_zone_t\n{\n    int i_start, i_end; /* range of frame numbers */\n    int b_force_qp; /* whether to use qp vs bitrate factor */\n    int i_qp;\n    float f_bitrate_factor;\n    struct x264_param_t *param;\n} x264_zone_t;\n\ntypedef struct x264_param_t\n{\n    /* CPU flags */\n    unsigned int cpu;\n    int         i_threads;           /* encode multiple frames in parallel */\n    int         i_lookahead_threads; /* multiple threads for lookahead analysis */\n    int         b_sliced_threads;  /* Whether to use slice-based threading. */\n    int         b_deterministic; /* whether to allow non-deterministic optimizations when threaded */\n    int         b_cpu_independent; /* force canonical behavior rather than cpu-dependent optimal algorithms */\n    int         i_sync_lookahead; /* threaded lookahead buffer */\n\n    /* Video Properties */\n    int         i_width;\n    int         i_height;\n    int         i_csp;         /* CSP of encoded bitstream */\n    int         i_level_idc;\n    int         i_frame_total; /* number of frames to encode if known, else 0 */\n\n    /* NAL HRD\n     * Uses Buffering and Picture Timing SEIs to signal HRD\n     * The HRD in H.264 was not designed with VFR in mind.\n     * It is therefore not recommendeded to use NAL HRD with VFR.\n     * Furthermore, reconfiguring the VBV (via x264_encoder_reconfig)\n     * will currently generate invalid HRD. 
*/\n    int         i_nal_hrd;\n\n    struct\n    {\n        /* they will be reduced to be 0 < x <= 65535 and prime */\n        int         i_sar_height;\n        int         i_sar_width;\n\n        int         i_overscan;    /* 0=undef, 1=no overscan, 2=overscan */\n\n        /* see h264 annex E for the values of the following */\n        int         i_vidformat;\n        int         b_fullrange;\n        int         i_colorprim;\n        int         i_transfer;\n        int         i_colmatrix;\n        int         i_chroma_loc;    /* both top & bottom */\n    } vui;\n\n    /* Bitstream parameters */\n    int         i_frame_reference;  /* Maximum number of reference frames */\n    int         i_dpb_size;         /* Force a DPB size larger than that implied by B-frames and reference frames.\n                                     * Useful in combination with interactive error resilience. */\n    int         i_keyint_max;       /* Force an IDR keyframe at this interval */\n    int         i_keyint_min;       /* Scenecuts closer together than this are coded as I, not IDR. */\n    int         i_scenecut_threshold; /* how aggressively to insert extra I frames */\n    int         b_intra_refresh;    /* Whether or not to use periodic intra refresh instead of IDR frames. 
*/\n\n    int         i_bframe;   /* how many b-frame between 2 references pictures */\n    int         i_bframe_adaptive;\n    int         i_bframe_bias;\n    int         i_bframe_pyramid;   /* Keep some B-frames as references: 0=off, 1=strict hierarchical, 2=normal */\n    int         b_open_gop;\n    int         b_bluray_compat;\n    int         i_avcintra_class;\n\n    int         b_deblocking_filter;\n    int         i_deblocking_filter_alphac0;    /* [-6, 6] -6 light filter, 6 strong */\n    int         i_deblocking_filter_beta;       /* [-6, 6]  idem */\n\n    int         b_cabac;\n    int         i_cabac_init_idc;\n\n    int         b_interlaced;\n    int         b_constrained_intra;\n\n    int         i_cqm_preset;\n    char        *psz_cqm_file;      /* filename (in UTF-8) of CQM file, JM format */\n    uint8_t     cqm_4iy[16];        /* used only if i_cqm_preset == X264_CQM_CUSTOM */\n    uint8_t     cqm_4py[16];\n    uint8_t     cqm_4ic[16];\n    uint8_t     cqm_4pc[16];\n    uint8_t     cqm_8iy[64];\n    uint8_t     cqm_8py[64];\n    uint8_t     cqm_8ic[64];\n    uint8_t     cqm_8pc[64];\n\n    /* Log */\n    void        (*pf_log)( void *, int i_level, const char *psz, va_list );\n    void        *p_log_private;\n    int         i_log_level;\n    int         b_full_recon;   /* fully reconstruct frames, even when not necessary for encoding.  
Implied by psz_dump_yuv */\n    char        *psz_dump_yuv;  /* filename (in UTF-8) for reconstructed frames */\n\n    /* Encoder analyser parameters */\n    struct\n    {\n        unsigned int intra;     /* intra partitions */\n        unsigned int inter;     /* inter partitions */\n\n        int          b_transform_8x8;\n        int          i_weighted_pred; /* weighting for P-frames */\n        int          b_weighted_bipred; /* implicit weighting for B-frames */\n        int          i_direct_mv_pred; /* spatial vs temporal mv prediction */\n        int          i_chroma_qp_offset;\n\n        int          i_me_method; /* motion estimation algorithm to use (X264_ME_*) */\n        int          i_me_range; /* integer pixel motion estimation search range (from predicted mv) */\n        int          i_mv_range; /* maximum length of a mv (in pixels). -1 = auto, based on level */\n        int          i_mv_range_thread; /* minimum space between threads. -1 = auto, based on number of threads. */\n        int          i_subpel_refine; /* subpixel motion estimation quality */\n        int          b_chroma_me; /* chroma ME for subpel and mode decision in P-frames */\n        int          b_mixed_references; /* allow each mb partition to have its own reference number */\n        int          i_trellis;  /* trellis RD quantization */\n        int          b_fast_pskip; /* early SKIP detection on P-frames */\n        int          b_dct_decimate; /* transform coefficient thresholding on P-frames */\n        int          i_noise_reduction; /* adaptive pseudo-deadzone */\n        float        f_psy_rd; /* Psy RD strength */\n        float        f_psy_trellis; /* Psy trellis strength */\n        int          b_psy; /* Toggle all psy optimizations */\n\n        int          b_mb_info;            /* Use input mb_info data in x264_picture_t */\n        int          b_mb_info_update; /* Update the values in mb_info according to the results of encoding. 
*/\n\n        /* the deadzone size that will be used in luma quantization */\n        int          i_luma_deadzone[2]; /* {inter, intra} */\n\n        int          b_psnr;    /* compute and print PSNR stats */\n        int          b_ssim;    /* compute and print SSIM stats */\n    } analyse;\n\n    /* Rate control parameters */\n    struct\n    {\n        int         i_rc_method;    /* X264_RC_* */\n\n        int         i_qp_constant;  /* 0 to (51 + 6*(x264_bit_depth-8)). 0=lossless */\n        int         i_qp_min;       /* min allowed QP value */\n        int         i_qp_max;       /* max allowed QP value */\n        int         i_qp_step;      /* max QP step between frames */\n\n        int         i_bitrate;\n        float       f_rf_constant;  /* 1pass VBR, nominal QP */\n        float       f_rf_constant_max;  /* In CRF mode, maximum CRF as caused by VBV */\n        float       f_rate_tolerance;\n        int         i_vbv_max_bitrate;\n        int         i_vbv_buffer_size;\n        float       f_vbv_buffer_init; /* <=1: fraction of buffer_size. >1: kbit */\n        float       f_ip_factor;\n        float       f_pb_factor;\n\n        /* VBV filler: force CBR VBV and use filler bytes to ensure hard-CBR.\n         * Implied by NAL-HRD CBR. */\n        int         b_filler;\n\n        int         i_aq_mode;      /* psy adaptive QP. (X264_AQ_*) */\n        float       f_aq_strength;\n        int         b_mb_tree;      /* Macroblock-tree ratecontrol. 
*/\n        int         i_lookahead;\n\n        /* 2pass */\n        int         b_stat_write;   /* Enable stat writing in psz_stat_out */\n        char        *psz_stat_out;  /* output filename (in UTF-8) of the 2pass stats file */\n        int         b_stat_read;    /* Read stat from psz_stat_in and use it */\n        char        *psz_stat_in;   /* input filename (in UTF-8) of the 2pass stats file */\n\n        /* 2pass params (same as ffmpeg ones) */\n        float       f_qcompress;    /* 0.0 => cbr, 1.0 => constant qp */\n        float       f_qblur;        /* temporally blur quants */\n        float       f_complexity_blur; /* temporally blur complexity */\n        x264_zone_t *zones;         /* ratecontrol overrides */\n        int         i_zones;        /* number of zone_t's */\n        char        *psz_zones;     /* alternate method of specifying zones */\n    } rc;\n\n    /* Cropping Rectangle parameters: added to those implicitly defined by\n       non-mod16 video resolutions. */\n    struct\n    {\n        unsigned int i_left;\n        unsigned int i_top;\n        unsigned int i_right;\n        unsigned int i_bottom;\n    } crop_rect;\n\n    /* frame packing arrangement flag */\n    int i_frame_packing;\n\n    /* Muxing parameters */\n    int b_aud;                  /* generate access unit delimiters */\n    int b_repeat_headers;       /* put SPS/PPS before each keyframe */\n    int b_annexb;               /* if set, place start codes (4 bytes) before NAL units,\n                                 * otherwise place size (4 bytes) before NAL units. */\n    int i_sps_id;               /* SPS and PPS id number */\n    int b_vfr_input;            /* VFR input.  If 1, use timebase and timestamps for ratecontrol purposes.\n                                 * If 0, use fps only. 
*/\n    int b_pulldown;             /* use explicity set timebase for CFR */\n    uint32_t i_fps_num;\n    uint32_t i_fps_den;\n    uint32_t i_timebase_num;    /* Timebase numerator */\n    uint32_t i_timebase_den;    /* Timebase denominator */\n\n    int b_tff;\n\n    /* Pulldown:\n     * The correct pic_struct must be passed with each input frame.\n     * The input timebase should be the timebase corresponding to the output framerate. This should be constant.\n     * e.g. for 3:2 pulldown timebase should be 1001/30000\n     * The PTS passed with each frame must be the PTS of the frame after pulldown is applied.\n     * Frame doubling and tripling require b_vfr_input set to zero (see H.264 Table D-1)\n     *\n     * Pulldown changes are not clearly defined in H.264. Therefore, it is the calling app's responsibility to manage this.\n     */\n\n    int b_pic_struct;\n\n    /* Fake Interlaced.\n     *\n     * Used only when b_interlaced=0. Setting this flag makes it possible to flag the stream as PAFF interlaced yet\n     * encode all frames progessively. It is useful for encoding 25p and 30p Blu-Ray streams.\n     */\n\n    int b_fake_interlaced;\n\n    /* Don't optimize header parameters based on video content, e.g. ensure that splitting an input video, compressing\n     * each part, and stitching them back together will result in identical SPS/PPS. This is necessary for stitching\n     * with container formats that don't allow multiple SPS/PPS. */\n    int b_stitchable;\n\n    int b_opencl;            /* use OpenCL when available */\n    int i_opencl_device;     /* specify count of GPU devices to skip, for CLI users */\n    void *opencl_device_id;  /* pass explicit cl_device_id as void*, for API users */\n    char *psz_clbin_file;    /* filename (in UTF-8) of the compiled OpenCL kernel cache file */\n\n    /* Slicing parameters */\n    int i_slice_max_size;    /* Max size per slice in bytes; includes estimated NAL overhead. 
*/\n    int i_slice_max_mbs;     /* Max number of MBs per slice; overrides i_slice_count. */\n    int i_slice_min_mbs;     /* Min number of MBs per slice */\n    int i_slice_count;       /* Number of slices per frame: forces rectangular slices. */\n    int i_slice_count_max;   /* Absolute cap on slices per frame; stops applying slice-max-size\n                              * and slice-max-mbs if this is reached. */\n\n    /* Optional callback for freeing this x264_param_t when it is done being used.\n     * Only used when the x264_param_t sits in memory for an indefinite period of time,\n     * i.e. when an x264_param_t is passed to x264_t in an x264_picture_t or in zones.\n     * Not used when x264_encoder_reconfig is called directly. */\n    void (*param_free)( void* );\n\n    /* Optional low-level callback for low-latency encoding.  Called for each output NAL unit\n     * immediately after the NAL unit is finished encoding.  This allows the calling application\n     * to begin processing video data (e.g. by sending packets over a network) before the frame\n     * is done encoding.\n     *\n     * This callback MUST do the following in order to work correctly:\n     * 1) Have available an output buffer of at least size nal->i_payload*3/2 + 5 + 64.\n     * 2) Call x264_nal_encode( h, dst, nal ), where dst is the output buffer.\n     * After these steps, the content of nal is valid and can be used in the same way as if\n     * the NAL unit were output by x264_encoder_encode.\n     *\n     * This does not need to be synchronous with the encoding process: the data pointed to\n     * by nal (both before and after x264_nal_encode) will remain valid until the next\n     * x264_encoder_encode call.  The callback must be re-entrant.\n     *\n     * This callback does not work with frame-based threads; threads must be disabled\n     * or sliced-threads enabled.  
This callback also does not work as one would expect\n     * with HRD -- since the buffering period SEI cannot be calculated until the frame\n     * is finished encoding, it will not be sent via this callback.\n     *\n     * Note also that the NALs are not necessarily returned in order when sliced threads is\n     * enabled.  Accordingly, the variable i_first_mb and i_last_mb are available in\n     * x264_nal_t to help the calling application reorder the slices if necessary.\n     *\n     * When this callback is enabled, x264_encoder_encode does not return valid NALs;\n     * the calling application is expected to acquire all output NALs through the callback.\n     *\n     * It is generally sensible to combine this callback with a use of slice-max-mbs or\n     * slice-max-size.\n     *\n     * The opaque pointer is the opaque pointer from the input frame associated with this\n     * NAL unit. This helps distinguish between nalu_process calls from different sources,\n     * e.g. if doing multiple encodes in one process.\n     */\n    void (*nalu_process)( x264_t *h, x264_nal_t *nal, void *opaque );\n} x264_param_t;\n\nvoid x264_nal_encode( x264_t *h, uint8_t *dst, x264_nal_t *nal );\n\n/****************************************************************************\n * H.264 level restriction information\n ****************************************************************************/\n\ntypedef struct x264_level_t\n{\n    int level_idc;\n    int mbps;        /* max macroblock processing rate (macroblocks/sec) */\n    int frame_size;  /* max frame size (macroblocks) */\n    int dpb;         /* max decoded picture buffer (mbs) */\n    int bitrate;     /* max bitrate (kbit/sec) */\n    int cpb;         /* max vbv buffer (kbit) */\n    int mv_range;    /* max vertical mv component range (pixels) */\n    int mvs_per_2mb; /* max mvs per 2 consecutive mbs. */\n    int slice_rate;  /* ?? 
*/\n    int mincr;       /* min compression ratio */\n    int bipred8x8;   /* limit bipred to >=8x8 */\n    int direct8x8;   /* limit b_direct to >=8x8 */\n    int frame_only;  /* forbid interlacing */\n} x264_level_t;\n\n/* all of the levels defined in the standard, terminated by .level_idc=0 */\nX264_API extern const x264_level_t x264_levels[];\n\n/****************************************************************************\n * Basic parameter handling functions\n ****************************************************************************/\n\n/* x264_param_default:\n *      fill x264_param_t with default values and do CPU detection */\nvoid    x264_param_default( x264_param_t * );\n\n/* x264_param_parse:\n *  set one parameter by name.\n *  returns 0 on success, or returns one of the following errors.\n *  note: BAD_VALUE occurs only if it can't even parse the value,\n *  numerical range is not checked until x264_encoder_open() or\n *  x264_encoder_reconfig().\n *  value=NULL means \"true\" for boolean options, but is a BAD_VALUE for non-booleans. */\n#define X264_PARAM_BAD_NAME  (-1)\n#define X264_PARAM_BAD_VALUE (-2)\nint x264_param_parse( x264_param_t *, const char *name, const char *value );\n\n/****************************************************************************\n * Advanced parameter handling functions\n ****************************************************************************/\n\n/* These functions expose the full power of x264's preset-tune-profile system for\n * easy adjustment of large numbers of internal parameters.\n *\n * In order to replicate x264CLI's option handling, these functions MUST be called\n * in the following order:\n * 1) x264_param_default_preset\n * 2) Custom user options (via param_parse or directly assigned variables)\n * 3) x264_param_apply_fastfirstpass\n * 4) x264_param_apply_profile\n *\n * Additionally, x264CLI does not apply step 3 if the preset chosen is \"placebo\"\n * or --slow-firstpass is set. 
*/\n\n/* x264_param_default_preset:\n *      The same as x264_param_default, but also use the passed preset and tune\n *      to modify the default settings.\n *      (either can be NULL, which implies no preset or no tune, respectively)\n *\n *      Currently available presets are, ordered from fastest to slowest: */\nstatic const char * const x264_preset_names[] = { \"ultrafast\", \"superfast\", \"veryfast\", \"faster\", \"fast\", \"medium\", \"slow\", \"slower\", \"veryslow\", \"placebo\", 0 };\n\n/*      The presets can also be indexed numerically, as in:\n *      x264_param_default_preset( &param, \"3\", ... )\n *      with ultrafast mapping to \"0\" and placebo mapping to \"9\".  This mapping may\n *      of course change if new presets are added in between, but will always be\n *      ordered from fastest to slowest.\n *\n *      Warning: the speed of these presets scales dramatically.  Ultrafast is a full\n *      100 times faster than placebo!\n *\n *      Currently available tunings are: */\nstatic const char * const x264_tune_names[] = { \"film\", \"animation\", \"grain\", \"stillimage\", \"psnr\", \"ssim\", \"fastdecode\", \"zerolatency\", 0 };\n\n/*      Multiple tunings can be used if separated by a delimiter in \",./-+\",\n *      however multiple psy tunings cannot be used.\n *      film, animation, grain, stillimage, psnr, and ssim are psy tunings.\n *\n *      returns 0 on success, negative on failure (e.g. invalid preset/tune name). */\nint     x264_param_default_preset( x264_param_t *, const char *preset, const char *tune );\n\n/* x264_param_apply_fastfirstpass:\n *      If first-pass mode is set (rc.b_stat_read == 0, rc.b_stat_write == 1),\n *      modify the encoder settings to disable options generally not useful on\n *      the first pass. 
*/\nvoid    x264_param_apply_fastfirstpass( x264_param_t * );\n\n/* x264_param_apply_profile:\n *      Applies the restrictions of the given profile.\n *      Currently available profiles are, from most to least restrictive: */\nstatic const char * const x264_profile_names[] = { \"baseline\", \"main\", \"high\", \"high10\", \"high422\", \"high444\", 0 };\n\n/*      (can be NULL, in which case the function will do nothing)\n *\n *      Does NOT guarantee that the given profile will be used: if the restrictions\n *      of \"High\" are applied to settings that are already Baseline-compatible, the\n *      stream will remain baseline.  In short, it does not increase settings, only\n *      decrease them.\n *\n *      returns 0 on success, negative on failure (e.g. invalid profile name). */\nint     x264_param_apply_profile( x264_param_t *, const char *profile );\n\n/****************************************************************************\n * Picture structures and functions\n ****************************************************************************/\n\n/* x264_bit_depth:\n *      Specifies the number of bits per pixel that x264 uses. This is also the\n *      bit depth that x264 encodes in. If this value is > 8, x264 will read\n *      two bytes of input data for each pixel sample, and expect the upper\n *      (16-x264_bit_depth) bits to be zero.\n *      Note: The flag X264_CSP_HIGH_DEPTH must be used to specify the\n *      colorspace depth as well. */\nX264_API extern const int x264_bit_depth;\n\n/* x264_chroma_format:\n *      Specifies the chroma formats that x264 supports encoding. When this\n *      value is non-zero, then it represents a X264_CSP_* that is the only\n *      chroma format that x264 supports encoding. If the value is 0 then\n *      there are no restrictions. 
*/\nX264_API extern const int x264_chroma_format;\n\nenum pic_struct_e\n{\n    PIC_STRUCT_AUTO              = 0, // automatically decide (default)\n    PIC_STRUCT_PROGRESSIVE       = 1, // progressive frame\n    // \"TOP\" and \"BOTTOM\" are not supported in x264 (PAFF only)\n    PIC_STRUCT_TOP_BOTTOM        = 4, // top field followed by bottom\n    PIC_STRUCT_BOTTOM_TOP        = 5, // bottom field followed by top\n    PIC_STRUCT_TOP_BOTTOM_TOP    = 6, // top field, bottom field, top field repeated\n    PIC_STRUCT_BOTTOM_TOP_BOTTOM = 7, // bottom field, top field, bottom field repeated\n    PIC_STRUCT_DOUBLE            = 8, // double frame\n    PIC_STRUCT_TRIPLE            = 9, // triple frame\n};\n\ntypedef struct x264_hrd_t\n{\n    double cpb_initial_arrival_time;\n    double cpb_final_arrival_time;\n    double cpb_removal_time;\n\n    double dpb_output_time;\n} x264_hrd_t;\n\n/* Arbitrary user SEI:\n * Payload size is in bytes and the payload pointer must be valid.\n * Payload types and syntax can be found in Annex D of the H.264 Specification.\n * SEI payload alignment bits as described in Annex D must be included at the\n * end of the payload if needed.\n * The payload should not be NAL-encapsulated.\n * Payloads are written first in order of input, apart from in the case when HRD\n * is enabled where payloads are written after the Buffering Period SEI. */\n\ntypedef struct x264_sei_payload_t\n{\n    int payload_size;\n    int payload_type;\n    uint8_t *payload;\n} x264_sei_payload_t;\n\ntypedef struct x264_sei_t\n{\n    int num_payloads;\n    x264_sei_payload_t *payloads;\n    /* In: optional callback to free each payload AND x264_sei_payload_t when used. 
*/\n    void (*sei_free)( void* );\n} x264_sei_t;\n\ntypedef struct x264_image_t\n{\n    int     i_csp;       /* Colorspace */\n    int     i_plane;     /* Number of image planes */\n    int     i_stride[4]; /* Strides for each plane */\n    uint8_t *plane[4];   /* Pointers to each plane */\n} x264_image_t;\n\ntypedef struct x264_image_properties_t\n{\n    /* All arrays of data here are ordered as follows:\n     * each array contains one offset per macroblock, in raster scan order.  In interlaced\n     * mode, top-field MBs and bottom-field MBs are interleaved at the row level.\n     * Macroblocks are 16x16 blocks of pixels (with respect to the luma plane).  For the\n     * purposes of calculating the number of macroblocks, width and height are rounded up to\n     * the nearest 16.  If in interlaced mode, height is rounded up to the nearest 32 instead. */\n\n    /* In: an array of quantizer offsets to be applied to this image during encoding.\n     *     These are added on top of the decisions made by x264.\n     *     Offsets can be fractional; they are added before QPs are rounded to integer.\n     *     Adaptive quantization must be enabled to use this feature.  Behavior if quant\n     *     offsets differ between encoding passes is undefined. */\n    float *quant_offsets;\n    /* In: optional callback to free quant_offsets when used.\n     *     Useful if one wants to use a different quant_offset array for each frame. */\n    void (*quant_offsets_free)( void* );\n\n    /* In: optional array of flags for each macroblock.\n     *     Allows specifying additional information for the encoder such as which macroblocks\n     *     remain unchanged.  
Usable flags are listed below.\n     *     x264_param_t.analyse.b_mb_info must be set to use this, since x264 needs to track\n     *     extra data internally to make full use of this information.\n     *\n     * Out: if b_mb_info_update is set, x264 will update this array as a result of encoding.\n     *\n     *      For \"MBINFO_CONSTANT\", it will remove this flag on any macroblock whose decoded\n     *      pixels have changed.  This can be useful for e.g. noting which areas of the\n     *      frame need to actually be blitted. Note: this intentionally ignores the effects\n     *      of deblocking for the current frame, which should be fine unless one needs exact\n     *      pixel-perfect accuracy.\n     *\n     *      Results for MBINFO_CONSTANT are currently only set for P-frames, and are not\n     *      guaranteed to enumerate all blocks which haven't changed.  (There may be false\n     *      negatives, but no false positives.)\n     */\n    uint8_t *mb_info;\n    /* In: optional callback to free mb_info when used. */\n    void (*mb_info_free)( void* );\n\n    /* The macroblock is constant and remains unchanged from the previous frame. */\n    #define X264_MBINFO_CONSTANT   (1<<0)\n    /* More flags may be added in the future. 
*/\n\n    /* Out: SSIM of the frame luma (if x264_param_t.b_ssim is set) */\n    double f_ssim;\n    /* Out: Average PSNR of the frame (if x264_param_t.b_psnr is set) */\n    double f_psnr_avg;\n    /* Out: PSNR of Y, U, and V (if x264_param_t.b_psnr is set) */\n    double f_psnr[3];\n\n    /* Out: Average effective CRF of the encoded frame */\n    double f_crf_avg;\n} x264_image_properties_t;\n\ntypedef struct x264_picture_t\n{\n    /* In: force picture type (if not auto)\n     *     If x264 encoding parameters are violated in the forcing of picture types,\n     *     x264 will correct the input picture type and log a warning.\n     * Out: type of the picture encoded */\n    int     i_type;\n    /* In: force quantizer for != X264_QP_AUTO */\n    int     i_qpplus1;\n    /* In: pic_struct, for pulldown/doubling/etc...used only if b_pic_struct=1.\n     *     use pic_struct_e for pic_struct inputs\n     * Out: pic_struct element associated with frame */\n    int     i_pic_struct;\n    /* Out: whether this frame is a keyframe.  Important when using modes that result in\n     * SEI recovery points being used instead of IDR frames. */\n    int     b_keyframe;\n    /* In: user pts, Out: pts of encoded picture (user)*/\n    int64_t i_pts;\n    /* Out: frame dts. When the pts of the first frame is close to zero,\n     *      initial frames may have a negative dts which must be dealt with by any muxer */\n    int64_t i_dts;\n    /* In: custom encoding parameters to be set from this frame forwards\n           (in coded order, not display order). If NULL, continue using\n           parameters from the previous frame.  Some parameters, such as\n           aspect ratio, can only be changed per-GOP due to the limitations\n           of H.264 itself; in this case, the caller must force an IDR frame\n           if it needs the changed parameter to apply immediately. */\n    x264_param_t *param;\n    /* In: raw image data */\n    /* Out: reconstructed image data.  
x264 may skip part of the reconstruction process,\n            e.g. deblocking, in frames where it isn't necessary.  To force complete\n            reconstruction, at a small speed cost, set b_full_recon. */\n    x264_image_t img;\n    /* In: optional information to modify encoder decisions for this frame\n     * Out: information about the encoded frame */\n    x264_image_properties_t prop;\n    /* Out: HRD timing information. Output only when i_nal_hrd is set. */\n    x264_hrd_t hrd_timing;\n    /* In: arbitrary user SEI (e.g subtitles, AFDs) */\n    x264_sei_t extra_sei;\n    /* private user data. copied from input to output frames. */\n    void *opaque;\n} x264_picture_t;\n\n/* x264_picture_init:\n *  initialize an x264_picture_t.  Needs to be done if the calling application\n *  allocates its own x264_picture_t as opposed to using x264_picture_alloc. */\nvoid x264_picture_init( x264_picture_t *pic );\n\n/* x264_picture_alloc:\n *  alloc data for a picture. You must call x264_picture_clean on it.\n *  returns 0 on success, or -1 on malloc failure or invalid colorspace. */\nint x264_picture_alloc( x264_picture_t *pic, int i_csp, int i_width, int i_height );\n\n/* x264_picture_clean:\n *  free associated resource for a x264_picture_t allocated with\n *  x264_picture_alloc ONLY */\nvoid x264_picture_clean( x264_picture_t *pic );\n\n/****************************************************************************\n * Encoder functions\n ****************************************************************************/\n\n/* Force a link error in the case of linking against an incompatible API version.\n * Glue #defines exist to force correct macro expansion; the final output of the macro\n * is x264_encoder_open_##X264_BUILD (for purposes of dlopen). 
*/\n#define x264_encoder_glue1(x,y) x##y\n#define x264_encoder_glue2(x,y) x264_encoder_glue1(x,y)\n#define x264_encoder_open x264_encoder_glue2(x264_encoder_open_,X264_BUILD)\n\n/* x264_encoder_open:\n *      create a new encoder handler, all parameters from x264_param_t are copied */\nx264_t *x264_encoder_open( x264_param_t * );\n\n/* x264_encoder_reconfig:\n *      various parameters from x264_param_t are copied.\n *      this takes effect immediately, on whichever frame is encoded next;\n *      due to delay, this may not be the next frame passed to encoder_encode.\n *      if the change should apply to some particular frame, use x264_picture_t->param instead.\n *      returns 0 on success, negative on parameter validation error.\n *      not all parameters can be changed; see the actual function for a detailed breakdown.\n *\n *      since not all parameters can be changed, moving from preset to preset may not always\n *      fully copy all relevant parameters, but should still work usably in practice. however,\n *      more so than for other presets, many of the speed shortcuts used in ultrafast cannot be\n *      switched out of; using reconfig to switch between ultrafast and other presets is not\n *      recommended without a more fine-grained breakdown of parameters to take this into account. */\nint     x264_encoder_reconfig( x264_t *, x264_param_t * );\n/* x264_encoder_parameters:\n *      copies the current internal set of parameters to the pointer provided\n *      by the caller.  useful when the calling application needs to know\n *      how x264_encoder_open has changed the parameters, or the current state\n *      of the encoder after multiple x264_encoder_reconfig calls.\n *      note that the data accessible through pointers in the returned param struct\n *      (e.g. filenames) should not be modified by the calling application. 
*/\nvoid    x264_encoder_parameters( x264_t *, x264_param_t * );\n/* x264_encoder_headers:\n *      return the SPS and PPS that will be used for the whole stream.\n *      *pi_nal is the number of NAL units outputted in pp_nal.\n *      returns the number of bytes in the returned NALs.\n *      returns negative on error.\n *      the payloads of all output NALs are guaranteed to be sequential in memory. */\nint     x264_encoder_headers( x264_t *, x264_nal_t **pp_nal, int *pi_nal );\n/* x264_encoder_encode:\n *      encode one picture.\n *      *pi_nal is the number of NAL units outputted in pp_nal.\n *      returns the number of bytes in the returned NALs.\n *      returns negative on error and zero if no NAL units returned.\n *      the payloads of all output NALs are guaranteed to be sequential in memory. */\nint     x264_encoder_encode( x264_t *, x264_nal_t **pp_nal, int *pi_nal, x264_picture_t *pic_in, x264_picture_t *pic_out );\n/* x264_encoder_close:\n *      close an encoder handler */\nvoid    x264_encoder_close( x264_t * );\n/* x264_encoder_delayed_frames:\n *      return the number of currently delayed (buffered) frames\n *      this should be used at the end of the stream, to know when you have all the encoded frames. */\nint     x264_encoder_delayed_frames( x264_t * );\n/* x264_encoder_maximum_delayed_frames( x264_t *h ):\n *      return the maximum number of delayed (buffered) frames that can occur with the current\n *      parameters. */\nint     x264_encoder_maximum_delayed_frames( x264_t *h );\n/* x264_encoder_intra_refresh:\n *      If an intra refresh is not in progress, begin one with the next P-frame.\n *      If an intra refresh is in progress, begin one as soon as the current one finishes.\n *      Requires that b_intra_refresh be set.\n *\n *      Useful for interactive streaming where the client can tell the server that packet loss has\n *      occurred.  
In this case, keyint can be set to an extremely high value so that intra refreshes\n *      only occur when calling x264_encoder_intra_refresh.\n *\n *      In multi-pass encoding, if x264_encoder_intra_refresh is called differently in each pass,\n *      behavior is undefined.\n *\n *      Should not be called during an x264_encoder_encode. */\nvoid    x264_encoder_intra_refresh( x264_t * );\n/* x264_encoder_invalidate_reference:\n *      An interactive error resilience tool, designed for use in a low-latency one-encoder-few-clients\n *      system.  When the client has packet loss or otherwise incorrectly decodes a frame, the encoder\n *      can be told with this command to \"forget\" the frame and all frames that depend on it, referencing\n *      only frames that occurred before the loss.  This will force a keyframe if no frames are left to\n *      reference after the aforementioned \"forgetting\".\n *\n *      It is strongly recommended to use a large i_dpb_size in this case, which allows the encoder to\n *      keep around extra, older frames to fall back on in case more recent frames are all invalidated.\n *      Unlike increasing i_frame_reference, this does not increase the number of frames used for motion\n *      estimation and thus has no speed impact.  It is also recommended to set a very large keyframe\n *      interval, so that keyframes are not used except as necessary for error recovery.\n *\n *      x264_encoder_invalidate_reference is not currently compatible with the use of B-frames or intra\n *      refresh.\n *\n *      In multi-pass encoding, if x264_encoder_invalidate_reference is called differently in each pass,\n *      behavior is undefined.\n *\n *      Should not be called during an x264_encoder_encode, but multiple calls can be made simultaneously.\n *\n *      Returns 0 on success, negative on failure. */\nint x264_encoder_invalidate_reference( x264_t *, int64_t pts );\n\n#ifdef __cplusplus\n}\n#endif\n\n#endif\n"
  },
  {
    "path": "iOS_demo/x264/x264_config.h",
    "content": "#define X264_BIT_DEPTH     8\n#define X264_GPL           1\n#define X264_INTERLACED    1\n#define X264_CHROMA_FORMAT 0\n#define X264_REV 2744\n#define X264_REV_DIFF 10\n#define X264_VERSION \" r2744+10M 0c6fcc5\"\n#define X264_POINTVER \"0.148.2744+10M 0c6fcc5\"\n"
  },
  {
    "path": "ubuntu_x64/READme.txt",
    "content": "ubuntu 14.04\ngcc 4.8.4\n"
  }
]