Repository: FaceOnLive/License-Plate-Recognition-SDK-Android
Branch: main
Commit: 29a165725855
Files: 35
Total size: 116.2 KB
Directory structure:
gitextract_uah6n3qx/
├── .gitignore
├── README.md
├── build.gradle
├── demo/
│ ├── .gitignore
│ ├── build.gradle
│ ├── proguard-rules.pro
│ └── src/
│ └── main/
│ ├── AndroidManifest.xml
│ ├── java/
│ │ └── org/
│ │ └── buyun/
│ │ └── alpr/
│ │ ├── AlprVideoSequentialActivity.java
│ │ └── common/
│ │ ├── AlprActivity.java
│ │ ├── AlprBackgroundTask.java
│ │ ├── AlprCameraFragment.java
│ │ ├── AlprGLSurfaceView.java
│ │ ├── AlprImage.java
│ │ ├── AlprPlateView.java
│ │ └── AlprUtils.java
│ └── res/
│ ├── layout/
│ │ ├── activity_main.xml
│ │ └── fragment_camera.xml
│ ├── layout-land/
│ │ └── fragment_camera.xml
│ ├── values/
│ │ ├── base-strings.xml
│ │ ├── colors.xml
│ │ ├── strings.xml
│ │ ├── styles.xml
│ │ ├── template-dimens.xml
│ │ └── template-styles.xml
│ ├── values-sw600dp/
│ │ ├── template-dimens.xml
│ │ └── template-styles.xml
│ ├── values-v11/
│ │ └── template-styles.xml
│ └── values-v21/
│ ├── base-colors.xml
│ └── base-template-styles.xml
├── gradle/
│ └── wrapper/
│ ├── gradle-wrapper.jar
│ └── gradle-wrapper.properties
├── gradle.properties
├── gradlew
├── gradlew.bat
└── settings.gradle
================================================
FILE CONTENTS
================================================
================================================
FILE: .gitignore
================================================
*.iml
.gradle
/local.properties
/.idea/caches
/.idea/libraries
/.idea/modules.xml
/.idea/workspace.xml
/.idea/navEditor.xml
/.idea/assetWizardSettings.xml
.DS_Store
/build
/captures
.externalNativeBuild
.cxx
================================================
FILE: README.md
================================================
<div align="center">
<a href="https://join.slack.com/t/faceonlive/shared_invite/zt-2drx19c5t-vQsR4TUGPD8oL7i7BXdKZA">Slack</a>
·
<a href="https://www.faceonlive.com/">Website</a>
·
<a href="https://portfolio.faceonlive.com">Portfolio</a>
·
<a href="https://www.huggingface.co/FaceOnLive">Hugging Face</a>
·
<a href="https://getapi.faceonlive.com">Free APIs</a>
·
<a href="https://github.com/FaceOnLive/OpenKYC">OpenKYC</a>
·
<a href="https://github.com/FaceOnLive/Mask-Face-Attendance-App-Flutter">Face Attendance</a>
·
<a href="mailto:contact@faceonlive.com">Contact</a>
</div>
<h1 align="center">Robust, Realtime, On-Device License Plate Recognition SDK For Android</h1>
It not only recognizes number plates, but also detects vehicle model, color, and country.
## :tada: Try It Yourself
<a href="https://drive.google.com/file/d/1RRLOMN0v9xm_uGPk5pDSryQgMtcrxTog/view?usp=sharing" target="_blank">
<img alt="Get it on Google Play" src="https://goo.gl/cR2qQH" height="100"/>
</a>
<br/>
<br/>
https://user-images.githubusercontent.com/91896009/186433213-6bb1bda3-6b1b-4f71-b950-85e7d233ddff.mp4
================================================
FILE: build.gradle
================================================
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
    repositories {
        google()
        // FIX: JCenter was sunset in 2021 and now serves read-only (and increasingly
        // incomplete) content. mavenCentral() is added as the primary fallback;
        // jcenter() is kept last for backward compatibility with any artifact
        // not yet mirrored.
        mavenCentral()
        jcenter()
    }
    dependencies {
        classpath 'com.android.tools.build:gradle:3.5.1'

        // NOTE: Do not place your application dependencies here; they belong
        // in the individual module build.gradle files
    }
}

allprojects {
    repositories {
        google()
        mavenCentral()
        jcenter()
    }
}

// Deletes the root build directory on `gradlew clean`.
task clean(type: Delete) {
    delete rootProject.buildDir
}
================================================
FILE: demo/.gitignore
================================================
/build
================================================
FILE: demo/build.gradle
================================================
// Demo application module.
apply plugin: 'com.android.application'

android {
    compileSdkVersion 28

    defaultConfig {
        applicationId "org.buyun.alpr"
        minSdkVersion 21
        targetSdkVersion 28
        versionCode 1
        versionName "1.0"
        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
    }

    buildTypes {
        release {
            // Shrink/obfuscate release builds; the SDK classes are preserved
            // via the -keep rule in proguard-rules.pro.
            minifyEnabled true
            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
        }
    }

    // Pull shared demo sources/resources from the sibling "common" module folder.
    // NOTE(review): the paths assume a ../common directory that is not present in
    // this repository snapshot — confirm it exists at build time.
    sourceSets {
        main {
            java.srcDirs += ['../common/src/main/java']
            res.srcDirs += ['../common/src/main/res']
        }
    }
}

dependencies {
    implementation fileTree(dir: 'libs', include: ['*.jar'])
    implementation 'androidx.appcompat:appcompat:1.1.0'
    implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
    // The ALPR SDK module (native binaries + Java bindings).
    implementation project(":sdk")
}
================================================
FILE: demo/proguard-rules.pro
================================================
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
-keep class org.buyun.alpr.sdk.** {*;}
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile
================================================
FILE: demo/src/main/AndroidManifest.xml
================================================
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="org.buyun.alpr">

    <!-- Camera access is required for the live recognition demo. -->
    <uses-permission android:name="android.permission.CAMERA" />
    <uses-permission android:name="android.permission.INTERNET" />
    <uses-feature android:name="android.hardware.camera" />
    <uses-feature android:name="android.hardware.camera.autofocus" />

    <application android:allowBackup="true"
        android:label="@string/app_name"
        android:icon="@drawable/ic_launcher"
        android:theme="@style/MaterialTheme">

        <!-- Launcher activity; locked to landscape to match the camera preview layout. -->
        <activity android:name=".AlprVideoSequentialActivity"
            android:label="@string/app_name"
            android:screenOrientation="landscape">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>
</manifest>
================================================
FILE: demo/src/main/java/org/buyun/alpr/AlprVideoSequentialActivity.java
================================================
package org.buyun.alpr;
import android.os.Bundle;
import android.util.Log;
import android.util.Size;
import org.buyun.alpr.common.AlprActivity;
import org.buyun.alpr.common.AlprCameraFragment;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.Arrays;
import java.util.List;
/**
 * Main activity. Runs the ALPR engine in *sequential* delivery mode: each camera
 * frame is fully processed and its result displayed before the next frame is
 * accepted (see {@link #isParallelDeliveryEnabled()}).
 */
public class AlprVideoSequentialActivity extends AlprActivity {
    static final String TAG = AlprVideoSequentialActivity.class.getCanonicalName();

    // Preferred camera preview size, width x height in pixels.
    static final Size PREFERRED_SIZE = new Size(1280, 720);

    // --- Engine configuration constants, serialized to JSON in getJsonConfig() ---
    static final String CONFIG_DEBUG_LEVEL = "info";
    static final boolean CONFIG_DEBUG_WRITE_INPUT_IMAGE = false; // must be false unless you're debugging the code
    static final int CONFIG_NUM_THREADS = -1; // -1 presumably lets the engine pick — TODO(review): confirm against SDK docs
    static final boolean CONFIG_GPGPU_ENABLED = true;
    static final int CONFIG_MAX_LATENCY = -1;
    static final String CONFIG_CHARSET = "latin";
    static final boolean CONFIG_IENV_ENABLED = false;
    static final boolean CONFIG_OPENVINO_ENABLED = true;
    static final String CONFIG_OPENVINO_DEVICE = "CPU";
    static final double CONFIG_DETECT_MINSCORE = 0.1; // 10%
    static final boolean CONFIG_CAR_NOPLATE_DETECT_ENABLED = false;
    static final double CONFIG_CAR_NOPLATE_DETECT_MINSCORE = 0.8; // 80%
    // Detection region of interest. All zeros presumably disables the ROI;
    // the parent activity maps indices as [left, right, top, bottom] —
    // TODO(review): confirm ordering against the SDK documentation.
    static final List<Float> CONFIG_DETECT_ROI = Arrays.asList(0.f, 0.f, 0.f, 0.f);
    static final boolean CONFIG_PYRAMIDAL_SEARCH_ENABLED = true;
    static final double CONFIG_PYRAMIDAL_SEARCH_SENSITIVITY= 0.28; // 28%
    static final double CONFIG_PYRAMIDAL_SEARCH_MINSCORE = 0.5; // 50%
    static final int CONFIG_PYRAMIDAL_SEARCH_MIN_IMAGE_SIZE_INPIXELS = 800; // pixels
    // "klass_*" flags toggle vehicle classification modules. Presumed expansions:
    // LPCI = plate country id, VCR = vehicle color, VMMR = make/model,
    // VBSR = body style — TODO(review): confirm against SDK docs.
    static final boolean CONFIG_KLASS_LPCI_ENABLED = true;
    static final boolean CONFIG_KLASS_VCR_ENABLED = true;
    static final boolean CONFIG_KLASS_VMMR_ENABLED = true;
    static final boolean CONFIG_KLASS_VBSR_ENABLED = false;
    static final double CONFIG_KLASS_VCR_GAMMA = 1.5;
    static final double CONFIG_RECOGN_MINSCORE = 0.4; // 40%
    static final String CONFIG_RECOGN_SCORE_TYPE = "min";
    static final boolean CONFIG_RECOGN_RECTIFY_ENABLED = false;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        Log.i(TAG, "onCreate " + this);
        // The parent onCreate() initializes the ALPR engine with getJsonConfig().
        super.onCreate(savedInstanceState);
        // Host the camera fragment; it forwards frames back to this activity
        // through the AlprCameraFragmentSink interface.
        getSupportFragmentManager().beginTransaction()
                .replace(R.id.container, AlprCameraFragment.newInstance(PREFERRED_SIZE, this))
                .commit();
    }

    @Override
    public void onResume() {
        // No extra work needed; kept for symmetry with onDestroy().
        super.onResume();
    }

    @Override
    public void onDestroy() {
        Log.i(TAG, "onDestroy " + this);
        super.onDestroy();
    }

    /** Layout hosting the camera fragment container. */
    @Override
    protected int getLayoutResId() {
        return R.layout.activity_main;
    }

    /**
     * Builds the JSON configuration handed to AlprSdk.init() by the parent activity.
     * Keys must match the SDK's expected names exactly.
     */
    @Override
    protected JSONObject getJsonConfig() {
        JSONObject config = new JSONObject();
        try {
            config.put("debug_level", CONFIG_DEBUG_LEVEL);
            config.put("debug_write_input_image_enabled", CONFIG_DEBUG_WRITE_INPUT_IMAGE);
            config.put("debug_internal_data_path", getDebugInternalDataPath());

            config.put("num_threads", CONFIG_NUM_THREADS);
            config.put("gpgpu_enabled", CONFIG_GPGPU_ENABLED);
            config.put("charset", CONFIG_CHARSET);
            config.put("max_latency", CONFIG_MAX_LATENCY);
            config.put("ienv_enabled", CONFIG_IENV_ENABLED);
            config.put("openvino_enabled", CONFIG_OPENVINO_ENABLED);
            config.put("openvino_device", CONFIG_OPENVINO_DEVICE);

            config.put("detect_minscore", CONFIG_DETECT_MINSCORE);
            config.put("detect_roi", new JSONArray(getDetectROI()));

            config.put("car_noplate_detect_enabled", CONFIG_CAR_NOPLATE_DETECT_ENABLED);
            // NOTE(review): this key uses "min_score" while the others use "minscore" —
            // confirm the spelling against the SDK documentation.
            config.put("car_noplate_detect_min_score", CONFIG_CAR_NOPLATE_DETECT_MINSCORE);

            config.put("pyramidal_search_enabled", CONFIG_PYRAMIDAL_SEARCH_ENABLED);
            config.put("pyramidal_search_sensitivity", CONFIG_PYRAMIDAL_SEARCH_SENSITIVITY);
            config.put("pyramidal_search_minscore", CONFIG_PYRAMIDAL_SEARCH_MINSCORE);
            config.put("pyramidal_search_min_image_size_inpixels", CONFIG_PYRAMIDAL_SEARCH_MIN_IMAGE_SIZE_INPIXELS);

            config.put("klass_lpci_enabled", CONFIG_KLASS_LPCI_ENABLED);
            config.put("klass_vcr_enabled", CONFIG_KLASS_VCR_ENABLED);
            config.put("klass_vmmr_enabled", CONFIG_KLASS_VMMR_ENABLED);
            config.put("klass_vbsr_enabled", CONFIG_KLASS_VBSR_ENABLED);
            config.put("klass_vcr_gamma", CONFIG_KLASS_VCR_GAMMA);

            config.put("recogn_minscore", CONFIG_RECOGN_MINSCORE);
            config.put("recogn_score_type", CONFIG_RECOGN_SCORE_TYPE);
            config.put("recogn_rectify_enabled", CONFIG_RECOGN_RECTIFY_ENABLED);
        }
        catch (JSONException e) {
            e.printStackTrace();
        }
        return config;
    }

    /** Sequential mode: we deactivate parallel delivery and process frame by frame. */
    @Override
    protected boolean isParallelDeliveryEnabled() { return false; }

    /** ROI handed both to the engine (detect_roi) and to the overlay view. */
    @Override
    protected List<Float> getDetectROI() { return CONFIG_DETECT_ROI; }
}
================================================
FILE: demo/src/main/java/org/buyun/alpr/common/AlprActivity.java
================================================
package org.buyun.alpr.common;
import android.graphics.RectF;
import android.media.ExifInterface;
import android.media.Image;
import android.os.Bundle;
import android.os.Environment;
import android.os.SystemClock;
import android.util.Log;
import android.util.Size;
import android.view.Window;
import android.view.WindowManager;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import org.buyun.alpr.sdk.SDK_IMAGE_TYPE;
import org.buyun.alpr.sdk.AlprSdk;
import org.buyun.alpr.sdk.AlprCallback;
import org.buyun.alpr.sdk.AlprResult;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.File;
import java.util.List;
/**
 * Base activity to subclass to make our life easier.
 * Initializes the ALPR engine in onCreate(), releases it in onDestroy(), and feeds
 * camera frames to the native engine through {@link #setImage(Image, int)}.
 */
public abstract class AlprActivity extends AppCompatActivity implements AlprCameraFragment.AlprCameraFragmentSink {
    static final String TAG = AlprActivity.class.getCanonicalName();

    // Folder the engine may use to dump input images when debugging is enabled.
    private String mDebugInternalDataPath = null;

    // True while AlprSdk.process() is running for the current frame.
    // NOTE(review): not volatile/synchronized — assumed to be accessed from a single
    // camera background thread only; confirm against the camera fragment.
    private boolean mIsProcessing = false;
    private boolean mIsPaused = true;

    /**
     * Parallel callback delivery function used by the engine to notify for new deferred results
     */
    static class MyUltAlprSdkParallelDeliveryCallback extends AlprCallback {
        static final String TAG = MyUltAlprSdkParallelDeliveryCallback.class.getCanonicalName();
        AlprPlateView mAlprPlateView;
        Size mImageSize;
        long mTotalDuration = 0;
        int mOrientation = 0;

        void setAlprPlateView(@NonNull final AlprPlateView view) {
            mAlprPlateView = view;
        }

        // FIX: dropped the @NonNull annotation from 'orientation' — nullability
        // annotations are meaningless on a primitive int.
        void setImageSize(@NonNull final Size imageSize, final int orientation) {
            mImageSize = imageSize;
            mOrientation = orientation;
        }

        void setDurationTime(final long totalDuration) {
            mTotalDuration = totalDuration;
        }

        @Override
        public void onNewResult(AlprResult result) {
            Log.d(TAG, AlprUtils.resultToString(result));
            if (mAlprPlateView != null) {
                mAlprPlateView.setResult(result, mImageSize, mTotalDuration, mOrientation);
            }
        }

        static MyUltAlprSdkParallelDeliveryCallback newInstance() {
            return new MyUltAlprSdkParallelDeliveryCallback();
        }
    }

    /**
     * The parallel delivery callback. Set to null to disable parallel mode
     * and enforce sequential mode.
     */
    private MyUltAlprSdkParallelDeliveryCallback mParallelDeliveryCallback;

    // Overlay view drawing the recognized plates; provided by the camera fragment.
    private AlprPlateView mAlprPlateView;

    @Override
    protected void onCreate(final Bundle savedInstanceState) {
        Log.i(TAG, "onCreate " + this);
        super.onCreate(savedInstanceState);

        // Full-screen, always-on display: this is a live camera preview app.
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(getLayoutResId());

        // Create folder to dump input images for debugging
        File dummyFile = new File(getExternalFilesDir(null), "dummyFile");
        if (!dummyFile.getParentFile().exists() && !dummyFile.getParentFile().mkdirs()) {
            Log.e(TAG,"mkdir failed: " + dummyFile.getParentFile().getAbsolutePath());
        }
        mDebugInternalDataPath = dummyFile.getParentFile().exists() ? dummyFile.getParent() : Environment.getExternalStorageDirectory().getAbsolutePath();
        dummyFile.delete();

        // Create the parallel delivery callback if enabled (null => sequential mode).
        mParallelDeliveryCallback = isParallelDeliveryEnabled() ? MyUltAlprSdkParallelDeliveryCallback.newInstance() : null;

        // Init the engine with the subclass-provided JSON configuration.
        final JSONObject config = getJsonConfig();
        final AlprResult alprResult = AlprUtils.assertIsOk(AlprSdk.init(
                getAssets(),
                config.toString(),
                mParallelDeliveryCallback
        ));
        Log.i(TAG,"ALPR engine initialized: " + AlprUtils.resultToString(alprResult));
    }

    @Override
    public void onDestroy() {
        Log.i(TAG, "onDestroy " + this);
        // Release the native engine resources.
        final AlprResult result = AlprUtils.assertIsOk(AlprSdk.deInit());
        Log.i(TAG,"ALPR engine deInitialized: " + AlprUtils.resultToString(result));
        super.onDestroy();
    }

    @Override
    public synchronized void onResume() {
        super.onResume();
        mIsPaused = false;
    }

    @Override
    public synchronized void onPause() {
        mIsPaused = true;
        super.onPause();
    }

    /**
     * Receives the plate overlay view from the camera fragment and forwards the
     * detection ROI to it (and to the parallel callback when in parallel mode).
     */
    @Override
    public void setAlprPlateView(@NonNull final AlprPlateView view) {
        mAlprPlateView = view;
        if (mParallelDeliveryCallback != null) {
            mParallelDeliveryCallback.setAlprPlateView(view);
        }
        final List<Float> roi = getDetectROI();
        assert(roi.size() == 4);
        // The ROI list is presumably ordered [left, right, top, bottom], hence the
        // index shuffle to build RectF(left, top, right, bottom) —
        // TODO(review): confirm against the SDK documentation.
        mAlprPlateView.setDetectROI(
                new RectF(
                        roi.get(0).floatValue(),
                        roi.get(2).floatValue(),
                        roi.get(1).floatValue(),
                        roi.get(3).floatValue()
                )
        );
    }

    /**
     * Runs the ALPR inference on a camera frame (YUV420p planes) and displays the
     * result when in sequential mode. Always closes {@code image}.
     */
    @Override
    public void setImage(@NonNull final Image image, final int jpegOrientation) {
        // Drop the frame when the previous one is still being processed or when paused.
        if (mIsProcessing || mIsPaused) {
            Log.d(TAG, "Inference function not returned yet: Processing or paused");
            image.close();
            return;
        }
        mIsProcessing = true;

        final Size imageSize = new Size(image.getWidth(), image.getHeight());

        // Orientation
        // Convert from degree to real EXIF orientation
        int exifOrientation;
        switch (jpegOrientation) {
            case 90: exifOrientation = ExifInterface.ORIENTATION_ROTATE_90; break;
            case 180: exifOrientation = ExifInterface.ORIENTATION_ROTATE_180; break;
            case 270: exifOrientation = ExifInterface.ORIENTATION_ROTATE_270; break;
            case 0: default: exifOrientation = ExifInterface.ORIENTATION_NORMAL; break;
        }

        // Update image size for the async callback (swap width/height when rotated 90/270).
        if (mParallelDeliveryCallback != null) {
            mParallelDeliveryCallback.setImageSize((jpegOrientation % 180) == 0 ? imageSize : new Size(imageSize.getHeight(), imageSize.getWidth()), jpegOrientation);
        }

        // The actual ALPR inference is done here
        // Do not worry about the time taken to perform the inference, the caller
        // (most likely the camera fragment) set the current image using a background thread.
        final Image.Plane[] planes = image.getPlanes();
        final long startTimeInMillis = SystemClock.uptimeMillis();
        final AlprResult result;
        final long durationInMillis;
        try {
            result = AlprSdk.process(
                    SDK_IMAGE_TYPE.ULTALPR_SDK_IMAGE_TYPE_YUV420P,
                    planes[0].getBuffer(),
                    planes[1].getBuffer(),
                    planes[2].getBuffer(),
                    imageSize.getWidth(),
                    imageSize.getHeight(),
                    planes[0].getRowStride(),
                    planes[1].getRowStride(),
                    planes[2].getRowStride(),
                    planes[1].getPixelStride(),
                    exifOrientation
            );
            // Total time: Inference + image processing (chroma conversion, rotation...)
            durationInMillis = SystemClock.uptimeMillis() - startTimeInMillis;
        } finally {
            // FIX: always release the image and clear the busy flag, even when
            // process() throws. The original code did this after the call with no
            // try/finally, so an exception leaked the Image and left
            // mIsProcessing == true forever, permanently stalling the pipeline.
            image.close();
            mIsProcessing = false;
        }

        if (mParallelDeliveryCallback != null) {
            mParallelDeliveryCallback.setDurationTime(durationInMillis);
        }

        if (result.isOK()) {
            Log.d(TAG, AlprUtils.resultToString(result));
        } else {
            Log.e(TAG, AlprUtils.resultToString(result));
        }

        // Display the result if sequential mode. Otherwise, let the parallel callback
        // display the result when provided.
        // Starting version 3.2 the callback will be called even if the result is empty
        if (mAlprPlateView != null && (mParallelDeliveryCallback == null || (result.numPlates() == 0 && result.numCars() == 0))) { // means sequential call or no plate/car to expect from the parallel delivery callback
            mAlprPlateView.setResult(result, (jpegOrientation % 180) == 0 ? imageSize : new Size(imageSize.getHeight(), imageSize.getWidth()), durationInMillis, jpegOrientation);
        }
    }

    /**
     * Gets the base folder defining a path where the application can write private
     * data.
     * @return The path
     */
    protected String getDebugInternalDataPath() {
        return mDebugInternalDataPath;
    }

    /**
     * Gets the server url used to activate the license. Please contact us to get the correct URL.
     * e.g. https://localhost:3600
     * @return The URL
     */
    protected String getActivationServerUrl() {
        return "";
    }

    /** License key used for activation; empty in this demo. */
    protected String getActivationMasterOrSlaveKey() {
        return "";
    }

    /**
     * Returns the layout Id for the activity
     * @return the layout resource id
     */
    protected abstract int getLayoutResId();

    /**
     * Returns JSON config to be used to initialize the ALPR/ANPR SDK.
     * @return The JSON config
     */
    protected abstract JSONObject getJsonConfig();

    /**
     * Whether results are delivered via the parallel callback (true) or returned
     * synchronously from process() (false).
     */
    protected abstract boolean isParallelDeliveryEnabled();

    /** Detection region of interest as a 4-element list; see setAlprPlateView(). */
    protected abstract List<Float> getDetectROI();
}
================================================
FILE: demo/src/main/java/org/buyun/alpr/common/AlprBackgroundTask.java
================================================
package org.buyun.alpr.common;

import android.os.Handler;
import android.os.HandlerThread;

/**
 * Thin wrapper around a {@link HandlerThread} + {@link Handler} pair used to run
 * work on a dedicated background thread. All methods are synchronized, so the
 * wrapper itself is safe to drive from multiple threads.
 */
public class AlprBackgroundTask {
    // Both fields are non-null exactly while the task is running.
    private Handler mHandler;
    private HandlerThread mThread;

    /**
     * @return the handler bound to the background thread, or null when not running
     */
    public synchronized final Handler getHandler() {
        return mHandler;
    }

    /** @return true when start() has been called and stop() has not. */
    public synchronized final boolean isRunning() { return mHandler != null; }

    /**
     * Starts the background thread. No-op when already running.
     * @param threadName name given to the underlying thread (useful in traces)
     */
    public synchronized void start(final String threadName) {
        if (mThread != null) {
            return;
        }
        mThread = new HandlerThread(threadName);
        mThread.start();
        mHandler = new Handler(mThread.getLooper());
    }

    /**
     * Stops the background thread, letting already-queued messages drain
     * (quitSafely), and waits for the thread to terminate.
     */
    public synchronized void stop() {
        if (mThread == null) {
            return;
        }
        mThread.quitSafely();
        try {
            mThread.join();
        } catch (InterruptedException e) {
            // FIX: restore the interrupt status for the caller instead of swallowing it.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        } finally {
            // FIX: always clear the references. The original cleared them inside the
            // try block, so an interrupted join() left mThread non-null and made any
            // later start() a silent no-op against a quitting thread.
            mThread = null;
            mHandler = null;
        }
    }

    /** Posts a runnable to the background thread; silently dropped when not running. */
    public synchronized void post(final Runnable r) {
        if (mHandler != null) {
            mHandler.post(r);
        }
    }
}
================================================
FILE: demo/src/main/java/org/buyun/alpr/common/AlprCameraFragment.java
================================================
package org.buyun.alpr.common;
import android.Manifest;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.core.app.ActivityCompat;
import androidx.fragment.app.DialogFragment;
import androidx.fragment.app.Fragment;
import androidx.core.content.ContextCompat;
import android.text.TextUtils;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import org.buyun.alpr.R; // FIXME(dmi): must remove
public class AlprCameraFragment extends Fragment
implements ActivityCompat.OnRequestPermissionsResultCallback {
static final int REQUEST_CAMERA_PERMISSION = 1;
static final String FRAGMENT_DIALOG = "dialog";
static final String TAG = AlprCameraFragment.class.getCanonicalName();
static final int VIDEO_FORMAT = ImageFormat.YUV_420_888; // All Android devices are required to support this format
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
/**
* Using #2: processing and pending.
*/
static final int MAX_IMAGES = 2;
/**
* The camera preview size will be chosen to be the smallest frame by pixel size capable of
* containing a DESIRED_SIZE x DESIRED_SIZE square.
*/
static final int MINIMUM_PREVIEW_SIZE = 320;
private Size mPreferredSize = null;
/**
* ID of the current {@link CameraDevice}.
*/
private String mCameraId;
private int mJpegOrientation = 1;
/**
* An {@link AlprGLSurfaceView} for camera preview.
*/
private AlprGLSurfaceView mGLSurfaceView;
private AlprPlateView mPlateView;
/**
* A {@link CameraCaptureSession } for camera preview.
*/
private CameraCaptureSession mCaptureSession;
/**
* A reference to the opened {@link CameraDevice}.
*/
private CameraDevice mCameraDevice;
/**
* The {@link android.util.Size} of camera preview.
*/
private Size mPreviewSize;
private AlprCameraFragmentSink mSink;
private final AlprBackgroundTask mBackgroundTaskCamera = new AlprBackgroundTask();
private final AlprBackgroundTask mBackgroundTaskDrawing = new AlprBackgroundTask();
private final AlprBackgroundTask mBackgroundTaskInference = new AlprBackgroundTask();
/**
* {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its state.
*/
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
    @Override
    public void onOpened(@NonNull CameraDevice cameraDevice) {
        // This method is called when the camera is opened. We start camera preview here.
        // Release the open/close lock first so a concurrent closeCamera() cannot block.
        mCameraOpenCloseLock.release();
        mCameraDevice = cameraDevice;
        createCameraCaptureSession();
    }

    @Override
    public void onDisconnected(@NonNull CameraDevice cameraDevice) {
        // Camera taken away (e.g. by another app): release and forget the device.
        mCameraOpenCloseLock.release();
        cameraDevice.close();
        mCameraDevice = null;
    }

    @Override
    public void onError(@NonNull CameraDevice cameraDevice, int error) {
        // Unrecoverable camera error: release everything and finish the activity.
        mCameraOpenCloseLock.release();
        cameraDevice.close();
        mCameraDevice = null;
        Activity activity = getActivity();
        if (null != activity) {
            activity.finish();
        }
    }
};
private boolean mClosingCamera = false;
/**
* An {@link ImageReader} that handles still image capture.
*/
private ImageReader mImageReaderInference;
private ImageReader mImageReaderDrawing;
/**
* This a callback object for the {@link ImageReader}. "onImageAvailable" will be called when a
* still image is ready to be saved.
*/
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener
        = new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader reader) {
        if (mClosingCamera) {
            // NOTE(review): returning without acquiring leaves the frame queued in the
            // reader; presumably fine because the reader is closed right after — confirm.
            Log.d(TAG, "Closing camera");
            return;
        }
        try {
            final Image image = reader.acquireLatestImage();
            if (image == null) {
                return;
            }
            // The same listener serves two readers: route by surface identity.
            final boolean isForDrawing = (reader.getSurface() == mImageReaderDrawing.getSurface());
            if (isForDrawing) {
                // Ownership of 'image' transfers to the receiver, which must close it.
                /*mBackgroundTaskDrawing.post(() ->*/ mGLSurfaceView.setImage(image, mJpegOrientation)/*)*/;
            }
            else {
                /*mBackgroundTaskInference.post(() ->*/ mSink.setImage(image, mJpegOrientation)/*)*/;
            }
        } catch (final Exception e) {
            e.printStackTrace();
            Log.e(TAG, e.toString());
        }
    }
};
private CaptureRequest.Builder mCaptureRequestBuilder;
/**
* {@link CaptureRequest} generated by {@link #mCaptureRequestBuilder}
*/
private CaptureRequest mCaptureRequest;
/**
* A {@link Semaphore} to prevent the app from exiting before closing the camera.
*/
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
/**
* Orientation of the camera sensor
*/
private int mSensorOrientation;
/**
* Default constructor automatically called when the fragment is recreated. Required.
* https://stackoverflow.com/questions/51831053/could-not-find-fragment-constructor
*/
public AlprCameraFragment() {
    // nothing special here; required for framework re-instantiation
}

// NOTE(review): when the framework recreates the fragment via the public no-arg
// constructor, mPreferredSize and mSink stay null — confirm this path is handled.
private AlprCameraFragment(final Size preferredSize, final AlprCameraFragmentSink sink) {
    mPreferredSize = preferredSize;
    mSink = sink;
}

/**
 * Public function to be called to create the fragment.
 * @param preferredSize preferred camera preview size
 * @param sink receiver for camera frames and the plate overlay view
 * @return a new fragment instance
 */
public static AlprCameraFragment newInstance(final Size preferredSize, final AlprCameraFragmentSink sink) {
    return new AlprCameraFragment(preferredSize, sink);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
                         Bundle savedInstanceState) {
    // Inflate the camera layout; child views are resolved in onViewCreated().
    return inflater.inflate(R.layout.fragment_camera, container, false);
}

@Override
public void onViewCreated(final View view, Bundle savedInstanceState) {
    // Grab the GL preview surface and the overlay that draws plate boxes.
    mGLSurfaceView = (AlprGLSurfaceView) view.findViewById(R.id.glSurfaceView);
    mPlateView = (AlprPlateView) view.findViewById(R.id.plateView);
    //mPlateView.setBackgroundColor(Color.RED);
}

@Override
public void onActivityCreated(Bundle savedInstanceState) {
    super.onActivityCreated(savedInstanceState);
}
@Override
public synchronized void onResume() {
    super.onResume();
    // Worker threads must exist before the camera is opened (its callbacks run on them).
    startBackgroundThreads();
    // Forward the plateView to the sink
    if (mSink != null && mPlateView != null) {
        mSink.setAlprPlateView(mPlateView);
    }
    // Open the camera
    openCamera(mGLSurfaceView.getWidth(), mGLSurfaceView.getHeight());
}

@Override
public synchronized void onPause() {
    // Mirror of onResume(): close the camera before stopping the worker threads.
    closeCamera();
    stopBackgroundThreads();
    super.onPause();
}
// Shows a rationale dialog when the user previously denied the permission;
// otherwise asks the system directly.
private void requestCameraPermission() {
    if (shouldShowRequestPermissionRationale(Manifest.permission.CAMERA)) {
        new ConfirmationDialog().show(getChildFragmentManager(), FRAGMENT_DIALOG);
    } else {
        requestPermissions(new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSION);
    }
}

@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
                                       @NonNull int[] grantResults) {
    if (requestCode == REQUEST_CAMERA_PERMISSION) {
        // Camera permission denied: surface an error dialog to the user.
        if (grantResults.length != 1 || grantResults[0] != PackageManager.PERMISSION_GRANTED) {
            ErrorDialog.newInstance(getString(R.string.request_permission))
                    .show(getChildFragmentManager(), FRAGMENT_DIALOG);
        }
    } else {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    }
}
/**
 * Shows a {@link Toast} on the UI thread.
 *
 * @param text The message to show
 */
private void showToast(final String text) {
    final Activity activity = getActivity();
    if (activity == null) {
        // Fragment detached: nothing to attach the toast to.
        return;
    }
    activity.runOnUiThread(new Runnable() {
        @Override
        public void run() {
            Toast.makeText(activity, text, Toast.LENGTH_SHORT).show();
        }
    });
}
/**
 * Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
 * width and height are at least as large as the minimum of both, or an exact match if possible.
 *
 * @param choices The list of sizes that the camera supports for the intended output class
 * @param width   The minimum desired width
 * @param height  The minimum desired height
 * @return The optimal {@code Size}, or an arbitrary one if none were big enough
 */
private static Size chooseOptimalSize(final Size[] choices, final int width, final int height) {
    final int minSize = Math.max(Math.min(width, height), MINIMUM_PREVIEW_SIZE);
    final Size desiredSize = new Size(width, height);

    // Partition supported sizes into those big enough for the preview and the rest.
    final List<Size> bigEnough = new ArrayList<>();
    final List<Size> tooSmall = new ArrayList<>();
    boolean exactSizeFound = false;
    for (final Size candidate : choices) {
        if (candidate.equals(desiredSize)) {
            // Remember the exact match but keep scanning so every size gets logged.
            exactSizeFound = true;
        }
        final boolean fits = candidate.getHeight() >= minSize && candidate.getWidth() >= minSize;
        (fits ? bigEnough : tooSmall).add(candidate);
    }

    Log.i(TAG, "Desired size: " + desiredSize + ", min size: " + minSize + "x" + minSize);
    Log.i(TAG, "Valid preview sizes: [" + TextUtils.join(", ", bigEnough) + "]");
    Log.i(TAG, "Rejected preview sizes: [" + TextUtils.join(", ", tooSmall) + "]");

    if (exactSizeFound) {
        Log.i(TAG, "Exact size match found.");
        return desiredSize;
    }

    // No exact match: take the smallest size that is still big enough, if any.
    if (!bigEnough.isEmpty()) {
        final Size chosenSize = Collections.min(bigEnough, new CompareSizesByArea());
        Log.i(TAG, "Chosen size: " + chosenSize.getWidth() + "x" + chosenSize.getHeight());
        return chosenSize;
    }
    Log.e(TAG, "Couldn't find any suitable preview size");
    return choices[0];
}
/**
 * Sets up member variables related to camera: the back-facing camera id, the sensor
 * orientation, the JPEG orientation, the preview size and the matching view aspect ratios.
 */
@SuppressWarnings("SuspiciousNameCombination")
private void setUpCameraOutputs() {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics
                    = manager.getCameraCharacteristics(cameraId);

            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }

            StreamConfigurationMap map = characteristics.get(
                    CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);

            // JPEG orientation
            // https://developer.android.com/reference/android/hardware/camera2/CaptureRequest#JPEG_ORIENTATION
            // NOTE(review): the "+ 270" offset pairs with the non-standard ORIENTATIONS
            // table declared above — confirm on devices with other sensor orientations.
            int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            mJpegOrientation = (ORIENTATIONS.get(rotation) + mSensorOrientation + 270) % 360;

            // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize =
                    chooseOptimalSize(
                            map.getOutputSizes(SurfaceTexture.class),
                            mPreferredSize.getWidth(),
                            mPreferredSize.getHeight());

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            final int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mGLSurfaceView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
                mPlateView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                // Portrait: swap width/height for the view aspect ratio.
                mGLSurfaceView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
                mPlateView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }

            // First suitable back camera wins.
            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        ErrorDialog.newInstance(getString(R.string.camera_error))
                .show(getChildFragmentManager(), FRAGMENT_DIALOG);
    }
}
/**
 * Opens the camera specified by {@link AlprCameraFragment#mCameraId}.
 * Requests the CAMERA runtime permission first if it is not granted yet.
 *
 * @param width  not used by this implementation
 * @param height not used by this implementation
 */
private void openCamera(int width, int height) {
    if (ContextCompat.checkSelfPermission(getActivity(), Manifest.permission.CAMERA)
            != PackageManager.PERMISSION_GRANTED) {
        requestCameraPermission();
        return;
    }
    setUpCameraOutputs();
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
            throw new RuntimeException("Time out waiting to lock camera opening.");
        }
        manager.openCamera(mCameraId, mStateCallback, mBackgroundTaskCamera.getHandler());
    } catch (CameraAccessException e) {
        // BUGFIX: openCamera() failed, so mStateCallback (which normally releases
        // the semaphore) will never be invoked. Release it here, otherwise every
        // subsequent openCamera()/closeCamera() call deadlocks on the lock.
        mCameraOpenCloseLock.release();
        e.printStackTrace();
    } catch (InterruptedException e) {
        throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
    }
}
/**
 * Closes the current {@link CameraDevice} along with the capture session and
 * both image readers. The open/close semaphore guards against closing while an
 * open is still in flight; {@code mClosingCamera} flags the teardown window.
 */
private void closeCamera() {
    try {
        mClosingCamera = true;
        mCameraOpenCloseLock.acquire();
        // Teardown order matters: session first, then the device, then readers.
        if (mCaptureSession != null) {
            mCaptureSession.close();
            mCaptureSession = null;
        }
        if (mCameraDevice != null) {
            mCameraDevice.close();
            mCameraDevice = null;
        }
        if (mImageReaderInference != null) {
            mImageReaderInference.close();
            mImageReaderInference = null;
        }
        if (mImageReaderDrawing != null) {
            mImageReaderDrawing.close();
            mImageReaderDrawing = null;
        }
    } catch (InterruptedException e) {
        throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
    } finally {
        // Always undo the lock and the flag, even if a close() call threw.
        mCameraOpenCloseLock.release();
        mClosingCamera = false;
    }
}
/**
 * Starts the three worker threads: one for SDK inference, one for drawing
 * the preview, and one for camera callbacks.
 */
private void startBackgroundThreads() {
    mBackgroundTaskInference.start("InferenceBackgroundThread");
    mBackgroundTaskDrawing.start("DrawingBackgroundThread");
    mBackgroundTaskCamera.start("CameraBackgroundThread");
}
/**
 * Stops the background threads started by {@code startBackgroundThreads()}.
 */
private void stopBackgroundThreads() {
    mBackgroundTaskInference.stop();
    mBackgroundTaskDrawing.stop();
    mBackgroundTaskCamera.stop();
}
/**
 * Creates a new {@link CameraCaptureSession} for camera preview. Two
 * {@link ImageReader}s are attached as capture targets — one feeding
 * inference, one feeding the on-screen drawing — and a repeating request
 * streams frames into both. Note both readers share the same
 * {@code mOnImageAvailableListener} and the camera background handler.
 */
private void createCameraCaptureSession() {
    try {
        // Create Image readers
        mImageReaderInference = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(),
                VIDEO_FORMAT, MAX_IMAGES);
        mImageReaderInference.setOnImageAvailableListener(
                mOnImageAvailableListener, mBackgroundTaskCamera.getHandler());
        mImageReaderDrawing = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(),
                VIDEO_FORMAT, MAX_IMAGES);
        mImageReaderDrawing.setOnImageAvailableListener(
                mOnImageAvailableListener, mBackgroundTaskCamera.getHandler());
        // We set up a CaptureRequest.Builder with the output Surface to the image reader.
        // TEMPLATE_RECORD favors a steady frame rate over per-frame quality.
        mCaptureRequestBuilder
                = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
        //mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<>(1, 25));
        //mCaptureRequestBuilder.set(CaptureRequest.CONTROL_MODE,
        //        CaptureRequest.CONTROL_MODE_USE_SCENE_MODE);
        //mCaptureRequestBuilder.set(CaptureRequest.CONTROL_SCENE_MODE,
        //        CaptureRequest.CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO);
        mCaptureRequestBuilder.addTarget(mImageReaderInference.getSurface());
        mCaptureRequestBuilder.addTarget(mImageReaderDrawing.getSurface());
        // Here, we create a CameraCaptureSession
        mCameraDevice.createCaptureSession(Arrays.asList(mImageReaderInference.getSurface(), mImageReaderDrawing.getSurface()),
                new CameraCaptureSession.StateCallback() {
                    @Override
                    public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
                        // The camera is already closed
                        if (null == mCameraDevice) {
                            return;
                        }
                        // When the session is ready, we start displaying the preview.
                        mCaptureSession = cameraCaptureSession;
                        try {
                            // Auto focus should be continuous
                            mCaptureRequestBuilder.set(
                                    CaptureRequest.CONTROL_AF_MODE,
                                    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                            // Flash is automatically enabled when necessary.
                            mCaptureRequestBuilder.set(
                                    CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
                            // Finally, we start grabbing the frames
                            mCaptureRequest = mCaptureRequestBuilder.build();
                            mCaptureSession.setRepeatingRequest(mCaptureRequest,
                                    null, mBackgroundTaskCamera.getHandler());
                        } catch (CameraAccessException e) {
                            e.printStackTrace();
                        }
                    }
                    @Override
                    public void onConfigureFailed(
                            @NonNull CameraCaptureSession cameraCaptureSession) {
                        showToast("Failed");
                    }
                }, mBackgroundTaskCamera.getHandler()
        );
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
/**
 * Consumer of the camera fragment's output: receives the overlay view used
 * to render recognition results and each camera preview frame.
 */
public interface AlprCameraFragmentSink {
    /**
     * Supplies the overlay view on which recognized plates should be drawn.
     *
     * @param view the plate overlay view, never null
     */
    void setAlprPlateView(@NonNull final AlprPlateView view);

    /**
     * Supplies a camera frame for processing.
     * NOTE(review): implementations appear to take ownership of the image
     * (it must eventually be closed) — confirm against the callers.
     *
     * @param image           the camera frame, never null
     * @param jpegOrientation clockwise rotation in degrees (0/90/180/270)
     *                        to apply when interpreting the frame
     */
    void setImage(@NonNull final Image image, final int jpegOrientation);
}
/**
 * Orders two {@code Size}s by ascending pixel area.
 */
static class CompareSizesByArea implements Comparator<Size> {
    @Override
    public int compare(Size lhs, Size rhs) {
        // Promote to long before multiplying so large dimensions cannot overflow.
        final long lhsArea = (long) lhs.getWidth() * lhs.getHeight();
        final long rhsArea = (long) rhs.getWidth() * rhs.getHeight();
        return Long.compare(lhsArea, rhsArea);
    }
}
/**
 * Dialog fragment that shows a fatal error message and finishes the hosting
 * activity when acknowledged.
 */
public static class ErrorDialog extends DialogFragment {
    private static final String ARG_MESSAGE = "message";

    /** Builds a dialog instance carrying {@code message} in its arguments bundle. */
    public static ErrorDialog newInstance(String message) {
        final Bundle args = new Bundle();
        args.putString(ARG_MESSAGE, message);
        final ErrorDialog dialog = new ErrorDialog();
        dialog.setArguments(args);
        return dialog;
    }

    @NonNull
    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        final Activity activity = getActivity();
        final String message = getArguments().getString(ARG_MESSAGE);
        final AlertDialog.Builder builder = new AlertDialog.Builder(activity);
        builder.setMessage(message);
        builder.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialogInterface, int i) {
                // The error is unrecoverable: close the whole activity.
                activity.finish();
            }
        });
        return builder.create();
    }
}
/**
 * OK/Cancel confirmation dialog shown before requesting the camera
 * permission: OK re-issues the permission request on the parent fragment,
 * Cancel finishes the hosting activity.
 */
public static class ConfirmationDialog extends DialogFragment {
    @NonNull
    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        final Fragment parent = getParentFragment();
        final AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
        builder.setMessage(R.string.request_permission);
        builder.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                // Route the request through the parent so its
                // onRequestPermissionsResult() receives the answer.
                parent.requestPermissions(new String[]{Manifest.permission.CAMERA},
                        REQUEST_CAMERA_PERMISSION);
            }
        });
        builder.setNegativeButton(android.R.string.cancel,
                new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        final Activity activity = parent.getActivity();
                        if (activity != null) {
                            activity.finish();
                        }
                    }
                });
        return builder.create();
    }
}
}
================================================
FILE: demo/src/main/java/org/buyun/alpr/common/AlprGLSurfaceView.java
================================================
package org.buyun.alpr.common;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.graphics.PixelFormat;
import android.media.Image;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.Log;
import android.view.SurfaceHolder;
/**
 * GL surface view that renders raw YUV420_888 camera frames to the screen.
 * The three YUV planes are uploaded as separate textures and converted to
 * RGB in a fragment shader; the quad's texture coordinates are swapped per
 * JPEG orientation (0/90/180/270 degrees) so the frame appears upright.
 */
public class AlprGLSurfaceView extends GLSurfaceView implements GLSurfaceView.Renderer {
    private static final String TAG = AlprGLSurfaceView.class.getCanonicalName();

    // Byte sizes used when allocating the direct vertex/index buffers.
    private static final int FLOAT_SIZE_BYTES = 4;
    private static final int SHORT_SIZE_BYTES = 2;
    // Each vertex is 5 floats: x, y, z position followed by u, v texture coords.
    private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
    private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
    private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;

    // Full-screen quad (two triangles). One vertex/index set per supported
    // orientation: positions are identical, only UVs and winding differ.
    private static final float[] TRIANGLE_VERTICES_DATA_0 = {
            1, -1, 0, 1, 1, // 0: bottom/right
            1, 1, 0, 1, 0, // 1: top/right
            -1, 1, 0, 0, 0, // 2: top/left
            -1, -1, 0, 0, 1 // 3: bottom/left
    };
    private static final short[] INDICES_DATA_0 = {
            0, 1, 2, // triangle #1: bottom/right, top/right, top/left
            2, 3, 0 // triangle #2: top/left, bottom/left, bottom/right
    };
    private static final float[] TRIANGLE_VERTICES_DATA_90 = {
            1, -1, 0, 1, 0,
            1, 1, 0, 0, 0,
            -1, 1, 0, 0, 1,
            -1, -1, 0, 1, 1,
    };
    private static final short[] INDICES_DATA_90 = {
            3, 0, 1,
            1, 2, 3
    };
    private static final float[] TRIANGLE_VERTICES_DATA_180 = {
            1, -1, 0, 0, 0,
            1, 1, 0, 0, 1,
            -1, 1, 0, 1, 1,
            -1, -1, 0, 1, 0,
    };
    private static final short[] INDICES_DATA_180 = {
            2, 3, 0,
            0, 1, 2
    };
    private static final float[] TRIANGLE_VERTICES_DATA_270 = {
            1, -1, 0, 0, 1,
            1, 1, 0, 1, 1,
            -1, 1, 0, 1, 0,
            -1, -1, 0, 0, 0,
    };
    private static final short[] INDICES_DATA_270 = {
            1, 2, 3,
            3, 0, 1
    };

    // Direct buffers holding whichever orientation's vertex/index data is active.
    private FloatBuffer mTriangleVertices;
    private ShortBuffer mIndices;
    private int mJpegOrientation = 0;
    // Set when setImage() sees a new orientation; consumed by onDrawFrame().
    private boolean mJpegOrientationChanged = false;

    // Pass-through vertex shader: position + texture coordinate.
    private static final String VERTEX_SHADER_SOURCE = "precision mediump float;" +
            "attribute vec4 aPosition;\n" +
            "attribute vec2 aTextureCoord;\n" +
            "varying vec2 vTextureCoord;\n" +
            "void main() {\n" +
            "  gl_Position = aPosition;\n" +
            "  vTextureCoord = aTextureCoord;\n" +
            "}\n";

    // Fragment shader: samples the Y, U and V planes and converts to RGB.
    private static final String FRAGMENT_SHADER_SOURCE = "precision mediump float;" +
            "varying vec2 vTextureCoord;" +
            "" +
            "uniform sampler2D SamplerY; " +
            "uniform sampler2D SamplerU;" +
            "uniform sampler2D SamplerV;" +
            "" +
            "const mat3 yuv2rgb = mat3(1.164, 0, 1.596, 1.164, -0.391, -0.813, 1.164, 2.018, 0);" +
            "" +
            "void main() { " +
            "    vec3 yuv = vec3(1.1643 * (texture2D(SamplerY, vTextureCoord).r - 0.06274)," +
            "                    texture2D(SamplerU, vTextureCoord).r - 0.5019," +
            "                    texture2D(SamplerV, vTextureCoord).r - 0.5019);" +
            "    vec3 rgb = yuv * yuv2rgb; " +
            "    gl_FragColor = vec4(rgb, 1.0);" +
            "} ";

    // GL handles resolved in onSurfaceCreated().
    private int mProgram;
    private int maPositionHandle;
    private int maTextureHandle;
    private int muSamplerYHandle;
    private int muSamplerUHandle;
    private int muSamplerVHandle;
    // One texture per YUV plane.
    private int[] mTextureY = new int[1];
    private int[] mTextureU = new int[1];
    private int[] mTextureV = new int[1];
    private boolean mSurfaceCreated;
    // Frame pending rendering.
    // NOTE(review): written by setImage() and read/cleared on the GL thread in
    // onDrawFrame() without synchronization — appears to rely on requestRender()
    // ordering; confirm this is safe on the target devices.
    private Image mImage = null;
    // Aspect ratio requested via setAspectRatio(); 0 means "unconstrained".
    private int mRatioWidth = 0;
    private int mRatioHeight = 0;

    public AlprGLSurfaceView(android.content.Context context) {
        super(context);
        initGL();
    }

    public AlprGLSurfaceView(android.content.Context context, android.util.AttributeSet attrs) {
        super(context, attrs);
        initGL();
    }

    /**
     * Configures the GLES2 context, registers this view as its own renderer
     * (render-on-demand only) and allocates the vertex/index buffers with the
     * 0-degree data as the initial orientation.
     */
    private void initGL() {
        setEGLContextClientVersion(2);
        setEGLConfigChooser(8, 8, 8, 8, 16, 0);
        setRenderer(this);
        getHolder().setFormat(PixelFormat.TRANSLUCENT);
        // Only redraw when requestRender() is called (i.e. when a frame arrives).
        setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
        mTriangleVertices = ByteBuffer.allocateDirect(TRIANGLE_VERTICES_DATA_0.length
                * FLOAT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asFloatBuffer();
        mTriangleVertices.put(TRIANGLE_VERTICES_DATA_0).position(0);
        mIndices = ByteBuffer.allocateDirect(INDICES_DATA_0.length
                * SHORT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asShortBuffer();
        mIndices.put(INDICES_DATA_0).position(0);
    }

    /**
     * Sets the aspect ratio for this view. The size of the view will be measured based on the ratio
     * calculated from the parameters. Note that the actual sizes of parameters don't matter, that
     * is, calling setAspectRatio(2, 3) and setAspectRatio(4, 6) make the same result.
     *
     * @param width  Relative horizontal size
     * @param height Relative vertical size
     */
    public void setAspectRatio(int width, int height) {
        if (width < 0 || height < 0) {
            throw new IllegalArgumentException("Size cannot be negative.");
        }
        mRatioWidth = width;
        mRatioHeight = height;
        requestLayout();
    }

    /**
     * Hands a camera frame to the renderer. Takes ownership of {@code image}:
     * it is closed immediately when the surface is not ready or a previous
     * frame is still being rendered, otherwise after rendering completes.
     *
     * @param image           the YUV frame to display
     * @param jpegOrientation rotation in degrees; a change triggers a vertex update
     */
    public void setImage(final Image image, final int jpegOrientation){
        if (!isReady()) {
            Log.i(TAG, "Not ready");
            image.close();
            return;
        }
        if (mImage != null) {
            // Drop the new frame rather than queueing: keeps latency bounded.
            Log.i(TAG, "Already rendering previous image");
            image.close();
            return;
        }
        // We need to save the image as the rendering is asynchronous
        mImage = image;
        if (mJpegOrientation != jpegOrientation) {
            Log.i(TAG, "Orientation changed: " + mJpegOrientation + " -> " + jpegOrientation);
            mJpegOrientation = jpegOrientation;
            mJpegOrientationChanged = true;
        }
        // Signal the surface as dirty to force redrawing
        requestRender();
    }

    /** @return true once the GL surface has been created and not yet destroyed. */
    public boolean isReady(){
        return mSurfaceCreated;
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        int width = MeasureSpec.getSize(widthMeasureSpec);
        int height = MeasureSpec.getSize(heightMeasureSpec);
        if (0 == mRatioWidth || 0 == mRatioHeight) {
            // No ratio requested yet: accept the measured size as-is.
            setMeasuredDimension(width, height);
        } else {
            // Letterbox: shrink one dimension so the view keeps the requested ratio.
            if (width < height * mRatioWidth / mRatioHeight) {
                setMeasuredDimension(width, width * mRatioHeight / mRatioWidth);
            } else {
                setMeasuredDimension(height * mRatioWidth / mRatioHeight, height);
            }
        }
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        super.surfaceCreated(holder);
        mSurfaceCreated = true;
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        mSurfaceCreated = false;
        // Release any frame that never got rendered, to return it to the camera.
        if (mImage != null) {
            mImage.close();
            mImage = null;
        }
        super.surfaceDestroyed(holder);
    }

    /**
     * Renders the pending frame (if any): refreshes the vertex data on an
     * orientation change, uploads the three YUV planes as textures and draws
     * the textured quad, then closes and clears the frame.
     */
    @Override
    public void onDrawFrame(GL10 glUnused) {
        if (mImage == null) {
            return;
        }
        if (mJpegOrientationChanged) {
            updateVertices();
            mJpegOrientationChanged = false;
        }
        // 90/270 rotations swap the displayed width/height.
        final boolean swapSize = (mJpegOrientation % 180) != 0;
        final int imageWidth = mImage.getWidth();
        final int imageHeight = mImage.getHeight();
        final AlprUtils.AlprTransformationInfo tInfo = new AlprUtils.AlprTransformationInfo(swapSize ? imageHeight : imageWidth, swapSize ? imageWidth : imageHeight, getWidth(), getHeight());
        GLES20.glViewport(tInfo.getXOffset(), tInfo.getYOffset(), tInfo.getWidth(), tInfo.getHeight());
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT /*| GLES20.GL_DEPTH_BUFFER_BIT*/);
        GLES20.glUseProgram(mProgram);
        checkGlError("glUseProgram");
        final Image.Plane[] planes = mImage.getPlanes();
        final ByteBuffer bufferY = planes[0].getBuffer();
        final ByteBuffer bufferU = planes[1].getBuffer();
        final ByteBuffer bufferV = planes[2].getBuffer();
        final int uvPixelStride = planes[1].getPixelStride();
        // Use the row stride (not the image width) as texture width so padded
        // rows are uploaded correctly.
        final int bufferWidthY = planes[0].getRowStride();
        final int bufferHeightY = imageHeight;
        // pixelStride 1 -> planar (full row); pixelStride 2 -> interleaved UV,
        // so halve the row stride (shift by stride-1).
        final int bufferWidthUV = (planes[1].getRowStride() >> (uvPixelStride - 1));
        final int bufferHeightUV = (bufferHeightY >> 1); // Always YUV420_888 -> half-height
        final int uvFormat = uvPixelStride == 1 ? GLES20.GL_LUMINANCE : GLES20.GL_LUMINANCE_ALPHA; // Interleaved UV
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureY[0]);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, bufferWidthY, bufferHeightY, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, bufferY);
        GLES20.glUniform1i(muSamplerYHandle, 0);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureU[0]);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, uvFormat, bufferWidthUV, bufferHeightUV, 0, uvFormat, GLES20.GL_UNSIGNED_BYTE, bufferU);
        GLES20.glUniform1i(muSamplerUHandle, 1);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureV[0]);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, uvFormat, bufferWidthUV, bufferHeightUV, 0, uvFormat, GLES20.GL_UNSIGNED_BYTE, bufferV);
        GLES20.glUniform1i(muSamplerVHandle, 2);
        GLES20.glDrawElements(GLES20.GL_TRIANGLES, INDICES_DATA_0.length, GLES20.GL_UNSIGNED_SHORT, mIndices);
        // Done with this frame: return it to the camera pipeline.
        mImage.close();
        mImage = null;
    }

    @Override
    public void onSurfaceChanged(GL10 glUnused, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
        // GLU.gluPerspective(glUnused, 45.0f, (float)width/(float)height, 0.1f, 100.0f);
    }

    /**
     * Compiles/links the shader program, resolves attribute and uniform
     * locations, primes the vertex buffers and creates the three plane
     * textures with linear filtering and edge clamping.
     */
    @Override
    public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
        GLES20.glEnable(GLES20.GL_BLEND);
        GLES20.glDisable(GLES20.GL_DEPTH_TEST);
        GLES20.glDisable(GLES20.GL_DITHER);
        GLES20.glDisable(GLES20.GL_STENCIL_TEST);
        // NOTE(review): GL10.GL_DITHER has the same value — this call is redundant
        // with the glDisable(GLES20.GL_DITHER) above.
        GLES20.glDisable(GL10.GL_DITHER);
        String extensions = GLES20.glGetString(GL10.GL_EXTENSIONS);
        Log.d(TAG, "OpenGL extensions=" +extensions);
        // Ignore the passed-in GL10 interface, and use the GLES20
        // class's static methods instead.
        mProgram = createProgram(VERTEX_SHADER_SOURCE, FRAGMENT_SHADER_SOURCE);
        if (mProgram == 0) {
            return;
        }
        maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
        checkGlError("glGetAttribLocation aPosition");
        if (maPositionHandle == -1) {
            throw new RuntimeException("Could not get attrib location for aPosition");
        }
        maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
        checkGlError("glGetAttribLocation aTextureCoord");
        if (maTextureHandle == -1) {
            throw new RuntimeException("Could not get attrib location for aTextureCoord");
        }
        muSamplerYHandle = GLES20.glGetUniformLocation(mProgram, "SamplerY");
        if (muSamplerYHandle == -1) {
            throw new RuntimeException("Could not get uniform location for SamplerY");
        }
        muSamplerUHandle = GLES20.glGetUniformLocation(mProgram, "SamplerU");
        if (muSamplerUHandle == -1) {
            throw new RuntimeException("Could not get uniform location for SamplerU");
        }
        muSamplerVHandle = GLES20.glGetUniformLocation(mProgram, "SamplerV");
        if (muSamplerVHandle == -1) {
            throw new RuntimeException("Could not get uniform location for SamplerV");
        }
        updateVertices();
        GLES20.glGenTextures(1, mTextureY, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureY[0]);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glGenTextures(1, mTextureU, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureU[0]);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glGenTextures(1, mTextureV, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureV[0]);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    }

    /**
     * Compiles a single shader.
     *
     * @param shaderType GL_VERTEX_SHADER or GL_FRAGMENT_SHADER
     * @param source     GLSL source text
     * @return the shader handle, or 0 on compile failure (error is logged)
     */
    private int loadShader(int shaderType, String source) {
        int shader = GLES20.glCreateShader(shaderType);
        if (shader != 0) {
            GLES20.glShaderSource(shader, source);
            GLES20.glCompileShader(shader);
            int[] compiled = new int[1];
            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
            if (compiled[0] == 0) {
                Log.e(TAG, "Could not compile shader " + shaderType + ":");
                Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
                GLES20.glDeleteShader(shader);
                shader = 0;
            }
        }
        return shader;
    }

    /**
     * Compiles both shaders and links them into a program.
     *
     * @return the program handle, or 0 on any compile/link failure
     */
    private int createProgram(String vertexSource, String fragmentSource) {
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
        if (vertexShader == 0) {
            return 0;
        }
        int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
        if (pixelShader == 0) {
            return 0;
        }
        int program = GLES20.glCreateProgram();
        if (program != 0) {
            GLES20.glAttachShader(program, vertexShader);
            checkGlError("glAttachShader");
            GLES20.glAttachShader(program, pixelShader);
            checkGlError("glAttachShader");
            GLES20.glLinkProgram(program);
            int[] linkStatus = new int[1];
            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
            if (linkStatus[0] != GLES20.GL_TRUE) {
                Log.e(TAG, "Could not link program: ");
                Log.e(TAG, GLES20.glGetProgramInfoLog(program));
                GLES20.glDeleteProgram(program);
                program = 0;
            }
        }
        return program;
    }

    /**
     * Loads the vertex/index data matching {@code mJpegOrientation} into the
     * buffers and (re)binds the position and UV vertex attributes.
     * Throws if the orientation is not one of 0/90/180/270.
     */
    private void updateVertices() {
        mTriangleVertices.rewind();
        mIndices.rewind();
        switch (mJpegOrientation) {
            case 90:
                mTriangleVertices.put(TRIANGLE_VERTICES_DATA_90).position(0);
                mIndices.put(INDICES_DATA_90).position(0);
                break;
            case 180:
                mTriangleVertices.put(TRIANGLE_VERTICES_DATA_180).position(0);
                mIndices.put(INDICES_DATA_180).position(0);
                break;
            case 270:
                mTriangleVertices.put(TRIANGLE_VERTICES_DATA_270).position(0);
                mIndices.put(INDICES_DATA_270).position(0);
                break;
            case 0:
                mTriangleVertices.put(TRIANGLE_VERTICES_DATA_0).position(0);
                mIndices.put(INDICES_DATA_0).position(0);
                break;
            default:
                throw new RuntimeException("Invalid orientation:" + mJpegOrientation);
        }
        mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
        GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
        checkGlError("glVertexAttribPointer maPosition");
        mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
        GLES20.glEnableVertexAttribArray(maPositionHandle);
        checkGlError("glEnableVertexAttribArray maPositionHandle");
        GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
        checkGlError("glVertexAttribPointer maTextureHandle");
        GLES20.glEnableVertexAttribArray(maTextureHandle);
        checkGlError("glEnableVertexAttribArray maTextureHandle");
    }

    /**
     * Drains the GL error queue; logs and throws on the first error found.
     *
     * @param op label of the operation just performed, for the error message
     */
    private void checkGlError(String op) {
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            Log.e(TAG, op + ": glError " + error);
            throw new RuntimeException(op + ": glError " + error);
        }
    }
}
================================================
FILE: demo/src/main/java/org/buyun/alpr/common/AlprImage.java
================================================
package org.buyun.alpr.common;
import android.media.Image;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Reference-counted wrapper around an {@link Image} so a single camera frame
 * can be shared between consumers. The underlying image is closed when the
 * last reference is released.
 */
public class AlprImage {
    // Wrapped frame; set to null once closed.
    Image mImage;
    // Outstanding references. Starts at 0; callers take a reference with
    // takeRef() and must pair it with releaseRef().
    final AtomicInteger mRefCount;

    private AlprImage(final Image image) {
        assert image != null;
        mImage = image;
        mRefCount = new AtomicInteger(0);
    }

    public static AlprImage newInstance(final Image image) {
        return new AlprImage(image);
    }

    public final Image getImage() {
        assert mRefCount.intValue() >= 0;
        return mImage;
    }

    /**
     * Acquires a new reference.
     *
     * @return this wrapper, or null if the underlying image was already closed
     */
    public AlprImage takeRef() {
        assert mRefCount.intValue() >= 0;
        // BUGFIX: the counter never goes negative through paired take/release
        // calls, so the old (< 0) guard never triggered and a wrapper whose
        // image had already been closed could be handed out. Refuse once the
        // image is gone instead.
        if (mImage == null || mRefCount.intValue() < 0) {
            return null;
        }
        mRefCount.incrementAndGet();
        return this;
    }

    /**
     * Releases one reference; closes the underlying image when the count
     * drops to zero.
     */
    public void releaseRef() {
        assert mRefCount.intValue() >= 0;
        final int refCount = mRefCount.decrementAndGet();
        if (refCount <= 0) {
            mImage.close();
            mImage = null;
        }
    }

    /**
     * Safety net for leaked references: closes the image if it was never
     * released. BUGFIX: the previous condition (refCount < 0) could only hold
     * after releaseRef() had already closed and nulled the image, so a leaked
     * frame was never reclaimed.
     */
    @Override
    protected synchronized void finalize() {
        if (mImage != null) {
            mImage.close();
            mImage = null;
        }
    }
}
================================================
FILE: demo/src/main/java/org/buyun/alpr/common/AlprPlateView.java
================================================
package org.buyun.alpr.common;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.DashPathEffect;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.PointF;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.Typeface;
import android.util.AttributeSet;
import android.util.Log;
import android.util.Size;
import android.util.TypedValue;
import android.view.View;
import androidx.annotation.NonNull;
import org.buyun.alpr.sdk.AlprResult;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
public class AlprPlateView extends View {
static final String TAG = AlprPlateView.class.getCanonicalName();
static final float LPCI_MIN_CONFIDENCE = 80.f;
static final float VCR_MIN_CONFIDENCE = 80.f;
static final float VMMR_MIN_CONFIDENCE = 60.f;
static final float VBSR_MIN_CONFIDENCE = 70.f;
static final float VMMR_FUSE_DEFUSE_MIN_CONFIDENCE = 40.f;
static final int VMMR_FUSE_DEFUSE_MIN_OCCURRENCES = 3;
static final float TEXT_NUMBER_SIZE_DIP = 20;
static final float TEXT_LPCI_SIZE_DIP = 15;
static final float TEXT_CAR_SIZE_DIP = 15;
static final float TEXT_INFERENCE_TIME_SIZE_DIP = 20;
static final int STROKE_WIDTH = 10;
private final Paint mPaintTextNumber;
private final Paint mPaintTextNumberBackground;
private final Paint mPaintTextLPCI;
private final Paint mPaintTextLPCIBackground;
private final Paint mPaintTextCar;
private final Paint mPaintTextCarBackground;
private final Paint mPaintBorder;
private final Paint mPaintTextDurationTime;
private final Paint mPaintTextDurationTimeBackground;
private final Paint mPaintDetectROI;
private int mRatioWidth = 0;
private int mRatioHeight = 0;
private int mOrientation = 0;
private long mDurationTimeMillis;
private Size mImageSize;
private List<AlprUtils.Plate> mPlates = null;
private RectF mDetectROI;
/**
*
* @param context
* @param attrs
*/
public AlprPlateView(final Context context, final AttributeSet attrs) {
super(context, attrs);
// final Typeface fontALPR = Typeface.createFromAsset(context.getAssets(), "GlNummernschildEng-XgWd.ttf");
mPaintTextNumber = new Paint();
mPaintTextNumber.setTextSize(TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, TEXT_NUMBER_SIZE_DIP, getResources().getDisplayMetrics()));
mPaintTextNumber.setColor(Color.BLACK);
mPaintTextNumber.setStyle(Paint.Style.FILL_AND_STROKE);
// mPaintTextNumber.setTypeface(Typeface.create(fontALPR, Typeface.BOLD));
mPaintTextNumberBackground = new Paint();
mPaintTextNumberBackground.setColor(Color.YELLOW);
mPaintTextNumberBackground.setStrokeWidth(STROKE_WIDTH);
mPaintTextNumberBackground.setStyle(Paint.Style.FILL_AND_STROKE);
mPaintTextLPCI = new Paint();
mPaintTextLPCI.setTextSize(TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, TEXT_LPCI_SIZE_DIP, getResources().getDisplayMetrics()));
mPaintTextLPCI.setColor(Color.WHITE);
mPaintTextLPCI.setStyle(Paint.Style.FILL_AND_STROKE);
// mPaintTextLPCI.setTypeface(Typeface.create(fontALPR, Typeface.BOLD));
mPaintTextLPCIBackground = new Paint();
mPaintTextLPCIBackground.setColor(Color.BLUE);
mPaintTextLPCIBackground.setStrokeWidth(STROKE_WIDTH);
mPaintTextLPCIBackground.setStyle(Paint.Style.FILL_AND_STROKE);
mPaintTextCar = new Paint();
mPaintTextCar.setTextSize(TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, TEXT_CAR_SIZE_DIP, getResources().getDisplayMetrics()));
mPaintTextCar.setColor(Color.BLACK);
mPaintTextCar.setStyle(Paint.Style.FILL_AND_STROKE);
// mPaintTextCar.setTypeface(Typeface.create(fontALPR, Typeface.BOLD));
mPaintTextCarBackground = new Paint();
mPaintTextCarBackground.setColor(Color.RED);
mPaintTextCarBackground.setStrokeWidth(STROKE_WIDTH);
mPaintTextCarBackground.setStyle(Paint.Style.FILL_AND_STROKE);
mPaintBorder = new Paint();
mPaintBorder.setStrokeWidth(STROKE_WIDTH);
mPaintBorder.setPathEffect(null);
mPaintBorder.setColor(Color.YELLOW);
mPaintBorder.setStyle(Paint.Style.STROKE);
mPaintTextDurationTime = new Paint();
mPaintTextDurationTime.setTextSize(TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, TEXT_INFERENCE_TIME_SIZE_DIP, getResources().getDisplayMetrics()));
mPaintTextDurationTime.setColor(Color.WHITE);
mPaintTextDurationTime.setStyle(Paint.Style.FILL_AND_STROKE);
// mPaintTextDurationTime.setTypeface(Typeface.create(fontALPR, Typeface.BOLD));
mPaintTextDurationTimeBackground = new Paint();
mPaintTextDurationTimeBackground.setColor(Color.BLACK);
mPaintTextDurationTimeBackground.setStrokeWidth(STROKE_WIDTH);
mPaintTextDurationTimeBackground.setStyle(Paint.Style.FILL_AND_STROKE);
mPaintDetectROI = new Paint();
mPaintDetectROI.setColor(Color.RED);
mPaintDetectROI.setStrokeWidth(STROKE_WIDTH);
mPaintDetectROI.setStyle(Paint.Style.STROKE);
mPaintDetectROI.setPathEffect(new DashPathEffect(new float[] {10,20}, 0));
}
/** Sets the detection region of interest (image coordinates) drawn as a dashed rectangle in draw(). */
public void setDetectROI(final RectF roi) { mDetectROI = roi; }
/**
 * Sets the aspect ratio used by onMeasure(). Only the width:height ratio
 * matters, not the magnitudes of the arguments.
 *
 * @param width  relative horizontal size
 * @param height relative vertical size
 * @throws IllegalArgumentException if either argument is negative
 */
public void setAspectRatio(int width, int height) {
    if (Math.min(width, height) < 0) {
        throw new IllegalArgumentException("Size cannot be negative.");
    }
    mRatioWidth = width;
    mRatioHeight = height;
    requestLayout();
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
    Log.i(TAG, "onMeasure");
    super.onMeasure(widthMeasureSpec, heightMeasureSpec);
    final int width = MeasureSpec.getSize(widthMeasureSpec);
    final int height = MeasureSpec.getSize(heightMeasureSpec);
    // Default: accept the measured size unchanged.
    int measuredWidth = width;
    int measuredHeight = height;
    if (mRatioWidth != 0 && mRatioHeight != 0) {
        // A ratio was requested: shrink one dimension so the view keeps it.
        if (width < height * mRatioWidth / mRatioHeight) {
            measuredHeight = width * mRatioHeight / mRatioWidth;
        } else {
            measuredWidth = height * mRatioWidth / mRatioHeight;
        }
    }
    setMeasuredDimension(measuredWidth, measuredHeight);
}
/**
 * Publishes a new recognition result and schedules a redraw.
 *
 * @param result       the SDK recognition result to extract plates from
 * @param imageSize    size of the processed image, used to map plate
 *                     coordinates onto this view
 * @param durationTime processing duration in milliseconds
 * @param orientation  frame orientation in degrees
 */
public synchronized void setResult(@NonNull final AlprResult result, @NonNull final Size imageSize, final long durationTime, final int orientation) {
    // IDIOM FIX: @NonNull removed from the primitive parameters — nullability
    // annotations are meaningless on long/int and no-ops for callers.
    mPlates = AlprUtils.extractPlates(result);
    mImageSize = imageSize;
    mDurationTimeMillis = durationTime;
    mOrientation = orientation;
    // postInvalidate() is safe from non-UI threads, unlike invalidate().
    postInvalidate();
}
@Override
public synchronized void draw(final Canvas canvas) {
super.draw(canvas);
if (mImageSize == null) {
Log.i(TAG, "Not initialized yet");
return;
}
final String mInferenceTimeMillisString = "Point your camera at a License Plate ";
Rect boundsTextmInferenceTimeMillis = new Rect();
mPaintTextDurationTime.getTextBounds(mInferenceTimeMillisString, 0, mInferenceTimeMillisString.length(), boundsTextmInferenceTimeMillis);
int left = (canvas.getWidth() - boundsTextmInferenceTimeMillis.width()) / 2;
int top = 20;
canvas.drawRect(left, top, left + boundsTextmInferenceTimeMillis.width() + 5, top + boundsTextmInferenceTimeMillis.height() + 20, mPaintTextDurationTimeBackground);
canvas.drawText(mInferenceTimeMillisString, left, 20 + boundsTextmInferenceTimeMillis.height(), mPaintTextDurationTime);
// Transformation info
final AlprUtils.AlprTransformationInfo tInfo = new AlprUtils.AlprTransformationInfo(mImageSize.getWidth(), mImageSize.getHeight(), getWidth(), getHeight());
// ROI
if (mDetectROI != null && !mDetectROI.isEmpty()) {
canvas.drawRect(
new RectF(
tInfo.transformX(mDetectROI.left),
tInfo.transformY(mDetectROI.top),
tInfo.transformX(mDetectROI.right),
tInfo.transformY(mDetectROI.bottom)
),
mPaintDetectROI
);
}
// Plates
if (mPlates != null && !mPlates.isEmpty()) {
for (final AlprUtils.Plate plate : mPlates) {
// Transform corners
final float[] plateWarpedBox = plate.getWarpedBox();
final PointF plateCornerA = new PointF(tInfo.transformX(plateWarpedBox[0]), tInfo.transformY(plateWarpedBox[1]));
final PointF plateCornerB = new PointF(tInfo.transformX(plateWarpedBox[2]), tInfo.transformY(plateWarpedBox[3]));
final PointF plateCornerC = new PointF(tInfo.transformX(plateWarpedBox[4]), tInfo.transformY(plateWarpedBox[5]));
final PointF plateCornerD = new PointF(tInfo.transformX(plateWarpedBox[6]), tInfo.transformY(plateWarpedBox[7]));
// Draw border
final Path platePathBorder = new Path();
platePathBorder.moveTo(plateCornerA.x, plateCornerA.y);
platePathBorder.lineTo(plateCornerB.x, plateCornerB.y);
platePathBorder.lineTo(plateCornerC.x, plateCornerC.y);
platePathBorder.lineTo(plateCornerD.x, plateCornerD.y);
platePathBorder.lineTo(plateCornerA.x, plateCornerA.y);
platePathBorder.close();
mPaintBorder.setColor(mPaintTextNumberBackground.getColor());
canvas.drawPath(platePathBorder, mPaintBorder);
// Draw text number
final String number = plate.getNumber();
if (number != null && !number.isEmpty()) {
Rect boundsTextNumber = new Rect();
mPaintTextNumber.getTextBounds(number, 0, number.length(), boundsTextNumber);
final RectF rectTextNumber = new RectF(
plateCornerA.x,
plateCornerA.y - (boundsTextNumber.height() + 10) * 2,
plateCornerA.x + boundsTextNumber.width(),
plateCornerA.y - (boundsTextNumber.height() + 10)
);
final Path pathTextNumber = new Path();
pathTextNumber.moveTo(plateCornerA.x, plateCornerA.y - rectTextNumber.height() - 10);
pathTextNumber.lineTo(Math.max(plateCornerB.x, (plateCornerA.x + rectTextNumber.width())), plateCornerB.y - rectTextNumber.height() - 10);
pathTextNumber.addRect(rectTextNumber, Path.Direction.CCW);
pathTextNumber.close();
canvas.drawPath(pathTextNumber, mPaintTextNumberBackground);
canvas.drawTextOnPath(number, pathTextNumber, 0, 0, mPaintTextNumber);
}
// Draw Car
if (plate.getCar() != null) {
final AlprUtils.Car car = plate.getCar();
if (car.getConfidence() >= 80.f) {
String color = null;
if (car.getColors() != null) {
final AlprUtils.Car.Attribute colorObj0 = car.getColors().get(0); // sorted, most higher confidence first
if (colorObj0.getConfidence() >= VCR_MIN_CONFIDENCE) {
color = colorObj0.getName();
}
else if (car.getColors().size() >= 2) {
final AlprUtils.Car.Attribute colorObj1 = car.getColors().get(1);
final String colorMix = colorObj0.getName() + "/" + colorObj1.getName();
float confidence = colorObj0.getConfidence();
if ("white/silver,silver/white,gray/silver,silver/gray".indexOf(colorMix) != -1) {
confidence += colorObj1.getConfidence();
}
if (confidence >= VCR_MIN_CONFIDENCE) {
color = (colorMix.indexOf("white") == -1) ? "DarkSilver" : "LightSilver";
confidence = Math.max(colorObj0.getConfidence(), colorObj1.getConfidence());
}
}
}
String make = null, model = null;
if (car.getMakesModelsYears() != null) {
final List<AlprUtils.Car.MakeModelYear> makesModelsYears = car.getMakesModelsYears();
final AlprUtils.Car.MakeModelYear makeModelYear = makesModelsYears.get(0); // sorted, most higher confidence first
if (makeModelYear.getConfidence() >= VMMR_MIN_CONFIDENCE) {
make = makeModelYear.getMake();
model = makeModelYear.getModel();
}
else {
Map<String, Float> makes = new HashMap<>();
Map<String, Integer> occurrences = new HashMap<>();
// Fuse makes
for (final AlprUtils.Car.MakeModelYear mmy : makesModelsYears) {
makes.put(mmy.getMake(), AlprUtils.getOrDefault(makes, mmy.getMake(), 0.f) + mmy.getConfidence()); // Map.getOrDefault requires API level 24
occurrences.put(mmy.getMake(), AlprUtils.getOrDefault(occurrences, mmy.getMake(), 0) + 1); // Map.getOrDefault requires API level 24
}
// Find make with highest confidence. Stream requires Java8
Iterator<Map.Entry<String, Float> > itMake = makes.entrySet().iterator();
Map.Entry<String, Float> bestMake = itMake.next();
while (itMake.hasNext()) {
Map.Entry<String, Float> makeE = itMake.next();
if (makeE.getValue() > bestMake.getValue()) {
bestMake = makeE;
}
}
// Model fusion
if (bestMake.getValue() >= VMMR_MIN_CONFIDENCE || (occurrences.get(bestMake.getKey()) >= VMMR_FUSE_DEFUSE_MIN_OCCURRENCES && bestMake.getValue() >= VMMR_FUSE_DEFUSE_MIN_CONFIDENCE)) {
make = bestMake.getKey();
// Fuse models
Map<String, Float> models = new HashMap<>();
for (final AlprUtils.Car.MakeModelYear mmy : makesModelsYears) {
if (make.equals(mmy.getMake())) {
models.put(mmy.getModel(), AlprUtils.getOrDefault(models, mmy.getModel(), 0.f) + mmy.getConfidence()); // Map.getOrDefault requires API level 24
}
}
// Find model with highest confidence. Stream requires Java8
Iterator<Map.Entry<String, Float> > itModel = models.entrySet().iterator();
Map.Entry<String, Float> bestModel = itModel.next();
while (itModel.hasNext()) {
Map.Entry<String, Float> modelE = itModel.next();
if (modelE.getValue() > bestModel.getValue()) {
bestModel = modelE;
}
}
model = bestModel.getKey();
}
}
}
String bodyStyle = null;
if (car.getBodyStyles() != null) {
final AlprUtils.Car.Attribute vbsr = car.getBodyStyles().get(0); // sorted, most higher confidence first
if (vbsr.getConfidence() >= VBSR_MIN_CONFIDENCE) {
bodyStyle = vbsr.getName();
}
}
// Transform corners
final float[] carWarpedBox = car.getWarpedBox();
final PointF carCornerA = new PointF(tInfo.transformX(carWarpedBox[0]), tInfo.transformY(carWarpedBox[1]));
final PointF carCornerB = new PointF(tInfo.transformX(carWarpedBox[2]), tInfo.transformY(carWarpedBox[3]));
final PointF carCornerC = new PointF(tInfo.transformX(carWarpedBox[4]), tInfo.transformY(carWarpedBox[5]));
final PointF carCornerD = new PointF(tInfo.transformX(carWarpedBox[6]), tInfo.transformY(carWarpedBox[7]));
// Draw border
final Path carPathBorder = new Path();
carPathBorder.moveTo(carCornerA.x, carCornerA.y);
carPathBorder.lineTo(carCornerB.x, carCornerB.y);
carPathBorder.lineTo(carCornerC.x, carCornerC.y);
carPathBorder.lineTo(carCornerD.x, carCornerD.y);
carPathBorder.lineTo(carCornerA.x, carCornerA.y);
carPathBorder.close();
mPaintBorder.setColor(mPaintTextCarBackground.getColor());
canvas.drawPath(carPathBorder, mPaintBorder);
// Draw car information
final String carText = String.format(
"%s%s%s%s",
make != null ? make : "Car",
model != null ? ", " + model : "",
color != null ? ", " + color : "",
bodyStyle != null ? ", " + bodyStyle : ""
);
Rect boundsTextCar = new Rect();
mPaintTextNumber.getTextBounds(carText, 0, carText.length(), boundsTextCar);
final RectF rectTextNumber = new RectF(
plateCornerA.x,
plateCornerA.y - (boundsTextCar.height() + 5) * 3,
plateCornerA.x + boundsTextCar.width(),
plateCornerA.y - (boundsTextCar.height() + 5) * 2
);
final Path pathTextCar = new Path();
pathTextCar.moveTo(plateCornerA.x, plateCornerA.y - (rectTextNumber.height() + 5) * 2);
pathTextCar.lineTo(Math.max(plateCornerB.x, (plateCornerA.x + rectTextNumber.width())), plateCornerB.y - (rectTextNumber.height() + 5) * 2);
pathTextCar.addRect(rectTextNumber, Path.Direction.CCW);
pathTextCar.close();
canvas.drawPath(pathTextCar, mPaintTextNumberBackground);
canvas.drawTextOnPath(carText, pathTextCar, 0, 0, mPaintTextNumber);
}
}
if (plate.getCountries() != null) {
final AlprUtils.Country country = plate.getCountries().get(0); // sorted, most higher confidence first
if (country.getConfidence() >= LPCI_MIN_CONFIDENCE) {
final String countryString = country.getCode();
Rect boundsConfidenceLPCI = new Rect();
mPaintTextNumber.getTextBounds(countryString, 0, countryString.length(), boundsConfidenceLPCI);
final RectF rectTextLPCI = new RectF(
plateCornerA.x,
plateCornerA.y - (boundsConfidenceLPCI.height() + 10),
plateCornerA.x + (boundsConfidenceLPCI.width() + 10),
plateCornerA.y
);
final Path pathTextLPCI = new Path();
pathTextLPCI.moveTo(plateCornerA.x, plateCornerA.y);
pathTextLPCI.lineTo(Math.max(plateCornerB.x, (plateCornerA.x + boundsConfidenceLPCI.width())), plateCornerB.y);
pathTextLPCI.addRect(rectTextLPCI, Path.Direction.CCW);
pathTextLPCI.close();
canvas.drawPath(pathTextLPCI, mPaintTextNumberBackground);
canvas.drawTextOnPath(countryString, pathTextLPCI, 0, 0, mPaintTextNumber);
}
}
}
}
}
}
================================================
FILE: demo/src/main/java/org/buyun/alpr/common/AlprUtils.java
================================================
package org.buyun.alpr.common;
import android.content.res.AssetFileDescriptor;
import android.content.res.AssetManager;
import android.graphics.PointF;
import android.util.Log;
import androidx.annotation.NonNull;
import org.buyun.alpr.sdk.AlprResult;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
* Utility class
*/
/**
 * Utility class: image-to-canvas coordinate transformation and JSON
 * deserialization helpers for the ALPR SDK results.
 */
public class AlprUtils {
    static final String TAG = AlprUtils.class.getCanonicalName();

    /**
     * Maps coordinates from image space to canvas space using a letterbox
     * transformation: scale uniformly so the image fits inside the canvas
     * while preserving the aspect ratio, then center it.
     */
    public static class AlprTransformationInfo {
        final int mXOffset;   // horizontal padding (px) centering the scaled image
        final int mYOffset;   // vertical padding (px) centering the scaled image
        final float mRatio;   // uniform scale factor applied to image coordinates
        final int mWidth;     // scaled image width on the canvas
        final int mHeight;    // scaled image height on the canvas

        /**
         * @param imageWidth   source image width, in pixels
         * @param imageHeight  source image height, in pixels
         * @param canvasWidth  destination canvas width, in pixels
         * @param canvasHeight destination canvas height, in pixels
         */
        public AlprTransformationInfo(final int imageWidth, final int imageHeight, final int canvasWidth, final int canvasHeight) {
            final float xRatio = (float) canvasWidth / (float) imageWidth;
            final float yRatio = (float) canvasHeight / (float) imageHeight;
            // Use the smaller ratio so the whole image fits inside the canvas.
            mRatio = Math.min(xRatio, yRatio);
            mWidth = (int) (imageWidth * mRatio);
            mHeight = (int) (imageHeight * mRatio);
            // Center the scaled image (">> 1" is an integer divide by two).
            mXOffset = (canvasWidth - mWidth) >> 1;
            mYOffset = (canvasHeight - mHeight) >> 1;
        }

        /** Transforms an image-space X coordinate to canvas space. */
        public float transformX(final float x) { return x * mRatio + mXOffset; }
        /** Transforms an image-space Y coordinate to canvas space. */
        public float transformY(final float y) { return y * mRatio + mYOffset; }
        /** Transforms an image-space point to canvas space. */
        public PointF transform(final PointF p) { return new PointF(transformX(p.x), transformY(p.y)); }
        public int getXOffset() { return mXOffset; }
        public int getYOffset() { return mYOffset; }
        public float getRatio() { return mRatio; }
        public int getWidth() { return mWidth; }
        public int getHeight() { return mHeight; }
    }

    /** Vehicle information attached to a recognition result (added in SDK 3.0.0). */
    static class Car {
        /** Named attribute (e.g. color or body style) with its confidence score. */
        static class Attribute {
            private int mKlass;        // class index reported by the SDK
            private String mName;      // attribute name, in English
            private float mConfidence;
            public int getKlass() { return mKlass; }
            public String getName() { return mName; }
            public float getConfidence() { return mConfidence; }
        }

        /** Make/model/year candidate with its confidence score. */
        static class MakeModelYear {
            private int mKlass;
            private String mMake;
            private String mModel;
            private String mYear; // Not integer on purpose, could be interval or...
            private float mConfidence;
            public int getKlass() { return mKlass; }
            public String getMake() { return mMake; }
            public String getModel() { return mModel; }
            public String getYear() { return mYear; }
            public float getConfidence() { return mConfidence; }
        }

        private float mConfidence;
        private float[] mWarpedBox; // 8 floats: 4 corners as (x, y) pairs
        private List<Car.Attribute> mColors;               // null when absent from the JSON
        private List<Car.Attribute> mBodyStyles;           // null when absent from the JSON
        private List<Car.MakeModelYear> mMakesModelsYears; // null when absent from the JSON
        public float[] getWarpedBox() { return mWarpedBox; }
        public float getConfidence() { return mConfidence; }
        public List<Car.Attribute> getColors() { return mColors; }
        public List<Car.Attribute> getBodyStyles() { return mBodyStyles; }
        public List<Car.MakeModelYear> getMakesModelsYears() { return mMakesModelsYears; }
    }

    /**
     * Country-of-issue candidate for a plate.
     */
    static class Country {
        private int mKlass;
        private String mCode;   // ISO-like code, e.g. "RUS", "USA"
        private String mName;   // full name, e.g. "Russian Federation"
        private String mState;  // optional; null when not present in the JSON
        private String mOther;  // optional; null when not present in the JSON
        private float mConfidence;
        public int getKlass() { return mKlass; }
        public String getCode() { return mCode; }
        public String getName() { return mName; }
        public String getState() { return mState; }
        public String getOther() { return mOther; }
        public float getConfidence() { return mConfidence; }
    }

    /**
     * A recognized license plate, possibly with an associated {@link Car}.
     */
    static class Plate {
        private String mNumber; // empty string when the JSON entry has no "text" (car without plate)
        private float mDetectionConfidence;
        private float mRecognitionConfidence;
        private float[] mWarpedBox; // 8 floats: 4 corners as (x, y) pairs
        private List<Country> mCountries; // null when absent from the JSON
        private Car mCar;                 // null when absent from the JSON
        public String getNumber() { return mNumber; }
        public float getDetectionConfidence() { return mDetectionConfidence; }
        public float getRecognitionConfidence() { return mRecognitionConfidence; }
        public float[] getWarpedBox() { return mWarpedBox; }
        public List<Country> getCountries() { return mCountries; }
        public Car getCar() { return mCar; }
    }

    /**
     * Extracts the frame identifier from a result's JSON payload.
     * @param result the SDK result
     * @return the "frame_id" value, or 0 when the JSON is missing or malformed
     */
    static public final long extractFrameId(final AlprResult result) {
        final String jsonString = result.json();
        if (jsonString != null) {
            try {
                final JSONObject jObject = new JSONObject(jsonString);
                return jObject.getLong("frame_id");
            }
            catch (JSONException e) {
                // FIX: previously swallowed silently; log like extractPlates() does
                // so malformed payloads are visible. Still returns 0 (best-effort).
                Log.e(TAG, e.toString());
            }
        }
        return 0;
    }

    /**
     * Deserializes the plates (with optional countries and cars) from a
     * result's JSON payload.
     * Expected schema (abridged): {"frame_id":N, "plates":[{"text":..., "confidences":[rec,det],
     * "warpedBox":[8 floats], "country":[...], "car":{...}}]}
     * @param result the SDK result
     * @return list of plates; empty (never null) on failure or when nothing was detected
     */
    static public final List<Plate> extractPlates(final AlprResult result) {
        final List<Plate> plates = new LinkedList<>();
        if (!result.isOK() || (result.numPlates() == 0 && result.numCars() == 0)) {
            return plates;
        }
        final String jsonString = result.json();
        if (jsonString == null) { // No plate
            return plates;
        }
        try {
            final JSONObject jObject = new JSONObject(jsonString);
            if (jObject.has("plates")) {
                final JSONArray jPlates = jObject.getJSONArray("plates");
                for (int i = 0; i < jPlates.length(); ++i) {
                    final JSONObject jPlate = jPlates.getJSONObject(i);
                    // The plate itself (backward-compatible with 2.0.0)
                    final Plate plate = new Plate();
                    plate.mWarpedBox = new float[8];
                    if (jPlate.has("text")) { // Starting 3.2 it's possible to have cars without plates when enabled
                        final JSONArray jConfidences = jPlate.getJSONArray("confidences");
                        final JSONArray jWarpedBox = jPlate.getJSONArray("warpedBox");
                        plate.mNumber = jPlate.getString("text");
                        for (int j = 0; j < 8; ++j) {
                            plate.mWarpedBox[j] = (float) jWarpedBox.getDouble(j);
                        }
                        // "confidences" order: [recognition, detection]
                        plate.mRecognitionConfidence = (float) jConfidences.getDouble(0);
                        plate.mDetectionConfidence = (float) jConfidences.getDouble(1);
                    }
                    else {
                        plate.mNumber = "";
                        plate.mRecognitionConfidence = 0.f;
                        plate.mDetectionConfidence = 0.f;
                    }
                    // Countries (optional)
                    if (jPlate.has("country")) {
                        plate.mCountries = new LinkedList<>();
                        final JSONArray jCountries = jPlate.getJSONArray("country");
                        for (int k = 0; k < jCountries.length(); ++k) {
                            final JSONObject jCountry = jCountries.getJSONObject(k);
                            final Country country = new Country();
                            country.mKlass = jCountry.getInt("klass");
                            country.mConfidence = (float) jCountry.getDouble("confidence");
                            country.mCode = jCountry.getString("code");
                            country.mName = jCountry.getString("name");
                            if (jCountry.has("state")) { // optional
                                country.mState = jCountry.getString("state");
                            }
                            if (jCountry.has("other")) { // optional
                                country.mOther = jCountry.getString("other");
                            }
                            plate.mCountries.add(country);
                        }
                    }
                    // Car (Added in 3.0.0)
                    if (jPlate.has("car")) {
                        final JSONObject jCar = jPlate.getJSONObject("car");
                        final JSONArray jCarWarpedBox = jCar.getJSONArray("warpedBox");
                        plate.mCar = new Car();
                        plate.mCar.mConfidence = (float) jCar.getDouble("confidence");
                        plate.mCar.mWarpedBox = new float[8];
                        for (int j = 0; j < 8; ++j) {
                            plate.mCar.mWarpedBox[j] = (float) jCarWarpedBox.getDouble(j);
                        }
                        if (jCar.has("color")) {
                            plate.mCar.mColors = new LinkedList<>();
                            final JSONArray jColors = jCar.getJSONArray("color");
                            for (int k = 0; k < jColors.length(); ++k) {
                                final JSONObject jColor = jColors.getJSONObject(k);
                                final Car.Attribute color = new Car.Attribute();
                                color.mKlass = jColor.getInt("klass");
                                color.mConfidence = (float) jColor.getDouble("confidence");
                                color.mName = jColor.getString("name"); // Name in English
                                plate.mCar.mColors.add(color);
                            }
                        }
                        if (jCar.has("makeModelYear")) {
                            plate.mCar.mMakesModelsYears = new LinkedList<>();
                            final JSONArray jMMYs = jCar.getJSONArray("makeModelYear");
                            for (int k = 0; k < jMMYs.length(); ++k) {
                                final JSONObject jMMY = jMMYs.getJSONObject(k);
                                final Car.MakeModelYear mmy = new Car.MakeModelYear();
                                mmy.mKlass = jMMY.getInt("klass");
                                mmy.mConfidence = (float) jMMY.getDouble("confidence");
                                mmy.mMake = jMMY.getString("make");
                                mmy.mModel = jMMY.getString("model");
                                mmy.mYear = jMMY.get("year").toString(); // Maybe Integer or String or whatever
                                plate.mCar.mMakesModelsYears.add(mmy);
                            }
                        }
                        if (jCar.has("bodyStyle")) {
                            plate.mCar.mBodyStyles = new LinkedList<>();
                            final JSONArray jBodyStyles = jCar.getJSONArray("bodyStyle");
                            for (int k = 0; k < jBodyStyles.length(); ++k) {
                                final JSONObject jBodyStyle = jBodyStyles.getJSONObject(k);
                                final Car.Attribute bodyStyle = new Car.Attribute();
                                bodyStyle.mKlass = jBodyStyle.getInt("klass");
                                bodyStyle.mConfidence = (float) jBodyStyle.getDouble("confidence");
                                bodyStyle.mName = jBodyStyle.getString("name"); // Name in English
                                plate.mCar.mBodyStyles.add(bodyStyle);
                            }
                        }
                    }
                    plates.add(plate);
                }
            }
        }
        catch (JSONException e) {
            e.printStackTrace();
            Log.e(TAG, e.toString());
        }
        return plates;
    }

    /**
     * Backport of {@code Map.getOrDefault}, which requires API level 24.
     * Like the platform method, distinguishes "mapped to null" (returns null)
     * from "key absent" (returns {@code defaultValue}).
     */
    public static <K, V> V getOrDefault(@NonNull Map<K, V> map, K key, V defaultValue) {
        V v;
        return (((v = map.get(key)) != null) || map.containsKey(key))
                ? v
                : defaultValue;
    }

    /**
     * Checks if the returned result is success. An assertion will be raised if it's not the case.
     * In production you should catch the exception and perform the appropriate action.
     * @param result The result to check
     * @return The same result
     * @throws AssertionError when the result is not OK
     */
    static public final AlprResult assertIsOk(final AlprResult result) {
        if (!result.isOK()) {
            throw new AssertionError("Operation failed: " + result.phrase());
        }
        return result;
    }

    /**
     * Converts the result to a human-readable String (code, phrase, plate count and raw JSON).
     * @param result the SDK result
     * @return debug string
     */
    static public final String resultToString(final AlprResult result) {
        return "code: " + result.code() + ", phrase: " + result.phrase() + ", numPlates: " + result.numPlates() + ", json: " + result.json();
    }

    /**
     * Opens an asset file and returns a channel over it.
     * @param assets the application's asset manager
     * @param fileName the asset file name
     * @return an open channel — the caller must close it — or null on I/O failure
     */
    static public FileChannel readFileFromAssets(final AssetManager assets, final String fileName) {
        FileInputStream inputStream = null;
        try {
            AssetFileDescriptor fileDescriptor = assets.openFd(fileName);
            inputStream = new FileInputStream(fileDescriptor.getFileDescriptor());
            // NOTE(review): the AssetFileDescriptor is never closed here; the stream
            // wraps the same underlying file descriptor, so closing it now would
            // invalidate the returned channel — confirm whether callers should be
            // handed the descriptor to close alongside the channel.
            return inputStream.getChannel();
            // To return DirectByteBuffer: fileChannel.map(FileChannel.MapMode.READ_ONLY, fileDescriptor.getStartOffset(), fileDescriptor.getDeclaredLength());
        } catch (IOException e) {
            e.printStackTrace();
            Log.e(TAG, e.toString());
            return null;
        }
    }
}
================================================
FILE: demo/src/main/res/layout/activity_main.xml
================================================
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:id="@+id/container"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="#000"
tools:context="org.buyun.alpr.common.AlprActivity" />
================================================
FILE: demo/src/main/res/layout/fragment_camera.xml
================================================
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
android:layout_width="match_parent"
android:layout_height="match_parent">
<org.buyun.alpr.common.AlprGLSurfaceView
android:id="@+id/glSurfaceView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentStart="true"
android:layout_alignParentEnd="true"
android:layout_alignParentTop="true"
android:layout_alignParentBottom="true"
android:layout_alignParentLeft="true"
android:layout_alignParentRight="true"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
android:layout_marginStart="0dp"
android:layout_marginTop="0dp"
android:layout_marginBottom="0dp"/>
<!-- NOTE(review): this view sets android:layout_marginStart="-1dp" below, while the
     layout-land variant of this file uses 0dp — confirm the -1dp offset is an
     intentional seam workaround or align it with the landscape layout. -->
<org.buyun.alpr.common.AlprPlateView
android:id="@+id/plateView"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_alignParentStart="true"
android:layout_alignParentEnd="true"
android:layout_alignParentTop="true"
android:layout_alignParentBottom="true"
android:layout_alignParentLeft="true"
android:layout_alignParentRight="true"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
android:layout_marginStart="-1dp"
android:layout_marginTop="0dp"
android:layout_marginBottom="0dp" />
</RelativeLayout>
================================================
FILE: demo/src/main/res/layout-land/fragment_camera.xml
================================================
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
android:layout_width="match_parent"
android:layout_height="match_parent">
<org.buyun.alpr.common.AlprGLSurfaceView
android:id="@+id/glSurfaceView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentStart="true"
android:layout_alignParentEnd="true"
android:layout_alignParentTop="true"
android:layout_alignParentBottom="true"
android:layout_alignParentLeft="true"
android:layout_alignParentRight="true"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
android:layout_marginStart="0dp"
android:layout_marginTop="0dp"
android:layout_marginBottom="0dp" />
<org.buyun.alpr.common.AlprPlateView
android:id="@+id/plateView"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_alignParentStart="true"
android:layout_alignParentEnd="true"
android:layout_alignParentTop="true"
android:layout_alignParentBottom="true"
android:layout_alignParentLeft="true"
android:layout_alignParentRight="true"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
android:layout_marginStart="0dp"
android:layout_marginTop="0dp"
android:layout_marginBottom="0dp" />
</RelativeLayout>
================================================
FILE: demo/src/main/res/values/base-strings.xml
================================================
<?xml version="1.0" encoding="UTF-8"?>
<resources>
<string name="app_name">Alpr Demo</string>
<string name="intro_message">
<![CDATA[
This sample demonstrates how to use the ALPR/ANPR SDK in sequential mode
to recognize license plates on video stream
]]>
</string>
</resources>
================================================
FILE: demo/src/main/res/values/colors.xml
================================================
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="control_background">#cc4285f4</color>
</resources>
================================================
FILE: demo/src/main/res/values/strings.xml
================================================
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="picture">Picture</string>
<string name="description_info">Info</string>
<string name="request_permission">This sample needs camera permission.</string>
<string name="camera_error">This device doesn\'t support Camera2 API.</string>
</resources>
================================================
FILE: demo/src/main/res/values/styles.xml
================================================
<?xml version="1.0" encoding="utf-8"?>
<resources>
<style name="MaterialTheme" parent="Theme.AppCompat.Light.NoActionBar">
<item name="android:windowFullscreen">true</item>
</style>
</resources>
================================================
FILE: demo/src/main/res/values/template-dimens.xml
================================================
<resources>
<dimen name="margin_tiny">4dp</dimen>
<dimen name="margin_small">8dp</dimen>
<dimen name="margin_medium">16dp</dimen>
<dimen name="margin_large">32dp</dimen>
<dimen name="margin_huge">64dp</dimen>
<dimen name="horizontal_page_margin">@dimen/margin_medium</dimen>
<dimen name="vertical_page_margin">@dimen/margin_medium</dimen>
</resources>
================================================
FILE: demo/src/main/res/values/template-styles.xml
================================================
<resources>
<!-- Activity themes -->
<style name="Theme.Base" parent="android:Theme.Light" />
<style name="Theme.Sample" parent="Theme.Base" />
<style name="AppTheme" parent="Theme.Sample" />
<!-- Widget styling -->
<style name="Widget" />
<style name="Widget.SampleMessage">
<item name="android:textAppearance">?android:textAppearanceMedium</item>
<item name="android:lineSpacingMultiplier">1.1</item>
</style>
<style name="Widget.SampleMessageTile">
<item name="android:background">@drawable/tile</item>
<item name="android:shadowColor">#7F000000</item>
<item name="android:shadowDy">-3.5</item>
<item name="android:shadowRadius">2</item>
</style>
</resources>
================================================
FILE: demo/src/main/res/values-sw600dp/template-dimens.xml
================================================
<resources>
<dimen name="horizontal_page_margin">@dimen/margin_huge</dimen>
<dimen name="vertical_page_margin">@dimen/margin_medium</dimen>
</resources>
================================================
FILE: demo/src/main/res/values-sw600dp/template-styles.xml
================================================
<resources>
<style name="Widget.SampleMessage">
<item name="android:textAppearance">?android:textAppearanceLarge</item>
<item name="android:lineSpacingMultiplier">1.2</item>
<item name="android:shadowDy">-6.5</item>
</style>
</resources>
================================================
FILE: demo/src/main/res/values-v11/template-styles.xml
================================================
<resources>
<!-- Activity themes -->
<style name="Theme.Base" parent="android:Theme.Holo.Light" />
</resources>
================================================
FILE: demo/src/main/res/values-v21/base-colors.xml
================================================
<?xml version="1.0" encoding="UTF-8"?>
<resources>
</resources>
================================================
FILE: demo/src/main/res/values-v21/base-template-styles.xml
================================================
<?xml version="1.0" encoding="UTF-8"?>
<resources>
<!-- Activity themes -->
<style name="Theme.Base" parent="android:Theme.Material.Light">
</style>
</resources>
================================================
FILE: gradle/wrapper/gradle-wrapper.properties
================================================
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
================================================
FILE: gradle.properties
================================================
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx1536m
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
# AndroidX package structure to make it clearer which packages are bundled with the
# Android operating system, and which are packaged with your app's APK
# https://developer.android.com/topic/libraries/support-library/androidx-rn
android.useAndroidX=true
# Automatically convert third-party libraries to use AndroidX
android.enableJetifier=true
================================================
FILE: gradlew
================================================
#!/usr/bin/env sh
#
# Copyright 2015 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# NOTE: this is the standard Gradle-generated wrapper script; it is replaced
# when the wrapper is regenerated, so avoid hand-editing beyond comments.
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
# Remember the caller's directory, resolve the script's real directory
# (APP_HOME), then return to where we started.
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
# Each option is individually double-quoted inside the single quotes; the
# final `eval set --` below re-parses this string word by word.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
# Print a warning message to stdout.
warn () {
echo "$*"
}
# Print a message and abort the script with exit code 1.
die () {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
# Only the wrapper jar is needed on the classpath; it downloads/launches Gradle.
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin or MSYS, switch paths to Windows format before running java
if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=`expr $i + 1`
done
# /bin/sh has no arrays, so positional parameters are rebuilt explicitly
# (this caps conversion at 9 arguments).
case $i in
0) set -- ;;
1) set -- "$args0" ;;
2) set -- "$args0" "$args1" ;;
3) set -- "$args0" "$args1" "$args2" ;;
4) set -- "$args0" "$args1" "$args2" "$args3" ;;
5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
# Single-quotes each argument (escaping embedded quotes) so the later `eval`
# cannot re-split or re-expand user-supplied arguments.
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=`save "$@"`
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
# Replace this shell with the JVM running the Gradle wrapper main class.
exec "$JAVACMD" "$@"
================================================
FILE: gradlew.bat
================================================
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Suppress command echoing unless the caller has set DEBUG.
@if "%DEBUG%" == "" @echo off
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
@rem %~dp0 expands to this script's directory (with trailing backslash).
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
@rem %~n0 is this script's base name (no extension); passed to Gradle below
@rem as the org.gradle.appname system property.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
@rem JAVA_HOME not set: fall back to whatever java.exe is on the PATH and
@rem probe it by asking for its version.
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
@rem NOTE(review): comparing the string "%ERRORLEVEL%" misbehaves if the user
@rem has defined an ERRORLEVEL environment variable; left as shipped by Gradle.
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
@rem Strip any quotes the user put around JAVA_HOME before composing the path.
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega
================================================
FILE: settings.gradle
================================================
// Multi-project build: the ':sdk' library module and the ':demo' sample app.
// NOTE(review): no sdk/ directory is visible in this extraction — confirm the
// ':sdk' module is provided elsewhere (e.g. a checked-in module or AAR)
// before building.
include ':sdk', ':demo'
rootProject.name='AlprDemo'
gitextract_uah6n3qx/ ├── .gitignore ├── README.md ├── build.gradle ├── demo/ │ ├── .gitignore │ ├── build.gradle │ ├── proguard-rules.pro │ └── src/ │ └── main/ │ ├── AndroidManifest.xml │ ├── java/ │ │ └── org/ │ │ └── buyun/ │ │ └── alpr/ │ │ ├── AlprVideoSequentialActivity.java │ │ └── common/ │ │ ├── AlprActivity.java │ │ ├── AlprBackgroundTask.java │ │ ├── AlprCameraFragment.java │ │ ├── AlprGLSurfaceView.java │ │ ├── AlprImage.java │ │ ├── AlprPlateView.java │ │ └── AlprUtils.java │ └── res/ │ ├── layout/ │ │ ├── activity_main.xml │ │ └── fragment_camera.xml │ ├── layout-land/ │ │ └── fragment_camera.xml │ ├── values/ │ │ ├── base-strings.xml │ │ ├── colors.xml │ │ ├── strings.xml │ │ ├── styles.xml │ │ ├── template-dimens.xml │ │ └── template-styles.xml │ ├── values-sw600dp/ │ │ ├── template-dimens.xml │ │ └── template-styles.xml │ ├── values-v11/ │ │ └── template-styles.xml │ └── values-v21/ │ ├── base-colors.xml │ └── base-template-styles.xml ├── gradle/ │ └── wrapper/ │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradle.properties ├── gradlew ├── gradlew.bat └── settings.gradle
SYMBOL INDEX (145 symbols across 8 files)
FILE: demo/src/main/java/org/buyun/alpr/AlprVideoSequentialActivity.java
class AlprVideoSequentialActivity (line 19) | public class AlprVideoSequentialActivity extends AlprActivity {
method onCreate (line 49) | @Override
method onResume (line 59) | @Override
method onDestroy (line 65) | @Override
method getLayoutResId (line 71) | @Override
method getJsonConfig (line 76) | @Override
method isParallelDeliveryEnabled (line 121) | @Override
method getDetectROI (line 124) | @Override
FILE: demo/src/main/java/org/buyun/alpr/common/AlprActivity.java
class AlprActivity (line 30) | public abstract class AlprActivity extends AppCompatActivity implements ...
class MyUltAlprSdkParallelDeliveryCallback (line 42) | static class MyUltAlprSdkParallelDeliveryCallback extends AlprCallback {
method setAlprPlateView (line 50) | void setAlprPlateView(@NonNull final AlprPlateView view) {
method setImageSize (line 54) | void setImageSize(@NonNull final Size imageSize, @NonNull final int ...
method setDurationTime (line 59) | void setDurationTime(final long totalDuration) {
method onNewResult (line 63) | @Override
method newInstance (line 70) | static MyUltAlprSdkParallelDeliveryCallback newInstance() {
method onCreate (line 83) | @Override
method onDestroy (line 114) | @Override
method onResume (line 123) | @Override
method onPause (line 130) | @Override
method setAlprPlateView (line 137) | @Override
method setImage (line 155) | @Override
method getDebugInternalDataPath (line 232) | protected String getDebugInternalDataPath() {
method getActivationServerUrl (line 241) | protected String getActivationServerUrl() {
method getActivationMasterOrSlaveKey (line 245) | protected String getActivationMasterOrSlaveKey() {
method getLayoutResId (line 253) | protected abstract int getLayoutResId();
method getJsonConfig (line 259) | protected abstract JSONObject getJsonConfig();
method isParallelDeliveryEnabled (line 263) | protected abstract boolean isParallelDeliveryEnabled();
method getDetectROI (line 265) | protected abstract List<Float> getDetectROI();
FILE: demo/src/main/java/org/buyun/alpr/common/AlprBackgroundTask.java
class AlprBackgroundTask (line 7) | public class AlprBackgroundTask {
method getHandler (line 12) | public synchronized final Handler getHandler() {
method isRunning (line 15) | public synchronized final boolean isRunning() { return mHandler != nul...
method start (line 17) | public synchronized void start(final String threadName) {
method stop (line 26) | public synchronized void stop() {
method post (line 40) | public synchronized void post(final Runnable r) {
FILE: demo/src/main/java/org/buyun/alpr/common/AlprCameraFragment.java
class AlprCameraFragment (line 52) | public class AlprCameraFragment extends Fragment
method onOpened (line 124) | @Override
method onDisconnected (line 132) | @Override
method onError (line 139) | @Override
method onImageAvailable (line 168) | @Override
method AlprCameraFragment (line 216) | public AlprCameraFragment() {
method AlprCameraFragment (line 220) | private AlprCameraFragment(final Size preferredSize, final AlprCameraF...
method newInstance (line 230) | public static AlprCameraFragment newInstance(final Size preferredSize,...
method onCreateView (line 234) | @Override
method onViewCreated (line 240) | @Override
method onActivityCreated (line 247) | @Override
method onResume (line 252) | @Override
method onPause (line 266) | @Override
method requestCameraPermission (line 273) | private void requestCameraPermission() {
method onRequestPermissionsResult (line 281) | @Override
method showToast (line 299) | private void showToast(final String text) {
method chooseOptimalSize (line 320) | private static Size chooseOptimalSize(final Size[] choices, final int ...
method setUpCameraOutputs (line 365) | @SuppressWarnings("SuspiciousNameCombination")
method openCamera (line 428) | private void openCamera(int width, int height) {
method closeCamera (line 452) | private void closeCamera() {
method startBackgroundThreads (line 484) | private void startBackgroundThreads() {
method stopBackgroundThreads (line 493) | private void stopBackgroundThreads() {
method createCameraCaptureSession (line 502) | private void createCameraCaptureSession() {
type AlprCameraFragmentSink (line 573) | public static interface AlprCameraFragmentSink {
method setAlprPlateView (line 579) | public void setAlprPlateView(@NonNull final AlprPlateView view);
method setImage (line 586) | public void setImage(@NonNull final Image image, final int jpegOrien...
class CompareSizesByArea (line 592) | static class CompareSizesByArea implements Comparator<Size> {
method compare (line 594) | @Override
class ErrorDialog (line 606) | public static class ErrorDialog extends DialogFragment {
method newInstance (line 610) | public static ErrorDialog newInstance(String message) {
method onCreateDialog (line 618) | @NonNull
class ConfirmationDialog (line 638) | public static class ConfirmationDialog extends DialogFragment {
method onCreateDialog (line 640) | @NonNull
FILE: demo/src/main/java/org/buyun/alpr/common/AlprGLSurfaceView.java
class AlprGLSurfaceView (line 22) | public class AlprGLSurfaceView extends GLSurfaceView implements GLSurfac...
method AlprGLSurfaceView (line 122) | public AlprGLSurfaceView(android.content.Context context) {
method AlprGLSurfaceView (line 127) | public AlprGLSurfaceView(android.content.Context context, android.util...
method initGL (line 132) | private void initGL() {
method setAspectRatio (line 156) | public void setAspectRatio(int width, int height) {
method setImage (line 169) | public void setImage(final Image image, final int jpegOrientation){
method isReady (line 194) | public boolean isReady(){
method onMeasure (line 198) | @Override
method surfaceCreated (line 214) | @Override
method surfaceDestroyed (line 221) | @Override
method onDrawFrame (line 231) | @Override
method onSurfaceChanged (line 288) | @Override
method onSurfaceCreated (line 294) | @Override
method loadShader (line 361) | private int loadShader(int shaderType, String source) {
method createProgram (line 378) | private int createProgram(String vertexSource, String fragmentSource) {
method updateVertices (line 408) | private void updateVertices() {
method checkGlError (line 446) | private void checkGlError(String op) {
FILE: demo/src/main/java/org/buyun/alpr/common/AlprImage.java
class AlprImage (line 7) | public class AlprImage {
method AlprImage (line 12) | private AlprImage(final Image image) {
method newInstance (line 18) | public static AlprImage newInstance(final Image image) {
method getImage (line 22) | public final Image getImage() {
method takeRef (line 27) | public AlprImage takeRef() {
method releaseRef (line 36) | public void releaseRef() {
method finalize (line 45) | @Override
FILE: demo/src/main/java/org/buyun/alpr/common/AlprPlateView.java
class AlprPlateView (line 28) | public class AlprPlateView extends View {
method AlprPlateView (line 72) | public AlprPlateView(final Context context, final AttributeSet attrs) {
method setDetectROI (line 138) | public void setDetectROI(final RectF roi) { mDetectROI = roi; }
method setAspectRatio (line 145) | public void setAspectRatio(int width, int height) {
method onMeasure (line 154) | @Override
method setResult (line 176) | public synchronized void setResult(@NonNull final AlprResult result, @...
method draw (line 184) | @Override
FILE: demo/src/main/java/org/buyun/alpr/common/AlprUtils.java
class AlprUtils (line 25) | public class AlprUtils {
class AlprTransformationInfo (line 30) | public static class AlprTransformationInfo {
method AlprTransformationInfo (line 36) | public AlprTransformationInfo(final int imageWidth, final int imageH...
method transformX (line 45) | public float transformX(final float x) { return x * mRatio + mXOffse...
method transformY (line 46) | public float transformY(final float y) { return y * mRatio + mYOffse...
method transform (line 47) | public PointF transform(final PointF p) { return new PointF(transfor...
method getXOffset (line 48) | public int getXOffset() { return mXOffset; }
method getYOffset (line 49) | public int getYOffset() { return mYOffset; }
method getRatio (line 50) | public float getRatio() { return mRatio; }
method getWidth (line 51) | public int getWidth() { return mWidth; }
method getHeight (line 52) | public int getHeight() { return mHeight; }
class Car (line 55) | static class Car {
class Attribute (line 56) | static class Attribute {
method getKlass (line 61) | public int getKlass() { return mKlass; }
method getName (line 62) | public String getName() { return mName; }
method getConfidence (line 63) | public float getConfidence() { return mConfidence; }
class MakeModelYear (line 65) | static class MakeModelYear {
method getKlass (line 72) | public int getKlass() { return mKlass; }
method getMake (line 73) | public String getMake() { return mMake; }
method getModel (line 74) | public String getModel() { return mModel; }
method getYear (line 75) | public String getYear() { return mYear; }
method getConfidence (line 76) | public float getConfidence() { return mConfidence; }
method getWarpedBox (line 85) | public float[] getWarpedBox() { return mWarpedBox; }
method getConfidence (line 86) | public float getConfidence() { return mConfidence; }
method getColors (line 87) | public List<Car.Attribute> getColors() { return mColors; }
method getBodyStyles (line 88) | public List<Car.Attribute> getBodyStyles() { return mBodyStyles; }
method getMakesModelsYears (line 89) | public List<Car.MakeModelYear> getMakesModelsYears() { return mMakes...
class Country (line 95) | static class Country {
method getKlass (line 103) | public int getKlass() { return mKlass; }
method getCode (line 104) | public String getCode() { return mCode; }
method getName (line 105) | public String getName() { return mName; }
method getState (line 106) | public String getState() { return mState; }
method getOther (line 107) | public String getOther() { return mOther; }
method getConfidence (line 108) | public float getConfidence() { return mConfidence; }
class Plate (line 114) | static class Plate {
method getNumber (line 122) | public String getNumber() { return mNumber; }
method getDetectionConfidence (line 123) | public float getDetectionConfidence() { return mDetectionConfidence; }
method getRecognitionConfidence (line 124) | public float getRecognitionConfidence() { return mRecognitionConfide...
method getWarpedBox (line 125) | public float[] getWarpedBox() { return mWarpedBox; }
method getCountries (line 127) | public List<Country> getCountries() { return mCountries; }
method getCar (line 128) | public Car getCar() { return mCar; }
method extractFrameId (line 131) | static public final long extractFrameId(final AlprResult result) {
method extractPlates (line 143) | static public final List<Plate> extractPlates(final AlprResult result) {
method getOrDefault (line 268) | public static <K, V> V getOrDefault(@NonNull Map<K, V> map, K key, V d...
method assertIsOk (line 281) | static public final AlprResult assertIsOk(final AlprResult result) {
method resultToString (line 293) | static public final String resultToString(final AlprResult result) {
method readFileFromAssets (line 302) | static public FileChannel readFileFromAssets(final AssetManager assets...
Condensed preview — 35 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (126K chars).
[
{
"path": ".gitignore",
"chars": 208,
"preview": "*.iml\n.gradle\n/local.properties\n/.idea/caches\n/.idea/libraries\n/.idea/modules.xml\n/.idea/workspace.xml\n/.idea/navEditor."
},
{
"path": "README.md",
"chars": 1163,
"preview": "<div align=\"center\">\n <a href=\"https://join.slack.com/t/faceonlive/shared_invite/zt-2drx19c5t-vQsR4TUGPD8oL7i7BXdKZA\">S"
},
{
"path": "build.gradle",
"chars": 558,
"preview": "// Top-level build file where you can add configuration options common to all sub-projects/modules.\n\nbuildscript {\n r"
},
{
"path": "demo/.gitignore",
"chars": 7,
"preview": "/build\n"
},
{
"path": "demo/build.gradle",
"chars": 912,
"preview": "apply plugin: 'com.android.application'\n\nandroid {\n compileSdkVersion 28\n\n\n defaultConfig {\n applicationId "
},
{
"path": "demo/proguard-rules.pro",
"chars": 790,
"preview": "# Add project specific ProGuard rules here.\n# You can control the set of applied configuration files using the\n# proguar"
},
{
"path": "demo/src/main/AndroidManifest.xml",
"chars": 992,
"preview": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"\n package="
},
{
"path": "demo/src/main/java/org/buyun/alpr/AlprVideoSequentialActivity.java",
"chars": 5196,
"preview": "package org.buyun.alpr;\n\nimport android.os.Bundle;\nimport android.util.Log;\nimport android.util.Size;\n\nimport org.buyun."
},
{
"path": "demo/src/main/java/org/buyun/alpr/common/AlprActivity.java",
"chars": 9583,
"preview": "package org.buyun.alpr.common;\n\nimport android.graphics.RectF;\nimport android.media.ExifInterface;\nimport android.media."
},
{
"path": "demo/src/main/java/org/buyun/alpr/common/AlprBackgroundTask.java",
"chars": 1070,
"preview": "package org.buyun.alpr.common;\n\n\nimport android.os.Handler;\nimport android.os.HandlerThread;\n\npublic class AlprBackgroun"
},
{
"path": "demo/src/main/java/org/buyun/alpr/common/AlprCameraFragment.java",
"chars": 25281,
"preview": "package org.buyun.alpr.common;\n\nimport android.Manifest;\nimport android.app.Activity;\nimport android.app.AlertDialog;\nim"
},
{
"path": "demo/src/main/java/org/buyun/alpr/common/AlprGLSurfaceView.java",
"chars": 18002,
"preview": "package org.buyun.alpr.common;\n\nimport java.nio.ByteBuffer;\nimport java.nio.ByteOrder;\nimport java.nio.FloatBuffer;\nimpo"
},
{
"path": "demo/src/main/java/org/buyun/alpr/common/AlprImage.java",
"chars": 1160,
"preview": "package org.buyun.alpr.common;\n\nimport android.media.Image;\n\nimport java.util.concurrent.atomic.AtomicInteger;\n\npublic c"
},
{
"path": "demo/src/main/java/org/buyun/alpr/common/AlprPlateView.java",
"chars": 21455,
"preview": "package org.buyun.alpr.common;\n\nimport android.content.Context;\nimport android.graphics.Canvas;\nimport android.graphics."
},
{
"path": "demo/src/main/java/org/buyun/alpr/common/AlprUtils.java",
"chars": 15135,
"preview": "package org.buyun.alpr.common;\n\nimport android.content.res.AssetFileDescriptor;\nimport android.content.res.AssetManager;"
},
{
"path": "demo/src/main/res/layout/activity_main.xml",
"chars": 363,
"preview": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<FrameLayout xmlns:android=\"http://schemas.android.com/apk/res/android\"\n xmlns"
},
{
"path": "demo/src/main/res/layout/fragment_camera.xml",
"chars": 2051,
"preview": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<RelativeLayout xmlns:android=\"http://schemas.android.com/apk/res/android\"\n xm"
},
{
"path": "demo/src/main/res/layout-land/fragment_camera.xml",
"chars": 2051,
"preview": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<RelativeLayout xmlns:android=\"http://schemas.android.com/apk/res/android\"\n xm"
},
{
"path": "demo/src/main/res/values/base-strings.xml",
"chars": 362,
"preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<resources>\n <string name=\"app_name\">Alpr Demo</string>\n <string name=\"int"
},
{
"path": "demo/src/main/res/values/colors.xml",
"chars": 119,
"preview": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<resources>\n <color name=\"control_background\">#cc4285f4</color>\n</resources>\n"
},
{
"path": "demo/src/main/res/values/strings.xml",
"chars": 325,
"preview": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<resources>\n <string name=\"picture\">Picture</string>\n <string name=\"descrip"
},
{
"path": "demo/src/main/res/values/styles.xml",
"chars": 211,
"preview": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<resources>\n <style name=\"MaterialTheme\" parent=\"Theme.AppCompat.Light.NoActio"
},
{
"path": "demo/src/main/res/values/template-dimens.xml",
"chars": 383,
"preview": "<resources>\n\n <dimen name=\"margin_tiny\">4dp</dimen>\n <dimen name=\"margin_small\">8dp</dimen>\n <dimen name=\"margi"
},
{
"path": "demo/src/main/res/values/template-styles.xml",
"chars": 759,
"preview": "<resources>\n\n <!-- Activity themes -->\n\n <style name=\"Theme.Base\" parent=\"android:Theme.Light\" />\n\n <style name"
},
{
"path": "demo/src/main/res/values-sw600dp/template-dimens.xml",
"chars": 163,
"preview": "<resources>\n\n <dimen name=\"horizontal_page_margin\">@dimen/margin_huge</dimen>\n <dimen name=\"vertical_page_margin\">"
},
{
"path": "demo/src/main/res/values-sw600dp/template-styles.xml",
"chars": 272,
"preview": "<resources>\n\n <style name=\"Widget.SampleMessage\">\n <item name=\"android:textAppearance\">?android:textAppearance"
},
{
"path": "demo/src/main/res/values-v11/template-styles.xml",
"chars": 122,
"preview": "<resources>\n\n <!-- Activity themes -->\n <style name=\"Theme.Base\" parent=\"android:Theme.Holo.Light\" />\n\n</resources"
},
{
"path": "demo/src/main/res/values-v21/base-colors.xml",
"chars": 65,
"preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<resources>\n\n</resources>\n"
},
{
"path": "demo/src/main/res/values-v21/base-template-styles.xml",
"chars": 176,
"preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<resources>\n\n <!-- Activity themes -->\n <style name=\"Theme.Base\" parent=\"an"
},
{
"path": "gradle/wrapper/gradle-wrapper.properties",
"chars": 200,
"preview": "distributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\ndistributionUrl=https\\://services.gradle.org/distributi"
},
{
"path": "gradle.properties",
"chars": 1073,
"preview": "# Project-wide Gradle settings.\n# IDE (e.g. Android Studio) users:\n# Gradle settings configured through the IDE *will ov"
},
{
"path": "gradlew",
"chars": 5770,
"preview": "#!/usr/bin/env sh\n\n#\n# Copyright 2015 the original author or authors.\n#\n# Licensed under the Apache License, Version 2.0"
},
{
"path": "gradlew.bat",
"chars": 2954,
"preview": "@rem\n@rem Copyright 2015 the original author or authors.\n@rem\n@rem Licensed under the Apache License, Version 2.0 (the \""
},
{
"path": "settings.gradle",
"chars": 52,
"preview": "include ':sdk', ':demo'\nrootProject.name='AlprDemo'\n"
}
]
// ... and 1 more file (download for full content)
About this extraction
This page contains the full source code of the FaceOnLive/License-Plate-Recognition-SDK-Android GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 35 files (116.2 KB), approximately 27.8k tokens, and a symbol index with 145 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.
Extracted by GitExtract — free GitHub repo to text converter for AI. Built by Nikandr Surkov.