Repository: FaceOnLive/License-Plate-Recognition-SDK-Android
Branch: main
Commit: 29a165725855
Files: 35
Total size: 116.2 KB
Directory structure:
gitextract_uah6n3qx/
├── .gitignore
├── README.md
├── build.gradle
├── demo/
│ ├── .gitignore
│ ├── build.gradle
│ ├── proguard-rules.pro
│ └── src/
│ └── main/
│ ├── AndroidManifest.xml
│ ├── java/
│ │ └── org/
│ │ └── buyun/
│ │ └── alpr/
│ │ ├── AlprVideoSequentialActivity.java
│ │ └── common/
│ │ ├── AlprActivity.java
│ │ ├── AlprBackgroundTask.java
│ │ ├── AlprCameraFragment.java
│ │ ├── AlprGLSurfaceView.java
│ │ ├── AlprImage.java
│ │ ├── AlprPlateView.java
│ │ └── AlprUtils.java
│ └── res/
│ ├── layout/
│ │ ├── activity_main.xml
│ │ └── fragment_camera.xml
│ ├── layout-land/
│ │ └── fragment_camera.xml
│ ├── values/
│ │ ├── base-strings.xml
│ │ ├── colors.xml
│ │ ├── strings.xml
│ │ ├── styles.xml
│ │ ├── template-dimens.xml
│ │ └── template-styles.xml
│ ├── values-sw600dp/
│ │ ├── template-dimens.xml
│ │ └── template-styles.xml
│ ├── values-v11/
│ │ └── template-styles.xml
│ └── values-v21/
│ ├── base-colors.xml
│ └── base-template-styles.xml
├── gradle/
│ └── wrapper/
│ ├── gradle-wrapper.jar
│ └── gradle-wrapper.properties
├── gradle.properties
├── gradlew
├── gradlew.bat
└── settings.gradle
================================================
FILE CONTENTS
================================================
================================================
FILE: .gitignore
================================================
*.iml
.gradle
/local.properties
/.idea/caches
/.idea/libraries
/.idea/modules.xml
/.idea/workspace.xml
/.idea/navEditor.xml
/.idea/assetWizardSettings.xml
.DS_Store
/build
/captures
.externalNativeBuild
.cxx
================================================
FILE: README.md
================================================
Robust, Realtime, On-Device License Plate Recognition SDK For Android
It can not only recognize number plates, but also detect the vehicle's model, color and country of registration.
## :tada: Try It Yourself
https://user-images.githubusercontent.com/91896009/186433213-6bb1bda3-6b1b-4f71-b950-85e7d233ddff.mp4
================================================
FILE: build.gradle
================================================
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
repositories {
google()
jcenter()
}
dependencies {
classpath 'com.android.tools.build:gradle:3.5.1'
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
}
}
allprojects {
repositories {
google()
jcenter()
}
}
task clean(type: Delete) {
delete rootProject.buildDir
}
================================================
FILE: demo/.gitignore
================================================
/build
================================================
FILE: demo/build.gradle
================================================
apply plugin: 'com.android.application'
android {
compileSdkVersion 28
defaultConfig {
applicationId "org.buyun.alpr"
minSdkVersion 21
targetSdkVersion 28
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
minifyEnabled true
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
sourceSets {
main {
java.srcDirs += ['../common/src/main/java']
res.srcDirs += ['../common/src/main/res']
}
}
}
dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation 'androidx.appcompat:appcompat:1.1.0'
implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
implementation project(":sdk")
}
================================================
FILE: demo/proguard-rules.pro
================================================
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
-keep class org.buyun.alpr.sdk.** {*;}
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile
================================================
FILE: demo/src/main/AndroidManifest.xml
================================================
================================================
FILE: demo/src/main/java/org/buyun/alpr/AlprVideoSequentialActivity.java
================================================
package org.buyun.alpr;
import android.os.Bundle;
import android.util.Log;
import android.util.Size;
import org.buyun.alpr.common.AlprActivity;
import org.buyun.alpr.common.AlprCameraFragment;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.Arrays;
import java.util.List;
/**
* Main activity
*/
/**
 * Main demo activity.
 *
 * Runs the ALPR engine in *sequential* delivery mode (see
 * {@link #isParallelDeliveryEnabled()}): each frame is processed and its result
 * displayed before the next frame is accepted. All engine tuning lives in the
 * CONFIG_* constants below and is serialized to JSON in {@link #getJsonConfig()}.
 */
public class AlprVideoSequentialActivity extends AlprActivity {
    static final String TAG = AlprVideoSequentialActivity.class.getCanonicalName();

    // Preferred camera preview resolution (width x height).
    static final Size PREFERRED_SIZE = new Size(1280, 720);

    // --- Engine configuration. Keys are written verbatim into the JSON config. ---
    static final String CONFIG_DEBUG_LEVEL = "info";
    static final boolean CONFIG_DEBUG_WRITE_INPUT_IMAGE = false; // must be false unless you're debugging the code
    static final int CONFIG_NUM_THREADS = -1; // -1: let the engine decide
    static final boolean CONFIG_GPGPU_ENABLED = true;
    static final int CONFIG_MAX_LATENCY = -1; // -1: engine default
    static final String CONFIG_CHARSET = "latin";
    static final boolean CONFIG_IENV_ENABLED = false;
    static final boolean CONFIG_OPENVINO_ENABLED = true;
    static final String CONFIG_OPENVINO_DEVICE = "CPU";
    static final double CONFIG_DETECT_MINSCORE = 0.1; // 10%
    static final boolean CONFIG_CAR_NOPLATE_DETECT_ENABLED = false;
    static final double CONFIG_CAR_NOPLATE_DETECT_MINSCORE = 0.8; // 80%
    // Detection region of interest; all zeros means "whole frame".
    // NOTE(review): element order appears to be [left, right, top, bottom] based on
    // how AlprActivity.setAlprPlateView() indexes it — confirm against SDK docs.
    static final List CONFIG_DETECT_ROI = Arrays.asList(0.f, 0.f, 0.f, 0.f);
    static final boolean CONFIG_PYRAMIDAL_SEARCH_ENABLED = true;
    static final double CONFIG_PYRAMIDAL_SEARCH_SENSITIVITY= 0.28; // 28%
    static final double CONFIG_PYRAMIDAL_SEARCH_MINSCORE = 0.5; // 50%
    static final int CONFIG_PYRAMIDAL_SEARCH_MIN_IMAGE_SIZE_INPIXELS = 800; // pixels
    static final boolean CONFIG_KLASS_LPCI_ENABLED = true;  // license-plate country identification
    static final boolean CONFIG_KLASS_VCR_ENABLED = true;   // vehicle color recognition
    static final boolean CONFIG_KLASS_VMMR_ENABLED = true;  // vehicle make/model recognition
    static final boolean CONFIG_KLASS_VBSR_ENABLED = false;
    static final double CONFIG_KLASS_VCR_GAMMA = 1.5;
    static final double CONFIG_RECOGN_MINSCORE = 0.4; // 40%
    static final String CONFIG_RECOGN_SCORE_TYPE = "min";
    static final boolean CONFIG_RECOGN_RECTIFY_ENABLED = false;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        Log.i(TAG, "onCreate " + this);
        super.onCreate(savedInstanceState);
        // Install the camera fragment; it feeds frames back to this activity
        // (which implements AlprCameraFragmentSink through AlprActivity).
        getSupportFragmentManager().beginTransaction()
                .replace(R.id.container, AlprCameraFragment.newInstance(PREFERRED_SIZE, this))
                .commit();
    }

    @Override
    public void onResume() {
        super.onResume();
    }

    @Override
    public void onDestroy() {
        Log.i(TAG, "onDestroy " + this);
        super.onDestroy();
    }

    /** @return the layout resource hosting the camera fragment container. */
    @Override
    protected int getLayoutResId() {
        return R.layout.activity_main;
    }

    /**
     * Builds the JSON configuration handed to {@code AlprSdk.init()}.
     * @return the JSON config (empty object if serialization fails)
     */
    @Override
    protected JSONObject getJsonConfig() {
        JSONObject config = new JSONObject();
        try {
            config.put("debug_level", CONFIG_DEBUG_LEVEL);
            config.put("debug_write_input_image_enabled", CONFIG_DEBUG_WRITE_INPUT_IMAGE);
            config.put("debug_internal_data_path", getDebugInternalDataPath());
            config.put("num_threads", CONFIG_NUM_THREADS);
            config.put("gpgpu_enabled", CONFIG_GPGPU_ENABLED);
            config.put("charset", CONFIG_CHARSET);
            config.put("max_latency", CONFIG_MAX_LATENCY);
            config.put("ienv_enabled", CONFIG_IENV_ENABLED);
            config.put("openvino_enabled", CONFIG_OPENVINO_ENABLED);
            config.put("openvino_device", CONFIG_OPENVINO_DEVICE);
            config.put("detect_minscore", CONFIG_DETECT_MINSCORE);
            config.put("detect_roi", new JSONArray(getDetectROI()));
            config.put("car_noplate_detect_enabled", CONFIG_CAR_NOPLATE_DETECT_ENABLED);
            config.put("car_noplate_detect_min_score", CONFIG_CAR_NOPLATE_DETECT_MINSCORE);
            config.put("pyramidal_search_enabled", CONFIG_PYRAMIDAL_SEARCH_ENABLED);
            config.put("pyramidal_search_sensitivity", CONFIG_PYRAMIDAL_SEARCH_SENSITIVITY);
            config.put("pyramidal_search_minscore", CONFIG_PYRAMIDAL_SEARCH_MINSCORE);
            config.put("pyramidal_search_min_image_size_inpixels", CONFIG_PYRAMIDAL_SEARCH_MIN_IMAGE_SIZE_INPIXELS);
            config.put("klass_lpci_enabled", CONFIG_KLASS_LPCI_ENABLED);
            config.put("klass_vcr_enabled", CONFIG_KLASS_VCR_ENABLED);
            config.put("klass_vmmr_enabled", CONFIG_KLASS_VMMR_ENABLED);
            config.put("klass_vbsr_enabled", CONFIG_KLASS_VBSR_ENABLED);
            config.put("klass_vcr_gamma", CONFIG_KLASS_VCR_GAMMA);
            config.put("recogn_minscore", CONFIG_RECOGN_MINSCORE);
            config.put("recogn_score_type", CONFIG_RECOGN_SCORE_TYPE);
            config.put("recogn_rectify_enabled", CONFIG_RECOGN_RECTIFY_ENABLED);
        }
        catch (JSONException e) {
            e.printStackTrace();
        }
        return config;
    }

    /** Returning false deactivates parallel delivery and enforces sequential delivery. */
    @Override
    protected boolean isParallelDeliveryEnabled() { return false; }

    /** @return the detection ROI passed to the engine and the overlay view. */
    @Override
    protected List getDetectROI() { return CONFIG_DETECT_ROI; }
}
================================================
FILE: demo/src/main/java/org/buyun/alpr/common/AlprActivity.java
================================================
package org.buyun.alpr.common;
import android.graphics.RectF;
import android.media.ExifInterface;
import android.media.Image;
import android.os.Bundle;
import android.os.Environment;
import android.os.SystemClock;
import android.util.Log;
import android.util.Size;
import android.view.Window;
import android.view.WindowManager;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import org.buyun.alpr.sdk.SDK_IMAGE_TYPE;
import org.buyun.alpr.sdk.AlprSdk;
import org.buyun.alpr.sdk.AlprCallback;
import org.buyun.alpr.sdk.AlprResult;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.File;
import java.util.List;
/**
* Base activity to subclass to make our life easier
*/
/**
 * Base activity to subclass to make our life easier.
 *
 * Responsibilities:
 *  - initializes/deinitializes the ALPR SDK in onCreate/onDestroy using the
 *    subclass-provided JSON config ({@link #getJsonConfig()});
 *  - receives camera frames through {@link #setImage(Image, int)} (the
 *    AlprCameraFragmentSink contract) and runs inference on them;
 *  - forwards results either directly (sequential mode) or via the optional
 *    parallel delivery callback to the {@link AlprPlateView} overlay.
 */
public abstract class AlprActivity extends AppCompatActivity implements AlprCameraFragment.AlprCameraFragmentSink {
    static final String TAG = AlprActivity.class.getCanonicalName();

    // Folder the engine may dump debug images into (see "debug_internal_data_path").
    private String mDebugInternalDataPath = null;
    // True while AlprSdk.process() has not returned for the current frame.
    // NOTE(review): read/written from the camera background thread without
    // synchronization — appears to rely on single-threaded frame delivery; confirm.
    private boolean mIsProcessing = false;
    private boolean mIsPaused = true;

    /**
     * Parallel callback delivery function used by the engine to notify for new deferred results.
     */
    static class MyUltAlprSdkParallelDeliveryCallback extends AlprCallback {
        static final String TAG = MyUltAlprSdkParallelDeliveryCallback.class.getCanonicalName();
        AlprPlateView mAlprPlateView;     // overlay to draw results on (may be null until set)
        Size mImageSize;                  // size of the frame the result refers to
        long mTotalDuration = 0;          // last measured process() duration, in ms
        int mOrientation = 0;             // jpeg orientation (degrees) of that frame

        void setAlprPlateView(@NonNull final AlprPlateView view) {
            mAlprPlateView = view;
        }

        void setImageSize(@NonNull final Size imageSize, @NonNull final int orientation) {
            mImageSize = imageSize;
            mOrientation = orientation;
        }

        void setDurationTime(final long totalDuration) {
            mTotalDuration = totalDuration;
        }

        /** Called by the engine when a deferred result is ready; draws it on the overlay. */
        @Override
        public void onNewResult(AlprResult result) {
            Log.d(TAG, AlprUtils.resultToString(result));
            if (mAlprPlateView != null) {
                mAlprPlateView.setResult(result, mImageSize, mTotalDuration, mOrientation);
            }
        }

        static MyUltAlprSdkParallelDeliveryCallback newInstance() {
            return new MyUltAlprSdkParallelDeliveryCallback();
        }
    }

    /**
     * The parallel delivery callback. Set to null to disable parallel mode
     * and enforce sequential mode.
     */
    private MyUltAlprSdkParallelDeliveryCallback mParallelDeliveryCallback;
    private AlprPlateView mAlprPlateView;

    @Override
    protected void onCreate(final Bundle savedInstanceState) {
        Log.i(TAG, "onCreate " + this);
        super.onCreate(savedInstanceState);
        // Full-screen, always-on display for the camera preview.
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(getLayoutResId());
        // Create folder to dump input images for debugging. The dummy file is only
        // used to resolve/create the app-external directory, then deleted.
        File dummyFile = new File(getExternalFilesDir(null), "dummyFile");
        if (!dummyFile.getParentFile().exists() && !dummyFile.getParentFile().mkdirs()) {
            Log.e(TAG,"mkdir failed: " + dummyFile.getParentFile().getAbsolutePath());
        }
        mDebugInternalDataPath = dummyFile.getParentFile().exists() ? dummyFile.getParent() : Environment.getExternalStorageDirectory().getAbsolutePath();
        dummyFile.delete();
        // Create the parallel delivery callback if enabled by the subclass.
        mParallelDeliveryCallback = isParallelDeliveryEnabled() ? MyUltAlprSdkParallelDeliveryCallback.newInstance() : null;
        // Init the engine
        final JSONObject config = getJsonConfig();
        // Retrieve previously stored key from internal storage
        final AlprResult alprResult = AlprUtils.assertIsOk(AlprSdk.init(
                getAssets(),
                config.toString(),
                mParallelDeliveryCallback
        ));
        Log.i(TAG,"ALPR engine initialized: " + AlprUtils.resultToString(alprResult));
    }

    @Override
    public void onDestroy() {
        Log.i(TAG, "onDestroy " + this);
        // Tear down the native engine; assertIsOk throws on failure.
        final AlprResult result = AlprUtils.assertIsOk(AlprSdk.deInit());
        Log.i(TAG,"ALPR engine deInitialized: " + AlprUtils.resultToString(result));
        super.onDestroy();
    }

    @Override
    public synchronized void onResume() {
        super.onResume();
        mIsPaused = false;
    }

    @Override
    public synchronized void onPause() {
        mIsPaused = true;
        super.onPause();
    }

    /**
     * Sink callback: the camera fragment hands us the overlay view once it is created.
     * Also pushes the detection ROI to the overlay so it can be drawn.
     */
    @Override
    public void setAlprPlateView(@NonNull final AlprPlateView view) {
        mAlprPlateView = view;
        if (mParallelDeliveryCallback != null) {
            mParallelDeliveryCallback.setAlprPlateView(view);
        }
        final List roi = getDetectROI();
        assert(roi.size() == 4);
        // NOTE(review): RectF's constructor is (left, top, right, bottom); the
        // (0,2,1,3) indexing implies the ROI list is ordered [left, right, top,
        // bottom] — confirm against the SDK's "detect_roi" documentation.
        mAlprPlateView.setDetectROI(
                new RectF(
                        roi.get(0).floatValue(),
                        roi.get(2).floatValue(),
                        roi.get(1).floatValue(),
                        roi.get(3).floatValue()
                )
        );
    }

    /**
     * Sink callback: receives a YUV_420_888 camera frame and runs ALPR inference on it.
     * Always closes {@code image} before returning (required to keep the ImageReader
     * queue from stalling).
     *
     * @param image the camera frame (caller transfers ownership; closed here)
     * @param jpegOrientation rotation in degrees (0/90/180/270)
     */
    @Override
    public void setImage(@NonNull final Image image, final int jpegOrientation) {
        // On sequential mode we just ignore the processing
        if (mIsProcessing || mIsPaused) {
            Log.d(TAG, "Inference function not returned yet: Processing or paused");
            image.close();
            return;
        }
        mIsProcessing = true;
        final Size imageSize = new Size(image.getWidth(), image.getHeight());
        // Orientation
        // Convert from degree to real EXIF orientation
        int exifOrientation;
        switch (jpegOrientation) {
            case 90: exifOrientation = ExifInterface.ORIENTATION_ROTATE_90; break;
            case 180: exifOrientation = ExifInterface.ORIENTATION_ROTATE_180; break;
            case 270: exifOrientation = ExifInterface.ORIENTATION_ROTATE_270; break;
            case 0: default: exifOrientation = ExifInterface.ORIENTATION_NORMAL; break;
        }
        // Update image for the async callback. For 90/270 rotations the displayed
        // frame is transposed, hence the swapped width/height.
        if (mParallelDeliveryCallback != null) {
            mParallelDeliveryCallback.setImageSize((jpegOrientation % 180) == 0 ? imageSize : new Size(imageSize.getHeight(), imageSize.getWidth()), jpegOrientation);
        }
        // The actual ALPR inference is done here
        // Do not worry about the time taken to perform the inference, the caller
        // (most likely the camera fragment) set the current image using a background thread.
        final Image.Plane[] planes = image.getPlanes();
        final long startTimeInMillis = SystemClock.uptimeMillis();
        final AlprResult result = /*AlprUtils.assertIsOk*/(AlprSdk.process(
                SDK_IMAGE_TYPE.ULTALPR_SDK_IMAGE_TYPE_YUV420P,
                planes[0].getBuffer(),
                planes[1].getBuffer(),
                planes[2].getBuffer(),
                imageSize.getWidth(),
                imageSize.getHeight(),
                planes[0].getRowStride(),
                planes[1].getRowStride(),
                planes[2].getRowStride(),
                planes[1].getPixelStride(),
                exifOrientation
        ));
        final long durationInMillis = SystemClock.uptimeMillis() - startTimeInMillis; // Total time: Inference + image processing (chroma conversion, rotation...)
        if (mParallelDeliveryCallback != null) {
            mParallelDeliveryCallback.setDurationTime(durationInMillis);
        }
        // Release the image and signal the inference process is finished
        image.close();
        mIsProcessing = false;
        if (result.isOK()) {
            Log.d(TAG, AlprUtils.resultToString(result));
        } else {
            Log.e(TAG, AlprUtils.resultToString(result));
        }
        // Display the result if sequential mode. Otherwise, let the parallel callback
        // display the result when provided.
        // Starting version 3.2 the callback will be called even if the result is empty
        if (mAlprPlateView != null && (mParallelDeliveryCallback == null || (result.numPlates() == 0 && result.numCars() == 0))) { // means sequential call or no plate/car to expect from the parallel delivery callback
            mAlprPlateView.setResult(result, (jpegOrientation % 180) == 0 ? imageSize : new Size(imageSize.getHeight(), imageSize.getWidth()), durationInMillis, jpegOrientation);
        }
    }

    /**
     * Gets the base folder defining a path where the application can write private
     * data.
     * @return The path
     */
    protected String getDebugInternalDataPath() {
        return mDebugInternalDataPath;
    }

    /**
     * Gets the server url used to activate the license. Please contact us to get the correct URL.
     * e.g. https://localhost:3600
     * @return The URL
     */
    protected String getActivationServerUrl() {
        return "";
    }

    /** @return the master/slave activation key (empty in this demo). */
    protected String getActivationMasterOrSlaveKey() {
        return "";
    }

    /**
     * Returns the layout Id for the activity
     * @return
     */
    protected abstract int getLayoutResId();

    /**
     * Returns JSON config to be used to initialize the ALPR/ANPR SDK.
     * @return The JSON config
     */
    protected abstract JSONObject getJsonConfig();

    /**
     * Whether results should be delivered through the parallel callback (true)
     * or returned synchronously from process() (false).
     */
    protected abstract boolean isParallelDeliveryEnabled();

    /** @return the 4-element detection ROI handed to the engine and overlay. */
    protected abstract List getDetectROI();
}
================================================
FILE: demo/src/main/java/org/buyun/alpr/common/AlprBackgroundTask.java
================================================
package org.buyun.alpr.common;
import android.os.Handler;
import android.os.HandlerThread;
/**
 * Small wrapper around a {@link HandlerThread} + {@link Handler} pair used to
 * run work off the UI thread. All methods are synchronized, so start/stop/post
 * may be called from any thread.
 */
public class AlprBackgroundTask {
    private Handler mHandler;
    private HandlerThread mThread;

    /** @return the handler bound to the background looper, or null when stopped. */
    public synchronized final Handler getHandler() {
        return mHandler;
    }

    /** @return true between a successful {@link #start} and {@link #stop}. */
    public synchronized final boolean isRunning() { return mHandler != null; }

    /**
     * Starts the background thread. No-op if already running.
     * @param threadName name given to the new thread (for debugging)
     */
    public synchronized void start(final String threadName) {
        if (mThread != null) {
            return;
        }
        mThread = new HandlerThread(threadName);
        mThread.start();
        mHandler = new Handler(mThread.getLooper());
    }

    /**
     * Quits the looper (after pending messages) and joins the thread.
     *
     * BUGFIX: the original nulled {@code mThread}/{@code mHandler} only when
     * {@code join()} completed normally; an InterruptedException left the object
     * permanently "running" (start() became a no-op forever) and swallowed the
     * interrupt status. The fields are now cleared in a finally block and the
     * interrupt flag is restored.
     */
    public synchronized void stop() {
        if (mThread == null) {
            return;
        }
        mThread.quitSafely();
        try {
            mThread.join();
        } catch (InterruptedException e) {
            // Preserve the caller's interrupt status instead of swallowing it.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        } finally {
            mThread = null;
            mHandler = null;
        }
    }

    /** Posts {@code r} to the background handler; dropped if not running. */
    public synchronized void post(final Runnable r) {
        if (mHandler != null) {
            mHandler.post(r);
        }
    }
}
================================================
FILE: demo/src/main/java/org/buyun/alpr/common/AlprCameraFragment.java
================================================
package org.buyun.alpr.common;
import android.Manifest;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.core.app.ActivityCompat;
import androidx.fragment.app.DialogFragment;
import androidx.fragment.app.Fragment;
import androidx.core.content.ContextCompat;
import android.text.TextUtils;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import org.buyun.alpr.R; // FIXME(dmi): must remove
public class AlprCameraFragment extends Fragment
implements ActivityCompat.OnRequestPermissionsResultCallback {
static final int REQUEST_CAMERA_PERMISSION = 1;
static final String FRAGMENT_DIALOG = "dialog";
static final String TAG = AlprCameraFragment.class.getCanonicalName();
static final int VIDEO_FORMAT = ImageFormat.YUV_420_888; // All Android devices are required to support this format
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
/**
* Using #2: processing and pending.
*/
static final int MAX_IMAGES = 2;
/**
* The camera preview size will be chosen to be the smallest frame by pixel size capable of
* containing a DESIRED_SIZE x DESIRED_SIZE square.
*/
static final int MINIMUM_PREVIEW_SIZE = 320;
private Size mPreferredSize = null;
/**
* ID of the current {@link CameraDevice}.
*/
private String mCameraId;
private int mJpegOrientation = 1;
/**
* An {@link AlprGLSurfaceView} for camera preview.
*/
private AlprGLSurfaceView mGLSurfaceView;
private AlprPlateView mPlateView;
/**
* A {@link CameraCaptureSession } for camera preview.
*/
private CameraCaptureSession mCaptureSession;
/**
* A reference to the opened {@link CameraDevice}.
*/
private CameraDevice mCameraDevice;
/**
* The {@link android.util.Size} of camera preview.
*/
private Size mPreviewSize;
private AlprCameraFragmentSink mSink;
private final AlprBackgroundTask mBackgroundTaskCamera = new AlprBackgroundTask();
private final AlprBackgroundTask mBackgroundTaskDrawing = new AlprBackgroundTask();
private final AlprBackgroundTask mBackgroundTaskInference = new AlprBackgroundTask();
/**
* {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its state.
*/
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
    @Override
    public void onOpened(@NonNull CameraDevice cameraDevice) {
        // This method is called when the camera is opened. We start camera preview here.
        // Release the open/close lock acquired in openCamera() before starting the session.
        mCameraOpenCloseLock.release();
        mCameraDevice = cameraDevice;
        createCameraCaptureSession();
    }

    @Override
    public void onDisconnected(@NonNull CameraDevice cameraDevice) {
        // Camera was disconnected (e.g. grabbed by another app): release and forget it.
        mCameraOpenCloseLock.release();
        cameraDevice.close();
        mCameraDevice = null;
    }

    @Override
    public void onError(@NonNull CameraDevice cameraDevice, int error) {
        // Fatal camera error: clean up and finish the hosting activity if still attached.
        mCameraOpenCloseLock.release();
        cameraDevice.close();
        mCameraDevice = null;
        Activity activity = getActivity();
        if (null != activity) {
            activity.finish();
        }
    }
};
private boolean mClosingCamera = false;
/**
* An {@link ImageReader} that handles still image capture.
*/
private ImageReader mImageReaderInference;
private ImageReader mImageReaderDrawing;
/**
* This a callback object for the {@link ImageReader}. "onImageAvailable" will be called when a
* still image is ready to be saved.
*/
// Shared listener for BOTH ImageReaders (inference + drawing). It tells which
// reader fired by comparing surfaces, then routes the frame either to the
// GL preview (drawing) or to the sink for ALPR inference. The receiver is
// responsible for closing the Image.
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener
        = new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader reader) {
        // Drop frames while closeCamera() is in progress to avoid using released readers.
        if (mClosingCamera) {
            Log.d(TAG, "Closing camera");
            return;
        }
        try {
            // acquireLatestImage() skips stale frames; may return null if none pending.
            final Image image = reader.acquireLatestImage();
            if (image == null) {
                return;
            }
            final boolean isForDrawing = (reader.getSurface() == mImageReaderDrawing.getSurface());
            if (isForDrawing) {
                /*mBackgroundTaskDrawing.post(() ->*/ mGLSurfaceView.setImage(image, mJpegOrientation)/*)*/;
            }
            else {
                /*mBackgroundTaskInference.post(() ->*/ mSink.setImage(image, mJpegOrientation)/*)*/;
            }
        } catch (final Exception e) {
            e.printStackTrace();
            Log.e(TAG, e.toString());
        }
    }
};
private CaptureRequest.Builder mCaptureRequestBuilder;
/**
* {@link CaptureRequest} generated by {@link #mCaptureRequestBuilder}
*/
private CaptureRequest mCaptureRequest;
/**
* A {@link Semaphore} to prevent the app from exiting before closing the camera.
*/
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
/**
* Orientation of the camera sensor
*/
private int mSensorOrientation;
/**
* Default constructor automatically called when the fragment is recreated. Required.
* https://stackoverflow.com/questions/51831053/could-not-find-fragment-constructor
*/
// Default constructor automatically called when the fragment is recreated. Required.
// https://stackoverflow.com/questions/51831053/could-not-find-fragment-constructor
// NOTE(review): after system re-creation mPreferredSize/mSink stay null — confirm
// the hosting activity re-supplies them (see onResume's null checks).
public AlprCameraFragment() {
    // nothing special here
}
// Private constructor used by newInstance(): captures the desired preview size
// and the sink that will receive camera frames.
private AlprCameraFragment(final Size preferredSize, final AlprCameraFragmentSink sink) {
    mPreferredSize = preferredSize;
    mSink = sink;
}
/**
* Public function to be called to create the fragment.
* @param preferredSize
* @return
*/
/**
 * Public factory to be called to create the fragment.
 * @param preferredSize preferred camera preview size
 * @param sink receiver for camera frames and the plate overlay view
 * @return a new fragment instance
 */
public static AlprCameraFragment newInstance(final Size preferredSize, final AlprCameraFragmentSink sink) {
    return new AlprCameraFragment(preferredSize, sink);
}
/** Inflates the camera layout (GL preview + plate overlay). */
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
                         Bundle savedInstanceState) {
    return inflater.inflate(R.layout.fragment_camera, container, false);
}
/** Caches the preview surface and the plate-overlay view from the inflated layout. */
@Override
public void onViewCreated(final View view, Bundle savedInstanceState) {
    mGLSurfaceView = (AlprGLSurfaceView) view.findViewById(R.id.glSurfaceView);
    mPlateView = (AlprPlateView) view.findViewById(R.id.plateView);
    //mPlateView.setBackgroundColor(Color.RED); // debug aid: make the overlay visible
}
// No extra setup needed once the hosting activity exists.
@Override
public void onActivityCreated(Bundle savedInstanceState) {
    super.onActivityCreated(savedInstanceState);
}
/**
 * Starts the worker threads, hands the plate overlay to the sink, then opens
 * the camera. Synchronized with onPause() so start/stop cannot interleave.
 */
@Override
public synchronized void onResume() {
    super.onResume();
    startBackgroundThreads();
    // Forward the plateView to the sink
    if (mSink != null && mPlateView != null) {
        mSink.setAlprPlateView(mPlateView);
    }
    // Open the camera
    openCamera(mGLSurfaceView.getWidth(), mGLSurfaceView.getHeight());
}
/** Mirror of onResume(): closes the camera first, then stops the worker threads. */
@Override
public synchronized void onPause() {
    closeCamera();
    stopBackgroundThreads();
    super.onPause();
}
/**
 * Asks for the CAMERA runtime permission; shows a rationale dialog first when
 * the platform recommends it (user previously denied).
 */
private void requestCameraPermission() {
    if (shouldShowRequestPermissionRationale(Manifest.permission.CAMERA)) {
        new ConfirmationDialog().show(getChildFragmentManager(), FRAGMENT_DIALOG);
    } else {
        requestPermissions(new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSION);
    }
}
/**
 * Handles the CAMERA permission result: shows an error dialog on denial.
 * On grant, nothing is done here — the camera opens on the next onResume().
 */
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
                                       @NonNull int[] grantResults) {
    if (requestCode == REQUEST_CAMERA_PERMISSION) {
        if (grantResults.length != 1 || grantResults[0] != PackageManager.PERMISSION_GRANTED) {
            ErrorDialog.newInstance(getString(R.string.request_permission))
                    .show(getChildFragmentManager(), FRAGMENT_DIALOG);
        }
    } else {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    }
}
/**
* Shows a {@link Toast} on the UI thread.
*
* @param text The message to show
*/
/**
 * Shows a {@link Toast} on the UI thread. Silently does nothing when the
 * fragment is detached from its activity.
 *
 * @param text The message to show
 */
private void showToast(final String text) {
    final Activity activity = getActivity();
    if (activity == null) {
        return; // fragment not attached; nothing to show the toast on
    }
    activity.runOnUiThread(new Runnable() {
        @Override
        public void run() {
            Toast.makeText(activity, text, Toast.LENGTH_SHORT).show();
        }
    });
}
/**
* Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
* width and height are at least as large as the minimum of both, or an exact match if possible.
*
* @param choices The list of sizes that the camera supports for the intended output class
* @param width The minimum desired width
* @param height The minimum desired height
* @return The optimal {@code Size}, or an arbitrary one if none were big enough
*/
/**
 * Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
 * width and height are at least as large as the minimum of both, or an exact match if possible.
 *
 * Fix: the candidate lists used raw types ({@code List}/{@code ArrayList}); they are
 * now properly parameterized as {@code List<Size>} — behavior is unchanged.
 *
 * @param choices The list of sizes that the camera supports for the intended output class
 * @param width   The minimum desired width
 * @param height  The minimum desired height
 * @return The optimal {@code Size}, or an arbitrary one if none were big enough
 */
private static Size chooseOptimalSize(final Size[] choices, final int width, final int height) {
    // Never accept anything smaller than MINIMUM_PREVIEW_SIZE on either edge.
    final int minSize = Math.max(Math.min(width, height), MINIMUM_PREVIEW_SIZE);
    final Size desiredSize = new Size(width, height);

    // Collect the supported resolutions that are at least as big as the preview Surface
    boolean exactSizeFound = false;
    final List<Size> bigEnough = new ArrayList<>();
    final List<Size> tooSmall = new ArrayList<>();
    for (final Size option : choices) {
        if (option.equals(desiredSize)) {
            // Set the flag but don't return yet so that remaining sizes will still be logged.
            exactSizeFound = true;
        }
        if (option.getHeight() >= minSize && option.getWidth() >= minSize) {
            bigEnough.add(option);
        } else {
            tooSmall.add(option);
        }
    }
    Log.i(TAG, "Desired size: " + desiredSize + ", min size: " + minSize + "x" + minSize);
    Log.i(TAG, "Valid preview sizes: [" + TextUtils.join(", ", bigEnough) + "]");
    Log.i(TAG, "Rejected preview sizes: [" + TextUtils.join(", ", tooSmall) + "]");
    if (exactSizeFound) {
        Log.i(TAG, "Exact size match found.");
        return desiredSize;
    }
    // Pick the smallest of those, assuming we found any
    if (bigEnough.size() > 0) {
        final Size chosenSize = Collections.min(bigEnough, new CompareSizesByArea());
        Log.i(TAG, "Chosen size: " + chosenSize.getWidth() + "x" + chosenSize.getHeight());
        return chosenSize;
    } else {
        Log.e(TAG, "Couldn't find any suitable preview size");
        return choices[0];
    }
}
/**
* Sets up member variables related to camera.
*
*/
/**
 * Sets up member variables related to camera: picks the first back-facing camera,
 * computes the JPEG orientation and preview size, and fixes the aspect ratio of
 * the preview/overlay views. Returns early once a suitable camera is found.
 */
@SuppressWarnings("SuspiciousNameCombination")
private void setUpCameraOutputs() {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics
                    = manager.getCameraCharacteristics(cameraId);
            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }
            StreamConfigurationMap map = characteristics.get(
                    CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }
            mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            // JPEG orientation
            // https://developer.android.com/reference/android/hardware/camera2/CaptureRequest#JPEG_ORIENTATION
            // NOTE(review): the reference formula uses "+ 270" only for front cameras;
            // here it is applied with the ORIENTATIONS table offsets instead of the raw
            // display rotation — verify on a device with a non-90° sensor orientation.
            int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            mJpegOrientation = (ORIENTATIONS.get(rotation) + mSensorOrientation + 270) % 360;
            // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize =
                    chooseOptimalSize(
                            map.getOutputSizes(SurfaceTexture.class),
                            mPreferredSize.getWidth(),
                            mPreferredSize.getHeight());
            // We fit the aspect ratio of TextureView to the size of preview we picked.
            // In portrait the preview dimensions are swapped (sensor is landscape).
            final int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mGLSurfaceView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
                mPlateView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mGLSurfaceView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
                mPlateView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }
            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        ErrorDialog.newInstance(getString(R.string.camera_error))
                .show(getChildFragmentManager(), FRAGMENT_DIALOG);
    }
}
/**
* Opens the camera specified by {@link AlprCameraFragment#mCameraId}.
*/
/**
 * Opens the camera specified by {@link AlprCameraFragment#mCameraId}.
 * Requests the CAMERA permission first if it has not been granted; in that
 * case the open is retried on the next onResume().
 *
 * @param width  preview surface width (currently unused; size comes from mPreferredSize)
 * @param height preview surface height (currently unused)
 */
private void openCamera(int width, int height) {
    if (ContextCompat.checkSelfPermission(getActivity(), Manifest.permission.CAMERA)
            != PackageManager.PERMISSION_GRANTED) {
        requestCameraPermission();
        return;
    }
    setUpCameraOutputs();
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        // The lock is released by mStateCallback once the camera reports a state.
        if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
            throw new RuntimeException("Time out waiting to lock camera opening.");
        }
        manager.openCamera(mCameraId, mStateCallback, mBackgroundTaskCamera.getHandler());
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (InterruptedException e) {
        throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
    }
}
/**
 * Closes the current {@link CameraDevice} together with its capture session
 * and the two image readers, in reverse order of creation.
 */
private void closeCamera() {
    try {
        mClosingCamera = true;
        // Block until no concurrent open/close is in flight.
        mCameraOpenCloseLock.acquire();
        // Session first, then the device, then the readers that fed it.
        if (mCaptureSession != null) {
            mCaptureSession.close();
            mCaptureSession = null;
        }
        if (mCameraDevice != null) {
            mCameraDevice.close();
            mCameraDevice = null;
        }
        if (mImageReaderInference != null) {
            mImageReaderInference.close();
            mImageReaderInference = null;
        }
        if (mImageReaderDrawing != null) {
            mImageReaderDrawing.close();
            mImageReaderDrawing = null;
        }
    } catch (InterruptedException e) {
        throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
    } finally {
        // Always release the lock and clear the closing flag, even on failure.
        mCameraOpenCloseLock.release();
        mClosingCamera = false;
    }
}
/**
 * Starts a background threads
 *
 * Spawns the three worker threads used by this fragment: one running
 * inference, one rendering results, and one servicing camera callbacks.
 */
private void startBackgroundThreads() {
    mBackgroundTaskInference.start("InferenceBackgroundThread");
    mBackgroundTaskDrawing.start("DrawingBackgroundThread");
    mBackgroundTaskCamera.start("CameraBackgroundThread");
}
/**
 * Stops the background threads
 *
 * Counterpart of {@link #startBackgroundThreads()}; stops the inference,
 * drawing and camera worker threads.
 */
private void stopBackgroundThreads() {
    mBackgroundTaskInference.stop();
    mBackgroundTaskDrawing.stop();
    mBackgroundTaskCamera.stop();
}
/**
 * Creates a new {@link CameraCaptureSession} for camera preview.
 *
 * Two {@link ImageReader}s are created at the preview size — one feeding the
 * inference pipeline and one feeding the on-screen drawing — and both of
 * their surfaces are attached as targets of a single repeating
 * TEMPLATE_RECORD capture request.
 */
private void createCameraCaptureSession() {
    try {
        // Create Image readers
        mImageReaderInference = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(),
                VIDEO_FORMAT, MAX_IMAGES);
        mImageReaderInference.setOnImageAvailableListener(
                mOnImageAvailableListener, mBackgroundTaskCamera.getHandler());
        // NOTE(review): both readers share the same listener instance — confirm
        // mOnImageAvailableListener tells the two readers apart.
        mImageReaderDrawing = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(),
                VIDEO_FORMAT, MAX_IMAGES);
        mImageReaderDrawing.setOnImageAvailableListener(
                mOnImageAvailableListener, mBackgroundTaskCamera.getHandler());
        // We set up a CaptureRequest.Builder with the output Surface to the image reader
        mCaptureRequestBuilder
                = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
        //mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<>(1, 25));
        //mCaptureRequestBuilder.set(CaptureRequest.CONTROL_MODE,
        //        CaptureRequest.CONTROL_MODE_USE_SCENE_MODE);
        //mCaptureRequestBuilder.set(CaptureRequest.CONTROL_SCENE_MODE,
        //        CaptureRequest.CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO);
        mCaptureRequestBuilder.addTarget(mImageReaderInference.getSurface());
        mCaptureRequestBuilder.addTarget(mImageReaderDrawing.getSurface());
        // Here, we create a CameraCaptureSession
        mCameraDevice.createCaptureSession(Arrays.asList(mImageReaderInference.getSurface(), mImageReaderDrawing.getSurface()),
                new CameraCaptureSession.StateCallback() {
                    @Override
                    public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
                        // The camera is already closed
                        if (null == mCameraDevice) {
                            return;
                        }
                        // When the session is ready, we start displaying the preview.
                        mCaptureSession = cameraCaptureSession;
                        try {
                            // Auto focus should be continuous
                            mCaptureRequestBuilder.set(
                                    CaptureRequest.CONTROL_AF_MODE,
                                    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                            // Flash is automatically enabled when necessary.
                            mCaptureRequestBuilder.set(
                                    CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
                            // Finally, we start grabbing the frames
                            mCaptureRequest = mCaptureRequestBuilder.build();
                            mCaptureSession.setRepeatingRequest(mCaptureRequest,
                                    null, mBackgroundTaskCamera.getHandler());
                        } catch (CameraAccessException e) {
                            e.printStackTrace();
                        }
                    }
                    @Override
                    public void onConfigureFailed(
                            @NonNull CameraCaptureSession cameraCaptureSession) {
                        showToast("Failed");
                    }
                }, mBackgroundTaskCamera.getHandler()
        );
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
/**
 * Sink receiving the camera preview frames and the overlay view used to
 * render plate-recognition results.
 *
 * Nested interfaces are implicitly {@code static} and their members
 * implicitly {@code public}; the redundant modifiers were removed.
 */
public interface AlprCameraFragmentSink {
    /**
     * Hands over the overlay view on which recognition results are drawn.
     *
     * @param view the plate overlay view
     */
    void setAlprPlateView(@NonNull final AlprPlateView view);

    /**
     * Delivers a camera frame to the sink.
     *
     * @param image           the camera frame
     * @param jpegOrientation rotation in degrees (0, 90, 180 or 270)
     */
    void setImage(@NonNull final Image image, final int jpegOrientation);
}
/**
 * Compares two {@code Size}s based on their areas.
 *
 * Typed as {@code Comparator<Size>}: with the raw {@code Comparator},
 * {@code compare(Size, Size)} would be an overload of, not an override of,
 * {@code compare(Object, Object)} and the class would fail to compile.
 */
static class CompareSizesByArea implements Comparator<Size> {
    @Override
    public int compare(Size lhs, Size rhs) {
        // Promote to long so width * height cannot overflow an int.
        return Long.compare((long) lhs.getWidth() * lhs.getHeight(),
                (long) rhs.getWidth() * rhs.getHeight());
    }
}
/**
 * Shows an error message dialog.
 */
public static class ErrorDialog extends DialogFragment {
    private static final String ARG_MESSAGE = "message";

    /** Builds a dialog instance carrying {@code message} in its arguments bundle. */
    public static ErrorDialog newInstance(String message) {
        final Bundle arguments = new Bundle();
        arguments.putString(ARG_MESSAGE, message);
        final ErrorDialog dialog = new ErrorDialog();
        dialog.setArguments(arguments);
        return dialog;
    }

    @NonNull
    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        final Activity activity = getActivity();
        // Dismissing the error finishes the hosting activity.
        final DialogInterface.OnClickListener onOk = new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialogInterface, int i) {
                activity.finish();
            }
        };
        return new AlertDialog.Builder(activity)
                .setMessage(getArguments().getString(ARG_MESSAGE))
                .setPositiveButton(android.R.string.ok, onOk)
                .create();
    }
}
/**
 * Shows OK/Cancel confirmation dialog about camera permission.
 */
public static class ConfirmationDialog extends DialogFragment {
    @NonNull
    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        final Fragment parent = getParentFragment();
        // OK: forward the actual permission request to the parent fragment.
        final DialogInterface.OnClickListener onAccept = new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                parent.requestPermissions(new String[]{Manifest.permission.CAMERA},
                        REQUEST_CAMERA_PERMISSION);
            }
        };
        // Cancel: without the camera the demo cannot run, so finish the activity.
        final DialogInterface.OnClickListener onDecline = new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                final Activity activity = parent.getActivity();
                if (activity != null) {
                    activity.finish();
                }
            }
        };
        return new AlertDialog.Builder(getActivity())
                .setMessage(R.string.request_permission)
                .setPositiveButton(android.R.string.ok, onAccept)
                .setNegativeButton(android.R.string.cancel, onDecline)
                .create();
    }
}
}
================================================
FILE: demo/src/main/java/org/buyun/alpr/common/AlprGLSurfaceView.java
================================================
package org.buyun.alpr.common;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.graphics.PixelFormat;
import android.media.Image;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.Log;
import android.view.SurfaceHolder;
/**
 * GL surface view
 *
 * Renders YUV420_888 camera frames ({@link Image}) through an OpenGL ES 2.0
 * YUV -> RGB fragment shader. The view is its own {@link GLSurfaceView.Renderer}
 * and, being in RENDERMODE_WHEN_DIRTY, only redraws when {@link #setImage}
 * posts a new frame and calls requestRender().
 */
public class AlprGLSurfaceView extends GLSurfaceView implements GLSurfaceView.Renderer {
    private static final String TAG = AlprGLSurfaceView.class.getCanonicalName();

    private static final int FLOAT_SIZE_BYTES = 4;
    private static final int SHORT_SIZE_BYTES = 2;
    // Each vertex carries 5 floats: x, y, z position followed by u, v texture coords.
    private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
    private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
    private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;

    // Full-screen quad (two triangles). One vertex/index table per supported
    // JPEG orientation: the UVs rotate the sampled texture, positions stay fixed.
    private static final float[] TRIANGLE_VERTICES_DATA_0 = {
            1, -1, 0, 1, 1, // 0: bottom/right
            1, 1, 0, 1, 0, // 1: top/right
            -1, 1, 0, 0, 0, // 2: top/left
            -1, -1, 0, 0, 1 // 3: bottom/left
    };
    private static final short[] INDICES_DATA_0 = {
            0, 1, 2, // triangle #1: bottom/right, top/right, top/left
            2, 3, 0 // triangle #2: top/left, bottom/left, bottom/right
    };
    private static final float[] TRIANGLE_VERTICES_DATA_90 = {
            1, -1, 0, 1, 0,
            1, 1, 0, 0, 0,
            -1, 1, 0, 0, 1,
            -1, -1, 0, 1, 1,
    };
    private static final short[] INDICES_DATA_90 = {
            3, 0, 1,
            1, 2, 3
    };
    private static final float[] TRIANGLE_VERTICES_DATA_180 = {
            1, -1, 0, 0, 0,
            1, 1, 0, 0, 1,
            -1, 1, 0, 1, 1,
            -1, -1, 0, 1, 0,
    };
    private static final short[] INDICES_DATA_180 = {
            2, 3, 0,
            0, 1, 2
    };
    private static final float[] TRIANGLE_VERTICES_DATA_270 = {
            1, -1, 0, 0, 1,
            1, 1, 0, 1, 1,
            -1, 1, 0, 1, 0,
            -1, -1, 0, 0, 0,
    };
    private static final short[] INDICES_DATA_270 = {
            1, 2, 3,
            3, 0, 1
    };

    // Active vertex/index buffers; re-filled by updateVertices() when the
    // orientation changes.
    private FloatBuffer mTriangleVertices;
    private ShortBuffer mIndices;
    private int mJpegOrientation = 0;
    private boolean mJpegOrientationChanged = false;

    // Pass-through vertex shader: forwards position and texture coordinate.
    private static final String VERTEX_SHADER_SOURCE = "precision mediump float;" +
            "attribute vec4 aPosition;\n" +
            "attribute vec2 aTextureCoord;\n" +
            "varying vec2 vTextureCoord;\n" +
            "void main() {\n" +
            "  gl_Position = aPosition;\n" +
            "  vTextureCoord = aTextureCoord;\n" +
            "}\n";

    // Fragment shader: samples the three YUV planes as separate textures and
    // converts to RGB (BT.601 video-range coefficients inlined in main()).
    private static final String FRAGMENT_SHADER_SOURCE = "precision mediump float;" +
            "varying vec2 vTextureCoord;" +
            "" +
            "uniform sampler2D SamplerY; " +
            "uniform sampler2D SamplerU;" +
            "uniform sampler2D SamplerV;" +
            "" +
            "const mat3 yuv2rgb = mat3(1.164, 0, 1.596, 1.164, -0.391, -0.813, 1.164, 2.018, 0);" +
            "" +
            "void main() {    " +
            "    vec3 yuv = vec3(1.1643 * (texture2D(SamplerY, vTextureCoord).r - 0.06274)," +
            "                    texture2D(SamplerU, vTextureCoord).r - 0.5019," +
            "                    texture2D(SamplerV, vTextureCoord).r - 0.5019);" +
            "    vec3 rgb = yuv * yuv2rgb;    " +
            "    gl_FragColor = vec4(rgb, 1.0);" +
            "}  ";

    // GL program and its attribute/uniform locations, resolved in onSurfaceCreated().
    private int mProgram;
    private int maPositionHandle;
    private int maTextureHandle;
    private int muSamplerYHandle;
    private int muSamplerUHandle;
    private int muSamplerVHandle;
    // One texture name per YUV plane.
    private int[] mTextureY = new int[1];
    private int[] mTextureU = new int[1];
    private int[] mTextureV = new int[1];
    private boolean mSurfaceCreated;
    // Frame pending rendering; owned by this view until onDrawFrame() closes it.
    private Image mImage = null;
    private int mRatioWidth = 0;
    private int mRatioHeight = 0;

    public AlprGLSurfaceView(android.content.Context context) {
        super(context);
        initGL();
    }

    public AlprGLSurfaceView(android.content.Context context, android.util.AttributeSet attrs) {
        super(context, attrs);
        initGL();
    }

    /**
     * Configures the GLES2 context (on-demand rendering, translucent surface)
     * and allocates the direct vertex/index buffers with the 0-degree tables.
     */
    private void initGL() {
        setEGLContextClientVersion(2);
        setEGLConfigChooser(8, 8, 8, 8, 16, 0);
        setRenderer(this);
        getHolder().setFormat(PixelFormat.TRANSLUCENT);
        // Only redraw when requestRender() is called from setImage().
        setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);

        mTriangleVertices = ByteBuffer.allocateDirect(TRIANGLE_VERTICES_DATA_0.length
                * FLOAT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asFloatBuffer();
        mTriangleVertices.put(TRIANGLE_VERTICES_DATA_0).position(0);
        mIndices = ByteBuffer.allocateDirect(INDICES_DATA_0.length
                * SHORT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asShortBuffer();
        mIndices.put(INDICES_DATA_0).position(0);
    }

    /**
     * Sets the aspect ratio for this view. The size of the view will be measured based on the ratio
     * calculated from the parameters. Note that the actual sizes of parameters don't matter, that
     * is, calling setAspectRatio(2, 3) and setAspectRatio(4, 6) make the same result.
     *
     * @param width  Relative horizontal size
     * @param height Relative vertical size
     */
    public void setAspectRatio(int width, int height) {
        if (width < 0 || height < 0) {
            throw new IllegalArgumentException("Size cannot be negative.");
        }
        mRatioWidth = width;
        mRatioHeight = height;
        requestLayout();
    }

    /**
     * Queues a camera frame for rendering. The frame is closed immediately if
     * the surface is not ready or a previous frame is still being rendered;
     * otherwise it is kept until onDrawFrame() consumes and closes it.
     *
     * @param image           the frame to render; ownership passes to this view
     * @param jpegOrientation rotation in degrees (0, 90, 180 or 270 — any other
     *                        value makes updateVertices() throw)
     */
    public void setImage(final Image image, final int jpegOrientation){
        if (!isReady()) {
            Log.i(TAG, "Not ready");
            image.close();
            return;
        }
        // Drop the new frame rather than queueing: only one pending frame at a time.
        if (mImage != null) {
            Log.i(TAG, "Already rendering previous image");
            image.close();
            return;
        }

        // We need to save the image as the rendering is asynchronous
        mImage = image;
        if (mJpegOrientation != jpegOrientation) {
            Log.i(TAG, "Orientation changed: " + mJpegOrientation + " -> " + jpegOrientation);
            mJpegOrientation = jpegOrientation;
            mJpegOrientationChanged = true;
        }

        // Signal the surface as dirty to force redrawing
        requestRender();
    }

    /** Returns true once the underlying surface has been created. */
    public boolean isReady(){
        return mSurfaceCreated;
    }

    /**
     * Measures the view, letter-boxing to the ratio set via setAspectRatio()
     * when one has been requested.
     */
    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        int width = MeasureSpec.getSize(widthMeasureSpec);
        int height = MeasureSpec.getSize(heightMeasureSpec);
        if (0 == mRatioWidth || 0 == mRatioHeight) {
            setMeasuredDimension(width, height);
        } else {
            if (width < height * mRatioWidth / mRatioHeight) {
                setMeasuredDimension(width, width * mRatioHeight / mRatioWidth);
            } else {
                setMeasuredDimension(height * mRatioWidth / mRatioHeight, height);
            }
        }
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        super.surfaceCreated(holder);
        mSurfaceCreated = true;
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        mSurfaceCreated = false;
        // Release any pending frame so the camera's image pool is not starved.
        if (mImage != null) {
            mImage.close();
            mImage = null;
        }
        super.surfaceDestroyed(holder);
    }

    /**
     * Renders the pending frame: uploads the Y, U and V planes as three
     * textures and draws the orientation-matched quad. The frame is closed
     * once drawn. Runs on the GL thread.
     */
    @Override
    public void onDrawFrame(GL10 glUnused) {
        if (mImage == null) {
            return;
        }

        // Re-bind the vertex/index data matching the new orientation.
        if (mJpegOrientationChanged) {
            updateVertices();
            mJpegOrientationChanged = false;
        }

        // 90/270 rotations swap the displayed width/height.
        final boolean swapSize = (mJpegOrientation % 180) != 0;
        final int imageWidth = mImage.getWidth();
        final int imageHeight = mImage.getHeight();
        final AlprUtils.AlprTransformationInfo tInfo = new AlprUtils.AlprTransformationInfo(swapSize ? imageHeight : imageWidth, swapSize ? imageWidth : imageHeight, getWidth(), getHeight());
        GLES20.glViewport(tInfo.getXOffset(), tInfo.getYOffset(), tInfo.getWidth(), tInfo.getHeight());

        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT /*| GLES20.GL_DEPTH_BUFFER_BIT*/);

        GLES20.glUseProgram(mProgram);
        checkGlError("glUseProgram");

        final Image.Plane[] planes = mImage.getPlanes();
        final ByteBuffer bufferY = planes[0].getBuffer();
        final ByteBuffer bufferU = planes[1].getBuffer();
        final ByteBuffer bufferV = planes[2].getBuffer();
        final int uvPixelStride = planes[1].getPixelStride();
        // The Y texture is uploaded rowStride wide, so any row padding ends up
        // in the texture (sampled but off to the right of the visible quad).
        final int bufferWidthY = planes[0].getRowStride();
        final int bufferHeightY = imageHeight;
        // pixelStride 1 -> planar (rowStride wide); 2 -> interleaved (half as wide).
        final int bufferWidthUV = (planes[1].getRowStride() >> (uvPixelStride - 1));
        final int bufferHeightUV = (bufferHeightY >> 1); // Always YUV420_888 -> half-height
        // Interleaved UV is uploaded as LUMINANCE_ALPHA so .r still reads the first byte.
        final int uvFormat = uvPixelStride == 1 ? GLES20.GL_LUMINANCE : GLES20.GL_LUMINANCE_ALPHA; // Interleaved UV

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureY[0]);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, bufferWidthY, bufferHeightY, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, bufferY);
        GLES20.glUniform1i(muSamplerYHandle, 0);

        GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureU[0]);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, uvFormat, bufferWidthUV, bufferHeightUV, 0, uvFormat, GLES20.GL_UNSIGNED_BYTE, bufferU);
        GLES20.glUniform1i(muSamplerUHandle, 1);

        GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureV[0]);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, uvFormat, bufferWidthUV, bufferHeightUV, 0, uvFormat, GLES20.GL_UNSIGNED_BYTE, bufferV);
        GLES20.glUniform1i(muSamplerVHandle, 2);

        GLES20.glDrawElements(GLES20.GL_TRIANGLES, INDICES_DATA_0.length, GLES20.GL_UNSIGNED_SHORT, mIndices);

        // Frame consumed: return it to the camera's image pool.
        mImage.close();
        mImage = null;
    }

    @Override
    public void onSurfaceChanged(GL10 glUnused, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
        // GLU.gluPerspective(glUnused, 45.0f, (float)width/(float)height, 0.1f, 100.0f);
    }

    /**
     * Builds the shader program, resolves attribute/uniform locations, primes
     * the vertex data and creates the three plane textures. Runs on the GL
     * thread every time the GL context is (re)created.
     */
    @Override
    public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
        GLES20.glEnable(GLES20.GL_BLEND);
        GLES20.glDisable(GLES20.GL_DEPTH_TEST);
        GLES20.glDisable(GLES20.GL_DITHER);
        GLES20.glDisable(GLES20.GL_STENCIL_TEST);
        GLES20.glDisable(GL10.GL_DITHER);

        String extensions = GLES20.glGetString(GL10.GL_EXTENSIONS);
        Log.d(TAG, "OpenGL extensions=" +extensions);

        // Ignore the passed-in GL10 interface, and use the GLES20
        // class's static methods instead.
        mProgram = createProgram(VERTEX_SHADER_SOURCE, FRAGMENT_SHADER_SOURCE);
        if (mProgram == 0) {
            return;
        }
        maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
        checkGlError("glGetAttribLocation aPosition");
        if (maPositionHandle == -1) {
            throw new RuntimeException("Could not get attrib location for aPosition");
        }
        maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
        checkGlError("glGetAttribLocation aTextureCoord");
        if (maTextureHandle == -1) {
            throw new RuntimeException("Could not get attrib location for aTextureCoord");
        }

        muSamplerYHandle = GLES20.glGetUniformLocation(mProgram, "SamplerY");
        if (muSamplerYHandle == -1) {
            throw new RuntimeException("Could not get uniform location for SamplerY");
        }
        muSamplerUHandle = GLES20.glGetUniformLocation(mProgram, "SamplerU");
        if (muSamplerUHandle == -1) {
            throw new RuntimeException("Could not get uniform location for SamplerU");
        }
        muSamplerVHandle = GLES20.glGetUniformLocation(mProgram, "SamplerV");
        if (muSamplerVHandle == -1) {
            throw new RuntimeException("Could not get uniform location for SamplerV");
        }

        updateVertices();

        // One GL_LINEAR / CLAMP_TO_EDGE texture per plane (Y, U, V).
        GLES20.glGenTextures(1, mTextureY, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureY[0]);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

        GLES20.glGenTextures(1, mTextureU, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureU[0]);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

        GLES20.glGenTextures(1, mTextureV, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureV[0]);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    }

    /**
     * Compiles a single shader; returns 0 (and logs the info log) on failure.
     */
    private int loadShader(int shaderType, String source) {
        int shader = GLES20.glCreateShader(shaderType);
        if (shader != 0) {
            GLES20.glShaderSource(shader, source);
            GLES20.glCompileShader(shader);
            int[] compiled = new int[1];
            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
            if (compiled[0] == 0) {
                Log.e(TAG, "Could not compile shader " + shaderType + ":");
                Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
                GLES20.glDeleteShader(shader);
                shader = 0;
            }
        }
        return shader;
    }

    /**
     * Compiles and links the vertex + fragment shaders into a program;
     * returns 0 (and logs the info log) on failure.
     */
    private int createProgram(String vertexSource, String fragmentSource) {
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
        if (vertexShader == 0) {
            return 0;
        }

        int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
        if (pixelShader == 0) {
            return 0;
        }

        int program = GLES20.glCreateProgram();
        if (program != 0) {
            GLES20.glAttachShader(program, vertexShader);
            checkGlError("glAttachShader");
            GLES20.glAttachShader(program, pixelShader);
            checkGlError("glAttachShader");
            GLES20.glLinkProgram(program);
            int[] linkStatus = new int[1];
            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
            if (linkStatus[0] != GLES20.GL_TRUE) {
                Log.e(TAG, "Could not link program: ");
                Log.e(TAG, GLES20.glGetProgramInfoLog(program));
                GLES20.glDeleteProgram(program);
                program = 0;
            }
        }
        return program;
    }

    /**
     * Loads the vertex/index tables matching mJpegOrientation into the direct
     * buffers and (re)binds the position and texture-coordinate attributes.
     * Must run on the GL thread with the program already created.
     */
    private void updateVertices() {
        mTriangleVertices.rewind();
        mIndices.rewind();
        switch (mJpegOrientation) {
            case 90:
                mTriangleVertices.put(TRIANGLE_VERTICES_DATA_90).position(0);
                mIndices.put(INDICES_DATA_90).position(0);
                break;
            case 180:
                mTriangleVertices.put(TRIANGLE_VERTICES_DATA_180).position(0);
                mIndices.put(INDICES_DATA_180).position(0);
                break;
            case 270:
                mTriangleVertices.put(TRIANGLE_VERTICES_DATA_270).position(0);
                mIndices.put(INDICES_DATA_270).position(0);
                break;
            case 0:
                mTriangleVertices.put(TRIANGLE_VERTICES_DATA_0).position(0);
                mIndices.put(INDICES_DATA_0).position(0);
                break;
            default:
                throw new RuntimeException("Invalid orientation:" + mJpegOrientation);
        }

        // Position: 3 floats at offset 0; UV: 2 floats at offset 3 — both
        // share the same interleaved buffer via the 5-float stride.
        mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
        GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
        checkGlError("glVertexAttribPointer maPosition");
        mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
        GLES20.glEnableVertexAttribArray(maPositionHandle);
        checkGlError("glEnableVertexAttribArray maPositionHandle");
        GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
        checkGlError("glVertexAttribPointer maTextureHandle");
        GLES20.glEnableVertexAttribArray(maTextureHandle);
        checkGlError("glEnableVertexAttribArray maTextureHandle");
    }

    /**
     * Throws (after logging) on the first pending GL error; no-op when the
     * error queue is empty.
     */
    private void checkGlError(String op) {
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            Log.e(TAG, op + ": glError " + error);
            throw new RuntimeException(op + ": glError " + error);
        }
    }
}
================================================
FILE: demo/src/main/java/org/buyun/alpr/common/AlprImage.java
================================================
package org.buyun.alpr.common;
import android.media.Image;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Reference-counted wrapper around a camera {@link Image}.
 *
 * The underlying frame is closed when the count returns to zero (or below)
 * in {@link #releaseRef()}, or as a last resort in {@link #finalize()}.
 */
public class AlprImage {
    // Wrapped frame; set to null once it has been closed.
    Image mImage;
    // Outstanding reference count; starts at zero.
    final AtomicInteger mRefCount;

    private AlprImage(final Image image) {
        assert image != null;
        mImage = image;
        mRefCount = new AtomicInteger(0);
    }

    /** Static factory wrapping {@code image} with a zero reference count. */
    public static AlprImage newInstance(final Image image) {
        return new AlprImage(image);
    }

    /** Returns the wrapped frame (null once it has been closed). */
    public final Image getImage() {
        assert mRefCount.intValue() >= 0;
        return mImage;
    }

    /**
     * Increments the reference count.
     *
     * @return this wrapper, for chaining
     */
    public AlprImage takeRef() {
        assert mRefCount.intValue() >= 0;
        if (mRefCount.intValue() < 0) {
            return null;
        }
        mRefCount.incrementAndGet();
        return this;
    }

    /**
     * Decrements the reference count; closes and drops the frame once the
     * count reaches zero (or below).
     */
    public void releaseRef() {
        assert mRefCount.intValue() >= 0;
        final int refCount = mRefCount.decrementAndGet();
        if (refCount <= 0) {
            mImage.close();
            mImage = null;
        }
    }

    @Override
    protected synchronized void finalize() {
        // Safety net: close the frame if it was never released via releaseRef().
        // (The previous guard required mRefCount < 0, which can never hold while
        // mImage is still non-null — releaseRef() nulls mImage as soon as the
        // count drops to zero — so never-referenced frames leaked.)
        if (mImage != null) {
            mImage.close();
            mImage = null;
        }
    }
}
================================================
FILE: demo/src/main/java/org/buyun/alpr/common/AlprPlateView.java
================================================
package org.buyun.alpr.common;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.DashPathEffect;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.PointF;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.Typeface;
import android.util.AttributeSet;
import android.util.Log;
import android.util.Size;
import android.util.TypedValue;
import android.view.View;
import androidx.annotation.NonNull;
import org.buyun.alpr.sdk.AlprResult;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
public class AlprPlateView extends View {
static final String TAG = AlprPlateView.class.getCanonicalName();
static final float LPCI_MIN_CONFIDENCE = 80.f;
static final float VCR_MIN_CONFIDENCE = 80.f;
static final float VMMR_MIN_CONFIDENCE = 60.f;
static final float VBSR_MIN_CONFIDENCE = 70.f;
static final float VMMR_FUSE_DEFUSE_MIN_CONFIDENCE = 40.f;
static final int VMMR_FUSE_DEFUSE_MIN_OCCURRENCES = 3;
static final float TEXT_NUMBER_SIZE_DIP = 20;
static final float TEXT_LPCI_SIZE_DIP = 15;
static final float TEXT_CAR_SIZE_DIP = 15;
static final float TEXT_INFERENCE_TIME_SIZE_DIP = 20;
static final int STROKE_WIDTH = 10;
private final Paint mPaintTextNumber;
private final Paint mPaintTextNumberBackground;
private final Paint mPaintTextLPCI;
private final Paint mPaintTextLPCIBackground;
private final Paint mPaintTextCar;
private final Paint mPaintTextCarBackground;
private final Paint mPaintBorder;
private final Paint mPaintTextDurationTime;
private final Paint mPaintTextDurationTimeBackground;
private final Paint mPaintDetectROI;
private int mRatioWidth = 0;
private int mRatioHeight = 0;
private int mOrientation = 0;
private long mDurationTimeMillis;
private Size mImageSize;
private List mPlates = null;
private RectF mDetectROI;
/**
*
* @param context
* @param attrs
*/
public AlprPlateView(final Context context, final AttributeSet attrs) {
super(context, attrs);
// final Typeface fontALPR = Typeface.createFromAsset(context.getAssets(), "GlNummernschildEng-XgWd.ttf");
mPaintTextNumber = new Paint();
mPaintTextNumber.setTextSize(TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, TEXT_NUMBER_SIZE_DIP, getResources().getDisplayMetrics()));
mPaintTextNumber.setColor(Color.BLACK);
mPaintTextNumber.setStyle(Paint.Style.FILL_AND_STROKE);
// mPaintTextNumber.setTypeface(Typeface.create(fontALPR, Typeface.BOLD));
mPaintTextNumberBackground = new Paint();
mPaintTextNumberBackground.setColor(Color.YELLOW);
mPaintTextNumberBackground.setStrokeWidth(STROKE_WIDTH);
mPaintTextNumberBackground.setStyle(Paint.Style.FILL_AND_STROKE);
mPaintTextLPCI = new Paint();
mPaintTextLPCI.setTextSize(TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, TEXT_LPCI_SIZE_DIP, getResources().getDisplayMetrics()));
mPaintTextLPCI.setColor(Color.WHITE);
mPaintTextLPCI.setStyle(Paint.Style.FILL_AND_STROKE);
// mPaintTextLPCI.setTypeface(Typeface.create(fontALPR, Typeface.BOLD));
mPaintTextLPCIBackground = new Paint();
mPaintTextLPCIBackground.setColor(Color.BLUE);
mPaintTextLPCIBackground.setStrokeWidth(STROKE_WIDTH);
mPaintTextLPCIBackground.setStyle(Paint.Style.FILL_AND_STROKE);
mPaintTextCar = new Paint();
mPaintTextCar.setTextSize(TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, TEXT_CAR_SIZE_DIP, getResources().getDisplayMetrics()));
mPaintTextCar.setColor(Color.BLACK);
mPaintTextCar.setStyle(Paint.Style.FILL_AND_STROKE);
// mPaintTextCar.setTypeface(Typeface.create(fontALPR, Typeface.BOLD));
mPaintTextCarBackground = new Paint();
mPaintTextCarBackground.setColor(Color.RED);
mPaintTextCarBackground.setStrokeWidth(STROKE_WIDTH);
mPaintTextCarBackground.setStyle(Paint.Style.FILL_AND_STROKE);
mPaintBorder = new Paint();
mPaintBorder.setStrokeWidth(STROKE_WIDTH);
mPaintBorder.setPathEffect(null);
mPaintBorder.setColor(Color.YELLOW);
mPaintBorder.setStyle(Paint.Style.STROKE);
mPaintTextDurationTime = new Paint();
mPaintTextDurationTime.setTextSize(TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, TEXT_INFERENCE_TIME_SIZE_DIP, getResources().getDisplayMetrics()));
mPaintTextDurationTime.setColor(Color.WHITE);
mPaintTextDurationTime.setStyle(Paint.Style.FILL_AND_STROKE);
// mPaintTextDurationTime.setTypeface(Typeface.create(fontALPR, Typeface.BOLD));
mPaintTextDurationTimeBackground = new Paint();
mPaintTextDurationTimeBackground.setColor(Color.BLACK);
mPaintTextDurationTimeBackground.setStrokeWidth(STROKE_WIDTH);
mPaintTextDurationTimeBackground.setStyle(Paint.Style.FILL_AND_STROKE);
mPaintDetectROI = new Paint();
mPaintDetectROI.setColor(Color.RED);
mPaintDetectROI.setStrokeWidth(STROKE_WIDTH);
mPaintDetectROI.setStyle(Paint.Style.STROKE);
mPaintDetectROI.setPathEffect(new DashPathEffect(new float[] {10,20}, 0));
}
/** Sets the detection region of interest drawn as a dashed rectangle (may be null). */
public void setDetectROI(final RectF roi) {
    mDetectROI = roi;
}
/**
 * Sets the aspect ratio used by {@link #onMeasure}; only the width/height
 * ratio matters, not the absolute values. Zero leaves the view unconstrained.
 *
 * @param width  relative horizontal size
 * @param height relative vertical size
 */
public void setAspectRatio(int width, int height) {
    if (0 > width || 0 > height) {
        throw new IllegalArgumentException("Size cannot be negative.");
    }
    mRatioHeight = height;
    mRatioWidth = width;
    requestLayout();
}
/**
 * Measures the view, letter-boxing to the ratio set via setAspectRatio()
 * when one has been requested.
 */
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
    Log.i(TAG, "onMeasure");
    super.onMeasure(widthMeasureSpec, heightMeasureSpec);
    final int measuredWidth = MeasureSpec.getSize(widthMeasureSpec);
    final int measuredHeight = MeasureSpec.getSize(heightMeasureSpec);
    // No aspect ratio requested yet: accept whatever the parent offers.
    if (mRatioWidth == 0 || mRatioHeight == 0) {
        setMeasuredDimension(measuredWidth, measuredHeight);
        return;
    }
    // Fit the requested ratio inside the offered bounds.
    if (measuredWidth < measuredHeight * mRatioWidth / mRatioHeight) {
        setMeasuredDimension(measuredWidth, measuredWidth * mRatioHeight / mRatioWidth);
    } else {
        setMeasuredDimension(measuredHeight * mRatioWidth / mRatioHeight, measuredHeight);
    }
}
/**
 * Publishes a new recognition result and schedules a redraw.
 *
 * {@code @NonNull} removed from the primitive parameters: nullability
 * annotations are meaningless on {@code long}/{@code int}.
 *
 * @param result       the recognition result to extract plates from
 * @param imageSize    size of the processed image, used to map coordinates
 *                     onto this view
 * @param durationTime processing duration (milliseconds, per mDurationTimeMillis)
 * @param orientation  frame orientation in degrees
 */
public synchronized void setResult(@NonNull final AlprResult result, @NonNull final Size imageSize, final long durationTime, final int orientation) {
    mPlates = AlprUtils.extractPlates(result);
    mImageSize = imageSize;
    mDurationTimeMillis = durationTime;
    mOrientation = orientation;
    // Not on the UI thread (results come from a background task): use postInvalidate.
    postInvalidate();
}
@Override
public synchronized void draw(final Canvas canvas) {
super.draw(canvas);
if (mImageSize == null) {
Log.i(TAG, "Not initialized yet");
return;
}
final String mInferenceTimeMillisString = "Point your camera at a License Plate ";
Rect boundsTextmInferenceTimeMillis = new Rect();
mPaintTextDurationTime.getTextBounds(mInferenceTimeMillisString, 0, mInferenceTimeMillisString.length(), boundsTextmInferenceTimeMillis);
int left = (canvas.getWidth() - boundsTextmInferenceTimeMillis.width()) / 2;
int top = 20;
canvas.drawRect(left, top, left + boundsTextmInferenceTimeMillis.width() + 5, top + boundsTextmInferenceTimeMillis.height() + 20, mPaintTextDurationTimeBackground);
canvas.drawText(mInferenceTimeMillisString, left, 20 + boundsTextmInferenceTimeMillis.height(), mPaintTextDurationTime);
// Transformation info
final AlprUtils.AlprTransformationInfo tInfo = new AlprUtils.AlprTransformationInfo(mImageSize.getWidth(), mImageSize.getHeight(), getWidth(), getHeight());
// ROI
if (mDetectROI != null && !mDetectROI.isEmpty()) {
canvas.drawRect(
new RectF(
tInfo.transformX(mDetectROI.left),
tInfo.transformY(mDetectROI.top),
tInfo.transformX(mDetectROI.right),
tInfo.transformY(mDetectROI.bottom)
),
mPaintDetectROI
);
}
// Plates
if (mPlates != null && !mPlates.isEmpty()) {
for (final AlprUtils.Plate plate : mPlates) {
// Transform corners
final float[] plateWarpedBox = plate.getWarpedBox();
final PointF plateCornerA = new PointF(tInfo.transformX(plateWarpedBox[0]), tInfo.transformY(plateWarpedBox[1]));
final PointF plateCornerB = new PointF(tInfo.transformX(plateWarpedBox[2]), tInfo.transformY(plateWarpedBox[3]));
final PointF plateCornerC = new PointF(tInfo.transformX(plateWarpedBox[4]), tInfo.transformY(plateWarpedBox[5]));
final PointF plateCornerD = new PointF(tInfo.transformX(plateWarpedBox[6]), tInfo.transformY(plateWarpedBox[7]));
// Draw border
final Path platePathBorder = new Path();
platePathBorder.moveTo(plateCornerA.x, plateCornerA.y);
platePathBorder.lineTo(plateCornerB.x, plateCornerB.y);
platePathBorder.lineTo(plateCornerC.x, plateCornerC.y);
platePathBorder.lineTo(plateCornerD.x, plateCornerD.y);
platePathBorder.lineTo(plateCornerA.x, plateCornerA.y);
platePathBorder.close();
mPaintBorder.setColor(mPaintTextNumberBackground.getColor());
canvas.drawPath(platePathBorder, mPaintBorder);
// Draw text number
final String number = plate.getNumber();
if (number != null && !number.isEmpty()) {
Rect boundsTextNumber = new Rect();
mPaintTextNumber.getTextBounds(number, 0, number.length(), boundsTextNumber);
final RectF rectTextNumber = new RectF(
plateCornerA.x,
plateCornerA.y - (boundsTextNumber.height() + 10) * 2,
plateCornerA.x + boundsTextNumber.width(),
plateCornerA.y - (boundsTextNumber.height() + 10)
);
final Path pathTextNumber = new Path();
pathTextNumber.moveTo(plateCornerA.x, plateCornerA.y - rectTextNumber.height() - 10);
pathTextNumber.lineTo(Math.max(plateCornerB.x, (plateCornerA.x + rectTextNumber.width())), plateCornerB.y - rectTextNumber.height() - 10);
pathTextNumber.addRect(rectTextNumber, Path.Direction.CCW);
pathTextNumber.close();
canvas.drawPath(pathTextNumber, mPaintTextNumberBackground);
canvas.drawTextOnPath(number, pathTextNumber, 0, 0, mPaintTextNumber);
}
// Draw Car
if (plate.getCar() != null) {
final AlprUtils.Car car = plate.getCar();
if (car.getConfidence() >= 80.f) {
String color = null;
if (car.getColors() != null) {
final AlprUtils.Car.Attribute colorObj0 = car.getColors().get(0); // sorted, most higher confidence first
if (colorObj0.getConfidence() >= VCR_MIN_CONFIDENCE) {
color = colorObj0.getName();
}
else if (car.getColors().size() >= 2) {
final AlprUtils.Car.Attribute colorObj1 = car.getColors().get(1);
final String colorMix = colorObj0.getName() + "/" + colorObj1.getName();
float confidence = colorObj0.getConfidence();
if ("white/silver,silver/white,gray/silver,silver/gray".indexOf(colorMix) != -1) {
confidence += colorObj1.getConfidence();
}
if (confidence >= VCR_MIN_CONFIDENCE) {
color = (colorMix.indexOf("white") == -1) ? "DarkSilver" : "LightSilver";
confidence = Math.max(colorObj0.getConfidence(), colorObj1.getConfidence());
}
}
}
String make = null, model = null;
if (car.getMakesModelsYears() != null) {
final List makesModelsYears = car.getMakesModelsYears();
final AlprUtils.Car.MakeModelYear makeModelYear = makesModelsYears.get(0); // sorted, most higher confidence first
if (makeModelYear.getConfidence() >= VMMR_MIN_CONFIDENCE) {
make = makeModelYear.getMake();
model = makeModelYear.getModel();
}
else {
Map makes = new HashMap<>();
Map occurrences = new HashMap<>();
// Fuse makes
for (final AlprUtils.Car.MakeModelYear mmy : makesModelsYears) {
makes.put(mmy.getMake(), AlprUtils.getOrDefault(makes, mmy.getMake(), 0.f) + mmy.getConfidence()); // Map.getOrDefault requires API level 24
occurrences.put(mmy.getMake(), AlprUtils.getOrDefault(occurrences, mmy.getMake(), 0) + 1); // Map.getOrDefault requires API level 24
}
// Find make with highest confidence. Stream requires Java8
Iterator > itMake = makes.entrySet().iterator();
Map.Entry bestMake = itMake.next();
while (itMake.hasNext()) {
Map.Entry makeE = itMake.next();
if (makeE.getValue() > bestMake.getValue()) {
bestMake = makeE;
}
}
// Model fusion
if (bestMake.getValue() >= VMMR_MIN_CONFIDENCE || (occurrences.get(bestMake.getKey()) >= VMMR_FUSE_DEFUSE_MIN_OCCURRENCES && bestMake.getValue() >= VMMR_FUSE_DEFUSE_MIN_CONFIDENCE)) {
make = bestMake.getKey();
// Fuse models
Map models = new HashMap<>();
for (final AlprUtils.Car.MakeModelYear mmy : makesModelsYears) {
if (make.equals(mmy.getMake())) {
models.put(mmy.getModel(), AlprUtils.getOrDefault(models, mmy.getModel(), 0.f) + mmy.getConfidence()); // Map.getOrDefault requires API level 24
}
}
// Find model with highest confidence. Stream requires Java8
Iterator > itModel = models.entrySet().iterator();
Map.Entry bestModel = itModel.next();
while (itModel.hasNext()) {
Map.Entry modelE = itModel.next();
if (modelE.getValue() > bestModel.getValue()) {
bestModel = modelE;
}
}
model = bestModel.getKey();
}
}
}
String bodyStyle = null;
if (car.getBodyStyles() != null) {
final AlprUtils.Car.Attribute vbsr = car.getBodyStyles().get(0); // sorted, most higher confidence first
if (vbsr.getConfidence() >= VBSR_MIN_CONFIDENCE) {
bodyStyle = vbsr.getName();
}
}
// Transform corners
final float[] carWarpedBox = car.getWarpedBox();
final PointF carCornerA = new PointF(tInfo.transformX(carWarpedBox[0]), tInfo.transformY(carWarpedBox[1]));
final PointF carCornerB = new PointF(tInfo.transformX(carWarpedBox[2]), tInfo.transformY(carWarpedBox[3]));
final PointF carCornerC = new PointF(tInfo.transformX(carWarpedBox[4]), tInfo.transformY(carWarpedBox[5]));
final PointF carCornerD = new PointF(tInfo.transformX(carWarpedBox[6]), tInfo.transformY(carWarpedBox[7]));
// Draw border
final Path carPathBorder = new Path();
carPathBorder.moveTo(carCornerA.x, carCornerA.y);
carPathBorder.lineTo(carCornerB.x, carCornerB.y);
carPathBorder.lineTo(carCornerC.x, carCornerC.y);
carPathBorder.lineTo(carCornerD.x, carCornerD.y);
carPathBorder.lineTo(carCornerA.x, carCornerA.y);
carPathBorder.close();
mPaintBorder.setColor(mPaintTextCarBackground.getColor());
canvas.drawPath(carPathBorder, mPaintBorder);
// Draw car information
final String carText = String.format(
"%s%s%s%s",
make != null ? make : "Car",
model != null ? ", " + model : "",
color != null ? ", " + color : "",
bodyStyle != null ? ", " + bodyStyle : ""
);
Rect boundsTextCar = new Rect();
mPaintTextNumber.getTextBounds(carText, 0, carText.length(), boundsTextCar);
final RectF rectTextNumber = new RectF(
plateCornerA.x,
plateCornerA.y - (boundsTextCar.height() + 5) * 3,
plateCornerA.x + boundsTextCar.width(),
plateCornerA.y - (boundsTextCar.height() + 5) * 2
);
final Path pathTextCar = new Path();
pathTextCar.moveTo(plateCornerA.x, plateCornerA.y - (rectTextNumber.height() + 5) * 2);
pathTextCar.lineTo(Math.max(plateCornerB.x, (plateCornerA.x + rectTextNumber.width())), plateCornerB.y - (rectTextNumber.height() + 5) * 2);
pathTextCar.addRect(rectTextNumber, Path.Direction.CCW);
pathTextCar.close();
canvas.drawPath(pathTextCar, mPaintTextNumberBackground);
canvas.drawTextOnPath(carText, pathTextCar, 0, 0, mPaintTextNumber);
}
}
if (plate.getCountries() != null) {
final AlprUtils.Country country = plate.getCountries().get(0); // sorted, most higher confidence first
if (country.getConfidence() >= LPCI_MIN_CONFIDENCE) {
final String countryString = country.getCode();
Rect boundsConfidenceLPCI = new Rect();
mPaintTextNumber.getTextBounds(countryString, 0, countryString.length(), boundsConfidenceLPCI);
final RectF rectTextLPCI = new RectF(
plateCornerA.x,
plateCornerA.y - (boundsConfidenceLPCI.height() + 10),
plateCornerA.x + (boundsConfidenceLPCI.width() + 10),
plateCornerA.y
);
final Path pathTextLPCI = new Path();
pathTextLPCI.moveTo(plateCornerA.x, plateCornerA.y);
pathTextLPCI.lineTo(Math.max(plateCornerB.x, (plateCornerA.x + boundsConfidenceLPCI.width())), plateCornerB.y);
pathTextLPCI.addRect(rectTextLPCI, Path.Direction.CCW);
pathTextLPCI.close();
canvas.drawPath(pathTextLPCI, mPaintTextNumberBackground);
canvas.drawTextOnPath(countryString, pathTextLPCI, 0, 0, mPaintTextNumber);
}
}
}
}
}
}
================================================
FILE: demo/src/main/java/org/buyun/alpr/common/AlprUtils.java
================================================
package org.buyun.alpr.common;
import android.content.res.AssetFileDescriptor;
import android.content.res.AssetManager;
import android.graphics.PointF;
import android.util.Log;
import androidx.annotation.NonNull;
import org.buyun.alpr.sdk.AlprResult;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
 * Utility class for the ALPR demo: parses SDK JSON results into typed
 * objects, maps image-space coordinates onto the rendering canvas, and
 * loads files from the application assets.
 */
public class AlprUtils {

    static final String TAG = AlprUtils.class.getCanonicalName();

    /**
     * Letterbox transformation from image space to canvas space: the image is
     * uniformly scaled to fit inside the canvas (aspect ratio preserved) and
     * centered; points are mapped with {@link #transformX} / {@link #transformY}.
     */
    public static class AlprTransformationInfo {
        final int mXOffset;  // horizontal centering offset, in canvas pixels
        final int mYOffset;  // vertical centering offset, in canvas pixels
        final float mRatio;  // uniform image -> canvas scale factor
        final int mWidth;    // width of the scaled image on the canvas
        final int mHeight;   // height of the scaled image on the canvas

        public AlprTransformationInfo(final int imageWidth, final int imageHeight, final int canvasWidth, final int canvasHeight) {
            final float xRatio = (float) canvasWidth / (float) imageWidth;
            final float yRatio = (float) canvasHeight / (float) imageHeight;
            // Keep aspect ratio: pick the smaller ratio so the whole image fits.
            mRatio = Math.min(xRatio, yRatio);
            mWidth = (int) (imageWidth * mRatio);
            mHeight = (int) (imageHeight * mRatio);
            // Center the scaled image inside the canvas.
            mXOffset = (canvasWidth - mWidth) >> 1;
            mYOffset = (canvasHeight - mHeight) >> 1;
        }

        /** Maps an image-space x coordinate to canvas space. */
        public float transformX(final float x) { return x * mRatio + mXOffset; }
        /** Maps an image-space y coordinate to canvas space. */
        public float transformY(final float y) { return y * mRatio + mYOffset; }
        /** Maps an image-space point to canvas space. */
        public PointF transform(final PointF p) { return new PointF(transformX(p.x), transformY(p.y)); }
        public int getXOffset() { return mXOffset; }
        public int getYOffset() { return mYOffset; }
        public float getRatio() { return mRatio; }
        public int getWidth() { return mWidth; }
        public int getHeight() { return mHeight; }
    }

    /** Vehicle information attached to a plate (present in results since SDK 3.0.0). */
    static class Car {
        /** A classified attribute (color, body style, ...) with its confidence. */
        static class Attribute {
            private int mKlass;
            private String mName;
            private float mConfidence;
            public int getKlass() { return mKlass; }
            public String getName() { return mName; }
            public float getConfidence() { return mConfidence; }
        }

        /** A make/model/year candidate with its recognition confidence. */
        static class MakeModelYear {
            private int mKlass;
            private String mMake;
            private String mModel;
            private String mYear; // Not integer on purpose, could be interval or...
            private float mConfidence;
            public int getKlass() { return mKlass; }
            public String getMake() { return mMake; }
            public String getModel() { return mModel; }
            public String getYear() { return mYear; }
            public float getConfidence() { return mConfidence; }
        }

        private float mConfidence;
        private float[] mWarpedBox; // 4 corners: x0,y0, x1,y1, x2,y2, x3,y3
        private List<Attribute> mColors;               // sorted, highest confidence first
        private List<Attribute> mBodyStyles;           // sorted, highest confidence first
        private List<MakeModelYear> mMakesModelsYears; // sorted, highest confidence first

        public float[] getWarpedBox() { return mWarpedBox; }
        public float getConfidence() { return mConfidence; }
        public List<Attribute> getColors() { return mColors; }
        public List<Attribute> getBodyStyles() { return mBodyStyles; }
        public List<MakeModelYear> getMakesModelsYears() { return mMakesModelsYears; }
    }

    /**
     * License Plate Country Identification (LPCI) candidate.
     */
    static class Country {
        private int mKlass;
        private String mCode;  // country code as reported by the SDK (e.g. "RUS", "USA")
        private String mName;  // country name in English
        private String mState; // optional: state/region
        private String mOther; // optional: extra information (e.g. vehicle category)
        private float mConfidence;
        public int getKlass() { return mKlass; }
        public String getCode() { return mCode; }
        public String getName() { return mName; }
        public String getState() { return mState; }
        public String getOther() { return mOther; }
        public float getConfidence() { return mConfidence; }
    }

    /**
     * A recognized license plate and its associated metadata.
     */
    static class Plate {
        private String mNumber;                // empty string when the entry carries a car but no plate text
        private float mDetectionConfidence;
        private float mRecognitionConfidence;
        private float[] mWarpedBox;            // 4 corners: x0,y0, x1,y1, x2,y2, x3,y3
        private List<Country> mCountries;      // sorted, highest confidence first; null when absent
        private Car mCar;                      // null when no car information is attached
        public String getNumber() { return mNumber; }
        public float getDetectionConfidence() { return mDetectionConfidence; }
        public float getRecognitionConfidence() { return mRecognitionConfidence; }
        public float[] getWarpedBox() { return mWarpedBox; }
        public List<Country> getCountries() { return mCountries; }
        public Car getCar() { return mCar; }
    }

    /**
     * Extracts the frame identifier from the result JSON.
     * @param result The SDK result to read
     * @return The {@code frame_id} field, or 0 when the JSON is missing,
     *         malformed or has no such field
     */
    static public final long extractFrameId(final AlprResult result) {
        final String jsonString = result.json();
        if (jsonString != null) {
            try {
                final JSONObject jObject = new JSONObject(jsonString);
                return jObject.getLong("frame_id");
            }
            catch (JSONException ignored) {
                // Best effort: a malformed result simply reports frame 0.
            }
        }
        return 0;
    }

    /**
     * Parses the result JSON into a list of {@link Plate} objects.
     * @param result The SDK result to parse
     * @return The plates in JSON order; possibly empty, never null. On a JSON
     *         parsing error the plates decoded so far are returned.
     */
    static public final List<Plate> extractPlates(final AlprResult result) {
        final List<Plate> plates = new LinkedList<>();
        if (!result.isOK() || (result.numPlates() == 0 && result.numCars() == 0)) {
            return plates;
        }
        final String jsonString = result.json();
        if (jsonString == null) { // No plate
            return plates;
        }
        try {
            final JSONObject jObject = new JSONObject(jsonString);
            if (jObject.has("plates")) {
                final JSONArray jPlates = jObject.getJSONArray("plates");
                for (int i = 0; i < jPlates.length(); ++i) {
                    final JSONObject jPlate = jPlates.getJSONObject(i);
                    // The plate itself (backward-compatible with 2.0.0)
                    final Plate plate = new Plate();
                    plate.mWarpedBox = new float[8];
                    if (jPlate.has("text")) { // Starting 3.2 it's possible to have cars without plates when enabled
                        final JSONArray jConfidences = jPlate.getJSONArray("confidences");
                        final JSONArray jWarpedBox = jPlate.getJSONArray("warpedBox");
                        plate.mNumber = jPlate.getString("text");
                        for (int j = 0; j < 8; ++j) {
                            plate.mWarpedBox[j] = (float) jWarpedBox.getDouble(j);
                        }
                        plate.mRecognitionConfidence = (float) jConfidences.getDouble(0);
                        plate.mDetectionConfidence = (float) jConfidences.getDouble(1);
                    }
                    else {
                        plate.mNumber = "";
                        plate.mRecognitionConfidence = 0.f;
                        plate.mDetectionConfidence = 0.f;
                    }
                    // Country candidates (LPCI)
                    if (jPlate.has("country")) {
                        plate.mCountries = new LinkedList<>();
                        final JSONArray jCountries = jPlate.getJSONArray("country");
                        for (int k = 0; k < jCountries.length(); ++k) {
                            final JSONObject jCountry = jCountries.getJSONObject(k);
                            final Country country = new Country();
                            country.mKlass = jCountry.getInt("klass");
                            country.mConfidence = (float) jCountry.getDouble("confidence");
                            country.mCode = jCountry.getString("code");
                            country.mName = jCountry.getString("name");
                            if (jCountry.has("state")) { // optional
                                country.mState = jCountry.getString("state");
                            }
                            if (jCountry.has("other")) { // optional
                                country.mOther = jCountry.getString("other");
                            }
                            plate.mCountries.add(country);
                        }
                    }
                    // Car (Added in 3.0.0)
                    if (jPlate.has("car")) {
                        final JSONObject jCar = jPlate.getJSONObject("car");
                        final JSONArray jCarWarpedBox = jCar.getJSONArray("warpedBox");
                        plate.mCar = new Car();
                        plate.mCar.mConfidence = (float) jCar.getDouble("confidence");
                        plate.mCar.mWarpedBox = new float[8];
                        for (int j = 0; j < 8; ++j) {
                            plate.mCar.mWarpedBox[j] = (float) jCarWarpedBox.getDouble(j);
                        }
                        if (jCar.has("color")) {
                            plate.mCar.mColors = new LinkedList<>();
                            final JSONArray jColors = jCar.getJSONArray("color");
                            for (int k = 0; k < jColors.length(); ++k) {
                                final JSONObject jColor = jColors.getJSONObject(k);
                                final Car.Attribute color = new Car.Attribute();
                                color.mKlass = jColor.getInt("klass");
                                color.mConfidence = (float) jColor.getDouble("confidence");
                                color.mName = jColor.getString("name"); // Name in English
                                plate.mCar.mColors.add(color);
                            }
                        }
                        if (jCar.has("makeModelYear")) {
                            plate.mCar.mMakesModelsYears = new LinkedList<>();
                            final JSONArray jMMYs = jCar.getJSONArray("makeModelYear");
                            for (int k = 0; k < jMMYs.length(); ++k) {
                                final JSONObject jMMY = jMMYs.getJSONObject(k);
                                final Car.MakeModelYear mmy = new Car.MakeModelYear();
                                mmy.mKlass = jMMY.getInt("klass");
                                mmy.mConfidence = (float) jMMY.getDouble("confidence");
                                mmy.mMake = jMMY.getString("make");
                                mmy.mModel = jMMY.getString("model");
                                mmy.mYear = jMMY.get("year").toString(); // Maybe Integer or String or whatever
                                plate.mCar.mMakesModelsYears.add(mmy);
                            }
                        }
                        if (jCar.has("bodyStyle")) {
                            plate.mCar.mBodyStyles = new LinkedList<>();
                            final JSONArray jBodyStyles = jCar.getJSONArray("bodyStyle");
                            for (int k = 0; k < jBodyStyles.length(); ++k) {
                                final JSONObject jBodyStyle = jBodyStyles.getJSONObject(k);
                                final Car.Attribute bodyStyle = new Car.Attribute();
                                bodyStyle.mKlass = jBodyStyle.getInt("klass");
                                bodyStyle.mConfidence = (float) jBodyStyle.getDouble("confidence");
                                bodyStyle.mName = jBodyStyle.getString("name"); // Name in English
                                plate.mCar.mBodyStyles.add(bodyStyle);
                            }
                        }
                    }
                    plates.add(plate);
                }
            }
        }
        catch (JSONException e) {
            e.printStackTrace();
            Log.e(TAG, e.toString());
        }
        return plates;
    }

    /**
     * Backport of {@code Map.getOrDefault}, which requires API level 24.
     * Returns the value mapped to {@code key} (including an explicitly mapped
     * {@code null}), or {@code defaultValue} when the key is absent.
     */
    public static <K, V> V getOrDefault(@NonNull Map<K, V> map, K key, V defaultValue) {
        V v;
        return (((v = map.get(key)) != null) || map.containsKey(key))
                ? v
                : defaultValue;
    }

    /**
     * Checks if the returned result is success. An assertion will be raised if it's not the case.
     * In production you should catch the exception and perform the appropriate action.
     * @param result The result to check
     * @return The same result
     * @throws AssertionError when {@code result.isOK()} is false
     */
    static public final AlprResult assertIsOk(final AlprResult result) {
        if (!result.isOK()) {
            throw new AssertionError("Operation failed: " + result.phrase());
        }
        return result;
    }

    /**
     * Converts the result to a human-readable String (code, phrase, plate count and raw JSON).
     * @param result The result to format
     * @return The formatted string
     */
    static public final String resultToString(final AlprResult result) {
        return "code: " + result.code() + ", phrase: " + result.phrase() + ", numPlates: " + result.numPlates() + ", json: " + result.json();
    }

    /**
     * Opens a file from the application assets as a {@link FileChannel}.
     * @param assets The asset manager to read from
     * @param fileName The asset file name
     * @return The channel, or null on I/O error. The caller must close the
     *         returned object; closing the channel also closes the underlying
     *         stream. NOTE(review): the {@link AssetFileDescriptor} itself is
     *         never closed here — presumably acceptable for this demo, but
     *         worth confirming against Android asset lifecycle rules.
     */
    static public FileChannel readFileFromAssets(final AssetManager assets, final String fileName) {
        FileInputStream inputStream = null;
        try {
            AssetFileDescriptor fileDescriptor = assets.openFd(fileName);
            inputStream = new FileInputStream(fileDescriptor.getFileDescriptor());
            return inputStream.getChannel();
            // To return DirectByteBuffer: fileChannel.map(FileChannel.MapMode.READ_ONLY, fileDescriptor.getStartOffset(), fileDescriptor.getDeclaredLength());
        } catch (IOException e) {
            e.printStackTrace();
            Log.e(TAG, e.toString());
            return null;
        }
    }
}
================================================
FILE: demo/src/main/res/layout/activity_main.xml
================================================
================================================
FILE: demo/src/main/res/layout/fragment_camera.xml
================================================
================================================
FILE: demo/src/main/res/layout-land/fragment_camera.xml
================================================
================================================
FILE: demo/src/main/res/values/base-strings.xml
================================================
Alpr Demo
================================================
FILE: demo/src/main/res/values/colors.xml
================================================
#cc4285f4
================================================
FILE: demo/src/main/res/values/strings.xml
================================================
Picture
Info
This sample needs camera permission.
This device doesn\'t support Camera2 API.
================================================
FILE: demo/src/main/res/values/styles.xml
================================================
================================================
FILE: demo/src/main/res/values/template-dimens.xml
================================================
4dp
8dp
16dp
32dp
64dp
@dimen/margin_medium
@dimen/margin_medium
================================================
FILE: demo/src/main/res/values/template-styles.xml
================================================
================================================
FILE: demo/src/main/res/values-sw600dp/template-dimens.xml
================================================
@dimen/margin_huge
@dimen/margin_medium
================================================
FILE: demo/src/main/res/values-sw600dp/template-styles.xml
================================================
================================================
FILE: demo/src/main/res/values-v11/template-styles.xml
================================================
================================================
FILE: demo/src/main/res/values-v21/base-colors.xml
================================================
================================================
FILE: demo/src/main/res/values-v21/base-template-styles.xml
================================================
================================================
FILE: gradle/wrapper/gradle-wrapper.properties
================================================
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
================================================
FILE: gradle.properties
================================================
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx1536m
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
# AndroidX package structure to make it clearer which packages are bundled with the
# Android operating system, and which are packaged with your app's APK
# https://developer.android.com/topic/libraries/support-library/androidx-rn
android.useAndroidX=true
# Automatically convert third-party libraries to use AndroidX
android.enableJetifier=true
================================================
FILE: gradlew
================================================
#!/usr/bin/env sh
#
# Copyright 2015 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn () {
echo "$*"
}
die () {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin or MSYS, switch paths to Windows format before running java
if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=`expr $i + 1`
done
case $i in
0) set -- ;;
1) set -- "$args0" ;;
2) set -- "$args0" "$args1" ;;
3) set -- "$args0" "$args1" "$args2" ;;
4) set -- "$args0" "$args1" "$args2" "$args3" ;;
5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=`save "$@"`
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
exec "$JAVACMD" "$@"
================================================
FILE: gradlew.bat
================================================
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega
================================================
FILE: settings.gradle
================================================
// Modules composing the build: the ALPR SDK library and the demo application.
include ':sdk', ':demo'
rootProject.name='AlprDemo'