Repository: HarvestProfit/react-native-rectangle-scanner Branch: master Commit: 22fe3cfccf08 Files: 54 Total size: 176.7 KB Directory structure: gitextract_pw_p8jxp/ ├── .eslintrc.json ├── .gitignore ├── CONTRIBUTING.md ├── LICENSE.md ├── README.md ├── RNRectangleScanner.podspec ├── android/ │ ├── .settings/ │ │ └── org.eclipse.buildship.core.prefs │ ├── build.gradle │ ├── gradle/ │ │ └── wrapper/ │ │ ├── gradle-wrapper.jar │ │ └── gradle-wrapper.properties │ ├── gradle.properties │ ├── gradlew │ ├── gradlew.bat │ └── src/ │ └── main/ │ ├── AndroidManifest.xml │ ├── java/ │ │ └── com/ │ │ └── rectanglescanner/ │ │ ├── RNRectangleScannerManager.java │ │ ├── RNRectangleScannerModule.java │ │ ├── RectangleScannerPackage.java │ │ ├── helpers/ │ │ │ ├── CapturedImage.java │ │ │ ├── ImageProcessor.java │ │ │ ├── ImageProcessorMessage.java │ │ │ └── Quadrilateral.java │ │ └── views/ │ │ ├── CameraDeviceController.java │ │ ├── MainView.java │ │ ├── RNRectangleScannerView.java │ │ └── RectangleDetectionController.java │ └── res/ │ └── layout/ │ └── activity_rectangle_scanner.xml ├── example/ │ ├── .gitignore │ ├── App.js │ ├── app.json │ ├── babel.config.js │ ├── package.json │ └── src/ │ ├── ScanDocument/ │ │ ├── CameraControls.js │ │ ├── DocumentScanner.js │ │ ├── index.js │ │ └── styles.js │ └── useIsMultiTasking.js ├── index.js ├── ios/ │ ├── CameraDeviceController.h │ ├── CameraDeviceController.m │ ├── RNRectangleScanner.xcodeproj/ │ │ ├── project.pbxproj │ │ └── xcshareddata/ │ │ └── xcschemes/ │ │ └── RNRectangleScanner.xcscheme │ ├── RNRectangleScannerManager.h │ ├── RNRectangleScannerManager.m │ ├── RNRectangleScannerView.h │ ├── RNRectangleScannerView.m │ ├── RectangleDetectionController.h │ └── RectangleDetectionController.m ├── package.json ├── react-native.config.js └── src/ ├── Filters.js ├── FlashAnimation.js ├── RectangleOverlay.js ├── Scanner.js └── index.d.ts ================================================ FILE CONTENTS 
================================================ ================================================ FILE: .eslintrc.json ================================================ { "env": { "jest": true }, "extends": "airbnb", "parser": "babel-eslint", "rules": { "react/no-unescaped-entities": 0, "react/jsx-filename-extension": [1, { "extensions": [".js", ".jsx"] }], "function-paren-newline": ["error", "consistent"], "object-curly-newline": ["error", { "consistent": true }], "react/destructuring-assignment": 0, "jsx-a11y/accessible-emoji": 0 } } ================================================ FILE: .gitignore ================================================ # OSX # .DS_Store # XDE .expo/ # VSCode .vscode/ jsconfig.json # Xcode # build/ *.pbxuser !default.pbxuser *.mode1v3 !default.mode1v3 *.mode2v3 !default.mode2v3 *.perspectivev3 !default.perspectivev3 xcuserdata *.xccheckout *.moved-aside DerivedData *.hmap *.ipa *.xcuserstate project.xcworkspace # Android/IntelliJ # build/ .idea .gradle local.properties *.iml # node.js # node_modules/ npm-debug.log yarn-debug.log yarn-error.log # BUCK buck-out/ \.buckd/ android/app/libs android/keystores/debug.keystore # generated by bob lib/ # example expo app (ignore ios and android folders) example/ios/ example/android/ ================================================ FILE: CONTRIBUTING.md ================================================ # Contributing ### Issues When opening an issue, try to be specific. For example, if you are opening an issue relating to the build process in android, it is helpful to include a stack trace and the gradle version you are using. I usually will reply to an issue within the first 24hrs or so asking for more information or providing help. If the issue requires a code fix, this will take longer. ### Pull Requests I'm always looking for additional help and am welcome to PRs! One thing to note, I am a big fan of understanding why code is being added or removed. 
So if you open a PR, please reference a link to why that change is being done (ex: Apple's docs say to do this... + link). This helps get the code merged in faster (otherwise, I will search the web and docs for the reason you are providing the PR.) and I think it helps other open programmers too. ### Design of Code This package is built for react developers. This means that the native code should not restrict the javascript functionality and instead supply a robust API. For example, instead of implementing a "Focus on Point" feature in iOS and Android, we instead supply the javascript with an api to focus the camera. The javascript developer can then implement their own algorithm for camera focusing if they wish. **When requesting a feature or creating a PR, you should take this into account** ================================================ FILE: LICENSE.md ================================================ Copyright (c) 2020 GitHub Inc. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
================================================ FILE: README.md ================================================ # `react-native-rectangle-scanner` ### ⚠️ Deprecation Notice ⚠️ iOS and Android have come a long way since this package was first released. Both iOS Vision and Android Google Play Services allows you to use their built in document scanner, both are much much better than the capabilities of this package (including editing the detected boundaries). https://github.com/WebsiteBeaver/react-native-document-scanner-plugin is an NPM package that we switched to which supports those above mentioned native APIs and is also working well in our Expo app. I did release one last version for this package which corrected a few minor things allowing this to work with Expo 50+ in dev client mode. ![Demo gif](images/demo.gif) [![npm](https://img.shields.io/npm/v/react-native-rectangle-scanner.svg)](https://www.npmjs.com/package/react-native-rectangle-scanner) ![Supports Android and iOS](https://img.shields.io/badge/platforms-android%20|%20ios%20-lightgrey.svg) ![MIT License](https://img.shields.io/npm/l/@react-native-community/netinfo.svg) Live photo rectangle detection library useful for scanning documents. On capture, it returns the URIs for the original and a cropped version of the image allowing you to use the images as you want. You can additionally apply filters to adjust the visibility of text on the image (similar to the iOS document scanner filters). - Live detection - Perspective correction and crop of the image - Filters - Flash - Orientation Changes - Camera permission and capabilities detection - Fully customizable UI ## Getting started Install the library using either yarn: ```sh yarn add react-native-rectangle-scanner ``` or npm: ```sh npm install react-native-rectangle-scanner --save ``` you will also need to install `react-native-svg`, which is used for drawing the detected rectangle over the camera view. 
### iOS Only CocoaPods on iOS needs this extra step: ```sh cd ios && pod install && cd .. ``` **NOTE**: you need to be targeting iOS 10 or greater. Your pod file may need `platform :ios, '10.0'` at the top #### Info.plist Add Camera permissions request: Add the `NSCameraUsageDescription` tag, otherwise you will only see a black screen and no camera. iOS needs to know why you want to use the camera. ### Android Only If you do not have it already in your project, you must link openCV in your `settings.gradle` file ```java include ':openCVLibrary310' project(':openCVLibrary310').projectDir = new File(rootProject.projectDir,'../node_modules/react-native-rectangle-scanner/android/openCVLibrary310') ``` #### In android/app/src/main/AndroidManifest.xml Add Camera permissions request: ``` ``` ## Usage This is the most barebones usage of this. It will show a fullscreen camera preview with no controls on it. Calling `this.camera.current.capture()` will trigger a capture and after the image has been captured and processed (cropped, filtered, stored/cached), it will trigger the `onPictureProcessed` callback. ```javascript import React, { Component, useRef } from "react" import { View, Image } from "react-native" import Scanner from "react-native-rectangle-scanner" class DocumentScanner extends Component { handleOnPictureProcessed = ({croppedImage, initialImage}) => { this.props.doSomethingWithCroppedImagePath(croppedImage); this.props.doSomethingWithOriginalImagePath(initialImage); } onCapture = () => { this.camera.current.capture(); } render() { return ( ); } } ``` Above is a very barebones version of the scanner. Check out a full example in [example folder](example/CompleteExample.js). That will handle device specific things, rendering error states, camera controls for different device sizes, multi-tasking mode, etc. This is what I would consider the production ready version of using this package (it's actually very similar to the component(s) that we use in production.)
## Simulators This package works on a simulator. Android has a pretty cool VR world that emulates a camera. On iOS the preview will just be a black screen, and the `onDeviceSetup` property will return false for the `hasCamera` attribute so you can show a custom message like "This device doesn't have a camera". ## Properties | Prop | Default | Type | Description | | :-------------------------- | :-----: | :-------: | :--------------------------------------------------------- | | filterId | `none` | `integer` | The id of the filter to use. [See More](#filters) | | enableTorch | `false` | `bool` | If the flashlight should be turned on | | capturedQuality | `0.5` | `float` | The jpeg quality of the output images | | onTorchChanged | `null` | `func` | Called when the system changes the flash state | | onRectangleDetected | `null` | `func` | Called when the system detects a rectangle on the image, sends the coordinates | | onPictureTaken | `null` | `func` | Called after an image is captured. It hasn't been cached yet but it will send you the URIs of where it will store it | | onPictureProcessed | `null` | `func` | Called after an image was captured and cached. It sends the URIs of where it stored the images. | | styles | `null` | `object` | Styles the camera view (works best on fullscreen/flex: 1). | | onErrorProcessingImage | `null` | `func` | Called if there was an error capturing the image. Includes a `message` and the paths it was trying to save if the error was failing to save the image. | | onDeviceSetup | `null` | `func` | Called after the system sets up the camera allowing you to configure the view for different device setups. | | androidPermission | `null` | `object or false` | ANDROID ONLY: Allows specifying the permission object on android or disabling entirely (pass `false`). | ### onDeviceSetup This callback is really important. When you show the Scanner component, it will start setting up the camera.
The `onDeviceSetup({hasCamera, permissionToUseCamera, flashIsAvailable, previewHeightPercent, previewWidthPercent})` contains all the details you need to preset the camera view. `hasCamera` will notify you if the device even has a camera. iOS simulators do not have a camera for example. This gives you the chance to hide the camera preview and show an error or something. `permissionToUseCamera` will tell you if the user has granted permission to use the camera. `flashIsAvailable` tells you if the device has a flashlight that you can use. `previewHeightPercent` and `previewWidthPercent` contain percentages of the portrait view that the preview takes up. This is important because on android devices, there are preset preview sizes that may or may not match the screen size. So you can't just show the preview at full screen or the preview will be stretched. See the example on how I handle this. ### Torch When changing the `enableTorch` property, the system will call the `onTorchChanged({enabled})` callback as well with the new state. This allows you to keep your component state in sync. Natively the torch will get turned off when the component cleans up or after an image is captured. This allows you to update the state. ### Rectangle Detection Rectangle detection does NOT show up on the UI automatically. You must take the coordinates from the `onRectangleDetected({detectedRectangle})` callback and render a view that displays a rectangle over the camera view. This can be done easily with a simple SVG by importing `RectangleOverlay` from this package and feeding it the detected rectangle object. Why not just handle in natively? Because it allows much more customization of the rectangle overlay. For example, you could black out the entire image, except where the detected rectangle is. This also lets you control auto capture and UI changes on detection in javascript. 
#### Auto Capture Auto capturing is handled entirely in the `RectangleOverlay` component by simply setting its `allowDetection={true}` and `onDetectedCapture={this.captureImage}` props. See that component for documentation. #### Focusing iOS and some android devices support `continuous focus` mode on their cameras. This means we don't need to worry about focusing the camera ever. There is a function you can call on the ref `focus()` which will trigger a refocus on android devices. *This will likely get expanded in the future to support points so you can focus on a specific location.* ### Capturing An Image To capture an image, you must create a ref to the component. This ref will allow you to call `capture()` which will trigger the capture asynchronously. Once triggered, it will take the current detected rectangle and crop, apply filters, and transform the image to correct the perspective. It will call `onPictureTaken({croppedImage, initialImage})` containing the URIs of the cropped image and the original image. NOTE: The image still needs to be cached which can take a few ms, so loading the image will not work yet. The picture will then start to be processed and cached. Once done, it will call `onPictureProcessed({croppedImage, initialImage})` containing the URIs of the images. This is called after the image is cached which means you can load the images into the UI. NOTE: There is no UI changes when you capture an image. No screen flash, only a camera sound. This is meant so you can design how you want. *The easiest way is to just use an animated view to flash a white screen.* You can import the `FlashAnimation` component to do this if you want. **NOTE**: captured images are stored in the app's cache directory under the `CACHE_FOLDER_NAME`. This allows you to clear the cached images when you are done. (This is advised although these may get deleted by the system.) **NOTE**: on iOS, it will try to correct the rotation of the image. 
If you are in portrait mode, but the phone is rotated to landscape, it will rotate the captured image automatically. ### Filters Instead of allowing you to customize the contrast, saturation, etc of the image, I prebuilt the filters. This is because the filter controls are massively different between platforms and changing those values results in much different image outputs. Below are the available filters. Honestly, the color controls were pretty bad on android, so the best ones for android are probably just using the Color and Black & White instead of showing all 4 (they are only slightly better than Greyscale and the original photo). | ID | Name | Default | Description | Preview | | -- | ------------- | ------- | -------------------------------------- | -------------------------------------------| | 1 | Color | | Optimized for legibility with color. | ![Color jpeg](images/color.jpeg) | | 2 | Black & White | | Optimized for legibility without color | ![Color jpeg](images/black-and-white.jpeg) | | 3 | Greyscale | | A black & white version of the image | ![Color jpeg](images/greyscale.jpeg) | | 4 | Photo | YES | Just the photo | ![Color jpeg](images/photo.jpeg) | ================================================ FILE: RNRectangleScanner.podspec ================================================ require 'json' package = JSON.parse(File.read(File.join(__dir__, 'package.json'))) Pod::Spec.new do |s| s.name = 'RNRectangleScanner' s.version = package['version'] s.summary = package['description'] s.description = package['description'] s.license = package['license'] s.author = package['author'] s.homepage = 'https://github.com/HarvestProfit/react-native-rectangle-scanner' s.source = { git: 'https://github.com/HarvestProfit/react-native-rectangle-scanner.git', tag: s.version } s.requires_arc = true s.platform = :ios, '10.0' s.preserve_paths = 'README.md', 'package.json', 'index.js' s.source_files = 'ios/**/*.{h,m}' s.dependency 'React' end 
================================================ FILE: android/.settings/org.eclipse.buildship.core.prefs ================================================ connection.project.dir= eclipse.preferences.version=1 ================================================ FILE: android/build.gradle ================================================ buildscript { repositories { mavenCentral() google() maven { // All of React Native (JS, Obj-C sources, Android binaries) is installed from npm url "$rootDir/../node_modules/react-native/android" } } dependencies { classpath("com.android.tools.build:gradle:7.3.1") } } apply plugin: 'com.android.library' android { compileSdkVersion 33 defaultConfig { minSdkVersion 16 targetSdkVersion 33 versionCode 1 versionName "1.0" ndk { abiFilters "armeabi-v7a", "x86" } } } repositories { mavenCentral() } dependencies { implementation 'org.opencv:opencv:4.9.0' implementation 'com.facebook.react:react-native:+' } ================================================ FILE: android/gradle/wrapper/gradle-wrapper.properties ================================================ #Thu Aug 01 13:05:36 CDT 2024 distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists distributionUrl=https\://services.gradle.org/distributions/gradle-7.4-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists ================================================ FILE: android/gradle.properties ================================================ # Project-wide Gradle settings. # IDE (e.g. Android Studio) users: # Gradle settings configured through the IDE *will override* # any settings specified in this file. # For more details on how to configure your build environment visit # http://www.gradle.org/docs/current/userguide/build_environment.html # Specifies the JVM arguments used for the daemon process. # The setting is particularly useful for tweaking memory settings. 
# Default value: -Xmx10248m -XX:MaxPermSize=256m # org.gradle.jvmargs=-Xmx2048m -XX:MaxPermSize=512m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8 # When configured, Gradle will run in incubating parallel mode. # This option should only be used with decoupled projects. More details, visit # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects # org.gradle.parallel=true # android.useDeprecatedNdk=true ================================================ FILE: android/gradlew ================================================ #!/usr/bin/env bash ############################################################################## ## ## Gradle start up script for UN*X ## ############################################################################## # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. DEFAULT_JVM_OPTS="" APP_NAME="Gradle" APP_BASE_NAME=`basename "$0"` # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD="maximum" warn ( ) { echo "$*" } die ( ) { echo echo "$*" echo exit 1 } # OS specific support (must be 'true' or 'false'). cygwin=false msys=false darwin=false case "`uname`" in CYGWIN* ) cygwin=true ;; Darwin* ) darwin=true ;; MINGW* ) msys=true ;; esac # For Cygwin, ensure paths are in UNIX format before anything is touched. if $cygwin ; then [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"` fi # Attempt to set APP_HOME # Resolve links: $0 may be a link PRG="$0" # Need this for relative symlinks. while [ -h "$PRG" ] ; do ls=`ls -ld "$PRG"` link=`expr "$ls" : '.*-> \(.*\)$'` if expr "$link" : '/.*' > /dev/null; then PRG="$link" else PRG=`dirname "$PRG"`"/$link" fi done SAVED="`pwd`" cd "`dirname \"$PRG\"`/" >&- APP_HOME="`pwd -P`" cd "$SAVED" >&- CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar # Determine the Java command to use to start the JVM. 
if [ -n "$JAVA_HOME" ] ; then if [ -x "$JAVA_HOME/jre/sh/java" ] ; then # IBM's JDK on AIX uses strange locations for the executables JAVACMD="$JAVA_HOME/jre/sh/java" else JAVACMD="$JAVA_HOME/bin/java" fi if [ ! -x "$JAVACMD" ] ; then die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME Please set the JAVA_HOME variable in your environment to match the location of your Java installation." fi else JAVACMD="java" which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. Please set the JAVA_HOME variable in your environment to match the location of your Java installation." fi # Increase the maximum file descriptors if we can. if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then MAX_FD_LIMIT=`ulimit -H -n` if [ $? -eq 0 ] ; then if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then MAX_FD="$MAX_FD_LIMIT" fi ulimit -n $MAX_FD if [ $? -ne 0 ] ; then warn "Could not set maximum file descriptor limit: $MAX_FD" fi else warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" fi fi # For Darwin, add options to specify how the application appears in the dock if $darwin; then GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" fi # For Cygwin, switch paths to Windows format before running java if $cygwin ; then APP_HOME=`cygpath --path --mixed "$APP_HOME"` CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` # We build the pattern for arguments to be converted via cygpath ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` SEP="" for dir in $ROOTDIRSRAW ; do ROOTDIRS="$ROOTDIRS$SEP$dir" SEP="|" done OURCYGPATTERN="(^($ROOTDIRS))" # Add a user-defined pattern to the cygpath arguments if [ "$GRADLE_CYGPATTERN" != "" ] ; then OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" fi # Now convert the arguments - kludge to limit ourselves to /bin/sh i=0 for arg in "$@" ; do CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` CHECK2=`echo 
"$arg"|egrep -c "^-"` ### Determine if an option if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` else eval `echo args$i`="\"$arg\"" fi i=$((i+1)) done case $i in (0) set -- ;; (1) set -- "$args0" ;; (2) set -- "$args0" "$args1" ;; (3) set -- "$args0" "$args1" "$args2" ;; (4) set -- "$args0" "$args1" "$args2" "$args3" ;; (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; esac fi # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules function splitJvmOpts() { JVM_OPTS=("$@") } eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME" exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@" ================================================ FILE: android/gradlew.bat ================================================ @if "%DEBUG%" == "" @echo off @rem ########################################################################## @rem @rem Gradle startup script for Windows @rem @rem ########################################################################## @rem Set local scope for the variables with windows NT shell if "%OS%"=="Windows_NT" setlocal @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. set DEFAULT_JVM_OPTS= set DIRNAME=%~dp0 if "%DIRNAME%" == "" set DIRNAME=. 
set APP_BASE_NAME=%~n0 set APP_HOME=%DIRNAME% @rem Find java.exe if defined JAVA_HOME goto findJavaFromJavaHome set JAVA_EXE=java.exe %JAVA_EXE% -version >NUL 2>&1 if "%ERRORLEVEL%" == "0" goto init echo. echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. echo. echo Please set the JAVA_HOME variable in your environment to match the echo location of your Java installation. goto fail :findJavaFromJavaHome set JAVA_HOME=%JAVA_HOME:"=% set JAVA_EXE=%JAVA_HOME%/bin/java.exe if exist "%JAVA_EXE%" goto init echo. echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% echo. echo Please set the JAVA_HOME variable in your environment to match the echo location of your Java installation. goto fail :init @rem Get command-line arguments, handling Windowz variants if not "%OS%" == "Windows_NT" goto win9xME_args if "%@eval[2+2]" == "4" goto 4NT_args :win9xME_args @rem Slurp the command line arguments. set CMD_LINE_ARGS= set _SKIP=2 :win9xME_args_slurp if "x%~1" == "x" goto execute set CMD_LINE_ARGS=%* goto execute :4NT_args @rem Get arguments from the 4NT Shell from JP Software set CMD_LINE_ARGS=%$ :execute @rem Setup the command line set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar @rem Execute Gradle "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% :end @rem End local scope for the variables with windows NT shell if "%ERRORLEVEL%"=="0" goto mainEnd :fail rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of rem the _cmd.exe /c_ return code! 
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 exit /b 1 :mainEnd if "%OS%"=="Windows_NT" endlocal :omega ================================================ FILE: android/src/main/AndroidManifest.xml ================================================ ================================================ FILE: android/src/main/java/com/rectanglescanner/RNRectangleScannerManager.java ================================================ package com.rectanglescanner; import android.app.Activity; import com.rectanglescanner.views.MainView; import com.facebook.react.bridge.WritableMap; import com.facebook.react.common.MapBuilder; import com.facebook.react.uimanager.ThemedReactContext; import com.facebook.react.uimanager.ViewGroupManager; import com.facebook.react.uimanager.annotations.ReactProp; import javax.annotation.Nullable; import java.util.Map; /** * Created by Jake on Jan 6, 2020. */ public class RNRectangleScannerManager extends ViewGroupManager { private static final String REACT_CLASS = "RNRectangleScanner"; private MainView view = null; @Override public String getName() { return REACT_CLASS; } @Override protected MainView createViewInstance(final ThemedReactContext reactContext) { MainView.createInstance(reactContext, (Activity) reactContext.getBaseContext()); view = MainView.getInstance(); return view; } // MARK: Props @ReactProp(name = "enableTorch", defaultBoolean = false) public void setEnableTorch(MainView view, Boolean enable) { view.setEnableTorch(enable); } @ReactProp(name = "capturedQuality", defaultDouble = 0.5) public void setCapturedQuality(MainView view, double quality) { view.setCapturedQuality(quality); } @ReactProp(name = "filterId", defaultInt = 1) public void setFilterId(MainView view, int filterId) { view.setFilterId(filterId); } // Life cycle Events @Override public @Nullable Map getExportedCustomDirectEventTypeConstants() { return MapBuilder.of( "onDeviceSetup", MapBuilder.of("registrationName", "onDeviceSetup"), "onPictureTaken", 
MapBuilder.of("registrationName", "onPictureTaken"), "onPictureProcessed", MapBuilder.of("registrationName", "onPictureProcessed"), "onErrorProcessingImage", MapBuilder.of("registrationName", "onErrorProcessingImage"), "onRectangleDetected", MapBuilder.of("registrationName", "onRectangleDetected"), "onTorchChanged", MapBuilder.of("registrationName", "onTorchChanged") ); } } ================================================ FILE: android/src/main/java/com/rectanglescanner/RNRectangleScannerModule.java ================================================ package com.rectanglescanner; import com.rectanglescanner.views.MainView; import com.facebook.react.bridge.ReactApplicationContext; import com.facebook.react.bridge.ReactContextBaseJavaModule; import com.facebook.react.bridge.ReactMethod; /** * Created by Jake on Jan 6, 2020. */ public class RNRectangleScannerModule extends ReactContextBaseJavaModule{ public RNRectangleScannerModule(ReactApplicationContext reactContext){ super(reactContext); } @Override public String getName() { return "RNRectangleScannerManager"; } @ReactMethod public void start(){ MainView view = MainView.getInstance(); view.startCamera(); } @ReactMethod public void stop(){ MainView view = MainView.getInstance(); view.stopCamera(); } @ReactMethod public void cleanup(){ MainView view = MainView.getInstance(); view.cleanupCamera(); } @ReactMethod public void refresh(){ MainView view = MainView.getInstance(); view.stopCamera(); view.startCamera(); } @ReactMethod public void capture(){ MainView view = MainView.getInstance(); view.capture(); } @ReactMethod public void focus() { MainView view = MainView.getInstance(); view.focusCamera(); } } ================================================ FILE: android/src/main/java/com/rectanglescanner/RectangleScannerPackage.java ================================================ package com.rectanglescanner; import com.facebook.react.ReactPackage; import com.facebook.react.bridge.JavaScriptModule; import 
com.facebook.react.bridge.NativeModule; import com.facebook.react.bridge.ReactApplicationContext; import com.facebook.react.uimanager.ViewManager; import java.util.Arrays; import java.util.Collections; import java.util.List; /** * Created by Jake on Jan 6, 2020. */ public class RectangleScannerPackage implements ReactPackage { @Override public List createNativeModules(ReactApplicationContext reactContext) { return Arrays.asList( new RNRectangleScannerModule(reactContext) ); } @Override public List createViewManagers(ReactApplicationContext reactContext) { return Arrays.asList( new RNRectangleScannerManager() ); } } ================================================ FILE: android/src/main/java/com/rectanglescanner/helpers/CapturedImage.java ================================================ package com.rectanglescanner.helpers; import org.opencv.core.Mat; import org.opencv.core.Point; import org.opencv.core.Size; /** * Created by Jake on Jan 6, 2020. */ public class CapturedImage { public Mat original; public Mat processed; public Point[] previewPoints; public Size previewSize; public Size originalSize; public Point[] originalPoints; public int heightWithRatio; public int widthWithRatio; public CapturedImage(Mat original) { this.original = original; } public Mat getProcessed() { return processed; } public CapturedImage setProcessed(Mat processed) { this.processed = processed; return this; } public void release() { if (processed != null) { processed.release(); } if (original != null) { original.release(); } } } ================================================ FILE: android/src/main/java/com/rectanglescanner/helpers/ImageProcessor.java ================================================ package com.rectanglescanner.helpers; import android.content.Context; import android.content.SharedPreferences; import android.os.Handler; import android.os.Looper; import android.os.Message; import android.preference.PreferenceManager; import android.util.Log; import 
com.rectanglescanner.views.RectangleDetectionController; import com.rectanglescanner.helpers.ImageProcessorMessage; import com.rectanglescanner.helpers.Quadrilateral; import com.rectanglescanner.helpers.CapturedImage; import android.view.Surface; import org.opencv.core.Core; import org.opencv.core.CvType; import org.opencv.core.Mat; import org.opencv.core.MatOfPoint; import org.opencv.core.MatOfPoint2f; import org.opencv.core.Point; import org.opencv.core.Size; import org.opencv.imgcodecs.Imgcodecs; import org.opencv.imgproc.Imgproc; import android.os.Bundle; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import com.facebook.react.bridge.Arguments; /** Created by Jake on Jan 6, 2020. Async processes either the image preview frame to detect rectangles, or the captured image to crop and apply filters. */ public class ImageProcessor extends Handler { private static final String TAG = "ImageProcessor"; private final RectangleDetectionController mMainActivity; private Quadrilateral lastDetectedRectangle = null; public ImageProcessor(Looper looper, RectangleDetectionController mainActivity, Context context) { super(looper); this.mMainActivity = mainActivity; SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(context); } /** Receives an event message to handle async */ public void handleMessage(Message msg) { if (msg.obj.getClass() == ImageProcessorMessage.class) { ImageProcessorMessage obj = (ImageProcessorMessage) msg.obj; String command = obj.getCommand(); Log.d(TAG, "Message Received: " + command + " - " + obj.getObj().toString()); if (command.equals("previewFrame")) { processPreviewFrame((Mat) obj.getObj()); } else if (command.equals("pictureTaken")) { processCapturedImage((Mat) obj.getObj()); } } } /** Detect a rectangle in the current frame from the camera video */ private void processPreviewFrame(Mat frame) { rotateImageForScreen(frame); detectRectangleInFrame(frame); 
frame.release(); mMainActivity.setImageProcessorBusy(false); } /** Process a single frame from the camera video */ private void processCapturedImage(Mat capturedImage) { // Mat capturedImage = Imgcodecs.imdecode(picture, Imgcodecs.IMREAD_UNCHANGED); // picture.release(); Log.d(TAG, "processCapturedImage - imported image " + capturedImage.size().width + "x" + capturedImage.size().height); rotateImageForScreen(capturedImage); CapturedImage doc = cropImageToLatestQuadrilateral(capturedImage); mMainActivity.onProcessedCapturedImage(doc); doc.release(); capturedImage.release(); mMainActivity.setImageProcessorBusy(false); } /** Detects a rectangle from the image and sets the last detected rectangle */ private void detectRectangleInFrame(Mat inputRgba) { ArrayList contours = findContours(inputRgba); Size srcSize = inputRgba.size(); this.lastDetectedRectangle = getQuadrilateral(contours, srcSize); Bundle data = new Bundle(); if (this.lastDetectedRectangle != null) { Bundle quadMap = this.lastDetectedRectangle.toBundle(); data.putBundle("detectedRectangle", quadMap); } else { data.putBoolean("detectedRectangle", false); } mMainActivity.rectangleWasDetected(Arguments.fromBundle(data)); } /** Crops the image to the latest detected rectangle and fixes perspective */ private CapturedImage cropImageToLatestQuadrilateral(Mat capturedImage) { applyFilters(capturedImage); Mat doc; if (this.lastDetectedRectangle != null) { Mat croppedCapturedImage = this.lastDetectedRectangle.cropImageToRectangleSize(capturedImage); doc = fourPointTransform(croppedCapturedImage, this.lastDetectedRectangle.getPointsForSize(croppedCapturedImage.size())); croppedCapturedImage.release(); } else { doc = new Mat(capturedImage.size(), CvType.CV_8UC4); capturedImage.copyTo(doc); } Core.flip(doc.t(), doc, 0); Core.flip(capturedImage.t(), capturedImage, 0); CapturedImage sd = new CapturedImage(capturedImage); sd.originalSize = capturedImage.size(); sd.heightWithRatio = 
Double.valueOf(sd.originalSize.width).intValue(); sd.widthWithRatio = Double.valueOf(sd.originalSize.height).intValue(); return sd.setProcessed(doc); } private Quadrilateral getQuadrilateral(ArrayList contours, Size srcSize) { int height = Double.valueOf(srcSize.height).intValue(); int width = Double.valueOf(srcSize.width).intValue(); Size size = new Size(width, height); Log.i(TAG, "Size----->" + size); for (MatOfPoint c : contours) { MatOfPoint2f c2f = new MatOfPoint2f(c.toArray()); double peri = Imgproc.arcLength(c2f, true); MatOfPoint2f approx = new MatOfPoint2f(); Imgproc.approxPolyDP(c2f, approx, 0.02 * peri, true); Point[] points = approx.toArray(); // select biggest 4 angles polygon // if (points.length == 4) { Point[] foundPoints = sortPoints(points); if (insideArea(foundPoints, size)) { return new Quadrilateral(c, foundPoints, new Size(srcSize.width, srcSize.height)); } // } } return null; } private Point[] sortPoints(Point[] src) { ArrayList srcPoints = new ArrayList<>(Arrays.asList(src)); Point[] result = { null, null, null, null }; Comparator sumComparator = new Comparator() { @Override public int compare(Point lhs, Point rhs) { return Double.compare(lhs.y + lhs.x, rhs.y + rhs.x); } }; Comparator diffComparator = new Comparator() { @Override public int compare(Point lhs, Point rhs) { return Double.compare(lhs.y - lhs.x, rhs.y - rhs.x); } }; // top-left corner = minimal sum result[0] = Collections.min(srcPoints, sumComparator); // bottom-right corner = maximal sum result[2] = Collections.max(srcPoints, sumComparator); // top-right corner = minimal difference result[1] = Collections.min(srcPoints, diffComparator); // bottom-left corner = maximal difference result[3] = Collections.max(srcPoints, diffComparator); return result; } private boolean insideArea(Point[] rp, Size size) { int width = Double.valueOf(size.width).intValue(); int height = Double.valueOf(size.height).intValue(); int minimumSize = width / 10; boolean isANormalShape = rp[0].x != rp[1].x 
&& rp[1].y != rp[0].y && rp[2].y != rp[3].y && rp[3].x != rp[2].x; boolean isBigEnough = ((rp[1].x - rp[0].x >= minimumSize) && (rp[2].x - rp[3].x >= minimumSize) && (rp[3].y - rp[0].y >= minimumSize) && (rp[2].y - rp[1].y >= minimumSize)); double leftOffset = rp[0].x - rp[3].x; double rightOffset = rp[1].x - rp[2].x; double bottomOffset = rp[0].y - rp[1].y; double topOffset = rp[2].y - rp[3].y; boolean isAnActualRectangle = ((leftOffset <= minimumSize && leftOffset >= -minimumSize) && (rightOffset <= minimumSize && rightOffset >= -minimumSize) && (bottomOffset <= minimumSize && bottomOffset >= -minimumSize) && (topOffset <= minimumSize && topOffset >= -minimumSize)); return isANormalShape && isAnActualRectangle && isBigEnough; } private Mat fourPointTransform(Mat src, Point[] pts) { Point tl = pts[0]; Point tr = pts[1]; Point br = pts[2]; Point bl = pts[3]; double widthA = Math.sqrt(Math.pow(br.x - bl.x, 2) + Math.pow(br.y - bl.y, 2)); double widthB = Math.sqrt(Math.pow(tr.x - tl.x, 2) + Math.pow(tr.y - tl.y, 2)); double dw = Math.max(widthA, widthB); int maxWidth = Double.valueOf(dw).intValue(); double heightA = Math.sqrt(Math.pow(tr.x - br.x, 2) + Math.pow(tr.y - br.y, 2)); double heightB = Math.sqrt(Math.pow(tl.x - bl.x, 2) + Math.pow(tl.y - bl.y, 2)); double dh = Math.max(heightA, heightB); int maxHeight = Double.valueOf(dh).intValue(); Mat doc = new Mat(maxHeight, maxWidth, CvType.CV_8UC4); Mat src_mat = new Mat(4, 1, CvType.CV_32FC2); Mat dst_mat = new Mat(4, 1, CvType.CV_32FC2); src_mat.put(0, 0, tl.x, tl.y, tr.x, tr.y, br.x, br.y, bl.x, bl.y); dst_mat.put(0, 0, 0.0, 0.0, dw, 0.0, dw, dh, 0.0, dh); Mat m = Imgproc.getPerspectiveTransform(src_mat, dst_mat); Imgproc.warpPerspective(src, doc, m, doc.size()); return doc; } private ArrayList findContours(Mat src) { Mat grayImage; Mat cannedImage; Mat resizedImage; int height = Double.valueOf(src.size().height).intValue(); int width = Double.valueOf(src.size().width).intValue(); Size size = new Size(width, 
height); resizedImage = new Mat(size, CvType.CV_8UC4); grayImage = new Mat(size, CvType.CV_8UC4); cannedImage = new Mat(size, CvType.CV_8UC1); Imgproc.resize(src, resizedImage, size); Imgproc.cvtColor(resizedImage, grayImage, Imgproc.COLOR_RGBA2GRAY, 4); Imgproc.GaussianBlur(grayImage, grayImage, new Size(5, 5), 0); Imgproc.Canny(grayImage, cannedImage, 80, 100, 3, false); ArrayList contours = new ArrayList<>(); Mat hierarchy = new Mat(); Imgproc.findContours(cannedImage, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE); hierarchy.release(); Collections.sort(contours, new Comparator() { @Override public int compare(MatOfPoint lhs, MatOfPoint rhs) { return Double.compare(Imgproc.contourArea(rhs), Imgproc.contourArea(lhs)); } }); resizedImage.release(); grayImage.release(); cannedImage.release(); return contours; } /*! Applies filters to the image based on the set filter */ public void applyFilters(Mat image) { int filterId = this.mMainActivity.getFilterId(); switch (filterId) { case 1: { // original image break; } case 2: { applyGreyscaleFilterToImage(image); break; } case 3: { applyColorFilterToImage(image); break; } case 4: { applyBlackAndWhiteFilterToImage(image); break; } default: // original image } } /*! Slightly enhances the black and white image */ public Mat applyGreyscaleFilterToImage(Mat image) { Imgproc.cvtColor(image, image, Imgproc.COLOR_RGBA2GRAY); return image; } /*! Slightly enhances the black and white image */ public Mat applyBlackAndWhiteFilterToImage(Mat image) { Imgproc.cvtColor(image, image, Imgproc.COLOR_RGBA2GRAY); image.convertTo(image, -1, 1, 10); return image; } /*! 
Slightly enhances the color on the image */ public Mat applyColorFilterToImage(Mat image) { image.convertTo(image, -1, 1.2, 0); return image; } public void rotateImageForScreen(Mat image) { switch (this.mMainActivity.lastDetectedRotation) { case Surface.ROTATION_90: { // Do nothing break; } case Surface.ROTATION_180: { Core.flip(image.t(), image, 0); break; } case Surface.ROTATION_270: { Core.flip(image, image, 0); Core.flip(image, image, 1); break; } case Surface.ROTATION_0: default: { Core.flip(image.t(), image, 1); break; } } } } ================================================ FILE: android/src/main/java/com/rectanglescanner/helpers/ImageProcessorMessage.java ================================================ package com.rectanglescanner.helpers; /** * Created by Jake on Jan 6, 2020. */ public class ImageProcessorMessage { private String command; private Object obj; public ImageProcessorMessage(String command , Object obj ) { setObj(obj); setCommand(command); } public String getCommand() { return command; } public void setCommand(String command) { this.command = command; } public Object getObj() { return obj; } public void setObj(Object obj) { this.obj = obj; } } ================================================ FILE: android/src/main/java/com/rectanglescanner/helpers/Quadrilateral.java ================================================ package com.rectanglescanner.helpers; import org.opencv.core.MatOfPoint; import org.opencv.core.Rect; import org.opencv.core.Mat; import org.opencv.core.Point; import org.opencv.core.Size; import android.os.Bundle; /** * Created by Jake on Jan 6, 2020. 
* Represents the detected rectangle from an image */ public class Quadrilateral { public MatOfPoint contour; public Point[] points; public Size sourceSize; public Quadrilateral(MatOfPoint contour, Point[] points, Size sourceSize) { this.contour = contour; this.points = points; this.sourceSize = sourceSize; } /** Crops the edges of the image to the aspect ratio of the detected rectangle. */ public Mat cropImageToRectangleSize(Mat image) { Size imageSize = image.size(); double rectangleRatio = this.sourceSize.height / this.sourceSize.width; double imageRatio = imageSize.height / imageSize.width; double cropHeight = imageSize.height; double cropWidth = imageSize.width; // Used to center the crop in the middle int rectangleXCoord = 0; int rectangleYCoord = 0; if (imageRatio > rectangleRatio) { // Height should be cropped cropHeight = cropWidth * rectangleRatio; rectangleYCoord = (int)((imageSize.height - cropHeight) / 2); } else { // Width should be cropped cropWidth = cropHeight / rectangleRatio; rectangleXCoord = (int)((imageSize.width - cropWidth) / 2); } Rect rectCrop = new Rect(rectangleXCoord, rectangleYCoord, (int)cropWidth, (int)cropHeight); return new Mat(image, rectCrop); } /** Returns the points of the rectangle scaled to the given size */ public Point[] getPointsForSize(Size outputSize) { double scale = outputSize.height / this.sourceSize.height; if (scale == 1) { return this.points; } Point[] scaledPoints = new Point[4]; for (int i = 0;i < this.points.length;i++ ) { scaledPoints[i] = this.points[i].clone(); scaledPoints[i].x *= scale; scaledPoints[i].y *= scale; } return scaledPoints; } /** Returns the rectangle as a bundle object */ public Bundle toBundle() { Bundle quadMap = new Bundle(); Bundle bottomLeft = new Bundle(); bottomLeft.putDouble("x", this.points[2].x); bottomLeft.putDouble("y", this.points[2].y); quadMap.putBundle("bottomLeft", bottomLeft); Bundle bottomRight = new Bundle(); bottomRight.putDouble("x", this.points[1].x); 
bottomRight.putDouble("y", this.points[1].y); quadMap.putBundle("bottomRight", bottomRight); Bundle topLeft = new Bundle(); topLeft.putDouble("x", this.points[3].x); topLeft.putDouble("y", this.points[3].y); quadMap.putBundle("topLeft", topLeft); Bundle topRight = new Bundle(); topRight.putDouble("x", this.points[0].x); topRight.putDouble("y", this.points[0].y); quadMap.putBundle("topRight", topRight); Bundle dimensions = new Bundle(); dimensions.putDouble("height", this.sourceSize.height); dimensions.putDouble("width", this.sourceSize.width); quadMap.putBundle("dimensions", dimensions); return quadMap; } } ================================================ FILE: android/src/main/java/com/rectanglescanner/views/CameraDeviceController.java ================================================ package com.rectanglescanner.views; import android.app.Activity; import android.content.Context; import android.content.pm.PackageManager; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.ImageFormat; import android.hardware.Camera; import android.hardware.Camera.PictureCallback; import android.media.AudioManager; import android.media.MediaActionSound; import android.os.Build; import android.util.AttributeSet; import android.util.Log; import android.view.Display; import android.view.Surface; import android.view.SurfaceHolder; import android.view.SurfaceView; import android.view.View; import android.view.WindowManager; import android.content.res.Configuration; import android.widget.FrameLayout; import com.rectanglescanner.R; import com.facebook.react.bridge.WritableMap; import com.facebook.react.bridge.WritableNativeMap; import org.opencv.android.JavaCameraView; import org.opencv.android.Utils; import org.opencv.core.CvType; import org.opencv.core.Mat; import org.opencv.core.Size; import org.opencv.imgproc.Imgproc; import java.util.List; /** Created by Jake on Jan 6, 2020. 
Handles Generic camera device setup and capture */ public class CameraDeviceController extends JavaCameraView implements PictureCallback { public static final String TAG = "CameraDeviceController"; protected Context mContext; private SurfaceView mSurfaceView; private SurfaceHolder mSurfaceHolder; protected final boolean mBugRotate = false; protected boolean safeToTakePicture; protected Activity mActivity; private PictureCallback pCallback; protected Boolean enableTorch = false; public int lastDetectedRotation = Surface.ROTATION_0; protected View mView = null; protected boolean cameraIsSetup = false; protected boolean isStopped = true; private WritableMap deviceConfiguration = new WritableNativeMap(); private int captureDevice = -1; private boolean imageProcessorBusy = true; private boolean cameraRequiresManualAutoFocus = false; private static CameraDeviceController mThis; public CameraDeviceController(Context context, AttributeSet attrs) { super(context, attrs); } public CameraDeviceController(Context context, Integer numCam, Activity activity, FrameLayout frameLayout) { super(context, numCam); this.mContext = context; this.mActivity = activity; pCallback = this; mView = frameLayout; context.getSystemService(Context.LAYOUT_INFLATER_SERVICE); } //================================================================================ // Setters //================================================================================ /** Toggles the flash on the camera device */ public void setEnableTorch(boolean enableTorch) { this.enableTorch = enableTorch; if (mCamera != null) { Camera.Parameters p = mCamera.getParameters(); p.setFlashMode(enableTorch ? 
Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF); mCamera.setParameters(p); } torchWasChanged(enableTorch); } protected void torchWasChanged(boolean torchEnabled) {} /** Cleans up the camera view */ public void cleanupCamera() { if (mCamera != null) { mCamera.stopPreview(); mCamera.setPreviewCallback(null); mCamera.release(); mCamera = null; this.cameraIsSetup = false; } } /** Stops and restarts the camera */ private void refreshCamera() { stopCamera(); startCamera(); } /** Starts the capture session */ public void startCamera() { Log.d(TAG, "Starting preview"); if (this.isStopped) { try { if (!this.cameraIsSetup) { setupCameraView(); } mCamera.setPreviewDisplay(mSurfaceHolder); mCamera.startPreview(); mCamera.setPreviewCallback(this); this.isStopped = false; } catch (Exception e) { Log.d(TAG, "Error starting preview: " + e); } } } /** Stops the capture session */ public void stopCamera() { Log.d(TAG, "Stopping preview"); if (!this.isStopped) { try { if (mCamera != null) { mCamera.stopPreview(); } this.isStopped = true; } catch (Exception e) { Log.d(TAG, "Error stopping preview: " + e); } } } /** Tell the camera to focus */ public void focusCamera() { Log.d(TAG, "Autofocusing"); mCamera.autoFocus(null); } /** Sets the device configuration flash setting */ public void setDeviceConfigurationFlashAvailable(boolean isAvailable) { this.deviceConfiguration.putBoolean("flashIsAvailable", isAvailable); } /** Sets the device configuration permission setting */ public void setDeviceConfigurationPermissionToUseCamera(boolean granted){ this.deviceConfiguration.putBoolean("permissionToUseCamera", granted); } /** Sets the device configuration camera availablility */ public void setDeviceConfigurationHasCamera(boolean isAvailable){ this.deviceConfiguration.putBoolean("hasCamera", isAvailable); } /** Sets the percent size of the camera preview */ public void setDeviceConfigurationPreviewPercentSize(double heightPercent, double widthPercent) { 
this.deviceConfiguration.putDouble("previewHeightPercent", heightPercent); this.deviceConfiguration.putDouble("previewWidthPercent", widthPercent); } /** Sets the inital device configuration */ public void resetDeviceConfiguration() { this.deviceConfiguration = new WritableNativeMap(); setDeviceConfigurationFlashAvailable(false); setDeviceConfigurationPermissionToUseCamera(false); setDeviceConfigurationHasCamera(false); setDeviceConfigurationPreviewPercentSize(1.0, 1.0); } /** Called after the camera and session are set up. This lets you check if a camera is found and permission is granted to use it. */ public void commitDeviceConfiguration() { deviceWasSetup(this.deviceConfiguration); } protected void deviceWasSetup(WritableMap config) {} //================================================================================ // Getters //================================================================================ private int getCameraDevice() { int cameraId = -1; // Search for the back facing camera // get the number of cameras int numberOfCameras = Camera.getNumberOfCameras(); // for every camera check for (int i = 0; i < numberOfCameras; i++) { Camera.CameraInfo info = new Camera.CameraInfo(); Camera.getCameraInfo(i, info); if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) { cameraId = i; break; } cameraId = i; } return cameraId; } /** Given a list of resolution sizes and a ratio to fit to, it will find the highest resolution that fits the ratio the best. 
*/ private Camera.Size getOptimalResolution(float ratioToFitTo, List resolutionList) { int maxPixels = 0; int ratioMaxPixels = 0; double bestRatioDifference = 5; Camera.Size currentMaxRes = null; Camera.Size ratioCurrentMaxRes = null; for (Camera.Size r : resolutionList) { float pictureRatio = (float) r.width / r.height; Log.d(TAG, "supported resolution: " + r.width + "x" + r.height + " ratio: " + pictureRatio + " ratioToFitTo: " + ratioToFitTo); int resolutionPixels = r.width * r.height; double ratioDifference = Math.abs(ratioToFitTo - pictureRatio); if (resolutionPixels > ratioMaxPixels && ratioDifference < bestRatioDifference) { ratioMaxPixels = resolutionPixels; ratioCurrentMaxRes = r; bestRatioDifference = ratioDifference; } if (resolutionPixels > maxPixels) { maxPixels = resolutionPixels; currentMaxRes = r; } } if (ratioCurrentMaxRes != null) { Log.d(TAG, "Max supported resolution with aspect ratio: " + ratioCurrentMaxRes.width + "x" + ratioCurrentMaxRes.height); return ratioCurrentMaxRes; } return currentMaxRes; } //================================================================================ // Setup //================================================================================ /** Creates a session for the camera device and outputs it to a preview view. @note Called on view did load */ public void setupCameraView() { resetDeviceConfiguration(); if (mSurfaceView == null) { mSurfaceView = mView.findViewById(R.id.surfaceView); mSurfaceHolder = this.getHolder(); mSurfaceHolder.addCallback(this); mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); } setupCamera(); commitDeviceConfiguration(); // [self listenForOrientationChanges]; this.cameraIsSetup = true; } /** Sets up the hardware and capture session asking for permission to use the camera if needed. 
*/ public void setupCamera() { if (!setupCaptureDevice()) { return; } Camera.Parameters param; param = mCamera.getParameters(); PackageManager pm = mActivity.getPackageManager(); if (pm.hasSystemFeature(PackageManager.FEATURE_CAMERA_FLASH)) { param.setFlashMode(Camera.Parameters.FLASH_MODE_OFF); } if (param.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) { param.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE); } else if (param.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_AUTO)) { param.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO); cameraRequiresManualAutoFocus = true; } param.setPictureFormat(ImageFormat.JPEG); mCamera.setDisplayOrientation(getScreenRotationOnPhone()); Display display = mActivity.getWindowManager().getDefaultDisplay(); android.graphics.Point size = new android.graphics.Point(); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { display.getRealSize(size); } int displayWidth = Math.min(size.y, size.x); int displayHeight = Math.max(size.y, size.x); float displayRatio = (float) displayHeight / displayWidth; Camera.Size pSize = getOptimalResolution(displayRatio, getResolutionList()); param.setPreviewSize(pSize.width, pSize.height); param.setWhiteBalance(Camera.Parameters.WHITE_BALANCE_AUTO); float previewRatio = (float) pSize.width / pSize.height; setDevicePreviewSize(previewRatio); Camera.Size maxRes = getOptimalResolution(previewRatio, getPictureResolutionList()); if (maxRes != null) { param.setPictureSize(maxRes.width, maxRes.height); Log.d(TAG, "max supported picture resolution: " + maxRes.width + "x" + maxRes.height); } try { mCamera.setParameters(param); setDeviceConfigurationPermissionToUseCamera(true); safeToTakePicture = true; } catch (Exception e) { Log.d(TAG, "failed to initialize the camera settings"); } } /** Sets the surface preview ratio size. 
Some android devices will have a different sized preview than their full screen size so this allows for some size adjusting so the preview's aspect ratio is intact */ public void setDevicePreviewSize(float previewRatio) { Display display = mActivity.getWindowManager().getDefaultDisplay(); android.graphics.Point size = new android.graphics.Point(); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { display.getRealSize(size); } int displayWidth = Math.min(size.y, size.x); int displayHeight = Math.max(size.y, size.x); float displayRatio = (float) displayHeight / displayWidth; int previewHeight = displayHeight; int previewWidth = displayWidth; int sizeY = size.y; int sizeX = size.x; if (this.lastDetectedRotation == Surface.ROTATION_90 || this.lastDetectedRotation == Surface.ROTATION_270) { sizeY = size.x; sizeX = size.y; } if (displayRatio > previewRatio) { // Adjust height previewHeight = (int) ((float) sizeY / displayRatio * previewRatio); } else if (displayRatio < previewRatio) { // Adjust Width previewWidth = (int) ((float) sizeX * displayRatio / previewRatio); } double percentOfScreenSizeHeight = (double) previewHeight / displayHeight; double percentOfScreenSizeWidth = (double) previewWidth / displayWidth; setDeviceConfigurationPreviewPercentSize(percentOfScreenSizeHeight, percentOfScreenSizeWidth); } /** Finds a physical camera, configures it, and sets the captureDevice property to it @return boolean if the camera was found and opened correctly */ public boolean setupCaptureDevice() { this.captureDevice = getCameraDevice(); try { int cameraId = getCameraDevice(); mCamera = Camera.open(cameraId); } catch (RuntimeException e) { System.err.println(e); return false; } setDeviceConfigurationHasCamera(true); PackageManager pm = mActivity.getPackageManager(); if (pm.hasSystemFeature(PackageManager.FEATURE_CAMERA_FLASH)) { setDeviceConfigurationFlashAvailable(true); } return true; } 
//================================================================================ // Capture Image //================================================================================ public void captureImageLater() { PackageManager pm = mActivity.getPackageManager(); if (this.safeToTakePicture) { this.safeToTakePicture = false; try { if (cameraRequiresManualAutoFocus) { mCamera.autoFocus(new Camera.AutoFocusCallback() { @Override public void onAutoFocus(boolean success, Camera camera) { if (success) { takePicture(); } else { onPictureFailed(); } } }); } else { takePicture(); } } catch (Exception e) { onPictureFailed(); } } } private void takePicture() { mCamera.takePicture(null, null, pCallback); makeShutterSound(); } private void onPictureFailed() { Log.d(TAG, "failed to capture image"); mCamera.cancelAutoFocus(); this.safeToTakePicture = true; } /** Responds to the capture image call. It will apply a few filters and call handleCapturedImage which can be overrided for more processing */ @Override public void onPictureTaken(byte[] data, Camera camera) { setEnableTorch(false); this.safeToTakePicture = true; Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0, data.length); Mat picture = new Mat(); Bitmap bmp32 = bitmap.copy(Bitmap.Config.ARGB_8888, true); Utils.bitmapToMat(bmp32, picture); Mat mat = new Mat(); Imgproc.cvtColor(picture, mat, Imgproc.COLOR_BGR2RGB, 4); handleCapturedImage(mat); } public void handleCapturedImage(Mat capturedImage) {} public int getScreenRotationOnPhone() { final Display display = ((WindowManager) mContext .getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay(); this.lastDetectedRotation = display.getRotation(); switch (this.lastDetectedRotation) { case Surface.ROTATION_0: return 90; case Surface.ROTATION_90: return 0; case Surface.ROTATION_180: return 270; case Surface.ROTATION_270: return 180; } return 90; } @Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); 
mCamera.setDisplayOrientation(getScreenRotationOnPhone()); } @Override public void surfaceDestroyed(SurfaceHolder holder) { cleanupCamera(); } /** Processes the image output from the capture session. */ @Override public void onPreviewFrame(byte[] data, Camera camera) { try { mSurfaceView.setVisibility(SurfaceView.VISIBLE); Camera.Size pictureSize = camera.getParameters().getPreviewSize(); Mat yuv = new Mat(new Size(pictureSize.width, pictureSize.height * 1.5), CvType.CV_8UC1); yuv.put(0, 0, data); Mat mat = new Mat(new Size(pictureSize.width, pictureSize.height), CvType.CV_8UC4); Imgproc.cvtColor(yuv, mat, Imgproc.COLOR_YUV2RGBA_NV21, 4); yuv.release(); processOutput(mat); } catch(Exception e) { Log.d(TAG, "Error processing preview frame: " + e); } } public void processOutput(Mat image) {} private void makeShutterSound() { AudioManager audio = (AudioManager) mActivity.getSystemService(Context.AUDIO_SERVICE); if (audio.getRingerMode() == AudioManager.RINGER_MODE_NORMAL) { MediaActionSound sound = new MediaActionSound(); sound.play(MediaActionSound.SHUTTER_CLICK); } } private List getResolutionList() { return mCamera.getParameters().getSupportedPreviewSizes(); } private List getPictureResolutionList() { return mCamera.getParameters().getSupportedPictureSizes(); } } ================================================ FILE: android/src/main/java/com/rectanglescanner/views/MainView.java ================================================ package com.rectanglescanner.views; import android.app.Activity; import android.content.Context; import android.view.LayoutInflater; import android.widget.FrameLayout; import com.facebook.react.bridge.WritableMap; import com.facebook.react.bridge.WritableNativeMap; import com.facebook.react.bridge.ReactContext; import com.facebook.react.uimanager.events.RCTEventEmitter; import com.rectanglescanner.R; public class MainView extends FrameLayout { private RNRectangleScannerView view; public static MainView instance = null; public static MainView 
getInstance() {
    return instance;
  }

  /** Replaces the singleton with a freshly constructed MainView. */
  public static void createInstance(Context context, Activity activity) {
    instance = new MainView(context, activity);
  }

  /**
   * Inflates the scanner layout, wraps it around a new
   * RNRectangleScannerView, and stacks both as children of this view.
   */
  private MainView(Context context, Activity activity) {
    super(context);
    LayoutInflater lf = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
    FrameLayout frameLayout = (FrameLayout) lf.inflate(R.layout.activity_rectangle_scanner, null);
    view = new RNRectangleScannerView(context, -1, activity, frameLayout);
    view.setParent(this);
    addViewInLayout(view, 0, new FrameLayout.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
    addViewInLayout(frameLayout, 1, view.getLayoutParams());
  }

  // Stretch every child to fill this view's full bounds.
  @Override
  protected void onLayout(boolean changed, int l, int t, int r, int b) {
    for (int i = 0; i < getChildCount(); i++) {
      getChildAt(i).layout(l, t, r, b);
    }
  }

  /** Delegates to the scanner view: toggle the torch. */
  public void setEnableTorch(boolean enable) {
    view.setEnableTorch(enable);
  }

  /** Delegates to the scanner view: set output jpeg quality. */
  public void setCapturedQuality(double quality) {
    view.setCapturedQuality(quality);
  }

  /** Delegates to the scanner view: set the active image filter. */
  public void setFilterId(int filterId) {
    view.setFilterId(filterId);
  }

  /** Delegates to the scanner view: start the preview. */
  public void startCamera() {
    view.startCamera();
  }

  /** Delegates to the scanner view: stop the preview. */
  public void stopCamera() {
    view.stopCamera();
  }

  /** Delegates to the scanner view: release the camera hardware. */
  public void cleanupCamera() {
    view.cleanupCamera();
  }

  /** Delegates to the scanner view: capture a still image. */
  public void capture() {
    view.capture();
  }

  /** Delegates to the scanner view: trigger autofocus. */
  public void focusCamera() {
    view.focusCamera();
  }

  /** Emits the onDeviceSetup event to JS with the device configuration. */
  public void deviceWasSetup(WritableMap config) {
    final ReactContext context = (ReactContext) getContext();
    context.getJSModule(RCTEventEmitter.class).receiveEvent(getId(), "onDeviceSetup", config);
  }

  /** Emits the onTorchChanged event to JS with the new torch state. */
  public void torchWasChanged(boolean torchEnabled) {
    WritableMap map = new WritableNativeMap();
    map.putBoolean("enabled", torchEnabled);
    final ReactContext context = (ReactContext) getContext();
    context.getJSModule(RCTEventEmitter.class).receiveEvent(getId(), "onTorchChanged", map);
  }

  /** Emits the onRectangleDetected event to JS with the detection result. */
  public void rectangleWasDetected(WritableMap detection) {
    final ReactContext context = (ReactContext) getContext();
    context.getJSModule(RCTEventEmitter.class).receiveEvent(getId(), "onRectangleDetected", detection);
  }

  /** Emits the onPictureTaken event to JS with the output file paths. */
  public void pictureWasTaken(WritableMap pictureDetails) {
    final ReactContext context = (ReactContext) getContext();
    context.getJSModule(RCTEventEmitter.class).receiveEvent(getId(), "onPictureTaken", pictureDetails);
  }

  /** Emits the onPictureProcessed event to JS with the output file paths. */
  public void pictureWasProcessed(WritableMap pictureDetails) {
    final ReactContext context = (ReactContext) getContext();
    context.getJSModule(RCTEventEmitter.class).receiveEvent(getId(), "onPictureProcessed", pictureDetails);
  }

  /** Emits the onErrorProcessingImage event to JS with the error details. */
  public void pictureDidFailToProcess(WritableMap errorDetails) {
    final ReactContext context = (ReactContext) getContext();
    context.getJSModule(RCTEventEmitter.class).receiveEvent(getId(), "onErrorProcessingImage", errorDetails);
  }
}

================================================ FILE: android/src/main/java/com/rectanglescanner/views/RNRectangleScannerView.java ================================================

package com.rectanglescanner.views;

import android.app.Activity;
import android.content.Context;
import android.util.Log;
import android.widget.FrameLayout;
import com.rectanglescanner.R;
import com.rectanglescanner.helpers.CapturedImage;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.bridge.WritableNativeMap;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfInt;
import org.opencv.imgcodecs.Imgcodecs;
import java.util.UUID;
import java.io.File;
import java.util.ArrayList;

/**
  Created by Jake on Jan 6, 2020.

  Wraps up the camera and rectangle detection code into a simple interface.
  Allows you to call start, stop, cleanup, and capture. Also is responsible
  for determining how to cache the output images.
*/
public class RNRectangleScannerView extends RectangleDetectionController {
  private String cacheFolderName = "RNRectangleScanner";
  // JPEG quality in [0, 1] applied when writing captured images to the cache.
  private double capturedQuality = 0.5;

  //================================================================================
  // Setup
  //================================================================================

  public RNRectangleScannerView(Context context, Integer numCam, Activity activity, FrameLayout frameLayout) {
    super(context, numCam, activity, frameLayout);
  }

  private MainView parentView = null;

  /** Registers the parent view that relays events to JavaScript. */
  public void setParent(MainView view) {
    this.parentView = view;
  }

  /**
  Sets the jpeg quality of the output image
  */
  public void setCapturedQuality(double quality) {
    this.capturedQuality = quality;
  }

  /**
  Call to capture an image
  */
  public void capture() {
    captureImageLater();
  }

  /**
  Called after a picture was captured
  */
  private void pictureWasTaken(WritableMap pictureDetails) {
    Log.d(TAG, "picture taken");
    this.parentView.pictureWasTaken(pictureDetails);
  }

  /**
  Called after a picture was captured and finished processing
  */
  private void pictureWasProcessed(WritableMap pictureDetails) {
    Log.d(TAG, "picture processed");
    this.parentView.pictureWasProcessed(pictureDetails);
  }

  /**
  Called if the picture failed to be captured
  */
  private void pictureDidFailToProcess(WritableMap errorDetails) {
    Log.d(TAG, "picture failed to process");
    this.parentView.pictureDidFailToProcess(errorDetails);
  }

  /**
  Called after the torch/flash state was changed
  */
  @Override
  protected void torchWasChanged(boolean torchEnabled) {
    Log.d(TAG, "torch changed");
    this.parentView.torchWasChanged(torchEnabled);
  }

  /**
  Called after the camera and session are set up. This lets you check if a
  camera is found and permission is granted to use it.
  */
  @Override
  protected void deviceWasSetup(WritableMap config) {
    Log.d(TAG, "device setup");
    this.parentView.deviceWasSetup(config);
  }

  /**
  Called after a frame is processed and a rectangle was found
  */
  @Override
  public void rectangleWasDetected(WritableMap detection) {
    this.parentView.rectangleWasDetected(detection);
  }

  /**
  After an image is captured and cropped, this method is called.
  Generates cache file names, fires the "taken" event, writes the cropped
  (if any) and original images to the cache, then fires the "processed"
  event. Any failure fires pictureDidFailToProcess instead.
  */
  @Override
  public void onProcessedCapturedImage(CapturedImage capturedImage) {
    WritableMap pictureWasTakenConfig = new WritableNativeMap();
    WritableMap pictureWasProcessedConfig = new WritableNativeMap();
    String croppedImageFileName = null;
    String originalImageFileName = null;
    boolean hasCroppedImage = (capturedImage.processed != null);
    try {
      originalImageFileName = generateStoredFileName("O");
      if (hasCroppedImage) {
        croppedImageFileName = generateStoredFileName("C");
      } else {
        // No crop was produced; both event keys point at the original image.
        croppedImageFileName = originalImageFileName;
      }
    } catch (Exception e) {
      WritableMap folderError = new WritableNativeMap();
      folderError.putString("message", "Failed to create the cache directory");
      pictureDidFailToProcess(folderError);
      return;
    }
    pictureWasTakenConfig.putString("croppedImage", "file://" + croppedImageFileName);
    pictureWasTakenConfig.putString("initialImage", "file://" + originalImageFileName);
    pictureWasProcessedConfig.putString("croppedImage", "file://" + croppedImageFileName);
    pictureWasProcessedConfig.putString("initialImage", "file://" + originalImageFileName);
    pictureWasTaken(pictureWasTakenConfig);
    if (hasCroppedImage && !this.saveToDirectory(capturedImage.processed, croppedImageFileName)) {
      WritableMap fileError = new WritableNativeMap();
      fileError.putString("message", "Failed to write cropped image to cache");
      fileError.putString("filePath", croppedImageFileName);
      pictureDidFailToProcess(fileError);
      return;
    }
    if (!this.saveToDirectory(capturedImage.original, originalImageFileName)) {
      WritableMap fileError = new WritableNativeMap();
      fileError.putString("message", "Failed to write original image to cache");
      fileError.putString("filePath", originalImageFileName);
      pictureDidFailToProcess(fileError);
      return;
    }
    pictureWasProcessed(pictureWasProcessedConfig);
    capturedImage.release();
    Log.d(TAG, "Captured Images");
  }

  /**
  Builds a unique cache file path ("<cache>/<folder>/<name><uuid>.jpg"),
  creating the cache folder if needed.
  @throws Exception if the cache directory cannot be created
  */
  private String generateStoredFileName(String name) throws Exception {
    String folderDir = this.mContext.getCacheDir().toString();
    File folder = new File(folderDir + "/" + this.cacheFolderName);
    if (!folder.exists()) {
      boolean result = folder.mkdirs();
      if (result) {
        Log.d(TAG, "wrote: created folder " + folder.getPath());
      } else {
        Log.d(TAG, "Not possible to create folder");
        throw new Exception("Failed to create the cache directory");
      }
    }
    // FIX: use a .jpg extension. Imgcodecs.imwrite selects the encoder from the
    // file extension, so with the previous ".png" the IMWRITE_JPEG_QUALITY flag
    // passed in saveToDirectory was silently ignored and capturedQuality had no
    // effect on the output.
    return folderDir + "/" + this.cacheFolderName + "/" + name + UUID.randomUUID() + ".jpg";
  }

  /**
  Rotates the image upright (transpose + horizontal flip) and writes it to
  fileName as a JPEG at the configured quality.
  @return true if the image was encoded and written successfully
  */
  private boolean saveToDirectory(Mat doc, String fileName) {
    // FIX: hold the transposed Mat so its native memory can be released; the
    // original flipped doc.t() inline and leaked that temporary. The previous
    // doc.copyTo(endDoc) was also redundant since Core.flip overwrites endDoc.
    Mat transposed = doc.t();
    Mat endDoc = new Mat(doc.size(), CvType.CV_8UC4);
    Core.flip(transposed, endDoc, 1);
    transposed.release();
    ArrayList<Integer> parameters = new ArrayList<>();
    parameters.add(Imgcodecs.IMWRITE_JPEG_QUALITY);
    parameters.add((int) (this.capturedQuality * 100));
    MatOfInt par = new MatOfInt();
    par.fromList(parameters);
    boolean success = Imgcodecs.imwrite(fileName, endDoc, par);
    endDoc.release();
    return success;
  }
}

================================================ FILE: android/src/main/java/com/rectanglescanner/views/RectangleDetectionController.java ================================================
package com.rectanglescanner.views;

import android.app.Activity;
import android.content.Context;
import android.os.Build;
import android.os.HandlerThread;
import android.os.Message;
import android.util.Log;
import android.view.Display;
import android.view.WindowManager;
import android.widget.FrameLayout;
import com.rectanglescanner.R;
import com.rectanglescanner.helpers.ImageProcessor;
import com.rectanglescanner.helpers.ImageProcessorMessage;
import com.rectanglescanner.helpers.CapturedImage;
import com.facebook.react.bridge.WritableMap;
import
org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;

/**
  Created by Jake on Jan 6, 2020.

  Takes the output from the camera device controller and attempts to detect
  rectangles from the output. On capture, it will also crop the image.
*/
public class RectangleDetectionController extends CameraDeviceController {
  private HandlerThread mImageThread;
  private ImageProcessor mImageProcessor;
  private int numberOfRectangles = 15;
  // Starts true so preview frames are dropped until initializeImageProcessor()
  // has finished loading OpenCV and starting the worker thread.
  private boolean imageProcessorBusy = true;
  private int filterId = 1;

  public void setImageProcessorBusy(boolean isBusy) {
    this.imageProcessorBusy = isBusy;
  }

  public int getFilterId() {
    return this.filterId;
  }

  /**
  Sets the currently active filter
  */
  public void setFilterId(int filterId) {
    this.filterId = filterId;
  }

  //================================================================================
  // Setup
  //================================================================================

  public RectangleDetectionController(Context context, Integer numCam, Activity activity, FrameLayout frameLayout) {
    super(context, numCam, activity, frameLayout);
    initializeImageProcessor(context);
  }

  /**
  Sets up the image processor. It uses OpenCV so it needs to load that first
  */
  private void initializeImageProcessor(Context context) {
    mActivity.getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    Display display = mActivity.getWindowManager().getDefaultDisplay();
    android.graphics.Point size = new android.graphics.Point();
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
      display.getRealSize(size);
    }

    if (OpenCVLoader.initLocal()) {
      Log.i(TAG, "OpenCV loaded successfully");
    } else {
      // Without OpenCV no processor is created; imageProcessorBusy stays true
      // so frames are never dispatched.
      Log.e(TAG, "OpenCV initialization failed!");
      return;
    }

    if (mImageThread == null) {
      mImageThread = new HandlerThread("Worker Thread");
      mImageThread.start();
    }

    if (mImageProcessor == null) {
      mImageProcessor = new ImageProcessor(mImageThread.getLooper(), this, mContext);
    }
    this.setImageProcessorBusy(false);
  }

  //================================================================================
  // Image Detection
  //================================================================================

  /**
  Runs each frame the image is being pushed to the preview layer
  */
  @Override
  public void processOutput(Mat image) {
    detectRectangleFromImageLater(image);
  }

  /**
  Looks for a rectangle in the given image async
  */
  private void detectRectangleFromImageLater(Mat image) {
    // Frames arriving while the processor is busy are simply dropped.
    if (!imageProcessorBusy) {
      setImageProcessorBusy(true);
      Message msg = mImageProcessor.obtainMessage();
      msg.obj = new ImageProcessorMessage("previewFrame", image);
      // The 100ms delay throttles how often preview frames reach the worker.
      mImageProcessor.sendMessageDelayed(msg, 100);
    }
  }

  /**
  Called after a frame is processed and a rectangle was found
  */
  public void rectangleWasDetected(WritableMap detection) {}

  //================================================================================
  // Capture Image
  //================================================================================

  /**
  After an image is captured, this function is called and handles cropping the image
  */
  @Override
  public void handleCapturedImage(Mat capturedImage) {
    setImageProcessorBusy(true);
    Message msg = mImageProcessor.obtainMessage();
    msg.obj = new ImageProcessorMessage("pictureTaken", capturedImage);
    // Front of queue: the capture is processed ahead of any queued preview frames.
    mImageProcessor.sendMessageAtFrontOfQueue(msg);
  }

  /**
  After an image is captured and cropped, this method is called
  */
  public void onProcessedCapturedImage(CapturedImage scannedDocument) {
  }
}

================================================ FILE: android/src/main/res/layout/activity_rectangle_scanner.xml ================================================

================================================ FILE: example/.gitignore ================================================
# Learn more https://docs.github.com/en/get-started/getting-started-with-git/ignoring-files

# dependencies
node_modules/

# Expo
.expo/
dist/
web-build/

# Native
*.orig.*
*.jks
*.p8
*.p12
*.key
*.mobileprovision

# Metro
.metro-health-check*

# debug
npm-debug.*
yarn-debug.*
yarn-error.*

# macOS
.DS_Store
*.pem

# local env files
.env*.local

# typescript
*.tsbuildinfo

================================================ FILE: example/App.js ================================================
import { StatusBar } from 'expo-status-bar';
import { StyleSheet, Text, View } from 'react-native';
import ScanDocument from './src/ScanDocument';

// NOTE(review): the JSX element tags in this component appear to have been
// stripped by the text extraction; only the text children remain. Verify
// against the original repository before editing this function.
export default function App() { return ( Open up App.js to start working on your app!
); }

// Centered full-screen container used by the default Expo screen above.
const styles = StyleSheet.create({
  container: {
    flex: 1,
    backgroundColor: '#fff',
    alignItems: 'center',
    justifyContent: 'center',
  },
});

================================================ FILE: example/app.json ================================================
{
  "expo": {
    "name": "example",
    "slug": "example",
    "version": "1.0.0",
    "orientation": "portrait",
    "icon": "./assets/icon.png",
    "userInterfaceStyle": "light",
    "plugins": [
      [
        "expo-dev-launcher",
        {
          "launchMode": "most-recent"
        }
      ]
    ],
    "splash": {
      "image": "./assets/splash.png",
      "resizeMode": "contain",
      "backgroundColor": "#ffffff"
    },
    "ios": {
      "supportsTablet": true,
      "bundleIdentifier": "com.example.example",
      "infoPlist": {
        "NSCameraUsageDescription": "Example App requires to access camera for taking pictures of documents."
      }
    },
    "android": {
      "package": "com.example.example",
      "adaptiveIcon": {
        "foregroundImage": "./assets/adaptive-icon.png",
        "backgroundColor": "#ffffff"
      }
    },
    "web": {
      "favicon": "./assets/favicon.png"
    }
  }
}

================================================ FILE: example/babel.config.js ================================================
// Standard Expo babel setup; api.cache(true) caches the config between builds.
module.exports = function(api) {
  api.cache(true);
  return {
    presets: ['babel-preset-expo'],
  };
};

================================================ FILE: example/package.json ================================================
{
  "name": "example",
  "version": "1.0.0",
  "main": "expo/AppEntry.js",
  "scripts": {
    "start": "expo start --dev-client --clear",
    "android": "expo run:android",
    "ios": "rm -f ios/.xcode.env.local && expo run:ios"
  },
  "dependencies": {
    "expo": "^50.0.17",
    "expo-dev-client": "~3.3.12",
    "expo-status-bar": "~1.12.1",
    "react": "18.2.0",
    "react-native": "0.73.6",
    "react-native-rectangle-scanner": "file:../"
  },
  "devDependencies": {
    "@babel/core": "^7.20.0"
  },
  "private": true
}

================================================ FILE: example/src/ScanDocument/CameraControls.js ================================================
import React from 'react';
import { SafeAreaView, Text, TouchableOpacity, View } from 'react-native';
import { Filters } from 'react-native-rectangle-scanner';
import { styles } from './styles';

// NOTE(review): the JSX element tags in this component appear to have been
// stripped by the text extraction; only expression children and prop
// fragments remain below. Verify against the original repository before
// editing — the code is preserved byte-for-byte here.
const CameraControls = ({ closeScanner, capture, isCapturing, flashIsAvailable, flashOn, setFlashOn, filterId, setFilterId }) => ( {Filters.RECOMMENDED_PLATFORM_FILTERS.map((f) => ( setFilterId(f.id)}> {f.name} ))} Cancel null : () => capture} /> {flashIsAvailable && ( setFlashOn(!flashOn)} > Flash: {flashOn ? 'ON' : 'OFF'} )} );

export default CameraControls;

================================================ FILE: example/src/ScanDocument/DocumentScanner.js ================================================
import React, { useRef, useState } from 'react';
import { Animated, ActivityIndicator, Dimensions, Text, TouchableOpacity, View } from 'react-native';
import Scanner, { Filters, FlashAnimation, RectangleOverlay } from 'react-native-rectangle-scanner';
import { styles } from './styles';
import CameraControls from './CameraControls';

// JPEG quality forwarded to the native scanner via capturedQuality.
const JPEGQuality = 0.7;

// NOTE(review): JSX markup in the render sections below also appears to have
// been stripped by extraction; code is preserved byte-for-byte.
const DocumentScanner = ({ closeScanner, onScannedImage }) => {
  const [loadingCamera, setLoadingCamera] = useState(true);
  const [cameraError, setCameraError] = useState();
  const [cameraOn, setCameraOn] = useState(true);
  const [flashOn, setFlashOn] = useState(false);
  const [filterId, setFilterId] = useState(Filters.PLATFORM_DEFAULT_FILTER_ID);
  const [flashIsAvailable, setFlashIsAvailable] = useState(false);
  const [processingImage, setProcessingImage] = useState(false);
  const [previewSize, setPreviewSize] = useState({});
  const [detectedRectangle, setDetectedRectangle] = useState();
  // const flashScreenOnCaptureAnimation = useRef(new Animated.Value(0)).current;
  const cameraRef = useRef();

  // Kicks off a native capture; the processingImage flag blocks double-taps.
  const capture = () => {
    if (processingImage) return;
    setProcessingImage(true);
    cameraRef.current.capture();
    // FlashAnimation.triggerSnapAnimation(flashScreenOnCaptureAnimation);
  }

  // Fired by the native side once the capture is cropped/filtered and cached.
  const onPictureProcessed = (event) => {
    console.log('cropped, transformed, and added filters to captured image');
    onScannedImage(event);
    setProcessingImage(false);
  }

  // Records camera availability/permission and sizes the preview to the
  // percentage of the screen the native camera actually fills.
  const onDeviceSetup = (device) => {
    setLoadingCamera(false);
    setFlashIsAvailable(device.flashIsAvailable);
    if (!device.hasCamera) {
      setCameraError('Device does not have a camera');
      setCameraOn(false);
    } else if (!device.permissionToUseCamera) {
      setCameraError('App does not have permission to use the camera');
      setCameraOn(false);
    }
    const dimensions = Dimensions.get('window');
    setPreviewSize({
      height: `${device.previewHeightPercent * 100}%`,
      width: `${device.previewWidthPercent * 100}%`,
      marginTop: (1 - device.previewHeightPercent) * dimensions.height / 2,
      marginLeft: (1 - device.previewWidthPercent) * dimensions.width / 2,
    });
  }

  if (cameraOn) {
    return ( console.log('picture captured...')} onPictureProcessed={onPictureProcessed} onErrorProcessingImage={(err) => console.error('Failed to capture scan', err?.message)} enableTorch={flashOn} filterId={filterId} ref={cameraRef} capturedQuality={JPEGQuality} onRectangleDetected={(value) => setDetectedRectangle(value.detectedRectangle)} onDeviceSetup={onDeviceSetup} onTorchChanged={({ enabled }) => setFlashOn(enabled)} style={styles.scanner} /> {!processingImage && ( )} {/* */} {loadingCamera && ( Loading Camera )} {processingImage && ( Processing )} );
  }
  return ( Cancel {cameraError ?
cameraError : 'Loading Camera'} ); } export default DocumentScanner; ================================================ FILE: example/src/ScanDocument/index.js ================================================ import React, { useState } from 'react'; import { Button, Text } from 'react-native'; import DocumentScanner from './DocumentScanner'; import useIsMultiTasking from '../useIsMultiTasking'; import { StatusBar } from 'expo-status-bar'; const ScanDocument = () => { const [scannerIsOn, setScannerIsOn] = useState(false); const [scannedImage, setScannedImage] = useState(); const onScannedImage = ({ croppedImage }) => { console.log('scanned an image!'); setScannedImage(croppedImage); } const isMultiTasking = useIsMultiTasking(); if (isMultiTasking) return Not allowed while multi tasking; if (!scannerIsOn) { if (!scannedImage) { return