Repository: WeLikeVis/CameraFilter
Branch: master
Commit: 0062da6aea0a
Files: 94
Total size: 149.7 KB
Directory structure:
gitextract_vkslxcew/
├── .gitignore
├── LICENSE
├── README.md
├── app/
│ ├── .gitignore
│ ├── build.gradle
│ ├── proguard-rules.pro
│ └── src/
│ └── main/
│ ├── AndroidManifest.xml
│ ├── java/
│ │ └── cn/
│ │ └── nekocode/
│ │ └── camerafilter/
│ │ ├── CameraRenderer.java
│ │ ├── MainActivity.java
│ │ ├── MyGLUtils.java
│ │ ├── RenderBuffer.java
│ │ └── filter/
│ │ ├── AsciiArtFilter.java
│ │ ├── BasicDeformFilter.java
│ │ ├── BlackAndWhiteFilter.java
│ │ ├── BlueorangeFilter.java
│ │ ├── CameraFilter.java
│ │ ├── CartoonFilter.java
│ │ ├── CastingFilter.java
│ │ ├── ChromaticAberrationFilter.java
│ │ ├── ContrastFilter.java
│ │ ├── CrackedFilter.java
│ │ ├── CrosshatchFilter.java
│ │ ├── EMInterferenceFilter.java
│ │ ├── EdgeDetectionFilter.java
│ │ ├── GrayFilter.java
│ │ ├── HexagonMosaicFilter.java
│ │ ├── JFAVoronoiFilter.java
│ │ ├── LegofiedFilter.java
│ │ ├── LichtensteinEsqueFilter.java
│ │ ├── MappingFilter.java
│ │ ├── MirrorFilter.java
│ │ ├── MoneyFilter.java
│ │ ├── NegativeFilter.java
│ │ ├── NoiseWarpFilter.java
│ │ ├── NostalgiaFilter.java
│ │ ├── OriginalFilter.java
│ │ ├── PixelizeFilter.java
│ │ ├── PolygonizationFilter.java
│ │ ├── RefractionFilter.java
│ │ ├── ReliefFilter.java
│ │ ├── SwirlFilter.java
│ │ ├── TileMosaicFilter.java
│ │ ├── TrianglesMosaicFilter.java
│ │ ├── TripleFilter.java
│ │ └── WaterReflectionFilter.java
│ └── res/
│ ├── menu/
│ │ └── filter.xml
│ ├── raw/
│ │ ├── ascii_art.fsh
│ │ ├── basic_deform.fsh
│ │ ├── black_and_white.fsh
│ │ ├── blue_orange.fsh
│ │ ├── cartoon.fsh
│ │ ├── casting.fsh
│ │ ├── chromatic_aberration.fsh
│ │ ├── contrast.fsh
│ │ ├── cracked.fsh
│ │ ├── crosshatch.fsh
│ │ ├── edge_detection.fsh
│ │ ├── em_interference.fsh
│ │ ├── gray.fsh
│ │ ├── hexagon_mosaic.fsh
│ │ ├── legofied.fsh
│ │ ├── lichtenstein_esque.fsh
│ │ ├── mapping.fsh
│ │ ├── mirror.fsh
│ │ ├── money_filter.fsh
│ │ ├── negative.fsh
│ │ ├── noise_warp.fsh
│ │ ├── nostalgia.fsh
│ │ ├── original.fsh
│ │ ├── original_rtt.fsh
│ │ ├── pixelize.fsh
│ │ ├── polygonization.fsh
│ │ ├── refraction.fsh
│ │ ├── relief.fsh
│ │ ├── swirl.fsh
│ │ ├── tile_mosaic.fsh
│ │ ├── triangles_mosaic.fsh
│ │ ├── triple.fsh
│ │ ├── vertext.vsh
│ │ ├── voronoi.fsh
│ │ ├── voronoi_buf_a.fsh
│ │ ├── voronoi_buf_b.fsh
│ │ ├── voronoi_buf_c.fsh
│ │ └── water_reflection.fsh
│ └── values/
│ ├── colors.xml
│ ├── strings.xml
│ └── styles.xml
├── build.gradle
├── gradle/
│ └── wrapper/
│ ├── gradle-wrapper.jar
│ └── gradle-wrapper.properties
├── gradle.properties
├── gradlew
├── gradlew.bat
└── settings.gradle
================================================
FILE CONTENTS
================================================
================================================
FILE: .gitignore
================================================
.gradle
.idea
/local.properties
/.idea/workspace.xml
/.idea/libraries
.DS_Store
/build
/captures
*.iml
*.apk
*.jobf
================================================
FILE: LICENSE
================================================
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
================================================
FILE: README.md
================================================
# CameraFilter
[](http://www.apache.org/licenses/LICENSE-2.0.html)
Realtime camera filters: frames are processed with OpenGL shaders.
**[Download the apk](https://github.com/nekocode/CameraFilter/releases)** to have a try.
## Filters
Thanks to the original authors of the shader code. I just ported it from WebGL to OpenGL ES.
| Filter | Preview | Filter | Preview |
| :----- | :------ | :----- | :------ |
| [Edge Detection](https://www.shadertoy.com/view/Xtd3W7) |  | [Pixelize](https://www.shadertoy.com/view/4lXXDH) | |
| [EM Interference](https://www.shadertoy.com/view/lsXSWl) | | [Triangles Mosaic](https://www.shadertoy.com/view/4d2SWy) |  |
| [Legofied](https://www.shadertoy.com/view/XtBSzy) |  | [Tile Mosaic](https://www.shadertoy.com/view/MtfXRN) |  |
| [Blueorange](https://www.shadertoy.com/view/MslGzr) |  | [Chromatic Aberration](https://www.shadertoy.com/view/Mds3zn) |  |
| [Basic Deform](https://www.shadertoy.com/view/XdsGzH) |  | [Contrast](https://www.shadertoy.com/view/Xdf3RN) |  |
| [NoiseWarp](https://www.shadertoy.com/view/4sX3RN) |  | [Refraction](https://www.shadertoy.com/view/MsX3zN) |  |
| [Mapping](https://www.shadertoy.com/view/XsX3R7) |  | [Crosshatch](https://www.shadertoy.com/view/MdX3Dr) |  |
| [Lichtenstein-esque](https://www.shadertoy.com/view/Mdf3zS) |  | [Ascii Art](https://www.shadertoy.com/view/lssGDj) |  |
| [Money Filter](https://www.shadertoy.com/view/XlsXDN) |  | [Cracked](https://www.shadertoy.com/view/XdBSzW) |  |
| [Polygonization](https://www.shadertoy.com/view/4lsXR7) |  | [JFA Voronoi](https://www.shadertoy.com/view/4sy3W3) |  |
================================================
FILE: app/.gitignore
================================================
/build
================================================
FILE: app/build.gradle
================================================
// Android application module configuration.
apply plugin: "com.android.application"
android {
// SDK used to compile; buildToolsVersion pins the build-tools revision.
compileSdkVersion 27
buildToolsVersion "28.0.3"
defaultConfig {
applicationId "cn.nekocode.camerafilter"
// Supports devices back to Android 4.0.3 (API 15).
minSdkVersion 15
targetSdkVersion 27
versionCode 212
versionName "2.4"
}
buildTypes {
release {
// Release builds are not minified; ProGuard config kept for future use.
minifyEnabled false
proguardFiles getDefaultProguardFile("proguard-android.txt"), "proguard-rules.pro"
}
}
}
dependencies {
// Local jars plus the support-library AppCompat (matches compileSdk 27).
implementation fileTree(dir: "libs", include: ["*.jar"])
implementation "com.android.support:appcompat-v7:27.1.1"
}
================================================
FILE: app/proguard-rules.pro
================================================
# Add project specific ProGuard rules here.
# By default, the flags in this file are appended to flags specified
# in /usr/local/opt/android-sdk/tools/proguard/proguard-android.txt
# You can edit the include path and order by changing the proguardFiles
# directive in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# Add any project specific keep options here:
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
================================================
FILE: app/src/main/AndroidManifest.xml
================================================
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/CameraRenderer.java
================================================
/*
* Copyright 2016 nekocode
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.util.Log;
import android.util.Pair;
import android.util.SparseArray;
import android.view.TextureView;
import java.io.IOException;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;
import cn.nekocode.camerafilter.filter.AsciiArtFilter;
import cn.nekocode.camerafilter.filter.BasicDeformFilter;
import cn.nekocode.camerafilter.filter.BlackAndWhiteFilter;
import cn.nekocode.camerafilter.filter.BlueorangeFilter;
import cn.nekocode.camerafilter.filter.CameraFilter;
import cn.nekocode.camerafilter.filter.CartoonFilter;
import cn.nekocode.camerafilter.filter.CastingFilter;
import cn.nekocode.camerafilter.filter.ChromaticAberrationFilter;
import cn.nekocode.camerafilter.filter.ContrastFilter;
import cn.nekocode.camerafilter.filter.CrackedFilter;
import cn.nekocode.camerafilter.filter.CrosshatchFilter;
import cn.nekocode.camerafilter.filter.EMInterferenceFilter;
import cn.nekocode.camerafilter.filter.EdgeDetectionFilter;
import cn.nekocode.camerafilter.filter.GrayFilter;
import cn.nekocode.camerafilter.filter.HexagonMosaicFilter;
import cn.nekocode.camerafilter.filter.JFAVoronoiFilter;
import cn.nekocode.camerafilter.filter.LegofiedFilter;
import cn.nekocode.camerafilter.filter.LichtensteinEsqueFilter;
import cn.nekocode.camerafilter.filter.MappingFilter;
import cn.nekocode.camerafilter.filter.MirrorFilter;
import cn.nekocode.camerafilter.filter.MoneyFilter;
import cn.nekocode.camerafilter.filter.NegativeFilter;
import cn.nekocode.camerafilter.filter.NoiseWarpFilter;
import cn.nekocode.camerafilter.filter.NostalgiaFilter;
import cn.nekocode.camerafilter.filter.OriginalFilter;
import cn.nekocode.camerafilter.filter.PixelizeFilter;
import cn.nekocode.camerafilter.filter.PolygonizationFilter;
import cn.nekocode.camerafilter.filter.RefractionFilter;
import cn.nekocode.camerafilter.filter.ReliefFilter;
import cn.nekocode.camerafilter.filter.SwirlFilter;
import cn.nekocode.camerafilter.filter.TileMosaicFilter;
import cn.nekocode.camerafilter.filter.TrianglesMosaicFilter;
import cn.nekocode.camerafilter.filter.TripleFilter;
import cn.nekocode.camerafilter.filter.WaterReflectionFilter;
/**
* @author nekocode (nekocode.cn@gmail.com)
*/
public class CameraRenderer implements Runnable, TextureView.SurfaceTextureListener {
private static final String TAG = "CameraRenderer";
private static final int EGL_OPENGL_ES2_BIT = 4;
private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
private static final int DRAW_INTERVAL = 1000 / 30;
private Thread renderThread;
private Context context;
private SurfaceTexture surfaceTexture;
private int gwidth, gheight;
private EGLDisplay eglDisplay;
private EGLSurface eglSurface;
private EGLContext eglContext;
private EGL10 egl10;
private Camera camera;
private SurfaceTexture cameraSurfaceTexture;
private int cameraTextureId;
private CameraFilter selectedFilter;
private int selectedFilterId = R.id.filter0;
private SparseArray cameraFilterMap = new SparseArray<>();
public CameraRenderer(Context context) {
this.context = context;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
gwidth = -width;
gheight = -height;
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
if (camera != null) {
camera.stopPreview();
camera.release();
}
if (renderThread != null && renderThread.isAlive()) {
renderThread.interrupt();
}
CameraFilter.release();
return true;
}
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
if (renderThread != null && renderThread.isAlive()) {
renderThread.interrupt();
}
renderThread = new Thread(this);
surfaceTexture = surface;
gwidth = -width;
gheight = -height;
// Open camera
Pair backCamera = getBackCamera();
final int backCameraId = backCamera.second;
camera = Camera.open(backCameraId);
// Start rendering
renderThread.start();
}
public void setSelectedFilter(int id) {
selectedFilterId = id;
selectedFilter = cameraFilterMap.get(id);
if (selectedFilter != null)
selectedFilter.onAttach();
}
@Override
public void run() {
initGL(surfaceTexture);
// Setup camera filters map
cameraFilterMap.append(R.id.filter0, new OriginalFilter(context));
cameraFilterMap.append(R.id.filter1, new EdgeDetectionFilter(context));
cameraFilterMap.append(R.id.filter2, new PixelizeFilter(context));
cameraFilterMap.append(R.id.filter3, new EMInterferenceFilter(context));
cameraFilterMap.append(R.id.filter4, new TrianglesMosaicFilter(context));
cameraFilterMap.append(R.id.filter5, new LegofiedFilter(context));
cameraFilterMap.append(R.id.filter6, new TileMosaicFilter(context));
cameraFilterMap.append(R.id.filter7, new BlueorangeFilter(context));
cameraFilterMap.append(R.id.filter8, new ChromaticAberrationFilter(context));
cameraFilterMap.append(R.id.filter9, new BasicDeformFilter(context));
cameraFilterMap.append(R.id.filter10, new ContrastFilter(context));
cameraFilterMap.append(R.id.filter11, new NoiseWarpFilter(context));
cameraFilterMap.append(R.id.filter12, new RefractionFilter(context));
cameraFilterMap.append(R.id.filter13, new MappingFilter(context));
cameraFilterMap.append(R.id.filter14, new CrosshatchFilter(context));
cameraFilterMap.append(R.id.filter15, new LichtensteinEsqueFilter(context));
cameraFilterMap.append(R.id.filter16, new AsciiArtFilter(context));
cameraFilterMap.append(R.id.filter17, new MoneyFilter(context));
cameraFilterMap.append(R.id.filter18, new CrackedFilter(context));
cameraFilterMap.append(R.id.filter19, new PolygonizationFilter(context));
cameraFilterMap.append(R.id.filter20, new JFAVoronoiFilter(context));
cameraFilterMap.append(R.id.filter21, new BlackAndWhiteFilter(context));
cameraFilterMap.append(R.id.filter22, new GrayFilter(context));
cameraFilterMap.append(R.id.filter23, new NegativeFilter(context));
cameraFilterMap.append(R.id.filter24, new NostalgiaFilter(context));
cameraFilterMap.append(R.id.filter25, new CastingFilter(context));
cameraFilterMap.append(R.id.filter26, new ReliefFilter(context));
cameraFilterMap.append(R.id.filter27, new SwirlFilter(context));
cameraFilterMap.append(R.id.filter28, new HexagonMosaicFilter(context));
cameraFilterMap.append(R.id.filter29, new MirrorFilter(context));
cameraFilterMap.append(R.id.filter30, new TripleFilter(context));
cameraFilterMap.append(R.id.filter31, new CartoonFilter(context));
cameraFilterMap.append(R.id.filter32, new WaterReflectionFilter(context));
setSelectedFilter(selectedFilterId);
// Create texture for camera preview
cameraTextureId = MyGLUtils.genTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
cameraSurfaceTexture = new SurfaceTexture(cameraTextureId);
// Start camera preview
try {
camera.setPreviewTexture(cameraSurfaceTexture);
camera.startPreview();
} catch (IOException ioe) {
// Something bad happened
}
// Render loop
while (!Thread.currentThread().isInterrupted()) {
try {
if (gwidth < 0 && gheight < 0)
GLES20.glViewport(0, 0, gwidth = -gwidth, gheight = -gheight);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// Update the camera preview texture
synchronized (this) {
cameraSurfaceTexture.updateTexImage();
}
// Draw camera preview
selectedFilter.draw(cameraTextureId, gwidth, gheight);
// Flush
GLES20.glFlush();
egl10.eglSwapBuffers(eglDisplay, eglSurface);
Thread.sleep(DRAW_INTERVAL);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
cameraSurfaceTexture.release();
GLES20.glDeleteTextures(1, new int[]{cameraTextureId}, 0);
}
private void initGL(SurfaceTexture texture) {
egl10 = (EGL10) EGLContext.getEGL();
eglDisplay = egl10.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
throw new RuntimeException("eglGetDisplay failed " +
android.opengl.GLUtils.getEGLErrorString(egl10.eglGetError()));
}
int[] version = new int[2];
if (!egl10.eglInitialize(eglDisplay, version)) {
throw new RuntimeException("eglInitialize failed " +
android.opengl.GLUtils.getEGLErrorString(egl10.eglGetError()));
}
int[] configsCount = new int[1];
EGLConfig[] configs = new EGLConfig[1];
int[] configSpec = {
EGL10.EGL_RENDERABLE_TYPE,
EGL_OPENGL_ES2_BIT,
EGL10.EGL_RED_SIZE, 8,
EGL10.EGL_GREEN_SIZE, 8,
EGL10.EGL_BLUE_SIZE, 8,
EGL10.EGL_ALPHA_SIZE, 8,
EGL10.EGL_DEPTH_SIZE, 0,
EGL10.EGL_STENCIL_SIZE, 0,
EGL10.EGL_NONE
};
EGLConfig eglConfig = null;
if (!egl10.eglChooseConfig(eglDisplay, configSpec, configs, 1, configsCount)) {
throw new IllegalArgumentException("eglChooseConfig failed " +
android.opengl.GLUtils.getEGLErrorString(egl10.eglGetError()));
} else if (configsCount[0] > 0) {
eglConfig = configs[0];
}
if (eglConfig == null) {
throw new RuntimeException("eglConfig not initialized");
}
int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE};
eglContext = egl10.eglCreateContext(eglDisplay, eglConfig, EGL10.EGL_NO_CONTEXT, attrib_list);
eglSurface = egl10.eglCreateWindowSurface(eglDisplay, eglConfig, texture, null);
if (eglSurface == null || eglSurface == EGL10.EGL_NO_SURFACE) {
int error = egl10.eglGetError();
if (error == EGL10.EGL_BAD_NATIVE_WINDOW) {
Log.e(TAG, "eglCreateWindowSurface returned EGL10.EGL_BAD_NATIVE_WINDOW");
return;
}
throw new RuntimeException("eglCreateWindowSurface failed " +
android.opengl.GLUtils.getEGLErrorString(error));
}
if (!egl10.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
throw new RuntimeException("eglMakeCurrent failed " +
android.opengl.GLUtils.getEGLErrorString(egl10.eglGetError()));
}
}
private Pair getBackCamera() {
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
final int numberOfCameras = Camera.getNumberOfCameras();
for (int i = 0; i < numberOfCameras; ++i) {
Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
return new Pair<>(cameraInfo, i);
}
}
return null;
}
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/MainActivity.java
================================================
/*
* Copyright 2016 nekocode
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter;
import android.Manifest;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.os.Bundle;
import android.os.Environment;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.view.GestureDetector;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.TextureView;
import android.view.View;
import android.widget.FrameLayout;
import android.widget.Toast;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.Locale;
/**
* @author nekocode (nekocode.cn@gmail.com)
*/
public class MainActivity extends AppCompatActivity implements GestureDetector.OnGestureListener {
private static final int REQUEST_CAMERA_PERMISSION = 101;
private FrameLayout container;
private CameraRenderer renderer;
private TextureView textureView;
private int filterId = R.id.filter0;
private int mCurrentFilterId = 0;
String[] TITLES = {"Original", "EdgeDectection", "Pixelize",
"EMInterference", "TrianglesMosaic", "Legofied",
"TileMosaic", "Blueorange", "ChromaticAberration",
"BasicDeform", "Contrast", "NoiseWarp", "Refraction",
"Mapping", "Crosshatch", "LichtensteinEsque",
"AsciiArt", "MoneyFilter", "Cracked", "Polygonization",
"JFAVoronoi", "BlackAndWhite", "Gray", "Negative",
"Nostalgia", "Casting", "Relief", "Swirl", "HexagonMosaic",
"Mirror", "Triple", "Cartoon", "WaterReflection"
};
Integer[] FILTER_RES_IDS = {R.id.filter0, R.id.filter1, R.id.filter2, R.id.filter3, R.id.filter4,
R.id.filter5, R.id.filter6, R.id.filter7, R.id.filter8, R.id.filter9, R.id.filter10,
R.id.filter11, R.id.filter12, R.id.filter13, R.id.filter14, R.id.filter15, R.id.filter16,
R.id.filter17, R.id.filter18, R.id.filter19, R.id.filter20,
R.id.filter21, R.id.filter22, R.id.filter23, R.id.filter24,
R.id.filter25, R.id.filter26, R.id.filter27, R.id.filter28,
R.id.filter29, R.id.filter30, R.id.filter31, R.id.filter32};
ArrayList mFilterArray = new ArrayList<>(Arrays.asList(FILTER_RES_IDS));
GestureDetector mGestureDetector;
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    container = new FrameLayout(this);
    setContentView(container);
    setTitle(TITLES[mCurrentFilterId]);

    final boolean cameraGranted =
            ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
                    == PackageManager.PERMISSION_GRANTED;
    if (cameraGranted) {
        setupCameraPreviewView();
    } else if (ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.CAMERA)) {
        // Explain to the user why the camera permission is needed.
        Toast.makeText(this, "Camera access is required.", Toast.LENGTH_SHORT).show();
    } else {
        ActivityCompat.requestPermissions(this,
                new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSION);
    }

    // Gesture detector drives filter switching via onFling.
    mGestureDetector = new GestureDetector(this, this);
}
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
    if (requestCode != REQUEST_CAMERA_PERMISSION) {
        return;
    }
    // Start the camera preview only once the user has granted access.
    if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
        setupCameraPreviewView();
    }
}
// Builds the preview UI: creates the renderer and a TextureView, wires the
// renderer in as the surface listener, and forwards touch and layout events.
// NOTE(review): the TextureView is added to the container before the listener
// is attached — keep this order; changing it may alter when the surface
// callbacks fire.
void setupCameraPreviewView() {
renderer = new CameraRenderer(this);
textureView = new TextureView(this);
container.addView(textureView);
textureView.setSurfaceTextureListener(renderer);
// textureView.setOnTouchListener(this);
// Route all touches through the gesture detector (fling switches filters).
textureView.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View view, MotionEvent motionEvent) {
mGestureDetector.onTouchEvent(motionEvent);
return true;
}
});
// Keep the GL viewport in sync with the view's size on every layout pass.
textureView.addOnLayoutChangeListener(new View.OnLayoutChangeListener() {
@Override
public void onLayoutChange(View v, int left, int top, int right, int bottom, int oldLeft, int oldTop, int oldRight, int oldBottom) {
renderer.onSurfaceTextureSizeChanged(null, v.getWidth(), v.getHeight());
}
});
}
// Inflates the filter-selection menu (res/menu/filter.xml) into the action bar.
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.filter, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    final int itemId = item.getItemId();
    filterId = itemId;
    // TODO: need tidy up
    // The "capture" menu action saves a screenshot instead of switching filters.
    if (itemId == R.id.capture) {
        final String message;
        if (capture()) {
            message = "The capture has been saved to your sdcard root path.";
        } else {
            message = "Save failed!";
        }
        Toast.makeText(this, message, Toast.LENGTH_SHORT).show();
        return true;
    }
    // Switch to the chosen filter and keep the fling index in sync.
    setTitle(item.getTitle());
    if (renderer != null) {
        renderer.setSelectedFilter(itemId);
    }
    mCurrentFilterId = mFilterArray.indexOf(itemId);
    return true;
}
/**
 * Saves the currently rendered TextureView frame as a PNG file in the
 * external storage root (file name derived from the current title).
 *
 * @return true if the image was written successfully, false otherwise
 */
private boolean capture() {
    final String path = genSaveFileName(getTitle().toString() + "_", ".png");
    final File imageFile = new File(path);
    if (imageFile.exists()) {
        imageFile.delete();
    }

    // Grab the current frame; this can be null if the view has no surface yet.
    final Bitmap bitmap = textureView.getBitmap();
    if (bitmap == null) {
        return false;
    }

    OutputStream outputStream = null;
    try {
        outputStream = new FileOutputStream(imageFile);
        // PNG is lossless; the quality argument is ignored by the encoder.
        bitmap.compress(Bitmap.CompressFormat.PNG, 90, outputStream);
        outputStream.flush();
        return true;
    } catch (IOException e) {
        // FileNotFoundException is an IOException, so one catch covers both.
        e.printStackTrace();
        return false;
    } finally {
        // Always close the stream — the original leaked it when compress()
        // or flush() threw.
        if (outputStream != null) {
            try {
                outputStream.close();
            } catch (IOException ignored) {
                // Nothing sensible to do if close fails.
            }
        }
    }
}
/**
 * Builds an absolute save path on external storage of the form
 * "&lt;external-root&gt;/&lt;prefix&gt;yyyyMMdd_HHmmss&lt;suffix&gt;".
 *
 * @param prefix file name prefix (e.g. current filter title + "_")
 * @param suffix file extension including the dot (e.g. ".png")
 * @return the absolute path of the file to write
 */
private String genSaveFileName(String prefix, String suffix) {
    // "HH" (24-hour clock) instead of the original "hh": with "hh",
    // captures taken 12 hours apart produce the same timestamp and
    // overwrite each other. An explicit Locale keeps digits stable on
    // devices with non-Latin default locales.
    SimpleDateFormat dateFormat =
            new SimpleDateFormat("yyyyMMdd_HHmmss", java.util.Locale.US);
    String timeString = dateFormat.format(new Date());
    String externalPath = Environment.getExternalStorageDirectory().toString();
    return externalPath + "/" + prefix + timeString + suffix;
}
// The callbacks below are required by GestureDetector.OnGestureListener but
// intentionally unused here; only onFling() does real work. The TextureView's
// OnTouchListener forwards every event to the detector and returns true.
@Override
public boolean onDown(MotionEvent motionEvent) {
return false;
}
@Override
public void onShowPress(MotionEvent motionEvent) {
// No-op.
}
@Override
public boolean onSingleTapUp(MotionEvent motionEvent) {
return false;
}
@Override
public boolean onScroll(MotionEvent motionEvent, MotionEvent motionEvent1, float v, float v1) {
return false;
}
@Override
public void onLongPress(MotionEvent motionEvent) {
// No-op.
}
// Switches to the previous/next filter on a fling gesture, using whichever
// axis had the larger velocity to decide direction.
@Override
public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
    final float dominant =
            Math.abs(velocityX) > Math.abs(velocityY) ? velocityX : velocityY;
    // A positive (rightward/downward) fling steps backwards in the list.
    final int step = dominant > 0 ? -1 : 1;

    mCurrentFilterId = circleLoop(TITLES.length, mCurrentFilterId, step);
    setTitle(TITLES[mCurrentFilterId]);
    if (renderer != null) {
        renderer.setSelectedFilter(FILTER_RES_IDS[mCurrentFilterId]);
    }
    return true;
}
/**
 * Advances a position on a circular list of {@code size} entries by
 * {@code step} (positive or negative), wrapping around at both ends.
 *
 * Generalized from the original branch-per-case version: floor-modulo
 * handles any step magnitude, whereas the original silently produced an
 * out-of-range index when |step| exceeded {@code size}.
 *
 * @param size       number of entries (must be &gt; 0)
 * @param currentPos current index in [0, size)
 * @param step       offset to apply; 0 returns currentPos unchanged
 * @return the wrapped index in [0, size)
 */
private int circleLoop(int size, int currentPos, int step) {
    // ((x % size) + size) % size is floor-modulo for possibly-negative x.
    return ((currentPos + step) % size + size) % size;
}
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/MyGLUtils.java
================================================
/*
* Copyright 2016 nekocode
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import android.util.Log;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import javax.microedition.khronos.opengles.GL10;
/**
* @author nekocode (nekocode.cn@gmail.com)
*/
public class MyGLUtils {
    private static final String TAG = "MyGLUtils";

    /** Generates and binds a plain GL_TEXTURE_2D texture with default parameters. */
    public static int genTexture() {
        return genTexture(GLES20.GL_TEXTURE_2D);
    }

    /**
     * Generates a texture of the given type, binds it, and applies default
     * filtering/wrapping parameters.
     *
     * @param textureType GLES20.GL_TEXTURE_2D or GLES11Ext.GL_TEXTURE_EXTERNAL_OES
     * @return the generated texture id
     */
    public static int genTexture(int textureType) {
        int[] genBuf = new int[1];
        GLES20.glGenTextures(1, genBuf, 0);
        GLES20.glBindTexture(textureType, genBuf[0]);

        // Set texture default draw parameters. External (camera) textures get
        // CLAMP_TO_EDGE wrapping; regular 2D textures use REPEAT so shaders
        // may tile them.
        if (textureType == GLES11Ext.GL_TEXTURE_EXTERNAL_OES) {
            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
        } else {
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_REPEAT);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_REPEAT);
        }

        return genBuf[0];
    }

    /**
     * Decodes a resource bitmap and uploads it into a newly generated 2D texture.
     *
     * @param context    context used to resolve the resource
     * @param resourceId bitmap resource to decode
     * @param size       out-parameter; receives width in [0] and height in [1]
     * @return the texture id (with no image data if decoding failed), or 0
     */
    public static int loadTexture(final Context context, final int resourceId, int[] size) {
        final int texId = genTexture();
        if (texId != 0) {
            final BitmapFactory.Options options = new BitmapFactory.Options();
            options.inScaled = false; // No pre-scaling
            options.inJustDecodeBounds = true;

            // First pass: decode bounds only, to report the image size.
            BitmapFactory.decodeResource(context.getResources(), resourceId, options);
            size[0] = options.outWidth;
            size[1] = options.outHeight;

            // Second pass: decode the actual pixels.
            options.inJustDecodeBounds = false;
            Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), resourceId, options);
            if (bitmap == null) {
                // decodeResource can return null (corrupt resource, OOM).
                // The original dereferenced it unconditionally and crashed.
                Log.e(TAG, "Failed to decode resource: " + resourceId);
                return texId;
            }

            // Load the bitmap into the bound texture, then free the Java copy.
            GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
            bitmap.recycle();
        }
        return texId;
    }

    /** Builds a program from two raw shader-source resources. */
    public static int buildProgram(Context context, int vertexSourceRawId, int fragmentSourceRawId) {
        return buildProgram(getStringFromRaw(context, vertexSourceRawId),
                getStringFromRaw(context, fragmentSourceRawId));
    }

    /**
     * Compiles both shaders and links them into a program.
     *
     * @return the program id, or 0 if compilation or linking failed
     */
    public static int buildProgram(String vertexSource, String fragmentSource) {
        final int vertexShader = buildShader(GLES20.GL_VERTEX_SHADER, vertexSource);
        if (vertexShader == 0) {
            return 0;
        }

        final int fragmentShader = buildShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
        if (fragmentShader == 0) {
            // Don't leak the already-compiled vertex shader (original did).
            GLES20.glDeleteShader(vertexShader);
            return 0;
        }

        final int program = GLES20.glCreateProgram();
        if (program == 0) {
            GLES20.glDeleteShader(vertexShader);
            GLES20.glDeleteShader(fragmentShader);
            return 0;
        }

        GLES20.glAttachShader(program, vertexShader);
        GLES20.glAttachShader(program, fragmentShader);
        GLES20.glLinkProgram(program);

        // Shader objects are reference-counted: flagging them for deletion now
        // lets the driver free them once the program itself is deleted.
        GLES20.glDeleteShader(vertexShader);
        GLES20.glDeleteShader(fragmentShader);

        // The original returned the program without checking GL_LINK_STATUS,
        // silently handing callers a broken program on link errors.
        int[] status = new int[1];
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, status, 0);
        if (status[0] == 0) {
            Log.e(TAG, GLES20.glGetProgramInfoLog(program));
            GLES20.glDeleteProgram(program);
            return 0;
        }
        return program;
    }

    /**
     * Compiles a single shader, logging the info log on failure.
     *
     * @param type         GLES20.GL_VERTEX_SHADER or GLES20.GL_FRAGMENT_SHADER
     * @param shaderSource GLSL source text
     * @return the shader id, or 0 if compilation failed
     */
    public static int buildShader(int type, String shaderSource) {
        final int shader = GLES20.glCreateShader(type);
        if (shader == 0) {
            return 0;
        }

        GLES20.glShaderSource(shader, shaderSource);
        GLES20.glCompileShader(shader);

        int[] status = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, status, 0);
        if (status[0] == 0) {
            Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
            GLES20.glDeleteShader(shader);
            return 0;
        }
        return shader;
    }

    /**
     * Reads a raw resource fully into a String (platform default charset).
     * Returns "" on any I/O error.
     */
    private static String getStringFromRaw(Context context, int id) {
        InputStream is = null;
        try {
            Resources r = context.getResources();
            is = r.openRawResource(id);
            ByteArrayOutputStream baos = new ByteArrayOutputStream();

            // Chunked copy instead of the original byte-at-a-time read loop.
            byte[] buffer = new byte[4096];
            int read;
            while ((read = is.read(buffer)) != -1) {
                baos.write(buffer, 0, read);
            }
            return baos.toString();
        } catch (IOException e) {
            return "";
        } finally {
            // Close the stream even when read() throws (original leaked it).
            if (is != null) {
                try {
                    is.close();
                } catch (IOException ignored) {
                    // Nothing useful to do on close failure.
                }
            }
        }
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/RenderBuffer.java
================================================
/*
* Copyright 2016 nekocode
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter;
import android.opengl.GLES20;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.IntBuffer;
import javax.microedition.khronos.opengles.GL10;
/**
* @author nekocode (nekocode.cn@gmail.com)
*/
// Offscreen render target: a 2D color texture plus a depth renderbuffer,
// attached to a framebuffer object on bind(). Used by CameraFilter as the
// render-to-texture destination for the camera pre-pass.
public class RenderBuffer {
// GL object ids for the color texture, depth renderbuffer and framebuffer.
private int texId = 0;
// Texture unit (e.g. GLES20.GL_TEXTURE8) this buffer's texture lives on.
private int activeTexUnit = 0;
private int renderBufferId = 0;
private int frameBufferId = 0;
// Pixel dimensions of the offscreen target.
private int width, height;
// Allocates the color texture (RGBA8888, width x height), the depth
// renderbuffer and the framebuffer. NOTE(review): the framebuffer is left
// unbound afterwards; attachments are (re)made on every bind().
public RenderBuffer(int width, int height, int activeTexUnit) {
this.width = width;
this.height = height;
this.activeTexUnit = activeTexUnit;
int[] genbuf = new int[1];
// Generate and bind 2d texture on the dedicated texture unit.
GLES20.glActiveTexture(activeTexUnit);
texId = MyGLUtils.genTexture();
// Backing store: width*height RGBA pixels (4 bytes each).
IntBuffer texBuffer =
ByteBuffer.allocateDirect(width * height * 4).order(ByteOrder.nativeOrder()).asIntBuffer();
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, texBuffer);
// Render targets must use CLAMP_TO_EDGE (genTexture defaults to REPEAT).
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
// Generate frame buffer
GLES20.glGenFramebuffers(1, genbuf, 0);
frameBufferId = genbuf[0];
// Bind frame buffer
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
// Generate render buffer (used as the depth attachment)
GLES20.glGenRenderbuffers(1, genbuf, 0);
renderBufferId = genbuf[0];
// Bind render buffer and allocate 16-bit depth storage
GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER, renderBufferId);
GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_DEPTH_COMPONENT16, width, height);
unbind();
}
// Id of the color texture that receives the rendered image.
public int getTexId() {
return texId;
}
public int getWidth() {
return width;
}
public int getHeight() {
return height;
}
public int getActiveTexUnit() {
return activeTexUnit;
}
// Makes this buffer the current render target: sets the viewport to the
// buffer's size and attaches color texture + depth renderbuffer to the FBO.
public void bind() {
GLES20.glViewport(0, 0, width, height);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
GLES20.GL_TEXTURE_2D, texId, 0);
GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT,
GLES20.GL_RENDERBUFFER, renderBufferId);
}
// Restores the default (on-screen) framebuffer. Note: does not restore the
// previous viewport; callers are expected to manage it.
public void unbind() {
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
}
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/AsciiArtFilter.java
================================================
/*
* Copyright 2016 nekocode
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
* @author nekocode (nekocode.cn@gmail.com)
*/
public class AsciiArtFilter extends CameraFilter {
    // Handle of the compiled ascii-art shader program.
    private final int program;

    public AsciiArtFilter(Context context) {
        super(context);
        // Link the shared vertex shader with this filter's fragment shader.
        program = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.ascii_art);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        // Bind canvas size and camera frame as shader inputs, then draw a
        // full-screen quad as a four-vertex triangle strip.
        final int[] resolution = new int[]{canvasWidth, canvasHeight};
        final int[] channels = new int[]{cameraTexId};
        setupShaderInputs(program, resolution, channels, new int[][]{});
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/BasicDeformFilter.java
================================================
/*
* Copyright 2016 nekocode
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
* @author nekocode (nekocode.cn@gmail.com)
*/
public class BasicDeformFilter extends CameraFilter {
    // Handle of the compiled basic-deform shader program.
    private final int program;

    public BasicDeformFilter(Context context) {
        super(context);
        // Link the shared vertex shader with this filter's fragment shader.
        program = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.basic_deform);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        // Feed canvas size and camera texture to the shader, then rasterize
        // a full-screen quad (triangle strip of four vertices).
        final int[] resolution = new int[]{canvasWidth, canvasHeight};
        final int[] channels = new int[]{cameraTexId};
        setupShaderInputs(program, resolution, channels, new int[][]{});
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/BlackAndWhiteFilter.java
================================================
/*
* Copyright 2016 winston
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
* @author winston (1054669137@qq.com)
*/
public class BlackAndWhiteFilter extends CameraFilter {
    // Handle of the compiled black-and-white shader program.
    private final int program;

    public BlackAndWhiteFilter(Context context) {
        super(context);
        // Link the shared vertex shader with this filter's fragment shader.
        program = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.black_and_white);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        // Bind shader inputs and draw a full-screen quad.
        final int[] resolution = new int[]{canvasWidth, canvasHeight};
        final int[] channels = new int[]{cameraTexId};
        setupShaderInputs(program, resolution, channels, new int[][]{});
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/BlueorangeFilter.java
================================================
/*
* Copyright 2016 nekocode
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
* @author nekocode (nekocode.cn@gmail.com)
*/
public class BlueorangeFilter extends CameraFilter {
    // Handle of the compiled blue-orange shader program.
    private final int program;

    public BlueorangeFilter(Context context) {
        super(context);
        // Link the shared vertex shader with this filter's fragment shader.
        program = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.blue_orange);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        // Bind shader inputs and draw a full-screen quad.
        final int[] resolution = new int[]{canvasWidth, canvasHeight};
        final int[] channels = new int[]{cameraTexId};
        setupShaderInputs(program, resolution, channels, new int[][]{});
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/CameraFilter.java
================================================
/*
* Copyright 2016 nekocode
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.support.annotation.CallSuper;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
import cn.nekocode.camerafilter.RenderBuffer;
/**
* @author nekocode (nekocode.cn@gmail.com)
*/
// Base class for all camera filters. Owns the shared full-screen-quad
// geometry, a pass-through program that copies the external OES camera
// texture into a plain 2D texture, and the Shadertoy-style uniform setup
// (iResolution / iGlobalTime / iFrame / iChannelN / iChannelResolution).
public abstract class CameraFilter {
    // Full-screen quad in normalized device coords, triangle-strip order.
    static final float SQUARE_COORDS[] = {
            1.0f, -1.0f,
            -1.0f, -1.0f,
            1.0f, 1.0f,
            -1.0f, 1.0f,
    };
    static final float TEXTURE_COORDS[] = {
            1.0f, 0.0f,
            0.0f, 0.0f,
            1.0f, 1.0f,
            0.0f, 1.0f,
    };
    // Shared across all filter instances; built lazily by the constructor.
    static FloatBuffer VERTEX_BUF, TEXTURE_COORD_BUF;
    static int PROGRAM = 0;

    // Dedicated texture unit for the camera RTT buffer so it never collides
    // with the iChannelN units (GL_TEXTURE0 + n) bound in setupShaderInputs().
    private static final int BUF_ACTIVE_TEX_UNIT = GLES20.GL_TEXTURE8;
    private static RenderBuffer CAMERA_RENDER_BUF;

    // Texture coords swapped to compensate for the camera orientation during
    // the OES -> 2D pre-pass. (Renamed from the original misspelled "ROATED".)
    private static final float ROTATED_TEXTURE_COORDS[] = {
            1.0f, 0.0f,
            1.0f, 1.0f,
            0.0f, 0.0f,
            0.0f, 1.0f,
    };
    private static FloatBuffer ROTATED_TEXTURE_COORD_BUF;

    // Per-instance clock and frame counter fed to shaders.
    final long START_TIME = System.currentTimeMillis();
    int iFrame = 0;

    public CameraFilter(Context context) {
        // Setup default buffers (once; they are static and shared).
        if (VERTEX_BUF == null) {
            VERTEX_BUF = ByteBuffer.allocateDirect(SQUARE_COORDS.length * 4)
                    .order(ByteOrder.nativeOrder()).asFloatBuffer();
            VERTEX_BUF.put(SQUARE_COORDS);
            VERTEX_BUF.position(0);
        }

        if (TEXTURE_COORD_BUF == null) {
            TEXTURE_COORD_BUF = ByteBuffer.allocateDirect(TEXTURE_COORDS.length * 4)
                    .order(ByteOrder.nativeOrder()).asFloatBuffer();
            TEXTURE_COORD_BUF.put(TEXTURE_COORDS);
            TEXTURE_COORD_BUF.position(0);
        }

        if (ROTATED_TEXTURE_COORD_BUF == null) {
            ROTATED_TEXTURE_COORD_BUF = ByteBuffer.allocateDirect(ROTATED_TEXTURE_COORDS.length * 4)
                    .order(ByteOrder.nativeOrder()).asFloatBuffer();
            ROTATED_TEXTURE_COORD_BUF.put(ROTATED_TEXTURE_COORDS);
            ROTATED_TEXTURE_COORD_BUF.position(0);
        }

        if (PROGRAM == 0) {
            // Pass-through program for the camera -> 2D-texture pre-pass.
            PROGRAM = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.original_rtt);
        }
    }

    /** Called when this filter becomes active; resets the frame counter. */
    @CallSuper
    public void onAttach() {
        iFrame = 0;
    }

    /**
     * Renders one frame: copies the external OES camera texture into a plain
     * 2D texture (render-to-texture pre-pass), then lets the concrete filter
     * draw with it via onDraw().
     */
    final public void draw(int cameraTexId, int canvasWidth, int canvasHeight) {
        // TODO move?
        // (Re)create the camera render buffer whenever the canvas resizes.
        if (CAMERA_RENDER_BUF == null ||
                CAMERA_RENDER_BUF.getWidth() != canvasWidth ||
                CAMERA_RENDER_BUF.getHeight() != canvasHeight) {
            CAMERA_RENDER_BUF = new RenderBuffer(canvasWidth, canvasHeight, BUF_ACTIVE_TEX_UNIT);
        }

        // Use the pass-through shaders.
        GLES20.glUseProgram(PROGRAM);

        int iChannel0Location = GLES20.glGetUniformLocation(PROGRAM, "iChannel0");
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraTexId);
        GLES20.glUniform1i(iChannel0Location, 0);

        int vPositionLocation = GLES20.glGetAttribLocation(PROGRAM, "vPosition");
        GLES20.glEnableVertexAttribArray(vPositionLocation);
        GLES20.glVertexAttribPointer(vPositionLocation, 2, GLES20.GL_FLOAT, false, 4 * 2, VERTEX_BUF);

        int vTexCoordLocation = GLES20.glGetAttribLocation(PROGRAM, "vTexCoord");
        GLES20.glEnableVertexAttribArray(vTexCoordLocation);
        GLES20.glVertexAttribPointer(vTexCoordLocation, 2, GLES20.GL_FLOAT, false, 4 * 2, ROTATED_TEXTURE_COORD_BUF);

        // Render the camera frame into the offscreen 2D texture.
        CAMERA_RENDER_BUF.bind();
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        CAMERA_RENDER_BUF.unbind();

        // Then let the concrete filter draw to the default framebuffer.
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        onDraw(CAMERA_RENDER_BUF.getTexId(), canvasWidth, canvasHeight);

        iFrame++;
    }

    /** Implemented by subclasses: draw using the (already 2D) camera texture. */
    abstract void onDraw(int cameraTexId, int canvasWidth, int canvasHeight);

    /** Convenience overload using the default full-screen quad buffers. */
    void setupShaderInputs(int program, int[] iResolution, int[] iChannels, int[][] iChannelResolutions) {
        setupShaderInputs(program, VERTEX_BUF, TEXTURE_COORD_BUF, iResolution, iChannels, iChannelResolutions);
    }

    /**
     * Binds the common Shadertoy-style uniforms and vertex attributes.
     *
     * @param program             shader program to activate
     * @param vertex              vertex position buffer (2 floats per vertex)
     * @param textureCoord        texture coordinate buffer (2 floats per vertex)
     * @param iResolution         canvas {width, height}
     * @param iChannels           texture ids bound as iChannel0..N
     * @param iChannelResolutions per-channel {width, height} pairs
     */
    void setupShaderInputs(int program, FloatBuffer vertex, FloatBuffer textureCoord, int[] iResolution, int[] iChannels, int[][] iChannelResolutions) {
        GLES20.glUseProgram(program);

        int iResolutionLocation = GLES20.glGetUniformLocation(program, "iResolution");
        GLES20.glUniform3fv(iResolutionLocation, 1,
                FloatBuffer.wrap(new float[]{(float) iResolution[0], (float) iResolution[1], 1.0f}));

        float time = ((float) (System.currentTimeMillis() - START_TIME)) / 1000.0f;
        int iGlobalTimeLocation = GLES20.glGetUniformLocation(program, "iGlobalTime");
        GLES20.glUniform1f(iGlobalTimeLocation, time);

        int iFrameLocation = GLES20.glGetUniformLocation(program, "iFrame");
        GLES20.glUniform1i(iFrameLocation, iFrame);

        int vPositionLocation = GLES20.glGetAttribLocation(program, "vPosition");
        GLES20.glEnableVertexAttribArray(vPositionLocation);
        GLES20.glVertexAttribPointer(vPositionLocation, 2, GLES20.GL_FLOAT, false, 4 * 2, vertex);

        int vTexCoordLocation = GLES20.glGetAttribLocation(program, "vTexCoord");
        GLES20.glEnableVertexAttribArray(vTexCoordLocation);
        GLES20.glVertexAttribPointer(vTexCoordLocation, 2, GLES20.GL_FLOAT, false, 4 * 2, textureCoord);

        // Bind each input texture to its own unit and point iChannelN at it.
        for (int i = 0; i < iChannels.length; i++) {
            int sTextureLocation = GLES20.glGetUniformLocation(program, "iChannel" + i);
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, iChannels[i]);
            GLES20.glUniform1i(sTextureLocation, i);
        }

        // Flatten {w, h} pairs into vec3 triplets {w, h, 1}.
        float _iChannelResolutions[] = new float[iChannelResolutions.length * 3];
        for (int i = 0; i < iChannelResolutions.length; i++) {
            _iChannelResolutions[i * 3] = iChannelResolutions[i][0];
            _iChannelResolutions[i * 3 + 1] = iChannelResolutions[i][1];
            _iChannelResolutions[i * 3 + 2] = 1.0f;
        }

        int iChannelResolutionLocation = GLES20.glGetUniformLocation(program, "iChannelResolution");
        // BUGFIX: glUniform3fv's count parameter is the number of vec3
        // elements, not the number of floats. The original passed
        // _iChannelResolutions.length (3x too large).
        GLES20.glUniform3fv(iChannelResolutionLocation,
                iChannelResolutions.length, FloatBuffer.wrap(_iChannelResolutions));
    }

    /** Drops GL-backed static state so it is rebuilt after a context loss. */
    public static void release() {
        PROGRAM = 0;
        CAMERA_RENDER_BUF = null;
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/CartoonFilter.java
================================================
/*
* Copyright 2016 winston
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
* @author winston (1054669137@qq.com)
*/
public class CartoonFilter extends CameraFilter {
    // Handle of the compiled cartoon shader program.
    private final int program;

    public CartoonFilter(Context context) {
        super(context);
        // Link the shared vertex shader with this filter's fragment shader.
        program = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.cartoon);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        // Bind shader inputs and draw a full-screen quad.
        final int[] resolution = new int[]{canvasWidth, canvasHeight};
        final int[] channels = new int[]{cameraTexId};
        setupShaderInputs(program, resolution, channels, new int[][]{});
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/CastingFilter.java
================================================
/*
* Copyright 2016 winston
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
* @author winston (1054669137@qq.com)
*/
public class CastingFilter extends CameraFilter {
    // Handle of the compiled casting shader program.
    private final int program;

    public CastingFilter(Context context) {
        super(context);
        // Link the shared vertex shader with this filter's fragment shader.
        program = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.casting);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        // Bind shader inputs and draw a full-screen quad.
        final int[] resolution = new int[]{canvasWidth, canvasHeight};
        final int[] channels = new int[]{cameraTexId};
        setupShaderInputs(program, resolution, channels, new int[][]{});
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/ChromaticAberrationFilter.java
================================================
/*
* Copyright 2016 nekocode
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
* @author nekocode (nekocode.cn@gmail.com)
*/
public class ChromaticAberrationFilter extends CameraFilter {
    // Handle of the compiled chromatic-aberration shader program.
    private final int program;

    public ChromaticAberrationFilter(Context context) {
        super(context);
        // Link the shared vertex shader with this filter's fragment shader.
        program = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.chromatic_aberration);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        // Bind shader inputs and draw a full-screen quad.
        final int[] resolution = new int[]{canvasWidth, canvasHeight};
        final int[] channels = new int[]{cameraTexId};
        setupShaderInputs(program, resolution, channels, new int[][]{});
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/ContrastFilter.java
================================================
/*
* Copyright 2016 nekocode
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
* @author nekocode (nekocode.cn@gmail.com)
*/
public class ContrastFilter extends CameraFilter {
    // Handle of the compiled contrast shader program.
    private final int program;

    public ContrastFilter(Context context) {
        super(context);
        // Link the shared vertex shader with this filter's fragment shader.
        program = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.contrast);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        // Bind shader inputs and draw a full-screen quad.
        final int[] resolution = new int[]{canvasWidth, canvasHeight};
        final int[] channels = new int[]{cameraTexId};
        setupShaderInputs(program, resolution, channels, new int[][]{});
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/CrackedFilter.java
================================================
/*
* Copyright 2016 nekocode
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
* @author nekocode (nekocode.cn@gmail.com)
*/
public class CrackedFilter extends CameraFilter {
    // Handle of the compiled cracked-glass shader program.
    private final int program;

    public CrackedFilter(Context context) {
        super(context);
        // Link the shared vertex shader with this filter's fragment shader.
        program = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.cracked);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        // Bind shader inputs and draw a full-screen quad.
        final int[] resolution = new int[]{canvasWidth, canvasHeight};
        final int[] channels = new int[]{cameraTexId};
        setupShaderInputs(program, resolution, channels, new int[][]{});
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/CrosshatchFilter.java
================================================
/*
* Copyright 2016 nekocode
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
* @author nekocode (nekocode.cn@gmail.com)
*/
public class CrosshatchFilter extends CameraFilter {
    // Handle of the compiled crosshatch shader program.
    private final int program;

    public CrosshatchFilter(Context context) {
        super(context);
        // Link the shared vertex shader with this filter's fragment shader.
        program = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.crosshatch);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        // Bind shader inputs and draw a full-screen quad.
        final int[] resolution = new int[]{canvasWidth, canvasHeight};
        final int[] channels = new int[]{cameraTexId};
        setupShaderInputs(program, resolution, channels, new int[][]{});
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/EMInterferenceFilter.java
================================================
/*
* Copyright 2016 nekocode
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
* @author nekocode (nekocode.cn@gmail.com)
*/
public class EMInterferenceFilter extends CameraFilter {
    // Handle of the compiled EM-interference shader program.
    private final int program;

    public EMInterferenceFilter(Context context) {
        super(context);
        // Link the shared vertex shader with this filter's fragment shader.
        program = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.em_interference);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        // Bind shader inputs and draw a full-screen quad.
        final int[] resolution = new int[]{canvasWidth, canvasHeight};
        final int[] channels = new int[]{cameraTexId};
        setupShaderInputs(program, resolution, channels, new int[][]{});
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/EdgeDetectionFilter.java
================================================
/*
* Copyright 2016 nekocode
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
 * Renders the camera preview through the "edge_detection" fragment shader.
 *
 * @author nekocode (nekocode.cn@gmail.com)
 */
public class EdgeDetectionFilter extends CameraFilter {
    // Handle of the linked GL shader program used for this filter.
    private final int shaderProgram;

    public EdgeDetectionFilter(Context context) {
        super(context);
        // Compile the shared vertex shader together with this filter's fragment shader.
        shaderProgram = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.edge_detection);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        int[] canvasSize = {canvasWidth, canvasHeight};
        int[] inputTextures = {cameraTexId};
        setupShaderInputs(shaderProgram, canvasSize, inputTextures, new int[][]{});
        // Draw a full-screen quad as a two-triangle strip.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/GrayFilter.java
================================================
/*
* Copyright 2016 winston
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
 * Renders the camera preview through the "gray" (grayscale) fragment shader.
 *
 * @author winston (1054669137@qq.com)
 */
public class GrayFilter extends CameraFilter {
    // Handle of the linked GL shader program used for this filter.
    private final int shaderProgram;

    public GrayFilter(Context context) {
        super(context);
        // Compile the shared vertex shader together with this filter's fragment shader.
        shaderProgram = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.gray);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        int[] canvasSize = {canvasWidth, canvasHeight};
        int[] inputTextures = {cameraTexId};
        setupShaderInputs(shaderProgram, canvasSize, inputTextures, new int[][]{});
        // Draw a full-screen quad as a two-triangle strip.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/HexagonMosaicFilter.java
================================================
/*
* Copyright 2016 winston
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
 * Renders the camera preview through the "hexagon_mosaic" fragment shader.
 *
 * @author winston (1054669137@qq.com)
 */
public class HexagonMosaicFilter extends CameraFilter {
    // Handle of the linked GL shader program used for this filter.
    private final int shaderProgram;

    public HexagonMosaicFilter(Context context) {
        super(context);
        // Compile the shared vertex shader together with this filter's fragment shader.
        shaderProgram = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.hexagon_mosaic);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        int[] canvasSize = {canvasWidth, canvasHeight};
        int[] inputTextures = {cameraTexId};
        setupShaderInputs(shaderProgram, canvasSize, inputTextures, new int[][]{});
        // Draw a full-screen quad as a two-triangle strip.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/JFAVoronoiFilter.java
================================================
/*
* Copyright 2016 nekocode
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
import cn.nekocode.camerafilter.RenderBuffer;
/**
 * Jump-Flooding-Algorithm Voronoi filter. Runs three off-screen render
 * passes (buf A, buf B, buf C) and then composites the result on screen.
 *
 * @author nekocode (nekocode.cn@gmail.com)
 */
public class JFAVoronoiFilter extends CameraFilter {
    private int programImg;  // final on-screen composite pass
    private int programA;    // pass rendering into bufA
    private int programB;    // pass rendering into bufB
    private int programC;    // pass rendering into bufC
    private RenderBuffer bufA;
    private RenderBuffer bufB;
    private RenderBuffer bufC;

    public JFAVoronoiFilter(Context context) {
        super(context);
        // Build shaders: one program per off-screen pass plus the composite.
        programImg = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.voronoi);
        programA = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.voronoi_buf_a);
        programB = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.voronoi_buf_b);
        programC = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.voronoi_buf_c);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        // TODO move?
        // (Re)create the intermediate buffers on first use or when the canvas
        // size changes. Fix: the original condition compared bufA.getWidth()
        // but bufB.getHeight(), mixing two buffers in one check; all three
        // buffers are always allocated together with identical dimensions, so
        // checking bufA alone is both sufficient and consistent.
        // NOTE(review): previous RenderBuffers are not explicitly released on
        // resize — confirm RenderBuffer's GL resource lifecycle.
        if (bufA == null || bufA.getWidth() != canvasWidth || bufA.getHeight() != canvasHeight) {
            // Create new textures for buffering
            bufA = new RenderBuffer(canvasWidth, canvasHeight, GLES20.GL_TEXTURE4);
            bufB = new RenderBuffer(canvasWidth, canvasHeight, GLES20.GL_TEXTURE5);
            bufC = new RenderBuffer(canvasWidth, canvasHeight, GLES20.GL_TEXTURE6);
        }

        // Pass 1: camera frame (+ previous bufA contents) -> bufA
        setupShaderInputs(programA,
                new int[]{canvasWidth, canvasHeight},
                new int[]{cameraTexId, bufA.getTexId()},
                new int[][]{new int[]{canvasWidth, canvasHeight}, new int[]{canvasWidth, canvasHeight}});
        bufA.bind();
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        bufA.unbind();
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

        // Pass 2: bufB (previous) + bufA -> bufB
        setupShaderInputs(programB,
                new int[]{canvasWidth, canvasHeight},
                new int[]{bufB.getTexId(), bufA.getTexId()},
                new int[][]{new int[]{canvasWidth, canvasHeight}, new int[]{canvasWidth, canvasHeight}});
        bufB.bind();
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        bufB.unbind();
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

        // Pass 3: bufC (previous) + bufB -> bufC
        setupShaderInputs(programC,
                new int[]{canvasWidth, canvasHeight},
                new int[]{bufC.getTexId(), bufB.getTexId()},
                new int[][]{new int[]{canvasWidth, canvasHeight}, new int[]{canvasWidth, canvasHeight}});
        bufC.bind();
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        bufC.unbind();
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

        // Final pass: composite bufC + bufA to the default framebuffer (screen).
        setupShaderInputs(programImg,
                new int[]{canvasWidth, canvasHeight},
                new int[]{bufC.getTexId(), bufA.getTexId()},
                new int[][]{new int[]{canvasWidth, canvasHeight}, new int[]{canvasWidth, canvasHeight}});
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/LegofiedFilter.java
================================================
/*
* Copyright 2016 nekocode
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
 * Renders the camera preview through the "legofied" fragment shader.
 *
 * @author nekocode (nekocode.cn@gmail.com)
 */
public class LegofiedFilter extends CameraFilter {
    // Handle of the linked GL shader program used for this filter.
    private final int shaderProgram;

    public LegofiedFilter(Context context) {
        super(context);
        // Compile the shared vertex shader together with this filter's fragment shader.
        shaderProgram = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.legofied);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        int[] canvasSize = {canvasWidth, canvasHeight};
        int[] inputTextures = {cameraTexId};
        setupShaderInputs(shaderProgram, canvasSize, inputTextures, new int[][]{});
        // Draw a full-screen quad as a two-triangle strip.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/LichtensteinEsqueFilter.java
================================================
/*
* Copyright 2016 nekocode
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
 * Renders the camera preview through the "lichtenstein_esque" fragment shader.
 *
 * @author nekocode (nekocode.cn@gmail.com)
 */
public class LichtensteinEsqueFilter extends CameraFilter {
    // Handle of the linked GL shader program used for this filter.
    private final int shaderProgram;

    public LichtensteinEsqueFilter(Context context) {
        super(context);
        // Compile the shared vertex shader together with this filter's fragment shader.
        shaderProgram = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.lichtenstein_esque);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        int[] canvasSize = {canvasWidth, canvasHeight};
        int[] inputTextures = {cameraTexId};
        setupShaderInputs(shaderProgram, canvasSize, inputTextures, new int[][]{});
        // Draw a full-screen quad as a two-triangle strip.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/MappingFilter.java
================================================
/*
* Copyright 2016 nekocode
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
 * Renders the camera preview through the "mapping" fragment shader, which
 * samples a secondary texture (tex00) in addition to the camera frame.
 *
 * @author nekocode (nekocode.cn@gmail.com)
 */
public class MappingFilter extends CameraFilter {
    // Handle of the linked GL shader program used for this filter.
    private final int shaderProgram;
    // Secondary texture sampled by the mapping shader.
    private final int secondTexId;

    public MappingFilter(Context context) {
        super(context);
        // Compile the shared vertex shader together with this filter's fragment shader.
        shaderProgram = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.mapping);
        // Load the extra texture the shader needs.
        secondTexId = MyGLUtils.loadTexture(context, R.raw.tex00, new int[2]);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        int[] canvasSize = {canvasWidth, canvasHeight};
        int[] inputTextures = {cameraTexId, secondTexId};
        setupShaderInputs(shaderProgram, canvasSize, inputTextures, new int[][]{});
        // Draw a full-screen quad as a two-triangle strip.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/MirrorFilter.java
================================================
/*
* Copyright 2016 winston
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
 * Renders the camera preview through the "mirror" fragment shader.
 *
 * @author winston (1054669137@qq.com)
 */
public class MirrorFilter extends CameraFilter {
    // Handle of the linked GL shader program used for this filter.
    private final int shaderProgram;

    public MirrorFilter(Context context) {
        super(context);
        // Compile the shared vertex shader together with this filter's fragment shader.
        shaderProgram = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.mirror);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        int[] canvasSize = {canvasWidth, canvasHeight};
        int[] inputTextures = {cameraTexId};
        setupShaderInputs(shaderProgram, canvasSize, inputTextures, new int[][]{});
        // Draw a full-screen quad as a two-triangle strip.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/MoneyFilter.java
================================================
/*
* Copyright 2016 nekocode
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
 * Renders the camera preview through the "money_filter" fragment shader.
 *
 * @author nekocode (nekocode.cn@gmail.com)
 */
public class MoneyFilter extends CameraFilter {
    // Handle of the linked GL shader program used for this filter.
    private final int shaderProgram;

    public MoneyFilter(Context context) {
        super(context);
        // Compile the shared vertex shader together with this filter's fragment shader.
        shaderProgram = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.money_filter);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        int[] canvasSize = {canvasWidth, canvasHeight};
        int[] inputTextures = {cameraTexId};
        setupShaderInputs(shaderProgram, canvasSize, inputTextures, new int[][]{});
        // Draw a full-screen quad as a two-triangle strip.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/NegativeFilter.java
================================================
/*
* Copyright 2016 winston
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
 * Renders the camera preview through the "negative" fragment shader.
 *
 * @author winston (1054669137@qq.com)
 */
public class NegativeFilter extends CameraFilter {
    // Handle of the linked GL shader program used for this filter.
    private final int shaderProgram;

    public NegativeFilter(Context context) {
        super(context);
        // Compile the shared vertex shader together with this filter's fragment shader.
        shaderProgram = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.negative);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        int[] canvasSize = {canvasWidth, canvasHeight};
        int[] inputTextures = {cameraTexId};
        setupShaderInputs(shaderProgram, canvasSize, inputTextures, new int[][]{});
        // Draw a full-screen quad as a two-triangle strip.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/NoiseWarpFilter.java
================================================
/*
* Copyright 2016 nekocode
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
 * Renders the camera preview through the "noise_warp" fragment shader.
 *
 * @author nekocode (nekocode.cn@gmail.com)
 */
public class NoiseWarpFilter extends CameraFilter {
    // Handle of the linked GL shader program used for this filter.
    private final int shaderProgram;

    public NoiseWarpFilter(Context context) {
        super(context);
        // Compile the shared vertex shader together with this filter's fragment shader.
        shaderProgram = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.noise_warp);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        int[] canvasSize = {canvasWidth, canvasHeight};
        int[] inputTextures = {cameraTexId};
        setupShaderInputs(shaderProgram, canvasSize, inputTextures, new int[][]{});
        // Draw a full-screen quad as a two-triangle strip.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/NostalgiaFilter.java
================================================
/*
* Copyright 2016 winston
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
 * Renders the camera preview through the "nostalgia" fragment shader.
 *
 * @author winston (1054669137@qq.com)
 */
public class NostalgiaFilter extends CameraFilter {
    // Handle of the linked GL shader program used for this filter.
    private final int shaderProgram;

    public NostalgiaFilter(Context context) {
        super(context);
        // Compile the shared vertex shader together with this filter's fragment shader.
        shaderProgram = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.nostalgia);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        int[] canvasSize = {canvasWidth, canvasHeight};
        int[] inputTextures = {cameraTexId};
        setupShaderInputs(shaderProgram, canvasSize, inputTextures, new int[][]{});
        // Draw a full-screen quad as a two-triangle strip.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/OriginalFilter.java
================================================
/*
* Copyright 2016 nekocode
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
 * Renders the camera preview unmodified via the "original" fragment shader.
 *
 * @author nekocode (nekocode.cn@gmail.com)
 */
public class OriginalFilter extends CameraFilter {
    // Handle of the linked GL shader program used for this filter.
    private final int shaderProgram;

    public OriginalFilter(Context context) {
        super(context);
        // Compile the shared vertex shader together with this filter's fragment shader.
        shaderProgram = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.original);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        int[] canvasSize = {canvasWidth, canvasHeight};
        int[] inputTextures = {cameraTexId};
        setupShaderInputs(shaderProgram, canvasSize, inputTextures, new int[][]{});
        // Draw a full-screen quad as a two-triangle strip.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/PixelizeFilter.java
================================================
/*
* Copyright 2016 nekocode
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
 * Renders the camera preview through the "pixelize" fragment shader.
 *
 * @author nekocode (nekocode.cn@gmail.com)
 */
public class PixelizeFilter extends CameraFilter {
    // Handle of the linked GL shader program used for this filter.
    private final int shaderProgram;

    public PixelizeFilter(Context context) {
        super(context);
        // Compile the shared vertex shader together with this filter's fragment shader.
        shaderProgram = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.pixelize);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        int[] canvasSize = {canvasWidth, canvasHeight};
        int[] inputTextures = {cameraTexId};
        setupShaderInputs(shaderProgram, canvasSize, inputTextures, new int[][]{});
        // Draw a full-screen quad as a two-triangle strip.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/PolygonizationFilter.java
================================================
/*
* Copyright 2016 nekocode
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
 * Renders the camera preview through the "polygonization" fragment shader.
 *
 * @author nekocode (nekocode.cn@gmail.com)
 */
public class PolygonizationFilter extends CameraFilter {
    // Handle of the linked GL shader program used for this filter.
    private final int shaderProgram;

    public PolygonizationFilter(Context context) {
        super(context);
        // Compile the shared vertex shader together with this filter's fragment shader.
        shaderProgram = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.polygonization);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        int[] canvasSize = {canvasWidth, canvasHeight};
        int[] inputTextures = {cameraTexId};
        setupShaderInputs(shaderProgram, canvasSize, inputTextures, new int[][]{});
        // Draw a full-screen quad as a two-triangle strip.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/RefractionFilter.java
================================================
/*
* Copyright 2016 nekocode
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
 * Renders the camera preview through the "refraction" fragment shader, which
 * samples a secondary texture (tex11) in addition to the camera frame.
 *
 * @author nekocode (nekocode.cn@gmail.com)
 */
public class RefractionFilter extends CameraFilter {
    // Handle of the linked GL shader program used for this filter.
    private final int shaderProgram;
    // Secondary texture sampled by the refraction shader.
    private final int secondTexId;

    public RefractionFilter(Context context) {
        super(context);
        // Compile the shared vertex shader together with this filter's fragment shader.
        shaderProgram = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.refraction);
        // Load the extra texture the shader needs.
        secondTexId = MyGLUtils.loadTexture(context, R.raw.tex11, new int[2]);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        int[] canvasSize = {canvasWidth, canvasHeight};
        int[] inputTextures = {cameraTexId, secondTexId};
        setupShaderInputs(shaderProgram, canvasSize, inputTextures, new int[][]{});
        // Draw a full-screen quad as a two-triangle strip.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/ReliefFilter.java
================================================
/*
* Copyright 2016 winston
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
 * Renders the camera preview through the "relief" (emboss) fragment shader.
 *
 * @author winston (1054669137@qq.com)
 */
public class ReliefFilter extends CameraFilter {
    // Handle of the linked GL shader program used for this filter.
    private final int shaderProgram;

    public ReliefFilter(Context context) {
        super(context);
        // Compile the shared vertex shader together with this filter's fragment shader.
        shaderProgram = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.relief);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        int[] canvasSize = {canvasWidth, canvasHeight};
        int[] inputTextures = {cameraTexId};
        setupShaderInputs(shaderProgram, canvasSize, inputTextures, new int[][]{});
        // Draw a full-screen quad as a two-triangle strip.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/SwirlFilter.java
================================================
/*
* Copyright 2016 winston
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
 * Renders the camera preview through the "swirl" fragment shader.
 *
 * @author winston (1054669137@qq.com)
 */
public class SwirlFilter extends CameraFilter {
    // Handle of the linked GL shader program used for this filter.
    private final int shaderProgram;

    public SwirlFilter(Context context) {
        super(context);
        // Compile the shared vertex shader together with this filter's fragment shader.
        shaderProgram = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.swirl);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        int[] canvasSize = {canvasWidth, canvasHeight};
        int[] inputTextures = {cameraTexId};
        setupShaderInputs(shaderProgram, canvasSize, inputTextures, new int[][]{});
        // Draw a full-screen quad as a two-triangle strip.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/TileMosaicFilter.java
================================================
/*
* Copyright 2016 nekocode
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
 * Renders the camera preview through the "tile_mosaic" fragment shader.
 *
 * @author nekocode (nekocode.cn@gmail.com)
 */
public class TileMosaicFilter extends CameraFilter {
    // Handle of the linked GL shader program used for this filter.
    private final int shaderProgram;

    public TileMosaicFilter(Context context) {
        super(context);
        // Compile the shared vertex shader together with this filter's fragment shader.
        shaderProgram = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.tile_mosaic);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        int[] canvasSize = {canvasWidth, canvasHeight};
        int[] inputTextures = {cameraTexId};
        setupShaderInputs(shaderProgram, canvasSize, inputTextures, new int[][]{});
        // Draw a full-screen quad as a two-triangle strip.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/TrianglesMosaicFilter.java
================================================
/*
* Copyright 2016 nekocode
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
 * Renders the camera preview through the "triangles_mosaic" fragment shader.
 *
 * @author nekocode (nekocode.cn@gmail.com)
 */
public class TrianglesMosaicFilter extends CameraFilter {
    // Handle of the linked GL shader program used for this filter.
    private final int shaderProgram;

    public TrianglesMosaicFilter(Context context) {
        super(context);
        // Compile the shared vertex shader together with this filter's fragment shader.
        shaderProgram = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.triangles_mosaic);
    }

    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        int[] canvasSize = {canvasWidth, canvasHeight};
        int[] inputTextures = {cameraTexId};
        setupShaderInputs(shaderProgram, canvasSize, inputTextures, new int[][]{});
        // Draw a full-screen quad as a two-triangle strip.
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/TripleFilter.java
================================================
/*
* Copyright 2016 winston
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
* @author winston (1054669137@qq.com)
*/
public class TripleFilter extends CameraFilter {
    // Handle of the linked GLSL program (shared vertex shader + triple fragment shader).
    private int shaderProgram;

    public TripleFilter(Context context) {
        super(context);
        // Compile and link the shader pair once, at construction time.
        shaderProgram = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.triple);
    }

    /**
     * Renders one filtered frame: feeds the canvas size and the camera texture
     * to the shader, then draws a full-screen quad as a 4-vertex triangle strip.
     */
    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        int[] canvasSize = {canvasWidth, canvasHeight};
        int[] textures = {cameraTexId};
        setupShaderInputs(shaderProgram, canvasSize, textures, new int[][]{});
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/java/cn/nekocode/camerafilter/filter/WaterReflectionFilter.java
================================================
/*
* Copyright 2016 winston
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.nekocode.camerafilter.filter;
import android.content.Context;
import android.opengl.GLES20;
import cn.nekocode.camerafilter.MyGLUtils;
import cn.nekocode.camerafilter.R;
/**
* @author winston (1054669137@qq.com)
*/
public class WaterReflectionFilter extends CameraFilter {
    // Handle of the linked GLSL program (shared vertex shader + water-reflection fragment shader).
    private int shaderProgram;

    public WaterReflectionFilter(Context context) {
        super(context);
        // Compile and link the shader pair once, at construction time.
        shaderProgram = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.water_reflection);
    }

    /**
     * Renders one filtered frame: feeds the canvas size and the camera texture
     * to the shader, then draws a full-screen quad as a 4-vertex triangle strip.
     */
    @Override
    public void onDraw(int cameraTexId, int canvasWidth, int canvasHeight) {
        int[] canvasSize = {canvasWidth, canvasHeight};
        int[] textures = {cameraTexId};
        setupShaderInputs(shaderProgram, canvasSize, textures, new int[][]{});
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}
================================================
FILE: app/src/main/res/menu/filter.xml
================================================
================================================
FILE: app/src/main/res/raw/ascii_art.fsh
================================================
precision highp float;
uniform vec3 iResolution;
uniform float iGlobalTime;
uniform sampler2D iChannel0;
varying vec2 texCoord;
// referenced the method of bitmap of iq : https://www.shadertoy.com/view/4dfXWj
// ASCII-art filter: the screen is divided into 8x12-pixel character cells
// (scaled by `zoom`); the average brightness of each cell selects one of
// eight glyphs (blank . , - + * # @) drawn from an inline bitmap font.
#define r iResolution.xy
#define t iGlobalTime
#define zoom 2.
// P(id, a..h) encodes one glyph row (row `id`, top = 11): bits a..h are
// packed into an integer and, when pos.y == id, the bit at column pos.x is
// extracted into `cha` (1.0 = glyph pixel on, 0.0 = off).
#define P(id,a,b,c,d,e,f,g,h) if( id == int(pos.y) ){ int pa = a+2*(b+2*(c+2*(d+2*(e+2*(f+2*(g+2*(h))))))); cha = floor(mod(float(pa)/pow(2.,float(pos.x)-1.),2.)); }
// Brightness as the plain average of the three color channels.
float gray(vec3 _i)
{
return (_i.x+_i.y+_i.z)/3.;
}
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
// uv: top-left of this fragment's character cell, snapped to the cell grid.
vec2 uv = vec2(floor(fragCoord.x/8./zoom)*8.*zoom,floor(fragCoord.y/12./zoom)*12.*zoom)/r;
// pos: this fragment's pixel position inside its 8x12 cell.
ivec2 pos = ivec2(mod(fragCoord.x/zoom,8.),mod(fragCoord.y/zoom,12.));
vec4 tex = texture2D(iChannel0,uv);
float cha = 0.;
{
float g = gray(tex.xyz);
if( g < .125 ) // blank
{
P(11,0,0,0,0,0,0,0,0);
P(10,0,0,0,0,0,0,0,0);
P(9,0,0,0,0,0,0,0,0);
P(8,0,0,0,0,0,0,0,0);
P(7,0,0,0,0,0,0,0,0);
P(6,0,0,0,0,0,0,0,0);
P(5,0,0,0,0,0,0,0,0);
P(4,0,0,0,0,0,0,0,0);
P(3,0,0,0,0,0,0,0,0);
P(2,0,0,0,0,0,0,0,0);
P(1,0,0,0,0,0,0,0,0);
P(0,0,0,0,0,0,0,0,0);
}
else if( g < .25 ) // .
{
P(11,0,0,0,0,0,0,0,0);
P(10,0,0,0,0,0,0,0,0);
P(9,0,0,0,0,0,0,0,0);
P(8,0,0,0,0,0,0,0,0);
P(7,0,0,0,0,0,0,0,0);
P(6,0,0,0,0,0,0,0,0);
P(5,0,0,0,0,0,0,0,0);
P(4,0,0,0,1,1,0,0,0);
P(3,0,0,0,1,1,0,0,0);
P(2,0,0,0,0,0,0,0,0);
P(1,0,0,0,0,0,0,0,0);
P(0,0,0,0,0,0,0,0,0);
}
else if( g < .375 ) // ,
{
P(11,0,0,0,0,0,0,0,0);
P(10,0,0,0,0,0,0,0,0);
P(9,0,0,0,0,0,0,0,0);
P(8,0,0,0,0,0,0,0,0);
P(7,0,0,0,0,0,0,0,0);
P(6,0,0,0,0,0,0,0,0);
P(5,0,0,0,0,0,0,0,0);
P(4,0,0,0,1,1,0,0,0);
P(3,0,0,0,1,1,0,0,0);
P(2,0,0,0,0,1,0,0,0);
P(1,0,0,0,1,0,0,0,0);
P(0,0,0,0,0,0,0,0,0);
}
else if( g < .5 ) // -
{
P(11,0,0,0,0,0,0,0,0);
P(10,0,0,0,0,0,0,0,0);
P(9,0,0,0,0,0,0,0,0);
P(8,0,0,0,0,0,0,0,0);
P(7,0,0,0,0,0,0,0,0);
P(6,1,1,1,1,1,1,1,0);
P(5,0,0,0,0,0,0,0,0);
P(4,0,0,0,0,0,0,0,0);
P(3,0,0,0,0,0,0,0,0);
P(2,0,0,0,0,0,0,0,0);
P(1,0,0,0,0,0,0,0,0);
P(0,0,0,0,0,0,0,0,0);
}
else if(g < .625 ) // +
{
P(11,0,0,0,0,0,0,0,0);
P(10,0,0,0,0,0,0,0,0);
P(9,0,0,0,1,0,0,0,0);
P(8,0,0,0,1,0,0,0,0);
P(7,0,0,0,1,0,0,0,0);
P(6,1,1,1,1,1,1,1,0);
P(5,0,0,0,1,0,0,0,0);
P(4,0,0,0,1,0,0,0,0);
P(3,0,0,0,1,0,0,0,0);
P(2,0,0,0,0,0,0,0,0);
P(1,0,0,0,0,0,0,0,0);
P(0,0,0,0,0,0,0,0,0);
}
else if(g < .75 ) // *
{
P(11,0,0,0,0,0,0,0,0);
P(10,0,0,0,1,0,0,0,0);
P(9,1,0,0,1,0,0,1,0);
P(8,0,1,0,1,0,1,0,0);
P(7,0,0,1,1,1,0,0,0);
P(6,0,0,0,1,0,0,0,0);
P(5,0,0,1,1,1,0,0,0);
P(4,0,1,0,1,0,1,0,0);
P(3,1,0,0,1,0,0,1,0);
P(2,0,0,0,1,0,0,0,0);
P(1,0,0,0,0,0,0,0,0);
P(0,0,0,0,0,0,0,0,0);
}
else if(g < .875 ) // #
{
P(11,0,0,0,0,0,0,0,0);
P(10,0,0,1,0,0,1,0,0);
P(9,0,0,1,0,0,1,0,0);
P(8,1,1,1,1,1,1,1,0);
P(7,0,0,1,0,0,1,0,0);
P(6,0,0,1,0,0,1,0,0);
P(5,0,1,0,0,1,0,0,0);
P(4,0,1,0,0,1,0,0,0);
P(3,1,1,1,1,1,1,1,0);
P(2,0,1,0,0,1,0,0,0);
P(1,0,1,0,0,1,0,0,0);
P(0,0,0,0,0,0,0,0,0);
}
else // @
{
P(11,0,0,0,0,0,0,0,0);
P(10,0,0,1,1,1,1,0,0);
P(9,0,1,0,0,0,0,1,0);
P(8,1,0,0,0,1,1,1,0);
P(7,1,0,0,1,0,0,1,0);
P(6,1,0,0,1,0,0,1,0);
P(5,1,0,0,1,0,0,1,0);
P(4,1,0,0,1,0,0,1,0);
P(3,1,0,0,1,1,1,1,0);
P(2,0,1,0,0,0,0,0,0);
P(1,0,0,1,1,1,1,1,0);
P(0,0,0,0,0,0,0,0,0);
}
}
// White glyph pixels on black background.
vec3 col = vec3(1.);
fragColor = vec4(cha*col,1.);
}
void main() {
mainImage(gl_FragColor, texCoord * iResolution.xy);
}
================================================
FILE: app/src/main/res/raw/basic_deform.fsh
================================================
precision highp float;
uniform vec3 iResolution;
uniform float iGlobalTime;
uniform sampler2D iChannel0;
varying vec2 texCoord;

// Basic deformation: a horizontal sine-wave offset that scrolls vertically
// over time, producing a gentle wobble.
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
    float strength = 0.3; // wave intensity (original misspelled "stongth")
    vec2 uv = fragCoord.xy;
    float offsetX = sin((uv.y + iGlobalTime) * 20.0) * 0.5 * 0.05 * strength;
    fragColor = texture2D(iChannel0, uv + vec2(offsetX, 0));
}

void main() {
    mainImage(gl_FragColor, texCoord);
}
================================================
FILE: app/src/main/res/raw/black_and_white.fsh
================================================
precision highp float;
uniform vec3 iResolution;
uniform sampler2D iChannel0;
varying vec2 texCoord;

// Hard 1-bit threshold: average the RGB channels and snap to pure black or
// pure white at mid-grey.
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
    vec4 src = texture2D(iChannel0, fragCoord);
    float luma = (src.r + src.g + src.b) / 3.0;
    float bw = step(0.5, luma); // 0.0 below 0.5, 1.0 at or above
    fragColor = vec4(bw, bw, bw, 1.0);
}

void main() {
    mainImage(gl_FragColor, texCoord);
}
================================================
FILE: app/src/main/res/raw/blue_orange.fsh
================================================
precision mediump float;
uniform vec3 iResolution;
uniform float iGlobalTime;
uniform sampler2D iChannel0;
varying vec2 texCoord;

// Duotone grade: blends from a contrast-scaled blue toward a warm orange,
// with darker pixels pulled further toward the orange end.
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
    vec3 src = texture2D(iChannel0, fragCoord.xy).rgb;
    float shade = dot(src, vec3(0.333333)); // equal-weight brightness
    vec3 blueTone = vec3(0.1, 0.36, 0.8) * (1.0 - 2.0 * abs(shade - 0.5));
    vec3 orangeTone = vec3(1.06, 0.8, 0.55);
    fragColor = vec4(mix(blueTone, orangeTone, 1.0 - shade), 1.0);
}

void main() {
    mainImage(gl_FragColor, texCoord);
}
================================================
FILE: app/src/main/res/raw/cartoon.fsh
================================================
precision highp float;
uniform vec3 iResolution;
uniform sampler2D iChannel0;
varying vec2 texCoord;
// Cartoon-ish color remap: each output channel is an absolute mix of channel
// differences scaled by one input channel, exaggerating saturated regions.
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
vec4 color = texture2D(iChannel0, fragCoord);
float newR = abs(color.r + color.g * 2.0 - color.b) * color.r;
// NOTE(review): newG and newB reuse the same base expression
// (r + 2b - g) while newR uses (r + 2g - b); if a symmetric permutation
// was intended, newG may be a copy-paste slip — confirm against the
// intended visual effect before changing.
float newG = abs(color.r + color.b * 2.0 - color.g) * color.r;
float newB = abs(color.r + color.b * 2.0 - color.g) * color.g;
vec4 newColor = vec4(newR, newG, newB, 1.0);
fragColor = newColor;
}
void main() {
mainImage(gl_FragColor, texCoord);
}
================================================
FILE: app/src/main/res/raw/casting.fsh
================================================
precision highp float;
uniform vec3 iResolution;
uniform sampler2D iChannel0;
varying vec2 texCoord;

// "Casting" effect: boosts each channel relative to the sum of the other two
// for a metallic, over-saturated look. The +0.01 guards against division by
// zero on black pixels.
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
    vec4 src = texture2D(iChannel0, fragCoord);
    float outR = src.r * 0.5 / (src.g + src.b + 0.01);
    float outG = src.g * 0.5 / (src.r + src.b + 0.01);
    float outB = src.b * 0.5 / (src.r + src.g + 0.01);
    fragColor = vec4(outR, outG, outB, 1.0);
}

void main() {
    mainImage(gl_FragColor, texCoord);
}
================================================
FILE: app/src/main/res/raw/chromatic_aberration.fsh
================================================
precision mediump float;
uniform vec3 iResolution;
uniform float iGlobalTime;
uniform sampler2D iChannel0;
varying vec2 texCoord;

// Chromatic aberration: red and blue are sampled with opposite horizontal
// offsets whose magnitude pulses irregularly over time.
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
    vec2 uv = fragCoord.xy;

    // Stack several incommensurate sine waves so the shift amount flickers
    // rather than pulsing on a single frequency.
    float amount = (1.0 + sin(iGlobalTime * 6.0)) * 0.5;
    amount *= 1.0 + sin(iGlobalTime * 16.0) * 0.5;
    amount *= 1.0 + sin(iGlobalTime * 19.0) * 0.5;
    amount *= 1.0 + sin(iGlobalTime * 27.0) * 0.5;
    amount = pow(amount, 3.0);
    amount *= 0.05;

    vec3 col;
    col.r = texture2D(iChannel0, vec2(uv.x + amount, uv.y)).r;
    col.g = texture2D(iChannel0, uv).g;
    col.b = texture2D(iChannel0, vec2(uv.x - amount, uv.y)).b;

    // Darken slightly when the split is strong.
    fragColor = vec4(col * (1.0 - amount * 0.5), 1.0);
}

void main() {
    mainImage(gl_FragColor, texCoord);
}
================================================
FILE: app/src/main/res/raw/contrast.fsh
================================================
precision highp float;
uniform vec3 iResolution;
uniform float iGlobalTime;
uniform sampler2D iChannel0;
varying vec2 texCoord;

// Linearly map value from [inputMin, inputMax] onto [outputMin, outputMax].
float remap(float value, float inputMin, float inputMax, float outputMin, float outputMax)
{
    float scale = (outputMax - outputMin) / (inputMax - inputMin);
    return (value - inputMin) * scale + outputMin;
}

// Contrast filter: scales color distances from mid-grey by a fixed factor.
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
    // 1.0 is the top of the normalized range and maps to the maximum
    // contrast factor of 4.0 (minimum would be 0.2).
    float normalizedContrast = 1.0;
    float contrast = remap(normalizedContrast, 0.0, 1.0, 0.2, 4.0);

    vec4 srcColor = texture2D(iChannel0, fragCoord.xy);
    vec4 dstColor = vec4((srcColor.rgb - vec3(0.5)) * contrast + vec3(0.5), 1.0);
    fragColor = clamp(dstColor, 0.0, 1.0); // keep result displayable
}

void main() {
    mainImage(gl_FragColor, texCoord);
}
================================================
FILE: app/src/main/res/raw/cracked.fsh
================================================
precision highp float;
uniform vec3 iResolution;
uniform float iGlobalTime;
uniform sampler2D iChannel0;
varying vec2 texCoord;
float rnd(vec2 s)
{
return 1.-2.*fract(sin(s.x*253.13+s.y*341.41)*589.19);
}
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
vec2 p=(fragCoord.xy*2.-iResolution.xy)/iResolution.x;
vec2 v=vec2(1E3);
vec2 v2=vec2(1E4);
vec2 center=vec2(.1,-.5);
for(int c=0;c<30;c++)
{
float angle=floor(rnd(vec2(float(c),387.44))*16.)*3.1415*.4-.5;
float dist=pow(rnd(vec2(float(c),78.21)),2.)*.5;
vec2 vc=vec2(center.x+cos(angle)*dist+rnd(vec2(float(c),349.3))*7E-3,
center.y+sin(angle)*dist+rnd(vec2(float(c),912.7))*7E-3);
if(length(vc-p) 0.1 )
tex = tex * ( 1.0 / brightestChannel );
else
tex.rgb = vec3(1.0,1.0,1.0);
#endif // COLOUR_HATCHES
if (brightness < hatch_1)
{
if (mod(fragCoord.x + fragCoord.y, density) <= width)
{
#ifdef COLOUR_HATCHES
res = vec3(tex.rgb * hatch_1_brightness);
#else
res = vec3(hatch_1_brightness);
#endif
}
}
if (brightness < hatch_2)
{
if (mod(fragCoord.x - fragCoord.y, density) <= width)
{
#ifdef COLOUR_HATCHES
res = vec3(tex.rgb * hatch_2_brightness);
#else
res = vec3(hatch_2_brightness);
#endif
}
}
if (brightness < hatch_3)
{
if (mod(fragCoord.x + fragCoord.y - (density*0.5), density) <= width)
{
#ifdef COLOUR_HATCHES
res = vec3(tex.rgb * hatch_3_brightness);
#else
res = vec3(hatch_3_brightness);
#endif
}
}
if (brightness < hatch_4)
{
if (mod(fragCoord.x - fragCoord.y - (density*0.5), density) <= width)
{
#ifdef COLOUR_HATCHES
res = vec3(tex.rgb * hatch_4_brightness);
#else
res = vec3(hatch_4_brightness);
#endif
}
}
vec2 p = fragCoord.xy;
// simple sobel edge detection,
// borrowed and tweaked from jmk's "edge glow" filter, here:
// https://www.shadertoy.com/view/Mdf3zr
float gx = 0.0;
gx += -1.0 * lookup(p, -1.0, -1.0);
gx += -2.0 * lookup(p, -1.0, 0.0);
gx += -1.0 * lookup(p, -1.0, 1.0);
gx += 1.0 * lookup(p, 1.0, -1.0);
gx += 2.0 * lookup(p, 1.0, 0.0);
gx += 1.0 * lookup(p, 1.0, 1.0);
float gy = 0.0;
gy += -1.0 * lookup(p, -1.0, -1.0);
gy += -2.0 * lookup(p, 0.0, -1.0);
gy += -1.0 * lookup(p, 1.0, -1.0);
gy += 1.0 * lookup(p, -1.0, 1.0);
gy += 2.0 * lookup(p, 0.0, 1.0);
gy += 1.0 * lookup(p, 1.0, 1.0);
// hack: use g^2 to conceal noise in the video
float g = gx*gx + gy*gy;
res *= (1.0-g);
fragColor = vec4(res, 1.0);
}
void main() {
mainImage(gl_FragColor, texCoord * iResolution.xy);
}
================================================
FILE: app/src/main/res/raw/edge_detection.fsh
================================================
#extension GL_OES_standard_derivatives : enable
precision mediump float;
uniform vec3 iResolution;
uniform sampler2D iChannel0;
varying vec2 texCoord;

// Edge detection using screen-space derivatives: thresholding the gradient
// magnitude of the brightness gives white edges on a black background.
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
    vec2 uv = fragCoord.xy;
    vec4 src = texture2D(iChannel0, fragCoord);
    float gray = length(src.rgb);
    vec2 grad = vec2(dFdx(gray), dFdy(gray));
    float edge = step(0.06, length(grad));
    fragColor = vec4(vec3(edge), 1.0);
}

void main() {
    mainImage(gl_FragColor, texCoord);
}
================================================
FILE: app/src/main/res/raw/em_interference.fsh
================================================
precision highp float;
uniform vec3 iResolution;
uniform float iGlobalTime;
uniform sampler2D iChannel0;
varying vec2 texCoord;
// EM-interference effect: per-pixel color noise plus horizontal line
// displacement organized in blocky bands, re-randomized 12 times per second.
// Pseudo-random hash in [0,1); quantizing time to 12 steps/sec keeps the
// pattern stable between updates.
float rng2(vec2 seed)
{
return fract(sin(dot(seed * floor(iGlobalTime * 12.), vec2(127.1,311.7))) * 43758.5453123);
}
// Scalar convenience wrapper around rng2.
float rng(float seed)
{
return rng2(vec2(seed, 1.0));
}
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
vec2 uv = fragCoord.xy;
// Two band grids: small (24x9) and large (8x4) blocks for the line noise.
vec2 blockS = floor(uv * vec2(24., 9.));
vec2 blockL = floor(uv * vec2(8., 4.));
float r = rng2(uv);
// NOTE(review): `* 1.0 - 2.0` subtracts 2 from every component before the
// 0.08 scale, biasing the noise negative; `* 2.0 - 1.0` (signed noise) may
// have been intended — confirm against the desired look before changing.
vec3 noise = (vec3(r, 1. - r, r / 2. + 0.5) * 1.0 - 2.0) * 0.08;
// High powers make strong displacement rare; the rng(7.2341) term
// occasionally pushes the whole band the other way.
float lineNoise = pow(rng2(blockS), 8.0) * pow(rng2(blockL), 3.0) - pow(rng(7.2341), 17.0) * 2.;
// Sample each channel with a slightly different horizontal shift.
vec4 col1 = texture2D(iChannel0, uv);
vec4 col2 = texture2D(iChannel0, uv + vec2(lineNoise * 0.05 * rng(5.0), 0));
vec4 col3 = texture2D(iChannel0, uv - vec2(lineNoise * 0.05 * rng(31.0), 0));
fragColor = vec4(vec3(col1.x, col2.y, col3.z) + noise, 1.0);
}
void main() {
mainImage(gl_FragColor, texCoord);
}
================================================
FILE: app/src/main/res/raw/gray.fsh
================================================
precision highp float;
uniform vec3 iResolution;
uniform sampler2D iChannel0;
varying vec2 texCoord;

// Greyscale filter: replaces each pixel with the plain average of its RGB
// channels.
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
    vec4 src = texture2D(iChannel0, fragCoord);
    float luma = (src.r + src.g + src.b) / 3.0;
    fragColor = vec4(luma, luma, luma, 1.0);
}

void main() {
    mainImage(gl_FragColor, texCoord);
}
================================================
FILE: app/src/main/res/raw/hexagon_mosaic.fsh
================================================
precision highp float;
uniform vec3 iResolution;
uniform sampler2D iChannel0;
varying vec2 texCoord;

// Edge length (in texture coordinates) of the hexagon cells.
const float mosaicSize = 0.03;

// Hexagon mosaic: every fragment samples the texture at the nearest hexagon
// centre, so each hexagonal cell is filled with a single color.
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
    float hexSize = mosaicSize; // renamed from `length` to avoid shadowing the built-in
    float TR = 0.866025;        // sqrt(3)/2, vertical spacing factor
    float x = texCoord.x;
    float y = texCoord.y;

    // Index of the rectangular lattice cell containing this fragment.
    int wx = int(x / 1.5 / hexSize);
    int wy = int(y / TR / hexSize);

    // Integer-division trick for parity (GLSL ES 1.00 has no % on ints).
    bool evenX = (wx / 2 * 2 == wx);
    bool evenY = (wy / 2 * 2 == wy);

    // The two candidate centres sit on opposite corners of the cell; which
    // diagonal is used alternates in a checkerboard pattern.
    vec2 v1, v2;
    if (evenX == evenY) {
        // corners (0,0) and (1,1)
        v1 = vec2(hexSize * 1.5 * float(wx), hexSize * TR * float(wy));
        v2 = vec2(hexSize * 1.5 * float(wx + 1), hexSize * TR * float(wy + 1));
    } else {
        // corners (0,1) and (1,0)
        v1 = vec2(hexSize * 1.5 * float(wx), hexSize * TR * float(wy + 1));
        v2 = vec2(hexSize * 1.5 * float(wx + 1), hexSize * TR * float(wy));
    }

    // Pick whichever candidate centre is closer to this fragment.
    float s1 = sqrt(pow(v1.x - x, 2.0) + pow(v1.y - y, 2.0));
    float s2 = sqrt(pow(v2.x - x, 2.0) + pow(v2.y - y, 2.0));
    vec2 vn = (s1 < s2) ? v1 : v2;

    fragColor = texture2D(iChannel0, vn);
}

void main () {
    mainImage(gl_FragColor, texCoord);
}
================================================
FILE: app/src/main/res/raw/legofied.fsh
================================================
precision highp float;
uniform vec3 iResolution;
uniform sampler2D iChannel0;
varying vec2 texCoord;

// Number of lego blocks across the screen is c * iResolution.x.
float c = 0.02;

// Legofied filter: pixelate into square blocks, then paint a circular "stud"
// highlight and a darkened edge on each block.
void mainImage( out vec4 fragColor, in vec2 fragCoord ){
    // Centre of the block containing this fragment, in pixel coordinates.
    vec2 middle = floor(fragCoord*c+.5)/c;
    vec3 color = texture2D(iChannel0, middle/iResolution.xy).rgb;

    // Stud: a thin ring around the block centre, shaded as if lit from the
    // upper-right (the (0.707, 0.707) direction).
    float dis = distance(fragCoord,middle)*c*2.;
    if(dis<.65&&dis>.55){
        color *= dot(vec2(0.707),normalize(fragCoord-middle))*.5+1.;
    }

    // Side shadow: darken a thin border along the block's outer edge.
    vec2 delta = abs(fragCoord-middle)*c*2.;
    if(max(delta.x,delta.y)>.9){
        color *= .8;
    }

    fragColor = vec4(color,1.0);
}

void main() {
    mainImage(gl_FragColor, texCoord*iResolution.xy);
}
================================================
FILE: app/src/main/res/raw/lichtenstein_esque.fsh
================================================
precision highp float;
uniform vec3 iResolution;
uniform float iGlobalTime;
uniform sampler2D iChannel0;
varying vec2 texCoord;

// Size of the quad in pixels
const float size = 15.0;
// Radius of the circle
const float radius = size * 0.5;

// Lichtenstein-esque halftone: the screen is tiled into quads; each quad
// shows a colored dot (sampled at the quad) on a dark grey background.
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
    // Top-left corner of this fragment's quad, in pixels.
    vec2 quadPos = floor(fragCoord.xy / size) * size;
    // Normalized position used to sample the quad's color.
    vec2 quad = quadPos / iResolution.xy;
    // Distance from the quad's centre decides dot vs. background.
    vec2 quadCenter = quadPos + size / 2.0;
    float dist = length(quadCenter - fragCoord.xy);

    vec4 texel = texture2D(iChannel0, quad);
    fragColor = (dist > radius) ? vec4(0.25) : texel;
}

void main() {
    mainImage(gl_FragColor, texCoord*iResolution.xy);
}
================================================
FILE: app/src/main/res/raw/mapping.fsh
================================================
precision highp float;
uniform vec3 iResolution;
uniform float iGlobalTime;
uniform sampler2D iChannel0;
uniform sampler2D iChannel1;
varying vec2 texCoord;
// Displacement-mapping effect: iChannel0 drives a per-pixel offset used to
// warp the lookup into iChannel1, and its red channel also scales the output.
// NOTE(review): the driver is named "sound" (the original Shadertoy likely
// bound an audio texture here); confirm which textures the host renderer
// actually supplies to iChannel0/iChannel1.
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
vec2 pos = fragCoord.xy;
vec2 uv2 = vec2( fragCoord.xy / iResolution.xy );
vec4 sound = texture2D( iChannel0, uv2 );
// Shift the sample position by up to 150 px using the red/blue channels.
pos.x = pos.x + 150.0 * sound.r;
pos.y = pos.y + 150.0 * sound.b;
vec2 uv = pos / iResolution.xy;
vec4 col = texture2D( iChannel1, uv );
// Slowly animate alpha with a sine of the red/green difference.
col.a += 1.0 - sin( col.x - col.y + iGlobalTime * 0.1 );
fragColor = col * sound.r;
}
void main() {
mainImage(gl_FragColor, texCoord * iResolution.xy);
}
================================================
FILE: app/src/main/res/raw/mirror.fsh
================================================
precision highp float;
uniform vec3 iResolution;
uniform sampler2D iChannel0;
varying vec2 texCoord;

// Mirror filter: flips the image horizontally, then folds the two halves
// onto each other so left and right show the same mirrored half.
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
    float flippedX = 1.0 - fragCoord.x;
    // abs(flippedX - 0.5) equals (flippedX - 0.5) on the right half and
    // (0.5 - flippedX) on the left half — the same fold as the original
    // two-branch version, in one expression.
    fragColor = texture2D(iChannel0, vec2(abs(flippedX - 0.5), fragCoord.y));
}

void main() {
    mainImage(gl_FragColor, texCoord);
}
================================================
FILE: app/src/main/res/raw/money_filter.fsh
================================================
precision highp float;
uniform vec3 iResolution;
uniform float iGlobalTime;
uniform sampler2D iChannel0;
varying vec2 texCoord;
// Money filter by Giacomo Preciado
// Based on: "Free Engraved Illustration Effect Action for Photoshop" - http://snip.ly/j0gq
// e-mail: giacomo@kyrie.pe
// website: http://kyrie.pe
// Engraved-banknote look: overlays several rotated sine-wave line patterns;
// lines are drawn (darkened) only where the underlying image is dark enough,
// producing the layered hatching of an engraving.
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
vec2 xy = fragCoord.xy / iResolution.yy;
float amplitud = 0.03;
float frecuencia = 10.0;
float gris = 1.0;
float divisor = 8.0 / iResolution.y;
float grosorInicial = divisor * 0.2;
const int kNumPatrones = 6;
// x: sine of the angle, y: cosine of the angle, z: smoothing factor
vec3 datosPatron[kNumPatrones];
datosPatron[0] = vec3(-0.7071, 0.7071, 3.0); // -45
datosPatron[1] = vec3(0.0, 1.0, 0.6); // 0
datosPatron[2] = vec3(0.0, 1.0, 0.5); // 0
datosPatron[3] = vec3(1.0, 0.0, 0.4); // 90
datosPatron[4] = vec3(1.0, 0.0, 0.3); // 90
datosPatron[5] = vec3(0.0, 1.0, 0.2); // 0
vec4 color = texture2D(iChannel0, vec2(fragCoord.x / iResolution.x, xy.y));
fragColor = color;
for(int i = 0; i < kNumPatrones; i++)
{
// NOTE(review): the table comment says .x is the sine and .y the cosine,
// but the code reads `coseno` from .x and `seno` from .y — the names are
// swapped relative to the comment. Confirm against the upstream shader
// before renaming; the rendered pattern depends on this rotation.
float coseno = datosPatron[i].x;
float seno = datosPatron[i].y;
// Rotate the coordinate frame for this pattern's line direction.
vec2 punto = vec2(
xy.x * coseno - xy.y * seno,
xy.x * seno + xy.y * coseno
);
// Lines get thicker for later patterns.
float grosor = grosorInicial * float(i + 1);
float dist = mod(punto.y + grosor * 0.5 - sin(punto.x * frecuencia) * amplitud, divisor);
float brillo = 0.3 * color.r + 0.4 * color.g + 0.3 * color.b;
// Draw this pattern's line only where the image is dark enough; each
// successive pattern requires a darker pixel.
if(dist < grosor && brillo < 0.75 - 0.12 * float(i))
{
// Smoothing
float k = datosPatron[i].z;
float x = (grosor - dist) / grosor;
float fx = abs((x - 0.5) / k) - (0.5 - k) / k;
gris = min(fx, gris);
}
}
fragColor = vec4(gris, gris, gris, 1.0);
}
void main() {
mainImage(gl_FragColor, texCoord*iResolution.xy);
}
================================================
FILE: app/src/main/res/raw/negative.fsh
================================================
precision highp float;
uniform vec3 iResolution;
uniform sampler2D iChannel0;
varying vec2 texCoord;

// Photographic-negative filter: inverts each RGB channel of the input.
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
    vec4 mask = texture2D(iChannel0, fragCoord);
    // Fix: the blue component previously reused the red channel
    // (1.0 - mask.r), tinting the negative instead of fully inverting it.
    fragColor = vec4(1.0 - mask.r, 1.0 - mask.g, 1.0 - mask.b, 1.0);
}

void main() {
    mainImage(gl_FragColor, texCoord);
}
================================================
FILE: app/src/main/res/raw/noise_warp.fsh
================================================
precision highp float;
uniform vec3 iResolution;
uniform float iGlobalTime;
uniform sampler2D iChannel0;
varying vec2 texCoord;
// Noise-warp filter: offsets each texture lookup by two channels of 3D value
// noise, producing a wavy "heat haze" distortion.
// Wrap x into [0, 289) — companion to the permutation polynomial below.
float mod289(float x)
{
return x - floor(x * (1.0 / 289.0)) * 289.0;
}
vec4 mod289(vec4 x)
{
return x - floor(x * (1.0 / 289.0)) * 289.0;
}
// Permutation polynomial (34x + 1)x mod 289, used as a cheap hash.
vec4 perm(vec4 x)
{
return mod289(((x * 34.0) + 1.0) * x);
}
// 3D value noise: hash the 8 corners of the lattice cell containing p and
// interpolate with smoothstep-shaped weights.
float noise3d(vec3 p)
{
vec3 a = floor(p);
vec3 d = p - a;
d = d * d * (3.0 - 2.0 * d); // smoothstep weights
vec4 b = a.xxyy + vec4(0.0, 1.0, 0.0, 1.0);
vec4 k1 = perm(b.xyxy);
vec4 k2 = perm(k1.xyxy + b.zzww);
vec4 c = k2 + a.zzzz;
vec4 k3 = perm(c);
vec4 k4 = perm(c + 1.0);
vec4 o1 = fract(k3 * (1.0 / 41.0));
vec4 o2 = fract(k4 * (1.0 / 41.0));
// Interpolate along z, then x, then y.
vec4 o3 = o2 * d.z + o1 * (1.0 - d.z);
vec2 o4 = o3.yw * d.x + o3.xz * (1.0 - d.x);
return o4.y * d.y + o4.x * (1.0 - d.y);
}
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
vec2 uv = fragCoord.xy;
// Two independent noise fields (z = 0 and z = 1) drive the x/y offsets.
float v1 = noise3d(vec3(uv * 10.0, 0.0));
float v2 = noise3d(vec3(uv * 10.0, 1.0));
vec4 color = texture2D(iChannel0, uv + vec2(v1, v2) * 0.1);
fragColor = color;
}
void main() {
mainImage(gl_FragColor, texCoord);
}
================================================
FILE: app/src/main/res/raw/nostalgia.fsh
================================================
precision highp float;
uniform vec3 iResolution;
uniform sampler2D iChannel0;
varying vec2 texCoord;

// Nostalgia (sepia) filter: each output channel is a fixed weighted
// combination of the input RGB channels.
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
    vec3 src = texture2D(iChannel0, fragCoord).rgb;
    vec3 sepia = vec3(
        dot(src, vec3(0.393, 0.769, 0.189)),
        dot(src, vec3(0.349, 0.686, 0.168)),
        dot(src, vec3(0.272, 0.534, 0.131)));
    fragColor = vec4(sepia, 1.0);
}

void main() {
    mainImage(gl_FragColor, texCoord);
}
================================================
FILE: app/src/main/res/raw/original.fsh
================================================
precision mediump float;
varying vec2 texCoord;
uniform sampler2D iChannel0;
// Pass-through filter: outputs the input texture unchanged.
void main() {
gl_FragColor = texture2D(iChannel0, texCoord);
}
================================================
FILE: app/src/main/res/raw/original_rtt.fsh
================================================
#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 texCoord;
// iChannel0 is an Android external (OES) texture — i.e. the camera's
// SurfaceTexture sampled directly. Presumably this pass copies the camera
// frame into a regular GL texture for the other filters ("rtt" =
// render-to-texture) — confirm against the renderer.
uniform samplerExternalOES iChannel0;
void main() {
gl_FragColor = texture2D(iChannel0, texCoord);
}
================================================
FILE: app/src/main/res/raw/pixelize.fsh
================================================
precision mediump float;
uniform vec3 iResolution;
uniform sampler2D iChannel0;
varying vec2 texCoord;
// Pixelize filter: snaps every pixel lookup to the origin of its square
// cell, giving S x S blocks of flat color.
#define S (iResolution.x / 6e1) // The cell size.
void mainImage(out vec4 c, vec2 p)
{
c = texture2D(iChannel0, floor((p + .5) / S) * S / iResolution.xy);
}
void main() {
mainImage(gl_FragColor, texCoord*iResolution.xy);
}
================================================
FILE: app/src/main/res/raw/polygonization.fsh
================================================
precision highp float;
uniform vec3 iResolution;
uniform float iGlobalTime;
uniform sampler2D iChannel0;
varying vec2 texCoord;
vec2 hash2( vec2 p )
{
// procedural white noise
return fract(sin(vec2(dot(p,vec2(127.1,311.7)),dot(p,vec2(269.5,183.3))))*43758.5453);
}
vec2 voronoi( in vec2 x )
{
vec2 n = floor(x);
vec2 f = fract(x);
//----------------------------------
// regular voronoi
//----------------------------------
vec2 mg, mr;
float md = 8.0;
for( int j=-1; j<=1; j++ )
for( int i=-1; i<=1; i++ )
{
vec2 g = vec2(float(i),float(j));
vec2 o = hash2( n + g );
vec2 r = g + o - f;
float d = dot(r,r);
if( d 0.333 && fragCoord.y<= 0.666){
fragColor = texture2D(iChannel0, fragCoord);
}else{
fragColor = texture2D(iChannel0, vec2(fragCoord.x,fragCoord.y - 0.333));
}
}
void main() {
mainImage(gl_FragColor, texCoord);
}
================================================
FILE: app/src/main/res/raw/vertext.vsh
================================================
// Shared vertex shader for all filters: passes 2D quad positions straight
// through and forwards texture coordinates to the fragment shader.
attribute vec2 vPosition;
attribute vec2 vTexCoord;
varying vec2 texCoord;
void main() {
texCoord = vTexCoord;
gl_Position = vec4 ( vPosition.x, vPosition.y, 0.0, 1.0 );
}
================================================
FILE: app/src/main/res/raw/voronoi.fsh
================================================
precision highp float;
uniform int iFrame;
uniform vec3 iResolution;
uniform vec3 iChannelResolution[2];
uniform float iGlobalTime;
uniform sampler2D iChannel0;
uniform sampler2D iChannel1;
varying vec2 texCoord;
// Final compositing pass of the JFA Voronoi filter: iChannel0 holds the
// Voronoi cell map (seed uv in .xy), iChannel1 the video frame.
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
vec2 uv = fragCoord.xy / iResolution.xy;
#if 0 // debug feature extraction
fragColor = texture2D(iChannel1, uv).wwww;
#else
vec4 cell = texture2D(iChannel0, uv);
vec2 cell_uv = cell.xy;
// Fill the whole cell with the video color at its seed point.
vec4 video = texture2D(iChannel1, cell_uv);
// Pixel-space distance from this fragment to its cell's seed.
vec2 dcell = cell_uv * iChannelResolution[0].xy - fragCoord.xy;
float len = length(dcell);
// Brighten slightly with distance from the seed for a faceted look.
vec3 color = video.xyz * (.9 + len*.005);
fragColor = vec4(color, 1.);
#endif
}
void main() {
mainImage(gl_FragColor, texCoord*iResolution.xy);
}
================================================
FILE: app/src/main/res/raw/voronoi_buf_a.fsh
================================================
precision highp float;
uniform int iFrame;
uniform vec3 iResolution;
uniform vec3 iChannelResolution[2];
uniform float iGlobalTime;
uniform sampler2D iChannel0;
uniform sampler2D iChannel1;
varying vec2 texCoord;
// A super simple video source with feature detection
// Luma as fixed RGB weights.
float grayScale(vec4 c) { return c.x*.29 + c.y*.58 + c.z*.13; }
//============================================================
// Marks local-brightness-maxima pixels as Voronoi seeds: .w is set to 1 when
// this pixel is brighter (with a 1% margin) than all four neighbors, and
// decayed seeds from the previous frame (iChannel1) are partially kept.
vec4 GenerateSeed (in vec2 fragCoord)
{
vec2 uv = fragCoord / iResolution.xy;
// One-texel offsets in x (xz) and y (zy) for the neighbor lookups.
vec3 dataStep = vec3( vec2(1.) / iChannelResolution[0].xy, 0.);
vec4 fragColor = texture2D( iChannel0, uv );
float d = grayScale(fragColor);
float dL = grayScale(texture2D( iChannel0, uv - dataStep.xz ));
float dR = grayScale(texture2D( iChannel0, uv + dataStep.xz ));
float dU = grayScale(texture2D( iChannel0, uv - dataStep.zy ));
float dD = grayScale(texture2D( iChannel0, uv + dataStep.zy ));
float w = float( d*0.99 > max(max(dL, dR), max(dU, dD)) );
w = max(w, texture2D( iChannel1, uv ).w*.9); // get some from previous frame
fragColor.w = w;
return fragColor;
}
//============================================================
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
fragColor = GenerateSeed(fragCoord);
}
void main() {
mainImage(gl_FragColor, texCoord*iResolution.xy);
}
================================================
FILE: app/src/main/res/raw/voronoi_buf_b.fsh
================================================
precision highp float;
uniform int iFrame;
uniform vec3 iResolution;
uniform vec3 iChannelResolution[2];
uniform float iGlobalTime;
uniform sampler2D iChannel0;
uniform sampler2D iChannel1;
varying vec2 texCoord;
// Jump Flooding Algorithm (JFA) pass: each frame performs one flooding step;
// a full Voronoi diagram completes every c_maxSteps frames.
// how many JFA steps to do. 2^c_maxSteps is max image size on x and y
const float c_maxSteps = 10.0;
//============================================================
// One JFA step: examine the 3x3 neighborhood at the current step width and
// keep the nearest seed coordinate seen so far (stored normalized in .xy).
vec4 StepJFA (in vec2 fragCoord, in float level)
{
// Step width halves each level: 2^(c_maxSteps - 1 - level) pixels.
float stepwidth = floor(exp2(c_maxSteps - 1. - level)+0.5);
float bestDistance = 9999.0;
vec2 bestCoord = vec2(0.0);
for (int y = -1; y <= 1; ++y) {
for (int x = -1; x <= 1; ++x) {
vec2 sampleCoord = fragCoord + vec2(x,y) * stepwidth;
vec4 data = texture2D( iChannel0, sampleCoord / iChannelResolution[0].xy);
vec2 seedCoord = data.xy * iChannelResolution[0].xy;
float dist = length(seedCoord - fragCoord);
// (0,0) marks "no seed known yet" and is skipped.
if ((seedCoord.x != 0.0 || seedCoord.y != 0.0) && dist < bestDistance)
{
bestDistance = dist;
bestCoord = seedCoord;
}
}
}
return vec4(bestCoord / iChannelResolution[0].xy, 0.0, 0.0);
}
//============================================================
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
float fFrame = float(iFrame);
float level = mod(fFrame,c_maxSteps);
// Level 0 (re)initializes: seed pixels (iChannel1 .w > .5) store their own
// normalized coordinate, everything else is cleared.
if (level < .5) {
if (texture2D(iChannel1, fragCoord / iResolution.xy).w > .5)
fragColor = vec4(fragCoord / iChannelResolution[0].xy, 0.0, 0.0);
else
fragColor = vec4(0.0);
return;
}
fragColor = StepJFA(fragCoord, level);
}
void main() {
mainImage(gl_FragColor, texCoord*iResolution.xy);
}
================================================
FILE: app/src/main/res/raw/voronoi_buf_c.fsh
================================================
precision highp float;
uniform int iFrame;
uniform vec3 iResolution;
uniform vec3 iChannelResolution[2];
uniform float iGlobalTime;
uniform sampler2D iChannel0;
uniform sampler2D iChannel1;
varying vec2 texCoord;
// A secondary buffer to get clean Voronoi every N-th frame
// this must be in sync with JFA algorithm constant
const float c_maxSteps = 10.0;
// Latches the JFA result (iChannel1) only on the frame the flooding pass
// completes; otherwise carries the previous latched value (iChannel0).
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
vec2 uv = fragCoord.xy / iResolution.xy;
if (mod(float(iFrame+1),c_maxSteps) < .5) {
fragColor = texture2D(iChannel1, uv); // update to new voronoi cell
} else {
fragColor = texture2D(iChannel0, uv); // no change
}
}
void main() {
mainImage(gl_FragColor, texCoord*iResolution.xy);
}
================================================
FILE: app/src/main/res/raw/water_reflection.fsh
================================================
precision highp float;
uniform vec3 iResolution;
uniform sampler2D iChannel0;
varying vec2 texCoord;

float waterLevel = 0.5;      // y below which the image is mirrored
float waveAmplitude = 0.01;  // strength of the horizontal ripple

// Water reflection: the region below waterLevel shows a vertically mirrored
// copy of the image, jittered horizontally by a hash-based ripple.
void mainImage( out vec4 fragColor, in vec2 fragCoord )
{
    if (fragCoord.y < waterLevel) {
        float ripple = fract(sin(dot(fragCoord.xy ,vec2(12.9898,78.233))) * 43758.5453) * waveAmplitude;
        vec2 mirrored = vec2(fragCoord.x + ripple, 2.0 * waterLevel - fragCoord.y);
        fragColor = texture2D(iChannel0, mirrored);
    } else {
        fragColor = texture2D(iChannel0, fragCoord);
    }
}

void main() {
    mainImage(gl_FragColor, texCoord);
}
================================================
FILE: app/src/main/res/values/colors.xml
================================================
<?xml version="1.0" encoding="utf-8"?>
<resources>
    <!-- NOTE(review): markup was stripped during extraction; resource names
         below are reconstructed from the standard template and should be
         verified against the original file. -->
    <color name="colorPrimary">#646464</color>
    <color name="colorPrimaryDark">#646464</color>
    <color name="colorAccent">#FF4081</color>
</resources>
================================================
FILE: app/src/main/res/values/strings.xml
================================================
<resources>
    <!-- NOTE(review): markup was stripped during extraction; the resource
         name is reconstructed from the standard template — verify. -->
    <string name="app_name">CameraFilter</string>
</resources>
================================================
FILE: app/src/main/res/values/styles.xml
================================================
================================================
FILE: build.gradle
================================================
// Repositories and classpath for the Android Gradle plugin itself.
buildscript {
    repositories {
        // google() hosts AGP artifacts and should be consulted first.
        google()
        // mavenCentral() added because JCenter was sunset (read-only
        // since 2021); jcenter() kept last for any legacy artifacts.
        mavenCentral()
        jcenter()
    }
    dependencies {
        classpath 'com.android.tools.build:gradle:3.4.0'
    }
}
// Dependency repositories shared by every module in the build.
allprojects {
    repositories {
        // google() first for AndroidX/support artifacts; mavenCentral()
        // added because JCenter was sunset (read-only since 2021).
        google()
        mavenCentral()
        jcenter()
    }
}
// Deletes the root project's build directory when `gradlew clean`
// is invoked from the top level.
task clean(type: Delete) {
delete rootProject.buildDir
}
================================================
FILE: gradle/wrapper/gradle-wrapper.properties
================================================
#Sat Jun 01 14:43:03 CST 2019
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-5.1.1-all.zip
================================================
FILE: gradle.properties
================================================
## Project-wide Gradle settings.
#
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
#
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
# Default value: -Xmx10248m -XX:MaxPermSize=256m
# org.gradle.jvmargs=-Xmx2048m -XX:MaxPermSize=512m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8
#
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
================================================
FILE: gradlew
================================================
#!/usr/bin/env bash
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
APP_NAME="Gradle"
# Script name without its directory; later passed as -Dorg.gradle.appname.
APP_BASE_NAME=`basename "$0"`
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
# Emit a warning message; all arguments are joined into a single line.
warn () {
    echo "$*"
}
# Print an error message framed by blank lines, then abort with status 1.
die () {
    echo
    echo "$*"
    echo
    exit 1
}
# OS specific support (must be 'true' or 'false').
# Exactly one flag is switched on below based on the `uname` output;
# all three stay false on ordinary Linux.
cygwin=false
msys=false
darwin=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
esac
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
# Follow each symlink step until PRG names a real file; relative link
# targets are re-rooted at the current PRG's directory.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
# Record APP_HOME as the physical (-P) path of the script's directory,
# then restore the caller's working directory.
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
# Prefer $JAVA_HOME when set (checking the AIX-specific jre/sh location
# first), otherwise fall back to whatever `java` is on the PATH.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
# Skipped on Cygwin/Darwin; failures only produce warnings, never abort.
if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
# (an alternation of every top-level directory under /).
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
# Each positional argument that looks like a Unix path (and is not an
# option) is converted; the numbered args0..argsN variables are then
# reassembled into "$@" by the case below (up to 9 arguments).
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Split up the JVM_OPTS and GRADLE_OPTS values into an array, following the shell quoting and substitution rules
function splitJvmOpts() {
JVM_OPTS=("$@")
}
# The eval lets word splitting and quote removal apply to the combined
# option strings before they are captured into the JVM_OPTS array.
eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
# Replace this shell with the JVM running the Gradle wrapper main class.
exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
================================================
FILE: gradlew.bat
================================================
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
@rem %~dp0 expands to the drive and directory of this script.
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
@rem %~n0 is the script's base name, passed as -Dorg.gradle.appname below.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Find java.exe
@rem Prefer JAVA_HOME when defined; otherwise require java.exe on the PATH.
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
@rem Strip any embedded quotes before composing the executable path.
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
if "%@eval[2+2]" == "4" goto 4NT_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
goto execute
:4NT_args
@rem Get arguments from the 4NT Shell from JP Software
set CMD_LINE_ARGS=%$
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega
================================================
FILE: settings.gradle
================================================
include ':app'