[
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GLProgram.h",
    "content": "//  This is Jeff LaMarche's GLProgram OpenGL shader wrapper class from his OpenGL ES 2.0 book.\n//  A description of this can be found at his page on the topic:\n//  http://iphonedevelopment.blogspot.com/2010/11/opengl-es-20-for-ios-chapter-4.html\n//  I've extended this to be able to take programs as NSStrings in addition to files, for baked-in shaders\n\n#import <Foundation/Foundation.h>\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n#import <OpenGLES/ES2/gl.h>\n#import <OpenGLES/ES2/glext.h>\n#else\n#import <OpenGL/OpenGL.h>\n#import <OpenGL/gl.h>\n#endif\n\n@interface GLProgram : NSObject \n{\n    NSMutableArray  *attributes;\n    NSMutableArray  *uniforms;\n    GLuint          program,\n\tvertShader, \n\tfragShader;\t\n}\n\n@property(readwrite, nonatomic) BOOL initialized;\n@property(readwrite, copy, nonatomic) NSString *vertexShaderLog;\n@property(readwrite, copy, nonatomic) NSString *fragmentShaderLog;\n@property(readwrite, copy, nonatomic) NSString *programLog;\n\n- (id)initWithVertexShaderString:(NSString *)vShaderString \n            fragmentShaderString:(NSString *)fShaderString;\n- (id)initWithVertexShaderString:(NSString *)vShaderString \n          fragmentShaderFilename:(NSString *)fShaderFilename;\n- (id)initWithVertexShaderFilename:(NSString *)vShaderFilename \n            fragmentShaderFilename:(NSString *)fShaderFilename;\n- (void)addAttribute:(NSString *)attributeName;\n- (GLuint)attributeIndex:(NSString *)attributeName;\n- (GLuint)uniformIndex:(NSString *)uniformName;\n- (BOOL)link;\n- (void)use;\n- (void)validate;\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GLProgram.m",
    "content": "//  This is Jeff LaMarche's GLProgram OpenGL shader wrapper class from his OpenGL ES 2.0 book.\n//  A description of this can be found at his page on the topic:\n//  http://iphonedevelopment.blogspot.com/2010/11/opengl-es-20-for-ios-chapter-4.html\n\n\n#import \"GLProgram.h\"\n// START:typedefs\n#pragma mark Function Pointer Definitions\ntypedef void (*GLInfoFunction)(GLuint program, GLenum pname, GLint* params);\ntypedef void (*GLLogFunction) (GLuint program, GLsizei bufsize, GLsizei* length, GLchar* infolog);\n// END:typedefs\n#pragma mark -\n#pragma mark Private Extension Method Declaration\n// START:extension\n@interface GLProgram()\n\n- (BOOL)compileShader:(GLuint *)shader \n                 type:(GLenum)type \n               string:(NSString *)shaderString;\n@end\n// END:extension\n#pragma mark -\n\n@implementation GLProgram\n// START:init\n\n@synthesize initialized = _initialized;\n\n- (id)initWithVertexShaderString:(NSString *)vShaderString \n            fragmentShaderString:(NSString *)fShaderString;\n{\n    if ((self = [super init])) \n    {\n        _initialized = NO;\n        \n        attributes = [[NSMutableArray alloc] init];\n        uniforms = [[NSMutableArray alloc] init];\n        program = glCreateProgram();\n        \n        if (![self compileShader:&vertShader \n                            type:GL_VERTEX_SHADER \n                          string:vShaderString])\n        {\n            NSLog(@\"Failed to compile vertex shader\");\n        }\n        \n        // Create and compile fragment shader\n        if (![self compileShader:&fragShader \n                            type:GL_FRAGMENT_SHADER \n                          string:fShaderString])\n        {\n            NSLog(@\"Failed to compile fragment shader\");\n        }\n        \n        glAttachShader(program, vertShader);\n        glAttachShader(program, fragShader);\n    }\n    \n    return self;\n}\n\n- (id)initWithVertexShaderString:(NSString *)vShaderString \n          fragmentShaderFilename:(NSString *)fShaderFilename;\n{\n    NSString *fragShaderPathname = [[NSBundle mainBundle] pathForResource:fShaderFilename ofType:@\"fsh\"];\n    NSString *fragmentShaderString = [NSString stringWithContentsOfFile:fragShaderPathname encoding:NSUTF8StringEncoding error:nil];\n    \n    if ((self = [self initWithVertexShaderString:vShaderString fragmentShaderString:fragmentShaderString])) \n    {\n    }\n    \n    return self;\n}\n\n- (id)initWithVertexShaderFilename:(NSString *)vShaderFilename \n            fragmentShaderFilename:(NSString *)fShaderFilename;\n{\n    NSString *vertShaderPathname = [[NSBundle mainBundle] pathForResource:vShaderFilename ofType:@\"vsh\"];\n    NSString *vertexShaderString = [NSString stringWithContentsOfFile:vertShaderPathname encoding:NSUTF8StringEncoding error:nil];\n\n    NSString *fragShaderPathname = [[NSBundle mainBundle] pathForResource:fShaderFilename ofType:@\"fsh\"];\n    NSString *fragmentShaderString = [NSString stringWithContentsOfFile:fragShaderPathname encoding:NSUTF8StringEncoding error:nil];\n    \n    if ((self = [self initWithVertexShaderString:vertexShaderString fragmentShaderString:fragmentShaderString])) \n    {\n    }\n    \n    return self;\n}\n// END:init\n// START:compile\n- (BOOL)compileShader:(GLuint *)shader \n                 type:(GLenum)type \n               string:(NSString *)shaderString\n{\n//    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();\n\n    GLint status;\n    const GLchar *source;\n    \n    source = \n      (GLchar 
*)[shaderString UTF8String];\n    if (!source)\n    {\n        NSLog(@\"Failed to load shader string\");\n        return NO;\n    }\n    \n    *shader = glCreateShader(type);\n    glShaderSource(*shader, 1, &source, NULL);\n    glCompileShader(*shader);\n    \n    glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);\n\n\tif (status != GL_TRUE)\n\t{\n\t\tGLint logLength;\n\t\tglGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength);\n\t\tif (logLength > 0)\n\t\t{\n\t\t\tGLchar *log = (GLchar *)malloc(logLength);\n\t\t\tglGetShaderInfoLog(*shader, logLength, &logLength, log);\n            if (shader == &vertShader)\n            {\n                self.vertexShaderLog = [NSString stringWithFormat:@\"%s\", log];\n            }\n            else\n            {\n                self.fragmentShaderLog = [NSString stringWithFormat:@\"%s\", log];\n            }\n\n\t\t\tfree(log);\n\t\t}\n\t}\t\n\t\n//    CFAbsoluteTime linkTime = (CFAbsoluteTimeGetCurrent() - startTime);\n//    NSLog(@\"Compiled in %f ms\", linkTime * 1000.0);\n\n    return status == GL_TRUE;\n}\n// END:compile\n#pragma mark -\n// START:addattribute\n- (void)addAttribute:(NSString *)attributeName\n{\n    if (![attributes containsObject:attributeName])\n    {\n        [attributes addObject:attributeName];\n        glBindAttribLocation(program, \n                             (GLuint)[attributes indexOfObject:attributeName],\n                             [attributeName UTF8String]);\n    }\n}\n// END:addattribute\n// START:indexmethods\n- (GLuint)attributeIndex:(NSString *)attributeName\n{\n    return (GLuint)[attributes indexOfObject:attributeName];\n}\n- (GLuint)uniformIndex:(NSString *)uniformName\n{\n    return glGetUniformLocation(program, [uniformName UTF8String]);\n}\n// END:indexmethods\n#pragma mark -\n// START:link\n- (BOOL)link\n{\n//    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();\n\n    GLint status;\n    \n    glLinkProgram(program);\n    \n    glGetProgramiv(program, GL_LINK_STATUS, &status);\n    if (status == GL_FALSE)\n        return NO;\n    \n    if (vertShader)\n    {\n        glDeleteShader(vertShader);\n        vertShader = 0;\n    }\n    if (fragShader)\n    {\n        glDeleteShader(fragShader);\n        fragShader = 0;\n    }\n    \n    self.initialized = YES;\n\n//    CFAbsoluteTime linkTime = (CFAbsoluteTimeGetCurrent() - startTime);\n//    NSLog(@\"Linked in %f ms\", linkTime * 1000.0);\n\n    return YES;\n}\n// END:link\n// START:use\n- (void)use\n{\n    glUseProgram(program);\n}\n// END:use\n#pragma mark -\n\n- (void)validate;\n{\n\tGLint logLength;\n\t\n\tglValidateProgram(program);\n\tglGetProgramiv(program, GL_INFO_LOG_LENGTH, &logLength);\n\tif (logLength > 0)\n\t{\n\t\tGLchar *log = (GLchar *)malloc(logLength);\n\t\tglGetProgramInfoLog(program, logLength, &logLength, log);\n        self.programLog = [NSString stringWithFormat:@\"%s\", log];\n\t\tfree(log);\n\t}\t\n}\n\n#pragma mark -\n// START:dealloc\n- (void)dealloc\n{\n    if (vertShader)\n        glDeleteShader(vertShader);\n        \n    if (fragShader)\n        glDeleteShader(fragShader);\n    \n    if (program)\n        glDeleteProgram(program);\n       \n}\n// END:dealloc\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageBuffer.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageBuffer : GPUImageFilter\n{\n    NSMutableArray *bufferedFramebuffers;\n}\n\n@property(readwrite, nonatomic) NSUInteger bufferSize;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageBuffer.m",
    "content": "#import \"GPUImageBuffer.h\"\n\n@interface GPUImageBuffer()\n\n@end\n\n@implementation GPUImageBuffer\n\n@synthesize bufferSize = _bufferSize;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [self initWithFragmentShaderFromString:kGPUImagePassthroughFragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    bufferedFramebuffers = [[NSMutableArray alloc] init];\n//    [bufferedTextures addObject:[NSNumber numberWithInt:outputTexture]];\n    _bufferSize = 1;\n    \n    return self;\n}\n\n- (void)dealloc\n{\n    for (GPUImageFramebuffer *currentFramebuffer in bufferedFramebuffers)\n    {\n        [currentFramebuffer unlock];\n    }\n}\n\n#pragma mark -\n#pragma mark GPUImageInput\n\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;\n{\n    if ([bufferedFramebuffers count] >= _bufferSize)\n    {\n        outputFramebuffer = [bufferedFramebuffers objectAtIndex:0];\n        [bufferedFramebuffers removeObjectAtIndex:0];\n    }\n    else\n    {\n        // Nothing yet in the buffer, so don't process further until the buffer is full\n        outputFramebuffer = firstInputFramebuffer;\n        [firstInputFramebuffer lock];\n    }\n    \n    [bufferedFramebuffers addObject:firstInputFramebuffer];\n\n    // Need to pass along rotation information, as we're just holding on to buffered framebuffers and not rotating them ourselves\n    for (id<GPUImageInput> currentTarget in targets)\n    {\n        if (currentTarget != self.targetToIgnoreForUpdates)\n        {\n            NSInteger indexOfObject = [targets indexOfObject:currentTarget];\n            NSInteger textureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n            \n            [currentTarget setInputRotation:inputRotation atIndex:textureIndex];\n        }\n    }\n\n    // Let the downstream video elements see the previous frame from the buffer before rendering a new one into place\n    [self informTargetsAboutNewFrameAtTime:frameTime];\n \n//    [self renderToTextureWithVertices:imageVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];\n}\n\n- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;\n{\n    // No need to render to another texture anymore, since we'll be hanging on to the textures in our buffer\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setBufferSize:(NSUInteger)newValue;\n{\n    if ( (newValue == _bufferSize) || (newValue < 1) )\n    {\n        return;\n    }\n        \n    if (newValue > _bufferSize)\n    {\n        NSUInteger texturesToAdd = newValue - _bufferSize;\n        for (NSUInteger currentTextureIndex = 0; currentTextureIndex < texturesToAdd; currentTextureIndex++)\n        {\n            // TODO: Deal with the growth of the size of the buffer by rotating framebuffers, no textures\n        }\n    }\n    else\n    {\n        NSUInteger texturesToRemove = _bufferSize - newValue;\n        for (NSUInteger currentTextureIndex = 0; currentTextureIndex < texturesToRemove; currentTextureIndex++)\n        {\n            GPUImageFramebuffer *lastFramebuffer = [bufferedFramebuffers lastObject];\n            [bufferedFramebuffers removeObjectAtIndex:([bufferedFramebuffers count] - 1)];\n            \n            [lastFramebuffer unlock];\n            lastFramebuffer = nil;\n        }\n    }\n\n  _bufferSize = newValue;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFilter.h",
    "content": "#import \"GPUImageOutput.h\"\n\n#define STRINGIZE(x) #x\n#define STRINGIZE2(x) STRINGIZE(x)\n#define SHADER_STRING(text) @ STRINGIZE2(text)\n\n#define GPUImageHashIdentifier #\n#define GPUImageWrappedLabel(x) x\n#define GPUImageEscapedHashIdentifier(a) GPUImageWrappedLabel(GPUImageHashIdentifier)a\n\nextern NSString *const kGPUImageVertexShaderString;\nextern NSString *const kGPUImagePassthroughFragmentShaderString;\n\nstruct GPUVector4 {\n    GLfloat one;\n    GLfloat two;\n    GLfloat three;\n    GLfloat four;\n};\ntypedef struct GPUVector4 GPUVector4;\n\nstruct GPUVector3 {\n    GLfloat one;\n    GLfloat two;\n    GLfloat three;\n};\ntypedef struct GPUVector3 GPUVector3;\n\nstruct GPUMatrix4x4 {\n    GPUVector4 one;\n    GPUVector4 two;\n    GPUVector4 three;\n    GPUVector4 four;\n};\ntypedef struct GPUMatrix4x4 GPUMatrix4x4;\n\nstruct GPUMatrix3x3 {\n    GPUVector3 one;\n    GPUVector3 two;\n    GPUVector3 three;\n};\ntypedef struct GPUMatrix3x3 GPUMatrix3x3;\n\n/** GPUImage's base filter class\n \n Filters and other subsequent elements in the chain conform to the GPUImageInput protocol, which lets them take in the supplied or processed texture from the previous link in the chain and do something with it. Objects one step further down the chain are considered targets, and processing can be branched by adding multiple targets to a single output or filter.\n */\n@interface GPUImageFilter : GPUImageOutput <GPUImageInput>\n{\n    GPUImageFramebuffer *firstInputFramebuffer;\n    \n    GLProgram *filterProgram;\n    GLint filterPositionAttribute, filterTextureCoordinateAttribute;\n    GLint filterInputTextureUniform;\n    GLfloat backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha;\n    \n    BOOL isEndProcessing;\n\n    CGSize currentFilterSize;\n    GPUImageRotationMode inputRotation;\n    \n    BOOL currentlyReceivingMonochromeInput;\n    \n    NSMutableDictionary *uniformStateRestorationBlocks;\n    dispatch_semaphore_t imageCaptureSemaphore;\n}\n\n@property(readonly) CVPixelBufferRef renderTarget;\n@property(readwrite, nonatomic) BOOL preventRendering;\n@property(readwrite, nonatomic) BOOL currentlyReceivingMonochromeInput;\n\n/// @name Initialization and teardown\n\n/**\n Initialize with vertex and fragment shaders\n \n You make take advantage of the SHADER_STRING macro to write your shaders in-line.\n @param vertexShaderString Source code of the vertex shader to use\n @param fragmentShaderString Source code of the fragment shader to use\n */\n- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;\n\n/**\n Initialize with a fragment shader\n \n You may take advantage of the SHADER_STRING macro to write your shader in-line.\n @param fragmentShaderString Source code of fragment shader to use\n */\n- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;\n/**\n Initialize with a fragment shader\n @param fragmentShaderFilename Filename of fragment shader to load\n */\n- (id)initWithFragmentShaderFromFile:(NSString *)fragmentShaderFilename;\n- (void)initializeAttributes;\n- (void)setupFilterForSize:(CGSize)filterFrameSize;\n- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex;\n- (CGPoint)rotatedPoint:(CGPoint)pointToRotate forRotation:(GPUImageRotationMode)rotation;\n\n/// @name Managing the display FBOs\n/** Size of the frame buffer object\n */\n- (CGSize)sizeOfFBO;\n\n/// @name Rendering\n+ (const GLfloat 
*)textureCoordinatesForRotation:(GPUImageRotationMode)rotationMode;\n- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;\n- (void)informTargetsAboutNewFrameAtTime:(CMTime)frameTime;\n- (CGSize)outputFrameSize;\n\n/// @name Input parameters\n- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;\n- (void)setInteger:(GLint)newInteger forUniformName:(NSString *)uniformName;\n- (void)setFloat:(GLfloat)newFloat forUniformName:(NSString *)uniformName;\n- (void)setSize:(CGSize)newSize forUniformName:(NSString *)uniformName;\n- (void)setPoint:(CGPoint)newPoint forUniformName:(NSString *)uniformName;\n- (void)setFloatVec3:(GPUVector3)newVec3 forUniformName:(NSString *)uniformName;\n- (void)setFloatVec4:(GPUVector4)newVec4 forUniform:(NSString *)uniformName;\n- (void)setFloatArray:(GLfloat *)array length:(GLsizei)count forUniform:(NSString*)uniformName;\n\n- (void)setMatrix3f:(GPUMatrix3x3)matrix forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;\n- (void)setMatrix4f:(GPUMatrix4x4)matrix forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;\n- (void)setFloat:(GLfloat)floatValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;\n- (void)setPoint:(CGPoint)pointValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;\n- (void)setSize:(CGSize)sizeValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;\n- (void)setVec3:(GPUVector3)vectorValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;\n- (void)setVec4:(GPUVector4)vectorValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;\n- (void)setFloatArray:(GLfloat *)arrayValue length:(GLsizei)arrayLength forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;\n- (void)setInteger:(GLint)intValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;\n- (void)setFloatVec2Array:(GLfloat *)array length:(GLsizei)length forUniform:(GLint)uniform program:(GLProgram *)program;\n\n- (void)setAndExecuteUniformStateCallbackAtIndex:(GLint)uniform forProgram:(GLProgram *)shaderProgram toBlock:(dispatch_block_t)uniformStateBlock;\n- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFilter.m",
    "content": "#import \"GPUImageFilter.h\"\n#import \"GPUImagePicture.h\"\n#import <AVFoundation/AVFoundation.h>\n\n// Hardcode the vertex shader for standard filters, but this can be overridden\nNSString *const kGPUImageVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n \n varying vec2 textureCoordinate;\n \n void main()\n {\n     gl_Position = position;\n     textureCoordinate = inputTextureCoordinate.xy;\n }\n );\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n\nNSString *const kGPUImagePassthroughFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     gl_FragColor = texture2D(inputImageTexture, textureCoordinate);\n }\n);\n\n#else\n\nNSString *const kGPUImagePassthroughFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     gl_FragColor = texture2D(inputImageTexture, textureCoordinate);\n }\n);\n#endif\n\n\n@implementation GPUImageFilter\n\n@synthesize preventRendering = _preventRendering;\n@synthesize currentlyReceivingMonochromeInput;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n\n    uniformStateRestorationBlocks = [NSMutableDictionary dictionaryWithCapacity:10];\n    _preventRendering = NO;\n    currentlyReceivingMonochromeInput = NO;\n    inputRotation = kGPUImageNoRotation;\n    backgroundColorRed = 0.0;\n    backgroundColorGreen = 0.0;\n    backgroundColorBlue = 0.0;\n    backgroundColorAlpha = 0.0;\n    imageCaptureSemaphore = dispatch_semaphore_create(0);\n    dispatch_semaphore_signal(imageCaptureSemaphore);\n\n    runSynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext useImageProcessingContext];\n\n        filterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:vertexShaderString fragmentShaderString:fragmentShaderString];\n        \n        if (!filterProgram.initialized)\n        {\n            [self initializeAttributes];\n            \n            if (![filterProgram link])\n            {\n                NSString *progLog = [filterProgram programLog];\n                NSLog(@\"Program link log: %@\", progLog);\n                NSString *fragLog = [filterProgram fragmentShaderLog];\n                NSLog(@\"Fragment shader compile log: %@\", fragLog);\n                NSString *vertLog = [filterProgram vertexShaderLog];\n                NSLog(@\"Vertex shader compile log: %@\", vertLog);\n                filterProgram = nil;\n                NSAssert(NO, @\"Filter shader link failed\");\n            }\n        }\n        \n        filterPositionAttribute = [filterProgram attributeIndex:@\"position\"];\n        filterTextureCoordinateAttribute = [filterProgram attributeIndex:@\"inputTextureCoordinate\"];\n        filterInputTextureUniform = [filterProgram uniformIndex:@\"inputImageTexture\"]; // This does assume a name of \"inputImageTexture\" for the fragment shader\n        \n        [GPUImageContext setActiveShaderProgram:filterProgram];\n        \n        glEnableVertexAttribArray(filterPositionAttribute);\n        glEnableVertexAttribArray(filterTextureCoordinateAttribute);    \n    });\n    \n    return self;\n}\n\n- (id)initWithFragmentShaderFromString:(NSString 
*)fragmentShaderString;\n{\n    if (!(self = [self initWithVertexShaderFromString:kGPUImageVertexShaderString fragmentShaderFromString:fragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n- (id)initWithFragmentShaderFromFile:(NSString *)fragmentShaderFilename;\n{\n    NSString *fragmentShaderPathname = [[NSBundle mainBundle] pathForResource:fragmentShaderFilename ofType:@\"fsh\"];\n    NSString *fragmentShaderString = [NSString stringWithContentsOfFile:fragmentShaderPathname encoding:NSUTF8StringEncoding error:nil];\n\n    if (!(self = [self initWithFragmentShaderFromString:fragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n- (id)init;\n{\n    if (!(self = [self initWithFragmentShaderFromString:kGPUImagePassthroughFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n- (void)initializeAttributes;\n{\n    [filterProgram addAttribute:@\"position\"];\n\t[filterProgram addAttribute:@\"inputTextureCoordinate\"];\n\n    // Override this, calling back to this super method, in order to add new attributes to your vertex shader\n}\n\n- (void)setupFilterForSize:(CGSize)filterFrameSize;\n{\n    // This is where you can override to provide some custom setup, if your filter has a size-dependent element\n}\n\n- (void)dealloc\n{\n#if !OS_OBJECT_USE_OBJC\n    if (imageCaptureSemaphore != NULL)\n    {\n        dispatch_release(imageCaptureSemaphore);\n    }\n#endif\n\n}\n\n#pragma mark -\n#pragma mark Still image processing\n\n- (void)useNextFrameForImageCapture;\n{\n    usingNextFrameForImageCapture = YES;\n\n    // Set the semaphore high, if it isn't already\n    if (dispatch_semaphore_wait(imageCaptureSemaphore, DISPATCH_TIME_NOW) != 0)\n    {\n        return;\n    }\n}\n\n- (CGImageRef)newCGImageFromCurrentlyProcessedOutput\n{\n    // Give it three seconds to process, then abort if they forgot to set up the image capture properly\n    double timeoutForImageCapture = 3.0;\n    dispatch_time_t convertedTimeout = dispatch_time(DISPATCH_TIME_NOW, timeoutForImageCapture * NSEC_PER_SEC);\n\n    if (dispatch_semaphore_wait(imageCaptureSemaphore, convertedTimeout) != 0)\n    {\n        return NULL;\n    }\n\n    GPUImageFramebuffer* framebuffer = [self framebufferForOutput];\n    \n    usingNextFrameForImageCapture = NO;\n    dispatch_semaphore_signal(imageCaptureSemaphore);\n    \n    CGImageRef image = [framebuffer newCGImageFromFramebufferContents];\n    return image;\n}\n\n#pragma mark -\n#pragma mark Managing the display FBOs\n\n- (CGSize)sizeOfFBO;\n{\n    CGSize outputSize = [self maximumOutputSize];\n    if ( (CGSizeEqualToSize(outputSize, CGSizeZero)) || (inputTextureSize.width < outputSize.width) )\n    {\n        return inputTextureSize;\n    }\n    else\n    {\n        return outputSize;\n    }\n}\n\n#pragma mark -\n#pragma mark Rendering\n\n+ (const GLfloat *)textureCoordinatesForRotation:(GPUImageRotationMode)rotationMode;\n{\n    static const GLfloat noRotationTextureCoordinates[] = {\n        0.0f, 0.0f,\n        1.0f, 0.0f,\n        0.0f, 1.0f,\n        1.0f, 1.0f,\n    };\n    \n    static const GLfloat rotateLeftTextureCoordinates[] = {\n        1.0f, 0.0f,\n        1.0f, 1.0f,\n        0.0f, 0.0f,\n        0.0f, 1.0f,\n    };\n    \n    static const GLfloat rotateRightTextureCoordinates[] = {\n        0.0f, 1.0f,\n        0.0f, 0.0f,\n        1.0f, 1.0f,\n        1.0f, 0.0f,\n    };\n    \n    static const GLfloat verticalFlipTextureCoordinates[] = {\n        0.0f, 1.0f,\n        1.0f, 1.0f,\n    
    0.0f,  0.0f,\n        1.0f,  0.0f,\n    };\n    \n    static const GLfloat horizontalFlipTextureCoordinates[] = {\n        1.0f, 0.0f,\n        0.0f, 0.0f,\n        1.0f,  1.0f,\n        0.0f,  1.0f,\n    };\n    \n    static const GLfloat rotateRightVerticalFlipTextureCoordinates[] = {\n        0.0f, 0.0f,\n        0.0f, 1.0f,\n        1.0f, 0.0f,\n        1.0f, 1.0f,\n    };\n\n    static const GLfloat rotateRightHorizontalFlipTextureCoordinates[] = {\n        1.0f, 1.0f,\n        1.0f, 0.0f,\n        0.0f, 1.0f,\n        0.0f, 0.0f,\n    };\n\n    static const GLfloat rotate180TextureCoordinates[] = {\n        1.0f, 1.0f,\n        0.0f, 1.0f,\n        1.0f, 0.0f,\n        0.0f, 0.0f,\n    };\n\n    switch(rotationMode)\n    {\n        case kGPUImageNoRotation: return noRotationTextureCoordinates;\n        case kGPUImageRotateLeft: return rotateLeftTextureCoordinates;\n        case kGPUImageRotateRight: return rotateRightTextureCoordinates;\n        case kGPUImageFlipVertical: return verticalFlipTextureCoordinates;\n        case kGPUImageFlipHorizonal: return horizontalFlipTextureCoordinates;\n        case kGPUImageRotateRightFlipVertical: return rotateRightVerticalFlipTextureCoordinates;\n        case kGPUImageRotateRightFlipHorizontal: return rotateRightHorizontalFlipTextureCoordinates;\n        case kGPUImageRotate180: return rotate180TextureCoordinates;\n    }\n}\n\n- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;\n{\n    if (self.preventRendering)\n    {\n        [firstInputFramebuffer unlock];\n        return;\n    }\n    \n    [GPUImageContext setActiveShaderProgram:filterProgram];\n\n    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];\n    [outputFramebuffer activateFramebuffer];\n    if (usingNextFrameForImageCapture)\n    {\n        [outputFramebuffer lock];\n    }\n\n    [self setUniformsForProgramAtIndex:0];\n    \n    glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);\n    glClear(GL_COLOR_BUFFER_BIT);\n\n\tglActiveTexture(GL_TEXTURE2);\n\tglBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);\n\t\n\tglUniform1i(filterInputTextureUniform, 2);\t\n\n    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);\n\tglVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);\n    \n    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\n    \n    [firstInputFramebuffer unlock];\n    \n    if (usingNextFrameForImageCapture)\n    {\n        dispatch_semaphore_signal(imageCaptureSemaphore);\n    }\n}\n\n- (void)informTargetsAboutNewFrameAtTime:(CMTime)frameTime;\n{\n    if (self.frameProcessingCompletionBlock != NULL)\n    {\n        self.frameProcessingCompletionBlock(self, frameTime);\n    }\n    \n    // Get all targets the framebuffer so they can grab a lock on it\n    for (id<GPUImageInput> currentTarget in targets)\n    {\n        if (currentTarget != self.targetToIgnoreForUpdates)\n        {\n            NSInteger indexOfObject = [targets indexOfObject:currentTarget];\n            NSInteger textureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n\n            [self setInputFramebufferForTarget:currentTarget atIndex:textureIndex];\n            [currentTarget setInputSize:[self outputFrameSize] atIndex:textureIndex];\n        }\n    }\n    \n    // Release our hold so it 
can return to the cache immediately upon processing\n    [[self framebufferForOutput] unlock];\n    \n    if (usingNextFrameForImageCapture)\n    {\n//        usingNextFrameForImageCapture = NO;\n    }\n    else\n    {\n        [self removeOutputFramebuffer];\n    }    \n    \n    // Trigger processing last, so that our unlock comes first in serial execution, avoiding the need for a callback\n    for (id<GPUImageInput> currentTarget in targets)\n    {\n        if (currentTarget != self.targetToIgnoreForUpdates)\n        {\n            NSInteger indexOfObject = [targets indexOfObject:currentTarget];\n            NSInteger textureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n            [currentTarget newFrameReadyAtTime:frameTime atIndex:textureIndex];\n        }\n    }\n}\n\n- (CGSize)outputFrameSize;\n{\n    return inputTextureSize;\n}\n\n#pragma mark -\n#pragma mark Input parameters\n\n- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;\n{\n    backgroundColorRed = redComponent;\n    backgroundColorGreen = greenComponent;\n    backgroundColorBlue = blueComponent;\n    backgroundColorAlpha = alphaComponent;\n}\n\n- (void)setInteger:(GLint)newInteger forUniformName:(NSString *)uniformName;\n{\n    GLint uniformIndex = [filterProgram uniformIndex:uniformName];\n    [self setInteger:newInteger forUniform:uniformIndex program:filterProgram];\n}\n\n- (void)setFloat:(GLfloat)newFloat forUniformName:(NSString *)uniformName;\n{\n    GLint uniformIndex = [filterProgram uniformIndex:uniformName];\n    [self setFloat:newFloat forUniform:uniformIndex program:filterProgram];\n}\n\n- (void)setSize:(CGSize)newSize forUniformName:(NSString *)uniformName;\n{\n    GLint uniformIndex = [filterProgram uniformIndex:uniformName];\n    [self setSize:newSize forUniform:uniformIndex program:filterProgram];\n}\n\n- (void)setPoint:(CGPoint)newPoint forUniformName:(NSString *)uniformName;\n{\n    GLint uniformIndex = [filterProgram uniformIndex:uniformName];\n    [self setPoint:newPoint forUniform:uniformIndex program:filterProgram];\n}\n\n- (void)setFloatVec3:(GPUVector3)newVec3 forUniformName:(NSString *)uniformName;\n{\n    GLint uniformIndex = [filterProgram uniformIndex:uniformName];\n    [self setVec3:newVec3 forUniform:uniformIndex program:filterProgram];\n}\n\n- (void)setFloatVec4:(GPUVector4)newVec4 forUniform:(NSString *)uniformName;\n{\n    GLint uniformIndex = [filterProgram uniformIndex:uniformName];\n    [self setVec4:newVec4 forUniform:uniformIndex program:filterProgram];\n}\n\n- (void)setFloatArray:(GLfloat *)array length:(GLsizei)count forUniform:(NSString*)uniformName\n{\n    GLint uniformIndex = [filterProgram uniformIndex:uniformName];\n    \n    [self setFloatArray:array length:count forUniform:uniformIndex program:filterProgram];\n}\n\n- (void)setMatrix3f:(GPUMatrix3x3)matrix forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;\n{\n    runAsynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext setActiveShaderProgram:shaderProgram];\n        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{\n            glUniformMatrix3fv(uniform, 1, GL_FALSE, (GLfloat *)&matrix);\n        }];\n    });\n}\n\n- (void)setMatrix4f:(GPUMatrix4x4)matrix forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;\n{\n    runAsynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext setActiveShaderProgram:shaderProgram];\n        
[self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{\n            glUniformMatrix4fv(uniform, 1, GL_FALSE, (GLfloat *)&matrix);\n        }];\n    });\n}\n\n- (void)setFloat:(GLfloat)floatValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;\n{\n    runAsynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext setActiveShaderProgram:shaderProgram];\n        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{\n            glUniform1f(uniform, floatValue);\n        }];\n    });\n}\n\n- (void)setPoint:(CGPoint)pointValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;\n{\n    runAsynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext setActiveShaderProgram:shaderProgram];\n        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{\n            GLfloat positionArray[2];\n            positionArray[0] = pointValue.x;\n            positionArray[1] = pointValue.y;\n            \n            glUniform2fv(uniform, 1, positionArray);\n        }];\n    });\n}\n\n- (void)setSize:(CGSize)sizeValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;\n{\n    runAsynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext setActiveShaderProgram:shaderProgram];\n        \n        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{\n            GLfloat sizeArray[2];\n            sizeArray[0] = sizeValue.width;\n            sizeArray[1] = sizeValue.height;\n            \n            glUniform2fv(uniform, 1, sizeArray);\n        }];\n    });\n}\n\n- (void)setVec3:(GPUVector3)vectorValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;\n{\n    runAsynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext setActiveShaderProgram:shaderProgram];\n\n        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{\n            glUniform3fv(uniform, 1, (GLfloat *)&vectorValue);\n        }];\n    });\n}\n\n- (void)setVec4:(GPUVector4)vectorValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;\n{\n    runAsynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext setActiveShaderProgram:shaderProgram];\n        \n        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{\n            glUniform4fv(uniform, 1, (GLfloat *)&vectorValue);\n        }];\n    });\n}\n\n- (void)setFloatArray:(GLfloat *)arrayValue length:(GLsizei)arrayLength forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;\n{\n    // Make a copy of the data, so it doesn't get overwritten before async call executes\n    NSData* arrayData = [NSData dataWithBytes:arrayValue length:arrayLength * sizeof(arrayValue[0])];\n\n    runAsynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext setActiveShaderProgram:shaderProgram];\n        \n        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{\n            glUniform1fv(uniform, arrayLength, [arrayData bytes]);\n        }];\n    });\n}\n\n\n\n- (void)setInteger:(GLint)intValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;\n{\n    runAsynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext setActiveShaderProgram:shaderProgram];\n\n        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{\n            glUniform1i(uniform, intValue);\n        }];\n    });\n}\n\n- 
(void)setFloatVec2Array:(GLfloat *)array length:(GLsizei)length forUniform:(GLint)uniform program:(GLProgram *)program\n{\n    NSData *arrayData = [NSData dataWithBytes:array length:length * sizeof(array[0])];\n    \n    runAsynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext setActiveShaderProgram:program];\n        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:program toBlock:^{\n            glUniform2fv(uniform, length, [arrayData bytes]);\n        }];\n    });\n}\n\n- (void)setAndExecuteUniformStateCallbackAtIndex:(GLint)uniform forProgram:(GLProgram *)shaderProgram toBlock:(dispatch_block_t)uniformStateBlock;\n{\n    [uniformStateRestorationBlocks setObject:[uniformStateBlock copy] forKey:[NSNumber numberWithInt:uniform]];\n    uniformStateBlock();\n}\n\n- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex;\n{\n    [uniformStateRestorationBlocks enumerateKeysAndObjectsUsingBlock:^(id key, id obj, BOOL *stop){\n        dispatch_block_t currentBlock = obj;\n        currentBlock();\n    }];\n}\n\n#pragma mark -\n#pragma mark GPUImageInput\n\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;\n{\n    static const GLfloat imageVertices[] = {\n        -1.0f, -1.0f,\n        1.0f, -1.0f,\n        -1.0f,  1.0f,\n        1.0f,  1.0f,\n    };\n    \n    [self renderToTextureWithVertices:imageVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];\n\n    [self informTargetsAboutNewFrameAtTime:frameTime];\n}\n\n- (NSInteger)nextAvailableTextureIndex;\n{\n    return 0;\n}\n\n- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;\n{\n    firstInputFramebuffer = newInputFramebuffer;\n    [firstInputFramebuffer lock];\n}\n\n- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex;\n{\n    CGSize rotatedSize = sizeToRotate;\n    \n    if (GPUImageRotationSwapsWidthAndHeight(inputRotation))\n    {\n        rotatedSize.width = sizeToRotate.height;\n        rotatedSize.height = sizeToRotate.width;\n    }\n    \n    return rotatedSize; \n}\n\n- (CGPoint)rotatedPoint:(CGPoint)pointToRotate forRotation:(GPUImageRotationMode)rotation;\n{\n    CGPoint rotatedPoint;\n    switch(rotation)\n    {\n        case kGPUImageNoRotation: return pointToRotate; break;\n        case kGPUImageFlipHorizonal:\n        {\n            rotatedPoint.x = 1.0 - pointToRotate.x;\n            rotatedPoint.y = pointToRotate.y;\n        }; break;\n        case kGPUImageFlipVertical:\n        {\n            rotatedPoint.x = pointToRotate.x;\n            rotatedPoint.y = 1.0 - pointToRotate.y;\n        }; break;\n        case kGPUImageRotateLeft:\n        {\n            rotatedPoint.x = 1.0 - pointToRotate.y;\n            rotatedPoint.y = pointToRotate.x;\n        }; break;\n        case kGPUImageRotateRight:\n        {\n            rotatedPoint.x = pointToRotate.y;\n            rotatedPoint.y = 1.0 - pointToRotate.x;\n        }; break;\n        case kGPUImageRotateRightFlipVertical:\n        {\n            rotatedPoint.x = pointToRotate.y;\n            rotatedPoint.y = pointToRotate.x;\n        }; break;\n        case kGPUImageRotateRightFlipHorizontal:\n        {\n            rotatedPoint.x = 1.0 - pointToRotate.y;\n            rotatedPoint.y = 1.0 - pointToRotate.x;\n        }; break;\n        case kGPUImageRotate180:\n        {\n            rotatedPoint.x = 1.0 - pointToRotate.x;\n            rotatedPoint.y = 1.0 - pointToRotate.y;\n        }; break;\n    }\n    \n 
   return rotatedPoint;\n}\n\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n{\n    if (self.preventRendering)\n    {\n        return;\n    }\n    \n    if (overrideInputSize)\n    {\n        if (CGSizeEqualToSize(forcedMaximumSize, CGSizeZero))\n        {\n        }\n        else\n        {\n            CGRect insetRect = AVMakeRectWithAspectRatioInsideRect(newSize, CGRectMake(0.0, 0.0, forcedMaximumSize.width, forcedMaximumSize.height));\n            inputTextureSize = insetRect.size;\n        }\n    }\n    else\n    {\n        CGSize rotatedSize = [self rotatedSize:newSize forIndex:textureIndex];\n        \n        if (CGSizeEqualToSize(rotatedSize, CGSizeZero))\n        {\n            inputTextureSize = rotatedSize;\n        }\n        else if (!CGSizeEqualToSize(inputTextureSize, rotatedSize))\n        {\n            inputTextureSize = rotatedSize;\n        }\n    }\n    \n    [self setupFilterForSize:[self sizeOfFBO]];\n}\n\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n    inputRotation = newInputRotation;\n}\n\n- (void)forceProcessingAtSize:(CGSize)frameSize;\n{    \n    if (CGSizeEqualToSize(frameSize, CGSizeZero))\n    {\n        overrideInputSize = NO;\n    }\n    else\n    {\n        overrideInputSize = YES;\n        inputTextureSize = frameSize;\n        forcedMaximumSize = CGSizeZero;\n    }\n}\n\n- (void)forceProcessingAtSizeRespectingAspectRatio:(CGSize)frameSize;\n{\n    if (CGSizeEqualToSize(frameSize, CGSizeZero))\n    {\n        overrideInputSize = NO;\n        inputTextureSize = CGSizeZero;\n        forcedMaximumSize = CGSizeZero;\n    }\n    else\n    {\n        overrideInputSize = YES;\n        forcedMaximumSize = frameSize;\n    }\n}\n\n- (CGSize)maximumOutputSize;\n{\n    // I'm temporarily disabling adjustments for smaller output sizes until I figure out how to make this work better\n    return CGSizeZero;\n\n    /*\n    if (CGSizeEqualToSize(cachedMaximumOutputSize, CGSizeZero))\n    {\n        for (id<GPUImageInput> currentTarget in targets)\n        {\n            if ([currentTarget maximumOutputSize].width > cachedMaximumOutputSize.width)\n            {\n                cachedMaximumOutputSize = [currentTarget maximumOutputSize];\n            }\n        }\n    }\n    \n    return cachedMaximumOutputSize;\n     */\n}\n\n- (void)endProcessing \n{\n    if (!isEndProcessing)\n    {\n        isEndProcessing = YES;\n        \n        for (id<GPUImageInput> currentTarget in targets)\n        {\n            [currentTarget endProcessing];\n        }\n    }\n}\n\n- (BOOL)wantsMonochromeInput;\n{\n    return NO;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFilterGroup.h",
    "content": "#import \"GPUImageOutput.h\"\n#import \"GPUImageFilter.h\"\n\n@interface GPUImageFilterGroup : GPUImageOutput <GPUImageInput>\n{\n    NSMutableArray *filters;\n    BOOL isEndProcessing;\n}\n\n@property(readwrite, nonatomic, strong) GPUImageOutput<GPUImageInput> *terminalFilter;\n@property(readwrite, nonatomic, strong) NSArray *initialFilters;\n@property(readwrite, nonatomic, strong) GPUImageOutput<GPUImageInput> *inputFilterToIgnoreForUpdates; \n\n// Filter management\n- (void)addFilter:(GPUImageOutput<GPUImageInput> *)newFilter;\n- (GPUImageOutput<GPUImageInput> *)filterAtIndex:(NSUInteger)filterIndex;\n- (NSUInteger)filterCount;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFilterGroup.m",
    "content": "#import \"GPUImageFilterGroup.h\"\n#import \"GPUImagePicture.h\"\n\n@implementation GPUImageFilterGroup\n\n@synthesize terminalFilter = _terminalFilter;\n@synthesize initialFilters = _initialFilters;\n@synthesize inputFilterToIgnoreForUpdates = _inputFilterToIgnoreForUpdates;\n\n- (id)init;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n    filters = [[NSMutableArray alloc] init];\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Filter management\n\n- (void)addFilter:(GPUImageOutput<GPUImageInput> *)newFilter;\n{\n    [filters addObject:newFilter];\n}\n\n- (GPUImageOutput<GPUImageInput> *)filterAtIndex:(NSUInteger)filterIndex;\n{\n    return [filters objectAtIndex:filterIndex];\n}\n\n- (NSUInteger)filterCount;\n{\n    return [filters count];\n}\n\n#pragma mark -\n#pragma mark Still image processing\n\n- (void)useNextFrameForImageCapture;\n{\n    [self.terminalFilter useNextFrameForImageCapture];\n}\n\n- (CGImageRef)newCGImageFromCurrentlyProcessedOutput;\n{\n    return [self.terminalFilter newCGImageFromCurrentlyProcessedOutput];\n}\n\n#pragma mark -\n#pragma mark GPUImageOutput overrides\n\n- (void)setTargetToIgnoreForUpdates:(id<GPUImageInput>)targetToIgnoreForUpdates;\n{\n    [_terminalFilter setTargetToIgnoreForUpdates:targetToIgnoreForUpdates];\n}\n\n- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;\n{\n    [_terminalFilter addTarget:newTarget atTextureLocation:textureLocation];\n}\n\n- (void)removeTarget:(id<GPUImageInput>)targetToRemove;\n{\n    [_terminalFilter removeTarget:targetToRemove];\n}\n\n- (void)removeAllTargets;\n{\n    [_terminalFilter removeAllTargets];\n}\n\n- (NSArray *)targets;\n{\n    return [_terminalFilter targets];\n}\n\n- (void)setFrameProcessingCompletionBlock:(void (^)(GPUImageOutput *, CMTime))frameProcessingCompletionBlock;\n{\n    [_terminalFilter setFrameProcessingCompletionBlock:frameProcessingCompletionBlock];\n}\n\n- (void (^)(GPUImageOutput *, CMTime))frameProcessingCompletionBlock;\n{\n    return [_terminalFilter frameProcessingCompletionBlock];\n}\n\n#pragma mark -\n#pragma mark GPUImageInput protocol\n\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;\n{\n    for (GPUImageOutput<GPUImageInput> *currentFilter in _initialFilters)\n    {\n        if (currentFilter != self.inputFilterToIgnoreForUpdates)\n        {\n            [currentFilter newFrameReadyAtTime:frameTime atIndex:textureIndex];\n        }\n    }\n}\n\n- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;\n{\n    for (GPUImageOutput<GPUImageInput> *currentFilter in _initialFilters)\n    {\n        [currentFilter setInputFramebuffer:newInputFramebuffer atIndex:textureIndex];\n    }\n}\n\n- (NSInteger)nextAvailableTextureIndex;\n{\n//    if ([_initialFilters count] > 0)\n//    {\n//        return [[_initialFilters objectAtIndex:0] nextAvailableTextureIndex];\n//    }\n    \n    return 0;\n}\n\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n{\n    for (GPUImageOutput<GPUImageInput> *currentFilter in _initialFilters)\n    {\n        [currentFilter setInputSize:newSize atIndex:textureIndex];\n    }\n}\n\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n    for (GPUImageOutput<GPUImageInput> *currentFilter in _initialFilters)\n    {\n        [currentFilter setInputRotation:newInputRotation  atIndex:(NSInteger)textureIndex];\n    }\n}\n\n- 
(void)forceProcessingAtSize:(CGSize)frameSize;\n{\n    for (GPUImageOutput<GPUImageInput> *currentFilter in filters)\n    {\n        [currentFilter forceProcessingAtSize:frameSize];\n    }\n}\n\n- (void)forceProcessingAtSizeRespectingAspectRatio:(CGSize)frameSize;\n{\n    for (GPUImageOutput<GPUImageInput> *currentFilter in filters)\n    {\n        [currentFilter forceProcessingAtSizeRespectingAspectRatio:frameSize];\n    }\n}\n\n- (CGSize)maximumOutputSize;\n{\n    // I'm temporarily disabling adjustments for smaller output sizes until I figure out how to make this work better\n    return CGSizeZero;\n\n    /*\n    if (CGSizeEqualToSize(cachedMaximumOutputSize, CGSizeZero))\n    {\n        for (id<GPUImageInput> currentTarget in _initialFilters)\n        {\n            if ([currentTarget maximumOutputSize].width > cachedMaximumOutputSize.width)\n            {\n                cachedMaximumOutputSize = [currentTarget maximumOutputSize];\n            }\n        }\n    }\n    \n    return cachedMaximumOutputSize;\n     */\n}\n\n- (void)endProcessing;\n{\n    if (!isEndProcessing)\n    {\n        isEndProcessing = YES;\n        \n        for (id<GPUImageInput> currentTarget in _initialFilters)\n        {\n            [currentTarget endProcessing];\n        }\n    }\n}\n\n- (BOOL)wantsMonochromeInput;\n{\n    BOOL allInputsWantMonochromeInput = YES;\n    for (GPUImageOutput<GPUImageInput> *currentFilter in _initialFilters)\n    {\n        allInputsWantMonochromeInput = allInputsWantMonochromeInput && [currentFilter wantsMonochromeInput];\n    }\n    \n    return allInputsWantMonochromeInput;\n}\n\n- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;\n{\n    for (GPUImageOutput<GPUImageInput> *currentFilter in _initialFilters)\n    {\n        [currentFilter setCurrentlyReceivingMonochromeInput:newValue];\n    }\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFilterPipeline.h",
    "content": "#import <Foundation/Foundation.h>\n#import \"GPUImageOutput.h\"\n\n@interface GPUImageFilterPipeline : NSObject\n{\n    NSString *stringValue;\n}\n\n@property (strong) NSMutableArray *filters;\n\n@property (strong) GPUImageOutput *input;\n@property (strong) id <GPUImageInput> output;\n\n- (id) initWithOrderedFilters:(NSArray*) filters input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;\n- (id) initWithConfiguration:(NSDictionary*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;\n- (id) initWithConfigurationFile:(NSURL*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;\n\n- (void) addFilter:(GPUImageOutput<GPUImageInput> *)filter;\n- (void) addFilter:(GPUImageOutput<GPUImageInput> *)filter atIndex:(NSUInteger)insertIndex;\n- (void) replaceFilterAtIndex:(NSUInteger)index withFilter:(GPUImageOutput<GPUImageInput> *)filter;\n- (void) replaceAllFilters:(NSArray *) newFilters;\n- (void) removeFilter:(GPUImageOutput<GPUImageInput> *)filter;\n- (void) removeFilterAtIndex:(NSUInteger)index;\n- (void) removeAllFilters;\n\n- (UIImage *) currentFilteredFrame;\n- (UIImage *) currentFilteredFrameWithOrientation:(UIImageOrientation)imageOrientation;\n- (CGImageRef) newCGImageFromCurrentFilteredFrame;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFilterPipeline.m",
    "content": "#import \"GPUImageFilterPipeline.h\"\n\n@interface GPUImageFilterPipeline ()\n\n- (BOOL)_parseConfiguration:(NSDictionary *)configuration;\n\n- (void)_refreshFilters;\n\n@end\n\n@implementation GPUImageFilterPipeline\n\n@synthesize filters = _filters, input = _input, output = _output;\n\n#pragma mark Config file init\n\n- (id)initWithConfiguration:(NSDictionary *)configuration input:(GPUImageOutput *)input output:(id <GPUImageInput>)output {\n    self = [super init];\n    if (self) {\n        self.input = input;\n        self.output = output;\n        if (![self _parseConfiguration:configuration]) {\n            NSLog(@\"Sorry, a parsing error occurred.\");\n            abort();\n        }\n        [self _refreshFilters];\n    }\n    return self;\n}\n\n- (id)initWithConfigurationFile:(NSURL *)configuration input:(GPUImageOutput *)input output:(id <GPUImageInput>)output {\n    return [self initWithConfiguration:[NSDictionary dictionaryWithContentsOfURL:configuration] input:input output:output];\n}\n\n- (BOOL)_parseConfiguration:(NSDictionary *)configuration {\n    NSArray *filters = [configuration objectForKey:@\"Filters\"];\n    if (!filters) {\n        return NO;\n    }\n    \n    NSError *regexError = nil;\n    NSRegularExpression *parsingRegex = [NSRegularExpression regularExpressionWithPattern:@\"(float|CGPoint|NSString)\\\\((.*?)(?:,\\\\s*(.*?))*\\\\)\"\n                                                                                  options:0\n                                                                                    error:&regexError];\n    \n    // It's faster to put them into an array and then pass it to the filters property than it is to call [self addFilter:] every time\n    NSMutableArray *orderedFilters = [NSMutableArray arrayWithCapacity:[filters count]];\n    for (NSDictionary *filter in filters) {\n        NSString *filterName = [filter objectForKey:@\"FilterName\"];\n        Class theClass = NSClassFromString(filterName);\n        GPUImageOutput<GPUImageInput> *genericFilter = [[theClass alloc] init];\n        // Set up the properties\n        NSDictionary *filterAttributes;\n        if ((filterAttributes = [filter objectForKey:@\"Attributes\"])) {\n            for (NSString *propertyKey in filterAttributes) {\n                // Set up the selector\n                SEL theSelector = NSSelectorFromString(propertyKey);\n                NSInvocation *inv = [NSInvocation invocationWithMethodSignature:[theClass instanceMethodSignatureForSelector:theSelector]];\n                [inv setSelector:theSelector];\n                [inv setTarget:genericFilter];\n                \n                // check selector given with parameter\n                if ([propertyKey hasSuffix:@\":\"]) {\n                    \n                    stringValue = nil;\n                    \n                    // Then parse the arguments\n                    NSMutableArray *parsedArray;\n                    if ([[filterAttributes objectForKey:propertyKey] isKindOfClass:[NSArray class]]) {\n                        NSArray *array = [filterAttributes objectForKey:propertyKey];\n                        parsedArray = [NSMutableArray arrayWithCapacity:[array count]];\n                        for (NSString *string in array) {\n                            NSTextCheckingResult *parse = [parsingRegex firstMatchInString:string\n                                                                                   options:0\n                                                                      
               range:NSMakeRange(0, [string length])];\n\n                            NSString *modifier = [string substringWithRange:[parse rangeAtIndex:1]];\n                            if ([modifier isEqualToString:@\"float\"]) {\n                                // Float modifier, one argument\n                                CGFloat value = [[string substringWithRange:[parse rangeAtIndex:2]] floatValue];\n                                [parsedArray addObject:[NSNumber numberWithFloat:value]];\n                                [inv setArgument:&value atIndex:2];\n                            } else if ([modifier isEqualToString:@\"CGPoint\"]) {\n                                // CGPoint modifier, two float arguments\n                                CGFloat x = [[string substringWithRange:[parse rangeAtIndex:2]] floatValue];\n                                CGFloat y = [[string substringWithRange:[parse rangeAtIndex:3]] floatValue];\n                                CGPoint value = CGPointMake(x, y);\n                                [parsedArray addObject:[NSValue valueWithCGPoint:value]];\n                            } else if ([modifier isEqualToString:@\"NSString\"]) {\n                                // NSString modifier, one string argument\n                                stringValue = [[string substringWithRange:[parse rangeAtIndex:2]] copy];\n                                [inv setArgument:&stringValue atIndex:2];\n                                \n                            } else {\n                                return NO;\n                            }\n                        }\n                        [inv setArgument:&parsedArray atIndex:2];\n                    } else {\n                        NSString *string = [filterAttributes objectForKey:propertyKey];\n                        NSTextCheckingResult *parse = [parsingRegex firstMatchInString:string\n                                                                               options:0\n                                                                                 range:NSMakeRange(0, [string length])];\n                        \n                        NSString *modifier = [string substringWithRange:[parse rangeAtIndex:1]];\n                        if ([modifier isEqualToString:@\"float\"]) {\n                            // Float modifier, one argument\n                            CGFloat value = [[string substringWithRange:[parse rangeAtIndex:2]] floatValue];\n                            [inv setArgument:&value atIndex:2];\n                        } else if ([modifier isEqualToString:@\"CGPoint\"]) {\n                            // CGPoint modifier, two float arguments\n                            CGFloat x = [[string substringWithRange:[parse rangeAtIndex:2]] floatValue];\n                            CGFloat y = [[string substringWithRange:[parse rangeAtIndex:3]] floatValue];\n                            CGPoint value = CGPointMake(x, y);\n                            [inv setArgument:&value atIndex:2];\n                        } else if ([modifier isEqualToString:@\"NSString\"]) {\n                            // NSString modifier, one string argument\n                            stringValue = [[string substringWithRange:[parse rangeAtIndex:2]] copy];\n                            [inv setArgument:&stringValue atIndex:2];\n                            \n                        } else {\n                            return NO;\n                        }\n                    }\n                }\n                \n\n  
              [inv invoke];\n            }\n        }\n        [orderedFilters addObject:genericFilter];\n    }\n    self.filters = orderedFilters;\n    \n    return YES;\n}\n\n#pragma mark Regular init\n\n- (id)initWithOrderedFilters:(NSArray *)filters input:(GPUImageOutput *)input output:(id <GPUImageInput>)output {\n    self = [super init];\n    if (self) {\n        self.input = input;\n        self.output = output;\n        self.filters = [NSMutableArray arrayWithArray:filters];\n        [self _refreshFilters];\n    }\n    return self;\n}\n\n- (void)addFilter:(GPUImageOutput<GPUImageInput> *)filter atIndex:(NSUInteger)insertIndex {\n    [self.filters insertObject:filter atIndex:insertIndex];\n    [self _refreshFilters];\n}\n\n- (void)addFilter:(GPUImageOutput<GPUImageInput> *)filter {\n    [self.filters addObject:filter];\n    [self _refreshFilters];\n}\n\n- (void)replaceFilterAtIndex:(NSUInteger)index withFilter:(GPUImageOutput<GPUImageInput> *)filter {\n    [self.filters replaceObjectAtIndex:index withObject:filter];\n    [self _refreshFilters];\n}\n\n- (void) removeFilter:(GPUImageOutput<GPUImageInput> *)filter;\n{\n    [self.filters removeObject:filter];\n    [self _refreshFilters];\n}\n\n- (void)removeFilterAtIndex:(NSUInteger)index {\n    [self.filters removeObjectAtIndex:index];\n    [self _refreshFilters];\n}\n\n- (void)removeAllFilters {\n    [self.filters removeAllObjects];\n    [self _refreshFilters];\n}\n\n- (void)replaceAllFilters:(NSArray *)newFilters {\n    self.filters = [NSMutableArray arrayWithArray:newFilters];\n    [self _refreshFilters];\n}\n\n- (void)_refreshFilters {\n    \n    id prevFilter = self.input;\n    GPUImageOutput<GPUImageInput> *theFilter = nil;\n    \n    for (int i = 0; i < [self.filters count]; i++) {\n        theFilter = [self.filters objectAtIndex:i];\n        [prevFilter removeAllTargets];\n        [prevFilter addTarget:theFilter];\n        prevFilter = theFilter;\n    }\n    \n    [prevFilter removeAllTargets];\n    \n    if (self.output != nil) {\n        [prevFilter addTarget:self.output];\n    }\n}\n\n- (UIImage *)currentFilteredFrame {\n    return [(GPUImageOutput<GPUImageInput> *)[_filters lastObject] imageFromCurrentFramebuffer];\n}\n\n- (UIImage *)currentFilteredFrameWithOrientation:(UIImageOrientation)imageOrientation {\n  return [(GPUImageOutput<GPUImageInput> *)[_filters lastObject] imageFromCurrentFramebufferWithOrientation:imageOrientation];\n}\n\n- (CGImageRef)newCGImageFromCurrentFilteredFrame {\n    return [(GPUImageOutput<GPUImageInput> *)[_filters lastObject] newCGImageFromCurrentlyProcessedOutput];\n}\n\n@end"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFourInputFilter.h",
    "content": "#import \"GPUImageThreeInputFilter.h\"\n\nextern NSString *const kGPUImageFourInputTextureVertexShaderString;\n\n@interface GPUImageFourInputFilter : GPUImageThreeInputFilter\n{\n    GPUImageFramebuffer *fourthInputFramebuffer;\n\n    GLint filterFourthTextureCoordinateAttribute;\n    GLint filterInputTextureUniform4;\n    GPUImageRotationMode inputRotation4;\n    GLuint filterSourceTexture4;\n    CMTime fourthFrameTime;\n    \n    BOOL hasSetThirdTexture, hasReceivedFourthFrame, fourthFrameWasVideo;\n    BOOL fourthFrameCheckDisabled;\n}\n\n- (void)disableFourthFrameCheck;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFourInputFilter.m",
    "content": "#import \"GPUImageFourInputFilter.h\"\n\n\nNSString *const kGPUImageFourInputTextureVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n attribute vec4 inputTextureCoordinate2;\n attribute vec4 inputTextureCoordinate3;\n attribute vec4 inputTextureCoordinate4;\n \n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n varying vec2 textureCoordinate3;\n varying vec2 textureCoordinate4;\n \n void main()\n {\n     gl_Position = position;\n     textureCoordinate = inputTextureCoordinate.xy;\n     textureCoordinate2 = inputTextureCoordinate2.xy;\n     textureCoordinate3 = inputTextureCoordinate3.xy;\n     textureCoordinate4 = inputTextureCoordinate4.xy;\n }\n);\n\n@implementation GPUImageFourInputFilter\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;\n{\n    if (!(self = [self initWithVertexShaderFromString:kGPUImageFourInputTextureVertexShaderString fragmentShaderFromString:fragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;\n{\n    if (!(self = [super initWithVertexShaderFromString:vertexShaderString fragmentShaderFromString:fragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    inputRotation4 = kGPUImageNoRotation;\n    \n    hasSetThirdTexture = NO;\n    \n    hasReceivedFourthFrame = NO;\n    fourthFrameWasVideo = NO;\n    fourthFrameCheckDisabled = NO;\n    \n    fourthFrameTime = kCMTimeInvalid;\n    \n    runSynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext useImageProcessingContext];\n        filterFourthTextureCoordinateAttribute = [filterProgram attributeIndex:@\"inputTextureCoordinate4\"];\n        \n        filterInputTextureUniform4 = [filterProgram uniformIndex:@\"inputImageTexture4\"]; // This does assume a name of \"inputImageTexture3\" for the third input texture in the fragment shader\n        glEnableVertexAttribArray(filterFourthTextureCoordinateAttribute);\n    });\n    \n    return self;\n}\n\n- (void)initializeAttributes;\n{\n    [super initializeAttributes];\n    [filterProgram addAttribute:@\"inputTextureCoordinate4\"];\n}\n\n- (void)disableFourthFrameCheck;\n{\n    fourthFrameCheckDisabled = YES;\n}\n\n#pragma mark -\n#pragma mark Rendering\n\n- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;\n{\n    if (self.preventRendering)\n    {\n        [firstInputFramebuffer unlock];\n        [secondInputFramebuffer unlock];\n        [thirdInputFramebuffer unlock];\n        [fourthInputFramebuffer unlock];\n        return;\n    }\n    \n    [GPUImageContext setActiveShaderProgram:filterProgram];\n    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];\n    [outputFramebuffer activateFramebuffer];\n    if (usingNextFrameForImageCapture)\n    {\n        [outputFramebuffer lock];\n    }\n\n    [self setUniformsForProgramAtIndex:0];\n    \n    glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);\n    glClear(GL_COLOR_BUFFER_BIT);\n    \n\tglActiveTexture(GL_TEXTURE2);\n\tglBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);\n\tglUniform1i(filterInputTextureUniform, 2);\n    \n    
glActiveTexture(GL_TEXTURE3);\n    glBindTexture(GL_TEXTURE_2D, [secondInputFramebuffer texture]);\n    glUniform1i(filterInputTextureUniform2, 3);\n\n    glActiveTexture(GL_TEXTURE4);\n    glBindTexture(GL_TEXTURE_2D, [thirdInputFramebuffer texture]);\n    glUniform1i(filterInputTextureUniform3, 4);\n\n    glActiveTexture(GL_TEXTURE5);\n    glBindTexture(GL_TEXTURE_2D, [fourthInputFramebuffer texture]);\n    glUniform1i(filterInputTextureUniform4, 5);\n\n    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);\n\tglVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);\n    glVertexAttribPointer(filterSecondTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation2]);\n    glVertexAttribPointer(filterThirdTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation3]);\n    glVertexAttribPointer(filterFourthTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation4]);\n    \n    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\n    [firstInputFramebuffer unlock];\n    [secondInputFramebuffer unlock];\n    [thirdInputFramebuffer unlock];\n    [fourthInputFramebuffer unlock];\n    if (usingNextFrameForImageCapture)\n    {\n        dispatch_semaphore_signal(imageCaptureSemaphore);\n    }\n}\n\n#pragma mark -\n#pragma mark GPUImageInput\n\n- (NSInteger)nextAvailableTextureIndex;\n{\n    if (hasSetThirdTexture)\n    {\n        return 3;\n    }\n    else if (hasSetSecondTexture)\n    {\n        return 2;\n    }\n    else if (hasSetFirstTexture)\n    {\n        return 1;\n    }\n    else\n    {\n        return 0;\n    }\n}\n\n- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;\n{\n    if (textureIndex == 0)\n    {\n        firstInputFramebuffer = newInputFramebuffer;\n        hasSetFirstTexture = YES;\n        [firstInputFramebuffer lock];\n    }\n    else if (textureIndex == 1)\n    {\n        secondInputFramebuffer = newInputFramebuffer;\n        hasSetSecondTexture = YES;\n        [secondInputFramebuffer lock];\n    }\n    else if (textureIndex == 2)\n    {\n        thirdInputFramebuffer = newInputFramebuffer;\n        hasSetThirdTexture = YES;\n        [thirdInputFramebuffer lock];\n    }\n    else\n    {\n        fourthInputFramebuffer = newInputFramebuffer;\n        [fourthInputFramebuffer lock];\n    }\n}\n\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n{\n    if (textureIndex == 0)\n    {\n        [super setInputSize:newSize atIndex:textureIndex];\n        \n        if (CGSizeEqualToSize(newSize, CGSizeZero))\n        {\n            hasSetFirstTexture = NO;\n        }\n    }\n    else if (textureIndex == 1)\n    {\n        if (CGSizeEqualToSize(newSize, CGSizeZero))\n        {\n            hasSetSecondTexture = NO;\n        }\n    }\n    else if (textureIndex == 2)\n    {\n        if (CGSizeEqualToSize(newSize, CGSizeZero))\n        {\n            hasSetThirdTexture = NO;\n        }\n    }\n}\n\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n    if (textureIndex == 0)\n    {\n        inputRotation = newInputRotation;\n    }\n    else if (textureIndex == 1)\n    {\n        inputRotation2 = newInputRotation;\n    }\n    else if (textureIndex == 2)\n    {\n        inputRotation3 = newInputRotation;\n    }\n    else\n    {\n        inputRotation4 = newInputRotation;\n    
}\n}\n\n- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex;\n{\n    CGSize rotatedSize = sizeToRotate;\n    \n    GPUImageRotationMode rotationToCheck;\n    if (textureIndex == 0)\n    {\n        rotationToCheck = inputRotation;\n    }\n    else if (textureIndex == 1)\n    {\n        rotationToCheck = inputRotation2;\n    }\n    else if (textureIndex == 2)\n    {\n        rotationToCheck = inputRotation3;\n    }\n    else\n    {\n        rotationToCheck = inputRotation4;\n    }\n    \n    if (GPUImageRotationSwapsWidthAndHeight(rotationToCheck))\n    {\n        rotatedSize.width = sizeToRotate.height;\n        rotatedSize.height = sizeToRotate.width;\n    }\n    \n    return rotatedSize;\n}\n\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;\n{\n    // You can set up infinite update loops, so this helps to short circuit them\n    if (hasReceivedFirstFrame && hasReceivedSecondFrame && hasReceivedThirdFrame && hasReceivedFourthFrame)\n    {\n        return;\n    }\n    \n    BOOL updatedMovieFrameOppositeStillImage = NO;\n    \n    if (textureIndex == 0)\n    {\n        hasReceivedFirstFrame = YES;\n        firstFrameTime = frameTime;\n        if (secondFrameCheckDisabled)\n        {\n            hasReceivedSecondFrame = YES;\n        }\n        if (thirdFrameCheckDisabled)\n        {\n            hasReceivedThirdFrame = YES;\n        }\n        if (fourthFrameCheckDisabled)\n        {\n            hasReceivedFourthFrame = YES;\n        }\n        \n        if (!CMTIME_IS_INDEFINITE(frameTime))\n        {\n            if CMTIME_IS_INDEFINITE(secondFrameTime)\n            {\n                updatedMovieFrameOppositeStillImage = YES;\n            }\n        }\n    }\n    else if (textureIndex == 1)\n    {\n        hasReceivedSecondFrame = YES;\n        secondFrameTime = frameTime;\n        if (firstFrameCheckDisabled)\n        {\n            hasReceivedFirstFrame = YES;\n        }\n        if (thirdFrameCheckDisabled)\n        {\n            hasReceivedThirdFrame = YES;\n        }\n        if (fourthFrameCheckDisabled)\n        {\n            hasReceivedFourthFrame = YES;\n        }\n\n        if (!CMTIME_IS_INDEFINITE(frameTime))\n        {\n            if CMTIME_IS_INDEFINITE(firstFrameTime)\n            {\n                updatedMovieFrameOppositeStillImage = YES;\n            }\n        }\n    }\n    else if (textureIndex == 2)\n    {\n        hasReceivedThirdFrame = YES;\n        thirdFrameTime = frameTime;\n        if (firstFrameCheckDisabled)\n        {\n            hasReceivedFirstFrame = YES;\n        }\n        if (secondFrameCheckDisabled)\n        {\n            hasReceivedSecondFrame = YES;\n        }\n        if (fourthFrameCheckDisabled)\n        {\n            hasReceivedFourthFrame = YES;\n        }\n\n        if (!CMTIME_IS_INDEFINITE(frameTime))\n        {\n            if CMTIME_IS_INDEFINITE(firstFrameTime)\n            {\n                updatedMovieFrameOppositeStillImage = YES;\n            }\n        }\n    }\n    else\n    {\n        hasReceivedFourthFrame = YES;\n        fourthFrameTime = frameTime;\n        if (firstFrameCheckDisabled)\n        {\n            hasReceivedFirstFrame = YES;\n        }\n        if (secondFrameCheckDisabled)\n        {\n            hasReceivedSecondFrame = YES;\n        }\n        if (thirdFrameCheckDisabled)\n        {\n            hasReceivedThirdFrame = YES;\n        }\n        \n        if (!CMTIME_IS_INDEFINITE(frameTime))\n        {\n            if CMTIME_IS_INDEFINITE(firstFrameTime)\n            {\n     
           updatedMovieFrameOppositeStillImage = YES;\n            }\n        }\n    }\n    \n    // || (hasReceivedFirstFrame && secondFrameCheckDisabled) || (hasReceivedSecondFrame && firstFrameCheckDisabled)\n    if ((hasReceivedFirstFrame && hasReceivedSecondFrame && hasReceivedThirdFrame && hasReceivedFourthFrame) || updatedMovieFrameOppositeStillImage)\n    {\n        static const GLfloat imageVertices[] = {\n            -1.0f, -1.0f,\n            1.0f, -1.0f,\n            -1.0f,  1.0f,\n            1.0f,  1.0f,\n        };\n        \n        [self renderToTextureWithVertices:imageVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];\n        \n        [self informTargetsAboutNewFrameAtTime:frameTime];\n\n        hasReceivedFirstFrame = NO;\n        hasReceivedSecondFrame = NO;\n        hasReceivedThirdFrame = NO;\n        hasReceivedFourthFrame = NO;\n    }\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFramebuffer.h",
    "content": "#import <Foundation/Foundation.h>\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n#import <OpenGLES/EAGL.h>\n#import <OpenGLES/ES2/gl.h>\n#import <OpenGLES/ES2/glext.h>\n#else\n#import <OpenGL/OpenGL.h>\n#import <OpenGL/gl.h>\n#endif\n\n#import <QuartzCore/QuartzCore.h>\n#import <CoreMedia/CoreMedia.h>\n\n\ntypedef struct GPUTextureOptions {\n    GLenum minFilter;\n    GLenum magFilter;\n    GLenum wrapS;\n    GLenum wrapT;\n    GLenum internalFormat;\n    GLenum format;\n    GLenum type;\n} GPUTextureOptions;\n\n@interface GPUImageFramebuffer : NSObject\n\n@property(readonly) CGSize size;\n@property(readonly) GPUTextureOptions textureOptions;\n@property(readonly) GLuint texture;\n@property(readonly) BOOL missingFramebuffer;\n\n// Initialization and teardown\n- (id)initWithSize:(CGSize)framebufferSize;\n- (id)initWithSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)fboTextureOptions onlyTexture:(BOOL)onlyGenerateTexture;\n- (id)initWithSize:(CGSize)framebufferSize overriddenTexture:(GLuint)inputTexture;\n\n// Usage\n- (void)activateFramebuffer;\n\n// Reference counting\n- (void)lock;\n- (void)unlock;\n- (void)clearAllLocks;\n- (void)disableReferenceCounting;\n- (void)enableReferenceCounting;\n\n// Image capture\n- (CGImageRef)newCGImageFromFramebufferContents;\n- (void)restoreRenderTarget;\n\n// Raw data bytes\n- (void)lockForReading;\n- (void)unlockAfterReading;\n- (NSUInteger)bytesPerRow;\n- (GLubyte *)byteBuffer;\n- (CVPixelBufferRef)pixelBuffer;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFramebuffer.m",
    "content": "#import \"GPUImageFramebuffer.h\"\n#import \"GPUImageOutput.h\"\n\n@interface GPUImageFramebuffer()\n{\n    GLuint framebuffer;\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n    CVPixelBufferRef renderTarget;\n    CVOpenGLESTextureRef renderTexture;\n    NSUInteger readLockCount;\n#else\n#endif\n    NSUInteger framebufferReferenceCount;\n    BOOL referenceCountingDisabled;\n}\n\n- (void)generateFramebuffer;\n- (void)generateTexture;\n- (void)destroyFramebuffer;\n\n@end\n\nvoid dataProviderReleaseCallback (void *info, const void *data, size_t size);\nvoid dataProviderUnlockCallback (void *info, const void *data, size_t size);\n\n@implementation GPUImageFramebuffer\n\n@synthesize size = _size;\n@synthesize textureOptions = _textureOptions;\n@synthesize texture = _texture;\n@synthesize missingFramebuffer = _missingFramebuffer;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)fboTextureOptions onlyTexture:(BOOL)onlyGenerateTexture;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n    _textureOptions = fboTextureOptions;\n    _size = framebufferSize;\n    framebufferReferenceCount = 0;\n    referenceCountingDisabled = NO;\n    _missingFramebuffer = onlyGenerateTexture;\n\n    if (_missingFramebuffer)\n    {\n        runSynchronouslyOnVideoProcessingQueue(^{\n            [GPUImageContext useImageProcessingContext];\n            [self generateTexture];\n            framebuffer = 0;\n        });\n    }\n    else\n    {\n        [self generateFramebuffer];\n    }\n    return self;\n}\n\n- (id)initWithSize:(CGSize)framebufferSize overriddenTexture:(GLuint)inputTexture;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n\n    GPUTextureOptions defaultTextureOptions;\n    defaultTextureOptions.minFilter = GL_LINEAR;\n    defaultTextureOptions.magFilter = GL_LINEAR;\n    defaultTextureOptions.wrapS = GL_CLAMP_TO_EDGE;\n    defaultTextureOptions.wrapT = GL_CLAMP_TO_EDGE;\n    defaultTextureOptions.internalFormat = GL_RGBA;\n    defaultTextureOptions.format = GL_BGRA;\n    defaultTextureOptions.type = GL_UNSIGNED_BYTE;\n\n    _textureOptions = defaultTextureOptions;\n    _size = framebufferSize;\n    framebufferReferenceCount = 0;\n    referenceCountingDisabled = YES;\n    \n    _texture = inputTexture;\n    \n    return self;\n}\n\n- (id)initWithSize:(CGSize)framebufferSize;\n{\n    GPUTextureOptions defaultTextureOptions;\n    defaultTextureOptions.minFilter = GL_LINEAR;\n    defaultTextureOptions.magFilter = GL_LINEAR;\n    defaultTextureOptions.wrapS = GL_CLAMP_TO_EDGE;\n    defaultTextureOptions.wrapT = GL_CLAMP_TO_EDGE;\n    defaultTextureOptions.internalFormat = GL_RGBA;\n    defaultTextureOptions.format = GL_BGRA;\n    defaultTextureOptions.type = GL_UNSIGNED_BYTE;\n\n    if (!(self = [self initWithSize:framebufferSize textureOptions:defaultTextureOptions onlyTexture:NO]))\n    {\n\t\treturn nil;\n    }\n\n    return self;\n}\n\n- (void)dealloc\n{\n    [self destroyFramebuffer];\n}\n\n#pragma mark -\n#pragma mark Internal\n\n- (void)generateTexture;\n{\n    glActiveTexture(GL_TEXTURE1);\n    glGenTextures(1, &_texture);\n    glBindTexture(GL_TEXTURE_2D, _texture);\n    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, _textureOptions.minFilter);\n    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, _textureOptions.magFilter);\n    // This is necessary for non-power-of-two textures\n    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, 
_textureOptions.wrapS);\n    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, _textureOptions.wrapT);\n    \n    // TODO: Handle mipmaps\n}\n\n- (void)generateFramebuffer;\n{\n    runSynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext useImageProcessingContext];\n    \n        glGenFramebuffers(1, &framebuffer);\n        glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);\n        \n        // By default, all framebuffers on iOS 5.0+ devices are backed by texture caches, using one shared cache\n        if ([GPUImageContext supportsFastTextureUpload])\n        {\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n            CVOpenGLESTextureCacheRef coreVideoTextureCache = [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache];\n            // Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/\n            \n            CFDictionaryRef empty; // empty value for attr value.\n            CFMutableDictionaryRef attrs;\n            empty = CFDictionaryCreate(kCFAllocatorDefault, NULL, NULL, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks); // our empty IOSurface properties dictionary\n            attrs = CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);\n            CFDictionarySetValue(attrs, kCVPixelBufferIOSurfacePropertiesKey, empty);\n            \n            CVReturn err = CVPixelBufferCreate(kCFAllocatorDefault, (int)_size.width, (int)_size.height, kCVPixelFormatType_32BGRA, attrs, &renderTarget);\n            if (err)\n            {\n                NSLog(@\"FBO size: %f, %f\", _size.width, _size.height);\n                NSAssert(NO, @\"Error at CVPixelBufferCreate %d\", err);\n            }\n            \n            err = CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault, coreVideoTextureCache, renderTarget,\n                                                                NULL, // texture attributes\n                                                                GL_TEXTURE_2D,\n                                                                _textureOptions.internalFormat, // opengl format\n                                                                (int)_size.width,\n                                                                (int)_size.height,\n                                                                _textureOptions.format, // native iOS format\n                                                                _textureOptions.type,\n                                                                0,\n                                                                &renderTexture);\n            if (err)\n            {\n                NSAssert(NO, @\"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d\", err);\n            }\n            \n            CFRelease(attrs);\n            CFRelease(empty);\n            \n            glBindTexture(CVOpenGLESTextureGetTarget(renderTexture), CVOpenGLESTextureGetName(renderTexture));\n            _texture = CVOpenGLESTextureGetName(renderTexture);\n            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, _textureOptions.wrapS);\n            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, _textureOptions.wrapT);\n            \n            glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(renderTexture), 0);\n#endif\n        }\n        else\n        {\n            [self 
generateTexture];\n\n            glBindTexture(GL_TEXTURE_2D, _texture);\n            \n            glTexImage2D(GL_TEXTURE_2D, 0, _textureOptions.internalFormat, (int)_size.width, (int)_size.height, 0, _textureOptions.format, _textureOptions.type, 0);\n            glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, _texture, 0);\n        }\n        \n        #ifndef NS_BLOCK_ASSERTIONS\n        GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);\n        NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @\"Incomplete filter FBO: %d\", status);\n        #endif\n        \n        glBindTexture(GL_TEXTURE_2D, 0);\n    });\n}\n\n- (void)destroyFramebuffer;\n{\n    runSynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext useImageProcessingContext];\n        \n        if (framebuffer)\n        {\n            glDeleteFramebuffers(1, &framebuffer);\n            framebuffer = 0;\n        }\n\n        \n        if ([GPUImageContext supportsFastTextureUpload] && (!_missingFramebuffer))\n        {\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n            if (renderTarget)\n            {\n                CFRelease(renderTarget);\n                renderTarget = NULL;\n            }\n            \n            if (renderTexture)\n            {\n                CFRelease(renderTexture);\n                renderTexture = NULL;\n            }\n#endif\n        }\n        else\n        {\n            glDeleteTextures(1, &_texture);\n        }\n\n    });\n}\n\n#pragma mark -\n#pragma mark Usage\n\n- (void)activateFramebuffer;\n{\n    glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);\n    glViewport(0, 0, (int)_size.width, (int)_size.height);\n}\n\n#pragma mark -\n#pragma mark Reference counting\n\n- (void)lock;\n{\n    if (referenceCountingDisabled)\n    {\n        return;\n    }\n    \n    framebufferReferenceCount++;\n}\n\n- (void)unlock;\n{\n    if (referenceCountingDisabled)\n    {\n        return;\n    }\n    \n    if (framebufferReferenceCount == 0) {\n        NSLog(@\"这里修改了源码----\");\n        return;\n    }\n    \n    NSAssert(framebufferReferenceCount > 0, @\"Tried to overrelease a framebuffer, did you forget to call -useNextFrameForImageCapture before using -imageFromCurrentFramebuffer?\");\n    framebufferReferenceCount--;\n    if (framebufferReferenceCount < 1)\n    {\n        [[GPUImageContext sharedFramebufferCache] returnFramebufferToCache:self];\n    }\n}\n\n- (void)clearAllLocks;\n{\n    framebufferReferenceCount = 0;\n}\n\n- (void)disableReferenceCounting;\n{\n    referenceCountingDisabled = YES;\n}\n\n- (void)enableReferenceCounting;\n{\n    referenceCountingDisabled = NO;\n}\n\n#pragma mark -\n#pragma mark Image capture\n\nvoid dataProviderReleaseCallback (void *info, const void *data, size_t size)\n{\n    free((void *)data);\n}\n\nvoid dataProviderUnlockCallback (void *info, const void *data, size_t size)\n{\n    GPUImageFramebuffer *framebuffer = (__bridge_transfer GPUImageFramebuffer*)info;\n    \n    [framebuffer restoreRenderTarget];\n    [framebuffer unlock];\n    [[GPUImageContext sharedFramebufferCache] removeFramebufferFromActiveImageCaptureList:framebuffer];\n}\n\n- (CGImageRef)newCGImageFromFramebufferContents;\n{\n    // a CGImage can only be created from a 'normal' color texture\n    NSAssert(self.textureOptions.internalFormat == GL_RGBA, @\"For conversion to a CGImage the output texture format for this filter must be GL_RGBA.\");\n    NSAssert(self.textureOptions.type == GL_UNSIGNED_BYTE, @\"For conversion to a CGImage the type of the output 
texture of this filter must be GL_UNSIGNED_BYTE.\");\n    \n    __block CGImageRef cgImageFromBytes;\n    \n    runSynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext useImageProcessingContext];\n        \n        NSUInteger totalBytesForImage = (int)_size.width * (int)_size.height * 4;\n        // It appears that the width of a texture must be padded out to be a multiple of 8 (32 bytes) if reading from it using a texture cache\n        \n        GLubyte *rawImagePixels;\n        \n        CGDataProviderRef dataProvider = NULL;\n        if ([GPUImageContext supportsFastTextureUpload])\n        {\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n            NSUInteger paddedWidthOfImage = CVPixelBufferGetBytesPerRow(renderTarget) / 4.0;\n            NSUInteger paddedBytesForImage = paddedWidthOfImage * (int)_size.height * 4;\n            \n            glFinish();\n            CFRetain(renderTarget); // I need to retain the pixel buffer here and release in the data source callback to prevent its bytes from being prematurely deallocated during a photo write operation\n            [self lockForReading];\n            rawImagePixels = (GLubyte *)CVPixelBufferGetBaseAddress(renderTarget);\n            dataProvider = CGDataProviderCreateWithData((__bridge_retained void*)self, rawImagePixels, paddedBytesForImage, dataProviderUnlockCallback);\n            [[GPUImageContext sharedFramebufferCache] addFramebufferToActiveImageCaptureList:self]; // In case the framebuffer is swapped out on the filter, need to have a strong reference to it somewhere for it to hang on while the image is in existence\n#else\n#endif\n        }\n        else\n        {\n            [self activateFramebuffer];\n            rawImagePixels = (GLubyte *)malloc(totalBytesForImage);\n            glReadPixels(0, 0, (int)_size.width, (int)_size.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels);\n            dataProvider = CGDataProviderCreateWithData(NULL, rawImagePixels, totalBytesForImage, dataProviderReleaseCallback);\n            [self unlock]; // Don't need to keep this around anymore\n        }\n        \n        CGColorSpaceRef defaultRGBColorSpace = CGColorSpaceCreateDeviceRGB();\n        \n        if ([GPUImageContext supportsFastTextureUpload])\n        {\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n            cgImageFromBytes = CGImageCreate((int)_size.width, (int)_size.height, 8, 32, CVPixelBufferGetBytesPerRow(renderTarget), defaultRGBColorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst, dataProvider, NULL, NO, kCGRenderingIntentDefault);\n#else\n#endif\n        }\n        else\n        {\n            cgImageFromBytes = CGImageCreate((int)_size.width, (int)_size.height, 8, 32, 4 * (int)_size.width, defaultRGBColorSpace, kCGBitmapByteOrderDefault | kCGImageAlphaLast, dataProvider, NULL, NO, kCGRenderingIntentDefault);\n        }\n        \n        // Capture image with current device orientation\n        CGDataProviderRelease(dataProvider);\n        CGColorSpaceRelease(defaultRGBColorSpace);\n        \n    });\n    \n    return cgImageFromBytes;\n}\n\n- (void)restoreRenderTarget;\n{\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n    [self unlockAfterReading];\n    CFRelease(renderTarget);\n#else\n#endif\n}\n\n#pragma mark -\n#pragma mark Raw data bytes\n\n- (void)lockForReading\n{\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n    if ([GPUImageContext supportsFastTextureUpload])\n    {\n        if (readLockCount == 0)\n        {\n            
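// Only the first reader locks the pixel buffer's base address; nested reads just bump the lock count.\n            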
CVPixelBufferLockBaseAddress(renderTarget, 0);\n        }\n        readLockCount++;\n    }\n#endif\n}\n\n- (void)unlockAfterReading\n{\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n    if ([GPUImageContext supportsFastTextureUpload])\n    {\n        NSAssert(readLockCount > 0, @\"Unbalanced call to -[GPUImageFramebuffer unlockAfterReading]\");\n        readLockCount--;\n        if (readLockCount == 0)\n        {\n            CVPixelBufferUnlockBaseAddress(renderTarget, 0);\n        }\n    }\n#endif\n}\n\n- (NSUInteger)bytesPerRow;\n{\n    if ([GPUImageContext supportsFastTextureUpload])\n    {\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n        return CVPixelBufferGetBytesPerRow(renderTarget);\n#else\n        return _size.width * 4; // TODO: do more with this on the non-texture-cache side\n#endif\n    }\n    else\n    {\n        return _size.width * 4;\n    }\n}\n\n- (GLubyte *)byteBuffer;\n{\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n    [self lockForReading];\n    GLubyte * bufferBytes = CVPixelBufferGetBaseAddress(renderTarget);\n    [self unlockAfterReading];\n    return bufferBytes;\n#else\n    return NULL; // TODO: do more with this on the non-texture-cache side\n#endif\n}\n\n- (CVPixelBufferRef )pixelBuffer;\n{\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n    return renderTarget;\n#else\n    return NULL; // TODO: do more with this on the non-texture-cache side\n#endif\n}\n\n- (GLuint)texture;\n{\n//    NSLog(@\"Accessing texture: %d from FB: %@\", _texture, self);\n    return _texture;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFramebufferCache.h",
    "content": "#import <Foundation/Foundation.h>\n#import <QuartzCore/QuartzCore.h>\n#import \"GPUImageFramebuffer.h\"\n\n@interface GPUImageFramebufferCache : NSObject\n\n// Framebuffer management\n- (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)textureOptions onlyTexture:(BOOL)onlyTexture;\n- (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize onlyTexture:(BOOL)onlyTexture;\n- (void)returnFramebufferToCache:(GPUImageFramebuffer *)framebuffer;\n- (void)purgeAllUnassignedFramebuffers;\n- (void)addFramebufferToActiveImageCaptureList:(GPUImageFramebuffer *)framebuffer;\n- (void)removeFramebufferFromActiveImageCaptureList:(GPUImageFramebuffer *)framebuffer;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFramebufferCache.m",
    "content": "#import \"GPUImageFramebufferCache.h\"\n#import \"GPUImageContext.h\"\n#import \"GPUImageOutput.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n#import <UIKit/UIKit.h>\n#else\n#endif\n\n@interface GPUImageFramebufferCache()\n{\n//    NSCache *framebufferCache;\n    NSMutableDictionary *framebufferCache;\n    NSMutableDictionary *framebufferTypeCounts;\n    NSMutableArray *activeImageCaptureList; // Where framebuffers that may be lost by a filter, but which are still needed for a UIImage, etc., are stored\n    id memoryWarningObserver;\n\n    dispatch_queue_t framebufferCacheQueue;\n}\n\n- (NSString *)hashForSize:(CGSize)size textureOptions:(GPUTextureOptions)textureOptions onlyTexture:(BOOL)onlyTexture;\n\n@end\n\n\n@implementation GPUImageFramebufferCache\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n    __unsafe_unretained __typeof__ (self) weakSelf = self;\n    memoryWarningObserver = [[NSNotificationCenter defaultCenter] addObserverForName:UIApplicationDidReceiveMemoryWarningNotification object:nil queue:nil usingBlock:^(NSNotification *note) {\n        __typeof__ (self) strongSelf = weakSelf;\n        if (strongSelf) {\n            [strongSelf purgeAllUnassignedFramebuffers];\n        }\n    }];\n#else\n#endif\n\n//    framebufferCache = [[NSCache alloc] init];\n    framebufferCache = [[NSMutableDictionary alloc] init];\n    framebufferTypeCounts = [[NSMutableDictionary alloc] init];\n    activeImageCaptureList = [[NSMutableArray alloc] init];\n    framebufferCacheQueue = dispatch_queue_create(\"com.sunsetlakesoftware.GPUImage.framebufferCacheQueue\", GPUImageDefaultQueueAttribute());\n    \n    return self;\n}\n\n- (void)dealloc;\n{\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n    [[NSNotificationCenter defaultCenter] removeObserver:self];\n#else\n#endif\n}\n\n#pragma mark -\n#pragma mark Framebuffer management\n\n- (NSString *)hashForSize:(CGSize)size textureOptions:(GPUTextureOptions)textureOptions onlyTexture:(BOOL)onlyTexture;\n{\n    if (onlyTexture)\n    {\n        return [NSString stringWithFormat:@\"%.1fx%.1f-%d:%d:%d:%d:%d:%d:%d-NOFB\", size.width, size.height, textureOptions.minFilter, textureOptions.magFilter, textureOptions.wrapS, textureOptions.wrapT, textureOptions.internalFormat, textureOptions.format, textureOptions.type];\n    }\n    else\n    {\n        return [NSString stringWithFormat:@\"%.1fx%.1f-%d:%d:%d:%d:%d:%d:%d\", size.width, size.height, textureOptions.minFilter, textureOptions.magFilter, textureOptions.wrapS, textureOptions.wrapT, textureOptions.internalFormat, textureOptions.format, textureOptions.type];\n    }\n}\n\n- (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)textureOptions onlyTexture:(BOOL)onlyTexture;\n{\n    __block GPUImageFramebuffer *framebufferFromCache = nil;\n//    dispatch_sync(framebufferCacheQueue, ^{\n    runSynchronouslyOnVideoProcessingQueue(^{\n        NSString *lookupHash = [self hashForSize:framebufferSize textureOptions:textureOptions onlyTexture:onlyTexture];\n        NSNumber *numberOfMatchingTexturesInCache = [framebufferTypeCounts objectForKey:lookupHash];\n        NSInteger numberOfMatchingTextures = [numberOfMatchingTexturesInCache integerValue];\n        \n        if ([numberOfMatchingTexturesInCache integerValue] < 1)\n        {\n            // Nothing in the cache, create a new 
framebuffer to use\n            framebufferFromCache = [[GPUImageFramebuffer alloc] initWithSize:framebufferSize textureOptions:textureOptions onlyTexture:onlyTexture];\n        }\n        else\n        {\n            // Something found, pull the old framebuffer and decrement the count\n            NSInteger currentTextureID = (numberOfMatchingTextures - 1);\n            while ((framebufferFromCache == nil) && (currentTextureID >= 0))\n            {\n                NSString *textureHash = [NSString stringWithFormat:@\"%@-%ld\", lookupHash, (long)currentTextureID];\n                framebufferFromCache = [framebufferCache objectForKey:textureHash];\n                // Test the values in the cache first, to see if they got invalidated behind our back\n                if (framebufferFromCache != nil)\n                {\n                    // Withdraw this from the cache while it's in use\n                    [framebufferCache removeObjectForKey:textureHash];\n                }\n                currentTextureID--;\n            }\n            \n            currentTextureID++;\n            \n            [framebufferTypeCounts setObject:[NSNumber numberWithInteger:currentTextureID] forKey:lookupHash];\n            \n            if (framebufferFromCache == nil)\n            {\n                framebufferFromCache = [[GPUImageFramebuffer alloc] initWithSize:framebufferSize textureOptions:textureOptions onlyTexture:onlyTexture];\n            }\n        }\n    });\n\n    [framebufferFromCache lock];\n    return framebufferFromCache;\n}\n\n- (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize onlyTexture:(BOOL)onlyTexture;\n{\n    GPUTextureOptions defaultTextureOptions;\n    defaultTextureOptions.minFilter = GL_LINEAR;\n    defaultTextureOptions.magFilter = GL_LINEAR;\n    defaultTextureOptions.wrapS = GL_CLAMP_TO_EDGE;\n    defaultTextureOptions.wrapT = GL_CLAMP_TO_EDGE;\n    defaultTextureOptions.internalFormat = GL_RGBA;\n    defaultTextureOptions.format = GL_BGRA;\n    defaultTextureOptions.type = GL_UNSIGNED_BYTE;\n    \n    return [self fetchFramebufferForSize:framebufferSize textureOptions:defaultTextureOptions onlyTexture:onlyTexture];\n}\n\n- (void)returnFramebufferToCache:(GPUImageFramebuffer *)framebuffer;\n{\n    [framebuffer clearAllLocks];\n    \n//    dispatch_async(framebufferCacheQueue, ^{\n    runAsynchronouslyOnVideoProcessingQueue(^{\n        CGSize framebufferSize = framebuffer.size;\n        GPUTextureOptions framebufferTextureOptions = framebuffer.textureOptions;\n        NSString *lookupHash = [self hashForSize:framebufferSize textureOptions:framebufferTextureOptions onlyTexture:framebuffer.missingFramebuffer];\n        NSNumber *numberOfMatchingTexturesInCache = [framebufferTypeCounts objectForKey:lookupHash];\n        NSInteger numberOfMatchingTextures = [numberOfMatchingTexturesInCache integerValue];\n        \n        NSString *textureHash = [NSString stringWithFormat:@\"%@-%ld\", lookupHash, (long)numberOfMatchingTextures];\n        \n//        [framebufferCache setObject:framebuffer forKey:textureHash cost:round(framebufferSize.width * framebufferSize.height * 4.0)];\n        [framebufferCache setObject:framebuffer forKey:textureHash];\n        [framebufferTypeCounts setObject:[NSNumber numberWithInteger:(numberOfMatchingTextures + 1)] forKey:lookupHash];\n    });\n}\n\n- (void)purgeAllUnassignedFramebuffers;\n{\n    runAsynchronouslyOnVideoProcessingQueue(^{\n//    dispatch_async(framebufferCacheQueue, ^{\n        [framebufferCache 
removeAllObjects];\n        [framebufferTypeCounts removeAllObjects];\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n        CVOpenGLESTextureCacheFlush([[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], 0);\n#else\n#endif\n    });\n}\n\n- (void)addFramebufferToActiveImageCaptureList:(GPUImageFramebuffer *)framebuffer;\n{\n    runAsynchronouslyOnVideoProcessingQueue(^{\n//    dispatch_async(framebufferCacheQueue, ^{\n        [activeImageCaptureList addObject:framebuffer];\n    });\n}\n\n- (void)removeFramebufferFromActiveImageCaptureList:(GPUImageFramebuffer *)framebuffer;\n{\n    runAsynchronouslyOnVideoProcessingQueue(^{\n//  dispatch_async(framebufferCacheQueue, ^{\n        [activeImageCaptureList removeObject:framebuffer];\n    });\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageMovie.h",
    "content": "#import <Foundation/Foundation.h>\n#import <AVFoundation/AVFoundation.h>\n#import \"GPUImageContext.h\"\n#import \"GPUImageOutput.h\"\n\n/** Protocol for getting Movie played callback.\n */\n@protocol GPUImageMovieDelegate <NSObject>\n@optional\n- (void)didCompletePlayingMovie;\n\n- (void)willDisplayAtTime:(CMTime)time;\n@end\n\n/** Source object for filtering movies\n */\n@interface GPUImageMovie : GPUImageOutput\n\n@property (readwrite, retain) AVAsset *asset;\n@property (readwrite, retain) AVPlayerItem *playerItem;\n@property(readwrite, retain) NSURL *url;\n\n/** This enables the benchmarking mode, which logs out instantaneous and average frame times to the console\n */\n@property(readwrite, nonatomic) BOOL runBenchmark;\n\n/** This determines whether to play back a movie as fast as the frames can be processed, or if the original speed of the movie should be respected. Defaults to NO.\n */\n@property(readwrite, nonatomic) BOOL playAtActualSpeed;\n\n/** This determines whether the video should repeat (loop) at the end and restart from the beginning. Defaults to NO.\n */\n@property(readwrite, nonatomic) BOOL shouldRepeat;\n\n/** This specifies the progress of the process on a scale from 0 to 1.0. A value of 0 means the process has not yet begun, A value of 1.0 means the conversaion is complete.\n    This property is not key-value observable.\n */\n@property(readonly, nonatomic) float progress;\n\n/** This is used to send the delete Movie did complete playing alert\n */\n@property (readwrite, nonatomic, assign) id <GPUImageMovieDelegate>delegate;\n\n@property (readonly, nonatomic) AVAssetReader *assetReader;\n@property (readonly, nonatomic) BOOL audioEncodingIsFinished;\n@property (readonly, nonatomic) BOOL videoEncodingIsFinished;\n\n@property (readwrite, nonatomic) BOOL isPaused;\n/// @name Initialization and teardown\n- (id)initWithAsset:(AVAsset *)asset;\n- (id)initWithPlayerItem:(AVPlayerItem *)playerItem;\n- (id)initWithURL:(NSURL *)url;\n- (void)yuvConversionSetup;\n\n/// @name Movie processing\n- (void)enableSynchronizedEncodingUsingMovieWriter:(GPUImageMovieWriter *)movieWriter;\n- (BOOL)readNextVideoFrameFromOutput:(AVAssetReaderOutput *)readerVideoTrackOutput;\n- (BOOL)readNextAudioSampleFromOutput:(AVAssetReaderOutput *)readerAudioTrackOutput;\n- (void)startProcessing;\n- (void)endProcessing;\n- (void)cancelProcessing;\n- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer; \n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageMovie.m",
    "content": "#import \"GPUImageMovie.h\"\n#import \"GPUImageMovieWriter.h\"\n#import \"GPUImageFilter.h\"\n#import \"GPUImageVideoCamera.h\"\n\n@interface GPUImageMovie () <AVPlayerItemOutputPullDelegate>\n{\n    BOOL audioEncodingIsFinished, videoEncodingIsFinished;\n    GPUImageMovieWriter *synchronizedMovieWriter;\n    AVAssetReader *reader;\n    AVPlayerItemVideoOutput *playerItemOutput;\n    CADisplayLink *displayLink;\n    CMTime previousFrameTime, processingFrameTime;\n    CFAbsoluteTime previousActualFrameTime;\n    BOOL keepLooping;\n\n    GLuint luminanceTexture, chrominanceTexture;\n\n    GLProgram *yuvConversionProgram;\n    GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute;\n    GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform;\n    GLint yuvConversionMatrixUniform;\n    const GLfloat *_preferredConversion;\n    \n    BOOL isFullYUVRange;\n\n    int imageBufferWidth, imageBufferHeight;\n//解决播放movie有延时的bug\n    CGFloat delayoOffsetTime;\n}\n\n- (void)processAsset;\n\n@end\n\n@implementation GPUImageMovie\n\n@synthesize url = _url;\n@synthesize asset = _asset;\n@synthesize runBenchmark = _runBenchmark;\n@synthesize playAtActualSpeed = _playAtActualSpeed;\n@synthesize delegate = _delegate;\n@synthesize shouldRepeat = _shouldRepeat;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithURL:(NSURL *)url;\n{\n    if (!(self = [super init])) \n    {\n        return nil;\n    }\n\n    [self yuvConversionSetup];\n\n    self.url = url;\n    self.asset = nil;\n\n    return self;\n}\n\n- (id)initWithAsset:(AVAsset *)asset;\n{\n    if (!(self = [super init])) \n    {\n      return nil;\n    }\n    \n    [self yuvConversionSetup];\n\n    self.url = nil;\n    self.asset = asset;\n\n    return self;\n}\n\n- (id)initWithPlayerItem:(AVPlayerItem *)playerItem;\n{\n    if (!(self = [super init]))\n    {\n        return nil;\n    }\n\n    [self yuvConversionSetup];\n\n    self.url = nil;\n    self.asset = nil;\n    self.playerItem = playerItem;\n\n    return self;\n}\n\n- (void)yuvConversionSetup;\n{\n    if ([GPUImageContext supportsFastTextureUpload])\n    {\n        runSynchronouslyOnVideoProcessingQueue(^{\n            [GPUImageContext useImageProcessingContext];\n\n            _preferredConversion = kColorConversion709;\n            isFullYUVRange       = YES;\n            yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVFullRangeConversionForLAFragmentShaderString];\n\n            if (!yuvConversionProgram.initialized)\n            {\n                [yuvConversionProgram addAttribute:@\"position\"];\n                [yuvConversionProgram addAttribute:@\"inputTextureCoordinate\"];\n\n                if (![yuvConversionProgram link])\n                {\n                    NSString *progLog = [yuvConversionProgram programLog];\n                    NSLog(@\"Program link log: %@\", progLog);\n                    NSString *fragLog = [yuvConversionProgram fragmentShaderLog];\n                    NSLog(@\"Fragment shader compile log: %@\", fragLog);\n                    NSString *vertLog = [yuvConversionProgram vertexShaderLog];\n                    NSLog(@\"Vertex shader compile log: %@\", vertLog);\n                    yuvConversionProgram = nil;\n                    NSAssert(NO, @\"Filter shader link failed\");\n                }\n            }\n\n            yuvConversionPositionAttribute = 
[yuvConversionProgram attributeIndex:@\"position\"];\n            yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@\"inputTextureCoordinate\"];\n            yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@\"luminanceTexture\"];\n            yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@\"chrominanceTexture\"];\n            yuvConversionMatrixUniform = [yuvConversionProgram uniformIndex:@\"colorConversionMatrix\"];\n\n            [GPUImageContext setActiveShaderProgram:yuvConversionProgram];\n\n            glEnableVertexAttribArray(yuvConversionPositionAttribute);\n            glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute);\n        });\n    }\n}\n\n- (void)dealloc\n{\n    // Moved into endProcessing\n    //if (self.playerItem && (displayLink != nil))\n    //{\n    //    [displayLink invalidate]; // remove from all run loops\n    //    displayLink = nil;\n    //}\n}\n\n#pragma mark -\n#pragma mark Movie processing\n\n- (void)enableSynchronizedEncodingUsingMovieWriter:(GPUImageMovieWriter *)movieWriter;\n{\n    synchronizedMovieWriter = movieWriter;\n    movieWriter.encodingLiveVideo = NO;\n}\n\n- (void)startProcessing\n{\n    if( self.playerItem ) {\n        [self processPlayerItem];\n        return;\n    }\n    if(self.url == nil)\n    {\n      [self processAsset];\n      return;\n    }\n    \n    if (_shouldRepeat) keepLooping = YES;\n    \n    previousFrameTime = kCMTimeZero;\n    previousActualFrameTime = CFAbsoluteTimeGetCurrent();\n  \n    NSDictionary *inputOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];\n    AVURLAsset *inputAsset = [[AVURLAsset alloc] initWithURL:self.url options:inputOptions];\n    \n    GPUImageMovie __block *blockSelf = self;\n    \n    [inputAsset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@\"tracks\"] completionHandler: ^{\n        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{\n            NSError *error = nil;\n            AVKeyValueStatus tracksStatus = [inputAsset statusOfValueForKey:@\"tracks\" error:&error];\n            if (tracksStatus != AVKeyValueStatusLoaded)\n            {\n                return;\n            }\n            blockSelf.asset = inputAsset;\n            [blockSelf processAsset];\n            blockSelf = nil;\n        });\n    }];\n}\n\n- (AVAssetReader*)createAssetReader\n{\n    NSError *error = nil;\n    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:self.asset error:&error];\n\n    NSMutableDictionary *outputSettings = [NSMutableDictionary dictionary];\n    if ([GPUImageContext supportsFastTextureUpload]) {\n        [outputSettings setObject:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) forKey:(id)kCVPixelBufferPixelFormatTypeKey];\n        isFullYUVRange = YES;\n    }\n    else {\n        [outputSettings setObject:@(kCVPixelFormatType_32BGRA) forKey:(id)kCVPixelBufferPixelFormatTypeKey];\n        isFullYUVRange = NO;\n    }\n    \n    // Maybe set alwaysCopiesSampleData to NO on iOS 5.0 for faster video decoding\n    AVAssetReaderTrackOutput *readerVideoTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:[[self.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] outputSettings:outputSettings];\n    readerVideoTrackOutput.alwaysCopiesSampleData = NO;\n    [assetReader addOutput:readerVideoTrackOutput];\n\n    NSArray *audioTracks = [self.asset 
tracksWithMediaType:AVMediaTypeAudio];\n    BOOL shouldRecordAudioTrack = (([audioTracks count] > 0) && (self.audioEncodingTarget != nil) );\n    AVAssetReaderTrackOutput *readerAudioTrackOutput = nil;\n\n    if (shouldRecordAudioTrack)\n    {\n        [self.audioEncodingTarget setShouldInvalidateAudioSampleWhenDone:YES];\n        \n        // This might need to be extended to handle movies with more than one audio track\n        AVAssetTrack* audioTrack = [audioTracks objectAtIndex:0];\n        readerAudioTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];\n        readerAudioTrackOutput.alwaysCopiesSampleData = NO;\n        [assetReader addOutput:readerAudioTrackOutput];\n    }\n\n    return assetReader;\n}\n\n- (void)processAsset\n{\n    reader = [self createAssetReader];\n\n    AVAssetReaderOutput *readerVideoTrackOutput = nil;\n    AVAssetReaderOutput *readerAudioTrackOutput = nil;\n\n    audioEncodingIsFinished = YES;\n    for( AVAssetReaderOutput *output in reader.outputs ) {\n        if( [output.mediaType isEqualToString:AVMediaTypeAudio] ) {\n            audioEncodingIsFinished = NO;\n            readerAudioTrackOutput = output;\n        }\n        else if( [output.mediaType isEqualToString:AVMediaTypeVideo] ) {\n            readerVideoTrackOutput = output;\n        }\n    }\n\n    if ([reader startReading] == NO) \n    {\n            NSLog(@\"Error reading from file at URL: %@\", self.url);\n        return;\n    }\n\n    __unsafe_unretained GPUImageMovie *weakSelf = self;\n\n    if (synchronizedMovieWriter != nil)\n    {\n        [synchronizedMovieWriter setVideoInputReadyCallback:^{\n            return [weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];\n        }];\n\n        [synchronizedMovieWriter setAudioInputReadyCallback:^{\n            return [weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];\n        }];\n\n        [synchronizedMovieWriter enableSynchronizationCallbacks];\n    }\n    else\n    {\n        while (reader.status == AVAssetReaderStatusReading && (!_shouldRepeat || keepLooping))\n        {\n                [weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];\n\n            if ( (readerAudioTrackOutput) && (!audioEncodingIsFinished) )\n            {\n                    [weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];\n            }\n\n        }\n\n        if (reader.status == AVAssetReaderStatusCompleted) {\n                \n            [reader cancelReading];\n\n            if (keepLooping) {\n                reader = nil;\n                dispatch_async(dispatch_get_main_queue(), ^{\n                    [self startProcessing];\n                });\n            } else {\n                [weakSelf endProcessing];\n            }\n\n        }\n    }\n}\n\n- (void)processPlayerItem\n{\n    runSynchronouslyOnVideoProcessingQueue(^{\n        displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkCallback:)];\n        [displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSRunLoopCommonModes];\n        [displayLink setPaused:YES];\n\n        dispatch_queue_t videoProcessingQueue = [GPUImageContext sharedContextQueue];\n        NSMutableDictionary *pixBuffAttributes = [NSMutableDictionary dictionary];\n        if ([GPUImageContext supportsFastTextureUpload]) {\n            [pixBuffAttributes setObject:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) forKey:(id)kCVPixelBufferPixelFormatTypeKey];\n        }\n        else {\n  
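          // Without fast texture upload, fall back to requesting BGRA pixel buffers from the player item output.\n  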
          [pixBuffAttributes setObject:@(kCVPixelFormatType_32BGRA) forKey:(id)kCVPixelBufferPixelFormatTypeKey];\n        }\n        playerItemOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:pixBuffAttributes];\n        [playerItemOutput setDelegate:self queue:videoProcessingQueue];\n\n        [_playerItem addOutput:playerItemOutput];\n        [playerItemOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0.1];\n    });\n}\n\n- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender\n{\n\t// Restart display link.\n\t[displayLink setPaused:NO];\n}\n\n- (void)displayLinkCallback:(CADisplayLink *)sender\n{\n\t/*\n\t The callback gets called once every Vsync.\n\t Using the display link's timestamp and duration we can compute the next time the screen will be refreshed, and copy the pixel buffer for that time\n\t This pixel buffer can then be processed and later rendered on screen.\n\t */\n\t// Calculate the nextVsync time which is when the screen will be refreshed next.\n\tCFTimeInterval nextVSync = ([sender timestamp] + [sender duration]);\n\n\tCMTime outputItemTime = [playerItemOutput itemTimeForHostTime:nextVSync];\n\n\tif ([playerItemOutput hasNewPixelBufferForItemTime:outputItemTime]) {\n        __unsafe_unretained GPUImageMovie *weakSelf = self;\n\t\tCVPixelBufferRef pixelBuffer = [playerItemOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL];\n        if( pixelBuffer )\n            runSynchronouslyOnVideoProcessingQueue(^{\n                [weakSelf processMovieFrame:pixelBuffer withSampleTime:outputItemTime];\n                CFRelease(pixelBuffer);\n            });\n\t}\n}\n\n// Fix for delayed movie playback; the original source was modified here\n- (BOOL)readNextVideoFrameFromOutput:(AVAssetReaderOutput *)readerVideoTrackOutput;\n{\n    while (self.isPaused) {\n        usleep(100000);\n    }\n    if (reader.status == AVAssetReaderStatusReading && ! 
videoEncodingIsFinished)\n    {\n        CMSampleBufferRef sampleBufferRef = [readerVideoTrackOutput copyNextSampleBuffer];\n        if (sampleBufferRef) \n        {\n            //NSLog(@\"read a video frame: %@\", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferRef))));\n            if (_playAtActualSpeed)\n            {\n                // Do this outside of the video processing queue to not slow that down while waiting\n                CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferRef);\n                CMTime differenceFromLastFrame = CMTimeSubtract(currentSampleTime, previousFrameTime);\n\n                if ( differenceFromLastFrame.value > 0 ) {\n                    CFAbsoluteTime currentActualTime = CFAbsoluteTimeGetCurrent();\n                    \n                    CGFloat frameTimeDifference = CMTimeGetSeconds(differenceFromLastFrame);\n                    CGFloat actualTimeDifference = currentActualTime - previousActualFrameTime;\n                    if (frameTimeDifference > actualTimeDifference ){\n                        CGFloat difTime = (frameTimeDifference - actualTimeDifference) - delayoOffsetTime;\n                        if(difTime > 0){\n                            double time = 1000000.0 * difTime;\n                            usleep(time);\n                        }\n                        delayoOffsetTime =  CFAbsoluteTimeGetCurrent() - currentActualTime - difTime;\n                        if (delayoOffsetTime < 0) {\n                            delayoOffsetTime = 0;\n                        }\n                    }\n                    previousFrameTime = currentSampleTime;\n                    previousActualFrameTime = CFAbsoluteTimeGetCurrent();\n                }\n            }\n            __unsafe_unretained GPUImageMovie *weakSelf = self;\n            runSynchronouslyOnVideoProcessingQueue(^{\n                [weakSelf processMovieFrame:sampleBufferRef];\n                CMSampleBufferInvalidate(sampleBufferRef);\n                CFRelease(sampleBufferRef);\n            });\n\n            return YES;\n        }\n        else\n        {\n            if (!keepLooping) {\n                videoEncodingIsFinished = YES;\n                if( videoEncodingIsFinished && audioEncodingIsFinished )\n                    [self endProcessing];\n            }\n        }\n    }\n    else if (synchronizedMovieWriter != nil)\n    {\n        if (reader.status == AVAssetReaderStatusCompleted)\n        {\n            [self endProcessing];\n        }\n    }\n    return NO;\n}\n\n- (BOOL)readNextAudioSampleFromOutput:(AVAssetReaderOutput *)readerAudioTrackOutput;\n{\n    if (reader.status == AVAssetReaderStatusReading && ! 
audioEncodingIsFinished)\n    {\n        CMSampleBufferRef audioSampleBufferRef = [readerAudioTrackOutput copyNextSampleBuffer];\n        if (audioSampleBufferRef)\n        {\n            //NSLog(@\"read an audio frame: %@\", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, CMSampleBufferGetOutputPresentationTimeStamp(audioSampleBufferRef))));\n            [self.audioEncodingTarget processAudioBuffer:audioSampleBufferRef];\n            CFRelease(audioSampleBufferRef);\n            return YES;\n        }\n        else\n        {\n            if (!keepLooping) {\n                audioEncodingIsFinished = YES;\n                if( videoEncodingIsFinished && audioEncodingIsFinished )\n                    [self endProcessing];\n            }\n        }\n    }\n    else if (synchronizedMovieWriter != nil)\n    {\n        if (reader.status == AVAssetReaderStatusCompleted || reader.status == AVAssetReaderStatusFailed ||\n            reader.status == AVAssetReaderStatusCancelled)\n        {\n            [self endProcessing];\n        }\n    }\n    return NO;\n}\n\n- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer; \n{\n//    CMTimeGetSeconds\n//    CMTimeSubtract\n    \n    CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(movieSampleBuffer);\n    CVImageBufferRef movieFrame = CMSampleBufferGetImageBuffer(movieSampleBuffer);\n\n    processingFrameTime = currentSampleTime;\n    [self processMovieFrame:movieFrame withSampleTime:currentSampleTime];\n}\n\n- (float)progress\n{\n    if ( AVAssetReaderStatusReading == reader.status )\n    {\n        float current = processingFrameTime.value * 1.0f / processingFrameTime.timescale;\n        float duration = self.asset.duration.value * 1.0f / self.asset.duration.timescale;\n        return current / duration;\n    }\n    else if ( AVAssetReaderStatusCompleted == reader.status )\n    {\n        return 1.f;\n    }\n    else\n    {\n        return 0.f;\n    }\n}\n\n- (void)processMovieFrame:(CVPixelBufferRef)movieFrame withSampleTime:(CMTime)currentSampleTime\n{\n    \n    if (self.delegate && [self.delegate respondsToSelector:@selector(willDisplayAtTime:)])\n    {\n        [self.delegate willDisplayAtTime:currentSampleTime];\n    }\n\n    \n    int bufferHeight = (int) CVPixelBufferGetHeight(movieFrame);\n    int bufferWidth = (int) CVPixelBufferGetWidth(movieFrame);\n\n    CFTypeRef colorAttachments = CVBufferGetAttachment(movieFrame, kCVImageBufferYCbCrMatrixKey, NULL);\n    if (colorAttachments != NULL)\n    {\n        if(CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo)\n        {\n            if (isFullYUVRange)\n            {\n                _preferredConversion = kColorConversion601FullRange;\n            }\n            else\n            {\n                _preferredConversion = kColorConversion601;\n            }\n        }\n        else\n        {\n            _preferredConversion = kColorConversion709;\n        }\n    }\n    else\n    {\n        if (isFullYUVRange)\n        {\n            _preferredConversion = kColorConversion601FullRange;\n        }\n        else\n        {\n            _preferredConversion = kColorConversion601;\n        }\n\n    }\n    \n    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();\n\n    // Fix issue 1580\n    [GPUImageContext useImageProcessingContext];\n    \n    if ([GPUImageContext supportsFastTextureUpload])\n    {\n        CVOpenGLESTextureRef luminanceTextureRef = NULL;\n        CVOpenGLESTextureRef 
chrominanceTextureRef = NULL;\n\n        //        if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])\n        if (CVPixelBufferGetPlaneCount(movieFrame) > 0) // Check for YUV planar inputs to do RGB conversion\n        {\n\n            if ( (imageBufferWidth != bufferWidth) && (imageBufferHeight != bufferHeight) )\n            {\n                imageBufferWidth = bufferWidth;\n                imageBufferHeight = bufferHeight;\n            }\n\n            CVReturn err;\n            // Y-plane\n            glActiveTexture(GL_TEXTURE4);\n            if ([GPUImageContext deviceSupportsRedTextures])\n            {\n                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);\n            }\n            else\n            {\n                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);\n            }\n            if (err)\n            {\n                NSLog(@\"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d\", err);\n            }\n\n            luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef);\n            glBindTexture(GL_TEXTURE_2D, luminanceTexture);\n            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);\n            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);\n\n            // UV-plane\n            glActiveTexture(GL_TEXTURE5);\n            if ([GPUImageContext deviceSupportsRedTextures])\n            {\n                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);\n            }\n            else\n            {\n                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);\n            }\n            if (err)\n            {\n                NSLog(@\"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d\", err);\n            }\n\n            chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef);\n            glBindTexture(GL_TEXTURE_2D, chrominanceTexture);\n            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);\n            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);\n\n//            if (!allTargetsWantMonochromeData)\n//            {\n                [self convertYUVToRGBOutput];\n//            }\n\n            for (id<GPUImageInput> currentTarget in targets)\n            {\n                NSInteger indexOfObject = [targets indexOfObject:currentTarget];\n                NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n                [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];\n            
    [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];\n            }\n            \n            [outputFramebuffer unlock];\n\n            for (id<GPUImageInput> currentTarget in targets)\n            {\n                NSInteger indexOfObject = [targets indexOfObject:currentTarget];\n                NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n                [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];\n            }\n\n            CVPixelBufferUnlockBaseAddress(movieFrame, 0);\n            CFRelease(luminanceTextureRef);\n            CFRelease(chrominanceTextureRef);\n        }\n        else\n        {\n            // TODO: Mesh this with the new framebuffer cache\n//            CVPixelBufferLockBaseAddress(movieFrame, 0);\n//\n//            CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, movieFrame, NULL, GL_TEXTURE_2D, GL_RGBA, bufferWidth, bufferHeight, GL_BGRA, GL_UNSIGNED_BYTE, 0, &texture);\n//\n//            if (!texture || err) {\n//                NSLog(@\"Movie CVOpenGLESTextureCacheCreateTextureFromImage failed (error: %d)\", err);\n//                NSAssert(NO, @\"Camera failure\");\n//                return;\n//            }\n//\n//            outputTexture = CVOpenGLESTextureGetName(texture);\n//            //        glBindTexture(CVOpenGLESTextureGetTarget(texture), outputTexture);\n//            glBindTexture(GL_TEXTURE_2D, outputTexture);\n//            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);\n//            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);\n//            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);\n//            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);\n//\n//            for (id<GPUImageInput> currentTarget in targets)\n//            {\n//                NSInteger indexOfObject = [targets indexOfObject:currentTarget];\n//                NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n//\n//                [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];\n//                [currentTarget setInputTexture:outputTexture atIndex:targetTextureIndex];\n//\n//                [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];\n//            }\n//\n//            CVPixelBufferUnlockBaseAddress(movieFrame, 0);\n//            CVOpenGLESTextureCacheFlush(coreVideoTextureCache, 0);\n//            CFRelease(texture);\n//            \n//            outputTexture = 0;\n        }\n    }\n    else\n    {\n        // Upload to texture\n        CVPixelBufferLockBaseAddress(movieFrame, 0);\n        \n        outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bufferWidth, bufferHeight) textureOptions:self.outputTextureOptions onlyTexture:YES];\n\n        glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);\n        // Using BGRA extension to pull in video frame data directly\n        glTexImage2D(GL_TEXTURE_2D,\n                     0,\n                     self.outputTextureOptions.internalFormat,\n                     bufferWidth,\n                     bufferHeight,\n                     0,\n                     self.outputTextureOptions.format,\n                     self.outputTextureOptions.type,\n                     
CVPixelBufferGetBaseAddress(movieFrame));\n        \n        for (id<GPUImageInput> currentTarget in targets)\n        {\n            NSInteger indexOfObject = [targets indexOfObject:currentTarget];\n            NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n            [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];\n            [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];\n        }\n        \n        [outputFramebuffer unlock];\n        \n        for (id<GPUImageInput> currentTarget in targets)\n        {\n            NSInteger indexOfObject = [targets indexOfObject:currentTarget];\n            NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n            [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];\n        }\n        CVPixelBufferUnlockBaseAddress(movieFrame, 0);\n    }\n    \n    if (_runBenchmark)\n    {\n        CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);\n        NSLog(@\"Current frame time : %f ms\", 1000.0 * currentFrameTime);\n    }\n}\n\n- (void)endProcessing;\n{\n    keepLooping = NO;\n    [displayLink setPaused:YES];\n\n    for (id<GPUImageInput> currentTarget in targets)\n    {\n        [currentTarget endProcessing];\n    }\n    \n    if (synchronizedMovieWriter != nil)\n    {\n        [synchronizedMovieWriter setVideoInputReadyCallback:^{return NO;}];\n        [synchronizedMovieWriter setAudioInputReadyCallback:^{return NO;}];\n    }\n    \n    if (self.playerItem && (displayLink != nil))\n    {\n        [displayLink invalidate]; // remove from all run loops\n        displayLink = nil;\n    }\n\n    if ([self.delegate respondsToSelector:@selector(didCompletePlayingMovie)]) {\n        [self.delegate didCompletePlayingMovie];\n    }\n    self.delegate = nil;\n}\n\n- (void)cancelProcessing\n{\n    if (reader) {\n        [reader cancelReading];\n    }\n    [self endProcessing];\n}\n\n- (void)convertYUVToRGBOutput;\n{\n    [GPUImageContext setActiveShaderProgram:yuvConversionProgram];\n    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(imageBufferWidth, imageBufferHeight) onlyTexture:NO];\n    [outputFramebuffer activateFramebuffer];\n\n    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);\n    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);\n\n    static const GLfloat squareVertices[] = {\n        -1.0f, -1.0f,\n        1.0f, -1.0f,\n        -1.0f,  1.0f,\n        1.0f,  1.0f,\n    };\n\n    static const GLfloat textureCoordinates[] = {\n        0.0f, 0.0f,\n        1.0f, 0.0f,\n        0.0f, 1.0f,\n        1.0f, 1.0f,\n    };\n\n\tglActiveTexture(GL_TEXTURE4);\n\tglBindTexture(GL_TEXTURE_2D, luminanceTexture);\n\tglUniform1i(yuvConversionLuminanceTextureUniform, 4);\n\n    glActiveTexture(GL_TEXTURE5);\n\tglBindTexture(GL_TEXTURE_2D, chrominanceTexture);\n\tglUniform1i(yuvConversionChrominanceTextureUniform, 5);\n\n    glUniformMatrix3fv(yuvConversionMatrixUniform, 1, GL_FALSE, _preferredConversion);\n\n    glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);\n\tglVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);\n\n    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\n}\n\n- (AVAssetReader*)assetReader {\n    return reader;\n}\n\n- (BOOL)audioEncodingIsFinished {\n    return audioEncodingIsFinished;\n}\n\n- 
(BOOL)videoEncodingIsFinished {\n    return videoEncodingIsFinished;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageMovieComposition.h",
    "content": "//\n//  GPUImageMovieComposition.h\n//  Givit\n//\n//  Created by Sean Meiners on 2013/01/25.\n//\n//\n\n#import \"GPUImageMovie.h\"\n\n@interface GPUImageMovieComposition : GPUImageMovie\n\n@property (readwrite, retain) AVComposition *compositon;\n@property (readwrite, retain) AVVideoComposition *videoComposition;\n@property (readwrite, retain) AVAudioMix *audioMix;\n\n- (id)initWithComposition:(AVComposition*)compositon\n      andVideoComposition:(AVVideoComposition*)videoComposition\n              andAudioMix:(AVAudioMix*)audioMix;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageMovieComposition.m",
    "content": "//\n//  GPUImageMovieComposition.m\n//  Givit\n//\n//  Created by Sean Meiners on 2013/01/25.\n//\n//\n\n#import \"GPUImageMovieComposition.h\"\n#import \"GPUImageMovieWriter.h\"\n\n@implementation GPUImageMovieComposition\n\n@synthesize compositon = _compositon;\n@synthesize videoComposition = _videoComposition;\n@synthesize audioMix = _audioMix;\n\n- (id)initWithComposition:(AVComposition*)compositon\n      andVideoComposition:(AVVideoComposition*)videoComposition\n              andAudioMix:(AVAudioMix*)audioMix {\n    if (!(self = [super init]))\n    {\n        return nil;\n    }\n\n    [self yuvConversionSetup];\n\n    self.compositon = compositon;\n    self.videoComposition = videoComposition;\n    self.audioMix = audioMix;\n\n    return self;\n}\n\n- (AVAssetReader*)createAssetReader\n {\n    //NSLog(@\"creating reader from composition: %@, video: %@, audio: %@ with duration: %@\", _compositon, _videoComposition, _audioMix, CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, _compositon.duration)));\n\n    NSError *error = nil;\n    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:self.compositon error:&error];\n\n    NSDictionary *outputSettings = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)};\n    AVAssetReaderVideoCompositionOutput *readerVideoOutput = [AVAssetReaderVideoCompositionOutput assetReaderVideoCompositionOutputWithVideoTracks:[_compositon tracksWithMediaType:AVMediaTypeVideo]\n                                                                                                                                     videoSettings:outputSettings];\n#if ! TARGET_IPHONE_SIMULATOR\n    if( [_videoComposition isKindOfClass:[AVMutableVideoComposition class]] )\n        [(AVMutableVideoComposition*)_videoComposition setRenderScale:1.0];\n#endif\n    readerVideoOutput.videoComposition = self.videoComposition;\n    readerVideoOutput.alwaysCopiesSampleData = NO;\n    [assetReader addOutput:readerVideoOutput];\n\n    NSArray *audioTracks = [_compositon tracksWithMediaType:AVMediaTypeAudio];\n    BOOL shouldRecordAudioTrack = (([audioTracks count] > 0) && (self.audioEncodingTarget != nil) );\n    AVAssetReaderAudioMixOutput *readerAudioOutput = nil;\n\n    if (shouldRecordAudioTrack)\n    {\n        [self.audioEncodingTarget setShouldInvalidateAudioSampleWhenDone:YES];\n        \n        readerAudioOutput = [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:audioTracks audioSettings:nil];\n        readerAudioOutput.audioMix = self.audioMix;\n        readerAudioOutput.alwaysCopiesSampleData = NO;\n        [assetReader addOutput:readerAudioOutput];\n    }\n\n    return assetReader;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageOutput.h",
    "content": "#import \"GPUImageContext.h\"\n#import \"GPUImageFramebuffer.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n#import <UIKit/UIKit.h>\n#else\n// For now, just redefine this on the Mac\ntypedef NS_ENUM(NSInteger, UIImageOrientation) {\n    UIImageOrientationUp,            // default orientation\n    UIImageOrientationDown,          // 180 deg rotation\n    UIImageOrientationLeft,          // 90 deg CCW\n    UIImageOrientationRight,         // 90 deg CW\n    UIImageOrientationUpMirrored,    // as above but image mirrored along other axis. horizontal flip\n    UIImageOrientationDownMirrored,  // horizontal flip\n    UIImageOrientationLeftMirrored,  // vertical flip\n    UIImageOrientationRightMirrored, // vertical flip\n};\n#endif\n\ndispatch_queue_attr_t GPUImageDefaultQueueAttribute(void);\nvoid runOnMainQueueWithoutDeadlocking(void (^block)(void));\nvoid runSynchronouslyOnVideoProcessingQueue(void (^block)(void));\nvoid runAsynchronouslyOnVideoProcessingQueue(void (^block)(void));\nvoid runSynchronouslyOnContextQueue(GPUImageContext *context, void (^block)(void));\nvoid runAsynchronouslyOnContextQueue(GPUImageContext *context, void (^block)(void));\nvoid reportAvailableMemoryForGPUImage(NSString *tag);\n\n@class GPUImageMovieWriter;\n\n/** GPUImage's base source object\n \n Images or frames of video are uploaded from source objects, which are subclasses of GPUImageOutput. These include:\n \n - GPUImageVideoCamera (for live video from an iOS camera) \n - GPUImageStillCamera (for taking photos with the camera)\n - GPUImagePicture (for still images)\n - GPUImageMovie (for movies)\n \n Source objects upload still image frames to OpenGL ES as textures, then hand those textures off to the next objects in the processing chain.\n */\n@interface GPUImageOutput : NSObject\n{\n    GPUImageFramebuffer *outputFramebuffer;\n    \n    NSMutableArray *targets, *targetTextureIndices;\n    \n    CGSize inputTextureSize, cachedMaximumOutputSize, forcedMaximumSize;\n    \n    BOOL overrideInputSize;\n    \n    BOOL allTargetsWantMonochromeData;\n    BOOL usingNextFrameForImageCapture;\n}\n\n@property(readwrite, nonatomic) BOOL shouldSmoothlyScaleOutput;\n@property(readwrite, nonatomic) BOOL shouldIgnoreUpdatesToThisTarget;\n@property(readwrite, nonatomic, retain) GPUImageMovieWriter *audioEncodingTarget;\n@property(readwrite, nonatomic, unsafe_unretained) id<GPUImageInput> targetToIgnoreForUpdates;\n@property(nonatomic, copy) void(^frameProcessingCompletionBlock)(GPUImageOutput*, CMTime);\n@property(nonatomic) BOOL enabled;\n@property(readwrite, nonatomic) GPUTextureOptions outputTextureOptions;\n\n/// @name Managing targets\n- (void)setInputFramebufferForTarget:(id<GPUImageInput>)target atIndex:(NSInteger)inputTextureIndex;\n- (GPUImageFramebuffer *)framebufferForOutput;\n- (void)removeOutputFramebuffer;\n- (void)notifyTargetsAboutNewOutputTexture;\n\n/** Returns an array of the current targets.\n */\n- (NSArray*)targets;\n\n/** Adds a target to receive notifications when new frames are available.\n \n The target will be asked for its next available texture.\n \n See [GPUImageInput newFrameReadyAtTime:]\n \n @param newTarget Target to be added\n */\n- (void)addTarget:(id<GPUImageInput>)newTarget;\n\n/** Adds a target to receive notifications when new frames are available.\n \n See [GPUImageInput newFrameReadyAtTime:]\n \n @param newTarget Target to be added\n */\n- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;\n\n/** Removes a target. 
The target will no longer receive notifications when new frames are available.\n \n @param targetToRemove Target to be removed\n */\n- (void)removeTarget:(id<GPUImageInput>)targetToRemove;\n\n/** Removes all targets.\n */\n- (void)removeAllTargets;\n\n/// @name Manage the output texture\n\n- (void)forceProcessingAtSize:(CGSize)frameSize;\n- (void)forceProcessingAtSizeRespectingAspectRatio:(CGSize)frameSize;\n\n/// @name Still image processing\n\n- (void)useNextFrameForImageCapture;\n- (CGImageRef)newCGImageFromCurrentlyProcessedOutput;\n- (CGImageRef)newCGImageByFilteringCGImage:(CGImageRef)imageToFilter;\n\n// Platform-specific image output methods\n// If you're trying to use these methods, remember that you need to set -useNextFrameForImageCapture before running -processImage or running video and calling any of these methods, or you will get a nil image\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n- (UIImage *)imageFromCurrentFramebuffer;\n- (UIImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation;\n- (UIImage *)imageByFilteringImage:(UIImage *)imageToFilter;\n- (CGImageRef)newCGImageByFilteringImage:(UIImage *)imageToFilter;\n#else\n- (NSImage *)imageFromCurrentFramebuffer;\n- (NSImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation;\n- (NSImage *)imageByFilteringImage:(NSImage *)imageToFilter;\n- (CGImageRef)newCGImageByFilteringImage:(NSImage *)imageToFilter;\n#endif\n\n- (BOOL)providesMonochromeOutput;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageOutput.m",
    "content": "#import \"GPUImageOutput.h\"\n#import \"GPUImageMovieWriter.h\"\n#import \"GPUImagePicture.h\"\n#import <mach/mach.h>\n\ndispatch_queue_attr_t GPUImageDefaultQueueAttribute(void)\n{\n#if TARGET_OS_IPHONE\n    if ([[[UIDevice currentDevice] systemVersion] compare:@\"9.0\" options:NSNumericSearch] != NSOrderedAscending)\n    {\n        return dispatch_queue_attr_make_with_qos_class(DISPATCH_QUEUE_SERIAL, QOS_CLASS_DEFAULT, 0);\n    }\n#endif\n    return nil;\n}\n\nvoid runOnMainQueueWithoutDeadlocking(void (^block)(void))\n{\n\tif ([NSThread isMainThread])\n\t{\n\t\tblock();\n\t}\n\telse\n\t{\n\t\tdispatch_sync(dispatch_get_main_queue(), block);\n\t}\n}\n\nvoid runSynchronouslyOnVideoProcessingQueue(void (^block)(void))\n{\n    dispatch_queue_t videoProcessingQueue = [GPUImageContext sharedContextQueue];\n#if !OS_OBJECT_USE_OBJC\n#pragma clang diagnostic push\n#pragma clang diagnostic ignored \"-Wdeprecated-declarations\"\n    if (dispatch_get_current_queue() == videoProcessingQueue)\n#pragma clang diagnostic pop\n#else\n\tif (dispatch_get_specific([GPUImageContext contextKey]))\n#endif\n\t{\n\t\tblock();\n\t}else\n\t{\n\t\tdispatch_sync(videoProcessingQueue, block);\n\t}\n}\n\nvoid runAsynchronouslyOnVideoProcessingQueue(void (^block)(void))\n{\n    dispatch_queue_t videoProcessingQueue = [GPUImageContext sharedContextQueue];\n    \n#if !OS_OBJECT_USE_OBJC\n#pragma clang diagnostic push\n#pragma clang diagnostic ignored \"-Wdeprecated-declarations\"\n    if (dispatch_get_current_queue() == videoProcessingQueue)\n#pragma clang diagnostic pop\n#else\n    if (dispatch_get_specific([GPUImageContext contextKey]))\n#endif\n\t{\n\t\tblock();\n\t}else\n\t{\n\t\tdispatch_async(videoProcessingQueue, block);\n\t}\n}\n\nvoid runSynchronouslyOnContextQueue(GPUImageContext *context, void (^block)(void))\n{\n    dispatch_queue_t videoProcessingQueue = [context contextQueue];\n#if !OS_OBJECT_USE_OBJC\n#pragma clang diagnostic push\n#pragma clang diagnostic ignored \"-Wdeprecated-declarations\"\n    if (dispatch_get_current_queue() == videoProcessingQueue)\n#pragma clang diagnostic pop\n#else\n        if (dispatch_get_specific([GPUImageContext contextKey]))\n#endif\n        {\n            block();\n        }else\n        {\n            dispatch_sync(videoProcessingQueue, block);\n        }\n}\n\nvoid runAsynchronouslyOnContextQueue(GPUImageContext *context, void (^block)(void))\n{\n    dispatch_queue_t videoProcessingQueue = [context contextQueue];\n    \n#if !OS_OBJECT_USE_OBJC\n#pragma clang diagnostic push\n#pragma clang diagnostic ignored \"-Wdeprecated-declarations\"\n    if (dispatch_get_current_queue() == videoProcessingQueue)\n#pragma clang diagnostic pop\n#else\n        if (dispatch_get_specific([GPUImageContext contextKey]))\n#endif\n        {\n            block();\n        }else\n        {\n            dispatch_async(videoProcessingQueue, block);\n        }\n}\n\nvoid reportAvailableMemoryForGPUImage(NSString *tag) \n{    \n    if (!tag)\n        tag = @\"Default\";\n    \n    struct task_basic_info info;\n    \n    mach_msg_type_number_t size = sizeof(info);\n    \n    kern_return_t kerr = task_info(mach_task_self(),\n                                   \n                                   TASK_BASIC_INFO,\n                                   \n                                   (task_info_t)&info,\n                                   \n                                   &size);    \n    if( kerr == KERN_SUCCESS ) {        \n        NSLog(@\"%@ - Memory used: %u\", tag, (unsigned 
int)info.resident_size); //in bytes\n    } else {        \n        NSLog(@\"%@ - Error: %s\", tag, mach_error_string(kerr));        \n    }    \n}\n\n@implementation GPUImageOutput\n\n@synthesize shouldSmoothlyScaleOutput = _shouldSmoothlyScaleOutput;\n@synthesize shouldIgnoreUpdatesToThisTarget = _shouldIgnoreUpdatesToThisTarget;\n@synthesize audioEncodingTarget = _audioEncodingTarget;\n@synthesize targetToIgnoreForUpdates = _targetToIgnoreForUpdates;\n@synthesize frameProcessingCompletionBlock = _frameProcessingCompletionBlock;\n@synthesize enabled = _enabled;\n@synthesize outputTextureOptions = _outputTextureOptions;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init; \n{\n\tif (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n\n    targets = [[NSMutableArray alloc] init];\n    targetTextureIndices = [[NSMutableArray alloc] init];\n    _enabled = YES;\n    allTargetsWantMonochromeData = YES;\n    usingNextFrameForImageCapture = NO;\n    \n    // set default texture options\n    _outputTextureOptions.minFilter = GL_LINEAR;\n    _outputTextureOptions.magFilter = GL_LINEAR;\n    _outputTextureOptions.wrapS = GL_CLAMP_TO_EDGE;\n    _outputTextureOptions.wrapT = GL_CLAMP_TO_EDGE;\n    _outputTextureOptions.internalFormat = GL_RGBA;\n    _outputTextureOptions.format = GL_BGRA;\n    _outputTextureOptions.type = GL_UNSIGNED_BYTE;\n\n    return self;\n}\n\n- (void)dealloc \n{\n    [self removeAllTargets];\n}\n\n#pragma mark -\n#pragma mark Managing targets\n\n- (void)setInputFramebufferForTarget:(id<GPUImageInput>)target atIndex:(NSInteger)inputTextureIndex;\n{\n    [target setInputFramebuffer:[self framebufferForOutput] atIndex:inputTextureIndex];\n}\n\n- (GPUImageFramebuffer *)framebufferForOutput;\n{\n    return outputFramebuffer;\n}\n\n- (void)removeOutputFramebuffer;\n{\n    outputFramebuffer = nil;\n}\n\n- (void)notifyTargetsAboutNewOutputTexture;\n{\n    for (id<GPUImageInput> currentTarget in targets)\n    {\n        NSInteger indexOfObject = [targets indexOfObject:currentTarget];\n        NSInteger textureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n        \n        [self setInputFramebufferForTarget:currentTarget atIndex:textureIndex];\n    }\n}\n\n- (NSArray*)targets;\n{\n\treturn [NSArray arrayWithArray:targets];\n}\n\n- (void)addTarget:(id<GPUImageInput>)newTarget;\n{\n    NSInteger nextAvailableTextureIndex = [newTarget nextAvailableTextureIndex];\n    [self addTarget:newTarget atTextureLocation:nextAvailableTextureIndex];\n    \n    if ([newTarget shouldIgnoreUpdatesToThisTarget])\n    {\n        _targetToIgnoreForUpdates = newTarget;\n    }\n}\n\n- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;\n{\n    if([targets containsObject:newTarget])\n    {\n        return;\n    }\n    \n    cachedMaximumOutputSize = CGSizeZero;\n    runSynchronouslyOnVideoProcessingQueue(^{\n        [self setInputFramebufferForTarget:newTarget atIndex:textureLocation];\n        [targets addObject:newTarget];\n        [targetTextureIndices addObject:[NSNumber numberWithInteger:textureLocation]];\n        \n        allTargetsWantMonochromeData = allTargetsWantMonochromeData && [newTarget wantsMonochromeInput];\n    });\n}\n\n- (void)removeTarget:(id<GPUImageInput>)targetToRemove;\n{\n    if(![targets containsObject:targetToRemove])\n    {\n        return;\n    }\n    \n    if (_targetToIgnoreForUpdates == targetToRemove)\n    {\n        _targetToIgnoreForUpdates = nil;\n    }\n    \n    
cachedMaximumOutputSize = CGSizeZero;\n    \n    NSInteger indexOfObject = [targets indexOfObject:targetToRemove];\n    NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n\n    runSynchronouslyOnVideoProcessingQueue(^{\n        [targetToRemove setInputSize:CGSizeZero atIndex:textureIndexOfTarget];\n\t\t[targetToRemove setInputRotation:kGPUImageNoRotation atIndex:textureIndexOfTarget];\n\n        [targetTextureIndices removeObjectAtIndex:indexOfObject];\n        [targets removeObject:targetToRemove];\n        [targetToRemove endProcessing];\n    });\n}\n\n- (void)removeAllTargets;\n{\n    cachedMaximumOutputSize = CGSizeZero;\n    runSynchronouslyOnVideoProcessingQueue(^{\n        for (id<GPUImageInput> targetToRemove in targets)\n        {\n            NSInteger indexOfObject = [targets indexOfObject:targetToRemove];\n            NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n            \n            [targetToRemove setInputSize:CGSizeZero atIndex:textureIndexOfTarget];\n            [targetToRemove setInputRotation:kGPUImageNoRotation atIndex:textureIndexOfTarget];\n        }\n        [targets removeAllObjects];\n        [targetTextureIndices removeAllObjects];\n        \n        allTargetsWantMonochromeData = YES;\n    });\n}\n\n#pragma mark -\n#pragma mark Manage the output texture\n\n- (void)forceProcessingAtSize:(CGSize)frameSize;\n{\n    \n}\n\n- (void)forceProcessingAtSizeRespectingAspectRatio:(CGSize)frameSize;\n{\n}\n\n#pragma mark -\n#pragma mark Still image processing\n\n- (void)useNextFrameForImageCapture;\n{\n\n}\n\n- (CGImageRef)newCGImageFromCurrentlyProcessedOutput;\n{\n    return nil;\n}\n\n- (CGImageRef)newCGImageByFilteringCGImage:(CGImageRef)imageToFilter;\n{\n    GPUImagePicture *stillImageSource = [[GPUImagePicture alloc] initWithCGImage:imageToFilter];\n    \n    [self useNextFrameForImageCapture];\n    [stillImageSource addTarget:(id<GPUImageInput>)self];\n    [stillImageSource processImage];\n    \n    CGImageRef processedImage = [self newCGImageFromCurrentlyProcessedOutput];\n    \n    [stillImageSource removeTarget:(id<GPUImageInput>)self];\n    return processedImage;\n}\n\n- (BOOL)providesMonochromeOutput;\n{\n    return NO;\n}\n\n#pragma mark -\n#pragma mark Platform-specific image output methods\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n\n- (UIImage *)imageFromCurrentFramebuffer;\n{\n\tUIDeviceOrientation deviceOrientation = [[UIDevice currentDevice] orientation];\n    UIImageOrientation imageOrientation = UIImageOrientationLeft;\n\tswitch (deviceOrientation)\n    {\n\t\tcase UIDeviceOrientationPortrait:\n\t\t\timageOrientation = UIImageOrientationUp;\n\t\t\tbreak;\n\t\tcase UIDeviceOrientationPortraitUpsideDown:\n\t\t\timageOrientation = UIImageOrientationDown;\n\t\t\tbreak;\n\t\tcase UIDeviceOrientationLandscapeLeft:\n\t\t\timageOrientation = UIImageOrientationLeft;\n\t\t\tbreak;\n\t\tcase UIDeviceOrientationLandscapeRight:\n\t\t\timageOrientation = UIImageOrientationRight;\n\t\t\tbreak;\n\t\tdefault:\n\t\t\timageOrientation = UIImageOrientationUp;\n\t\t\tbreak;\n\t}\n    \n    return [self imageFromCurrentFramebufferWithOrientation:imageOrientation];\n}\n\n- (UIImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation;\n{\n    CGImageRef cgImageFromBytes = [self newCGImageFromCurrentlyProcessedOutput];\n    UIImage *finalImage = [UIImage imageWithCGImage:cgImageFromBytes scale:1.0 orientation:imageOrientation];\n    
CGImageRelease(cgImageFromBytes);\n    \n    return finalImage;\n}\n\n- (UIImage *)imageByFilteringImage:(UIImage *)imageToFilter;\n{\n    CGImageRef image = [self newCGImageByFilteringCGImage:[imageToFilter CGImage]];\n    UIImage *processedImage = [UIImage imageWithCGImage:image scale:[imageToFilter scale] orientation:[imageToFilter imageOrientation]];\n    CGImageRelease(image);\n    return processedImage;\n}\n\n- (CGImageRef)newCGImageByFilteringImage:(UIImage *)imageToFilter\n{\n    return [self newCGImageByFilteringCGImage:[imageToFilter CGImage]];\n}\n\n#else\n\n- (NSImage *)imageFromCurrentFramebuffer;\n{\n    return [self imageFromCurrentFramebufferWithOrientation:UIImageOrientationLeft];\n}\n\n- (NSImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation;\n{\n    CGImageRef cgImageFromBytes = [self newCGImageFromCurrentlyProcessedOutput];\n    NSImage *finalImage = [[NSImage alloc] initWithCGImage:cgImageFromBytes size:NSZeroSize];\n    CGImageRelease(cgImageFromBytes);\n    \n    return finalImage;\n}\n\n- (NSImage *)imageByFilteringImage:(NSImage *)imageToFilter;\n{\n    CGImageRef image = [self newCGImageByFilteringCGImage:[imageToFilter CGImageForProposedRect:NULL context:[NSGraphicsContext currentContext] hints:nil]];\n    NSImage *processedImage = [[NSImage alloc] initWithCGImage:image size:NSZeroSize];\n    CGImageRelease(image);\n    return processedImage;\n}\n\n- (CGImageRef)newCGImageByFilteringImage:(NSImage *)imageToFilter\n{\n    return [self newCGImageByFilteringCGImage:[imageToFilter CGImageForProposedRect:NULL context:[NSGraphicsContext currentContext] hints:nil]];\n}\n\n#endif\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setAudioEncodingTarget:(GPUImageMovieWriter *)newValue;\n{    \n    _audioEncodingTarget = newValue;\n    if( ! _audioEncodingTarget.hasAudioTrack )\n    {\n        _audioEncodingTarget.hasAudioTrack = YES;\n    }\n}\n\n-(void)setOutputTextureOptions:(GPUTextureOptions)outputTextureOptions\n{\n    _outputTextureOptions = outputTextureOptions;\n    \n    if( outputFramebuffer.texture )\n    {\n        glBindTexture(GL_TEXTURE_2D,  outputFramebuffer.texture);\n        //_outputTextureOptions.format\n        //_outputTextureOptions.internalFormat\n        //_outputTextureOptions.magFilter\n        //_outputTextureOptions.minFilter\n        //_outputTextureOptions.type\n        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, _outputTextureOptions.wrapS);\n        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, _outputTextureOptions.wrapT);\n        glBindTexture(GL_TEXTURE_2D, 0);\n    }\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTextureInput.h",
    "content": "#import \"GPUImageOutput.h\"\n\n@interface GPUImageTextureInput : GPUImageOutput\n{\n    CGSize textureSize;\n}\n\n// Initialization and teardown\n- (id)initWithTexture:(GLuint)newInputTexture size:(CGSize)newTextureSize;\n\n// Image rendering\n- (void)processTextureWithFrameTime:(CMTime)frameTime;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTextureInput.m",
    "content": "#import \"GPUImageTextureInput.h\"\n\n@implementation GPUImageTextureInput\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithTexture:(GLuint)newInputTexture size:(CGSize)newTextureSize;\n{\n    if (!(self = [super init]))\n    {\n        return nil;\n    }\n\n    runSynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext useImageProcessingContext];\n    });\n    \n    textureSize = newTextureSize;\n\n    runSynchronouslyOnVideoProcessingQueue(^{\n        outputFramebuffer = [[GPUImageFramebuffer alloc] initWithSize:newTextureSize overriddenTexture:newInputTexture];\n    });\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Image rendering\n\n- (void)processTextureWithFrameTime:(CMTime)frameTime;\n{\n    runAsynchronouslyOnVideoProcessingQueue(^{\n        for (id<GPUImageInput> currentTarget in targets)\n        {\n            NSInteger indexOfObject = [targets indexOfObject:currentTarget];\n            NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n            \n            [currentTarget setInputSize:textureSize atIndex:targetTextureIndex];\n            [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];\n            [currentTarget newFrameReadyAtTime:frameTime atIndex:targetTextureIndex];\n        }\n    });\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTextureOutput.h",
    "content": "#import <Foundation/Foundation.h>\n#import \"GPUImageContext.h\"\n\n@protocol GPUImageTextureOutputDelegate;\n\n@interface GPUImageTextureOutput : NSObject <GPUImageInput>\n{\n    GPUImageFramebuffer *firstInputFramebuffer;\n}\n\n@property(readwrite, unsafe_unretained, nonatomic) id<GPUImageTextureOutputDelegate> delegate;\n@property(readonly) GLuint texture;\n@property(nonatomic) BOOL enabled;\n\n- (void)doneWithTexture;\n\n@end\n\n@protocol GPUImageTextureOutputDelegate\n- (void)newFrameReadyFromTextureOutput:(GPUImageTextureOutput *)callbackTextureOutput;\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTextureOutput.m",
    "content": "#import \"GPUImageTextureOutput.h\"\n\n@implementation GPUImageTextureOutput\n\n@synthesize delegate = _delegate;\n@synthesize texture = _texture;\n@synthesize enabled;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n    self.enabled = YES;\n    \n    return self;\n}\n\n- (void)doneWithTexture;\n{\n    [firstInputFramebuffer unlock];\n}\n\n#pragma mark -\n#pragma mark GPUImageInput protocol\n\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;\n{\n    [_delegate newFrameReadyFromTextureOutput:self];\n}\n\n- (NSInteger)nextAvailableTextureIndex;\n{\n    return 0;\n}\n\n// TODO: Deal with the fact that the texture changes regularly as a result of the caching\n- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;\n{\n    firstInputFramebuffer = newInputFramebuffer;\n    [firstInputFramebuffer lock];\n    \n    _texture = [firstInputFramebuffer texture];\n}\n\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n}\n\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n{\n}\n\n- (CGSize)maximumOutputSize;\n{\n    return CGSizeZero;\n}\n\n- (void)endProcessing\n{\n}\n\n- (BOOL)shouldIgnoreUpdatesToThisTarget;\n{\n    return NO;\n}\n\n- (BOOL)wantsMonochromeInput;\n{\n    return NO;\n}\n\n- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;\n{\n    \n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageThreeInputFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\nextern NSString *const kGPUImageThreeInputTextureVertexShaderString;\n\n@interface GPUImageThreeInputFilter : GPUImageTwoInputFilter\n{\n    GPUImageFramebuffer *thirdInputFramebuffer;\n\n    GLint filterThirdTextureCoordinateAttribute;\n    GLint filterInputTextureUniform3;\n    GPUImageRotationMode inputRotation3;\n    GLuint filterSourceTexture3;\n    CMTime thirdFrameTime;\n    \n    BOOL hasSetSecondTexture, hasReceivedThirdFrame, thirdFrameWasVideo;\n    BOOL thirdFrameCheckDisabled;\n}\n\n- (void)disableThirdFrameCheck;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageThreeInputFilter.m",
    "content": "#import \"GPUImageThreeInputFilter.h\"\n\n\nNSString *const kGPUImageThreeInputTextureVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n attribute vec4 inputTextureCoordinate2;\n attribute vec4 inputTextureCoordinate3;\n \n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n varying vec2 textureCoordinate3;\n \n void main()\n {\n     gl_Position = position;\n     textureCoordinate = inputTextureCoordinate.xy;\n     textureCoordinate2 = inputTextureCoordinate2.xy;\n     textureCoordinate3 = inputTextureCoordinate3.xy;\n }\n);\n\n@implementation GPUImageThreeInputFilter\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;\n{\n    if (!(self = [self initWithVertexShaderFromString:kGPUImageThreeInputTextureVertexShaderString fragmentShaderFromString:fragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;\n{\n    if (!(self = [super initWithVertexShaderFromString:vertexShaderString fragmentShaderFromString:fragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    inputRotation3 = kGPUImageNoRotation;\n    \n    hasSetSecondTexture = NO;\n    \n    hasReceivedThirdFrame = NO;\n    thirdFrameWasVideo = NO;\n    thirdFrameCheckDisabled = NO;\n    \n    thirdFrameTime = kCMTimeInvalid;\n    \n    runSynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext useImageProcessingContext];\n        filterThirdTextureCoordinateAttribute = [filterProgram attributeIndex:@\"inputTextureCoordinate3\"];\n        \n        filterInputTextureUniform3 = [filterProgram uniformIndex:@\"inputImageTexture3\"]; // This does assume a name of \"inputImageTexture3\" for the third input texture in the fragment shader\n        glEnableVertexAttribArray(filterThirdTextureCoordinateAttribute);\n    });\n    \n    return self;\n}\n\n- (void)initializeAttributes;\n{\n    [super initializeAttributes];\n    [filterProgram addAttribute:@\"inputTextureCoordinate3\"];\n}\n\n- (void)disableThirdFrameCheck;\n{\n    thirdFrameCheckDisabled = YES;\n}\n\n#pragma mark -\n#pragma mark Rendering\n\n- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;\n{\n    if (self.preventRendering)\n    {\n        [firstInputFramebuffer unlock];\n        [secondInputFramebuffer unlock];\n        [thirdInputFramebuffer unlock];\n        return;\n    }\n    \n    [GPUImageContext setActiveShaderProgram:filterProgram];\n    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];\n    [outputFramebuffer activateFramebuffer];\n    if (usingNextFrameForImageCapture)\n    {\n        [outputFramebuffer lock];\n    }\n\n    [self setUniformsForProgramAtIndex:0];\n    \n    glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);\n    glClear(GL_COLOR_BUFFER_BIT);\n    \n\tglActiveTexture(GL_TEXTURE2);\n\tglBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);\n\tglUniform1i(filterInputTextureUniform, 2);\n    \n    glActiveTexture(GL_TEXTURE3);\n    glBindTexture(GL_TEXTURE_2D, [secondInputFramebuffer texture]);\n    glUniform1i(filterInputTextureUniform2, 3);\n\n    glActiveTexture(GL_TEXTURE4);\n    
glBindTexture(GL_TEXTURE_2D, [thirdInputFramebuffer texture]);\n    glUniform1i(filterInputTextureUniform3, 4);\n\n    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);\n\tglVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);\n    glVertexAttribPointer(filterSecondTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation2]);\n    glVertexAttribPointer(filterThirdTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation3]);\n    \n    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\n    [firstInputFramebuffer unlock];\n    [secondInputFramebuffer unlock];\n    [thirdInputFramebuffer unlock];\n    if (usingNextFrameForImageCapture)\n    {\n        dispatch_semaphore_signal(imageCaptureSemaphore);\n    }\n}\n\n#pragma mark -\n#pragma mark GPUImageInput\n\n- (NSInteger)nextAvailableTextureIndex;\n{\n    if (hasSetSecondTexture)\n    {\n        return 2;\n    }\n    else if (hasSetFirstTexture)\n    {\n        return 1;\n    }\n    else\n    {\n        return 0;\n    }\n}\n\n- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;\n{\n    if (textureIndex == 0)\n    {\n        firstInputFramebuffer = newInputFramebuffer;\n        hasSetFirstTexture = YES;\n        [firstInputFramebuffer lock];\n    }\n    else if (textureIndex == 1)\n    {\n        secondInputFramebuffer = newInputFramebuffer;\n        hasSetSecondTexture = YES;\n        [secondInputFramebuffer lock];\n    }\n    else\n    {\n        thirdInputFramebuffer = newInputFramebuffer;\n        [thirdInputFramebuffer lock];\n    }\n}\n\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n{\n    if (textureIndex == 0)\n    {\n        [super setInputSize:newSize atIndex:textureIndex];\n        \n        if (CGSizeEqualToSize(newSize, CGSizeZero))\n        {\n            hasSetFirstTexture = NO;\n        }\n    }\n    else if (textureIndex == 1)\n    {\n        if (CGSizeEqualToSize(newSize, CGSizeZero))\n        {\n            hasSetSecondTexture = NO;\n        }\n    }\n}\n\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n    if (textureIndex == 0)\n    {\n        inputRotation = newInputRotation;\n    }\n    else if (textureIndex == 1)\n    {\n        inputRotation2 = newInputRotation;\n    }\n    else\n    {\n        inputRotation3 = newInputRotation;\n    }\n}\n\n- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex;\n{\n    CGSize rotatedSize = sizeToRotate;\n    \n    GPUImageRotationMode rotationToCheck;\n    if (textureIndex == 0)\n    {\n        rotationToCheck = inputRotation;\n    }\n    else if (textureIndex == 1)\n    {\n        rotationToCheck = inputRotation2;\n    }\n    else\n    {\n        rotationToCheck = inputRotation3;\n    }\n    \n    if (GPUImageRotationSwapsWidthAndHeight(rotationToCheck))\n    {\n        rotatedSize.width = sizeToRotate.height;\n        rotatedSize.height = sizeToRotate.width;\n    }\n    \n    return rotatedSize;\n}\n\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;\n{\n    // You can set up infinite update loops, so this helps to short circuit them\n    if (hasReceivedFirstFrame && hasReceivedSecondFrame && hasReceivedThirdFrame)\n    {\n        return;\n    }\n    \n    BOOL updatedMovieFrameOppositeStillImage = NO;\n    \n    if (textureIndex == 0)\n    {\n        
hasReceivedFirstFrame = YES;\n        firstFrameTime = frameTime;\n        if (secondFrameCheckDisabled)\n        {\n            hasReceivedSecondFrame = YES;\n        }\n        if (thirdFrameCheckDisabled)\n        {\n            hasReceivedThirdFrame = YES;\n        }\n        \n        if (!CMTIME_IS_INDEFINITE(frameTime))\n        {\n            if CMTIME_IS_INDEFINITE(secondFrameTime)\n            {\n                updatedMovieFrameOppositeStillImage = YES;\n            }\n        }\n    }\n    else if (textureIndex == 1)\n    {\n        hasReceivedSecondFrame = YES;\n        secondFrameTime = frameTime;\n        if (firstFrameCheckDisabled)\n        {\n            hasReceivedFirstFrame = YES;\n        }\n        if (thirdFrameCheckDisabled)\n        {\n            hasReceivedThirdFrame = YES;\n        }\n\n        if (!CMTIME_IS_INDEFINITE(frameTime))\n        {\n            if CMTIME_IS_INDEFINITE(firstFrameTime)\n            {\n                updatedMovieFrameOppositeStillImage = YES;\n            }\n        }\n    }\n    else\n    {\n        hasReceivedThirdFrame = YES;\n        thirdFrameTime = frameTime;\n        if (firstFrameCheckDisabled)\n        {\n            hasReceivedFirstFrame = YES;\n        }\n        if (secondFrameCheckDisabled)\n        {\n            hasReceivedSecondFrame = YES;\n        }\n        \n        if (!CMTIME_IS_INDEFINITE(frameTime))\n        {\n            if CMTIME_IS_INDEFINITE(firstFrameTime)\n            {\n                updatedMovieFrameOppositeStillImage = YES;\n            }\n        }\n    }\n    \n    // || (hasReceivedFirstFrame && secondFrameCheckDisabled) || (hasReceivedSecondFrame && firstFrameCheckDisabled)\n    if ((hasReceivedFirstFrame && hasReceivedSecondFrame && hasReceivedThirdFrame) || updatedMovieFrameOppositeStillImage)\n    {\n        static const GLfloat imageVertices[] = {\n            -1.0f, -1.0f,\n            1.0f, -1.0f,\n            -1.0f,  1.0f,\n            1.0f,  1.0f,\n        };\n        \n        [self renderToTextureWithVertices:imageVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];\n        \n        [self informTargetsAboutNewFrameAtTime:frameTime];\n\n        hasReceivedFirstFrame = NO;\n        hasReceivedSecondFrame = NO;\n        hasReceivedThirdFrame = NO;\n    }\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTwoInputFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\nextern NSString *const kGPUImageTwoInputTextureVertexShaderString;\n\n@interface GPUImageTwoInputFilter : GPUImageFilter\n{\n    GPUImageFramebuffer *secondInputFramebuffer;\n\n    GLint filterSecondTextureCoordinateAttribute;\n    GLint filterInputTextureUniform2;\n    GPUImageRotationMode inputRotation2;\n    CMTime firstFrameTime, secondFrameTime;\n    \n    BOOL hasSetFirstTexture, hasReceivedFirstFrame, hasReceivedSecondFrame, firstFrameWasVideo, secondFrameWasVideo;\n    BOOL firstFrameCheckDisabled, secondFrameCheckDisabled;\n}\n\n- (void)disableFirstFrameCheck;\n- (void)disableSecondFrameCheck;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTwoInputFilter.m",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\nNSString *const kGPUImageTwoInputTextureVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n attribute vec4 inputTextureCoordinate2;\n \n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n void main()\n {\n     gl_Position = position;\n     textureCoordinate = inputTextureCoordinate.xy;\n     textureCoordinate2 = inputTextureCoordinate2.xy;\n }\n);\n\n\n@implementation GPUImageTwoInputFilter\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;\n{\n    if (!(self = [self initWithVertexShaderFromString:kGPUImageTwoInputTextureVertexShaderString fragmentShaderFromString:fragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;\n{\n    if (!(self = [super initWithVertexShaderFromString:vertexShaderString fragmentShaderFromString:fragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    inputRotation2 = kGPUImageNoRotation;\n    \n    hasSetFirstTexture = NO;\n    \n    hasReceivedFirstFrame = NO;\n    hasReceivedSecondFrame = NO;\n    firstFrameWasVideo = NO;\n    secondFrameWasVideo = NO;\n    firstFrameCheckDisabled = NO;\n    secondFrameCheckDisabled = NO;\n    \n    firstFrameTime = kCMTimeInvalid;\n    secondFrameTime = kCMTimeInvalid;\n        \n    runSynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext useImageProcessingContext];\n        filterSecondTextureCoordinateAttribute = [filterProgram attributeIndex:@\"inputTextureCoordinate2\"];\n        \n        filterInputTextureUniform2 = [filterProgram uniformIndex:@\"inputImageTexture2\"]; // This does assume a name of \"inputImageTexture2\" for second input texture in the fragment shader\n        glEnableVertexAttribArray(filterSecondTextureCoordinateAttribute);\n    });\n    \n    return self;\n}\n\n- (void)initializeAttributes;\n{\n    [super initializeAttributes];\n    [filterProgram addAttribute:@\"inputTextureCoordinate2\"];\n}\n\n- (void)disableFirstFrameCheck;\n{\n    firstFrameCheckDisabled = YES;\n}\n\n- (void)disableSecondFrameCheck;\n{\n    secondFrameCheckDisabled = YES;\n}\n\n#pragma mark -\n#pragma mark Rendering\n\n- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;\n{\n    if (self.preventRendering)\n    {\n        [firstInputFramebuffer unlock];\n        [secondInputFramebuffer unlock];\n        return;\n    }\n    \n    [GPUImageContext setActiveShaderProgram:filterProgram];\n    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];\n    [outputFramebuffer activateFramebuffer];\n    if (usingNextFrameForImageCapture)\n    {\n        [outputFramebuffer lock];\n    }\n\n    [self setUniformsForProgramAtIndex:0];\n        \n    glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);\n    glClear(GL_COLOR_BUFFER_BIT);\n    \n\tglActiveTexture(GL_TEXTURE2);\n\tglBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);\n\tglUniform1i(filterInputTextureUniform, 2);\t\n    \n    glActiveTexture(GL_TEXTURE3);\n    glBindTexture(GL_TEXTURE_2D, [secondInputFramebuffer texture]);\n    glUniform1i(filterInputTextureUniform2, 3);\n    
\n    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);\n\tglVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);\n    glVertexAttribPointer(filterSecondTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation2]);\n    \n    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\n\n    [firstInputFramebuffer unlock];\n    [secondInputFramebuffer unlock];\n    if (usingNextFrameForImageCapture)\n    {\n        dispatch_semaphore_signal(imageCaptureSemaphore);\n    }\n}\n\n#pragma mark -\n#pragma mark GPUImageInput\n\n- (NSInteger)nextAvailableTextureIndex;\n{\n    if (hasSetFirstTexture)\n    {\n        return 1;\n    }\n    else\n    {\n        return 0;\n    }\n}\n\n- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;\n{\n    if (textureIndex == 0)\n    {\n        firstInputFramebuffer = newInputFramebuffer;\n        hasSetFirstTexture = YES;\n        [firstInputFramebuffer lock];\n    }\n    else\n    {\n        secondInputFramebuffer = newInputFramebuffer;\n        [secondInputFramebuffer lock];\n    }\n}\n\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n{\n    if (textureIndex == 0)\n    {\n        [super setInputSize:newSize atIndex:textureIndex];\n        \n        if (CGSizeEqualToSize(newSize, CGSizeZero))\n        {\n            hasSetFirstTexture = NO;\n        }\n    }\n}\n\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n    if (textureIndex == 0)\n    {\n        inputRotation = newInputRotation;\n    }\n    else\n    {\n        inputRotation2 = newInputRotation;\n    }\n}\n\n- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex;\n{\n    CGSize rotatedSize = sizeToRotate;\n    \n    GPUImageRotationMode rotationToCheck;\n    if (textureIndex == 0)\n    {\n        rotationToCheck = inputRotation;\n    }\n    else\n    {\n        rotationToCheck = inputRotation2;\n    }\n    \n    if (GPUImageRotationSwapsWidthAndHeight(rotationToCheck))\n    {\n        rotatedSize.width = sizeToRotate.height;\n        rotatedSize.height = sizeToRotate.width;\n    }\n    \n    return rotatedSize; \n}\n\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;\n{\n    // You can set up infinite update loops, so this helps to short circuit them\n    if (hasReceivedFirstFrame && hasReceivedSecondFrame)\n    {\n        return;\n    }\n    \n    BOOL updatedMovieFrameOppositeStillImage = NO;\n    \n    if (textureIndex == 0)\n    {\n        hasReceivedFirstFrame = YES;\n        firstFrameTime = frameTime;\n        if (secondFrameCheckDisabled)\n        {\n            hasReceivedSecondFrame = YES;\n        }\n        \n        if (!CMTIME_IS_INDEFINITE(frameTime))\n        {\n            if CMTIME_IS_INDEFINITE(secondFrameTime)\n            {\n                updatedMovieFrameOppositeStillImage = YES;\n            }\n        }\n    }\n    else\n    {\n        hasReceivedSecondFrame = YES;\n        secondFrameTime = frameTime;\n        if (firstFrameCheckDisabled)\n        {\n            hasReceivedFirstFrame = YES;\n        }\n\n        if (!CMTIME_IS_INDEFINITE(frameTime))\n        {\n            if CMTIME_IS_INDEFINITE(firstFrameTime)\n            {\n                updatedMovieFrameOppositeStillImage = YES;\n            }\n        }\n    }\n\n    // || (hasReceivedFirstFrame && secondFrameCheckDisabled) || (hasReceivedSecondFrame && 
firstFrameCheckDisabled)\n    if ((hasReceivedFirstFrame && hasReceivedSecondFrame) || updatedMovieFrameOppositeStillImage)\n    {\n        CMTime passOnFrameTime = (!CMTIME_IS_INDEFINITE(firstFrameTime)) ? firstFrameTime : secondFrameTime;\n        [super newFrameReadyAtTime:passOnFrameTime atIndex:0]; // Bugfix when trying to record: always use time from first input (unless indefinite, in which case use the second input)\n        hasReceivedFirstFrame = NO;\n        hasReceivedSecondFrame = NO;\n    }\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTwoPassFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageTwoPassFilter : GPUImageFilter\n{\n    GPUImageFramebuffer *secondOutputFramebuffer;\n\n    GLProgram *secondFilterProgram;\n    GLint secondFilterPositionAttribute, secondFilterTextureCoordinateAttribute;\n    GLint secondFilterInputTextureUniform, secondFilterInputTextureUniform2;\n    \n    NSMutableDictionary *secondProgramUniformStateRestorationBlocks;\n}\n\n// Initialization and teardown\n- (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;\n- (id)initWithFirstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;\n- (void)initializeSecondaryAttributes;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTwoPassFilter.m",
    "content": "#import \"GPUImageTwoPassFilter.h\"\n\n@implementation GPUImageTwoPassFilter\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;\n{\n    if (!(self = [super initWithVertexShaderFromString:firstStageVertexShaderString fragmentShaderFromString:firstStageFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    secondProgramUniformStateRestorationBlocks = [NSMutableDictionary dictionaryWithCapacity:10];\n\n    runSynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext useImageProcessingContext];\n\n        secondFilterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:secondStageVertexShaderString fragmentShaderString:secondStageFragmentShaderString];\n        \n        if (!secondFilterProgram.initialized)\n        {\n            [self initializeSecondaryAttributes];\n            \n            if (![secondFilterProgram link])\n            {\n                NSString *progLog = [secondFilterProgram programLog];\n                NSLog(@\"Program link log: %@\", progLog);\n                NSString *fragLog = [secondFilterProgram fragmentShaderLog];\n                NSLog(@\"Fragment shader compile log: %@\", fragLog);\n                NSString *vertLog = [secondFilterProgram vertexShaderLog];\n                NSLog(@\"Vertex shader compile log: %@\", vertLog);\n                secondFilterProgram = nil;\n                NSAssert(NO, @\"Filter shader link failed\");\n            }\n        }\n        \n        secondFilterPositionAttribute = [secondFilterProgram attributeIndex:@\"position\"];\n        secondFilterTextureCoordinateAttribute = [secondFilterProgram attributeIndex:@\"inputTextureCoordinate\"];\n        secondFilterInputTextureUniform = [secondFilterProgram uniformIndex:@\"inputImageTexture\"]; // This does assume a name of \"inputImageTexture\" for the fragment shader\n        secondFilterInputTextureUniform2 = [secondFilterProgram uniformIndex:@\"inputImageTexture2\"]; // This does assume a name of \"inputImageTexture2\" for second input texture in the fragment shader\n        \n        [GPUImageContext setActiveShaderProgram:secondFilterProgram];\n        \n        glEnableVertexAttribArray(secondFilterPositionAttribute);\n        glEnableVertexAttribArray(secondFilterTextureCoordinateAttribute);\n    });\n\n    return self;\n}\n\n- (id)initWithFirstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;\n{\n    if (!(self = [self initWithFirstStageVertexShaderFromString:kGPUImageVertexShaderString firstStageFragmentShaderFromString:firstStageFragmentShaderString secondStageVertexShaderFromString:kGPUImageVertexShaderString secondStageFragmentShaderFromString:secondStageFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n- (void)initializeSecondaryAttributes;\n{\n    [secondFilterProgram addAttribute:@\"position\"];\n\t[secondFilterProgram addAttribute:@\"inputTextureCoordinate\"];\n}\n\n#pragma mark -\n#pragma mark Managing targets\n\n- (GPUImageFramebuffer *)framebufferForOutput;\n{\n    return secondOutputFramebuffer;\n}\n\n- 
(void)removeOutputFramebuffer;\n{\n    secondOutputFramebuffer = nil;\n}\n\n#pragma mark -\n#pragma mark Rendering\n\n- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;\n{\n    if (self.preventRendering)\n    {\n        [firstInputFramebuffer unlock];\n        return;\n    }\n    \n    [GPUImageContext setActiveShaderProgram:filterProgram];\n    \n    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];\n    [outputFramebuffer activateFramebuffer];\n    \n    [self setUniformsForProgramAtIndex:0];\n    \n    glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);\n    glClear(GL_COLOR_BUFFER_BIT);\n    \n\tglActiveTexture(GL_TEXTURE2);\n\tglBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);\n\t\n\tglUniform1i(filterInputTextureUniform, 2);\n    \n    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);\n\tglVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);\n    \n    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\n    \n    [firstInputFramebuffer unlock];\n    firstInputFramebuffer = nil;\n    \n    // This assumes that any two-pass filter that says it desires monochrome input is using the first pass for a luminance conversion, which can be dropped\n//    if (!currentlyReceivingMonochromeInput)\n//    {\n        // Run the first stage of the two-pass filter\n//        [super renderToTextureWithVertices:vertices textureCoordinates:textureCoordinates];\n//    }\n\n    // Run the second stage of the two-pass filter\n    secondOutputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];\n    [secondOutputFramebuffer activateFramebuffer];\n    [GPUImageContext setActiveShaderProgram:secondFilterProgram];\n    if (usingNextFrameForImageCapture)\n    {\n        [secondOutputFramebuffer lock];\n    }\n\n    [self setUniformsForProgramAtIndex:1];\n    \n    glActiveTexture(GL_TEXTURE3);\n    glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);\n    glVertexAttribPointer(secondFilterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:kGPUImageNoRotation]);\n\n    // TODO: Re-enable this monochrome optimization\n//    if (!currentlyReceivingMonochromeInput)\n//    {\n//        glActiveTexture(GL_TEXTURE3);\n//        glBindTexture(GL_TEXTURE_2D, outputTexture);\n//        glVertexAttribPointer(secondFilterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:kGPUImageNoRotation]);\n//    }\n//    else\n//    {\n//        glActiveTexture(GL_TEXTURE3);\n//        glBindTexture(GL_TEXTURE_2D, sourceTexture);\n//        glVertexAttribPointer(secondFilterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);\n//    }\n    \n\tglUniform1i(secondFilterInputTextureUniform, 3);\n    \n    glVertexAttribPointer(secondFilterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);\n\n    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);\n    glClear(GL_COLOR_BUFFER_BIT);\n\n    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\n    [outputFramebuffer unlock];\n    outputFramebuffer = nil;\n    \n    if (usingNextFrameForImageCapture)\n    {\n        dispatch_semaphore_signal(imageCaptureSemaphore);\n    }\n}\n\n- 
(void)setAndExecuteUniformStateCallbackAtIndex:(GLint)uniform forProgram:(GLProgram *)shaderProgram toBlock:(dispatch_block_t)uniformStateBlock;\n{\n// TODO: Deal with the fact that two-pass filters may have the same shader program identifier\n    if (shaderProgram == filterProgram)\n    {\n        [uniformStateRestorationBlocks setObject:[uniformStateBlock copy] forKey:[NSNumber numberWithInt:uniform]];\n    }\n    else\n    {\n        [secondProgramUniformStateRestorationBlocks setObject:[uniformStateBlock copy] forKey:[NSNumber numberWithInt:uniform]];\n    }\n    uniformStateBlock();\n}\n\n- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex;\n{\n    if (programIndex == 0)\n    {\n        [uniformStateRestorationBlocks enumerateKeysAndObjectsUsingBlock:^(id key, id obj, BOOL *stop){\n            dispatch_block_t currentBlock = obj;\n            currentBlock();\n        }];\n    }\n    else\n    {\n        [secondProgramUniformStateRestorationBlocks enumerateKeysAndObjectsUsingBlock:^(id key, id obj, BOOL *stop){\n            dispatch_block_t currentBlock = obj;\n            currentBlock();\n        }];\n    }\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTwoPassTextureSamplingFilter.h",
    "content": "#import \"GPUImageTwoPassFilter.h\"\n\n@interface GPUImageTwoPassTextureSamplingFilter : GPUImageTwoPassFilter\n{\n    GLint verticalPassTexelWidthOffsetUniform, verticalPassTexelHeightOffsetUniform, horizontalPassTexelWidthOffsetUniform, horizontalPassTexelHeightOffsetUniform;\n    GLfloat verticalPassTexelWidthOffset, verticalPassTexelHeightOffset, horizontalPassTexelWidthOffset, horizontalPassTexelHeightOffset;\n    CGFloat _verticalTexelSpacing, _horizontalTexelSpacing;\n}\n\n// This sets the spacing between texels (in pixels) when sampling for the first. By default, this is 1.0\n@property(readwrite, nonatomic) CGFloat verticalTexelSpacing, horizontalTexelSpacing;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTwoPassTextureSamplingFilter.m",
    "content": "#import \"GPUImageTwoPassTextureSamplingFilter.h\"\n\n@implementation GPUImageTwoPassTextureSamplingFilter\n\n@synthesize verticalTexelSpacing = _verticalTexelSpacing;\n@synthesize horizontalTexelSpacing = _horizontalTexelSpacing;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString\n{\n    if (!(self = [super initWithFirstStageVertexShaderFromString:firstStageVertexShaderString firstStageFragmentShaderFromString:firstStageFragmentShaderString secondStageVertexShaderFromString:secondStageVertexShaderString secondStageFragmentShaderFromString:secondStageFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    runSynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext useImageProcessingContext];\n\n        verticalPassTexelWidthOffsetUniform = [filterProgram uniformIndex:@\"texelWidthOffset\"];\n        verticalPassTexelHeightOffsetUniform = [filterProgram uniformIndex:@\"texelHeightOffset\"];\n        \n        horizontalPassTexelWidthOffsetUniform = [secondFilterProgram uniformIndex:@\"texelWidthOffset\"];\n        horizontalPassTexelHeightOffsetUniform = [secondFilterProgram uniformIndex:@\"texelHeightOffset\"];\n    });\n    \n    self.verticalTexelSpacing = 1.0;\n    self.horizontalTexelSpacing = 1.0;\n    \n    return self;\n}\n\n- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex;\n{\n    [super setUniformsForProgramAtIndex:programIndex];\n    \n    if (programIndex == 0)\n    {\n        glUniform1f(verticalPassTexelWidthOffsetUniform, verticalPassTexelWidthOffset);\n        glUniform1f(verticalPassTexelHeightOffsetUniform, verticalPassTexelHeightOffset);\n    }\n    else\n    {\n        glUniform1f(horizontalPassTexelWidthOffsetUniform, horizontalPassTexelWidthOffset);\n        glUniform1f(horizontalPassTexelHeightOffsetUniform, horizontalPassTexelHeightOffset);\n    }\n}\n\n- (void)setupFilterForSize:(CGSize)filterFrameSize;\n{\n    runSynchronouslyOnVideoProcessingQueue(^{\n        // The first pass through the framebuffer may rotate the inbound image, so need to account for that by changing up the kernel ordering for that pass\n        if (GPUImageRotationSwapsWidthAndHeight(inputRotation))\n        {\n            verticalPassTexelWidthOffset = _verticalTexelSpacing / filterFrameSize.height;\n            verticalPassTexelHeightOffset = 0.0;\n        }\n        else\n        {\n            verticalPassTexelWidthOffset = 0.0;\n            verticalPassTexelHeightOffset = _verticalTexelSpacing / filterFrameSize.height;\n        }\n        \n        horizontalPassTexelWidthOffset = _horizontalTexelSpacing / filterFrameSize.width;\n        horizontalPassTexelHeightOffset = 0.0;\n    });\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setVerticalTexelSpacing:(CGFloat)newValue;\n{\n    _verticalTexelSpacing = newValue;\n    [self setupFilterForSize:[self sizeOfFBO]];\n}\n\n- (void)setHorizontalTexelSpacing:(CGFloat)newValue;\n{\n    _horizontalTexelSpacing = newValue;\n    [self setupFilterForSize:[self sizeOfFBO]];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageVideoCamera.h",
    "content": "#import <Foundation/Foundation.h>\n#import <AVFoundation/AVFoundation.h>\n#import <CoreMedia/CoreMedia.h>\n#import \"GPUImageContext.h\"\n#import \"GPUImageOutput.h\"\n#import \"GPUImageColorConversion.h\"\n\n//Optionally override the YUV to RGB matrices\nvoid setColorConversion601( GLfloat conversionMatrix[9] );\nvoid setColorConversion601FullRange( GLfloat conversionMatrix[9] );\nvoid setColorConversion709( GLfloat conversionMatrix[9] );\n\n\n//Delegate Protocal for Face Detection.\n@protocol GPUImageVideoCameraDelegate <NSObject>\n\n@optional\n- (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer;\n@end\n\n\n/**\n A GPUImageOutput that provides frames from either camera\n*/\n@interface GPUImageVideoCamera : GPUImageOutput <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate>\n{\n    NSUInteger numberOfFramesCaptured;\n    CGFloat totalFrameTimeDuringCapture;\n    \n    AVCaptureSession *_captureSession;\n    AVCaptureDevice *_inputCamera;\n    AVCaptureDevice *_microphone;\n    AVCaptureDeviceInput *videoInput;\n\tAVCaptureVideoDataOutput *videoOutput;\n\n    BOOL capturePaused;\n    GPUImageRotationMode outputRotation, internalRotation;\n    dispatch_semaphore_t frameRenderingSemaphore;\n        \n    BOOL captureAsYUV;\n    GLuint luminanceTexture, chrominanceTexture;\n\n    __unsafe_unretained id<GPUImageVideoCameraDelegate> _delegate;\n}\n\n/// Whether or not the underlying AVCaptureSession is running\n@property(readonly, nonatomic) BOOL isRunning;\n\n/// The AVCaptureSession used to capture from the camera\n@property(readonly, retain, nonatomic) AVCaptureSession *captureSession;\n\n/// This enables the capture session preset to be changed on the fly\n@property (readwrite, nonatomic, copy) NSString *captureSessionPreset;\n\n/// This sets the frame rate of the camera (iOS 5 and above only)\n/**\n Setting this to 0 or below will set the frame rate back to the default setting for a particular preset.\n */\n@property (readwrite) int32_t frameRate;\n\n/// Easy way to tell which cameras are present on device\n@property (readonly, getter = isFrontFacingCameraPresent) BOOL frontFacingCameraPresent;\n@property (readonly, getter = isBackFacingCameraPresent) BOOL backFacingCameraPresent;\n\n/// This enables the benchmarking mode, which logs out instantaneous and average frame times to the console\n@property(readwrite, nonatomic) BOOL runBenchmark;\n\n/// Use this property to manage camera settings. Focus point, exposure point, etc.\n@property(readonly) AVCaptureDevice *inputCamera;\n\n/// This determines the rotation applied to the output image, based on the source material\n@property(readwrite, nonatomic) UIInterfaceOrientation outputImageOrientation;\n\n/// These properties determine whether or not the two camera orientations should be mirrored. By default, both are NO.\n@property(readwrite, nonatomic) BOOL horizontallyMirrorFrontFacingCamera, horizontallyMirrorRearFacingCamera;\n\n@property(nonatomic, assign) id<GPUImageVideoCameraDelegate> delegate;\n\n/// @name Initialization and teardown\n\n/** Begin a capture session\n \n See AVCaptureSession for acceptable values\n \n @param sessionPreset Session preset to use\n @param cameraPosition Camera to capture from\n */\n- (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition;\n\n/** Add audio capture to the session. 
Adding inputs and outputs freezes the capture session momentarily, so you\n    can use this method to add the audio inputs and outputs early, if you're going to set the audioEncodingTarget \n    later. Returns YES if the audio inputs and outputs were added, or NO if they had already been added.\n */\n- (BOOL)addAudioInputsAndOutputs;\n\n/** Remove the audio capture inputs and outputs from this session. Returns YES if the audio inputs and outputs\n    were removed, or NO if they hadn't already been added.\n */\n- (BOOL)removeAudioInputsAndOutputs;\n\n/** Tear down the capture session\n */\n- (void)removeInputsAndOutputs;\n\n/// @name Manage the camera video stream\n\n/** Start camera capturing\n */\n- (void)startCameraCapture;\n\n/** Stop camera capturing\n */\n- (void)stopCameraCapture;\n\n/** Pause camera capturing\n */\n- (void)pauseCameraCapture;\n\n/** Resume camera capturing\n */\n- (void)resumeCameraCapture;\n\n/** Process a video sample\n @param sampleBuffer Buffer to process\n */\n- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;\n\n/** Process an audio sample\n @param sampleBuffer Buffer to process\n */\n- (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;\n\n/** Get the position (front, rear) of the source camera\n */\n- (AVCaptureDevicePosition)cameraPosition;\n\n/** Get the AVCaptureConnection of the source camera\n */\n- (AVCaptureConnection *)videoCaptureConnection;\n\n/** This flips between the front and rear cameras\n */\n- (void)rotateCamera;\n\n/// @name Benchmarking\n\n/** When benchmarking is enabled, this will keep a running average of the time from uploading, processing, and final recording or display\n */\n- (CGFloat)averageFrameDurationDuringCapture;\n\n- (void)resetBenchmarkAverage;\n\n+ (BOOL)isBackFacingCameraPresent;\n+ (BOOL)isFrontFacingCameraPresent;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageVideoCamera.m",
    "content": "#import \"GPUImageVideoCamera.h\"\n#import \"GPUImageMovieWriter.h\"\n#import \"GPUImageFilter.h\"\n\nvoid setColorConversion601( GLfloat conversionMatrix[9] )\n{\n    kColorConversion601 = conversionMatrix;\n}\n\nvoid setColorConversion601FullRange( GLfloat conversionMatrix[9] )\n{\n    kColorConversion601FullRange = conversionMatrix;\n}\n\nvoid setColorConversion709( GLfloat conversionMatrix[9] )\n{\n    kColorConversion709 = conversionMatrix;\n}\n\n#pragma mark -\n#pragma mark Private methods and instance variables\n\n@interface GPUImageVideoCamera () \n{\n\tAVCaptureDeviceInput *audioInput;\n\tAVCaptureAudioDataOutput *audioOutput;\n    NSDate *startingCaptureTime;\n\t\n    dispatch_queue_t cameraProcessingQueue, audioProcessingQueue;\n    \n    GLProgram *yuvConversionProgram;\n    GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute;\n    GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform;\n    GLint yuvConversionMatrixUniform;\n    const GLfloat *_preferredConversion;\n    \n    BOOL isFullYUVRange;\n    \n    int imageBufferWidth, imageBufferHeight;\n    \n    BOOL addedAudioInputsDueToEncodingTarget;\n}\n\n- (void)updateOrientationSendToTargets;\n- (void)convertYUVToRGBOutput;\n\n@end\n\n@implementation GPUImageVideoCamera\n\n@synthesize captureSessionPreset = _captureSessionPreset;\n@synthesize captureSession = _captureSession;\n@synthesize inputCamera = _inputCamera;\n@synthesize runBenchmark = _runBenchmark;\n@synthesize outputImageOrientation = _outputImageOrientation;\n@synthesize delegate = _delegate;\n@synthesize horizontallyMirrorFrontFacingCamera = _horizontallyMirrorFrontFacingCamera, horizontallyMirrorRearFacingCamera = _horizontallyMirrorRearFacingCamera;\n@synthesize frameRate = _frameRate;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [self initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n- (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition; \n{\n\tif (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n    cameraProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH,0);\n\taudioProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW,0);\n\n    frameRenderingSemaphore = dispatch_semaphore_create(1);\n\n\t_frameRate = 0; // This will not set frame rate unless this value gets set to 1 or above\n    _runBenchmark = NO;\n    capturePaused = NO;\n    outputRotation = kGPUImageNoRotation;\n    internalRotation = kGPUImageNoRotation;\n    captureAsYUV = YES;\n    _preferredConversion = kColorConversion709;\n    \n\t// Grab the back-facing or front-facing camera\n    _inputCamera = nil;\n\tNSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];\n\tfor (AVCaptureDevice *device in devices) \n\t{\n\t\tif ([device position] == cameraPosition)\n\t\t{\n\t\t\t_inputCamera = device;\n\t\t}\n\t}\n    \n    if (!_inputCamera) {\n        return nil;\n    }\n    \n\t// Create the capture session\n\t_captureSession = [[AVCaptureSession alloc] init];\n\t\n    [_captureSession beginConfiguration];\n    \n\t// Add the video input\t\n\tNSError *error = nil;\n\tvideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:_inputCamera error:&error];\n\tif ([_captureSession canAddInput:videoInput]) \n\t{\n\t\t[_captureSession 
addInput:videoInput];\n\t}\n\t\n\t// Add the video frame output\t\n\tvideoOutput = [[AVCaptureVideoDataOutput alloc] init];\n\t[videoOutput setAlwaysDiscardsLateVideoFrames:NO];\n    \n//    if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])\n    if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])\n    {\n        BOOL supportsFullYUVRange = NO;\n        NSArray *supportedPixelFormats = videoOutput.availableVideoCVPixelFormatTypes;\n        for (NSNumber *currentPixelFormat in supportedPixelFormats)\n        {\n            if ([currentPixelFormat intValue] == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)\n            {\n                supportsFullYUVRange = YES;\n            }\n        }\n        \n        if (supportsFullYUVRange)\n        {\n            [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];\n            isFullYUVRange = YES;\n        }\n        else\n        {\n            [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];\n            isFullYUVRange = NO;\n        }\n    }\n    else\n    {\n        [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];\n    }\n    \n    runSynchronouslyOnVideoProcessingQueue(^{\n        \n        if (captureAsYUV)\n        {\n            [GPUImageContext useImageProcessingContext];\n            //            if ([GPUImageContext deviceSupportsRedTextures])\n            //            {\n            //                yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForRGFragmentShaderString];\n            //            }\n            //            else\n            //            {\n            if (isFullYUVRange)\n            {\n                yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVFullRangeConversionForLAFragmentShaderString];\n            }\n            else\n            {\n                yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForLAFragmentShaderString];\n            }\n\n            //            }\n            \n            if (!yuvConversionProgram.initialized)\n            {\n                [yuvConversionProgram addAttribute:@\"position\"];\n                [yuvConversionProgram addAttribute:@\"inputTextureCoordinate\"];\n                \n                if (![yuvConversionProgram link])\n                {\n                    NSString *progLog = [yuvConversionProgram programLog];\n                    NSLog(@\"Program link log: %@\", progLog);\n                    NSString *fragLog = [yuvConversionProgram fragmentShaderLog];\n                    NSLog(@\"Fragment shader compile log: %@\", fragLog);\n                    NSString *vertLog = [yuvConversionProgram vertexShaderLog];\n                    NSLog(@\"Vertex shader compile log: %@\", vertLog);\n                    yuvConversionProgram = nil;\n                    NSAssert(NO, @\"Filter shader 
link failed\");\n                }\n            }\n            \n            yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@\"position\"];\n            yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@\"inputTextureCoordinate\"];\n            yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@\"luminanceTexture\"];\n            yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@\"chrominanceTexture\"];\n            yuvConversionMatrixUniform = [yuvConversionProgram uniformIndex:@\"colorConversionMatrix\"];\n            \n            [GPUImageContext setActiveShaderProgram:yuvConversionProgram];\n            \n            glEnableVertexAttribArray(yuvConversionPositionAttribute);\n            glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute);\n        }\n    });\n    \n    [videoOutput setSampleBufferDelegate:self queue:cameraProcessingQueue];\n\tif ([_captureSession canAddOutput:videoOutput])\n\t{\n\t\t[_captureSession addOutput:videoOutput];\n\t}\n\telse\n\t{\n\t\tNSLog(@\"Couldn't add video output\");\n        return nil;\n\t}\n    \n\t_captureSessionPreset = sessionPreset;\n    [_captureSession setSessionPreset:_captureSessionPreset];\n\n// This will let you get 60 FPS video from the 720p preset on an iPhone 4S, but only that device and that preset\n//    AVCaptureConnection *conn = [videoOutput connectionWithMediaType:AVMediaTypeVideo];\n//    \n//    if (conn.supportsVideoMinFrameDuration)\n//        conn.videoMinFrameDuration = CMTimeMake(1,60);\n//    if (conn.supportsVideoMaxFrameDuration)\n//        conn.videoMaxFrameDuration = CMTimeMake(1,60);\n    \n    [_captureSession commitConfiguration];\n    \n\treturn self;\n}\n\n- (GPUImageFramebuffer *)framebufferForOutput;\n{\n    return outputFramebuffer;\n}\n\n- (void)dealloc \n{\n    [self stopCameraCapture];\n    [videoOutput setSampleBufferDelegate:nil queue:dispatch_get_main_queue()];\n    [audioOutput setSampleBufferDelegate:nil queue:dispatch_get_main_queue()];\n    \n    [self removeInputsAndOutputs];\n    \n// ARC forbids explicit message send of 'release'; since iOS 6 even for dispatch_release() calls: stripping it out in that case is required.\n#if !OS_OBJECT_USE_OBJC\n    if (frameRenderingSemaphore != NULL)\n    {\n        dispatch_release(frameRenderingSemaphore);\n    }\n#endif\n}\n\n- (BOOL)addAudioInputsAndOutputs\n{\n    if (audioOutput)\n        return NO;\n    \n    [_captureSession beginConfiguration];\n    \n    _microphone = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];\n    audioInput = [AVCaptureDeviceInput deviceInputWithDevice:_microphone error:nil];\n    if ([_captureSession canAddInput:audioInput])\n    {\n        [_captureSession addInput:audioInput];\n    }\n    audioOutput = [[AVCaptureAudioDataOutput alloc] init];\n    \n    if ([_captureSession canAddOutput:audioOutput])\n    {\n        [_captureSession addOutput:audioOutput];\n    }\n    else\n    {\n        NSLog(@\"Couldn't add audio output\");\n    }\n    [audioOutput setSampleBufferDelegate:self queue:audioProcessingQueue];\n    \n    [_captureSession commitConfiguration];\n    return YES;\n}\n\n- (BOOL)removeAudioInputsAndOutputs\n{\n    if (!audioOutput)\n        return NO;\n    \n    [_captureSession beginConfiguration];\n    [_captureSession removeInput:audioInput];\n    [_captureSession removeOutput:audioOutput];\n    audioInput = nil;\n    audioOutput = nil;\n    _microphone = nil;\n    [_captureSession 
commitConfiguration];\n    return YES;\n}\n\n- (void)removeInputsAndOutputs;\n{\n    [_captureSession beginConfiguration];\n    if (videoInput) {\n        [_captureSession removeInput:videoInput];\n        [_captureSession removeOutput:videoOutput];\n        videoInput = nil;\n        videoOutput = nil;\n    }\n    if (_microphone != nil)\n    {\n        [_captureSession removeInput:audioInput];\n        [_captureSession removeOutput:audioOutput];\n        audioInput = nil;\n        audioOutput = nil;\n        _microphone = nil;\n    }\n    [_captureSession commitConfiguration];\n}\n\n#pragma mark -\n#pragma mark Managing targets\n\n- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;\n{\n    [super addTarget:newTarget atTextureLocation:textureLocation];\n    \n    [newTarget setInputRotation:outputRotation atIndex:textureLocation];\n}\n\n#pragma mark -\n#pragma mark Manage the camera video stream\n\n- (BOOL)isRunning;\n{\n    return [_captureSession isRunning];\n}\n\n- (void)startCameraCapture;\n{\n    if (![_captureSession isRunning])\n\t{\n        startingCaptureTime = [NSDate date];\n\t\t[_captureSession startRunning];\n\t};\n}\n\n- (void)stopCameraCapture;\n{\n    if ([_captureSession isRunning])\n    {\n        [_captureSession stopRunning];\n    }\n}\n\n- (void)pauseCameraCapture;\n{\n    capturePaused = YES;\n}\n\n- (void)resumeCameraCapture;\n{\n    capturePaused = NO;\n}\n\n- (void)rotateCamera\n{\n\tif (self.frontFacingCameraPresent == NO)\n\t\treturn;\n\t\n    NSError *error;\n    AVCaptureDeviceInput *newVideoInput;\n    AVCaptureDevicePosition currentCameraPosition = [[videoInput device] position];\n    \n    if (currentCameraPosition == AVCaptureDevicePositionBack)\n    {\n        currentCameraPosition = AVCaptureDevicePositionFront;\n    }\n    else\n    {\n        currentCameraPosition = AVCaptureDevicePositionBack;\n    }\n    \n    AVCaptureDevice *backFacingCamera = nil;\n    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];\n\tfor (AVCaptureDevice *device in devices) \n\t{\n\t\tif ([device position] == currentCameraPosition)\n\t\t{\n\t\t\tbackFacingCamera = device;\n\t\t}\n\t}\n    newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:backFacingCamera error:&error];\n    \n    if (newVideoInput != nil)\n    {\n        [_captureSession beginConfiguration];\n        \n        [_captureSession removeInput:videoInput];\n        if ([_captureSession canAddInput:newVideoInput])\n        {\n            [_captureSession addInput:newVideoInput];\n            videoInput = newVideoInput;\n        }\n        else\n        {\n            [_captureSession addInput:videoInput];\n        }\n        //captureSession.sessionPreset = oriPreset;\n        [_captureSession commitConfiguration];\n    }\n    \n    _inputCamera = backFacingCamera;\n    [self setOutputImageOrientation:_outputImageOrientation];\n}\n\n- (AVCaptureDevicePosition)cameraPosition \n{\n    return [[videoInput device] position];\n}\n\n+ (BOOL)isBackFacingCameraPresent;\n{\n\tNSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];\n\t\n\tfor (AVCaptureDevice *device in devices)\n\t{\n\t\tif ([device position] == AVCaptureDevicePositionBack)\n\t\t\treturn YES;\n\t}\n\t\n\treturn NO;\n}\n\n- (BOOL)isBackFacingCameraPresent\n{\n    return [GPUImageVideoCamera isBackFacingCameraPresent];\n}\n\n+ (BOOL)isFrontFacingCameraPresent;\n{\n\tNSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];\n\t\n\tfor 
(AVCaptureDevice *device in devices)\n\t{\n\t\tif ([device position] == AVCaptureDevicePositionFront)\n\t\t\treturn YES;\n\t}\n\t\n\treturn NO;\n}\n\n- (BOOL)isFrontFacingCameraPresent\n{\n    return [GPUImageVideoCamera isFrontFacingCameraPresent];\n}\n\n- (void)setCaptureSessionPreset:(NSString *)captureSessionPreset;\n{\n\t[_captureSession beginConfiguration];\n\t\n\t_captureSessionPreset = captureSessionPreset;\n\t[_captureSession setSessionPreset:_captureSessionPreset];\n\t\n\t[_captureSession commitConfiguration];\n}\n\n- (void)setFrameRate:(int32_t)frameRate;\n{\n\t_frameRate = frameRate;\n\t\n\tif (_frameRate > 0)\n\t{\n\t\tif ([_inputCamera respondsToSelector:@selector(setActiveVideoMinFrameDuration:)] &&\n            [_inputCamera respondsToSelector:@selector(setActiveVideoMaxFrameDuration:)]) {\n            \n            NSError *error;\n            [_inputCamera lockForConfiguration:&error];\n            if (error == nil) {\n#if defined(__IPHONE_7_0)\n                [_inputCamera setActiveVideoMinFrameDuration:CMTimeMake(1, _frameRate)];\n                [_inputCamera setActiveVideoMaxFrameDuration:CMTimeMake(1, _frameRate)];\n#endif\n            }\n            [_inputCamera unlockForConfiguration];\n            \n        } else {\n            \n            for (AVCaptureConnection *connection in videoOutput.connections)\n            {\n#pragma clang diagnostic push\n#pragma clang diagnostic ignored \"-Wdeprecated-declarations\"\n                if ([connection respondsToSelector:@selector(setVideoMinFrameDuration:)])\n                    connection.videoMinFrameDuration = CMTimeMake(1, _frameRate);\n                \n                if ([connection respondsToSelector:@selector(setVideoMaxFrameDuration:)])\n                    connection.videoMaxFrameDuration = CMTimeMake(1, _frameRate);\n#pragma clang diagnostic pop\n            }\n        }\n        \n\t}\n\telse\n\t{\n\t\tif ([_inputCamera respondsToSelector:@selector(setActiveVideoMinFrameDuration:)] &&\n            [_inputCamera respondsToSelector:@selector(setActiveVideoMaxFrameDuration:)]) {\n            \n            NSError *error;\n            [_inputCamera lockForConfiguration:&error];\n            if (error == nil) {\n#if defined(__IPHONE_7_0)\n                [_inputCamera setActiveVideoMinFrameDuration:kCMTimeInvalid];\n                [_inputCamera setActiveVideoMaxFrameDuration:kCMTimeInvalid];\n#endif\n            }\n            [_inputCamera unlockForConfiguration];\n            \n        } else {\n            \n            for (AVCaptureConnection *connection in videoOutput.connections)\n            {\n#pragma clang diagnostic push\n#pragma clang diagnostic ignored \"-Wdeprecated-declarations\"\n                if ([connection respondsToSelector:@selector(setVideoMinFrameDuration:)])\n                    connection.videoMinFrameDuration = kCMTimeInvalid; // This sets videoMinFrameDuration back to default\n                \n                if ([connection respondsToSelector:@selector(setVideoMaxFrameDuration:)])\n                    connection.videoMaxFrameDuration = kCMTimeInvalid; // This sets videoMaxFrameDuration back to default\n#pragma clang diagnostic pop\n            }\n        }\n        \n\t}\n}\n\n- (int32_t)frameRate;\n{\n\treturn _frameRate;\n}\n\n- (AVCaptureConnection *)videoCaptureConnection {\n    for (AVCaptureConnection *connection in [videoOutput connections] ) {\n\t\tfor ( AVCaptureInputPort *port in [connection inputPorts] ) {\n\t\t\tif ( [[port mediaType] isEqual:AVMediaTypeVideo] ) 
{\n\t\t\t\treturn connection;\n\t\t\t}\n\t\t}\n\t}\n    \n    return nil;\n}\n\n#define INITIALFRAMESTOIGNOREFORBENCHMARK 5\n\n- (void)updateTargetsForVideoCameraUsingCacheTextureAtWidth:(int)bufferWidth height:(int)bufferHeight time:(CMTime)currentTime;\n{\n    // First, update all the framebuffers in the targets\n    for (id<GPUImageInput> currentTarget in targets)\n    {\n        if ([currentTarget enabled])\n        {\n            NSInteger indexOfObject = [targets indexOfObject:currentTarget];\n            NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n            \n            if (currentTarget != self.targetToIgnoreForUpdates)\n            {\n                [currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];\n                [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:textureIndexOfTarget];\n                \n                if ([currentTarget wantsMonochromeInput] && captureAsYUV)\n                {\n                    [currentTarget setCurrentlyReceivingMonochromeInput:YES];\n                    // TODO: Replace optimization for monochrome output\n                    [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];\n                }\n                else\n                {\n                    [currentTarget setCurrentlyReceivingMonochromeInput:NO];\n                    [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];\n                }\n            }\n            else\n            {\n                [currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];\n                [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];\n            }\n        }\n    }\n    \n    // Then release our hold on the local framebuffer to send it back to the cache as soon as it's no longer needed\n    [outputFramebuffer unlock];\n    outputFramebuffer = nil;\n    \n    // Finally, trigger rendering as needed\n    for (id<GPUImageInput> currentTarget in targets)\n    {\n        if ([currentTarget enabled])\n        {\n            NSInteger indexOfObject = [targets indexOfObject:currentTarget];\n            NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n            \n            if (currentTarget != self.targetToIgnoreForUpdates)\n            {\n                [currentTarget newFrameReadyAtTime:currentTime atIndex:textureIndexOfTarget];\n            }\n        }\n    }\n}\n\n- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;\n{\n    if (capturePaused)\n    {\n        return;\n    }\n    \n    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();\n    CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer);\n    int bufferWidth = (int) CVPixelBufferGetWidth(cameraFrame);\n    int bufferHeight = (int) CVPixelBufferGetHeight(cameraFrame);\n    \n    CFTypeRef colorAttachments = CVBufferGetAttachment(cameraFrame, kCVImageBufferYCbCrMatrixKey, NULL);\n    if (colorAttachments != NULL)\n    {\n        if(CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo)\n        {\n            if (isFullYUVRange)\n            {\n                _preferredConversion = kColorConversion601FullRange;\n            }\n            else\n            {\n                _preferredConversion = kColorConversion601;\n            }\n        }\n        else\n        {\n     
       _preferredConversion = kColorConversion709;\n        }\n    }\n    else\n    {\n        if (isFullYUVRange)\n        {\n            _preferredConversion = kColorConversion601FullRange;\n        }\n        else\n        {\n            _preferredConversion = kColorConversion601;\n        }\n    }\n\n\tCMTime currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);\n\n    [GPUImageContext useImageProcessingContext];\n\n    if ([GPUImageContext supportsFastTextureUpload] && captureAsYUV)\n    {\n        CVOpenGLESTextureRef luminanceTextureRef = NULL;\n        CVOpenGLESTextureRef chrominanceTextureRef = NULL;\n\n//        if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])\n        if (CVPixelBufferGetPlaneCount(cameraFrame) > 0) // Check for YUV planar inputs to do RGB conversion\n        {\n            CVPixelBufferLockBaseAddress(cameraFrame, 0);\n            \n            if ( (imageBufferWidth != bufferWidth) && (imageBufferHeight != bufferHeight) )\n            {\n                imageBufferWidth = bufferWidth;\n                imageBufferHeight = bufferHeight;\n            }\n            \n            CVReturn err;\n            // Y-plane\n            glActiveTexture(GL_TEXTURE4);\n            if ([GPUImageContext deviceSupportsRedTextures])\n            {\n//                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, cameraFrame, NULL, GL_TEXTURE_2D, GL_RED_EXT, bufferWidth, bufferHeight, GL_RED_EXT, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);\n                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);\n            }\n            else\n            {\n                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);\n            }\n            if (err)\n            {\n                NSLog(@\"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d\", err);\n            }\n            \n            luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef);\n            glBindTexture(GL_TEXTURE_2D, luminanceTexture);\n            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);\n            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);\n            \n            // UV-plane\n            glActiveTexture(GL_TEXTURE5);\n            if ([GPUImageContext deviceSupportsRedTextures])\n            {\n//                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, cameraFrame, NULL, GL_TEXTURE_2D, GL_RG_EXT, bufferWidth/2, bufferHeight/2, GL_RG_EXT, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);\n                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);\n            }\n            else\n            {\n                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext 
sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);\n            }\n            if (err)\n            {\n                NSLog(@\"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d\", err);\n            }\n            \n            chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef);\n            glBindTexture(GL_TEXTURE_2D, chrominanceTexture);\n            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);\n            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);\n            \n//            if (!allTargetsWantMonochromeData)\n//            {\n                [self convertYUVToRGBOutput];\n//            }\n\n            int rotatedImageBufferWidth = bufferWidth, rotatedImageBufferHeight = bufferHeight;\n            \n            if (GPUImageRotationSwapsWidthAndHeight(internalRotation))\n            {\n                rotatedImageBufferWidth = bufferHeight;\n                rotatedImageBufferHeight = bufferWidth;\n            }\n            \n            [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:rotatedImageBufferWidth height:rotatedImageBufferHeight time:currentTime];\n            \n            CVPixelBufferUnlockBaseAddress(cameraFrame, 0);\n            CFRelease(luminanceTextureRef);\n            CFRelease(chrominanceTextureRef);\n        }\n        else\n        {\n            // TODO: Mesh this with the output framebuffer structure\n            \n//            CVPixelBufferLockBaseAddress(cameraFrame, 0);\n//            \n//            CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_RGBA, bufferWidth, bufferHeight, GL_BGRA, GL_UNSIGNED_BYTE, 0, &texture);\n//            \n//            if (!texture || err) {\n//                NSLog(@\"Camera CVOpenGLESTextureCacheCreateTextureFromImage failed (error: %d)\", err);\n//                NSAssert(NO, @\"Camera failure\");\n//                return;\n//            }\n//            \n//            outputTexture = CVOpenGLESTextureGetName(texture);\n//            //        glBindTexture(CVOpenGLESTextureGetTarget(texture), outputTexture);\n//            glBindTexture(GL_TEXTURE_2D, outputTexture);\n//            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);\n//            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);\n//            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);\n//            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);\n//            \n//            [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:bufferWidth height:bufferHeight time:currentTime];\n//\n//            CVPixelBufferUnlockBaseAddress(cameraFrame, 0);\n//            CFRelease(texture);\n//\n//            outputTexture = 0;\n        }\n        \n        \n        if (_runBenchmark)\n        {\n            numberOfFramesCaptured++;\n            if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK)\n            {\n                CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);\n                totalFrameTimeDuringCapture += currentFrameTime;\n                NSLog(@\"GPUImageVideoCamera-->Average frame time : %f ms\", [self averageFrameDurationDuringCapture]);\n    
            NSLog(@\"GPUImageVideoCamera-->Current frame time : %f ms\", 1000.0 * currentFrameTime);\n            }\n        }\n    }\n    else\n    {\n        CVPixelBufferLockBaseAddress(cameraFrame, 0);\n        \n        int bytesPerRow = (int) CVPixelBufferGetBytesPerRow(cameraFrame);\n        outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bytesPerRow / 4, bufferHeight) onlyTexture:YES];\n        [outputFramebuffer activateFramebuffer];\n\n        glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);\n        \n        //        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bufferWidth, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame));\n        \n        // Using BGRA extension to pull in video frame data directly\n        // The use of bytesPerRow / 4 accounts for a display glitch present in preview video frames when using the photo preset on the camera\n        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bytesPerRow / 4, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame));\n        \n        [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:bytesPerRow / 4 height:bufferHeight time:currentTime];\n        \n        CVPixelBufferUnlockBaseAddress(cameraFrame, 0);\n        \n        if (_runBenchmark)\n        {\n            numberOfFramesCaptured++;\n            if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK)\n            {\n                CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);\n                totalFrameTimeDuringCapture += currentFrameTime;\n            }\n        }\n    }  \n}\n\n- (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;\n{\n    [self.audioEncodingTarget processAudioBuffer:sampleBuffer]; \n}\n\n- (void)convertYUVToRGBOutput;\n{\n    [GPUImageContext setActiveShaderProgram:yuvConversionProgram];\n\n    int rotatedImageBufferWidth = imageBufferWidth, rotatedImageBufferHeight = imageBufferHeight;\n\n    if (GPUImageRotationSwapsWidthAndHeight(internalRotation))\n    {\n        rotatedImageBufferWidth = imageBufferHeight;\n        rotatedImageBufferHeight = imageBufferWidth;\n    }\n\n    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(rotatedImageBufferWidth, rotatedImageBufferHeight) textureOptions:self.outputTextureOptions onlyTexture:NO];\n    [outputFramebuffer activateFramebuffer];\n\n    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);\n    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);\n    \n    static const GLfloat squareVertices[] = {\n        -1.0f, -1.0f,\n        1.0f, -1.0f,\n        -1.0f,  1.0f,\n        1.0f,  1.0f,\n    };\n    \n\tglActiveTexture(GL_TEXTURE4);\n\tglBindTexture(GL_TEXTURE_2D, luminanceTexture);\n\tglUniform1i(yuvConversionLuminanceTextureUniform, 4);\n\n    glActiveTexture(GL_TEXTURE5);\n\tglBindTexture(GL_TEXTURE_2D, chrominanceTexture);\n\tglUniform1i(yuvConversionChrominanceTextureUniform, 5);\n\n    glUniformMatrix3fv(yuvConversionMatrixUniform, 1, GL_FALSE, _preferredConversion);\n\n    glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);\n\tglVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [GPUImageFilter textureCoordinatesForRotation:internalRotation]);\n    \n    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\n}\n\n#pragma mark -\n#pragma mark Benchmarking\n\n- (CGFloat)averageFrameDurationDuringCapture;\n{\n    return 
(totalFrameTimeDuringCapture / (CGFloat)(numberOfFramesCaptured - INITIALFRAMESTOIGNOREFORBENCHMARK)) * 1000.0;\n}\n\n- (void)resetBenchmarkAverage;\n{\n    numberOfFramesCaptured = 0;\n    totalFrameTimeDuringCapture = 0.0;\n}\n\n#pragma mark -\n#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate\n\n- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection\n{\n    if (!self.captureSession.isRunning)\n    {\n        return;\n    }\n    else if (captureOutput == audioOutput)\n    {\n        [self processAudioSampleBuffer:sampleBuffer];\n    }\n    else\n    {\n        if (dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0)\n        {\n            return;\n        }\n        \n        CFRetain(sampleBuffer);\n        runAsynchronouslyOnVideoProcessingQueue(^{\n            //Feature Detection Hook.\n            if (self.delegate && [self.delegate respondsToSelector:@selector(willOutputSampleBuffer:)])\n            {\n                [self.delegate willOutputSampleBuffer:sampleBuffer];\n            }\n            \n            [self processVideoSampleBuffer:sampleBuffer];\n            \n            CFRelease(sampleBuffer);\n            dispatch_semaphore_signal(frameRenderingSemaphore);\n        });\n    }\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setAudioEncodingTarget:(GPUImageMovieWriter *)newValue;\n{\n    if (newValue) {\n        /* Add audio inputs and outputs, if necessary */\n        addedAudioInputsDueToEncodingTarget |= [self addAudioInputsAndOutputs];\n    } else if (addedAudioInputsDueToEncodingTarget) {\n        /* Remove audio inputs and outputs, if they were added by previously setting the audio encoding target */\n        [self removeAudioInputsAndOutputs];\n        addedAudioInputsDueToEncodingTarget = NO;\n    }\n    \n    [super setAudioEncodingTarget:newValue];\n}\n\n- (void)updateOrientationSendToTargets;\n{\n    runSynchronouslyOnVideoProcessingQueue(^{\n        \n        //    From the iOS 5.0 release notes:\n        //    In previous iOS versions, the front-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeLeft and the back-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeRight.\n        \n        if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])\n        {\n            outputRotation = kGPUImageNoRotation;\n            if ([self cameraPosition] == AVCaptureDevicePositionBack)\n            {\n                if (_horizontallyMirrorRearFacingCamera)\n                {\n                    switch(_outputImageOrientation)\n                    {\n                        case UIInterfaceOrientationPortrait:internalRotation = kGPUImageRotateRightFlipVertical; break;\n                        case UIInterfaceOrientationPortraitUpsideDown:internalRotation = kGPUImageRotate180; break;\n                        case UIInterfaceOrientationLandscapeLeft:internalRotation = kGPUImageFlipHorizonal; break;\n                        case UIInterfaceOrientationLandscapeRight:internalRotation = kGPUImageFlipVertical; break;\n                        default:internalRotation = kGPUImageNoRotation;\n                    }\n                }\n                else\n                {\n                    switch(_outputImageOrientation)\n                    {\n                        case UIInterfaceOrientationPortrait:internalRotation = kGPUImageRotateRight; break;\n                   
     case UIInterfaceOrientationPortraitUpsideDown:internalRotation = kGPUImageRotateLeft; break;\n                        case UIInterfaceOrientationLandscapeLeft:internalRotation = kGPUImageRotate180; break;\n                        case UIInterfaceOrientationLandscapeRight:internalRotation = kGPUImageNoRotation; break;\n                        default:internalRotation = kGPUImageNoRotation;\n                    }\n                }\n            }\n            else\n            {\n                if (_horizontallyMirrorFrontFacingCamera)\n                {\n                    switch(_outputImageOrientation)\n                    {\n                        case UIInterfaceOrientationPortrait:internalRotation = kGPUImageRotateRightFlipVertical; break;\n                        case UIInterfaceOrientationPortraitUpsideDown:internalRotation = kGPUImageRotateRightFlipHorizontal; break;\n                        case UIInterfaceOrientationLandscapeLeft:internalRotation = kGPUImageFlipHorizonal; break;\n                        case UIInterfaceOrientationLandscapeRight:internalRotation = kGPUImageFlipVertical; break;\n                        default:internalRotation = kGPUImageNoRotation;\n                   }\n                }\n                else\n                {\n                    switch(_outputImageOrientation)\n                    {\n                        case UIInterfaceOrientationPortrait:internalRotation = kGPUImageRotateRight; break;\n                        case UIInterfaceOrientationPortraitUpsideDown:internalRotation = kGPUImageRotateLeft; break;\n                        case UIInterfaceOrientationLandscapeLeft:internalRotation = kGPUImageNoRotation; break;\n                        case UIInterfaceOrientationLandscapeRight:internalRotation = kGPUImageRotate180; break;\n                        default:internalRotation = kGPUImageNoRotation;\n                    }\n                }\n            }\n        }\n        else\n        {\n            if ([self cameraPosition] == AVCaptureDevicePositionBack)\n            {\n                if (_horizontallyMirrorRearFacingCamera)\n                {\n                    switch(_outputImageOrientation)\n                    {\n                        case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRightFlipVertical; break;\n                        case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotate180; break;\n                        case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageFlipHorizonal; break;\n                        case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageFlipVertical; break;\n                        default:outputRotation = kGPUImageNoRotation;\n                    }\n                }\n                else\n                {\n                    switch(_outputImageOrientation)\n                    {\n                        case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRight; break;\n                        case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotateLeft; break;\n                        case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageRotate180; break;\n                        case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageNoRotation; break;\n                        default:outputRotation = kGPUImageNoRotation;\n                    }\n                }\n            }\n            else\n            {\n                if 
(_horizontallyMirrorFrontFacingCamera)\n                {\n                    switch(_outputImageOrientation)\n                    {\n                        case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRightFlipVertical; break;\n                        case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotateRightFlipHorizontal; break;\n                        case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageFlipHorizonal; break;\n                        case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageFlipVertical; break;\n                        default:outputRotation = kGPUImageNoRotation;\n                    }\n                }\n                else\n                {\n                    switch(_outputImageOrientation)\n                    {\n                        case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRight; break;\n                        case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotateLeft; break;\n                        case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageNoRotation; break;\n                        case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageRotate180; break;\n                        default:outputRotation = kGPUImageNoRotation;\n                    }\n                }\n            }\n        }\n        \n        for (id<GPUImageInput> currentTarget in targets)\n        {\n            NSInteger indexOfObject = [targets indexOfObject:currentTarget];\n            [currentTarget setInputRotation:outputRotation atIndex:[[targetTextureIndices objectAtIndex:indexOfObject] integerValue]];\n        }\n    });\n}\n\n- (void)setOutputImageOrientation:(UIInterfaceOrientation)newValue;\n{\n    _outputImageOrientation = newValue;\n    [self updateOrientationSendToTargets];\n}\n\n- (void)setHorizontallyMirrorFrontFacingCamera:(BOOL)newValue\n{\n    _horizontallyMirrorFrontFacingCamera = newValue;\n    [self updateOrientationSendToTargets];\n}\n\n- (void)setHorizontallyMirrorRearFacingCamera:(BOOL)newValue\n{\n    _horizontallyMirrorRearFacingCamera = newValue;\n    [self updateOrientationSendToTargets];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImage3x3ConvolutionFilter.h",
    "content": "#import \"GPUImage3x3TextureSamplingFilter.h\"\n\n/** Runs a 3x3 convolution kernel against the image\n */\n@interface GPUImage3x3ConvolutionFilter : GPUImage3x3TextureSamplingFilter\n{\n    GLint convolutionMatrixUniform;\n}\n\n/** Convolution kernel to run against the image\n \n The convolution kernel is a 3x3 matrix of values to apply to the pixel and its 8 surrounding pixels.\n The matrix is specified in row-major order, with the top left pixel being one.one and the bottom right three.three\n If the values in the matrix don't add up to 1.0, the image could be brightened or darkened.\n */\n@property(readwrite, nonatomic) GPUMatrix3x3 convolutionKernel;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImage3x3ConvolutionFilter.m",
    "content": "#import \"GPUImage3x3ConvolutionFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImage3x3ConvolutionFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n\n uniform sampler2D inputImageTexture;\n \n uniform mediump mat3 convolutionMatrix;\n \n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n void main()\n {\n     mediump vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;\n     mediump vec3 bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb;\n     mediump vec3 bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb;\n     mediump vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);\n     mediump vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;\n     mediump vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;\n     mediump vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;\n     mediump vec3 topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).rgb;\n     mediump vec3 topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).rgb;\n\n     mediump vec3 resultColor = topLeftColor * convolutionMatrix[0][0] + topColor * convolutionMatrix[0][1] + topRightColor * convolutionMatrix[0][2];\n     resultColor += leftColor * convolutionMatrix[1][0] + centerColor.rgb * convolutionMatrix[1][1] + rightColor * convolutionMatrix[1][2];\n     resultColor += bottomLeftColor * convolutionMatrix[2][0] + bottomColor * convolutionMatrix[2][1] + bottomRightColor * convolutionMatrix[2][2];\n\n     gl_FragColor = vec4(resultColor, centerColor.a);\n }\n);                                                                         \n#else\nNSString *const kGPUImage3x3ConvolutionFragmentShaderString = SHADER_STRING\n(\n uniform sampler2D inputImageTexture;\n \n uniform mat3 convolutionMatrix;\n \n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n void main()\n {\n     vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;\n     vec3 bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb;\n     vec3 bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb;\n     vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);\n     vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;\n     vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;\n     vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;\n     vec3 topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).rgb;\n     vec3 topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).rgb;\n     \n     vec3 resultColor = topLeftColor * convolutionMatrix[0][0] + topColor * convolutionMatrix[0][1] + topRightColor * 
convolutionMatrix[0][2];\n     resultColor += leftColor * convolutionMatrix[1][0] + centerColor.rgb * convolutionMatrix[1][1] + rightColor * convolutionMatrix[1][2];\n     resultColor += bottomLeftColor * convolutionMatrix[2][0] + bottomColor * convolutionMatrix[2][1] + bottomRightColor * convolutionMatrix[2][2];\n     \n     gl_FragColor = vec4(resultColor, centerColor.a);\n }\n);\n#endif\n\n@implementation GPUImage3x3ConvolutionFilter\n\n@synthesize convolutionKernel = _convolutionKernel;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [self initWithFragmentShaderFromString:kGPUImage3x3ConvolutionFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n\n    self.convolutionKernel = (GPUMatrix3x3){\n        {0.f, 0.f, 0.f},\n        {0.f, 1.f, 0.f},\n        {0.f, 0.f, 0.f}\n    };\n\n    return self;\n}\n\n- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;\n{\n    if (!(self = [super initWithFragmentShaderFromString:fragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    convolutionMatrixUniform = [filterProgram uniformIndex:@\"convolutionMatrix\"];\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setConvolutionKernel:(GPUMatrix3x3)newValue;\n{\n    _convolutionKernel = newValue;\n    \n    [self setMatrix3f:_convolutionKernel forUniform:convolutionMatrixUniform program:filterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImage3x3TextureSamplingFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\nextern NSString *const kGPUImageNearbyTexelSamplingVertexShaderString;\n\n@interface GPUImage3x3TextureSamplingFilter : GPUImageFilter\n{\n    GLint texelWidthUniform, texelHeightUniform;\n    \n    CGFloat texelWidth, texelHeight;\n    BOOL hasOverriddenImageSizeFactor;\n}\n\n// The texel width and height determines how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects.\n@property(readwrite, nonatomic) CGFloat texelWidth; \n@property(readwrite, nonatomic) CGFloat texelHeight; \n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImage3x3TextureSamplingFilter.m",
    "content": "#import \"GPUImage3x3TextureSamplingFilter.h\"\n\n// Override vertex shader to remove dependent texture reads \nNSString *const kGPUImageNearbyTexelSamplingVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n \n uniform float texelWidth;\n uniform float texelHeight; \n \n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n void main()\n {\n     gl_Position = position;\n     \n     vec2 widthStep = vec2(texelWidth, 0.0);\n     vec2 heightStep = vec2(0.0, texelHeight);\n     vec2 widthHeightStep = vec2(texelWidth, texelHeight);\n     vec2 widthNegativeHeightStep = vec2(texelWidth, -texelHeight);\n     \n     textureCoordinate = inputTextureCoordinate.xy;\n     leftTextureCoordinate = inputTextureCoordinate.xy - widthStep;\n     rightTextureCoordinate = inputTextureCoordinate.xy + widthStep;\n     \n     topTextureCoordinate = inputTextureCoordinate.xy - heightStep;\n     topLeftTextureCoordinate = inputTextureCoordinate.xy - widthHeightStep;\n     topRightTextureCoordinate = inputTextureCoordinate.xy + widthNegativeHeightStep;\n     \n     bottomTextureCoordinate = inputTextureCoordinate.xy + heightStep;\n     bottomLeftTextureCoordinate = inputTextureCoordinate.xy - widthNegativeHeightStep;\n     bottomRightTextureCoordinate = inputTextureCoordinate.xy + widthHeightStep;\n }\n);\n\n\n@implementation GPUImage3x3TextureSamplingFilter\n\n@synthesize texelWidth = _texelWidth; \n@synthesize texelHeight = _texelHeight; \n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;\n{\n    if (!(self = [super initWithVertexShaderFromString:vertexShaderString fragmentShaderFromString:fragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    texelWidthUniform = [filterProgram uniformIndex:@\"texelWidth\"];\n    texelHeightUniform = [filterProgram uniformIndex:@\"texelHeight\"];\n    \n    return self;\n}\n\n- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;\n{\n    if (!(self = [self initWithVertexShaderFromString:kGPUImageNearbyTexelSamplingVertexShaderString fragmentShaderFromString:fragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    return self;\n}\n\n- (void)setupFilterForSize:(CGSize)filterFrameSize;\n{\n    if (!hasOverriddenImageSizeFactor)\n    {\n        _texelWidth = 1.0 / filterFrameSize.width;\n        _texelHeight = 1.0 / filterFrameSize.height;\n        \n        runSynchronouslyOnVideoProcessingQueue(^{\n            [GPUImageContext setActiveShaderProgram:filterProgram];\n            if (GPUImageRotationSwapsWidthAndHeight(inputRotation))\n            {\n                glUniform1f(texelWidthUniform, _texelHeight);\n                glUniform1f(texelHeightUniform, _texelWidth);\n            }\n            else\n            {\n                glUniform1f(texelWidthUniform, _texelWidth);\n                glUniform1f(texelHeightUniform, _texelHeight);\n            }\n        });\n    }\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setTexelWidth:(CGFloat)newValue;\n{\n    hasOverriddenImageSizeFactor = YES;\n    
_texelWidth = newValue;\n    \n    [self setFloat:_texelWidth forUniform:texelWidthUniform program:filterProgram];\n}\n\n- (void)setTexelHeight:(CGFloat)newValue;\n{\n    hasOverriddenImageSizeFactor = YES;\n    _texelHeight = newValue;\n\n    [self setFloat:_texelHeight forUniform:texelHeightUniform program:filterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAdaptiveThresholdFilter.h",
    "content": "#import \"GPUImageFilterGroup.h\"\n\n@interface GPUImageAdaptiveThresholdFilter : GPUImageFilterGroup\n\n/** A multiplier for the background averaging blur radius in pixels, with a default of 4\n */\n@property(readwrite, nonatomic) CGFloat blurRadiusInPixels;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAdaptiveThresholdFilter.m",
    "content": "#import \"GPUImageAdaptiveThresholdFilter.h\"\n#import \"GPUImageFilter.h\"\n#import \"GPUImageTwoInputFilter.h\"\n#import \"GPUImageGrayscaleFilter.h\"\n#import \"GPUImageBoxBlurFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageAdaptiveThresholdFragmentShaderString = SHADER_STRING\n( \n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2; \n \n void main()\n {\n     highp float blurredInput = texture2D(inputImageTexture, textureCoordinate).r;\n     highp float localLuminance = texture2D(inputImageTexture2, textureCoordinate2).r;\n     highp float thresholdResult = step(blurredInput - 0.05, localLuminance);\n     \n     gl_FragColor = vec4(vec3(thresholdResult), 1.0);\n }\n);\n#else\nNSString *const kGPUImageAdaptiveThresholdFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     float blurredInput = texture2D(inputImageTexture, textureCoordinate).r;\n     float localLuminance = texture2D(inputImageTexture2, textureCoordinate2).r;\n     float thresholdResult = step(blurredInput - 0.05, localLuminance);\n     \n     gl_FragColor = vec4(vec3(thresholdResult), 1.0);\n }\n);\n#endif\n\n@interface GPUImageAdaptiveThresholdFilter()\n{\n    GPUImageBoxBlurFilter *boxBlurFilter;\n}\n@end\n\n@implementation GPUImageAdaptiveThresholdFilter\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n\n    // First pass: reduce to luminance\n    GPUImageGrayscaleFilter *luminanceFilter = [[GPUImageGrayscaleFilter alloc] init];\n    [self addFilter:luminanceFilter];\n    \n    // Second pass: perform a box blur\n    boxBlurFilter = [[GPUImageBoxBlurFilter alloc] init];\n    [self addFilter:boxBlurFilter];\n    \n    // Third pass: compare the blurred background luminance to the local value\n    GPUImageFilter *adaptiveThresholdFilter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:kGPUImageAdaptiveThresholdFragmentShaderString];\n    [self addFilter:adaptiveThresholdFilter];\n    \n    [luminanceFilter addTarget:boxBlurFilter];\n    \n    [boxBlurFilter addTarget:adaptiveThresholdFilter];\n    // To prevent double updating of this filter, disable updates from the sharp luminance image side\n    [luminanceFilter addTarget:adaptiveThresholdFilter];\n    \n    self.initialFilters = [NSArray arrayWithObject:luminanceFilter];\n    self.terminalFilter = adaptiveThresholdFilter;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setBlurRadiusInPixels:(CGFloat)newValue;\n{\n    boxBlurFilter.blurRadiusInPixels = newValue;\n}\n\n- (CGFloat)blurRadiusInPixels;\n{\n    return boxBlurFilter.blurRadiusInPixels;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAddBlendFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageAddBlendFilter : GPUImageTwoInputFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAddBlendFilter.m",
    "content": "#import \"GPUImageAddBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageAddBlendFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n\t lowp vec4 base = texture2D(inputImageTexture, textureCoordinate);\n\t lowp vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);\n\t \n   mediump float r;\n   if (overlay.r * base.a + base.r * overlay.a >= overlay.a * base.a) {\n     r = overlay.a * base.a + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);\n   } else {\n     r = overlay.r + base.r;\n   }\n\n   mediump float g;\n   if (overlay.g * base.a + base.g * overlay.a >= overlay.a * base.a) {\n     g = overlay.a * base.a + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);\n   } else {\n     g = overlay.g + base.g;\n   }\n\n   mediump float b;\n   if (overlay.b * base.a + base.b * overlay.a >= overlay.a * base.a) {\n     b = overlay.a * base.a + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);\n   } else {\n     b = overlay.b + base.b;\n   }\n\n   mediump float a = overlay.a + base.a - overlay.a * base.a;\n   \n\t gl_FragColor = vec4(r, g, b, a);\n }\n);\n#else\nNSString *const kGPUImageAddBlendFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n\t vec4 base = texture2D(inputImageTexture, textureCoordinate);\n\t vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);\n\t \n     float r;\n     if (overlay.r * base.a + base.r * overlay.a >= overlay.a * base.a) {\n         r = overlay.a * base.a + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);\n     } else {\n         r = overlay.r + base.r;\n     }\n     \n     float g;\n     if (overlay.g * base.a + base.g * overlay.a >= overlay.a * base.a) {\n         g = overlay.a * base.a + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);\n     } else {\n         g = overlay.g + base.g;\n     }\n     \n     float b;\n     if (overlay.b * base.a + base.b * overlay.a >= overlay.a * base.a) {\n         b = overlay.a * base.a + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);\n     } else {\n         b = overlay.b + base.b;\n     }\n     \n     float a = overlay.a + base.a - overlay.a * base.a;\n     \n\t gl_FragColor = vec4(r, g, b, a);\n }\n);\n#endif\n\n\n\n@implementation GPUImageAddBlendFilter\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageAddBlendFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAlphaBlendFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageAlphaBlendFilter : GPUImageTwoInputFilter\n{\n    GLint mixUniform;\n}\n\n// Mix ranges from 0.0 (only image 1) to 1.0 (only image 2), with 1.0 as the normal level\n@property(readwrite, nonatomic) CGFloat mix; \n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAlphaBlendFilter.m",
    "content": "#import \"GPUImageAlphaBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageAlphaBlendFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n uniform lowp float mixturePercent;\n\n void main()\n {\n\t lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n\t lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);\n\t \n\t gl_FragColor = vec4(mix(textureColor.rgb, textureColor2.rgb, textureColor2.a * mixturePercent), textureColor.a);\n }\n);\n#else\nNSString *const kGPUImageAlphaBlendFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n uniform float mixturePercent;\n \n void main()\n {\n\t vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n\t vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);\n\t \n\t gl_FragColor = vec4(mix(textureColor.rgb, textureColor2.rgb, textureColor2.a * mixturePercent), textureColor.a);\n }\n);\n#endif\n\n@implementation GPUImageAlphaBlendFilter\n\n@synthesize mix = _mix;\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageAlphaBlendFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    mixUniform = [filterProgram uniformIndex:@\"mixturePercent\"];\n    self.mix = 0.5;\n    \n    return self;\n}\n\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setMix:(CGFloat)newValue;\n{\n    _mix = newValue;\n    \n    [self setFloat:_mix forUniform:mixUniform program:filterProgram];\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAmatorkaFilter.h",
    "content": "#import \"GPUImageFilterGroup.h\"\n\n@class GPUImagePicture;\n\n/** A photo filter based on Photoshop action by Amatorka\n    http://amatorka.deviantart.com/art/Amatorka-Action-2-121069631\n */\n\n// Note: If you want to use this effect you have to add lookup_amatorka.png\n//       from Resources folder to your application bundle.\n\n@interface GPUImageAmatorkaFilter : GPUImageFilterGroup\n{\n    GPUImagePicture *lookupImageSource;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAmatorkaFilter.m",
    "content": "#import \"GPUImageAmatorkaFilter.h\"\n#import \"GPUImagePicture.h\"\n#import \"GPUImageLookupFilter.h\"\n\n@implementation GPUImageAmatorkaFilter\n\n- (id)init;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n    UIImage *image = [UIImage imageNamed:@\"lookup_amatorka.png\"];\n#else\n    NSImage *image = [NSImage imageNamed:@\"lookup_amatorka.png\"];\n#endif\n    \n    NSAssert(image, @\"To use GPUImageAmatorkaFilter you need to add lookup_amatorka.png from GPUImage/framework/Resources to your application bundle.\");\n    \n    lookupImageSource = [[GPUImagePicture alloc] initWithImage:image];\n    GPUImageLookupFilter *lookupFilter = [[GPUImageLookupFilter alloc] init];\n    [self addFilter:lookupFilter];\n    \n    [lookupImageSource addTarget:lookupFilter atTextureLocation:1];\n    [lookupImageSource processImage];\n\n    self.initialFilters = [NSArray arrayWithObjects:lookupFilter, nil];\n    self.terminalFilter = lookupFilter;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAverageColor.h",
    "content": "#import \"GPUImageFilter.h\"\n\nextern NSString *const kGPUImageColorAveragingVertexShaderString;\n\n@interface GPUImageAverageColor : GPUImageFilter\n{\n    GLint texelWidthUniform, texelHeightUniform;\n    \n    NSUInteger numberOfStages;\n    \n    GLubyte *rawImagePixels;\n    CGSize finalStageSize;\n}\n\n// This block is called on the completion of color averaging for a frame\n@property(nonatomic, copy) void(^colorAverageProcessingFinishedBlock)(CGFloat redComponent, CGFloat greenComponent, CGFloat blueComponent, CGFloat alphaComponent, CMTime frameTime);\n\n- (void)extractAverageColorAtFrameTime:(CMTime)frameTime;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAverageColor.m",
    "content": "#import \"GPUImageAverageColor.h\"\n\nNSString *const kGPUImageColorAveragingVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n \n uniform float texelWidth;\n uniform float texelHeight;\n \n varying vec2 upperLeftInputTextureCoordinate;\n varying vec2 upperRightInputTextureCoordinate;\n varying vec2 lowerLeftInputTextureCoordinate;\n varying vec2 lowerRightInputTextureCoordinate;\n \n void main()\n {\n     gl_Position = position;\n     \n     upperLeftInputTextureCoordinate = inputTextureCoordinate.xy + vec2(-texelWidth, -texelHeight);\n     upperRightInputTextureCoordinate = inputTextureCoordinate.xy + vec2(texelWidth, -texelHeight);\n     lowerLeftInputTextureCoordinate = inputTextureCoordinate.xy + vec2(-texelWidth, texelHeight);\n     lowerRightInputTextureCoordinate = inputTextureCoordinate.xy + vec2(texelWidth, texelHeight);\n }\n );\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageColorAveragingFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n uniform sampler2D inputImageTexture;\n \n varying highp vec2 outputTextureCoordinate;\n \n varying highp vec2 upperLeftInputTextureCoordinate;\n varying highp vec2 upperRightInputTextureCoordinate;\n varying highp vec2 lowerLeftInputTextureCoordinate;\n varying highp vec2 lowerRightInputTextureCoordinate;\n \n void main()\n {\n     highp vec4 upperLeftColor = texture2D(inputImageTexture, upperLeftInputTextureCoordinate);\n     highp vec4 upperRightColor = texture2D(inputImageTexture, upperRightInputTextureCoordinate);\n     highp vec4 lowerLeftColor = texture2D(inputImageTexture, lowerLeftInputTextureCoordinate);\n     highp vec4 lowerRightColor = texture2D(inputImageTexture, lowerRightInputTextureCoordinate);\n     \n     gl_FragColor = 0.25 * (upperLeftColor + upperRightColor + lowerLeftColor + lowerRightColor);\n }\n);\n#else\nNSString *const kGPUImageColorAveragingFragmentShaderString = SHADER_STRING\n(\n uniform sampler2D inputImageTexture;\n \n varying vec2 outputTextureCoordinate;\n \n varying vec2 upperLeftInputTextureCoordinate;\n varying vec2 upperRightInputTextureCoordinate;\n varying vec2 lowerLeftInputTextureCoordinate;\n varying vec2 lowerRightInputTextureCoordinate;\n \n void main()\n {\n     vec4 upperLeftColor = texture2D(inputImageTexture, upperLeftInputTextureCoordinate);\n     vec4 upperRightColor = texture2D(inputImageTexture, upperRightInputTextureCoordinate);\n     vec4 lowerLeftColor = texture2D(inputImageTexture, lowerLeftInputTextureCoordinate);\n     vec4 lowerRightColor = texture2D(inputImageTexture, lowerRightInputTextureCoordinate);\n     \n     gl_FragColor = 0.25 * (upperLeftColor + upperRightColor + lowerLeftColor + lowerRightColor);\n }\n);\n#endif\n\n@implementation GPUImageAverageColor\n\n@synthesize colorAverageProcessingFinishedBlock = _colorAverageProcessingFinishedBlock;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithVertexShaderFromString:kGPUImageColorAveragingVertexShaderString fragmentShaderFromString:kGPUImageColorAveragingFragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    texelWidthUniform = [filterProgram uniformIndex:@\"texelWidth\"];\n    texelHeightUniform = [filterProgram uniformIndex:@\"texelHeight\"];\n    finalStageSize = CGSizeMake(1.0, 1.0);\n    \n    __unsafe_unretained GPUImageAverageColor *weakSelf = self;\n    [self setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime 
frameTime) {\n        [weakSelf extractAverageColorAtFrameTime:frameTime];\n    }];\n\n    return self;\n}\n\n- (void)dealloc;\n{\n    if (rawImagePixels != NULL)\n    {\n        free(rawImagePixels);\n    }\n}\n\n#pragma mark -\n#pragma mark Managing the display FBOs\n\n- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;\n{\n    if (self.preventRendering)\n    {\n        [firstInputFramebuffer unlock];\n        return;\n    }\n    \n    outputFramebuffer = nil;\n    [GPUImageContext setActiveShaderProgram:filterProgram];\n\n    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);\n    glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);\n\n    GLuint currentTexture = [firstInputFramebuffer texture];\n    \n    NSUInteger numberOfReductionsInX = floor(log(inputTextureSize.width) / log(4.0));\n    NSUInteger numberOfReductionsInY = floor(log(inputTextureSize.height) / log(4.0));\n    NSUInteger reductionsToHitSideLimit = MIN(numberOfReductionsInX, numberOfReductionsInY);\n    for (NSUInteger currentReduction = 0; currentReduction < reductionsToHitSideLimit; currentReduction++)\n    {\n        CGSize currentStageSize = CGSizeMake(floor(inputTextureSize.width / pow(4.0, currentReduction + 1.0)), floor(inputTextureSize.height / pow(4.0, currentReduction + 1.0)));\n\n        [outputFramebuffer unlock];\n        outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:currentStageSize textureOptions:self.outputTextureOptions onlyTexture:NO];\n        [outputFramebuffer activateFramebuffer];\n\n        glClearColor(0.0f, 0.0f, 0.0f, 1.0f);\n        glClear(GL_COLOR_BUFFER_BIT);\n        \n        glActiveTexture(GL_TEXTURE2);\n        glBindTexture(GL_TEXTURE_2D, currentTexture);\n        \n        glUniform1i(filterInputTextureUniform, 2);\n        \n        glUniform1f(texelWidthUniform, 0.25 / currentStageSize.width);\n        glUniform1f(texelHeightUniform, 0.25 / currentStageSize.height);\n        \n        glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\n\n        currentTexture = [outputFramebuffer texture];\n        finalStageSize = currentStageSize;\n    }\n\n    [firstInputFramebuffer unlock];\n}\n\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n    inputRotation = kGPUImageNoRotation;\n}\n\n- (void)extractAverageColorAtFrameTime:(CMTime)frameTime;\n{\n    runSynchronouslyOnVideoProcessingQueue(^{\n        // we need a normal color texture for averaging the color values\n        NSAssert(self.outputTextureOptions.internalFormat == GL_RGBA, @\"The output texture internal format for this filter must be GL_RGBA.\");\n        NSAssert(self.outputTextureOptions.type == GL_UNSIGNED_BYTE, @\"The type of the output texture of this filter must be GL_UNSIGNED_BYTE.\");\n        \n        NSUInteger totalNumberOfPixels = round(finalStageSize.width * finalStageSize.height);\n        \n        if (rawImagePixels == NULL)\n        {\n            rawImagePixels = (GLubyte *)malloc(totalNumberOfPixels * 4);\n        }\n        \n        [GPUImageContext useImageProcessingContext];\n        [outputFramebuffer activateFramebuffer];\n        glReadPixels(0, 0, (int)finalStageSize.width, (int)finalStageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels);\n        \n        NSUInteger redTotal = 0, greenTotal = 0, blueTotal = 0, alphaTotal = 0;\n        NSUInteger byteIndex = 0;\n        for 
(NSUInteger currentPixel = 0; currentPixel < totalNumberOfPixels; currentPixel++)\n        {\n            redTotal += rawImagePixels[byteIndex++];\n            greenTotal += rawImagePixels[byteIndex++];\n            blueTotal += rawImagePixels[byteIndex++];\n            alphaTotal += rawImagePixels[byteIndex++];\n        }\n        \n        CGFloat normalizedRedTotal = (CGFloat)redTotal / (CGFloat)totalNumberOfPixels / 255.0;\n        CGFloat normalizedGreenTotal = (CGFloat)greenTotal / (CGFloat)totalNumberOfPixels / 255.0;\n        CGFloat normalizedBlueTotal = (CGFloat)blueTotal / (CGFloat)totalNumberOfPixels / 255.0;\n        CGFloat normalizedAlphaTotal = (CGFloat)alphaTotal / (CGFloat)totalNumberOfPixels / 255.0;\n        \n        if (_colorAverageProcessingFinishedBlock != NULL)\n        {\n            _colorAverageProcessingFinishedBlock(normalizedRedTotal, normalizedGreenTotal, normalizedBlueTotal, normalizedAlphaTotal, frameTime);\n        }\n    });\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAverageLuminanceThresholdFilter.h",
    "content": "#import \"GPUImageFilterGroup.h\"\n\n@interface GPUImageAverageLuminanceThresholdFilter : GPUImageFilterGroup\n\n// This is multiplied by the continually calculated average image luminosity to arrive at the final threshold. Default is 1.0.\n@property(readwrite, nonatomic) CGFloat thresholdMultiplier;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAverageLuminanceThresholdFilter.m",
    "content": "#import \"GPUImageAverageLuminanceThresholdFilter.h\"\n#import \"GPUImageLuminosity.h\"\n#import \"GPUImageLuminanceThresholdFilter.h\"\n\n@interface GPUImageAverageLuminanceThresholdFilter()\n{\n    GPUImageLuminosity *luminosityFilter;\n    GPUImageLuminanceThresholdFilter *luminanceThresholdFilter;\n}\n@end\n\n@implementation GPUImageAverageLuminanceThresholdFilter\n\n@synthesize thresholdMultiplier = _thresholdMultiplier;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n    self.thresholdMultiplier = 1.0;\n    \n    luminosityFilter = [[GPUImageLuminosity alloc] init];\n    [self addFilter:luminosityFilter];\n    \n    luminanceThresholdFilter = [[GPUImageLuminanceThresholdFilter alloc] init];\n    [self addFilter:luminanceThresholdFilter];\n    \n    __unsafe_unretained GPUImageAverageLuminanceThresholdFilter *weakSelf = self;\n    __unsafe_unretained GPUImageLuminanceThresholdFilter *weakThreshold = luminanceThresholdFilter;\n    \n    [luminosityFilter setLuminosityProcessingFinishedBlock:^(CGFloat luminosity, CMTime frameTime) {\n        weakThreshold.threshold = luminosity * weakSelf.thresholdMultiplier;\n    }];\n    \n    self.initialFilters = [NSArray arrayWithObjects:luminosityFilter, luminanceThresholdFilter, nil];\n    self.terminalFilter = luminanceThresholdFilter;\n    \n    return self;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageBilateralFilter.h",
    "content": "#import \"GPUImageGaussianBlurFilter.h\"\n\n@interface GPUImageBilateralFilter : GPUImageGaussianBlurFilter\n{\n    CGFloat firstDistanceNormalizationFactorUniform;\n    CGFloat secondDistanceNormalizationFactorUniform;\n}\n// A normalization factor for the distance between central color and sample color.\n@property(nonatomic, readwrite) CGFloat distanceNormalizationFactor;\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageBilateralFilter.m",
    "content": "#import \"GPUImageBilateralFilter.h\"\n\nNSString *const kGPUImageBilateralBlurVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n \n const int GAUSSIAN_SAMPLES = 9;\n \n uniform float texelWidthOffset;\n uniform float texelHeightOffset;\n \n varying vec2 textureCoordinate;\n varying vec2 blurCoordinates[GAUSSIAN_SAMPLES];\n \n void main()\n {\n     gl_Position = position;\n     textureCoordinate = inputTextureCoordinate.xy;\n     \n     // Calculate the positions for the blur\n     int multiplier = 0;\n     vec2 blurStep;\n     vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n     \n     for (int i = 0; i < GAUSSIAN_SAMPLES; i++)\n     {\n         multiplier = (i - ((GAUSSIAN_SAMPLES - 1) / 2));\n         // Blur in x (horizontal)\n         blurStep = float(multiplier) * singleStepOffset;\n         blurCoordinates[i] = inputTextureCoordinate.xy + blurStep;\n     }\n }\n);\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageBilateralFilterFragmentShaderString = SHADER_STRING\n(\n uniform sampler2D inputImageTexture;\n \n const lowp int GAUSSIAN_SAMPLES = 9;\n \n varying highp vec2 textureCoordinate;\n varying highp vec2 blurCoordinates[GAUSSIAN_SAMPLES];\n \n uniform mediump float distanceNormalizationFactor;\n \n void main()\n {\n     lowp vec4 centralColor;\n     lowp float gaussianWeightTotal;\n     lowp vec4 sum;\n     lowp vec4 sampleColor;\n     lowp float distanceFromCentralColor;\n     lowp float gaussianWeight;\n     \n     centralColor = texture2D(inputImageTexture, blurCoordinates[4]);\n     gaussianWeightTotal = 0.18;\n     sum = centralColor * 0.18;\n     \n     sampleColor = texture2D(inputImageTexture, blurCoordinates[0]);\n     distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);\n     gaussianWeight = 0.05 * (1.0 - distanceFromCentralColor);\n     gaussianWeightTotal += gaussianWeight;\n     sum += sampleColor * gaussianWeight;\n\n     sampleColor = texture2D(inputImageTexture, blurCoordinates[1]);\n     distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);\n     gaussianWeight = 0.09 * (1.0 - distanceFromCentralColor);\n     gaussianWeightTotal += gaussianWeight;\n     sum += sampleColor * gaussianWeight;\n\n     sampleColor = texture2D(inputImageTexture, blurCoordinates[2]);\n     distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);\n     gaussianWeight = 0.12 * (1.0 - distanceFromCentralColor);\n     gaussianWeightTotal += gaussianWeight;\n     sum += sampleColor * gaussianWeight;\n\n     sampleColor = texture2D(inputImageTexture, blurCoordinates[3]);\n     distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);\n     gaussianWeight = 0.15 * (1.0 - distanceFromCentralColor);\n     gaussianWeightTotal += gaussianWeight;\n     sum += sampleColor * gaussianWeight;\n\n     sampleColor = texture2D(inputImageTexture, blurCoordinates[5]);\n     distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);\n     gaussianWeight = 0.15 * (1.0 - distanceFromCentralColor);\n     gaussianWeightTotal += gaussianWeight;\n     sum += sampleColor * gaussianWeight;\n\n     sampleColor = texture2D(inputImageTexture, blurCoordinates[6]);\n     distanceFromCentralColor = min(distance(centralColor, sampleColor) * 
distanceNormalizationFactor, 1.0);\n     gaussianWeight = 0.12 * (1.0 - distanceFromCentralColor);\n     gaussianWeightTotal += gaussianWeight;\n     sum += sampleColor * gaussianWeight;\n\n     sampleColor = texture2D(inputImageTexture, blurCoordinates[7]);\n     distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);\n     gaussianWeight = 0.09 * (1.0 - distanceFromCentralColor);\n     gaussianWeightTotal += gaussianWeight;\n     sum += sampleColor * gaussianWeight;\n\n     sampleColor = texture2D(inputImageTexture, blurCoordinates[8]);\n     distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);\n     gaussianWeight = 0.05 * (1.0 - distanceFromCentralColor);\n     gaussianWeightTotal += gaussianWeight;\n     sum += sampleColor * gaussianWeight;\n     \n     gl_FragColor = sum / gaussianWeightTotal;\n }\n);\n#else\nNSString *const kGPUImageBilateralFilterFragmentShaderString = SHADER_STRING\n(\n uniform sampler2D inputImageTexture;\n \n const int GAUSSIAN_SAMPLES = 9;\n \n varying vec2 textureCoordinate;\n varying vec2 blurCoordinates[GAUSSIAN_SAMPLES];\n \n uniform float distanceNormalizationFactor;\n \n void main()\n {\n     vec4 centralColor;\n     float gaussianWeightTotal;\n     vec4 sum;\n     vec4 sampleColor;\n     float distanceFromCentralColor;\n     float gaussianWeight;\n     \n     centralColor = texture2D(inputImageTexture, blurCoordinates[4]);\n     gaussianWeightTotal = 0.18;\n     sum = centralColor * 0.18;\n     \n     sampleColor = texture2D(inputImageTexture, blurCoordinates[0]);\n     distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);\n     gaussianWeight = 0.05 * (1.0 - distanceFromCentralColor);\n     gaussianWeightTotal += gaussianWeight;\n     sum += sampleColor * gaussianWeight;\n     \n     sampleColor = texture2D(inputImageTexture, blurCoordinates[1]);\n     distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);\n     gaussianWeight = 0.09 * (1.0 - distanceFromCentralColor);\n     gaussianWeightTotal += gaussianWeight;\n     sum += sampleColor * gaussianWeight;\n     \n     sampleColor = texture2D(inputImageTexture, blurCoordinates[2]);\n     distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);\n     gaussianWeight = 0.12 * (1.0 - distanceFromCentralColor);\n     gaussianWeightTotal += gaussianWeight;\n     sum += sampleColor * gaussianWeight;\n     \n     sampleColor = texture2D(inputImageTexture, blurCoordinates[3]);\n     distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);\n     gaussianWeight = 0.15 * (1.0 - distanceFromCentralColor);\n     gaussianWeightTotal += gaussianWeight;\n     sum += sampleColor * gaussianWeight;\n     \n     sampleColor = texture2D(inputImageTexture, blurCoordinates[5]);\n     distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);\n     gaussianWeight = 0.15 * (1.0 - distanceFromCentralColor);\n     gaussianWeightTotal += gaussianWeight;\n     sum += sampleColor * gaussianWeight;\n     \n     sampleColor = texture2D(inputImageTexture, blurCoordinates[6]);\n     distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);\n     gaussianWeight = 0.12 * (1.0 - distanceFromCentralColor);\n     gaussianWeightTotal += gaussianWeight;\n     sum += 
sampleColor * gaussianWeight;\n     \n     sampleColor = texture2D(inputImageTexture, blurCoordinates[7]);\n     distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);\n     gaussianWeight = 0.09 * (1.0 - distanceFromCentralColor);\n     gaussianWeightTotal += gaussianWeight;\n     sum += sampleColor * gaussianWeight;\n     \n     sampleColor = texture2D(inputImageTexture, blurCoordinates[8]);\n     distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);\n     gaussianWeight = 0.05 * (1.0 - distanceFromCentralColor);\n     gaussianWeightTotal += gaussianWeight;\n     sum += sampleColor * gaussianWeight;\n     \n     gl_FragColor = sum / gaussianWeightTotal;\n }\n);\n#endif\n\n@implementation GPUImageBilateralFilter\n\n@synthesize distanceNormalizationFactor = _distanceNormalizationFactor;\n\n- (id)init;\n{\n    \n    if (!(self = [super initWithFirstStageVertexShaderFromString:kGPUImageBilateralBlurVertexShaderString\n                              firstStageFragmentShaderFromString:kGPUImageBilateralFilterFragmentShaderString\n                               secondStageVertexShaderFromString:kGPUImageBilateralBlurVertexShaderString\n                             secondStageFragmentShaderFromString:kGPUImageBilateralFilterFragmentShaderString])) {\n        return nil;\n    }\n    \n    firstDistanceNormalizationFactorUniform  = [filterProgram uniformIndex:@\"distanceNormalizationFactor\"];\n    secondDistanceNormalizationFactorUniform = [filterProgram uniformIndex:@\"distanceNormalizationFactor\"];\n\n    self.texelSpacingMultiplier = 4.0;\n    self.distanceNormalizationFactor = 8.0;\n\n    \n    return self;\n}\n\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setDistanceNormalizationFactor:(CGFloat)newValue\n{\n    _distanceNormalizationFactor = newValue;\n    \n    [self setFloat:newValue\n        forUniform:firstDistanceNormalizationFactorUniform\n           program:filterProgram];\n    \n    [self setFloat:newValue\n        forUniform:secondDistanceNormalizationFactorUniform\n           program:secondFilterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageBoxBlurFilter.h",
    "content": "#import \"GPUImageGaussianBlurFilter.h\"\n\n/** A hardware-accelerated box blur of an image\n */\n@interface GPUImageBoxBlurFilter : GPUImageGaussianBlurFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageBoxBlurFilter.m",
    "content": "#import \"GPUImageBoxBlurFilter.h\"\n\n\n@implementation GPUImageBoxBlurFilter\n\n+ (NSString *)vertexShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;\n{\n    if (blurRadius < 1)\n    {\n        return kGPUImageVertexShaderString;\n    }\n\n    // From these weights we calculate the offsets to read interpolated values from\n    NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7);\n    \n    NSMutableString *shaderString = [[NSMutableString alloc] init];\n    // Header\n    [shaderString appendFormat:@\"\\\n     attribute vec4 position;\\n\\\n     attribute vec4 inputTextureCoordinate;\\n\\\n     \\n\\\n     uniform float texelWidthOffset;\\n\\\n     uniform float texelHeightOffset;\\n\\\n     \\n\\\n     varying vec2 blurCoordinates[%lu];\\n\\\n     \\n\\\n     void main()\\n\\\n     {\\n\\\n     gl_Position = position;\\n\\\n     \\n\\\n     vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\\n\", (unsigned long)(1 + (numberOfOptimizedOffsets * 2))];\n    \n    // Inner offset loop\n    [shaderString appendString:@\"blurCoordinates[0] = inputTextureCoordinate.xy;\\n\"];\n    for (NSUInteger currentOptimizedOffset = 0; currentOptimizedOffset < numberOfOptimizedOffsets; currentOptimizedOffset++)\n    {\n        GLfloat optimizedOffset = (GLfloat)(currentOptimizedOffset * 2) + 1.5;\n        \n        [shaderString appendFormat:@\"\\\n         blurCoordinates[%lu] = inputTextureCoordinate.xy + singleStepOffset * %f;\\n\\\n         blurCoordinates[%lu] = inputTextureCoordinate.xy - singleStepOffset * %f;\\n\", (unsigned long)((currentOptimizedOffset * 2) + 1), optimizedOffset, (unsigned long)((currentOptimizedOffset * 2) + 2), optimizedOffset];\n    }\n    \n    // Footer\n    [shaderString appendString:@\"}\\n\"];\n\n    return shaderString;\n}\n\n+ (NSString *)fragmentShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;\n{\n    if (blurRadius < 1)\n    {\n        return kGPUImagePassthroughFragmentShaderString;\n    }\n\n    NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7);\n    NSUInteger trueNumberOfOptimizedOffsets = blurRadius / 2 + (blurRadius % 2);\n    \n    NSMutableString *shaderString = [[NSMutableString alloc] init];\n    \n    // Header\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n    [shaderString appendFormat:@\"\\\n     uniform sampler2D inputImageTexture;\\n\\\n     uniform highp float texelWidthOffset;\\n\\\n     uniform highp float texelHeightOffset;\\n\\\n     \\n\\\n     varying highp vec2 blurCoordinates[%lu];\\n\\\n     \\n\\\n     void main()\\n\\\n     {\\n\\\n     lowp vec4 sum = vec4(0.0);\\n\", (unsigned long)(1 + (numberOfOptimizedOffsets * 2)) ];\n#else\n    [shaderString appendFormat:@\"\\\n     uniform sampler2D inputImageTexture;\\n\\\n     uniform float texelWidthOffset;\\n\\\n     uniform float texelHeightOffset;\\n\\\n     \\n\\\n     varying vec2 blurCoordinates[%lu];\\n\\\n     \\n\\\n     void main()\\n\\\n     {\\n\\\n     vec4 sum = vec4(0.0);\\n\", 1 + (numberOfOptimizedOffsets * 2) ];\n#endif\n    \n    GLfloat boxWeight = 1.0 / (GLfloat)((blurRadius * 2) + 1);\n    \n    // Inner texture loop\n    [shaderString appendFormat:@\"sum += texture2D(inputImageTexture, blurCoordinates[0]) * %f;\\n\", boxWeight];\n    \n    for (NSUInteger currentBlurCoordinateIndex = 0; currentBlurCoordinateIndex < numberOfOptimizedOffsets; currentBlurCoordinateIndex++)\n    {\n        [shaderString appendFormat:@\"sum += 
texture2D(inputImageTexture, blurCoordinates[%lu]) * %f;\\n\", (unsigned long)((currentBlurCoordinateIndex * 2) + 1), boxWeight * 2.0];\n        [shaderString appendFormat:@\"sum += texture2D(inputImageTexture, blurCoordinates[%lu]) * %f;\\n\", (unsigned long)((currentBlurCoordinateIndex * 2) + 2), boxWeight * 2.0];\n    }\n    \n    // If the number of required samples exceeds the amount we can pass in via varyings, we have to do dependent texture reads in the fragment shader\n    if (trueNumberOfOptimizedOffsets > numberOfOptimizedOffsets)\n    {\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n        [shaderString appendString:@\"highp vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\\n\"];\n#else\n        [shaderString appendString:@\"vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\\n\"];\n#endif\n        \n        for (NSUInteger currentOverlowTextureRead = numberOfOptimizedOffsets; currentOverlowTextureRead < trueNumberOfOptimizedOffsets; currentOverlowTextureRead++)\n        {\n            GLfloat optimizedOffset = (GLfloat)(currentOverlowTextureRead * 2) + 1.5;\n            \n            [shaderString appendFormat:@\"sum += texture2D(inputImageTexture, blurCoordinates[0] + singleStepOffset * %f) * %f;\\n\", optimizedOffset, boxWeight * 2.0];\n            [shaderString appendFormat:@\"sum += texture2D(inputImageTexture, blurCoordinates[0] - singleStepOffset * %f) * %f;\\n\", optimizedOffset, boxWeight * 2.0];\n        }\n    }\n    \n    // Footer\n    [shaderString appendString:@\"\\\n     gl_FragColor = sum;\\n\\\n     }\\n\"];\n    \n    return shaderString;\n}\n\n- (void)setupFilterForSize:(CGSize)filterFrameSize;\n{\n    [super setupFilterForSize:filterFrameSize];\n    \n    if (shouldResizeBlurRadiusWithImageSize == YES)\n    {\n        \n    }\n}\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    //    NSString *currentGaussianBlurVertexShader = [GPUImageGaussianBlurFilter vertexShaderForStandardGaussianOfRadius:4 sigma:2.0];\n    //    NSString *currentGaussianBlurFragmentShader = [GPUImageGaussianBlurFilter fragmentShaderForStandardGaussianOfRadius:4 sigma:2.0];\n    \n    NSString *currentBoxBlurVertexShader = [[self class] vertexShaderForOptimizedBlurOfRadius:4 sigma:0.0];\n    NSString *currentBoxBlurFragmentShader = [[self class] fragmentShaderForOptimizedBlurOfRadius:4 sigma:0.0];\n    \n    if (!(self = [super initWithFirstStageVertexShaderFromString:currentBoxBlurVertexShader firstStageFragmentShaderFromString:currentBoxBlurFragmentShader secondStageVertexShaderFromString:currentBoxBlurVertexShader secondStageFragmentShaderFromString:currentBoxBlurFragmentShader]))\n    {\n        return nil;\n    }\n    \n    _blurRadiusInPixels = 4.0;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setBlurRadiusInPixels:(CGFloat)newValue;\n{\n    CGFloat newBlurRadius = round(round(newValue / 2.0) * 2.0); // For now, only do even radii\n    \n    if (newBlurRadius != _blurRadiusInPixels)\n    {\n        _blurRadiusInPixels = newBlurRadius;\n        \n        NSString *newGaussianBlurVertexShader = [[self class] vertexShaderForOptimizedBlurOfRadius:_blurRadiusInPixels sigma:0.0];\n        NSString *newGaussianBlurFragmentShader = [[self class] fragmentShaderForOptimizedBlurOfRadius:_blurRadiusInPixels sigma:0.0];\n        \n        //        NSLog(@\"Optimized vertex shader: \\n%@\", newGaussianBlurVertexShader);\n        //        NSLog(@\"Optimized fragment shader: \\n%@\", 
newGaussianBlurFragmentShader);\n        //\n        [self switchToVertexShader:newGaussianBlurVertexShader fragmentShader:newGaussianBlurFragmentShader];\n    }\n    shouldResizeBlurRadiusWithImageSize = NO;\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageBrightnessFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageBrightnessFilter : GPUImageFilter\n{\n    GLint brightnessUniform;\n}\n\n// Brightness ranges from -1.0 to 1.0, with 0.0 as the normal level\n@property(readwrite, nonatomic) CGFloat brightness; \n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageBrightnessFilter.m",
    "content": "#import \"GPUImageBrightnessFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageBrightnessFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform lowp float brightness;\n \n void main()\n {\n     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     \n     gl_FragColor = vec4((textureColor.rgb + vec3(brightness)), textureColor.w);\n }\n);\n#else\nNSString *const kGPUImageBrightnessFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform float brightness;\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     \n     gl_FragColor = vec4((textureColor.rgb + vec3(brightness)), textureColor.w);\n }\n );\n#endif\n\n@implementation GPUImageBrightnessFilter\n\n@synthesize brightness = _brightness;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageBrightnessFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    brightnessUniform = [filterProgram uniformIndex:@\"brightness\"];\n    self.brightness = 0.0;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setBrightness:(CGFloat)newValue;\n{\n    _brightness = newValue;\n    \n    [self setFloat:_brightness forUniform:brightnessUniform program:filterProgram];\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageBulgeDistortionFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n/// Creates a bulge distortion on the image\n@interface GPUImageBulgeDistortionFilter : GPUImageFilter\n{\n    GLint aspectRatioUniform, radiusUniform, centerUniform, scaleUniform;\n}\n\n/// The center about which to apply the distortion, with a default of (0.5, 0.5)\n@property(readwrite, nonatomic) CGPoint center;\n/// The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.25\n@property(readwrite, nonatomic) CGFloat radius;\n/// The amount of distortion to apply, from -1.0 to 1.0, with a default of 0.5\n@property(readwrite, nonatomic) CGFloat scale;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageBulgeDistortionFilter.m",
    "content": "#import \"GPUImageBulgeDistortionFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageBulgeDistortionFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform highp float aspectRatio;\n uniform highp vec2 center;\n uniform highp float radius;\n uniform highp float scale;\n\n void main()\n {\n    highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, ((textureCoordinate.y - center.y) * aspectRatio) + center.y);\n    highp float dist = distance(center, textureCoordinateToUse);\n    textureCoordinateToUse = textureCoordinate;\n    \n    if (dist < radius)\n    {\n        textureCoordinateToUse -= center;\n        highp float percent = 1.0 - ((radius - dist) / radius) * scale;\n        percent = percent * percent;\n        \n        textureCoordinateToUse = textureCoordinateToUse * percent;\n        textureCoordinateToUse += center;\n    }\n    \n    gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );    \n }\n);\n#else\nNSString *const kGPUImageBulgeDistortionFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform float aspectRatio;\n uniform vec2 center;\n uniform float radius;\n uniform float scale;\n \n void main()\n {\n    vec2 textureCoordinateToUse = vec2(textureCoordinate.x, ((textureCoordinate.y - center.y) * aspectRatio) + center.y);\n    float dist = distance(center, textureCoordinateToUse);\n    textureCoordinateToUse = textureCoordinate;\n    \n    if (dist < radius)\n    {\n        textureCoordinateToUse -= center;\n        float percent = 1.0 - ((radius - dist) / radius) * scale;\n        percent = percent * percent;\n        \n        textureCoordinateToUse = textureCoordinateToUse * percent;\n        textureCoordinateToUse += center;\n    }\n    \n    gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );\n }\n);\n#endif\n\n\n@interface GPUImageBulgeDistortionFilter ()\n\n- (void)adjustAspectRatio;\n\n@property (readwrite, nonatomic) CGFloat aspectRatio;\n\n@end\n\n@implementation GPUImageBulgeDistortionFilter\n\n@synthesize aspectRatio = _aspectRatio;\n@synthesize center = _center;\n@synthesize radius = _radius;\n@synthesize scale = _scale;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageBulgeDistortionFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    aspectRatioUniform = [filterProgram uniformIndex:@\"aspectRatio\"];\n    radiusUniform = [filterProgram uniformIndex:@\"radius\"];\n    scaleUniform = [filterProgram uniformIndex:@\"scale\"];\n    centerUniform = [filterProgram uniformIndex:@\"center\"];\n\n    self.radius = 0.25;\n    self.scale = 0.5;\n    self.center = CGPointMake(0.5, 0.5);\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)adjustAspectRatio;\n{\n    if (GPUImageRotationSwapsWidthAndHeight(inputRotation))\n    {\n        [self setAspectRatio:(inputTextureSize.width / inputTextureSize.height)];\n    }\n    else\n    {\n        [self setAspectRatio:(inputTextureSize.height / inputTextureSize.width)];\n    }\n}\n\n- (void)forceProcessingAtSize:(CGSize)frameSize;\n{\n    [super forceProcessingAtSize:frameSize];\n    [self adjustAspectRatio];\n}\n\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n{\n    CGSize oldInputSize = inputTextureSize;\n    [super 
setInputSize:newSize atIndex:textureIndex];\n    \n    if ( (!CGSizeEqualToSize(oldInputSize, inputTextureSize)) && (!CGSizeEqualToSize(newSize, CGSizeZero)) )\n    {\n        [self adjustAspectRatio];\n    }\n}\n\n- (void)setAspectRatio:(CGFloat)newValue;\n{\n    _aspectRatio = newValue;\n    \n    [self setFloat:_aspectRatio forUniform:aspectRatioUniform program:filterProgram];\n}\n\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n    [super setInputRotation:newInputRotation atIndex:textureIndex];\n    [self setCenter:self.center];\n    [self adjustAspectRatio];\n}\n\n- (void)setRadius:(CGFloat)newValue;\n{\n    _radius = newValue;\n    \n    [self setFloat:_radius forUniform:radiusUniform program:filterProgram];\n}\n\n- (void)setScale:(CGFloat)newValue;\n{\n    _scale = newValue;\n\n    [self setFloat:_scale forUniform:scaleUniform program:filterProgram];\n}\n\n- (void)setCenter:(CGPoint)newValue;\n{\n    _center = newValue;\n    \n    CGPoint rotatedPoint = [self rotatedPoint:_center forRotation:inputRotation];\n    \n    [self setPoint:rotatedPoint forUniform:centerUniform program:filterProgram];\n}\n\n@end\n"
  },
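For reference, the coordinate remapping that the bulge fragment shader above performs can be reproduced on the CPU. The sketch below is illustrative only (the helper name and CGPoint-based signature are not part of the library); it mirrors the shader's aspect-corrected distance test and the squared-percent scaling toward the center.

#import <CoreGraphics/CoreGraphics.h>
#import <math.h>

// Hypothetical helper: returns the texture coordinate the bulge shader would sample
// for a given input coordinate, using the same parameters (center, radius, scale, aspectRatio).
static CGPoint GPUBulgeRemappedCoordinate(CGPoint coord, CGPoint center, CGFloat radius, CGFloat scale, CGFloat aspectRatio)
{
    // Aspect-corrected coordinate, used only for the distance test, exactly as in the shader.
    CGPoint corrected = CGPointMake(coord.x, ((coord.y - center.y) * aspectRatio) + center.y);
    CGFloat dist = hypot(corrected.x - center.x, corrected.y - center.y);

    CGPoint result = coord;
    if (dist < radius)
    {
        // Squared falloff toward the center, matching the shader's percent * percent.
        CGFloat percent = 1.0 - ((radius - dist) / radius) * scale;
        percent = percent * percent;
        result.x = (coord.x - center.x) * percent + center.x;
        result.y = (coord.y - center.y) * percent + center.y;
    }
    return result;
}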
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageCGAColorspaceFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageCGAColorspaceFilter : GPUImageFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageCGAColorspaceFilter.m",
    "content": "//\n//  GPUImageCGAColorspaceFilter.m\n//\n\n#import \"GPUImageCGAColorspaceFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageCGAColorspaceFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     highp vec2 sampleDivisor = vec2(1.0 / 200.0, 1.0 / 320.0);\n     //highp vec4 colorDivisor = vec4(colorDepth);\n     \n     highp vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor);\n     highp vec4 color = texture2D(inputImageTexture, samplePos );\n     \n     //gl_FragColor = texture2D(inputImageTexture, samplePos );\n     mediump vec4 colorCyan = vec4(85.0 / 255.0, 1.0, 1.0, 1.0);\n     mediump vec4 colorMagenta = vec4(1.0, 85.0 / 255.0, 1.0, 1.0);\n     mediump vec4 colorWhite = vec4(1.0, 1.0, 1.0, 1.0);\n     mediump vec4 colorBlack = vec4(0.0, 0.0, 0.0, 1.0);\n     \n     mediump vec4 endColor;\n     highp float blackDistance = distance(color, colorBlack);\n     highp float whiteDistance = distance(color, colorWhite);\n     highp float magentaDistance = distance(color, colorMagenta);\n     highp float cyanDistance = distance(color, colorCyan);\n     \n     mediump vec4 finalColor;\n     \n     highp float colorDistance = min(magentaDistance, cyanDistance);\n     colorDistance = min(colorDistance, whiteDistance);\n     colorDistance = min(colorDistance, blackDistance); \n     \n     if (colorDistance == blackDistance) {\n         finalColor = colorBlack;\n     } else if (colorDistance == whiteDistance) {\n         finalColor = colorWhite;\n     } else if (colorDistance == cyanDistance) {\n         finalColor = colorCyan;\n     } else {\n         finalColor = colorMagenta;\n     }\n     \n     gl_FragColor = finalColor;\n }\n);\n#else\nNSString *const kGPUImageCGAColorspaceFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     vec2 sampleDivisor = vec2(1.0 / 200.0, 1.0 / 320.0);\n     //highp vec4 colorDivisor = vec4(colorDepth);\n     \n     vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor);\n     vec4 color = texture2D(inputImageTexture, samplePos );\n     \n     //gl_FragColor = texture2D(inputImageTexture, samplePos );\n     vec4 colorCyan = vec4(85.0 / 255.0, 1.0, 1.0, 1.0);\n     vec4 colorMagenta = vec4(1.0, 85.0 / 255.0, 1.0, 1.0);\n     vec4 colorWhite = vec4(1.0, 1.0, 1.0, 1.0);\n     vec4 colorBlack = vec4(0.0, 0.0, 0.0, 1.0);\n     \n     vec4 endColor;\n     float blackDistance = distance(color, colorBlack);\n     float whiteDistance = distance(color, colorWhite);\n     float magentaDistance = distance(color, colorMagenta);\n     float cyanDistance = distance(color, colorCyan);\n     \n     vec4 finalColor;\n     \n     float colorDistance = min(magentaDistance, cyanDistance);\n     colorDistance = min(colorDistance, whiteDistance);\n     colorDistance = min(colorDistance, blackDistance);\n     \n     if (colorDistance == blackDistance) {\n         finalColor = colorBlack;\n     } else if (colorDistance == whiteDistance) {\n         finalColor = colorWhite;\n     } else if (colorDistance == cyanDistance) {\n         finalColor = colorCyan;\n     } else {\n         finalColor = colorMagenta;\n     }\n     \n     gl_FragColor = finalColor;\n }\n);\n#endif\n\n@implementation GPUImageCGAColorspaceFilter\n\n- (id)init;\n{\n    if (!(self = [super 
initWithFragmentShaderFromString:kGPUImageCGAColorspaceFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageCannyEdgeDetectionFilter.h",
    "content": "#import \"GPUImageFilterGroup.h\"\n\n@class GPUImageGrayscaleFilter;\n@class GPUImageSingleComponentGaussianBlurFilter;\n@class GPUImageDirectionalSobelEdgeDetectionFilter;\n@class GPUImageDirectionalNonMaximumSuppressionFilter;\n@class GPUImageWeakPixelInclusionFilter;\n\n/** This applies the edge detection process described by John Canny in \n \n Canny, J., A Computational Approach To Edge Detection, IEEE Trans. Pattern Analysis and Machine Intelligence, 8(6):679–698, 1986.\n \n and implemented in OpenGL ES by \n\n A. Ensor, S. Hall. GPU-based Image Analysis on Mobile Devices. Proceedings of Image and Vision Computing New Zealand 2011.\n\n It starts with a conversion to luminance, followed by an accelerated 9-hit Gaussian blur. A Sobel operator is applied to obtain the overall\n gradient strength in the blurred image, as well as the direction (in texture sampling steps) of the gradient. A non-maximum suppression filter\n acts along the direction of the gradient, highlighting strong edges that pass the threshold and completely removing those that fail the lower \n threshold. Finally, pixels from in-between these thresholds are either included in edges or rejected based on neighboring pixels.\n */\n@interface GPUImageCannyEdgeDetectionFilter : GPUImageFilterGroup\n{\n    GPUImageGrayscaleFilter *luminanceFilter;\n    GPUImageSingleComponentGaussianBlurFilter *blurFilter;\n    GPUImageDirectionalSobelEdgeDetectionFilter *edgeDetectionFilter;\n    GPUImageDirectionalNonMaximumSuppressionFilter *nonMaximumSuppressionFilter;\n    GPUImageWeakPixelInclusionFilter *weakPixelInclusionFilter;\n}\n\n/** The image width and height factors tweak the appearance of the edges.\n \n These parameters affect the visibility of the detected edges\n \n By default, they match the inverse of the filter size in pixels\n */\n@property(readwrite, nonatomic) CGFloat texelWidth; \n/** The image width and height factors tweak the appearance of the edges.\n \n These parameters affect the visibility of the detected edges\n \n By default, they match the inverse of the filter size in pixels\n */\n@property(readwrite, nonatomic) CGFloat texelHeight; \n\n/** The underlying blur radius for the Gaussian blur. Default is 2.0.\n */\n@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;\n\n/** The underlying blur texel spacing multiplier. Default is 1.0.\n */\n@property (readwrite, nonatomic) CGFloat blurTexelSpacingMultiplier;\n\n/** Any edge with a gradient magnitude above this threshold will pass and show up in the final result.\n */\n@property(readwrite, nonatomic) CGFloat upperThreshold; \n\n/** Any edge with a gradient magnitude below this threshold will fail and be removed from the final result.\n */\n@property(readwrite, nonatomic) CGFloat lowerThreshold; \n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageCannyEdgeDetectionFilter.m",
    "content": "#import \"GPUImageCannyEdgeDetectionFilter.h\"\n\n#import \"GPUImageGrayscaleFilter.h\"\n#import \"GPUImageDirectionalSobelEdgeDetectionFilter.h\"\n#import \"GPUImageDirectionalNonMaximumSuppressionFilter.h\"\n#import \"GPUImageWeakPixelInclusionFilter.h\"\n#import \"GPUImageSingleComponentGaussianBlurFilter.h\"\n\n@implementation GPUImageCannyEdgeDetectionFilter\n\n@synthesize upperThreshold;\n@synthesize lowerThreshold;\n@synthesize blurRadiusInPixels;\n@synthesize blurTexelSpacingMultiplier;\n@synthesize texelWidth;\n@synthesize texelHeight;\n\n- (id)init;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n    // First pass: convert image to luminance\n    luminanceFilter = [[GPUImageGrayscaleFilter alloc] init];\n    [self addFilter:luminanceFilter];\n    \n    // Second pass: apply a variable Gaussian blur\n    blurFilter = [[GPUImageSingleComponentGaussianBlurFilter alloc] init];\n    [self addFilter:blurFilter];\n    \n    // Third pass: run the Sobel edge detection, with calculated gradient directions, on this blurred image\n    edgeDetectionFilter = [[GPUImageDirectionalSobelEdgeDetectionFilter alloc] init];\n    [self addFilter:edgeDetectionFilter];\n    \n    // Fourth pass: apply non-maximum suppression    \n    nonMaximumSuppressionFilter = [[GPUImageDirectionalNonMaximumSuppressionFilter alloc] init];\n    [self addFilter:nonMaximumSuppressionFilter];\n    \n    // Fifth pass: include weak pixels to complete edges\n    weakPixelInclusionFilter = [[GPUImageWeakPixelInclusionFilter alloc] init];\n    [self addFilter:weakPixelInclusionFilter];\n    \n    [luminanceFilter addTarget:blurFilter];\n    [blurFilter addTarget:edgeDetectionFilter];\n    [edgeDetectionFilter addTarget:nonMaximumSuppressionFilter];\n    [nonMaximumSuppressionFilter addTarget:weakPixelInclusionFilter];\n    \n    self.initialFilters = [NSArray arrayWithObject:luminanceFilter];\n//    self.terminalFilter = nonMaximumSuppressionFilter;\n    self.terminalFilter = weakPixelInclusionFilter;\n    \n    self.blurRadiusInPixels = 2.0;\n    self.blurTexelSpacingMultiplier = 1.0;\n    self.upperThreshold = 0.4;\n    self.lowerThreshold = 0.1;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setBlurRadiusInPixels:(CGFloat)newValue;\n{\n    blurFilter.blurRadiusInPixels = newValue;\n}\n\n- (CGFloat)blurRadiusInPixels;\n{\n    return blurFilter.blurRadiusInPixels;\n}\n\n- (void)setBlurTexelSpacingMultiplier:(CGFloat)newValue;\n{\n    blurFilter.texelSpacingMultiplier = newValue;\n}\n\n- (CGFloat)blurTexelSpacingMultiplier;\n{\n    return blurFilter.texelSpacingMultiplier;\n}\n\n- (void)setTexelWidth:(CGFloat)newValue;\n{\n    edgeDetectionFilter.texelWidth = newValue;\n}\n\n- (CGFloat)texelWidth;\n{\n    return edgeDetectionFilter.texelWidth;\n}\n\n- (void)setTexelHeight:(CGFloat)newValue;\n{\n    edgeDetectionFilter.texelHeight = newValue;\n}\n\n- (CGFloat)texelHeight;\n{\n    return edgeDetectionFilter.texelHeight;\n}\n\n- (void)setUpperThreshold:(CGFloat)newValue;\n{\n    nonMaximumSuppressionFilter.upperThreshold = newValue;\n}\n\n- (CGFloat)upperThreshold;\n{\n    return nonMaximumSuppressionFilter.upperThreshold;\n}\n\n- (void)setLowerThreshold:(CGFloat)newValue;\n{\n    nonMaximumSuppressionFilter.lowerThreshold = newValue;\n}\n\n- (CGFloat)lowerThreshold;\n{\n    return nonMaximumSuppressionFilter.lowerThreshold;\n}\n\n@end\n"
  },
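A minimal usage sketch for the Canny group above, assuming the imageByFilteringImage: convenience from upstream GPUImage's GPUImageOutput is available in this copy of the library; the helper name and threshold values below are illustrative, not defaults beyond those already set in init.

#import <UIKit/UIKit.h>
#import "GPUImageCannyEdgeDetectionFilter.h"

// Hypothetical helper showing one-shot Canny edge detection on a still image.
static UIImage *ExampleCannyEdges(UIImage *sourceImage)
{
    GPUImageCannyEdgeDetectionFilter *cannyFilter = [[GPUImageCannyEdgeDetectionFilter alloc] init];
    cannyFilter.blurRadiusInPixels = 2.0;  // Gaussian pre-smoothing before the Sobel pass
    cannyFilter.upperThreshold = 0.4;      // gradients above this always survive
    cannyFilter.lowerThreshold = 0.1;      // gradients below this are always discarded

    // imageByFilteringImage: is assumed to exist here, as in upstream GPUImage.
    return [cannyFilter imageByFilteringImage:sourceImage];
}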
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageChromaKeyBlendFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n/** Selectively replaces a color in the first image with the second image\n */\n@interface GPUImageChromaKeyBlendFilter : GPUImageTwoInputFilter\n{\n    GLint colorToReplaceUniform, thresholdSensitivityUniform, smoothingUniform;\n}\n\n/** The threshold sensitivity controls how similar pixels need to be colored to be replaced\n \n The default value is 0.3\n */\n@property(readwrite, nonatomic) CGFloat thresholdSensitivity;\n\n/** The degree of smoothing controls how gradually similar colors are replaced in the image\n \n The default value is 0.1\n */\n@property(readwrite, nonatomic) CGFloat smoothing;\n\n/** The color to be replaced is specified using individual red, green, and blue components (normalized to 1.0).\n \n The default is green: (0.0, 1.0, 0.0).\n \n @param redComponent Red component of color to be replaced\n @param greenComponent Green component of color to be replaced\n @param blueComponent Blue component of color to be replaced\n */\n- (void)setColorToReplaceRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageChromaKeyBlendFilter.m",
    "content": "#import \"GPUImageChromaKeyBlendFilter.h\"\n\n// Shader code based on Apple's CIChromaKeyFilter example: https://developer.apple.com/library/mac/#samplecode/CIChromaKeyFilter/Introduction/Intro.html\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageChromaKeyBlendFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n\n uniform float thresholdSensitivity;\n uniform float smoothing;\n uniform vec3 colorToReplace;\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     float maskY = 0.2989 * colorToReplace.r + 0.5866 * colorToReplace.g + 0.1145 * colorToReplace.b;\n     float maskCr = 0.7132 * (colorToReplace.r - maskY);\n     float maskCb = 0.5647 * (colorToReplace.b - maskY);\n     \n     float Y = 0.2989 * textureColor.r + 0.5866 * textureColor.g + 0.1145 * textureColor.b;\n     float Cr = 0.7132 * (textureColor.r - Y);\n     float Cb = 0.5647 * (textureColor.b - Y);\n     \n//     float blendValue = 1.0 - smoothstep(thresholdSensitivity - smoothing, thresholdSensitivity , abs(Cr - maskCr) + abs(Cb - maskCb));\n     float blendValue = 1.0 - smoothstep(thresholdSensitivity, thresholdSensitivity + smoothing, distance(vec2(Cr, Cb), vec2(maskCr, maskCb)));\n     gl_FragColor = mix(textureColor, textureColor2, blendValue);\n }\n);\n#else\nNSString *const kGPUImageChromaKeyBlendFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform float thresholdSensitivity;\n uniform float smoothing;\n uniform vec3 colorToReplace;\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     float maskY = 0.2989 * colorToReplace.r + 0.5866 * colorToReplace.g + 0.1145 * colorToReplace.b;\n     float maskCr = 0.7132 * (colorToReplace.r - maskY);\n     float maskCb = 0.5647 * (colorToReplace.b - maskY);\n     \n     float Y = 0.2989 * textureColor.r + 0.5866 * textureColor.g + 0.1145 * textureColor.b;\n     float Cr = 0.7132 * (textureColor.r - Y);\n     float Cb = 0.5647 * (textureColor.b - Y);\n     \n     //     float blendValue = 1.0 - smoothstep(thresholdSensitivity - smoothing, thresholdSensitivity , abs(Cr - maskCr) + abs(Cb - maskCb));\n     float blendValue = 1.0 - smoothstep(thresholdSensitivity, thresholdSensitivity + smoothing, distance(vec2(Cr, Cb), vec2(maskCr, maskCb)));\n     gl_FragColor = mix(textureColor, textureColor2, blendValue);\n }\n);\n#endif\n\n@implementation GPUImageChromaKeyBlendFilter\n\n@synthesize thresholdSensitivity = _thresholdSensitivity;\n@synthesize smoothing = _smoothing;\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageChromaKeyBlendFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    thresholdSensitivityUniform = [filterProgram uniformIndex:@\"thresholdSensitivity\"];\n    smoothingUniform = [filterProgram uniformIndex:@\"smoothing\"];\n    colorToReplaceUniform = [filterProgram uniformIndex:@\"colorToReplace\"];\n    \n    self.thresholdSensitivity = 0.4;\n    self.smoothing = 0.1;\n    [self setColorToReplaceRed:0.0 
green:1.0 blue:0.0];\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setColorToReplaceRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;\n{\n    GPUVector3 colorToReplace = {redComponent, greenComponent, blueComponent};\n    \n    [self setVec3:colorToReplace forUniform:colorToReplaceUniform program:filterProgram];\n}\n\n- (void)setThresholdSensitivity:(CGFloat)newValue;\n{\n    _thresholdSensitivity = newValue;\n\n    [self setFloat:(GLfloat)_thresholdSensitivity forUniform:thresholdSensitivityUniform program:filterProgram];\n}\n\n- (void)setSmoothing:(CGFloat)newValue;\n{\n    _smoothing = newValue;\n    \n    [self setFloat:(GLfloat)_smoothing forUniform:smoothingUniform program:filterProgram];\n}\n\n@end\n\n"
  },
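The blend filter above takes two inputs: the first is keyed, the second shows through where the key color matches. The sketch below is a hedged example of a still-image green-screen composite; the GPUImagePicture source and the useNextFrameForImageCapture / imageFromCurrentFramebuffer capture calls are the standard upstream GPUImage pipeline and are assumed to exist in this copy, and the helper name is hypothetical.

#import <UIKit/UIKit.h>
#import "GPUImageChromaKeyBlendFilter.h"
#import "GPUImagePicture.h"   // assumed still-image source from upstream GPUImage

// Hypothetical helper: keys pure green out of the foreground and composites it over the background.
static UIImage *ExampleGreenScreenComposite(UIImage *foregroundImage, UIImage *backgroundImage)
{
    GPUImagePicture *foreground = [[GPUImagePicture alloc] initWithImage:foregroundImage];
    GPUImagePicture *background = [[GPUImagePicture alloc] initWithImage:backgroundImage];

    GPUImageChromaKeyBlendFilter *chromaKeyFilter = [[GPUImageChromaKeyBlendFilter alloc] init];
    [chromaKeyFilter setColorToReplaceRed:0.0 green:1.0 blue:0.0]; // key out pure green
    chromaKeyFilter.thresholdSensitivity = 0.4;
    chromaKeyFilter.smoothing = 0.1;

    // First target added becomes inputImageTexture, second becomes inputImageTexture2.
    [foreground addTarget:chromaKeyFilter];
    [background addTarget:chromaKeyFilter];

    [chromaKeyFilter useNextFrameForImageCapture];
    [foreground processImage];
    [background processImage];

    return [chromaKeyFilter imageFromCurrentFramebuffer];
}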
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageChromaKeyFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageChromaKeyFilter : GPUImageFilter\n{\n    GLint colorToReplaceUniform, thresholdSensitivityUniform, smoothingUniform;\n}\n\n/** The threshold sensitivity controls how similar pixels need to be colored to be replaced\n \n The default value is 0.3\n */\n@property(readwrite, nonatomic) CGFloat thresholdSensitivity;\n\n/** The degree of smoothing controls how gradually similar colors are replaced in the image\n \n The default value is 0.1\n */\n@property(readwrite, nonatomic) CGFloat smoothing;\n\n/** The color to be replaced is specified using individual red, green, and blue components (normalized to 1.0).\n \n The default is green: (0.0, 1.0, 0.0).\n \n @param redComponent Red component of color to be replaced\n @param greenComponent Green component of color to be replaced\n @param blueComponent Blue component of color to be replaced\n */\n- (void)setColorToReplaceRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageChromaKeyFilter.m",
    "content": "#import \"GPUImageChromaKeyFilter.h\"\n\n// Shader code based on Apple's CIChromaKeyFilter example: https://developer.apple.com/library/mac/#samplecode/CIChromaKeyFilter/Introduction/Intro.html\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageChromaKeyFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n varying highp vec2 textureCoordinate;\n \n uniform float thresholdSensitivity;\n uniform float smoothing;\n uniform vec3 colorToReplace;\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     \n     float maskY = 0.2989 * colorToReplace.r + 0.5866 * colorToReplace.g + 0.1145 * colorToReplace.b;\n     float maskCr = 0.7132 * (colorToReplace.r - maskY);\n     float maskCb = 0.5647 * (colorToReplace.b - maskY);\n     \n     float Y = 0.2989 * textureColor.r + 0.5866 * textureColor.g + 0.1145 * textureColor.b;\n     float Cr = 0.7132 * (textureColor.r - Y);\n     float Cb = 0.5647 * (textureColor.b - Y);\n     \n     //     float blendValue = 1.0 - smoothstep(thresholdSensitivity - smoothing, thresholdSensitivity , abs(Cr - maskCr) + abs(Cb - maskCb));\n     float blendValue = smoothstep(thresholdSensitivity, thresholdSensitivity + smoothing, distance(vec2(Cr, Cb), vec2(maskCr, maskCb)));\n     gl_FragColor = vec4(textureColor.rgb, textureColor.a * blendValue);\n }\n);\n#else\nNSString *const kGPUImageChromaKeyFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform float thresholdSensitivity;\n uniform float smoothing;\n uniform vec3 colorToReplace;\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     \n     float maskY = 0.2989 * colorToReplace.r + 0.5866 * colorToReplace.g + 0.1145 * colorToReplace.b;\n     float maskCr = 0.7132 * (colorToReplace.r - maskY);\n     float maskCb = 0.5647 * (colorToReplace.b - maskY);\n     \n     float Y = 0.2989 * textureColor.r + 0.5866 * textureColor.g + 0.1145 * textureColor.b;\n     float Cr = 0.7132 * (textureColor.r - Y);\n     float Cb = 0.5647 * (textureColor.b - Y);\n     \n     //     float blendValue = 1.0 - smoothstep(thresholdSensitivity - smoothing, thresholdSensitivity , abs(Cr - maskCr) + abs(Cb - maskCb));\n     float blendValue = smoothstep(thresholdSensitivity, thresholdSensitivity + smoothing, distance(vec2(Cr, Cb), vec2(maskCr, maskCb)));\n     gl_FragColor = vec4(textureColor.rgb, textureColor.a * blendValue);\n }\n );\n#endif\n\n@implementation GPUImageChromaKeyFilter\n\n@synthesize thresholdSensitivity = _thresholdSensitivity;\n@synthesize smoothing = _smoothing;\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageChromaKeyFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    thresholdSensitivityUniform = [filterProgram uniformIndex:@\"thresholdSensitivity\"];\n    smoothingUniform = [filterProgram uniformIndex:@\"smoothing\"];\n    colorToReplaceUniform = [filterProgram uniformIndex:@\"colorToReplace\"];\n    \n    self.thresholdSensitivity = 0.4;\n    self.smoothing = 0.1;\n    [self setColorToReplaceRed:0.0 green:1.0 blue:0.0];\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setColorToReplaceRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;\n{\n    GPUVector3 colorToReplace = 
{redComponent, greenComponent, blueComponent};\n    \n    [self setVec3:colorToReplace forUniform:colorToReplaceUniform program:filterProgram];\n}\n\n- (void)setThresholdSensitivity:(CGFloat)newValue;\n{\n    _thresholdSensitivity = newValue;\n    \n    [self setFloat:(GLfloat)_thresholdSensitivity forUniform:thresholdSensitivityUniform program:filterProgram];\n}\n\n- (void)setSmoothing:(CGFloat)newValue;\n{\n    _smoothing = newValue;\n    \n    [self setFloat:(GLfloat)_smoothing forUniform:smoothingUniform program:filterProgram];\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageClosingFilter.h",
    "content": "#import \"GPUImageFilterGroup.h\"\n\n@class GPUImageErosionFilter;\n@class GPUImageDilationFilter;\n\n// A filter that first performs a dilation on the red channel of an image, followed by an erosion of the same radius. \n// This helps to filter out smaller dark elements.\n\n@interface GPUImageClosingFilter : GPUImageFilterGroup\n{\n    GPUImageErosionFilter *erosionFilter;\n    GPUImageDilationFilter *dilationFilter;\n}\n\n@property(readwrite, nonatomic) CGFloat verticalTexelSpacing, horizontalTexelSpacing;\n\n- (id)initWithRadius:(NSUInteger)radius;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageClosingFilter.m",
    "content": "#import \"GPUImageClosingFilter.h\"\n#import \"GPUImageErosionFilter.h\"\n#import \"GPUImageDilationFilter.h\"\n\n@implementation GPUImageClosingFilter\n\n@synthesize verticalTexelSpacing = _verticalTexelSpacing;\n@synthesize horizontalTexelSpacing = _horizontalTexelSpacing;\n\n- (id)init;\n{\n    if (!(self = [self initWithRadius:1]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n- (id)initWithRadius:(NSUInteger)radius;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n    // First pass: dilation\n    dilationFilter = [[GPUImageDilationFilter alloc] initWithRadius:radius];\n    [self addFilter:dilationFilter];\n    \n    // Second pass: erosion\n    erosionFilter = [[GPUImageErosionFilter alloc] initWithRadius:radius];\n    [self addFilter:erosionFilter];\n    \n    [dilationFilter addTarget:erosionFilter];\n    \n    self.initialFilters = [NSArray arrayWithObjects:dilationFilter, nil];\n    self.terminalFilter = erosionFilter;\n    \n    return self;\n}\n\n- (void)setVerticalTexelSpacing:(CGFloat)newValue;\n{\n    _verticalTexelSpacing = newValue;\n    erosionFilter.verticalTexelSpacing = newValue;\n    dilationFilter.verticalTexelSpacing = newValue;\n}\n\n- (void)setHorizontalTexelSpacing:(CGFloat)newValue;\n{\n    _horizontalTexelSpacing = newValue;\n    erosionFilter.horizontalTexelSpacing = newValue;\n    dilationFilter.horizontalTexelSpacing = newValue;\n}\n\n@end\n"
  },
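A short configuration sketch for the closing operation above; the radius and texel spacings are example values only, not library defaults beyond the radius-1 default in init.

#import "GPUImageClosingFilter.h"

// Illustrative setup: a morphological closing (dilation then erosion) with a 3-pixel radius,
// useful for filling in small dark gaps in the red channel.
static GPUImageClosingFilter *MakeClosingFilterExample(void)
{
    GPUImageClosingFilter *closingFilter = [[GPUImageClosingFilter alloc] initWithRadius:3];
    closingFilter.verticalTexelSpacing = 1.0;   // sample immediately adjacent rows
    closingFilter.horizontalTexelSpacing = 1.0; // and immediately adjacent columns
    return closingFilter;
}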
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorBlendFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageColorBlendFilter : GPUImageTwoInputFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorBlendFilter.m",
    "content": "#import \"GPUImageColorBlendFilter.h\"\n\n/**\n * Color blend mode based upon pseudo code from the PDF specification.\n */\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageColorBlendFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n highp float lum(lowp vec3 c) {\n     return dot(c, vec3(0.3, 0.59, 0.11));\n }\n \n lowp vec3 clipcolor(lowp vec3 c) {\n     highp float l = lum(c);\n     lowp float n = min(min(c.r, c.g), c.b);\n     lowp float x = max(max(c.r, c.g), c.b);\n     \n     if (n < 0.0) {\n         c.r = l + ((c.r - l) * l) / (l - n);\n         c.g = l + ((c.g - l) * l) / (l - n);\n         c.b = l + ((c.b - l) * l) / (l - n);\n     }\n     if (x > 1.0) {\n         c.r = l + ((c.r - l) * (1.0 - l)) / (x - l);\n         c.g = l + ((c.g - l) * (1.0 - l)) / (x - l);\n         c.b = l + ((c.b - l) * (1.0 - l)) / (x - l);\n     }\n     \n     return c;\n }\n\n lowp vec3 setlum(lowp vec3 c, highp float l) {\n     highp float d = l - lum(c);\n     c = c + vec3(d);\n     return clipcolor(c);\n }\n \n void main()\n {\n\t highp vec4 baseColor = texture2D(inputImageTexture, textureCoordinate);\n\t highp vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2);\n\n     gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(overlayColor.rgb, lum(baseColor.rgb)) * overlayColor.a, baseColor.a);\n }\n);\n#else\nNSString *const kGPUImageColorBlendFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n float lum(vec3 c) {\n     return dot(c, vec3(0.3, 0.59, 0.11));\n }\n \n vec3 clipcolor(vec3 c) {\n     float l = lum(c);\n     float n = min(min(c.r, c.g), c.b);\n     float x = max(max(c.r, c.g), c.b);\n     \n     if (n < 0.0) {\n         c.r = l + ((c.r - l) * l) / (l - n);\n         c.g = l + ((c.g - l) * l) / (l - n);\n         c.b = l + ((c.b - l) * l) / (l - n);\n     }\n     if (x > 1.0) {\n         c.r = l + ((c.r - l) * (1.0 - l)) / (x - l);\n         c.g = l + ((c.g - l) * (1.0 - l)) / (x - l);\n         c.b = l + ((c.b - l) * (1.0 - l)) / (x - l);\n     }\n     \n     return c;\n }\n \n vec3 setlum(vec3 c, float l) {\n     float d = l - lum(c);\n     c = c + vec3(d);\n     return clipcolor(c);\n }\n \n void main()\n {\n\t vec4 baseColor = texture2D(inputImageTexture, textureCoordinate);\n\t vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(overlayColor.rgb, lum(baseColor.rgb)) * overlayColor.a, baseColor.a);\n }\n);\n#endif\n\n\n@implementation GPUImageColorBlendFilter\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageColorBlendFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorBurnBlendFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n/** Applies a color burn blend of two images\n */\n@interface GPUImageColorBurnBlendFilter : GPUImageTwoInputFilter\n{\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorBurnBlendFilter.m",
    "content": "#import \"GPUImageColorBurnBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageColorBurnBlendFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n    mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n    mediump vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);\n    mediump vec4 whiteColor = vec4(1.0);\n    gl_FragColor = whiteColor - (whiteColor - textureColor) / textureColor2;\n }\n);\n#else\nNSString *const kGPUImageColorBurnBlendFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);\n     vec4 whiteColor = vec4(1.0);\n     gl_FragColor = whiteColor - (whiteColor - textureColor) / textureColor2;\n }\n);\n#endif\n\n@implementation GPUImageColorBurnBlendFilter\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageColorBurnBlendFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorConversion.h",
    "content": "#ifndef GPUImageColorConversion_h\n#define GPUImageColorConversion_h\n\nextern GLfloat *kColorConversion601;\nextern GLfloat *kColorConversion601FullRange;\nextern GLfloat *kColorConversion709;\nextern NSString *const kGPUImageYUVVideoRangeConversionForRGFragmentShaderString;\nextern NSString *const kGPUImageYUVFullRangeConversionForLAFragmentShaderString;\nextern NSString *const kGPUImageYUVVideoRangeConversionForLAFragmentShaderString;\n\n\n#endif /* GPUImageColorConversion_h */\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorConversion.m",
    "content": "#import \"GPUImageFilter.h\"\n\n// Color Conversion Constants (YUV to RGB) including adjustment from 16-235/16-240 (video range)\n\n// BT.601, which is the standard for SDTV.\nGLfloat kColorConversion601Default[] = {\n    1.164,  1.164, 1.164,\n    0.0, -0.392, 2.017,\n    1.596, -0.813,   0.0,\n};\n\n// BT.601 full range (ref: http://www.equasys.de/colorconversion.html)\nGLfloat kColorConversion601FullRangeDefault[] = {\n    1.0,    1.0,    1.0,\n    0.0,    -0.343, 1.765,\n    1.4,    -0.711, 0.0,\n};\n\n// BT.709, which is the standard for HDTV.\nGLfloat kColorConversion709Default[] = {\n    1.164,  1.164, 1.164,\n    0.0, -0.213, 2.112,\n    1.793, -0.533,   0.0,\n};\n\n\nGLfloat *kColorConversion601 = kColorConversion601Default;\nGLfloat *kColorConversion601FullRange = kColorConversion601FullRangeDefault;\nGLfloat *kColorConversion709 = kColorConversion709Default;\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageYUVVideoRangeConversionForRGFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D luminanceTexture;\n uniform sampler2D chrominanceTexture;\n uniform mediump mat3 colorConversionMatrix;\n \n void main()\n {\n     mediump vec3 yuv;\n     lowp vec3 rgb;\n     \n     yuv.x = texture2D(luminanceTexture, textureCoordinate).r;\n     yuv.yz = texture2D(chrominanceTexture, textureCoordinate).rg - vec2(0.5, 0.5);\n     rgb = colorConversionMatrix * yuv;\n     \n     gl_FragColor = vec4(rgb, 1);\n }\n );\n#else\nNSString *const kGPUImageYUVVideoRangeConversionForRGFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D luminanceTexture;\n uniform sampler2D chrominanceTexture;\n \n void main()\n {\n     vec3 yuv;\n     vec3 rgb;\n     \n     yuv.x = texture2D(luminanceTexture, textureCoordinate).r;\n     yuv.yz = texture2D(chrominanceTexture, textureCoordinate).rg - vec2(0.5, 0.5);\n     \n     // BT.601, which is the standard for SDTV is provided as a reference\n     /*\n      rgb = mat3(      1,       1,       1,\n      0, -.39465, 2.03211,\n      1.13983, -.58060,       0) * yuv;\n      */\n     \n     // Using BT.709 which is the standard for HDTV\n     rgb = mat3(      1,       1,       1,\n                0, -.21482, 2.12798,\n                1.28033, -.38059,       0) * yuv;\n     \n     gl_FragColor = vec4(rgb, 1);\n }\n );\n#endif\n\nNSString *const kGPUImageYUVFullRangeConversionForLAFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D luminanceTexture;\n uniform sampler2D chrominanceTexture;\n uniform mediump mat3 colorConversionMatrix;\n \n void main()\n {\n     mediump vec3 yuv;\n     lowp vec3 rgb;\n     \n     yuv.x = texture2D(luminanceTexture, textureCoordinate).r;\n     yuv.yz = texture2D(chrominanceTexture, textureCoordinate).ra - vec2(0.5, 0.5);\n     rgb = colorConversionMatrix * yuv;\n     \n     gl_FragColor = vec4(rgb, 1);\n }\n );\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D luminanceTexture;\n uniform sampler2D chrominanceTexture;\n uniform mediump mat3 colorConversionMatrix;\n \n void main()\n {\n     mediump vec3 yuv;\n     lowp vec3 rgb;\n     \n     yuv.x = texture2D(luminanceTexture, textureCoordinate).r - (16.0/255.0);\n     yuv.yz = texture2D(chrominanceTexture, textureCoordinate).ra - vec2(0.5, 0.5);\n     rgb = 
colorConversionMatrix * yuv;\n     \n     gl_FragColor = vec4(rgb, 1);\n }\n );\n#else\nNSString *const kGPUImageYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D luminanceTexture;\n uniform sampler2D chrominanceTexture;\n \n void main()\n {\n     vec3 yuv;\n     vec3 rgb;\n     \n     yuv.x = texture2D(luminanceTexture, textureCoordinate).r;\n     yuv.yz = texture2D(chrominanceTexture, textureCoordinate).ra - vec2(0.5, 0.5);\n     \n     // BT.601, which is the standard for SDTV is provided as a reference\n     /*\n      rgb = mat3(      1,       1,       1,\n      0, -.39465, 2.03211,\n      1.13983, -.58060,       0) * yuv;\n      */\n     \n     // Using BT.709 which is the standard for HDTV\n     rgb = mat3(      1,       1,       1,\n                0, -.21482, 2.12798,\n                1.28033, -.38059,       0) * yuv;\n     \n     gl_FragColor = vec4(rgb, 1);\n }\n );\n#endif\n\n"
  },
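When kColorConversion601 is bound as colorConversionMatrix, the video-range LA shader above amounts to the plain arithmetic below: subtract the 16/255 luma bias and the 0.5 chroma biases, then multiply by the column-major BT.601 matrix. The function name is illustrative only.

// Illustrative CPU equivalent of the video-range BT.601 conversion used by the shaders.
static void YUVVideoRangeToRGB601(float y, float cb, float cr, float rgb[3])
{
    float yp = y  - (16.0f / 255.0f); // luma bias removed, as in the video-range LA shader
    float u  = cb - 0.5f;             // chroma biases removed
    float v  = cr - 0.5f;

    // Same coefficients as kColorConversion601Default, read column by column.
    rgb[0] = 1.164f * yp + 0.0f   * u + 1.596f * v; // R
    rgb[1] = 1.164f * yp - 0.392f * u - 0.813f * v; // G
    rgb[2] = 1.164f * yp + 2.017f * u + 0.0f   * v; // B
}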
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorDodgeBlendFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n/** Applies a color dodge blend of two images\n */\n@interface GPUImageColorDodgeBlendFilter : GPUImageTwoInputFilter\n{\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorDodgeBlendFilter.m",
    "content": "#import \"GPUImageColorDodgeBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageColorDodgeBlendFragmentShaderString = SHADER_STRING\n( \n \n precision mediump float;\n \n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     vec4 base = texture2D(inputImageTexture, textureCoordinate);\n     vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     vec3 baseOverlayAlphaProduct = vec3(overlay.a * base.a);\n     vec3 rightHandProduct = overlay.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlay.a);\n     \n     vec3 firstBlendColor = baseOverlayAlphaProduct + rightHandProduct;\n     vec3 overlayRGB = clamp((overlay.rgb / clamp(overlay.a, 0.01, 1.0)) * step(0.0, overlay.a), 0.0, 0.99);\n     \n     vec3 secondBlendColor = (base.rgb * overlay.a) / (1.0 - overlayRGB) + rightHandProduct;\n     \n     vec3 colorChoice = step((overlay.rgb * base.a + base.rgb * overlay.a), baseOverlayAlphaProduct);\n     \n     gl_FragColor = vec4(mix(firstBlendColor, secondBlendColor, colorChoice), 1.0);\n }\n);\n#else\nNSString *const kGPUImageColorDodgeBlendFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     vec4 base = texture2D(inputImageTexture, textureCoordinate);\n     vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     vec3 baseOverlayAlphaProduct = vec3(overlay.a * base.a);\n     vec3 rightHandProduct = overlay.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlay.a);\n     \n     vec3 firstBlendColor = baseOverlayAlphaProduct + rightHandProduct;\n     vec3 overlayRGB = clamp((overlay.rgb / clamp(overlay.a, 0.01, 1.0)) * step(0.0, overlay.a), 0.0, 0.99);\n     \n     vec3 secondBlendColor = (base.rgb * overlay.a) / (1.0 - overlayRGB) + rightHandProduct;\n     \n     vec3 colorChoice = step((overlay.rgb * base.a + base.rgb * overlay.a), baseOverlayAlphaProduct);\n     \n     gl_FragColor = vec4(mix(firstBlendColor, secondBlendColor, colorChoice), 1.0);\n }\n);\n#endif\n\n@implementation GPUImageColorDodgeBlendFilter\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageColorDodgeBlendFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorInvertFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageColorInvertFilter : GPUImageFilter\n{\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorInvertFilter.m",
    "content": "#import \"GPUImageColorInvertFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageInvertFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n    lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n    \n    gl_FragColor = vec4((1.0 - textureColor.rgb), textureColor.w);\n }\n);                                                                    \n#else\nNSString *const kGPUImageInvertFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     \n     gl_FragColor = vec4((1.0 - textureColor.rgb), textureColor.w);\n }\n );\n#endif\n\n@implementation GPUImageColorInvertFilter\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageInvertFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorLocalBinaryPatternFilter.h",
    "content": "#import \"GPUImage3x3TextureSamplingFilter.h\"\n\n@interface GPUImageColorLocalBinaryPatternFilter : GPUImage3x3TextureSamplingFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorLocalBinaryPatternFilter.m",
    "content": "#import \"GPUImageColorLocalBinaryPatternFilter.h\"\n\n// This is based on \"Accelerating image recognition on mobile devices using GPGPU\" by Miguel Bordallo Lopez, Henri Nykanen, Jari Hannuksela, Olli Silven and Markku Vehvilainen\n// http://www.ee.oulu.fi/~jhannuks/publications/SPIE2011a.pdf\n\n// Right pixel is the most significant bit, traveling clockwise to get to the upper right, which is the least significant\n// If the external pixel is greater than or equal to the center, set to 1, otherwise 0\n//\n// 2 1 0\n// 3   7\n// 4 5 6\n\n// 01101101\n// 76543210\n\n@implementation GPUImageColorLocalBinaryPatternFilter\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageColorLocalBinaryPatternFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     lowp vec3 centerColor = texture2D(inputImageTexture, textureCoordinate).rgb;\n     lowp vec3 bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb;\n     lowp vec3 topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).rgb;\n     lowp vec3 topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).rgb;\n     lowp vec3 bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb;\n     lowp vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;\n     lowp vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;\n     lowp vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;\n     lowp vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;\n\n     lowp float redByteTally = 1.0 / 255.0 * step(centerColor.r, topRightColor.r);\n     redByteTally += 2.0 / 255.0 * step(centerColor.r, topColor.r);\n     redByteTally += 4.0 / 255.0 * step(centerColor.r, topLeftColor.r);\n     redByteTally += 8.0 / 255.0 * step(centerColor.r, leftColor.r);\n     redByteTally += 16.0 / 255.0 * step(centerColor.r, bottomLeftColor.r);\n     redByteTally += 32.0 / 255.0 * step(centerColor.r, bottomColor.r);\n     redByteTally += 64.0 / 255.0 * step(centerColor.r, bottomRightColor.r);\n     redByteTally += 128.0 / 255.0 * step(centerColor.r, rightColor.r);\n\n     lowp float blueByteTally = 1.0 / 255.0 * step(centerColor.b, topRightColor.b);\n     blueByteTally += 2.0 / 255.0 * step(centerColor.b, topColor.b);\n     blueByteTally += 4.0 / 255.0 * step(centerColor.b, topLeftColor.b);\n     blueByteTally += 8.0 / 255.0 * step(centerColor.b, leftColor.b);\n     blueByteTally += 16.0 / 255.0 * step(centerColor.b, bottomLeftColor.b);\n     blueByteTally += 32.0 / 255.0 * step(centerColor.b, bottomColor.b);\n     blueByteTally += 64.0 / 255.0 * step(centerColor.b, bottomRightColor.b);\n     blueByteTally += 128.0 / 255.0 * step(centerColor.b, rightColor.b);\n\n     lowp float greenByteTally = 1.0 / 255.0 * step(centerColor.g, topRightColor.g);\n     greenByteTally += 2.0 / 255.0 * step(centerColor.g, topColor.g);\n     greenByteTally += 4.0 / 255.0 * step(centerColor.g, topLeftColor.g);\n     greenByteTally += 8.0 / 255.0 * step(centerColor.g, 
leftColor.g);\n     greenByteTally += 16.0 / 255.0 * step(centerColor.g, bottomLeftColor.g);\n     greenByteTally += 32.0 / 255.0 * step(centerColor.g, bottomColor.g);\n     greenByteTally += 64.0 / 255.0 * step(centerColor.g, bottomRightColor.g);\n     greenByteTally += 128.0 / 255.0 * step(centerColor.g, rightColor.g);\n\n     // TODO: Replace the above with a dot product and two vec4s\n     // TODO: Apply step to a matrix, rather than individually\n     \n     gl_FragColor = vec4(redByteTally, blueByteTally, greenByteTally, 1.0);\n }\n);\n#else\nNSString *const kGPUImageColorLocalBinaryPatternFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     vec3 centerColor = texture2D(inputImageTexture, textureCoordinate).rgb;\n     vec3 bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb;\n     vec3 topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).rgb;\n     vec3 topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).rgb;\n     vec3 bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb;\n     vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;\n     vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;\n     vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;\n     vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;\n     \n     float redByteTally = 1.0 / 255.0 * step(centerColor.r, topRightColor.r);\n     redByteTally += 2.0 / 255.0 * step(centerColor.r, topColor.r);\n     redByteTally += 4.0 / 255.0 * step(centerColor.r, topLeftColor.r);\n     redByteTally += 8.0 / 255.0 * step(centerColor.r, leftColor.r);\n     redByteTally += 16.0 / 255.0 * step(centerColor.r, bottomLeftColor.r);\n     redByteTally += 32.0 / 255.0 * step(centerColor.r, bottomColor.r);\n     redByteTally += 64.0 / 255.0 * step(centerColor.r, bottomRightColor.r);\n     redByteTally += 128.0 / 255.0 * step(centerColor.r, rightColor.r);\n     \n     float blueByteTally = 1.0 / 255.0 * step(centerColor.b, topRightColor.b);\n     blueByteTally += 2.0 / 255.0 * step(centerColor.b, topColor.b);\n     blueByteTally += 4.0 / 255.0 * step(centerColor.b, topLeftColor.b);\n     blueByteTally += 8.0 / 255.0 * step(centerColor.b, leftColor.b);\n     blueByteTally += 16.0 / 255.0 * step(centerColor.b, bottomLeftColor.b);\n     blueByteTally += 32.0 / 255.0 * step(centerColor.b, bottomColor.b);\n     blueByteTally += 64.0 / 255.0 * step(centerColor.b, bottomRightColor.b);\n     blueByteTally += 128.0 / 255.0 * step(centerColor.b, rightColor.b);\n     \n     float greenByteTally = 1.0 / 255.0 * step(centerColor.g, topRightColor.g);\n     greenByteTally += 2.0 / 255.0 * step(centerColor.g, topColor.g);\n     greenByteTally += 4.0 / 255.0 * step(centerColor.g, topLeftColor.g);\n     greenByteTally += 8.0 / 255.0 * step(centerColor.g, leftColor.g);\n     greenByteTally += 16.0 / 255.0 * step(centerColor.g, bottomLeftColor.g);\n     greenByteTally += 32.0 / 255.0 * step(centerColor.g, bottomColor.g);\n     greenByteTally += 64.0 / 255.0 * 
step(centerColor.g, bottomRightColor.g);\n     greenByteTally += 128.0 / 255.0 * step(centerColor.g, rightColor.g);\n     \n     // TODO: Replace the above with a dot product and two vec4s\n     // TODO: Apply step to a matrix, rather than individually\n     \n     gl_FragColor = vec4(redByteTally, blueByteTally, greenByteTally, 1.0);\n }\n);\n#endif\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageColorLocalBinaryPatternFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end\n"
  },
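The bit layout documented in the comments at the top of this file (right neighbour = most significant bit, then clockwise so the top-right neighbour is the least significant, with ties counted as 1) can be written out per channel on the CPU. The sketch below is illustrative; the neighbour array ordering is an assumption chosen to match the order in which the shader accumulates 1/255, 2/255, ... 128/255.

// neighbours[0..7] = top-right, top, top-left, left, bottom-left, bottom, bottom-right, right.
static unsigned char LocalBinaryPatternByte(float center, const float neighbours[8])
{
    unsigned char pattern = 0;
    for (int bit = 0; bit < 8; bit++)
    {
        // GLSL step(center, neighbour) is 1.0 when neighbour >= center, else 0.0.
        if (neighbours[bit] >= center)
        {
            pattern |= (unsigned char)(1u << bit);
        }
    }
    return pattern;
}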
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorMatrixFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n/** Transforms the colors of an image by applying a matrix to them\n */\n@interface GPUImageColorMatrixFilter : GPUImageFilter\n{\n    GLint colorMatrixUniform;\n    GLint intensityUniform;\n}\n\n/** A 4x4 matrix used to transform each color in an image\n */\n@property(readwrite, nonatomic) GPUMatrix4x4 colorMatrix;\n\n/** The degree to which the new transformed color replaces the original color for each pixel\n */\n@property(readwrite, nonatomic) CGFloat intensity;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorMatrixFilter.m",
    "content": "#import \"GPUImageColorMatrixFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageColorMatrixFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform lowp mat4 colorMatrix;\n uniform lowp float intensity;\n \n void main()\n {\n     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     lowp vec4 outputColor = textureColor * colorMatrix;\n     \n     gl_FragColor = (intensity * outputColor) + ((1.0 - intensity) * textureColor);\n }\n);\n#else\nNSString *const kGPUImageColorMatrixFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform mat4 colorMatrix;\n uniform float intensity;\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     vec4 outputColor = textureColor * colorMatrix;\n     \n     gl_FragColor = (intensity * outputColor) + ((1.0 - intensity) * textureColor);\n }\n);\n#endif\n\n@implementation GPUImageColorMatrixFilter\n\n@synthesize intensity = _intensity;\n@synthesize colorMatrix = _colorMatrix;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageColorMatrixFragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    colorMatrixUniform = [filterProgram uniformIndex:@\"colorMatrix\"];\n    intensityUniform = [filterProgram uniformIndex:@\"intensity\"];\n    \n    self.intensity = 1.f;\n    self.colorMatrix = (GPUMatrix4x4){\n        {1.f, 0.f, 0.f, 0.f},\n        {0.f, 1.f, 0.f, 0.f},\n        {0.f, 0.f, 1.f, 0.f},\n        {0.f, 0.f, 0.f, 1.f}\n    };\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setIntensity:(CGFloat)newIntensity;\n{\n    _intensity = newIntensity;\n    \n    [self setFloat:_intensity forUniform:intensityUniform program:filterProgram];\n}\n\n- (void)setColorMatrix:(GPUMatrix4x4)newColorMatrix;\n{\n    _colorMatrix = newColorMatrix;\n    \n    [self setMatrix4f:_colorMatrix forUniform:colorMatrixUniform program:filterProgram];\n}\n\n@end\n"
  },
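As a concrete example of the colorMatrix and intensity properties above, the hedged sketch below swaps the red and blue channels and blends the result halfway with the original. The matrix values and helper name are illustrative, not library defaults.

#import "GPUImageColorMatrixFilter.h"

// Illustrative configuration: swap red and blue, applied at 50% strength.
static GPUImageColorMatrixFilter *MakeRedBlueSwapFilter(void)
{
    GPUImageColorMatrixFilter *filter = [[GPUImageColorMatrixFilter alloc] init];
    filter.colorMatrix = (GPUMatrix4x4){
        {0.f, 0.f, 1.f, 0.f},   // output red takes the input blue channel
        {0.f, 1.f, 0.f, 0.f},   // green passes through
        {1.f, 0.f, 0.f, 0.f},   // output blue takes the input red channel
        {0.f, 0.f, 0.f, 1.f}    // alpha unchanged
    };
    filter.intensity = 0.5;     // blend 50/50 with the untransformed color
    return filter;
}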
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorPackingFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageColorPackingFilter : GPUImageFilter\n{\n    GLint texelWidthUniform, texelHeightUniform;\n    \n    CGFloat texelWidth, texelHeight;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorPackingFilter.m",
    "content": "#import \"GPUImageColorPackingFilter.h\"\n\nNSString *const kGPUImageColorPackingVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n \n uniform float texelWidth;\n uniform float texelHeight;\n \n varying vec2 upperLeftInputTextureCoordinate;\n varying vec2 upperRightInputTextureCoordinate;\n varying vec2 lowerLeftInputTextureCoordinate;\n varying vec2 lowerRightInputTextureCoordinate;\n \n void main()\n {\n     gl_Position = position;\n     \n     upperLeftInputTextureCoordinate = inputTextureCoordinate.xy + vec2(-texelWidth, -texelHeight);\n     upperRightInputTextureCoordinate = inputTextureCoordinate.xy + vec2(texelWidth, -texelHeight);\n     lowerLeftInputTextureCoordinate = inputTextureCoordinate.xy + vec2(-texelWidth, texelHeight);\n     lowerRightInputTextureCoordinate = inputTextureCoordinate.xy + vec2(texelWidth, texelHeight);\n }\n);\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageColorPackingFragmentShaderString = SHADER_STRING\n(\n precision lowp float;\n \n uniform sampler2D inputImageTexture;\n \n uniform mediump mat3 convolutionMatrix;\n \n varying highp vec2 outputTextureCoordinate;\n \n varying highp vec2 upperLeftInputTextureCoordinate;\n varying highp vec2 upperRightInputTextureCoordinate;\n varying highp vec2 lowerLeftInputTextureCoordinate;\n varying highp vec2 lowerRightInputTextureCoordinate;\n \n void main()\n {\n     float upperLeftIntensity = texture2D(inputImageTexture, upperLeftInputTextureCoordinate).r;\n     float upperRightIntensity = texture2D(inputImageTexture, upperRightInputTextureCoordinate).r;\n     float lowerLeftIntensity = texture2D(inputImageTexture, lowerLeftInputTextureCoordinate).r;\n     float lowerRightIntensity = texture2D(inputImageTexture, lowerRightInputTextureCoordinate).r;\n     \n     gl_FragColor = vec4(upperLeftIntensity, upperRightIntensity, lowerLeftIntensity, lowerRightIntensity);\n }\n);\n#else\nNSString *const kGPUImageColorPackingFragmentShaderString = SHADER_STRING\n(\n uniform sampler2D inputImageTexture;\n \n uniform mat3 convolutionMatrix;\n \n varying vec2 outputTextureCoordinate;\n \n varying vec2 upperLeftInputTextureCoordinate;\n varying vec2 upperRightInputTextureCoordinate;\n varying vec2 lowerLeftInputTextureCoordinate;\n varying vec2 lowerRightInputTextureCoordinate;\n \n void main()\n {\n     float upperLeftIntensity = texture2D(inputImageTexture, upperLeftInputTextureCoordinate).r;\n     float upperRightIntensity = texture2D(inputImageTexture, upperRightInputTextureCoordinate).r;\n     float lowerLeftIntensity = texture2D(inputImageTexture, lowerLeftInputTextureCoordinate).r;\n     float lowerRightIntensity = texture2D(inputImageTexture, lowerRightInputTextureCoordinate).r;\n     \n     gl_FragColor = vec4(upperLeftIntensity, upperRightIntensity, lowerLeftIntensity, lowerRightIntensity);\n }\n);\n#endif\n\n@implementation GPUImageColorPackingFilter\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithVertexShaderFromString:kGPUImageColorPackingVertexShaderString fragmentShaderFromString:kGPUImageColorPackingFragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    texelWidthUniform = [filterProgram uniformIndex:@\"texelWidth\"];\n    texelHeightUniform = [filterProgram uniformIndex:@\"texelHeight\"];\n    \n    return self;\n}\n\n- (void)setupFilterForSize:(CGSize)filterFrameSize;\n{\n    texelWidth = 0.5 / inputTextureSize.width;\n    texelHeight 
= 0.5 / inputTextureSize.height;\n\n    runSynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext setActiveShaderProgram:filterProgram];\n        glUniform1f(texelWidthUniform, texelWidth);\n        glUniform1f(texelHeightUniform, texelHeight);\n    });\n}\n\n#pragma mark -\n#pragma mark Managing the display FBOs\n\n- (CGSize)sizeOfFBO;\n{\n    CGSize outputSize = [self maximumOutputSize];\n    if ( (CGSizeEqualToSize(outputSize, CGSizeZero)) || (inputTextureSize.width < outputSize.width) )\n    {\n        CGSize quarterSize;\n        quarterSize.width = inputTextureSize.width / 2.0;\n        quarterSize.height = inputTextureSize.height / 2.0;\n        return quarterSize;\n    }\n    else\n    {\n        return outputSize;\n    }\n}\n\n#pragma mark -\n#pragma mark Rendering\n\n- (CGSize)outputFrameSize;\n{\n    CGSize quarterSize;\n    quarterSize.width = inputTextureSize.width / 2.0;\n    quarterSize.height = inputTextureSize.height / 2.0;\n    return quarterSize;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColourFASTFeatureDetector.h",
    "content": "#import \"GPUImageFilterGroup.h\"\n\n// This generates image-wide feature descriptors using the ColourFAST process, as developed and described in\n//\n// A. Ensor and S. Hall. ColourFAST: GPU-based feature point detection and tracking on mobile devices. 28th International Conference of Image and Vision Computing, New Zealand, 2013, p. 124-129.\n//\n// Seth Hall, \"GPU accelerated feature algorithms for mobile devices\", PhD thesis, School of Computing and Mathematical Sciences, Auckland University of Technology 2014.\n// http://aut.researchgateway.ac.nz/handle/10292/7991\n\n@class GPUImageColourFASTSamplingOperation;\n@class GPUImageBoxBlurFilter;\n\n@interface GPUImageColourFASTFeatureDetector : GPUImageFilterGroup\n{\n    GPUImageBoxBlurFilter *blurFilter;\n    GPUImageColourFASTSamplingOperation *colourFASTSamplingOperation;\n}\n// The blur radius of the underlying box blur. The default is 3.0.\n@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColourFASTFeatureDetector.m",
    "content": "#import \"GPUImageColourFASTFeatureDetector.h\"\n#import \"GPUImageColourFASTSamplingOperation.h\"\n#import \"GPUImageBoxBlurFilter.h\"\n\n@implementation GPUImageColourFASTFeatureDetector\n\n@synthesize blurRadiusInPixels;\n\n- (id)init;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n    // First pass: apply a variable Gaussian blur\n    blurFilter = [[GPUImageBoxBlurFilter alloc] init];\n    [self addFilter:blurFilter];\n        \n    // Second pass: combine the blurred image with the original sharp one\n    colourFASTSamplingOperation = [[GPUImageColourFASTSamplingOperation alloc] init];\n    [self addFilter:colourFASTSamplingOperation];\n    \n    // Texture location 0 needs to be the sharp image for both the blur and the second stage processing\n    [blurFilter addTarget:colourFASTSamplingOperation atTextureLocation:1];\n    \n    self.initialFilters = [NSArray arrayWithObjects:blurFilter, colourFASTSamplingOperation, nil];\n    self.terminalFilter = colourFASTSamplingOperation;\n    \n    self.blurRadiusInPixels = 3.0;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setBlurRadiusInPixels:(CGFloat)newValue;\n{\n    blurFilter.blurRadiusInPixels = newValue;\n}\n\n- (CGFloat)blurRadiusInPixels;\n{\n    return blurFilter.blurRadiusInPixels;\n}\n\n@end"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColourFASTSamplingOperation.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n// This is the feature extraction phase of the ColourFAST feature detector, as described in:\n//\n// A. Ensor and S. Hall. ColourFAST: GPU-based feature point detection and tracking on mobile devices. 28th International Conference of Image and Vision Computing, New Zealand, 2013, p. 124-129.\n//\n// Seth Hall, \"GPU accelerated feature algorithms for mobile devices\", PhD thesis, School of Computing and Mathematical Sciences, Auckland University of Technology 2014.\n// http://aut.researchgateway.ac.nz/handle/10292/7991\n\n@interface GPUImageColourFASTSamplingOperation : GPUImageTwoInputFilter\n{\n    GLint texelWidthUniform, texelHeightUniform;\n    \n    CGFloat texelWidth, texelHeight;\n    BOOL hasOverriddenImageSizeFactor;\n}\n\n// The texel width and height determines how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects.\n@property(readwrite, nonatomic) CGFloat texelWidth;\n@property(readwrite, nonatomic) CGFloat texelHeight;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColourFASTSamplingOperation.m",
    "content": "#import \"GPUImageColourFASTSamplingOperation.h\"\n\nNSString *const kGPUImageColourFASTSamplingVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n attribute vec4 inputTextureCoordinate2;\n \n uniform float texelWidth;\n uniform float texelHeight;\n \n varying vec2 textureCoordinate;\n varying vec2 pointATextureCoordinate;\n varying vec2 pointBTextureCoordinate;\n varying vec2 pointCTextureCoordinate;\n varying vec2 pointDTextureCoordinate;\n varying vec2 pointETextureCoordinate;\n varying vec2 pointFTextureCoordinate;\n varying vec2 pointGTextureCoordinate;\n varying vec2 pointHTextureCoordinate;\n\n void main()\n {\n     gl_Position = position;\n     \n     float tripleTexelWidth = 3.0 * texelWidth;\n     float tripleTexelHeight = 3.0 * texelHeight;\n     \n     textureCoordinate = inputTextureCoordinate.xy;\n     \n     pointATextureCoordinate = vec2(textureCoordinate.x + tripleTexelWidth, textureCoordinate.y + texelHeight);\n     pointBTextureCoordinate = vec2(textureCoordinate.x + texelWidth, textureCoordinate.y + tripleTexelHeight);\n     pointCTextureCoordinate = vec2(textureCoordinate.x - texelWidth, textureCoordinate.y + tripleTexelHeight);\n     pointDTextureCoordinate = vec2(textureCoordinate.x - tripleTexelWidth, textureCoordinate.y + texelHeight);\n     pointETextureCoordinate = vec2(textureCoordinate.x - tripleTexelWidth, textureCoordinate.y - texelHeight);\n     pointFTextureCoordinate = vec2(textureCoordinate.x - texelWidth, textureCoordinate.y - tripleTexelHeight);\n     pointGTextureCoordinate = vec2(textureCoordinate.x + texelWidth, textureCoordinate.y - tripleTexelHeight);\n     pointHTextureCoordinate = vec2(textureCoordinate.x + tripleTexelWidth, textureCoordinate.y - texelHeight);\n }\n);\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageColourFASTSamplingFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n varying vec2 textureCoordinate;\n varying vec2 pointATextureCoordinate;\n varying vec2 pointBTextureCoordinate;\n varying vec2 pointCTextureCoordinate;\n varying vec2 pointDTextureCoordinate;\n varying vec2 pointETextureCoordinate;\n varying vec2 pointFTextureCoordinate;\n varying vec2 pointGTextureCoordinate;\n varying vec2 pointHTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n const float PITwo = 6.2832;\n const float PI = 3.1416;\n void main()\n {\n     vec3 centerColor = texture2D(inputImageTexture, textureCoordinate).rgb;\n     \n     vec3 pointAColor = texture2D(inputImageTexture, pointATextureCoordinate).rgb;\n     vec3 pointBColor = texture2D(inputImageTexture, pointBTextureCoordinate).rgb;\n     vec3 pointCColor = texture2D(inputImageTexture, pointCTextureCoordinate).rgb;\n     vec3 pointDColor = texture2D(inputImageTexture, pointDTextureCoordinate).rgb;\n     vec3 pointEColor = texture2D(inputImageTexture, pointETextureCoordinate).rgb;\n     vec3 pointFColor = texture2D(inputImageTexture, pointFTextureCoordinate).rgb;\n     vec3 pointGColor = texture2D(inputImageTexture, pointGTextureCoordinate).rgb;\n     vec3 pointHColor = texture2D(inputImageTexture, pointHTextureCoordinate).rgb;\n\n     vec3 colorComparison = ((pointAColor + pointBColor + pointCColor + pointDColor + pointEColor + pointFColor + pointGColor + pointHColor) * 0.125) - centerColor;\n\n     // Direction calculation drawn from Appendix B of Seth Hall's Ph.D. 
thesis\n     \n     vec3 dirX = (pointAColor*0.94868) + (pointBColor*0.316227) - (pointCColor*0.316227) - (pointDColor*0.94868) - (pointEColor*0.94868) - (pointFColor*0.316227) + (pointGColor*0.316227) + (pointHColor*0.94868);\n     vec3 dirY = (pointAColor*0.316227) + (pointBColor*0.94868) + (pointCColor*0.94868) + (pointDColor*0.316227) - (pointEColor*0.316227) - (pointFColor*0.94868) - (pointGColor*0.94868) - (pointHColor*0.316227);\n     vec3 absoluteDifference = abs(colorComparison);\n     float componentLength = length(colorComparison);\n     float avgX = dot(absoluteDifference, dirX) / componentLength;\n     float avgY = dot(absoluteDifference, dirY) / componentLength;\n     float angle = atan(avgY, avgX);\n     \n     vec3 normalizedColorComparison = (colorComparison + 1.0) * 0.5;\n     \n     gl_FragColor = vec4(normalizedColorComparison, (angle+PI)/PITwo);\n }\n);\n#else\nNSString *const kGPUImageColourFASTSamplingFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 pointATextureCoordinate;\n varying vec2 pointBTextureCoordinate;\n varying vec2 pointCTextureCoordinate;\n varying vec2 pointDTextureCoordinate;\n varying vec2 pointETextureCoordinate;\n varying vec2 pointFTextureCoordinate;\n varying vec2 pointGTextureCoordinate;\n varying vec2 pointHTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n const float PITwo = 6.2832;\n const float PI = 3.1416;\n void main()\n {\n     vec3 centerColor = texture2D(inputImageTexture, textureCoordinate).rgb;\n     \n     vec3 pointAColor = texture2D(inputImageTexture, pointATextureCoordinate).rgb;\n     vec3 pointBColor = texture2D(inputImageTexture, pointBTextureCoordinate).rgb;\n     vec3 pointCColor = texture2D(inputImageTexture, pointCTextureCoordinate).rgb;\n     vec3 pointDColor = texture2D(inputImageTexture, pointDTextureCoordinate).rgb;\n     vec3 pointEColor = texture2D(inputImageTexture, pointETextureCoordinate).rgb;\n     vec3 pointFColor = texture2D(inputImageTexture, pointFTextureCoordinate).rgb;\n     vec3 pointGColor = texture2D(inputImageTexture, pointGTextureCoordinate).rgb;\n     vec3 pointHColor = texture2D(inputImageTexture, pointHTextureCoordinate).rgb;\n     \n     vec3 colorComparison = ((pointAColor + pointBColor + pointCColor + pointDColor + pointEColor + pointFColor + pointGColor + pointHColor) * 0.125) - centerColor;\n     \n     // Direction calculation drawn from Appendix B of Seth Hall's Ph.D. 
thesis\n     \n     vec3 dirX = (pointAColor*0.94868) + (pointBColor*0.316227) - (pointCColor*0.316227) - (pointDColor*0.94868) - (pointEColor*0.94868) - (pointFColor*0.316227) + (pointGColor*0.316227) + (pointHColor*0.94868);\n     vec3 dirY = (pointAColor*0.316227) + (pointBColor*0.94868) + (pointCColor*0.94868) + (pointDColor*0.316227) - (pointEColor*0.316227) - (pointFColor*0.94868) - (pointGColor*0.94868) - (pointHColor*0.316227);\n     vec3 absoluteDifference = abs(colorComparison);\n     float componentLength = length(colorComparison);\n     float avgX = dot(absoluteDifference, dirX) / componentLength;\n     float avgY = dot(absoluteDifference, dirY) / componentLength;\n     float angle = atan(avgY, avgX);\n     \n     vec3 normalizedColorComparison = (colorComparison + 1.0) * 0.5;\n     \n     gl_FragColor = vec4(normalizedColorComparison, (angle+PI)/PITwo);\n }\n);\n#endif\n\n\n@implementation GPUImageColourFASTSamplingOperation\n\n@synthesize texelWidth = _texelWidth;\n@synthesize texelHeight = _texelHeight;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;\n{\n    if (!(self = [super initWithVertexShaderFromString:kGPUImageColourFASTSamplingVertexShaderString fragmentShaderFromString:kGPUImageColourFASTSamplingFragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    texelWidthUniform = [filterProgram uniformIndex:@\"texelWidth\"];\n    texelHeightUniform = [filterProgram uniformIndex:@\"texelHeight\"];\n    \n    return self;\n}\n\n- (void)setupFilterForSize:(CGSize)filterFrameSize;\n{\n    if (!hasOverriddenImageSizeFactor)\n    {\n        _texelWidth = 1.0 / filterFrameSize.width;\n        _texelHeight = 1.0 / filterFrameSize.height;\n        \n        runSynchronouslyOnVideoProcessingQueue(^{\n            [GPUImageContext setActiveShaderProgram:filterProgram];\n            if (GPUImageRotationSwapsWidthAndHeight(inputRotation))\n            {\n                glUniform1f(texelWidthUniform, _texelHeight);\n                glUniform1f(texelHeightUniform, _texelWidth);\n            }\n            else\n            {\n                glUniform1f(texelWidthUniform, _texelWidth);\n                glUniform1f(texelHeightUniform, _texelHeight);\n            }\n        });\n    }\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setTexelWidth:(CGFloat)newValue;\n{\n    hasOverriddenImageSizeFactor = YES;\n    _texelWidth = newValue;\n    \n    [self setFloat:_texelWidth forUniform:texelWidthUniform program:filterProgram];\n}\n\n- (void)setTexelHeight:(CGFloat)newValue;\n{\n    hasOverriddenImageSizeFactor = YES;\n    _texelHeight = newValue;\n    \n    [self setFloat:_texelHeight forUniform:texelHeightUniform program:filterProgram];\n}\n\n@end"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageContrastFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n/** Adjusts the contrast of the image\n */\n@interface GPUImageContrastFilter : GPUImageFilter\n{\n    GLint contrastUniform;\n}\n\n/** Contrast ranges from 0.0 to 4.0 (max contrast), with 1.0 as the normal level\n */\n@property(readwrite, nonatomic) CGFloat contrast; \n\n@end\n"
  },
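// --- Editor's note (not part of the original sources): a minimal usage sketch for
// GPUImageContrastFilter, assuming GPUImagePicture and the standard GPUImage still-image
// capture API; the method name is hypothetical and would live in an application class.
#import "GPUImageContrastFilter.h"

- (UIImage *)contrastAdjustedVersionOf:(UIImage *)inputImage
{
    GPUImagePicture *source = [[GPUImagePicture alloc] initWithImage:inputImage];
    GPUImageContrastFilter *contrastFilter = [[GPUImageContrastFilter alloc] init];
    contrastFilter.contrast = 1.5; // 1.0 leaves the image unchanged; 4.0 is maximum contrast

    [source addTarget:contrastFilter];
    [contrastFilter useNextFrameForImageCapture];
    [source processImage];
    return [contrastFilter imageFromCurrentFramebuffer];
}
// --- End editor's note.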
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageContrastFilter.m",
    "content": "#import \"GPUImageContrastFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageContrastFragmentShaderString = SHADER_STRING\n( \n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform lowp float contrast;\n \n void main()\n {\n     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     \n     gl_FragColor = vec4(((textureColor.rgb - vec3(0.5)) * contrast + vec3(0.5)), textureColor.w);\n }\n);\n#else\nNSString *const kGPUImageContrastFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform float contrast;\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     \n     gl_FragColor = vec4(((textureColor.rgb - vec3(0.5)) * contrast + vec3(0.5)), textureColor.w);\n }\n );\n#endif\n\n@implementation GPUImageContrastFilter\n\n@synthesize contrast = _contrast;\n\n#pragma mark -\n#pragma mark Initialization\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageContrastFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    contrastUniform = [filterProgram uniformIndex:@\"contrast\"];\n    self.contrast = 1.0;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setContrast:(CGFloat)newValue;\n{\n    _contrast = newValue;\n    \n    [self setFloat:_contrast forUniform:contrastUniform program:filterProgram];\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageCropFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageCropFilter : GPUImageFilter\n{\n    GLfloat cropTextureCoordinates[8];\n}\n\n// The crop region is the rectangle within the image to crop. It is normalized to a coordinate space from 0.0 to 1.0, with 0.0, 0.0 being the upper left corner of the image\n@property(readwrite, nonatomic) CGRect cropRegion;\n\n// Initialization and teardown\n- (id)initWithCropRegion:(CGRect)newCropRegion;\n\n@end\n"
  },
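// --- Editor's note (not part of the original sources): a minimal usage sketch for
// GPUImageCropFilter. The crop region is normalized, so the rectangle below keeps the centre
// half of the image in each dimension. GPUImagePicture and the standard capture API are
// assumed; the method name is hypothetical.
#import "GPUImageCropFilter.h"

- (UIImage *)centreCropOf:(UIImage *)inputImage
{
    GPUImageCropFilter *cropFilter =
        [[GPUImageCropFilter alloc] initWithCropRegion:CGRectMake(0.25, 0.25, 0.5, 0.5)];

    GPUImagePicture *source = [[GPUImagePicture alloc] initWithImage:inputImage];
    [source addTarget:cropFilter];
    [cropFilter useNextFrameForImageCapture];
    [source processImage];
    return [cropFilter imageFromCurrentFramebuffer];
}
// --- End editor's note.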
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageCropFilter.m",
    "content": "#import \"GPUImageCropFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageCropFragmentShaderString =  SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     gl_FragColor = texture2D(inputImageTexture, textureCoordinate);\n }\n);\n#else\nNSString *const kGPUImageCropFragmentShaderString =  SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     gl_FragColor = texture2D(inputImageTexture, textureCoordinate);\n }\n);\n#endif\n\n@interface GPUImageCropFilter ()\n\n- (void)calculateCropTextureCoordinates;\n\n@end\n\n@interface GPUImageCropFilter()\n{\n    CGSize originallySuppliedInputSize;\n}\n\n@end\n\n@implementation GPUImageCropFilter\n\n@synthesize cropRegion = _cropRegion;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithCropRegion:(CGRect)newCropRegion;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageCropFragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    self.cropRegion = newCropRegion;\n\n    return self;\n}\n\n- (id)init;\n{\n    if (!(self = [self initWithCropRegion:CGRectMake(0.0, 0.0, 1.0, 1.0)]))\n    {\n        return nil;\n    }\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Rendering\n\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n{\n    if (self.preventRendering)\n    {\n        return;\n    }\n    \n//    if (overrideInputSize)\n//    {\n//        if (CGSizeEqualToSize(forcedMaximumSize, CGSizeZero))\n//        {\n//            return;\n//        }\n//        else\n//        {\n//            CGRect insetRect = AVMakeRectWithAspectRatioInsideRect(newSize, CGRectMake(0.0, 0.0, forcedMaximumSize.width, forcedMaximumSize.height));\n//            inputTextureSize = insetRect.size;\n//            return;\n//        }\n//    }\n    \n    CGSize rotatedSize = [self rotatedSize:newSize forIndex:textureIndex];\n    originallySuppliedInputSize = rotatedSize;\n\n    CGSize scaledSize;\n    scaledSize.width = rotatedSize.width * _cropRegion.size.width;\n    scaledSize.height = rotatedSize.height * _cropRegion.size.height;\n\n    \n    if (CGSizeEqualToSize(scaledSize, CGSizeZero))\n    {\n        inputTextureSize = scaledSize;\n    }\n    else if (!CGSizeEqualToSize(inputTextureSize, scaledSize))\n    {\n        inputTextureSize = scaledSize;\n    }\n}\n\n#pragma mark -\n#pragma mark GPUImageInput\n\n- (void)calculateCropTextureCoordinates;\n{\n    CGFloat minX = _cropRegion.origin.x;\n    CGFloat minY = _cropRegion.origin.y;\n    CGFloat maxX = CGRectGetMaxX(_cropRegion);\n    CGFloat maxY = CGRectGetMaxY(_cropRegion);\n    \n    switch(inputRotation)\n    {\n        case kGPUImageNoRotation: // Works\n        {\n            cropTextureCoordinates[0] = minX; // 0,0\n            cropTextureCoordinates[1] = minY;\n            \n            cropTextureCoordinates[2] = maxX; // 1,0\n            cropTextureCoordinates[3] = minY;\n\n            cropTextureCoordinates[4] = minX; // 0,1\n            cropTextureCoordinates[5] = maxY;\n\n            cropTextureCoordinates[6] = maxX; // 1,1\n            cropTextureCoordinates[7] = maxY;\n        }; break;\n        case kGPUImageRotateLeft: // Fixed\n        {\n            cropTextureCoordinates[0] = maxY; // 1,0\n            cropTextureCoordinates[1] = 1.0 - maxX;\n\n            cropTextureCoordinates[2] = maxY; // 1,1\n            cropTextureCoordinates[3] = 
1.0 - minX;\n\n            cropTextureCoordinates[4] = minY; // 0,0\n            cropTextureCoordinates[5] = 1.0 - maxX;\n\n            cropTextureCoordinates[6] = minY; // 0,1\n            cropTextureCoordinates[7] = 1.0 - minX;\n        }; break;\n        case kGPUImageRotateRight: // Fixed\n        {\n            cropTextureCoordinates[0] = minY; // 0,1\n            cropTextureCoordinates[1] = 1.0 - minX;\n\n            cropTextureCoordinates[2] = minY; // 0,0\n            cropTextureCoordinates[3] = 1.0 - maxX;\n            \n            cropTextureCoordinates[4] = maxY; // 1,1\n            cropTextureCoordinates[5] = 1.0 - minX;\n\n            cropTextureCoordinates[6] = maxY; // 1,0\n            cropTextureCoordinates[7] = 1.0 - maxX;\n        }; break;\n        case kGPUImageFlipVertical: // Works for me\n        {\n            cropTextureCoordinates[0] = minX; // 0,1\n            cropTextureCoordinates[1] = maxY;\n\n            cropTextureCoordinates[2] = maxX; // 1,1\n            cropTextureCoordinates[3] = maxY;\n\n            cropTextureCoordinates[4] = minX; // 0,0\n            cropTextureCoordinates[5] = minY;\n            \n            cropTextureCoordinates[6] = maxX; // 1,0\n            cropTextureCoordinates[7] = minY;\n        }; break;\n        case kGPUImageFlipHorizonal: // Works for me\n        {\n            cropTextureCoordinates[0] = maxX; // 1,0\n            cropTextureCoordinates[1] = minY;\n\n            cropTextureCoordinates[2] = minX; // 0,0\n            cropTextureCoordinates[3] = minY;\n            \n            cropTextureCoordinates[4] = maxX; // 1,1\n            cropTextureCoordinates[5] = maxY;\n            \n            cropTextureCoordinates[6] = minX; // 0,1\n            cropTextureCoordinates[7] = maxY;\n        }; break;\n        case kGPUImageRotate180: // Fixed\n        {\n            cropTextureCoordinates[0] = maxX; // 1,1\n            cropTextureCoordinates[1] = maxY;\n\n            cropTextureCoordinates[2] = minX; // 0,1\n            cropTextureCoordinates[3] = maxY;\n\n            cropTextureCoordinates[4] = maxX; // 1,0\n            cropTextureCoordinates[5] = minY;\n\n            cropTextureCoordinates[6] = minX; // 0,0\n            cropTextureCoordinates[7] = minY;\n        }; break;\n        case kGPUImageRotateRightFlipVertical: // Fixed\n        {\n            cropTextureCoordinates[0] = minY; // 0,0\n            cropTextureCoordinates[1] = 1.0 - maxX;\n            \n            cropTextureCoordinates[2] = minY; // 0,1\n            cropTextureCoordinates[3] = 1.0 - minX;\n\n            cropTextureCoordinates[4] = maxY; // 1,0\n            cropTextureCoordinates[5] = 1.0 - maxX;\n            \n            cropTextureCoordinates[6] = maxY; // 1,1\n            cropTextureCoordinates[7] = 1.0 - minX;\n        }; break;\n        case kGPUImageRotateRightFlipHorizontal: // Fixed\n        {\n            cropTextureCoordinates[0] = maxY; // 1,1\n            cropTextureCoordinates[1] = 1.0 - minX;\n\n            cropTextureCoordinates[2] = maxY; // 1,0\n            cropTextureCoordinates[3] = 1.0 - maxX;\n\n            cropTextureCoordinates[4] = minY; // 0,1\n            cropTextureCoordinates[5] = 1.0 - minX;\n\n            cropTextureCoordinates[6] = minY; // 0,0\n            cropTextureCoordinates[7] = 1.0 - maxX;\n        }; break;\n    }\n}\n\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;\n{\n    static const GLfloat cropSquareVertices[] = {\n        -1.0f, -1.0f,\n        1.0f, -1.0f,\n        -1.0f,  
1.0f,\n        1.0f,  1.0f,\n    };\n    \n    [self renderToTextureWithVertices:cropSquareVertices textureCoordinates:cropTextureCoordinates];\n\n    [self informTargetsAboutNewFrameAtTime:frameTime];\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setCropRegion:(CGRect)newValue;\n{\n    NSParameterAssert(newValue.origin.x >= 0 && newValue.origin.x <= 1 &&\n                      newValue.origin.y >= 0 && newValue.origin.y <= 1 &&\n                      newValue.size.width >= 0 && newValue.size.width <= 1 &&\n                      newValue.size.height >= 0 && newValue.size.height <= 1);\n\n    _cropRegion = newValue;\n    [self calculateCropTextureCoordinates];\n}\n\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n    [super setInputRotation:newInputRotation atIndex:textureIndex];\n    [self calculateCropTextureCoordinates];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageCrosshairGenerator.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageCrosshairGenerator : GPUImageFilter\n{\n    GLint crosshairWidthUniform, crosshairColorUniform;\n}\n\n// The width of the displayed crosshairs, in pixels. Currently this only works well for odd widths. The default is 5.\n@property(readwrite, nonatomic) CGFloat crosshairWidth;\n\n// The color of the crosshairs is specified using individual red, green, and blue components (normalized to 1.0). The default is green: (0.0, 1.0, 0.0).\n- (void)setCrosshairColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;\n\n// Rendering\n- (void)renderCrosshairsFromArray:(GLfloat *)crosshairCoordinates count:(NSUInteger)numberOfCrosshairs frameTime:(CMTime)frameTime;\n\n@end\n"
  },
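// --- Editor's note (not part of the original sources): a minimal sketch of driving the
// crosshair generator directly. In practice it is fed from a feature-detector callback and its
// output is blended over the source video; forceProcessingAtSize: and kCMTimeIndefinite are
// standard GPUImage / Core Media APIs, the coordinates are arbitrary examples, and the method
// name is hypothetical.
#import "GPUImageCrosshairGenerator.h"

- (void)configureCrosshairGeneratorExample
{
    GPUImageCrosshairGenerator *crosshairGenerator = [[GPUImageCrosshairGenerator alloc] init];
    crosshairGenerator.crosshairWidth = 15.0; // odd widths render best
    [crosshairGenerator setCrosshairColorRed:1.0 green:0.0 blue:0.0];
    [crosshairGenerator forceProcessingAtSize:CGSizeMake(640.0, 480.0)];

    // Two crosshairs, given as normalized (x, y) pairs in the 0.0 - 1.0 range.
    GLfloat crosshairCoordinates[4] = {0.25f, 0.25f, 0.75f, 0.75f};
    [crosshairGenerator renderCrosshairsFromArray:crosshairCoordinates count:2 frameTime:kCMTimeIndefinite];
}
// --- End editor's note.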
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageCrosshairGenerator.m",
    "content": "#import \"GPUImageCrosshairGenerator.h\"\n\nNSString *const kGPUImageCrosshairVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n \n uniform float crosshairWidth;\n \n varying vec2 centerLocation;\n varying float pointSpacing;\n \n void main()\n {\n     gl_Position = vec4(((position.xy * 2.0) - 1.0), 0.0, 1.0);\n     gl_PointSize = crosshairWidth + 1.0;\n     pointSpacing = 1.0 / crosshairWidth;\n     centerLocation = vec2(pointSpacing * ceil(crosshairWidth / 2.0), pointSpacing * ceil(crosshairWidth / 2.0));\n }\n);\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageCrosshairFragmentShaderString = SHADER_STRING\n(\n uniform lowp vec3 crosshairColor;\n\n varying highp vec2 centerLocation;\n varying highp float pointSpacing;\n\n void main()\n {\n     lowp vec2 distanceFromCenter = abs(centerLocation - gl_PointCoord.xy);\n     lowp float axisTest = step(pointSpacing, gl_PointCoord.y) * step(distanceFromCenter.x, 0.09) + step(pointSpacing, gl_PointCoord.x) * step(distanceFromCenter.y, 0.09);\n\n     gl_FragColor = vec4(crosshairColor * axisTest, axisTest);\n//     gl_FragColor = vec4(distanceFromCenterInX, distanceFromCenterInY, 0.0, 1.0);\n }\n);\n#else\nNSString *const kGPUImageCrosshairFragmentShaderString = SHADER_STRING\n(\n GPUImageEscapedHashIdentifier(version 120)\\n\n \n uniform vec3 crosshairColor;\n \n varying vec2 centerLocation;\n varying float pointSpacing;\n \n void main()\n {\n     vec2 distanceFromCenter = abs(centerLocation - gl_PointCoord.xy);\n     float axisTest = step(pointSpacing, gl_PointCoord.y) * step(distanceFromCenter.x, 0.09) + step(pointSpacing, gl_PointCoord.x) * step(distanceFromCenter.y, 0.09);\n     \n     gl_FragColor = vec4(crosshairColor * axisTest, axisTest);\n     //     gl_FragColor = vec4(distanceFromCenterInX, distanceFromCenterInY, 0.0, 1.0);\n }\n);\n#endif\n\n@implementation GPUImageCrosshairGenerator\n\n@synthesize crosshairWidth = _crosshairWidth;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithVertexShaderFromString:kGPUImageCrosshairVertexShaderString fragmentShaderFromString:kGPUImageCrosshairFragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    runSynchronouslyOnVideoProcessingQueue(^{\n        crosshairWidthUniform = [filterProgram uniformIndex:@\"crosshairWidth\"];\n        crosshairColorUniform = [filterProgram uniformIndex:@\"crosshairColor\"];\n        \n        self.crosshairWidth = 5.0;\n        [self setCrosshairColorRed:0.0 green:1.0 blue:0.0];\n    });\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Rendering\n\n- (void)renderCrosshairsFromArray:(GLfloat *)crosshairCoordinates count:(NSUInteger)numberOfCrosshairs frameTime:(CMTime)frameTime;\n{\n    if (self.preventRendering)\n    {\n        return;\n    }\n    \n    runSynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext setActiveShaderProgram:filterProgram];\n        \n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n#else\n        glEnable(GL_POINT_SPRITE);\n        glEnable(GL_VERTEX_PROGRAM_POINT_SIZE);\n#endif\n        \n        outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];\n        [outputFramebuffer activateFramebuffer];\n        \n        glClearColor(0.0, 0.0, 0.0, 0.0);\n        glClear(GL_COLOR_BUFFER_BIT);\n        \n        glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, 
crosshairCoordinates);\n        \n        glDrawArrays(GL_POINTS, 0, (GLsizei)numberOfCrosshairs);\n        \n        [self informTargetsAboutNewFrameAtTime:frameTime];\n    });\n}\n\n- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;\n{\n    // Prevent rendering of the frame by normal means\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setCrosshairWidth:(CGFloat)newValue;\n{\n    _crosshairWidth = newValue;\n    \n    [self setFloat:_crosshairWidth forUniform:crosshairWidthUniform program:filterProgram];\n}\n\n- (void)setCrosshairColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;\n{\n    GPUVector3 crosshairColor = {redComponent, greenComponent, blueComponent};\n    \n    [self setVec3:crosshairColor forUniform:crosshairColorUniform program:filterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageCrosshatchFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageCrosshatchFilter : GPUImageFilter\n{\n    GLint crossHatchSpacingUniform, lineWidthUniform;\n}\n// The fractional width of the image to use as the spacing for the crosshatch. The default is 0.03.\n@property(readwrite, nonatomic) CGFloat crossHatchSpacing;\n\n// A relative width for the crosshatch lines. The default is 0.003.\n@property(readwrite, nonatomic) CGFloat lineWidth;\n\n@end\n"
  },
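// --- Editor's note (not part of the original sources): a minimal usage sketch for
// GPUImageCrosshatchFilter, assuming GPUImagePicture and the standard capture API; the method
// name is hypothetical.
#import "GPUImageCrosshatchFilter.h"

- (UIImage *)crosshatchedVersionOf:(UIImage *)inputImage
{
    GPUImageCrosshatchFilter *crosshatchFilter = [[GPUImageCrosshatchFilter alloc] init];
    crosshatchFilter.crossHatchSpacing = 0.03; // fraction of the image width between hatch lines
    crosshatchFilter.lineWidth = 0.003;        // relative width of each hatch line

    GPUImagePicture *source = [[GPUImagePicture alloc] initWithImage:inputImage];
    [source addTarget:crosshatchFilter];
    [crosshatchFilter useNextFrameForImageCapture];
    [source processImage];
    return [crosshatchFilter imageFromCurrentFramebuffer];
}
// --- End editor's note.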
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageCrosshatchFilter.m",
    "content": "#import \"GPUImageCrosshatchFilter.h\"\n\n// Shader code based on http://machinesdontcare.wordpress.com/\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageCrosshatchFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n\n uniform highp float crossHatchSpacing;\n uniform highp float lineWidth;\n \n const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n\n void main()\n {\n     highp float luminance = dot(texture2D(inputImageTexture, textureCoordinate).rgb, W);\n     \n     lowp vec4 colorToDisplay = vec4(1.0, 1.0, 1.0, 1.0);\n     if (luminance < 1.00) \n     {\n         if (mod(textureCoordinate.x + textureCoordinate.y, crossHatchSpacing) <= lineWidth) \n         {\n             colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);\n         }\n     }\n     if (luminance < 0.75) \n     {\n         if (mod(textureCoordinate.x - textureCoordinate.y, crossHatchSpacing) <= lineWidth) \n         {\n             colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);\n         }\n     }\n     if (luminance < 0.50) \n     {\n         if (mod(textureCoordinate.x + textureCoordinate.y - (crossHatchSpacing / 2.0), crossHatchSpacing) <= lineWidth) \n         {\n             colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);\n         }\n     }\n     if (luminance < 0.3) \n     {\n         if (mod(textureCoordinate.x - textureCoordinate.y - (crossHatchSpacing / 2.0), crossHatchSpacing) <= lineWidth) \n         {\n             colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);\n         }\n     }\n\n     gl_FragColor = colorToDisplay;\n }\n);\n#else\nNSString *const kGPUImageCrosshatchFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform float crossHatchSpacing;\n uniform float lineWidth;\n \n const vec3 W = vec3(0.2125, 0.7154, 0.0721);\n \n void main()\n {\n     float luminance = dot(texture2D(inputImageTexture, textureCoordinate).rgb, W);\n     \n     vec4 colorToDisplay = vec4(1.0, 1.0, 1.0, 1.0);\n     if (luminance < 1.00)\n     {\n         if (mod(textureCoordinate.x + textureCoordinate.y, crossHatchSpacing) <= lineWidth)\n         {\n             colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);\n         }\n     }\n     if (luminance < 0.75)\n     {\n         if (mod(textureCoordinate.x - textureCoordinate.y, crossHatchSpacing) <= lineWidth)\n         {\n             colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);\n         }\n     }\n     if (luminance < 0.50)\n     {\n         if (mod(textureCoordinate.x + textureCoordinate.y - (crossHatchSpacing / 2.0), crossHatchSpacing) <= lineWidth)\n         {\n             colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);\n         }\n     }\n     if (luminance < 0.3)\n     {\n         if (mod(textureCoordinate.x - textureCoordinate.y - (crossHatchSpacing / 2.0), crossHatchSpacing) <= lineWidth)\n         {\n             colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);\n         }\n     }\n     \n     gl_FragColor = colorToDisplay;\n }\n);\n#endif\n\n@implementation GPUImageCrosshatchFilter\n\n@synthesize crossHatchSpacing = _crossHatchSpacing;\n@synthesize lineWidth = _lineWidth;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageCrosshatchFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    crossHatchSpacingUniform = [filterProgram uniformIndex:@\"crossHatchSpacing\"];\n    lineWidthUniform = [filterProgram 
uniformIndex:@\"lineWidth\"];\n    \n    self.crossHatchSpacing = 0.03;\n    self.lineWidth = 0.003;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setCrossHatchSpacing:(CGFloat)newValue;\n{\n    CGFloat singlePixelSpacing;\n    if (inputTextureSize.width != 0.0)\n    {\n        singlePixelSpacing = 1.0 / inputTextureSize.width;\n    }\n    else\n    {\n        singlePixelSpacing = 1.0 / 2048.0;\n    }\n    \n    if (newValue < singlePixelSpacing)\n    {\n        _crossHatchSpacing = singlePixelSpacing;\n    }\n    else\n    {\n        _crossHatchSpacing = newValue;\n    }\n    \n    [self setFloat:_crossHatchSpacing forUniform:crossHatchSpacingUniform program:filterProgram];\n}\n\n- (void)setLineWidth:(CGFloat)newValue;\n{\n    _lineWidth = newValue;\n    \n    [self setFloat:_lineWidth forUniform:lineWidthUniform program:filterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDarkenBlendFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageDarkenBlendFilter : GPUImageTwoInputFilter\n{\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDarkenBlendFilter.m",
    "content": "#import \"GPUImageDarkenBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageDarkenBlendFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n    lowp vec4 base = texture2D(inputImageTexture, textureCoordinate);\n    lowp vec4 overlayer = texture2D(inputImageTexture2, textureCoordinate2);\n    \n    gl_FragColor = vec4(min(overlayer.rgb * base.a, base.rgb * overlayer.a) + overlayer.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlayer.a), 1.0);\n }\n);\n#else\nNSString *const kGPUImageDarkenBlendFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     vec4 base = texture2D(inputImageTexture, textureCoordinate);\n     vec4 overlayer = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     gl_FragColor = vec4(min(overlayer.rgb * base.a, base.rgb * overlayer.a) + overlayer.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlayer.a), 1.0);\n }\n );\n#endif\n\n@implementation GPUImageDarkenBlendFilter\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageDarkenBlendFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDifferenceBlendFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageDifferenceBlendFilter : GPUImageTwoInputFilter\n{\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDifferenceBlendFilter.m",
    "content": "#import \"GPUImageDifferenceBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageDifferenceBlendFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     mediump vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);\n     gl_FragColor = vec4(abs(textureColor2.rgb - textureColor.rgb), textureColor.a);\n }\n);\n#else\nNSString *const kGPUImageDifferenceBlendFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);\n     gl_FragColor = vec4(abs(textureColor2.rgb - textureColor.rgb), textureColor.a);\n }\n);\n#endif\n\n@implementation GPUImageDifferenceBlendFilter\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageDifferenceBlendFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDilationFilter.h",
    "content": "#import \"GPUImageTwoPassTextureSamplingFilter.h\"\n\n// For each pixel, this sets it to the maximum value of the red channel in a rectangular neighborhood extending out dilationRadius pixels from the center.\n// This extends out bright features, and is most commonly used with black-and-white thresholded images.\n\nextern NSString *const kGPUImageDilationRadiusOneVertexShaderString;\nextern NSString *const kGPUImageDilationRadiusTwoVertexShaderString;\nextern NSString *const kGPUImageDilationRadiusThreeVertexShaderString;\nextern NSString *const kGPUImageDilationRadiusFourVertexShaderString;\n\n@interface GPUImageDilationFilter : GPUImageTwoPassTextureSamplingFilter\n\n// Acceptable values for dilationRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.\n- (id)initWithRadius:(NSUInteger)dilationRadius;\n\n@end\n"
  },
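// --- Editor's note (not part of the original sources): a minimal usage sketch for
// GPUImageDilationFilter. Dilation is normally run on a thresholded black-and-white image, so
// the sketch chains a threshold stage first; GPUImageLuminanceThresholdFilter and
// GPUImagePicture are assumed to be available elsewhere in this library, and the method name
// is hypothetical.
#import "GPUImageDilationFilter.h"

- (UIImage *)dilatedThresholdImageFrom:(UIImage *)inputImage
{
    GPUImageLuminanceThresholdFilter *thresholdFilter = [[GPUImageLuminanceThresholdFilter alloc] init];
    GPUImageDilationFilter *dilationFilter = [[GPUImageDilationFilter alloc] initWithRadius:2]; // radius 1-4

    GPUImagePicture *source = [[GPUImagePicture alloc] initWithImage:inputImage];
    [source addTarget:thresholdFilter];
    [thresholdFilter addTarget:dilationFilter];
    [dilationFilter useNextFrameForImageCapture];
    [source processImage];
    return [dilationFilter imageFromCurrentFramebuffer];
}
// --- End editor's note.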
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDilationFilter.m",
    "content": "#import \"GPUImageDilationFilter.h\"\n\n@implementation GPUImageDilationFilter\n\nNSString *const kGPUImageDilationRadiusOneVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n attribute vec2 inputTextureCoordinate;\n \n uniform float texelWidthOffset; \n uniform float texelHeightOffset; \n \n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n \n void main()\n {\n     gl_Position = position;\n     \n     vec2 offset = vec2(texelWidthOffset, texelHeightOffset);\n     \n     centerTextureCoordinate = inputTextureCoordinate;\n     oneStepNegativeTextureCoordinate = inputTextureCoordinate - offset;\n     oneStepPositiveTextureCoordinate = inputTextureCoordinate + offset;\n }\n);\n\nNSString *const kGPUImageDilationRadiusTwoVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n attribute vec2 inputTextureCoordinate;\n \n uniform float texelWidthOffset;\n uniform float texelHeightOffset;\n \n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n \n void main()\n {\n     gl_Position = position;\n     \n     vec2 offset = vec2(texelWidthOffset, texelHeightOffset);\n     \n     centerTextureCoordinate = inputTextureCoordinate;\n     oneStepNegativeTextureCoordinate = inputTextureCoordinate - offset;\n     oneStepPositiveTextureCoordinate = inputTextureCoordinate + offset;\n     twoStepsNegativeTextureCoordinate = inputTextureCoordinate - (offset * 2.0);\n     twoStepsPositiveTextureCoordinate = inputTextureCoordinate + (offset * 2.0);\n }\n);\n\nNSString *const kGPUImageDilationRadiusThreeVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n attribute vec2 inputTextureCoordinate;\n \n uniform float texelWidthOffset;\n uniform float texelHeightOffset;\n \n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n varying vec2 threeStepsPositiveTextureCoordinate;\n varying vec2 threeStepsNegativeTextureCoordinate;\n \n void main()\n {\n     gl_Position = position;\n     \n     vec2 offset = vec2(texelWidthOffset, texelHeightOffset);\n     \n     centerTextureCoordinate = inputTextureCoordinate;\n     oneStepNegativeTextureCoordinate = inputTextureCoordinate - offset;\n     oneStepPositiveTextureCoordinate = inputTextureCoordinate + offset;\n     twoStepsNegativeTextureCoordinate = inputTextureCoordinate - (offset * 2.0);\n     twoStepsPositiveTextureCoordinate = inputTextureCoordinate + (offset * 2.0);\n     threeStepsNegativeTextureCoordinate = inputTextureCoordinate - (offset * 3.0);\n     threeStepsPositiveTextureCoordinate = inputTextureCoordinate + (offset * 3.0);\n }\n);\n\nNSString *const kGPUImageDilationRadiusFourVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n attribute vec2 inputTextureCoordinate;\n \n uniform float texelWidthOffset;\n uniform float texelHeightOffset;\n \n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n varying vec2 threeStepsPositiveTextureCoordinate;\n varying vec2 
threeStepsNegativeTextureCoordinate;\n varying vec2 fourStepsPositiveTextureCoordinate;\n varying vec2 fourStepsNegativeTextureCoordinate;\n \n void main()\n {\n     gl_Position = position;\n     \n     vec2 offset = vec2(texelWidthOffset, texelHeightOffset);\n     \n     centerTextureCoordinate = inputTextureCoordinate;\n     oneStepNegativeTextureCoordinate = inputTextureCoordinate - offset;\n     oneStepPositiveTextureCoordinate = inputTextureCoordinate + offset;\n     twoStepsNegativeTextureCoordinate = inputTextureCoordinate - (offset * 2.0);\n     twoStepsPositiveTextureCoordinate = inputTextureCoordinate + (offset * 2.0);\n     threeStepsNegativeTextureCoordinate = inputTextureCoordinate - (offset * 3.0);\n     threeStepsPositiveTextureCoordinate = inputTextureCoordinate + (offset * 3.0);\n     fourStepsNegativeTextureCoordinate = inputTextureCoordinate - (offset * 4.0);\n     fourStepsPositiveTextureCoordinate = inputTextureCoordinate + (offset * 4.0);\n }\n);\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageDilationRadiusOneFragmentShaderString = SHADER_STRING\n(\n precision lowp float;\n \n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n  \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;\n     float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;\n     float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;\n          \n     lowp float maxValue = max(centerIntensity, oneStepPositiveIntensity);\n     maxValue = max(maxValue, oneStepNegativeIntensity);\n\n     gl_FragColor = vec4(vec3(maxValue), 1.0);\n }\n);\n\nNSString *const kGPUImageDilationRadiusTwoFragmentShaderString = SHADER_STRING\n(\n precision lowp float;\n \n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n\n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;\n     float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;\n     float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;\n     float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;\n     float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;\n     \n     lowp float maxValue = max(centerIntensity, oneStepPositiveIntensity);\n     maxValue = max(maxValue, oneStepNegativeIntensity);\n     maxValue = max(maxValue, twoStepsPositiveIntensity);\n     maxValue = max(maxValue, twoStepsNegativeIntensity);\n     \n     gl_FragColor = vec4(vec3(maxValue), 1.0);\n }\n);\n\nNSString *const kGPUImageDilationRadiusThreeFragmentShaderString = SHADER_STRING\n(\n precision lowp float;\n \n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n varying vec2 threeStepsPositiveTextureCoordinate;\n varying vec2 threeStepsNegativeTextureCoordinate;\n\n 
uniform sampler2D inputImageTexture;\n \n void main()\n {\n     float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;\n     float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;\n     float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;\n     float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;\n     float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;\n     float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r;\n     float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r;\n     \n     lowp float maxValue = max(centerIntensity, oneStepPositiveIntensity);\n     maxValue = max(maxValue, oneStepNegativeIntensity);\n     maxValue = max(maxValue, twoStepsPositiveIntensity);\n     maxValue = max(maxValue, twoStepsNegativeIntensity);\n     maxValue = max(maxValue, threeStepsPositiveIntensity);\n     maxValue = max(maxValue, threeStepsNegativeIntensity);\n     \n     gl_FragColor = vec4(vec3(maxValue), 1.0);\n }\n);\n\nNSString *const kGPUImageDilationRadiusFourFragmentShaderString = SHADER_STRING\n(\n precision lowp float;\n \n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n varying vec2 threeStepsPositiveTextureCoordinate;\n varying vec2 threeStepsNegativeTextureCoordinate;\n varying vec2 fourStepsPositiveTextureCoordinate;\n varying vec2 fourStepsNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;\n     float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;\n     float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;\n     float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;\n     float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;\n     float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r;\n     float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r;\n     float fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate).r;\n     float fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate).r;\n     \n     lowp float maxValue = max(centerIntensity, oneStepPositiveIntensity);\n     maxValue = max(maxValue, oneStepNegativeIntensity);\n     maxValue = max(maxValue, twoStepsPositiveIntensity);\n     maxValue = max(maxValue, twoStepsNegativeIntensity);\n     maxValue = max(maxValue, threeStepsPositiveIntensity);\n     maxValue = max(maxValue, threeStepsNegativeIntensity);\n     maxValue = max(maxValue, fourStepsPositiveIntensity);\n     maxValue = max(maxValue, fourStepsNegativeIntensity);\n     \n     gl_FragColor = vec4(vec3(maxValue), 1.0);\n }\n);\n#else\nNSString *const kGPUImageDilationRadiusOneFragmentShaderString = SHADER_STRING\n(\n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n 
varying vec2 oneStepNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;\n     float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;\n     float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;\n     \n     float maxValue = max(centerIntensity, oneStepPositiveIntensity);\n     maxValue = max(maxValue, oneStepNegativeIntensity);\n     \n     gl_FragColor = vec4(vec3(maxValue), 1.0);\n }\n);\n\nNSString *const kGPUImageDilationRadiusTwoFragmentShaderString = SHADER_STRING\n(\n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;\n     float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;\n     float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;\n     float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;\n     float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;\n     \n     float maxValue = max(centerIntensity, oneStepPositiveIntensity);\n     maxValue = max(maxValue, oneStepNegativeIntensity);\n     maxValue = max(maxValue, twoStepsPositiveIntensity);\n     maxValue = max(maxValue, twoStepsNegativeIntensity);\n     \n     gl_FragColor = vec4(vec3(maxValue), 1.0);\n }\n);\n\nNSString *const kGPUImageDilationRadiusThreeFragmentShaderString = SHADER_STRING\n(\n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n varying vec2 threeStepsPositiveTextureCoordinate;\n varying vec2 threeStepsNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;\n     float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;\n     float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;\n     float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;\n     float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;\n     float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r;\n     float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r;\n     \n     float maxValue = max(centerIntensity, oneStepPositiveIntensity);\n     maxValue = max(maxValue, oneStepNegativeIntensity);\n     maxValue = max(maxValue, twoStepsPositiveIntensity);\n     maxValue = max(maxValue, twoStepsNegativeIntensity);\n     maxValue = max(maxValue, threeStepsPositiveIntensity);\n     maxValue = max(maxValue, threeStepsNegativeIntensity);\n     \n     gl_FragColor = vec4(vec3(maxValue), 1.0);\n }\n);\n\nNSString *const kGPUImageDilationRadiusFourFragmentShaderString = 
SHADER_STRING\n(\n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n varying vec2 threeStepsPositiveTextureCoordinate;\n varying vec2 threeStepsNegativeTextureCoordinate;\n varying vec2 fourStepsPositiveTextureCoordinate;\n varying vec2 fourStepsNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;\n     float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;\n     float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;\n     float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;\n     float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;\n     float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r;\n     float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r;\n     float fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate).r;\n     float fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate).r;\n     \n     float maxValue = max(centerIntensity, oneStepPositiveIntensity);\n     maxValue = max(maxValue, oneStepNegativeIntensity);\n     maxValue = max(maxValue, twoStepsPositiveIntensity);\n     maxValue = max(maxValue, twoStepsNegativeIntensity);\n     maxValue = max(maxValue, threeStepsPositiveIntensity);\n     maxValue = max(maxValue, threeStepsNegativeIntensity);\n     maxValue = max(maxValue, fourStepsPositiveIntensity);\n     maxValue = max(maxValue, fourStepsNegativeIntensity);\n     \n     gl_FragColor = vec4(vec3(maxValue), 1.0);\n }\n);\n#endif\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithRadius:(NSUInteger)dilationRadius;\n{    \n    NSString *fragmentShaderForThisRadius = nil;\n    NSString *vertexShaderForThisRadius = nil;\n    \n    switch (dilationRadius)\n    {\n        case 0:\n        case 1:\n        {\n            vertexShaderForThisRadius = kGPUImageDilationRadiusOneVertexShaderString;\n            fragmentShaderForThisRadius = kGPUImageDilationRadiusOneFragmentShaderString;\n        }; break;\n        case 2:\n        {\n            vertexShaderForThisRadius = kGPUImageDilationRadiusTwoVertexShaderString;\n            fragmentShaderForThisRadius = kGPUImageDilationRadiusTwoFragmentShaderString;\n        }; break;\n        case 3:\n        {\n            vertexShaderForThisRadius = kGPUImageDilationRadiusThreeVertexShaderString;\n            fragmentShaderForThisRadius = kGPUImageDilationRadiusThreeFragmentShaderString;\n        }; break;\n        case 4:\n        {\n            vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString;\n            fragmentShaderForThisRadius = kGPUImageDilationRadiusFourFragmentShaderString;\n        }; break;\n        default:\n        {\n            vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString;\n            fragmentShaderForThisRadius = kGPUImageDilationRadiusFourFragmentShaderString;\n        }; break;\n    }\n    \n    if (!(self = [super 
initWithFirstStageVertexShaderFromString:vertexShaderForThisRadius firstStageFragmentShaderFromString:fragmentShaderForThisRadius secondStageVertexShaderFromString:vertexShaderForThisRadius secondStageFragmentShaderFromString:fragmentShaderForThisRadius]))\n    {\n        return nil;\n    }\n    \n    return self;\n}\n\n- (id)init;\n{\n    if (!(self = [self initWithRadius:1]))\n    {\n        return nil;\n    }\n    \n    return self;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDirectionalNonMaximumSuppressionFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageDirectionalNonMaximumSuppressionFilter : GPUImageFilter\n{\n    GLint texelWidthUniform, texelHeightUniform;\n    GLint upperThresholdUniform, lowerThresholdUniform;\n    \n    BOOL hasOverriddenImageSizeFactor;\n}\n\n// The texel width and height determines how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects.\n@property(readwrite, nonatomic) CGFloat texelWidth; \n@property(readwrite, nonatomic) CGFloat texelHeight; \n\n// These thresholds set cutoffs for the intensities that definitely get registered (upper threshold) and those that definitely don't (lower threshold)\n@property(readwrite, nonatomic) CGFloat upperThreshold;\n@property(readwrite, nonatomic) CGFloat lowerThreshold;\n\n@end\n"
  },
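// --- Editor's note (not part of the original sources): this filter expects an input whose red
// channel holds gradient magnitude and whose green/blue channels encode gradient direction, as
// produced by GPUImageDirectionalSobelEdgeDetectionFilter below. A minimal sketch of that
// pairing (normally one stage of a Canny-style edge chain); GPUImagePicture is assumed and the
// method name is hypothetical.
#import "GPUImageDirectionalNonMaximumSuppressionFilter.h"
#import "GPUImageDirectionalSobelEdgeDetectionFilter.h"

- (UIImage *)thinnedEdgesFrom:(UIImage *)inputImage
{
    GPUImageDirectionalSobelEdgeDetectionFilter *sobelFilter =
        [[GPUImageDirectionalSobelEdgeDetectionFilter alloc] init];
    GPUImageDirectionalNonMaximumSuppressionFilter *suppressionFilter =
        [[GPUImageDirectionalNonMaximumSuppressionFilter alloc] init];
    suppressionFilter.upperThreshold = 0.5; // gradients above this always register
    suppressionFilter.lowerThreshold = 0.1; // gradients below this never register

    GPUImagePicture *source = [[GPUImagePicture alloc] initWithImage:inputImage];
    [source addTarget:sobelFilter];
    [sobelFilter addTarget:suppressionFilter];
    [suppressionFilter useNextFrameForImageCapture];
    [source processImage];
    return [suppressionFilter imageFromCurrentFramebuffer];
}
// --- End editor's note.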
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDirectionalNonMaximumSuppressionFilter.m",
    "content": "#import \"GPUImageDirectionalNonMaximumSuppressionFilter.h\"\n\n@implementation GPUImageDirectionalNonMaximumSuppressionFilter\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageDirectionalNonmaximumSuppressionFragmentShaderString = SHADER_STRING\n(\n precision mediump float;\n \n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform highp float texelWidth; \n uniform highp float texelHeight; \n uniform mediump float upperThreshold; \n uniform mediump float lowerThreshold; \n\n void main()\n {\n     vec3 currentGradientAndDirection = texture2D(inputImageTexture, textureCoordinate).rgb;\n     vec2 gradientDirection = ((currentGradientAndDirection.gb * 2.0) - 1.0) * vec2(texelWidth, texelHeight);\n     \n     float firstSampledGradientMagnitude = texture2D(inputImageTexture, textureCoordinate + gradientDirection).r;\n     float secondSampledGradientMagnitude = texture2D(inputImageTexture, textureCoordinate - gradientDirection).r;\n     \n     float multiplier = step(firstSampledGradientMagnitude, currentGradientAndDirection.r);\n     multiplier = multiplier * step(secondSampledGradientMagnitude, currentGradientAndDirection.r);\n     \n     float thresholdCompliance = smoothstep(lowerThreshold, upperThreshold, currentGradientAndDirection.r);\n     multiplier = multiplier * thresholdCompliance;\n     \n     gl_FragColor = vec4(multiplier, multiplier, multiplier, 1.0);\n }\n);\n#else\nNSString *const kGPUImageDirectionalNonmaximumSuppressionFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform float texelWidth;\n uniform float texelHeight;\n uniform float upperThreshold;\n uniform float lowerThreshold;\n \n void main()\n {\n     vec3 currentGradientAndDirection = texture2D(inputImageTexture, textureCoordinate).rgb;\n     vec2 gradientDirection = ((currentGradientAndDirection.gb * 2.0) - 1.0) * vec2(texelWidth, texelHeight);\n     \n     float firstSampledGradientMagnitude = texture2D(inputImageTexture, textureCoordinate + gradientDirection).r;\n     float secondSampledGradientMagnitude = texture2D(inputImageTexture, textureCoordinate - gradientDirection).r;\n     \n     float multiplier = step(firstSampledGradientMagnitude, currentGradientAndDirection.r);\n     multiplier = multiplier * step(secondSampledGradientMagnitude, currentGradientAndDirection.r);\n     \n     float thresholdCompliance = smoothstep(lowerThreshold, upperThreshold, currentGradientAndDirection.r);\n     multiplier = multiplier * thresholdCompliance;\n     \n     gl_FragColor = vec4(multiplier, multiplier, multiplier, 1.0);\n }\n);\n#endif\n\n@synthesize texelWidth = _texelWidth; \n@synthesize texelHeight = _texelHeight; \n@synthesize upperThreshold = _upperThreshold;\n@synthesize lowerThreshold = _lowerThreshold;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageDirectionalNonmaximumSuppressionFragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    texelWidthUniform = [filterProgram uniformIndex:@\"texelWidth\"];\n    texelHeightUniform = [filterProgram uniformIndex:@\"texelHeight\"];\n    upperThresholdUniform = [filterProgram uniformIndex:@\"upperThreshold\"];\n    lowerThresholdUniform = [filterProgram uniformIndex:@\"lowerThreshold\"];\n    \n    self.upperThreshold = 0.5;\n    self.lowerThreshold = 0.1;\n    \n    return self;\n}\n\n- 
(void)setupFilterForSize:(CGSize)filterFrameSize;\n{\n    if (!hasOverriddenImageSizeFactor)\n    {\n        _texelWidth = 1.0 / filterFrameSize.width;\n        _texelHeight = 1.0 / filterFrameSize.height;\n        \n        runSynchronouslyOnVideoProcessingQueue(^{\n            [GPUImageContext setActiveShaderProgram:filterProgram];\n            glUniform1f(texelWidthUniform, _texelWidth);\n            glUniform1f(texelHeightUniform, _texelHeight);\n        });\n    }\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setTexelWidth:(CGFloat)newValue;\n{\n    hasOverriddenImageSizeFactor = YES;\n    _texelWidth = newValue;\n    \n    [self setFloat:_texelWidth forUniform:texelWidthUniform program:filterProgram];\n}\n\n- (void)setTexelHeight:(CGFloat)newValue;\n{\n    hasOverriddenImageSizeFactor = YES;\n    _texelHeight = newValue;\n    \n    [self setFloat:_texelHeight forUniform:texelHeightUniform program:filterProgram];\n}\n\n- (void)setLowerThreshold:(CGFloat)newValue;\n{\n    _lowerThreshold = newValue;\n    \n    [self setFloat:_lowerThreshold forUniform:lowerThresholdUniform program:filterProgram];\n}\n\n- (void)setUpperThreshold:(CGFloat)newValue;\n{\n    _upperThreshold = newValue;\n\n    [self setFloat:_upperThreshold forUniform:upperThresholdUniform program:filterProgram];\n}\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDirectionalSobelEdgeDetectionFilter.h",
    "content": "#import \"GPUImage3x3TextureSamplingFilter.h\"\n\n@interface GPUImageDirectionalSobelEdgeDetectionFilter : GPUImage3x3TextureSamplingFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDirectionalSobelEdgeDetectionFilter.m",
    "content": "#import \"GPUImageDirectionalSobelEdgeDetectionFilter.h\"\n\n@implementation GPUImageDirectionalSobelEdgeDetectionFilter\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageDirectionalSobelEdgeDetectionFragmentShaderString = SHADER_STRING\n(\n precision mediump float;\n \n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;\n     float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;\n     float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;\n     float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;\n     float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;\n     float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;\n     float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;\n     float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;\n     \n     vec2 gradientDirection;\n     gradientDirection.x = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;\n     gradientDirection.y = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;\n     \n     float gradientMagnitude = length(gradientDirection);\n     vec2 normalizedDirection = normalize(gradientDirection);\n     normalizedDirection = sign(normalizedDirection) * floor(abs(normalizedDirection) + 0.617316); // Offset by 1-sin(pi/8) to set to 0 if near axis, 1 if away\n     normalizedDirection = (normalizedDirection + 1.0) * 0.5; // Place -1.0 - 1.0 within 0 - 1.0\n     \n     gl_FragColor = vec4(gradientMagnitude, normalizedDirection.x, normalizedDirection.y, 1.0);\n }\n);\n#else\nNSString *const kGPUImageDirectionalSobelEdgeDetectionFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;\n     float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;\n     float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;\n     float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;\n     float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;\n     float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;\n     float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;\n     float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;\n     \n     
vec2 gradientDirection;\n     gradientDirection.x = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;\n     gradientDirection.y = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;\n     \n     float gradientMagnitude = length(gradientDirection);\n     vec2 normalizedDirection = normalize(gradientDirection);\n     normalizedDirection = sign(normalizedDirection) * floor(abs(normalizedDirection) + 0.617316); // Offset by 1-sin(pi/8) to set to 0 if near axis, 1 if away\n     normalizedDirection = (normalizedDirection + 1.0) * 0.5; // Place -1.0 - 1.0 within 0 - 1.0\n     \n     gl_FragColor = vec4(gradientMagnitude, normalizedDirection.x, normalizedDirection.y, 1.0);\n }\n);\n#endif\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageDirectionalSobelEdgeDetectionFragmentShaderString]))\n    {\n        return nil;\n    }\n\n    return self;\n}\n\n\n@end\n"
  },
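  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Examples/GPUImageDirectionalEdgeThinningExample.m",
    "content": "// Illustrative usage sketch added for documentation; it is not part of the original GPUImage sources and this Examples path is hypothetical.\n// It shows how the directional Sobel filter feeds the directional non-maximum suppression filter: the Sobel pass writes gradient magnitude\n// into the red channel and the encoded gradient direction into green/blue, which is exactly the layout the suppression shader samples.\n// GPUImagePicture and UIImage are assumed to be available from the wider (iOS) framework; a grayscale pass would normally precede the Sobel stage.\n\n#import <UIKit/UIKit.h>\n#import \"GPUImagePicture.h\"\n#import \"GPUImageDirectionalSobelEdgeDetectionFilter.h\"\n#import \"GPUImageDirectionalNonMaximumSuppressionFilter.h\"\n\nstatic void buildThinEdgePipeline(UIImage *inputImage)\n{\n    GPUImagePicture *source = [[GPUImagePicture alloc] initWithImage:inputImage];\n    \n    // Pass 1: per-pixel gradient magnitude (r) and encoded direction (gb)\n    GPUImageDirectionalSobelEdgeDetectionFilter *sobelFilter = [[GPUImageDirectionalSobelEdgeDetectionFilter alloc] init];\n    \n    // Pass 2: keep only local maxima along the gradient direction, gated by hysteresis-style thresholds\n    GPUImageDirectionalNonMaximumSuppressionFilter *suppressionFilter = [[GPUImageDirectionalNonMaximumSuppressionFilter alloc] init];\n    suppressionFilter.upperThreshold = 0.4; // gradients above this always survive\n    suppressionFilter.lowerThreshold = 0.1; // gradients below this are always discarded\n    \n    [source addTarget:sobelFilter];\n    [sobelFilter addTarget:suppressionFilter];\n    [source processImage];\n}\n"
  },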
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDissolveBlendFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageDissolveBlendFilter : GPUImageTwoInputFilter\n{\n    GLint mixUniform;\n}\n\n// Mix ranges from 0.0 (only image 1) to 1.0 (only image 2), with 0.5 (half of either) as the normal level\n@property(readwrite, nonatomic) CGFloat mix; \n\n@end\n"
  },
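  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Examples/GPUImageDissolveBlendExample.m",
    "content": "// Illustrative usage sketch added for documentation; it is not part of the original sources and this Examples path is hypothetical.\n// It demonstrates the two-input wiring for the dissolve blend and the mix property documented in the header above:\n// 0.0 shows only the first input, 1.0 only the second. GPUImagePicture and UIImage are assumed from the wider (iOS) framework.\n\n#import <UIKit/UIKit.h>\n#import \"GPUImagePicture.h\"\n#import \"GPUImageDissolveBlendFilter.h\"\n\nstatic void dissolveBlendExample(UIImage *firstImage, UIImage *secondImage)\n{\n    GPUImagePicture *firstSource = [[GPUImagePicture alloc] initWithImage:firstImage];\n    GPUImagePicture *secondSource = [[GPUImagePicture alloc] initWithImage:secondImage];\n    \n    GPUImageDissolveBlendFilter *dissolveFilter = [[GPUImageDissolveBlendFilter alloc] init];\n    dissolveFilter.mix = 0.25; // weighted toward the first image\n    \n    // The order in which the sources are attached determines which one acts as the first input\n    [firstSource addTarget:dissolveFilter];\n    [secondSource addTarget:dissolveFilter];\n    \n    [firstSource processImage];\n    [secondSource processImage];\n}\n"
  },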
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDissolveBlendFilter.m",
    "content": "#import \"GPUImageDissolveBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageDissolveBlendFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n uniform lowp float mixturePercent;\n \n void main()\n {\n    lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n    lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);\n    \n    gl_FragColor = mix(textureColor, textureColor2, mixturePercent);\n }\n);\n#else\nNSString *const kGPUImageDissolveBlendFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n uniform float mixturePercent;\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     gl_FragColor = mix(textureColor, textureColor2, mixturePercent);\n }\n);\n#endif\n\n@implementation GPUImageDissolveBlendFilter\n\n@synthesize mix = _mix;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageDissolveBlendFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    mixUniform = [filterProgram uniformIndex:@\"mixturePercent\"];\n    self.mix = 0.5;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setMix:(CGFloat)newValue;\n{\n    _mix = newValue;\n    \n    [self setFloat:_mix forUniform:mixUniform program:filterProgram];\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDivideBlendFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageDivideBlendFilter : GPUImageTwoInputFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDivideBlendFilter.m",
    "content": "#import \"GPUImageDivideBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageDivideBlendFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n\t mediump vec4 base = texture2D(inputImageTexture, textureCoordinate);\n\t mediump vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);\n   \n   mediump float ra;\n   if (overlay.a == 0.0 || ((base.r / overlay.r) > (base.a / overlay.a)))\n     ra = overlay.a * base.a + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);\n   else\n     ra = (base.r * overlay.a * overlay.a) / overlay.r + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);\n   \n\n   mediump float ga;\n   if (overlay.a == 0.0 || ((base.g / overlay.g) > (base.a / overlay.a)))\n     ga = overlay.a * base.a + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);\n   else\n     ga = (base.g * overlay.a * overlay.a) / overlay.g + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);\n\n   \n   mediump float ba;\n   if (overlay.a == 0.0 || ((base.b / overlay.b) > (base.a / overlay.a)))\n     ba = overlay.a * base.a + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);\n   else\n     ba = (base.b * overlay.a * overlay.a) / overlay.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);\n\n   mediump float a = overlay.a + base.a - overlay.a * base.a;\n   \n\t gl_FragColor = vec4(ra, ga, ba, a);\n }\n);\n#else\nNSString *const kGPUImageDivideBlendFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n\t vec4 base = texture2D(inputImageTexture, textureCoordinate);\n\t vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     float ra;\n     if (overlay.a == 0.0 || ((base.r / overlay.r) > (base.a / overlay.a)))\n         ra = overlay.a * base.a + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);\n     else\n         ra = (base.r * overlay.a * overlay.a) / overlay.r + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);\n     \n     \n     float ga;\n     if (overlay.a == 0.0 || ((base.g / overlay.g) > (base.a / overlay.a)))\n         ga = overlay.a * base.a + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);\n     else\n         ga = (base.g * overlay.a * overlay.a) / overlay.g + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);\n     \n     \n     float ba;\n     if (overlay.a == 0.0 || ((base.b / overlay.b) > (base.a / overlay.a)))\n         ba = overlay.a * base.a + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);\n     else\n         ba = (base.b * overlay.a * overlay.a) / overlay.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);\n     \n     float a = overlay.a + base.a - overlay.a * base.a;\n     \n\t gl_FragColor = vec4(ra, ga, ba, a);\n }\n );\n#endif\n\n@implementation GPUImageDivideBlendFilter\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageDivideBlendFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageEmbossFilter.h",
    "content": "#import \"GPUImage3x3ConvolutionFilter.h\"\n\n@interface GPUImageEmbossFilter : GPUImage3x3ConvolutionFilter\n\n// The strength of the embossing, from  0.0 to 4.0, with 1.0 as the normal level\n@property(readwrite, nonatomic) CGFloat intensity; \n\n@end\n"
  },
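  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Examples/GPUImageEmbossExample.m",
    "content": "// Illustrative configuration sketch added for documentation (hypothetical Examples path, not part of the original sources).\n// The intensity property documented in the header above scales the 3x3 emboss convolution kernel that the implementation below rebuilds on every change.\n\n#import \"GPUImageEmbossFilter.h\"\n\nstatic GPUImageEmbossFilter *makeEmbossFilter(void)\n{\n    GPUImageEmbossFilter *embossFilter = [[GPUImageEmbossFilter alloc] init];\n    embossFilter.intensity = 2.0; // 0.0 to 4.0; 1.0 gives the stock emboss kernel\n    return embossFilter;\n}\n"
  },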
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageEmbossFilter.m",
    "content": "#import \"GPUImageEmbossFilter.h\"\n\n@implementation GPUImageEmbossFilter\n\n@synthesize intensity = _intensity; \n\n- (id)init;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n    self.intensity = 1.0;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setIntensity:(CGFloat)newValue;\n{\n//    [(GPUImage3x3ConvolutionFilter *)filter setConvolutionMatrix:(GPUMatrix3x3){\n//        {-2.0f, -1.0f, 0.0f},\n//        {-1.0f,  1.0f, 1.0f},\n//        { 0.0f,  1.0f, 2.0f}\n//    }];\n    \n    _intensity = newValue;\n    \n    GPUMatrix3x3 newConvolutionMatrix;\n    newConvolutionMatrix.one.one = _intensity * (-2.0);\n    newConvolutionMatrix.one.two = -_intensity;    \n    newConvolutionMatrix.one.three = 0.0f;\n\n    newConvolutionMatrix.two.one = -_intensity;\n    newConvolutionMatrix.two.two = 1.0;    \n    newConvolutionMatrix.two.three = _intensity;\n    \n    newConvolutionMatrix.three.one = 0.0f;\n    newConvolutionMatrix.three.two = _intensity;    \n    newConvolutionMatrix.three.three = _intensity * 2.0;\n\n    self.convolutionKernel = newConvolutionMatrix;\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageErosionFilter.h",
    "content": "#import \"GPUImageTwoPassTextureSamplingFilter.h\"\n\n// For each pixel, this sets it to the minimum value of the red channel in a rectangular neighborhood extending out dilationRadius pixels from the center.\n// This extends out dark features, and is most commonly used with black-and-white thresholded images.\n\n@interface GPUImageErosionFilter : GPUImageTwoPassTextureSamplingFilter\n\n// Acceptable values for erosionRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.\n- (id)initWithRadius:(NSUInteger)erosionRadius;\n\n@end\n"
  },
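  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Examples/GPUImageErosionExample.m",
    "content": "// Illustrative configuration sketch added for documentation (hypothetical Examples path, not part of the original sources).\n// The radius selects one of the shader pairs in the implementation below; values outside 1 to 4 fall back to the radius-four variant, as its switch statement shows.\n\n#import \"GPUImageErosionFilter.h\"\n\nstatic GPUImageErosionFilter *makeErosionFilter(void)\n{\n    // Erosion is typically applied to black-and-white thresholded images to shrink bright regions and extend dark features\n    return [[GPUImageErosionFilter alloc] initWithRadius:2];\n}\n"
  },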
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageErosionFilter.m",
    "content": "#import \"GPUImageErosionFilter.h\"\n#import \"GPUImageDilationFilter.h\"\n\n@implementation GPUImageErosionFilter\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageErosionRadiusOneFragmentShaderString = SHADER_STRING\n(\n precision lowp float;\n \n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;\n     float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;\n     float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;\n     \n     lowp float minValue = min(centerIntensity, oneStepPositiveIntensity);\n     minValue = min(minValue, oneStepNegativeIntensity);\n     \n     gl_FragColor = vec4(vec3(minValue), 1.0);\n }\n);\n\nNSString *const kGPUImageErosionRadiusTwoFragmentShaderString = SHADER_STRING\n(\n precision lowp float;\n \n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;\n     float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;\n     float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;\n     float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;\n     float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;\n     \n     lowp float minValue = min(centerIntensity, oneStepPositiveIntensity);\n     minValue = min(minValue, oneStepNegativeIntensity);\n     minValue = min(minValue, twoStepsPositiveIntensity);\n     minValue = min(minValue, twoStepsNegativeIntensity);\n     \n     gl_FragColor = vec4(vec3(minValue), 1.0);\n }\n);\n\nNSString *const kGPUImageErosionRadiusThreeFragmentShaderString = SHADER_STRING\n(\n precision lowp float;\n \n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n varying vec2 threeStepsPositiveTextureCoordinate;\n varying vec2 threeStepsNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;\n     float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;\n     float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;\n     float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;\n     float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;\n     float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r;\n     float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r;\n     \n     lowp float minValue = min(centerIntensity, 
oneStepPositiveIntensity);\n     minValue = min(minValue, oneStepNegativeIntensity);\n     minValue = min(minValue, twoStepsPositiveIntensity);\n     minValue = min(minValue, twoStepsNegativeIntensity);\n     minValue = min(minValue, threeStepsPositiveIntensity);\n     minValue = min(minValue, threeStepsNegativeIntensity);\n     \n     gl_FragColor = vec4(vec3(minValue), 1.0);\n }\n);\n\nNSString *const kGPUImageErosionRadiusFourFragmentShaderString = SHADER_STRING\n(\n precision lowp float;\n \n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n varying vec2 threeStepsPositiveTextureCoordinate;\n varying vec2 threeStepsNegativeTextureCoordinate;\n varying vec2 fourStepsPositiveTextureCoordinate;\n varying vec2 fourStepsNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;\n     float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;\n     float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;\n     float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;\n     float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;\n     float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r;\n     float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r;\n     float fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate).r;\n     float fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate).r;\n     \n     lowp float minValue = min(centerIntensity, oneStepPositiveIntensity);\n     minValue = min(minValue, oneStepNegativeIntensity);\n     minValue = min(minValue, twoStepsPositiveIntensity);\n     minValue = min(minValue, twoStepsNegativeIntensity);\n     minValue = min(minValue, threeStepsPositiveIntensity);\n     minValue = min(minValue, threeStepsNegativeIntensity);\n     minValue = min(minValue, fourStepsPositiveIntensity);\n     minValue = min(minValue, fourStepsNegativeIntensity);\n     \n     gl_FragColor = vec4(vec3(minValue), 1.0);\n }\n);\n#else\nNSString *const kGPUImageErosionRadiusOneFragmentShaderString = SHADER_STRING\n(\n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;\n     float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;\n     float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;\n     \n     float minValue = min(centerIntensity, oneStepPositiveIntensity);\n     minValue = min(minValue, oneStepNegativeIntensity);\n     \n     gl_FragColor = vec4(vec3(minValue), 1.0);\n }\n);\n\nNSString *const kGPUImageErosionRadiusTwoFragmentShaderString = SHADER_STRING\n(\n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 
twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;\n     float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;\n     float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;\n     float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;\n     float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;\n     \n     float minValue = min(centerIntensity, oneStepPositiveIntensity);\n     minValue = min(minValue, oneStepNegativeIntensity);\n     minValue = min(minValue, twoStepsPositiveIntensity);\n     minValue = min(minValue, twoStepsNegativeIntensity);\n     \n     gl_FragColor = vec4(vec3(minValue), 1.0);\n }\n);\n\nNSString *const kGPUImageErosionRadiusThreeFragmentShaderString = SHADER_STRING\n(\n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n varying vec2 threeStepsPositiveTextureCoordinate;\n varying vec2 threeStepsNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;\n     float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;\n     float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;\n     float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;\n     float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;\n     float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r;\n     float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r;\n     \n     float minValue = min(centerIntensity, oneStepPositiveIntensity);\n     minValue = min(minValue, oneStepNegativeIntensity);\n     minValue = min(minValue, twoStepsPositiveIntensity);\n     minValue = min(minValue, twoStepsNegativeIntensity);\n     minValue = min(minValue, threeStepsPositiveIntensity);\n     minValue = min(minValue, threeStepsNegativeIntensity);\n     \n     gl_FragColor = vec4(vec3(minValue), 1.0);\n }\n);\n\nNSString *const kGPUImageErosionRadiusFourFragmentShaderString = SHADER_STRING\n(\n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n varying vec2 threeStepsPositiveTextureCoordinate;\n varying vec2 threeStepsNegativeTextureCoordinate;\n varying vec2 fourStepsPositiveTextureCoordinate;\n varying vec2 fourStepsNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;\n     float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;\n     float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;\n     float 
twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;\n     float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;\n     float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r;\n     float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r;\n     float fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate).r;\n     float fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate).r;\n     \n     float minValue = min(centerIntensity, oneStepPositiveIntensity);\n     minValue = min(minValue, oneStepNegativeIntensity);\n     minValue = min(minValue, twoStepsPositiveIntensity);\n     minValue = min(minValue, twoStepsNegativeIntensity);\n     minValue = min(minValue, threeStepsPositiveIntensity);\n     minValue = min(minValue, threeStepsNegativeIntensity);\n     minValue = min(minValue, fourStepsPositiveIntensity);\n     minValue = min(minValue, fourStepsNegativeIntensity);\n     \n     gl_FragColor = vec4(vec3(minValue), 1.0);\n }\n);\n#endif\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithRadius:(NSUInteger)dilationRadius;\n{    \n    NSString *fragmentShaderForThisRadius = nil;\n    NSString *vertexShaderForThisRadius = nil;\n    \n    switch (dilationRadius)\n    {\n        case 0:\n        case 1:\n        {\n            vertexShaderForThisRadius = kGPUImageDilationRadiusOneVertexShaderString;\n            fragmentShaderForThisRadius = kGPUImageErosionRadiusOneFragmentShaderString;\n        }; break;\n        case 2:\n        {\n            vertexShaderForThisRadius = kGPUImageDilationRadiusTwoVertexShaderString;\n            fragmentShaderForThisRadius = kGPUImageErosionRadiusTwoFragmentShaderString;\n        }; break;\n        case 3:\n        {\n            vertexShaderForThisRadius = kGPUImageDilationRadiusThreeVertexShaderString;\n            fragmentShaderForThisRadius = kGPUImageErosionRadiusThreeFragmentShaderString;\n        }; break;\n        case 4:\n        {\n            vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString;\n            fragmentShaderForThisRadius = kGPUImageErosionRadiusFourFragmentShaderString;\n        }; break;\n        default:\n        {\n            vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString;\n            fragmentShaderForThisRadius = kGPUImageErosionRadiusFourFragmentShaderString;\n        }; break;\n    }\n    \n    if (!(self = [super initWithFirstStageVertexShaderFromString:vertexShaderForThisRadius firstStageFragmentShaderFromString:fragmentShaderForThisRadius secondStageVertexShaderFromString:vertexShaderForThisRadius secondStageFragmentShaderFromString:fragmentShaderForThisRadius]))\n    {\n        return nil;\n    }\n    \n    return self;\n}\n\n- (id)init;\n{\n    if (!(self = [self initWithRadius:1]))\n    {\n        return nil;\n    }\n    \n    return self;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageExclusionBlendFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageExclusionBlendFilter : GPUImageTwoInputFilter\n{\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageExclusionBlendFilter.m",
    "content": "#import \"GPUImageExclusionBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageExclusionBlendFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     mediump vec4 base = texture2D(inputImageTexture, textureCoordinate);\n     mediump vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     //     Dca = (Sca.Da + Dca.Sa - 2.Sca.Dca) + Sca.(1 - Da) + Dca.(1 - Sa)\n     \n     gl_FragColor = vec4((overlay.rgb * base.a + base.rgb * overlay.a - 2.0 * overlay.rgb * base.rgb) + overlay.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlay.a), base.a);\n }\n);\n#else\nNSString *const kGPUImageExclusionBlendFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     vec4 base = texture2D(inputImageTexture, textureCoordinate);\n     vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     //     Dca = (Sca.Da + Dca.Sa - 2.Sca.Dca) + Sca.(1 - Da) + Dca.(1 - Sa)\n     \n     gl_FragColor = vec4((overlay.rgb * base.a + base.rgb * overlay.a - 2.0 * overlay.rgb * base.rgb) + overlay.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlay.a), base.a);\n }\n );\n#endif\n\n@implementation GPUImageExclusionBlendFilter\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageExclusionBlendFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageExposureFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageExposureFilter : GPUImageFilter\n{\n    GLint exposureUniform;\n}\n\n// Exposure ranges from -10.0 to 10.0, with 0.0 as the normal level\n@property(readwrite, nonatomic) CGFloat exposure; \n\n@end\n"
  },
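  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Examples/GPUImageExposureExample.m",
    "content": "// Illustrative configuration sketch added for documentation (hypothetical Examples path, not part of the original sources).\n// Exposure is expressed in stops: the fragment shader in the implementation below multiplies the RGB channels by pow(2.0, exposure).\n\n#import \"GPUImageExposureFilter.h\"\n\nstatic GPUImageExposureFilter *makeExposureFilter(void)\n{\n    GPUImageExposureFilter *exposureFilter = [[GPUImageExposureFilter alloc] init];\n    exposureFilter.exposure = 1.0; // one stop brighter; valid range is -10.0 to 10.0, with 0.0 leaving the image unchanged\n    return exposureFilter;\n}\n"
  },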
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageExposureFilter.m",
    "content": "#import \"GPUImageExposureFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageExposureFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform highp float exposure;\n \n void main()\n {\n     highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     \n     gl_FragColor = vec4(textureColor.rgb * pow(2.0, exposure), textureColor.w);\n }\n);\n#else\nNSString *const kGPUImageExposureFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform float exposure;\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     \n     gl_FragColor = vec4(textureColor.rgb * pow(2.0, exposure), textureColor.w);\n }\n);\n#endif\n\n@implementation GPUImageExposureFilter\n\n@synthesize exposure = _exposure;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageExposureFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    exposureUniform = [filterProgram uniformIndex:@\"exposure\"];\n    self.exposure = 0.0;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setExposure:(CGFloat)newValue;\n{\n    _exposure = newValue;\n    \n    [self setFloat:_exposure forUniform:exposureUniform program:filterProgram];\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageFASTCornerDetectionFilter.h",
    "content": "#import \"GPUImageFilterGroup.h\"\n\n@class GPUImageGrayscaleFilter;\n@class GPUImage3x3TextureSamplingFilter;\n@class GPUImageNonMaximumSuppressionFilter;\n\n/* \n An implementation of the Features from Accelerated Segment Test (FAST) feature detector as described in the following publications:\n \n E. Rosten and T. Drummond. Fusing points and lines for high performance tracking. IEEE International Conference on Computer Vision, 2005.\n E. Rosten and T. Drummond. Machine learning for high-speed corner detection.  European Conference on Computer Vision, 2006.\n \n For more about the FAST feature detector, see the resources here:\n http://www.edwardrosten.com/work/fast.html\n */\n\ntypedef enum { kGPUImageFAST12Contiguous, kGPUImageFAST12ContiguousNonMaximumSuppressed} GPUImageFASTDetectorType;\n\n@interface GPUImageFASTCornerDetectionFilter : GPUImageFilterGroup\n{\n    GPUImageGrayscaleFilter *luminanceReductionFilter;\n    GPUImage3x3TextureSamplingFilter *featureDetectionFilter;\n    GPUImageNonMaximumSuppressionFilter *nonMaximumSuppressionFilter;\n// Generate a lookup texture based on the bit patterns\n    \n// Step 1: convert to monochrome if necessary\n// Step 2: do a lookup at each pixel based on the Bresenham circle, encode comparison in two color components\n// Step 3: do non-maximum suppression of close corner points\n}\n\n- (id)initWithFASTDetectorVariant:(GPUImageFASTDetectorType)detectorType;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageFASTCornerDetectionFilter.m",
    "content": "#import \"GPUImageFASTCornerDetectionFilter.h\"\n\n#import \"GPUImageGrayscaleFilter.h\"\n#import \"GPUImage3x3TextureSamplingFilter.h\"\n#import \"GPUImageNonMaximumSuppressionFilter.h\"\n\n// 14 total texture coordinates from vertex shader for non-dependent reads\n// 3 texture coordinates for dependent reads, then\n\nNSString *const kGPUImageFASTDetectorFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D lookupTable;\n \n void main()\n {\n     lowp float centerIntensity = texture2D(inputImageTexture, textureCoordinate).r;\n     lowp float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;\n     lowp float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;\n     lowp float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;\n     lowp float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;\n     lowp float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;\n     lowp float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;\n     lowp float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;\n     lowp float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;\n     \n     lowp float byteTally = 1.0 / 255.0 * step(centerIntensity, topRightIntensity);\n     byteTally += 2.0 / 255.0 * step(centerIntensity, topIntensity);\n     byteTally += 4.0 / 255.0 * step(centerIntensity, topLeftIntensity);\n     byteTally += 8.0 / 255.0 * step(centerIntensity, leftIntensity);\n     byteTally += 16.0 / 255.0 * step(centerIntensity, bottomLeftIntensity);\n     byteTally += 32.0 / 255.0 * step(centerIntensity, bottomIntensity);\n     byteTally += 64.0 / 255.0 * step(centerIntensity, bottomRightIntensity);\n     byteTally += 128.0 / 255.0 * step(centerIntensity, rightIntensity);\n     \n     // TODO: Replace the above with a dot product and two vec4s\n     // TODO: Apply step to a matrix, rather than individually\n     \n     gl_FragColor = vec4(byteTally, byteTally, byteTally, 1.0);\n }\n );\n\n\n@implementation GPUImageFASTCornerDetectionFilter\n\n- (id)init;\n{\n    if (!(self = [self initWithFASTDetectorVariant:kGPUImageFAST12ContiguousNonMaximumSuppressed]))\n    {\n        return nil;\n    }\n    \n    return self;\n}\n\n- (id)initWithFASTDetectorVariant:(GPUImageFASTDetectorType)detectorType;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n//    [derivativeFilter addTarget:blurFilter];\n//    [blurFilter addTarget:harrisCornerDetectionFilter];\n//    [harrisCornerDetectionFilter addTarget:nonMaximumSuppressionFilter];\n    //    [simpleThresholdFilter addTarget:colorPackingFilter];\n    \n//    self.initialFilters = [NSArray arrayWithObjects:derivativeFilter, nil];\n    //    self.terminalFilter = colorPackingFilter;\n//    self.terminalFilter = nonMaximumSuppressionFilter;\n\n    return self;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageFalseColorFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageFalseColorFilter : GPUImageFilter\n{\n    GLint firstColorUniform, secondColorUniform;\n}\n\n// The first and second colors specify what colors replace the dark and light areas of the image, respectively. The defaults are (0.0, 0.0, 0.5) amd (1.0, 0.0, 0.0).\n@property(readwrite, nonatomic) GPUVector4 firstColor;\n@property(readwrite, nonatomic) GPUVector4 secondColor;\n\n- (void)setFirstColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;\n- (void)setSecondColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;\n\n@end\n"
  },
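  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Examples/GPUImageFalseColorExample.m",
    "content": "// Illustrative configuration sketch added for documentation (hypothetical Examples path, not part of the original sources).\n// The shader in the implementation below mixes between the two colors based on luminance: firstColor replaces the dark areas, secondColor the light areas.\n\n#import \"GPUImageFalseColorFilter.h\"\n\nstatic GPUImageFalseColorFilter *makeFalseColorFilter(void)\n{\n    GPUImageFalseColorFilter *falseColorFilter = [[GPUImageFalseColorFilter alloc] init];\n    falseColorFilter.firstColor = (GPUVector4){0.0f, 0.0f, 0.3f, 1.0f};  // replaces dark areas\n    falseColorFilter.secondColor = (GPUVector4){1.0f, 0.9f, 0.0f, 1.0f}; // replaces light areas\n    // Equivalent component-wise setters are also available:\n    // [falseColorFilter setFirstColorRed:0.0 green:0.0 blue:0.3];\n    return falseColorFilter;\n}\n"
  },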
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageFalseColorFilter.m",
    "content": "#import \"GPUImageFalseColorFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUFalseColorFragmentShaderString = SHADER_STRING\n(\n precision lowp float;\n \n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform float intensity;\n uniform vec3 firstColor;\n uniform vec3 secondColor;\n \n const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);\n \n void main()\n {\n     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     float luminance = dot(textureColor.rgb, luminanceWeighting);\n     \n     gl_FragColor = vec4( mix(firstColor.rgb, secondColor.rgb, luminance), textureColor.a);\n }\n);\n#else\nNSString *const kGPUFalseColorFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform float intensity;\n uniform vec3 firstColor;\n uniform vec3 secondColor;\n \n const vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     float luminance = dot(textureColor.rgb, luminanceWeighting);\n     \n     gl_FragColor = vec4( mix(firstColor.rgb, secondColor.rgb, luminance), textureColor.a);\n }\n);\n#endif\n\n\n@implementation GPUImageFalseColorFilter\n\n@synthesize secondColor = _secondColor;\n@synthesize firstColor = _firstColor;\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUFalseColorFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    firstColorUniform = [filterProgram uniformIndex:@\"firstColor\"];\n    secondColorUniform = [filterProgram uniformIndex:@\"secondColor\"];\n    \n\tself.firstColor = (GPUVector4){0.0f, 0.0f, 0.5f, 1.0f};\n\tself.secondColor = (GPUVector4){1.0f, 0.0f, 0.0f, 1.0f};\n    \n    return self;\n}\n\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setFirstColor:(GPUVector4)newValue;\n{\n\t_firstColor = newValue;\n\t\n\t[self setFirstColorRed:_firstColor.one green:_firstColor.two blue:_firstColor.three];\n}\n\n- (void)setSecondColor:(GPUVector4)newValue;\n{\n\t_secondColor = newValue;\n\t\n\t[self setSecondColorRed:_secondColor.one green:_secondColor.two blue:_secondColor.three];\n}\n\n- (void)setFirstColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;\n{\n    GPUVector3 firstColor = {redComponent, greenComponent, blueComponent};\n    \n    [self setVec3:firstColor forUniform:firstColorUniform program:filterProgram];\n}\n\n- (void)setSecondColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;\n{\n    GPUVector3 secondColor = {redComponent, greenComponent, blueComponent};\n    \n    [self setVec3:secondColor forUniform:secondColorUniform program:filterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGammaFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageGammaFilter : GPUImageFilter\n{\n    GLint gammaUniform;\n}\n\n// Gamma ranges from 0.0 to 3.0, with 1.0 as the normal level\n@property(readwrite, nonatomic) CGFloat gamma; \n\n@end\n"
  },
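  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Examples/GPUImageGammaExample.m",
    "content": "// Illustrative configuration sketch added for documentation (hypothetical Examples path, not part of the original sources).\n// The fragment shader in the implementation below raises the RGB channels to the power of gamma, so 1.0 leaves the image unchanged.\n\n#import \"GPUImageGammaFilter.h\"\n\nstatic GPUImageGammaFilter *makeGammaFilter(void)\n{\n    GPUImageGammaFilter *gammaFilter = [[GPUImageGammaFilter alloc] init];\n    gammaFilter.gamma = 2.2; // documented range is 0.0 to 3.0\n    return gammaFilter;\n}\n"
  },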
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGammaFilter.m",
    "content": "#import \"GPUImageGammaFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageGammaFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform lowp float gamma;\n \n void main()\n {\n     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     \n     gl_FragColor = vec4(pow(textureColor.rgb, vec3(gamma)), textureColor.w);\n }\n);\n#else\nNSString *const kGPUImageGammaFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform float gamma;\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     \n     gl_FragColor = vec4(pow(textureColor.rgb, vec3(gamma)), textureColor.w);\n }\n);\n#endif\n\n@implementation GPUImageGammaFilter\n\n@synthesize gamma = _gamma;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageGammaFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    gammaUniform = [filterProgram uniformIndex:@\"gamma\"];\n    self.gamma = 1.0;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setGamma:(CGFloat)newValue;\n{\n    _gamma = newValue;\n    \n    [self setFloat:_gamma forUniform:gammaUniform program:filterProgram];\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGaussianBlurFilter.h",
    "content": "#import \"GPUImageTwoPassTextureSamplingFilter.h\"\n\n/** A Gaussian blur filter\n    Interpolated optimization based on Daniel Rákos' work at http://rastergrid.com/blog/2010/09/efficient-gaussian-blur-with-linear-sampling/\n */\n\n@interface GPUImageGaussianBlurFilter : GPUImageTwoPassTextureSamplingFilter \n{\n    BOOL shouldResizeBlurRadiusWithImageSize;\n    CGFloat _blurRadiusInPixels;\n}\n\n/** A multiplier for the spacing between texels, ranging from 0.0 on up, with a default of 1.0. Adjusting this may slightly increase the blur strength, but will introduce artifacts in the result.\n */\n@property (readwrite, nonatomic) CGFloat texelSpacingMultiplier;\n\n/** A radius in pixels to use for the blur, with a default of 2.0. This adjusts the sigma variable in the Gaussian distribution function.\n */\n@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;\n\n/** Setting these properties will allow the blur radius to scale with the size of the image. These properties are mutually exclusive; setting either will set the other to 0.\n */\n@property (readwrite, nonatomic) CGFloat blurRadiusAsFractionOfImageWidth;\n@property (readwrite, nonatomic) CGFloat blurRadiusAsFractionOfImageHeight;\n\n/// The number of times to sequentially blur the incoming image. The more passes, the slower the filter.\n@property(readwrite, nonatomic) NSUInteger blurPasses;\n\n+ (NSString *)vertexShaderForStandardBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;\n+ (NSString *)fragmentShaderForStandardBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;\n+ (NSString *)vertexShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;\n+ (NSString *)fragmentShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;\n\n- (void)switchToVertexShader:(NSString *)newVertexShader fragmentShader:(NSString *)newFragmentShader;\n\n@end\n"
  },
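  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Examples/GPUImageGaussianBlurExample.m",
    "content": "// Illustrative configuration sketch added for documentation (hypothetical Examples path, not part of the original sources).\n// blurRadiusInPixels adjusts the sigma used when the optimized blur shaders are generated, and blurPasses re-runs the\n// separable blur for a stronger result; the fractional properties instead scale the radius with the image size and are\n// mutually exclusive with each other, as described in the header above.\n\n#import \"GPUImageGaussianBlurFilter.h\"\n\nstatic GPUImageGaussianBlurFilter *makeBlurFilter(void)\n{\n    GPUImageGaussianBlurFilter *blurFilter = [[GPUImageGaussianBlurFilter alloc] init];\n    blurFilter.blurRadiusInPixels = 6.0; // default is 2.0\n    blurFilter.blurPasses = 2;\n    // Alternatively, tie the radius to the image dimensions:\n    // blurFilter.blurRadiusAsFractionOfImageWidth = 0.01;\n    return blurFilter;\n}\n"
  },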
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGaussianBlurFilter.m",
    "content": "#import \"GPUImageGaussianBlurFilter.h\"\n\n@implementation GPUImageGaussianBlurFilter\n\n@synthesize texelSpacingMultiplier = _texelSpacingMultiplier;\n@synthesize blurRadiusInPixels = _blurRadiusInPixels;\n@synthesize blurRadiusAsFractionOfImageWidth  = _blurRadiusAsFractionOfImageWidth;\n@synthesize blurRadiusAsFractionOfImageHeight = _blurRadiusAsFractionOfImageHeight;\n@synthesize blurPasses = _blurPasses;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString\n{\n    if (!(self = [super initWithFirstStageVertexShaderFromString:firstStageVertexShaderString firstStageFragmentShaderFromString:firstStageFragmentShaderString secondStageVertexShaderFromString:secondStageVertexShaderString secondStageFragmentShaderFromString:secondStageFragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    self.texelSpacingMultiplier = 1.0;\n    _blurRadiusInPixels = 2.0;\n    shouldResizeBlurRadiusWithImageSize = NO;\n    \n    return self;\n}\n\n- (id)init;\n{\n    NSString *currentGaussianBlurVertexShader = [[self class] vertexShaderForOptimizedBlurOfRadius:4 sigma:2.0];\n    NSString *currentGaussianBlurFragmentShader = [[self class] fragmentShaderForOptimizedBlurOfRadius:4 sigma:2.0];\n    \n    return [self initWithFirstStageVertexShaderFromString:currentGaussianBlurVertexShader firstStageFragmentShaderFromString:currentGaussianBlurFragmentShader secondStageVertexShaderFromString:currentGaussianBlurVertexShader secondStageFragmentShaderFromString:currentGaussianBlurFragmentShader];\n}\n\n#pragma mark -\n#pragma mark Auto-generation of optimized Gaussian shaders\n\n// \"Implementation limit of 32 varying components exceeded\" - Max number of varyings for these GPUs\n\n+ (NSString *)vertexShaderForStandardBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;\n{\n    if (blurRadius < 1)\n    {\n        return kGPUImageVertexShaderString;\n    }\n    \n//    NSLog(@\"Max varyings: %d\", [GPUImageContext maximumVaryingVectorsForThisDevice]);\n    NSMutableString *shaderString = [[NSMutableString alloc] init];\n\n    // Header\n    [shaderString appendFormat:@\"\\\n      attribute vec4 position;\\n\\\n      attribute vec4 inputTextureCoordinate;\\n\\\n      \\n\\\n      uniform float texelWidthOffset;\\n\\\n      uniform float texelHeightOffset;\\n\\\n      \\n\\\n      varying vec2 blurCoordinates[%lu];\\n\\\n      \\n\\\n      void main()\\n\\\n      {\\n\\\n          gl_Position = position;\\n\\\n          \\n\\\n          vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\\n\", (unsigned long)(blurRadius * 2 + 1) ];\n\n    // Inner offset loop\n    for (NSUInteger currentBlurCoordinateIndex = 0; currentBlurCoordinateIndex < (blurRadius * 2 + 1); currentBlurCoordinateIndex++)\n    {\n        NSInteger offsetFromCenter = currentBlurCoordinateIndex - blurRadius;\n        if (offsetFromCenter < 0)\n        {\n            [shaderString appendFormat:@\"blurCoordinates[%ld] = inputTextureCoordinate.xy - singleStepOffset * %f;\\n\", (unsigned long)currentBlurCoordinateIndex, (GLfloat)(-offsetFromCenter)];\n        }\n        else if (offsetFromCenter > 0)\n        {\n            [shaderString appendFormat:@\"blurCoordinates[%ld] = 
inputTextureCoordinate.xy + singleStepOffset * %f;\\n\", (unsigned long)currentBlurCoordinateIndex, (GLfloat)(offsetFromCenter)];\n        }\n        else\n        {\n            [shaderString appendFormat:@\"blurCoordinates[%ld] = inputTextureCoordinate.xy;\\n\", (unsigned long)currentBlurCoordinateIndex];\n        }\n    }\n    \n    // Footer\n    [shaderString appendString:@\"}\\n\"];\n    \n    return shaderString;\n}\n\n+ (NSString *)fragmentShaderForStandardBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;\n{\n    if (blurRadius < 1)\n    {\n        return kGPUImagePassthroughFragmentShaderString;\n    }\n\n    // First, generate the normal Gaussian weights for a given sigma\n    GLfloat *standardGaussianWeights = calloc(blurRadius + 1, sizeof(GLfloat));\n    GLfloat sumOfWeights = 0.0;\n    for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)\n    {\n        standardGaussianWeights[currentGaussianWeightIndex] = (1.0 / sqrt(2.0 * M_PI * pow(sigma, 2.0))) * exp(-pow(currentGaussianWeightIndex, 2.0) / (2.0 * pow(sigma, 2.0)));\n\n        if (currentGaussianWeightIndex == 0)\n        {\n            sumOfWeights += standardGaussianWeights[currentGaussianWeightIndex];\n        }\n        else\n        {\n            sumOfWeights += 2.0 * standardGaussianWeights[currentGaussianWeightIndex];\n        }\n    }\n\n    // Next, normalize these weights to prevent the clipping of the Gaussian curve at the end of the discrete samples from reducing luminance\n    for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)\n    {\n        standardGaussianWeights[currentGaussianWeightIndex] = standardGaussianWeights[currentGaussianWeightIndex] / sumOfWeights;\n    }\n\n    // Finally, generate the shader from these weights\n    NSMutableString *shaderString = [[NSMutableString alloc] init];\n    \n    // Header\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n    [shaderString appendFormat:@\"\\\n     uniform sampler2D inputImageTexture;\\n\\\n     \\n\\\n     varying highp vec2 blurCoordinates[%lu];\\n\\\n     \\n\\\n     void main()\\n\\\n     {\\n\\\n        lowp vec4 sum = vec4(0.0);\\n\", (unsigned long)(blurRadius * 2 + 1) ];\n#else\n    [shaderString appendFormat:@\"\\\n     uniform sampler2D inputImageTexture;\\n\\\n     \\n\\\n     varying vec2 blurCoordinates[%lu];\\n\\\n     \\n\\\n     void main()\\n\\\n     {\\n\\\n        vec4 sum = vec4(0.0);\\n\", (blurRadius * 2 + 1) ];\n#endif\n\n    // Inner texture loop\n    for (NSUInteger currentBlurCoordinateIndex = 0; currentBlurCoordinateIndex < (blurRadius * 2 + 1); currentBlurCoordinateIndex++)\n    {\n        NSInteger offsetFromCenter = currentBlurCoordinateIndex - blurRadius;\n        if (offsetFromCenter < 0)\n        {\n            [shaderString appendFormat:@\"sum += texture2D(inputImageTexture, blurCoordinates[%lu]) * %f;\\n\", (unsigned long)currentBlurCoordinateIndex, standardGaussianWeights[-offsetFromCenter]];\n        }\n        else\n        {\n            [shaderString appendFormat:@\"sum += texture2D(inputImageTexture, blurCoordinates[%lu]) * %f;\\n\", (unsigned long)currentBlurCoordinateIndex, standardGaussianWeights[offsetFromCenter]];\n        }\n    }\n\n    // Footer\n    [shaderString appendString:@\"\\\n     gl_FragColor = sum;\\n\\\n     }\\n\"];\n    \n    free(standardGaussianWeights);\n    return shaderString;\n}\n\n+ (NSString 
*)vertexShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;\n{\n    if (blurRadius < 1)\n    {\n        return kGPUImageVertexShaderString;\n    }\n\n    // First, generate the normal Gaussian weights for a given sigma\n    GLfloat *standardGaussianWeights = calloc(blurRadius + 1, sizeof(GLfloat));\n    GLfloat sumOfWeights = 0.0;\n    for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)\n    {\n        standardGaussianWeights[currentGaussianWeightIndex] = (1.0 / sqrt(2.0 * M_PI * pow(sigma, 2.0))) * exp(-pow(currentGaussianWeightIndex, 2.0) / (2.0 * pow(sigma, 2.0)));\n        \n        if (currentGaussianWeightIndex == 0)\n        {\n            sumOfWeights += standardGaussianWeights[currentGaussianWeightIndex];\n        }\n        else\n        {\n            sumOfWeights += 2.0 * standardGaussianWeights[currentGaussianWeightIndex];\n        }\n    }\n    \n    // Next, normalize these weights to prevent the clipping of the Gaussian curve at the end of the discrete samples from reducing luminance\n    for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)\n    {\n        standardGaussianWeights[currentGaussianWeightIndex] = standardGaussianWeights[currentGaussianWeightIndex] / sumOfWeights;\n    }\n\n    // From these weights we calculate the offsets to read interpolated values from\n    NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7);\n    GLfloat *optimizedGaussianOffsets = calloc(numberOfOptimizedOffsets, sizeof(GLfloat));\n    \n    for (NSUInteger currentOptimizedOffset = 0; currentOptimizedOffset < numberOfOptimizedOffsets; currentOptimizedOffset++)\n    {\n        GLfloat firstWeight = standardGaussianWeights[currentOptimizedOffset*2 + 1];\n        GLfloat secondWeight = standardGaussianWeights[currentOptimizedOffset*2 + 2];\n        \n        GLfloat optimizedWeight = firstWeight + secondWeight;\n        \n        optimizedGaussianOffsets[currentOptimizedOffset] = (firstWeight * (currentOptimizedOffset*2 + 1) + secondWeight * (currentOptimizedOffset*2 + 2)) / optimizedWeight;\n    }\n    \n    NSMutableString *shaderString = [[NSMutableString alloc] init];\n    // Header\n    [shaderString appendFormat:@\"\\\n     attribute vec4 position;\\n\\\n     attribute vec4 inputTextureCoordinate;\\n\\\n     \\n\\\n     uniform float texelWidthOffset;\\n\\\n     uniform float texelHeightOffset;\\n\\\n     \\n\\\n     varying vec2 blurCoordinates[%lu];\\n\\\n     \\n\\\n     void main()\\n\\\n     {\\n\\\n        gl_Position = position;\\n\\\n        \\n\\\n        vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\\n\", (unsigned long)(1 + (numberOfOptimizedOffsets * 2))];\n\n    // Inner offset loop\n    [shaderString appendString:@\"blurCoordinates[0] = inputTextureCoordinate.xy;\\n\"];\n    for (NSUInteger currentOptimizedOffset = 0; currentOptimizedOffset < numberOfOptimizedOffsets; currentOptimizedOffset++)\n    {\n        [shaderString appendFormat:@\"\\\n         blurCoordinates[%lu] = inputTextureCoordinate.xy + singleStepOffset * %f;\\n\\\n         blurCoordinates[%lu] = inputTextureCoordinate.xy - singleStepOffset * %f;\\n\", (unsigned long)((currentOptimizedOffset * 2) + 1), optimizedGaussianOffsets[currentOptimizedOffset], (unsigned long)((currentOptimizedOffset * 2) + 2), optimizedGaussianOffsets[currentOptimizedOffset]];\n    }\n    \n    // Footer\n    [shaderString 
appendString:@\"}\\n\"];\n\n    free(optimizedGaussianOffsets);\n    free(standardGaussianWeights);\n    return shaderString;\n}\n\n+ (NSString *)fragmentShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;\n{\n    if (blurRadius < 1)\n    {\n        return kGPUImagePassthroughFragmentShaderString;\n    }\n    \n    // First, generate the normal Gaussian weights for a given sigma\n    GLfloat *standardGaussianWeights = calloc(blurRadius + 1, sizeof(GLfloat));\n    GLfloat sumOfWeights = 0.0;\n    for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)\n    {\n        standardGaussianWeights[currentGaussianWeightIndex] = (1.0 / sqrt(2.0 * M_PI * pow(sigma, 2.0))) * exp(-pow(currentGaussianWeightIndex, 2.0) / (2.0 * pow(sigma, 2.0)));\n        \n        if (currentGaussianWeightIndex == 0)\n        {\n            sumOfWeights += standardGaussianWeights[currentGaussianWeightIndex];\n        }\n        else\n        {\n            sumOfWeights += 2.0 * standardGaussianWeights[currentGaussianWeightIndex];\n        }\n    }\n    \n    // Next, normalize these weights to prevent the clipping of the Gaussian curve at the end of the discrete samples from reducing luminance\n    for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)\n    {\n        standardGaussianWeights[currentGaussianWeightIndex] = standardGaussianWeights[currentGaussianWeightIndex] / sumOfWeights;\n    }\n    \n    // From these weights we calculate the offsets to read interpolated values from\n    NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7);\n    NSUInteger trueNumberOfOptimizedOffsets = blurRadius / 2 + (blurRadius % 2);\n\n    NSMutableString *shaderString = [[NSMutableString alloc] init];\n    \n    // Header\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n    [shaderString appendFormat:@\"\\\n     uniform sampler2D inputImageTexture;\\n\\\n     uniform highp float texelWidthOffset;\\n\\\n     uniform highp float texelHeightOffset;\\n\\\n     \\n\\\n     varying highp vec2 blurCoordinates[%lu];\\n\\\n     \\n\\\n     void main()\\n\\\n     {\\n\\\n        lowp vec4 sum = vec4(0.0);\\n\", (unsigned long)(1 + (numberOfOptimizedOffsets * 2)) ];\n#else\n    [shaderString appendFormat:@\"\\\n     uniform sampler2D inputImageTexture;\\n\\\n     uniform float texelWidthOffset;\\n\\\n     uniform float texelHeightOffset;\\n\\\n     \\n\\\n     varying vec2 blurCoordinates[%lu];\\n\\\n     \\n\\\n     void main()\\n\\\n     {\\n\\\n        vec4 sum = vec4(0.0);\\n\", 1 + (numberOfOptimizedOffsets * 2) ];\n#endif\n\n    // Inner texture loop\n    [shaderString appendFormat:@\"sum += texture2D(inputImageTexture, blurCoordinates[0]) * %f;\\n\", standardGaussianWeights[0]];\n    \n    for (NSUInteger currentBlurCoordinateIndex = 0; currentBlurCoordinateIndex < numberOfOptimizedOffsets; currentBlurCoordinateIndex++)\n    {\n        GLfloat firstWeight = standardGaussianWeights[currentBlurCoordinateIndex * 2 + 1];\n        GLfloat secondWeight = standardGaussianWeights[currentBlurCoordinateIndex * 2 + 2];\n        GLfloat optimizedWeight = firstWeight + secondWeight;\n\n        [shaderString appendFormat:@\"sum += texture2D(inputImageTexture, blurCoordinates[%lu]) * %f;\\n\", (unsigned long)((currentBlurCoordinateIndex * 2) + 1), optimizedWeight];\n        [shaderString appendFormat:@\"sum += texture2D(inputImageTexture, blurCoordinates[%lu]) * 
%f;\\n\", (unsigned long)((currentBlurCoordinateIndex * 2) + 2), optimizedWeight];\n    }\n    \n    // If the number of required samples exceeds the amount we can pass in via varyings, we have to do dependent texture reads in the fragment shader\n    if (trueNumberOfOptimizedOffsets > numberOfOptimizedOffsets)\n    {\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n        [shaderString appendString:@\"highp vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\\n\"];\n#else\n        [shaderString appendString:@\"vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\\n\"];\n#endif\n\n        for (NSUInteger currentOverlowTextureRead = numberOfOptimizedOffsets; currentOverlowTextureRead < trueNumberOfOptimizedOffsets; currentOverlowTextureRead++)\n        {\n            GLfloat firstWeight = standardGaussianWeights[currentOverlowTextureRead * 2 + 1];\n            GLfloat secondWeight = standardGaussianWeights[currentOverlowTextureRead * 2 + 2];\n            \n            GLfloat optimizedWeight = firstWeight + secondWeight;\n            GLfloat optimizedOffset = (firstWeight * (currentOverlowTextureRead * 2 + 1) + secondWeight * (currentOverlowTextureRead * 2 + 2)) / optimizedWeight;\n            \n            [shaderString appendFormat:@\"sum += texture2D(inputImageTexture, blurCoordinates[0] + singleStepOffset * %f) * %f;\\n\", optimizedOffset, optimizedWeight];\n            [shaderString appendFormat:@\"sum += texture2D(inputImageTexture, blurCoordinates[0] - singleStepOffset * %f) * %f;\\n\", optimizedOffset, optimizedWeight];\n        }\n    }\n    \n    // Footer\n    [shaderString appendString:@\"\\\n        gl_FragColor = sum;\\n\\\n     }\\n\"];\n\n    free(standardGaussianWeights);\n    return shaderString;\n}\n\n- (void)setupFilterForSize:(CGSize)filterFrameSize;\n{\n    [super setupFilterForSize:filterFrameSize];\n    \n    if (shouldResizeBlurRadiusWithImageSize)\n    {\n        if (self.blurRadiusAsFractionOfImageWidth > 0)\n        {\n            self.blurRadiusInPixels = filterFrameSize.width * self.blurRadiusAsFractionOfImageWidth;\n        }\n        else\n        {\n            self.blurRadiusInPixels = filterFrameSize.height * self.blurRadiusAsFractionOfImageHeight;\n        }\n    }\n}\n\n#pragma mark -\n#pragma mark Rendering\n\n- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;\n{\n    [super renderToTextureWithVertices:vertices textureCoordinates:textureCoordinates];\n    \n    for (NSUInteger currentAdditionalBlurPass = 1; currentAdditionalBlurPass < _blurPasses; currentAdditionalBlurPass++)\n    {\n        [super renderToTextureWithVertices:vertices textureCoordinates:[[self class] textureCoordinatesForRotation:kGPUImageNoRotation]];\n    }\n}\n\n- (void)switchToVertexShader:(NSString *)newVertexShader fragmentShader:(NSString *)newFragmentShader;\n{\n    runSynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext useImageProcessingContext];\n\n        filterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:newVertexShader fragmentShaderString:newFragmentShader];\n        \n        if (!filterProgram.initialized)\n        {\n            [self initializeAttributes];\n            \n            if (![filterProgram link])\n            {\n                NSString *progLog = [filterProgram programLog];\n                NSLog(@\"Program link log: %@\", progLog);\n                NSString *fragLog = [filterProgram fragmentShaderLog];\n 
               NSLog(@\"Fragment shader compile log: %@\", fragLog);\n                NSString *vertLog = [filterProgram vertexShaderLog];\n                NSLog(@\"Vertex shader compile log: %@\", vertLog);\n                filterProgram = nil;\n                NSAssert(NO, @\"Filter shader link failed\");\n            }\n        }\n        \n        filterPositionAttribute = [filterProgram attributeIndex:@\"position\"];\n        filterTextureCoordinateAttribute = [filterProgram attributeIndex:@\"inputTextureCoordinate\"];\n        filterInputTextureUniform = [filterProgram uniformIndex:@\"inputImageTexture\"]; // This does assume a name of \"inputImageTexture\" for the fragment shader\n        verticalPassTexelWidthOffsetUniform = [filterProgram uniformIndex:@\"texelWidthOffset\"];\n        verticalPassTexelHeightOffsetUniform = [filterProgram uniformIndex:@\"texelHeightOffset\"];\n        [GPUImageContext setActiveShaderProgram:filterProgram];\n\n        glEnableVertexAttribArray(filterPositionAttribute);\n        glEnableVertexAttribArray(filterTextureCoordinateAttribute);\n\n        secondFilterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:newVertexShader fragmentShaderString:newFragmentShader];\n        \n        if (!secondFilterProgram.initialized)\n        {\n            [self initializeSecondaryAttributes];\n            \n            if (![secondFilterProgram link])\n            {\n                NSString *progLog = [secondFilterProgram programLog];\n                NSLog(@\"Program link log: %@\", progLog);\n                NSString *fragLog = [secondFilterProgram fragmentShaderLog];\n                NSLog(@\"Fragment shader compile log: %@\", fragLog);\n                NSString *vertLog = [secondFilterProgram vertexShaderLog];\n                NSLog(@\"Vertex shader compile log: %@\", vertLog);\n                secondFilterProgram = nil;\n                NSAssert(NO, @\"Filter shader link failed\");\n            }\n        }\n        \n        secondFilterPositionAttribute = [secondFilterProgram attributeIndex:@\"position\"];\n        secondFilterTextureCoordinateAttribute = [secondFilterProgram attributeIndex:@\"inputTextureCoordinate\"];\n        secondFilterInputTextureUniform = [secondFilterProgram uniformIndex:@\"inputImageTexture\"]; // This does assume a name of \"inputImageTexture\" for the fragment shader\n        secondFilterInputTextureUniform2 = [secondFilterProgram uniformIndex:@\"inputImageTexture2\"]; // This does assume a name of \"inputImageTexture2\" for second input texture in the fragment shader\n        horizontalPassTexelWidthOffsetUniform = [secondFilterProgram uniformIndex:@\"texelWidthOffset\"];\n        horizontalPassTexelHeightOffsetUniform = [secondFilterProgram uniformIndex:@\"texelHeightOffset\"];\n        [GPUImageContext setActiveShaderProgram:secondFilterProgram];\n\n        glEnableVertexAttribArray(secondFilterPositionAttribute);\n        glEnableVertexAttribArray(secondFilterTextureCoordinateAttribute);\n        \n        [self setupFilterForSize:[self sizeOfFBO]];\n        glFinish();\n    });\n\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setTexelSpacingMultiplier:(CGFloat)newValue;\n{\n    _texelSpacingMultiplier = newValue;\n    \n    _verticalTexelSpacing = _texelSpacingMultiplier;\n    _horizontalTexelSpacing = _texelSpacingMultiplier;\n    \n    [self setupFilterForSize:[self sizeOfFBO]];\n}\n\n// inputRadius for Core Image's CIGaussianBlur is really sigma in the Gaussian equation, 
so I'm using that for my blur radius, to be consistent\n- (void)setBlurRadiusInPixels:(CGFloat)newValue;\n{\n    // 7.0 is the limit for blur size for hardcoded varying offsets\n\n    if (round(newValue) != _blurRadiusInPixels)\n    {\n        _blurRadiusInPixels = round(newValue); // For now, only do integral sigmas\n        \n        NSUInteger calculatedSampleRadius = 0;\n        if (_blurRadiusInPixels >= 1) // Avoid a divide-by-zero error here\n        {\n            // Calculate the number of pixels to sample from by setting a bottom limit for the contribution of the outermost pixel\n            CGFloat minimumWeightToFindEdgeOfSamplingArea = 1.0/256.0;\n            calculatedSampleRadius = floor(sqrt(-2.0 * pow(_blurRadiusInPixels, 2.0) * log(minimumWeightToFindEdgeOfSamplingArea * sqrt(2.0 * M_PI * pow(_blurRadiusInPixels, 2.0))) ));\n            calculatedSampleRadius += calculatedSampleRadius % 2; // There's nothing to gain from handling odd radius sizes, due to the optimizations I use\n        }\n        \n//        NSLog(@\"Blur radius: %f, calculated sample radius: %d\", _blurRadiusInPixels, calculatedSampleRadius);\n//        \n        NSString *newGaussianBlurVertexShader = [[self class] vertexShaderForOptimizedBlurOfRadius:calculatedSampleRadius sigma:_blurRadiusInPixels];\n        NSString *newGaussianBlurFragmentShader = [[self class] fragmentShaderForOptimizedBlurOfRadius:calculatedSampleRadius sigma:_blurRadiusInPixels];\n\n//        NSLog(@\"Optimized vertex shader: \\n%@\", newGaussianBlurVertexShader);\n//        NSLog(@\"Optimized fragment shader: \\n%@\", newGaussianBlurFragmentShader);\n//        \n        [self switchToVertexShader:newGaussianBlurVertexShader fragmentShader:newGaussianBlurFragmentShader];\n    }\n    shouldResizeBlurRadiusWithImageSize = NO;\n}\n\n- (void)setBlurRadiusAsFractionOfImageWidth:(CGFloat)blurRadiusAsFractionOfImageWidth\n{\n    if (blurRadiusAsFractionOfImageWidth < 0)  return;\n\n    shouldResizeBlurRadiusWithImageSize = _blurRadiusAsFractionOfImageWidth != blurRadiusAsFractionOfImageWidth && blurRadiusAsFractionOfImageWidth > 0;\n    _blurRadiusAsFractionOfImageWidth = blurRadiusAsFractionOfImageWidth;\n    _blurRadiusAsFractionOfImageHeight = 0;\n}\n\n- (void)setBlurRadiusAsFractionOfImageHeight:(CGFloat)blurRadiusAsFractionOfImageHeight\n{\n    if (blurRadiusAsFractionOfImageHeight < 0)  return;\n\n    shouldResizeBlurRadiusWithImageSize = _blurRadiusAsFractionOfImageHeight != blurRadiusAsFractionOfImageHeight && blurRadiusAsFractionOfImageHeight > 0;\n    _blurRadiusAsFractionOfImageHeight = blurRadiusAsFractionOfImageHeight;\n    _blurRadiusAsFractionOfImageWidth = 0;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGaussianBlurPositionFilter.h",
    "content": "#import \"GPUImageTwoPassTextureSamplingFilter.h\"\n\n/** A more generalized 9x9 Gaussian blur filter\n */\n@interface GPUImageGaussianBlurPositionFilter : GPUImageTwoPassTextureSamplingFilter \n{\n    GLint blurCenterUniform, blurRadiusUniform, aspectRatioUniform;\n}\n\n/** A multiplier for the blur size, ranging from 0.0 on up, with a default of 1.0\n */\n@property (readwrite, nonatomic) CGFloat blurSize;\n\n/** Center for the blur, defaults to 0.5, 0.5\n */\n@property (readwrite, nonatomic) CGPoint blurCenter;\n\n/** Radius for the blur, defaults to 1.0\n */\n@property (readwrite, nonatomic) CGFloat blurRadius;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGaussianBlurPositionFilter.m",
    "content": "#import \"GPUImageGaussianBlurPositionFilter.h\"\n\nNSString *const kGPUImageGaussianBlurPositionVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n \n const int GAUSSIAN_SAMPLES = 9;\n \n uniform float texelWidthOffset;\n uniform float texelHeightOffset;\n varying vec2 textureCoordinate;\n varying vec2 blurCoordinates[GAUSSIAN_SAMPLES];\n \n void main()\n {\n \tgl_Position = position;\n \ttextureCoordinate = inputTextureCoordinate.xy;\n \t\n \t// Calculate the positions for the blur\n \tint multiplier = 0;\n \tvec2 blurStep;\n    vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n     \n \tfor (int i = 0; i < GAUSSIAN_SAMPLES; i++) {\n \t\tmultiplier = (i - ((GAUSSIAN_SAMPLES - 1) / 2));\n        // Blur in x (horizontal)\n        blurStep = float(multiplier) * singleStepOffset;\n \t\tblurCoordinates[i] = inputTextureCoordinate.xy + blurStep;\n \t}\n }\n);\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageGaussianBlurPositionFragmentShaderString = SHADER_STRING\n(\n uniform sampler2D inputImageTexture;\n \n const lowp int GAUSSIAN_SAMPLES = 9;\n \n varying highp vec2 textureCoordinate;\n varying highp vec2 blurCoordinates[GAUSSIAN_SAMPLES];\n\n uniform highp float aspectRatio;\n uniform lowp vec2 blurCenter;\n uniform highp float blurRadius;\n \n void main() {\n     highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n     highp float dist = distance(blurCenter, textureCoordinateToUse);\n\n     if (dist < blurRadius)\n     {\n        lowp vec4 sum = vec4(0.0);\n        \n         sum += texture2D(inputImageTexture, blurCoordinates[0]) * 0.05;\n         sum += texture2D(inputImageTexture, blurCoordinates[1]) * 0.09;\n         sum += texture2D(inputImageTexture, blurCoordinates[2]) * 0.12;\n         sum += texture2D(inputImageTexture, blurCoordinates[3]) * 0.15;\n         sum += texture2D(inputImageTexture, blurCoordinates[4]) * 0.18;\n         sum += texture2D(inputImageTexture, blurCoordinates[5]) * 0.15;\n         sum += texture2D(inputImageTexture, blurCoordinates[6]) * 0.12;\n         sum += texture2D(inputImageTexture, blurCoordinates[7]) * 0.09;\n         sum += texture2D(inputImageTexture, blurCoordinates[8]) * 0.05;\n\n        gl_FragColor = sum;\n     }\n     else\n     {\n         gl_FragColor = texture2D(inputImageTexture, textureCoordinate);\n     }\n }\n);\n#else\nNSString *const kGPUImageGaussianBlurPositionFragmentShaderString = SHADER_STRING\n(\n uniform sampler2D inputImageTexture;\n \n const int GAUSSIAN_SAMPLES = 9;\n \n varying vec2 textureCoordinate;\n varying vec2 blurCoordinates[GAUSSIAN_SAMPLES];\n \n uniform float aspectRatio;\n uniform vec2 blurCenter;\n uniform float blurRadius;\n \n void main()\n {\n     vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n     float dist = distance(blurCenter, textureCoordinateToUse);\n     \n     if (dist < blurRadius)\n     {\n         vec4 sum = vec4(0.0);\n         \n         sum += texture2D(inputImageTexture, blurCoordinates[0]) * 0.05;\n         sum += texture2D(inputImageTexture, blurCoordinates[1]) * 0.09;\n         sum += texture2D(inputImageTexture, blurCoordinates[2]) * 0.12;\n         sum += texture2D(inputImageTexture, blurCoordinates[3]) * 0.15;\n         sum += texture2D(inputImageTexture, blurCoordinates[4]) * 0.18;\n         sum += 
texture2D(inputImageTexture, blurCoordinates[5]) * 0.15;\n         sum += texture2D(inputImageTexture, blurCoordinates[6]) * 0.12;\n         sum += texture2D(inputImageTexture, blurCoordinates[7]) * 0.09;\n         sum += texture2D(inputImageTexture, blurCoordinates[8]) * 0.05;\n         \n         gl_FragColor = sum;\n     }\n     else\n     {\n         gl_FragColor = texture2D(inputImageTexture, textureCoordinate);\n     }\n }\n);\n#endif\n\n@interface GPUImageGaussianBlurPositionFilter ()\n\n- (void)adjustAspectRatio;\n\n@property (readwrite, nonatomic) CGFloat aspectRatio;\n\n@end\n\n@implementation GPUImageGaussianBlurPositionFilter\n\n@synthesize blurSize = _blurSize;\n@synthesize blurCenter = _blurCenter;\n@synthesize aspectRatio = _aspectRatio;\n\n- (id) initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString \n             firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString \n              secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString\n            secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString {\n    \n    if (!(self = [super initWithFirstStageVertexShaderFromString:firstStageVertexShaderString ? firstStageVertexShaderString : kGPUImageGaussianBlurPositionVertexShaderString\n                              firstStageFragmentShaderFromString:firstStageFragmentShaderString ? firstStageFragmentShaderString : kGPUImageGaussianBlurPositionFragmentShaderString\n                               secondStageVertexShaderFromString:secondStageVertexShaderString ? secondStageVertexShaderString : kGPUImageGaussianBlurPositionVertexShaderString\n                             secondStageFragmentShaderFromString:secondStageFragmentShaderString ? secondStageFragmentShaderString : kGPUImageGaussianBlurPositionFragmentShaderString])) {\n        return nil;\n    }\n    \n    aspectRatioUniform = [secondFilterProgram uniformIndex:@\"aspectRatio\"];\n    blurCenterUniform = [secondFilterProgram uniformIndex:@\"blurCenter\"];\n    blurRadiusUniform = [secondFilterProgram uniformIndex:@\"blurRadius\"];\n\n    self.blurSize = 1.0;\n    self.blurRadius = 1.0;\n    self.blurCenter = CGPointMake(0.5, 0.5);\n    \n    return self;\n}\n\n- (id)init;\n{\n    return [self initWithFirstStageVertexShaderFromString:nil\n                       firstStageFragmentShaderFromString:nil\n                        secondStageVertexShaderFromString:nil\n                      secondStageFragmentShaderFromString:nil];\n}\n\n- (void)adjustAspectRatio;\n{\n    if (GPUImageRotationSwapsWidthAndHeight(inputRotation))\n    {\n        [self setAspectRatio:(inputTextureSize.width / inputTextureSize.height)];\n    }\n    else\n    {\n        [self setAspectRatio:(inputTextureSize.height / inputTextureSize.width)];\n    }\n}\n\n- (void)forceProcessingAtSize:(CGSize)frameSize;\n{\n    [super forceProcessingAtSize:frameSize];\n    [self adjustAspectRatio];\n}\n\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n{\n    CGSize oldInputSize = inputTextureSize;\n    [super setInputSize:newSize atIndex:textureIndex];\n    \n    if ( (!CGSizeEqualToSize(oldInputSize, inputTextureSize)) && (!CGSizeEqualToSize(newSize, CGSizeZero)) )\n    {\n        [self adjustAspectRatio];\n    }\n}\n\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n    [super setInputRotation:newInputRotation atIndex:textureIndex];\n    [self setBlurCenter:self.blurCenter];    \n    [self 
adjustAspectRatio];\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setBlurSize:(CGFloat)newValue;\n{\n    _blurSize = newValue;\n    \n    _verticalTexelSpacing = _blurSize;\n    _horizontalTexelSpacing = _blurSize;\n    \n    [self setupFilterForSize:[self sizeOfFBO]];\n}\n\n- (void) setBlurCenter:(CGPoint)blurCenter;\n{\n    _blurCenter = blurCenter;\n    CGPoint rotatedPoint = [self rotatedPoint:blurCenter forRotation:inputRotation];\n    [self setPoint:rotatedPoint forUniform:blurCenterUniform program:secondFilterProgram];\n}\n\n- (void) setBlurRadius:(CGFloat)blurRadius;\n{\n    _blurRadius = blurRadius;\n\n    [self setFloat:_blurRadius forUniform:blurRadiusUniform program:secondFilterProgram];\n}\n\n- (void) setAspectRatio:(CGFloat)newValue;\n{\n    _aspectRatio = newValue;\n\n    [self setFloat:_aspectRatio forUniform:aspectRatioUniform program:secondFilterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGaussianSelectiveBlurFilter.h",
    "content": "#import \"GPUImageFilterGroup.h\"\n\n@class GPUImageGaussianBlurFilter;\n\n/** A Gaussian blur that preserves focus within a circular region\n */\n@interface GPUImageGaussianSelectiveBlurFilter : GPUImageFilterGroup \n{\n    GPUImageGaussianBlurFilter *blurFilter;\n    GPUImageFilter *selectiveFocusFilter;\n    BOOL hasOverriddenAspectRatio;\n}\n\n/** The radius of the circular area being excluded from the blur\n */\n@property (readwrite, nonatomic) CGFloat excludeCircleRadius;\n/** The center of the circular area being excluded from the blur\n */\n@property (readwrite, nonatomic) CGPoint excludeCirclePoint;\n/** The size of the area between the blurred portion and the clear circle\n */\n@property (readwrite, nonatomic) CGFloat excludeBlurSize;\n/** A radius in pixels to use for the blur, with a default of 5.0. This adjusts the sigma variable in the Gaussian distribution function.\n */\n@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;\n/** The aspect ratio of the image, used to adjust the circularity of the in-focus region. By default, this matches the image aspect ratio, but you can override this value.\n */\n@property (readwrite, nonatomic) CGFloat aspectRatio;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGaussianSelectiveBlurFilter.m",
    "content": "#import \"GPUImageGaussianSelectiveBlurFilter.h\"\n#import \"GPUImageGaussianBlurFilter.h\"\n#import \"GPUImageTwoInputFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageGaussianSelectiveBlurFragmentShaderString = SHADER_STRING\n( \n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2; \n \n uniform lowp float excludeCircleRadius;\n uniform lowp vec2 excludeCirclePoint;\n uniform lowp float excludeBlurSize;\n uniform highp float aspectRatio;\n\n void main()\n {\n     lowp vec4 sharpImageColor = texture2D(inputImageTexture, textureCoordinate);\n     lowp vec4 blurredImageColor = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     highp vec2 textureCoordinateToUse = vec2(textureCoordinate2.x, (textureCoordinate2.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n     highp float distanceFromCenter = distance(excludeCirclePoint, textureCoordinateToUse);\n     \n     gl_FragColor = mix(sharpImageColor, blurredImageColor, smoothstep(excludeCircleRadius - excludeBlurSize, excludeCircleRadius, distanceFromCenter));\n }\n);\n#else\nNSString *const kGPUImageGaussianSelectiveBlurFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n uniform float excludeCircleRadius;\n uniform vec2 excludeCirclePoint;\n uniform float excludeBlurSize;\n uniform float aspectRatio;\n \n void main()\n {\n     vec4 sharpImageColor = texture2D(inputImageTexture, textureCoordinate);\n     vec4 blurredImageColor = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     vec2 textureCoordinateToUse = vec2(textureCoordinate2.x, (textureCoordinate2.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n     float distanceFromCenter = distance(excludeCirclePoint, textureCoordinateToUse);\n     \n     gl_FragColor = mix(sharpImageColor, blurredImageColor, smoothstep(excludeCircleRadius - excludeBlurSize, excludeCircleRadius, distanceFromCenter));\n }\n);\n#endif\n\n@implementation GPUImageGaussianSelectiveBlurFilter\n\n@synthesize excludeCirclePoint = _excludeCirclePoint, excludeCircleRadius = _excludeCircleRadius, excludeBlurSize = _excludeBlurSize;\n@synthesize blurRadiusInPixels = _blurRadiusInPixels;\n@synthesize aspectRatio = _aspectRatio;\n\n- (id)init;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n    hasOverriddenAspectRatio = NO;\n    \n    // First pass: apply a variable Gaussian blur\n    blurFilter = [[GPUImageGaussianBlurFilter alloc] init];\n    [self addFilter:blurFilter];\n    \n    // Second pass: combine the blurred image with the original sharp one\n    selectiveFocusFilter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:kGPUImageGaussianSelectiveBlurFragmentShaderString];\n    [self addFilter:selectiveFocusFilter];\n    \n    // Texture location 0 needs to be the sharp image for both the blur and the second stage processing\n    [blurFilter addTarget:selectiveFocusFilter atTextureLocation:1];\n    \n    // To prevent double updating of this filter, disable updates from the sharp image side    \n    self.initialFilters = [NSArray arrayWithObjects:blurFilter, selectiveFocusFilter, nil];\n    self.terminalFilter = selectiveFocusFilter;\n    \n    self.blurRadiusInPixels = 5.0;\n    \n    self.excludeCircleRadius = 60.0/320.0;\n    self.excludeCirclePoint = 
CGPointMake(0.5f, 0.5f);\n    self.excludeBlurSize = 30.0/320.0;\n    \n    return self;\n}\n\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n{\n    CGSize oldInputSize = inputTextureSize;\n    [super setInputSize:newSize atIndex:textureIndex];\n    inputTextureSize = newSize;\n    \n    if ( (!CGSizeEqualToSize(oldInputSize, inputTextureSize)) && (!hasOverriddenAspectRatio) && (!CGSizeEqualToSize(newSize, CGSizeZero)) )\n    {\n        _aspectRatio = (inputTextureSize.width / inputTextureSize.height);\n        [selectiveFocusFilter setFloat:_aspectRatio forUniformName:@\"aspectRatio\"];\n    }\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setBlurRadiusInPixels:(CGFloat)newValue;\n{\n    blurFilter.blurRadiusInPixels = newValue;\n}\n\n- (CGFloat)blurRadiusInPixels;\n{\n    return blurFilter.blurRadiusInPixels;\n}\n\n- (void)setExcludeCirclePoint:(CGPoint)newValue;\n{\n    _excludeCirclePoint = newValue;\n    [selectiveFocusFilter setPoint:newValue forUniformName:@\"excludeCirclePoint\"];\n}\n\n- (void)setExcludeCircleRadius:(CGFloat)newValue;\n{\n    _excludeCircleRadius = newValue;\n    [selectiveFocusFilter setFloat:newValue forUniformName:@\"excludeCircleRadius\"];\n}\n\n- (void)setExcludeBlurSize:(CGFloat)newValue;\n{\n    _excludeBlurSize = newValue;\n    [selectiveFocusFilter setFloat:newValue forUniformName:@\"excludeBlurSize\"];\n}\n\n- (void)setAspectRatio:(CGFloat)newValue;\n{\n    hasOverriddenAspectRatio = YES;\n    _aspectRatio = newValue;    \n    [selectiveFocusFilter setFloat:_aspectRatio forUniformName:@\"aspectRatio\"];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGlassSphereFilter.h",
    "content": "#import \"GPUImageSphereRefractionFilter.h\"\n\n@interface GPUImageGlassSphereFilter : GPUImageSphereRefractionFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGlassSphereFilter.m",
    "content": "#import \"GPUImageGlassSphereFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageGlassSphereFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform highp vec2 center;\n uniform highp float radius;\n uniform highp float aspectRatio;\n uniform highp float refractiveIndex;\n// uniform vec3 lightPosition;\n const highp vec3 lightPosition = vec3(-0.5, 0.5, 1.0);\n const highp vec3 ambientLightPosition = vec3(0.0, 0.0, 1.0);\n \n void main()\n {\n     highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n     highp float distanceFromCenter = distance(center, textureCoordinateToUse);\n     lowp float checkForPresenceWithinSphere = step(distanceFromCenter, radius);\n     \n     distanceFromCenter = distanceFromCenter / radius;\n     \n     highp float normalizedDepth = radius * sqrt(1.0 - distanceFromCenter * distanceFromCenter);\n     highp vec3 sphereNormal = normalize(vec3(textureCoordinateToUse - center, normalizedDepth));\n     \n     highp vec3 refractedVector = 2.0 * refract(vec3(0.0, 0.0, -1.0), sphereNormal, refractiveIndex);\n     refractedVector.xy = -refractedVector.xy;\n     \n     highp vec3 finalSphereColor = texture2D(inputImageTexture, (refractedVector.xy + 1.0) * 0.5).rgb;\n     \n     // Grazing angle lighting\n     highp float lightingIntensity = 2.5 * (1.0 - pow(clamp(dot(ambientLightPosition, sphereNormal), 0.0, 1.0), 0.25));\n     finalSphereColor += lightingIntensity;\n     \n     // Specular lighting\n     lightingIntensity  = clamp(dot(normalize(lightPosition), sphereNormal), 0.0, 1.0);\n     lightingIntensity  = pow(lightingIntensity, 15.0);\n     finalSphereColor += vec3(0.8, 0.8, 0.8) * lightingIntensity;\n     \n     gl_FragColor = vec4(finalSphereColor, 1.0) * checkForPresenceWithinSphere;\n }\n);\n#else\nNSString *const kGPUImageGlassSphereFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform vec2 center;\n uniform float radius;\n uniform float aspectRatio;\n uniform float refractiveIndex;\n // uniform vec3 lightPosition;\n const vec3 lightPosition = vec3(-0.5, 0.5, 1.0);\n const vec3 ambientLightPosition = vec3(0.0, 0.0, 1.0);\n \n void main()\n {\n     vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n     float distanceFromCenter = distance(center, textureCoordinateToUse);\n     float checkForPresenceWithinSphere = step(distanceFromCenter, radius);\n     \n     distanceFromCenter = distanceFromCenter / radius;\n     \n     float normalizedDepth = radius * sqrt(1.0 - distanceFromCenter * distanceFromCenter);\n     vec3 sphereNormal = normalize(vec3(textureCoordinateToUse - center, normalizedDepth));\n     \n     vec3 refractedVector = 2.0 * refract(vec3(0.0, 0.0, -1.0), sphereNormal, refractiveIndex);\n     refractedVector.xy = -refractedVector.xy;\n     \n     vec3 finalSphereColor = texture2D(inputImageTexture, (refractedVector.xy + 1.0) * 0.5).rgb;\n     \n     // Grazing angle lighting\n     float lightingIntensity = 2.5 * (1.0 - pow(clamp(dot(ambientLightPosition, sphereNormal), 0.0, 1.0), 0.25));\n     finalSphereColor += lightingIntensity;\n     \n     // Specular lighting\n     lightingIntensity  = clamp(dot(normalize(lightPosition), sphereNormal), 0.0, 1.0);\n     lightingIntensity  = pow(lightingIntensity, 
15.0);\n     finalSphereColor += vec3(0.8, 0.8, 0.8) * lightingIntensity;\n     \n     gl_FragColor = vec4(finalSphereColor, 1.0) * checkForPresenceWithinSphere;\n }\n);\n#endif\n\n@implementation GPUImageGlassSphereFilter\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageGlassSphereFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGrayscaleFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\nextern NSString *const kGPUImageLuminanceFragmentShaderString;\n\n/** Converts an image to grayscale (a slightly faster implementation of the saturation filter, without the ability to vary the color contribution)\n */\n@interface GPUImageGrayscaleFilter : GPUImageFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGrayscaleFilter.m",
    "content": "#import \"GPUImageGrayscaleFilter.h\"\n\n@implementation GPUImageGrayscaleFilter\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageLuminanceFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n \n void main()\n {\n     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     float luminance = dot(textureColor.rgb, W);\n     \n     gl_FragColor = vec4(vec3(luminance), textureColor.a);\n }\n);\n#else\nNSString *const kGPUImageLuminanceFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n const vec3 W = vec3(0.2125, 0.7154, 0.0721);\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     float luminance = dot(textureColor.rgb, W);\n     \n     gl_FragColor = vec4(vec3(luminance), textureColor.a);\n }\n);\n#endif\n\n\n- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;\n{\n    if (!currentlyReceivingMonochromeInput)\n    {\n        [super renderToTextureWithVertices:vertices textureCoordinates:textureCoordinates];\n    }\n}\n\n//- (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex;\n//{\n//    [super setInputTexture:newInputTexture atIndex:textureIndex];\n//    if (currentlyReceivingMonochromeInput)\n//    {\n//        [self notifyTargetsAboutNewOutputTexture];\n//    }\n//}\n\n//- (GLuint)textureForOutput;\n//{\n//    if (currentlyReceivingMonochromeInput)\n//    {\n//        return filterSourceTexture;\n//    }\n//    else\n//    {\n//        return outputTexture;\n//    }\n//}\n\n- (BOOL)wantsMonochromeInput;\n{\n//    return YES;\n    return NO;\n}\n\n- (BOOL)providesMonochromeOutput;\n{\n//    return YES;\n    return NO;\n}\n\n// TODO: Rewrite this based on the new GPUImageFilter implementation\n//- (void)informTargetsAboutNewFrameAtTime:(CMTime)frameTime;\n//{\n//    if (self.frameProcessingCompletionBlock != NULL)\n//    {\n//        self.frameProcessingCompletionBlock(self, frameTime);\n//    }\n//    \n//    for (id<GPUImageInput> currentTarget in targets)\n//    {\n//        if (currentTarget != self.targetToIgnoreForUpdates)\n//        {\n//            NSInteger indexOfObject = [targets indexOfObject:currentTarget];\n//            NSInteger textureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n//            \n//            if ([GPUImageContext supportsFastTextureUpload] && preparedToCaptureImage)\n//            {\n//                [self setInputTextureForTarget:currentTarget atIndex:textureIndex];\n//            }\n//\n//            if (currentlyReceivingMonochromeInput)\n//            {\n//                [currentTarget setInputRotation:inputRotation atIndex:textureIndex];\n//                \n//                CGSize sizeToRotate = [self outputFrameSize];\n//                CGSize rotatedSize = sizeToRotate;\n//                if (GPUImageRotationSwapsWidthAndHeight(inputRotation))\n//                {\n//                    rotatedSize.width = sizeToRotate.height;\n//                    rotatedSize.height = sizeToRotate.width;\n//                }\n//                [currentTarget setInputSize:rotatedSize atIndex:textureIndex];\n//            }\n//            else\n//            {\n//                [currentTarget setInputSize:[self 
outputFrameSize] atIndex:textureIndex];\n//            }\n//            [currentTarget newFrameReadyAtTime:frameTime atIndex:textureIndex];\n//        }\n//    }\n//}\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageLuminanceFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHSBFilter.h",
    "content": "#import \"GPUImageColorMatrixFilter.h\"\n\n@interface GPUImageHSBFilter : GPUImageColorMatrixFilter\n\n/** Reset the filter to have no transformations.\n */\n- (void)reset;\n\n/** Add a hue rotation to the filter.\n The hue rotation is in the range [-360, 360] with 0 being no-change.\n Note that this adjustment is additive, so use the reset method if you need to.\n */\n- (void)rotateHue:(float)h;\n\n/** Add a saturation adjustment to the filter.\n The saturation adjustment is in the range [0.0, 2.0] with 1.0 being no-change.\n Note that this adjustment is additive, so use the reset method if you need to.\n */\n- (void)adjustSaturation:(float)s;\n\n/** Add a brightness adjustment to the filter.\n The brightness adjustment is in the range [0.0, 2.0] with 1.0 being no-change.\n Note that this adjustment is additive, so use the reset method if you need to.\n */\n- (void)adjustBrightness:(float)b;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHSBFilter.m",
    "content": "#import \"GPUImageHSBFilter.h\"\n\n@implementation GPUImageHSBFilter {\n    float matrix[4][4];\n}\n\n- (id)init\n{\n    self = [super init];\n    if (self) {\n        [self reset];\n    }\n    return self;\n}\n\n- (void)reset {\n    identmat(matrix);\n    [self _updateColorMatrix];\n}\n\n- (void)rotateHue:(float)h {\n    huerotatemat(matrix, h);\n    [self _updateColorMatrix];\n}\n\n- (void)adjustSaturation:(float)s {\n    saturatemat(matrix, s);\n    [self _updateColorMatrix];\n}\n\n- (void)adjustBrightness:(float)b {\n    cscalemat(matrix, b, b, b);\n    [self _updateColorMatrix];\n}\n\n- (void)_updateColorMatrix {\n    GPUMatrix4x4 gpuMatrix;\n    gpuMatrix.one.one = matrix[0][0];\n    gpuMatrix.one.two = matrix[1][0];\n    gpuMatrix.one.three = matrix[2][0];\n    gpuMatrix.one.four = matrix[3][0];\n    gpuMatrix.two.one = matrix[0][1];\n    gpuMatrix.two.two = matrix[1][1];\n    gpuMatrix.two.three = matrix[2][1];\n    gpuMatrix.two.four = matrix[3][1];\n    gpuMatrix.three.one = matrix[0][2];\n    gpuMatrix.three.two = matrix[1][2];\n    gpuMatrix.three.three = matrix[2][2];\n    gpuMatrix.three.four = matrix[3][2];\n    gpuMatrix.four.one = matrix[0][3];\n    gpuMatrix.four.two = matrix[1][3];\n    gpuMatrix.four.three = matrix[2][3];\n    gpuMatrix.four.four = matrix[3][3];\n    self.colorMatrix = gpuMatrix;\n}\n\n#pragma mark - Matrix algorithms\n\n/* Matrix algorithms adapted from http://www.graficaobscura.com/matrix/index.html\n \n Note about luminance vector values below from that page:\n Where rwgt is 0.3086, gwgt is 0.6094, and bwgt is 0.0820. This is the luminance vector. Notice here that we do not use the standard NTSC weights of 0.299, 0.587, and 0.114. The NTSC weights are only applicable to RGB colors in a gamma 2.2 color space. 
For linear RGB colors the values above are better.\n */\n//#define RLUM (0.3086f)\n//#define GLUM (0.6094f)\n//#define BLUM (0.0820f)\n\n/* This is the vector value from the PDF specification, and may be closer to what Photoshop uses */\n#define RLUM (0.3f)\n#define GLUM (0.59f)\n#define BLUM (0.11f)\n\n/*\n *\tmatrixmult -\n *\t\tmultiply two matricies\n */\nstatic void matrixmult(a,b,c)\nfloat a[4][4], b[4][4], c[4][4];\n{\n    int x, y;\n    float temp[4][4];\n    \n    for(y=0; y<4 ; y++)\n        for(x=0 ; x<4 ; x++) {\n            temp[y][x] = b[y][0] * a[0][x]\n            + b[y][1] * a[1][x]\n            + b[y][2] * a[2][x]\n            + b[y][3] * a[3][x];\n        }\n    for(y=0; y<4; y++)\n        for(x=0; x<4; x++)\n            c[y][x] = temp[y][x];\n}\n\n/*\n *\tidentmat -\n *\t\tmake an identity matrix\n */\nstatic void identmat(matrix)\nfloat matrix[4][4];\n{\n    memset(matrix, 0, sizeof(float[4][4]));\n    matrix[0][0] = 1.0f;\n    matrix[1][1] = 1.0f;\n    matrix[2][2] = 1.0f;\n    matrix[3][3] = 1.0f;\n}\n\n/*\n *\txformpnt -\n *\t\ttransform a 3D point using a matrix\n */\nstatic void xformpnt(matrix,x,y,z,tx,ty,tz)\nfloat matrix[4][4];\nfloat x,y,z;\nfloat *tx,*ty,*tz;\n{\n    *tx = x*matrix[0][0] + y*matrix[1][0] + z*matrix[2][0] + matrix[3][0];\n    *ty = x*matrix[0][1] + y*matrix[1][1] + z*matrix[2][1] + matrix[3][1];\n    *tz = x*matrix[0][2] + y*matrix[1][2] + z*matrix[2][2] + matrix[3][2];\n}\n\n/*\n *\tcscalemat -\n *\t\tmake a color scale marix\n */\nstatic void cscalemat(mat,rscale,gscale,bscale)\nfloat mat[4][4];\nfloat rscale, gscale, bscale;\n{\n    float mmat[4][4];\n    \n    mmat[0][0] = rscale;\n    mmat[0][1] = 0.0;\n    mmat[0][2] = 0.0;\n    mmat[0][3] = 0.0;\n    \n    mmat[1][0] = 0.0;\n    mmat[1][1] = gscale;\n    mmat[1][2] = 0.0;\n    mmat[1][3] = 0.0;\n    \n    \n    mmat[2][0] = 0.0;\n    mmat[2][1] = 0.0;\n    mmat[2][2] = bscale;\n    mmat[2][3] = 0.0;\n    \n    mmat[3][0] = 0.0;\n    mmat[3][1] = 0.0;\n    mmat[3][2] = 0.0;\n    mmat[3][3] = 1.0;\n    matrixmult(mmat,mat,mat);\n}\n\n/*\n *\tsaturatemat -\n *\t\tmake a saturation marix\n */\nstatic void saturatemat(mat,sat)\nfloat mat[4][4];\nfloat sat;\n{\n    float mmat[4][4];\n    float a, b, c, d, e, f, g, h, i;\n    float rwgt, gwgt, bwgt;\n    \n    rwgt = RLUM;\n    gwgt = GLUM;\n    bwgt = BLUM;\n    \n    a = (1.0-sat)*rwgt + sat;\n    b = (1.0-sat)*rwgt;\n    c = (1.0-sat)*rwgt;\n    d = (1.0-sat)*gwgt;\n    e = (1.0-sat)*gwgt + sat;\n    f = (1.0-sat)*gwgt;\n    g = (1.0-sat)*bwgt;\n    h = (1.0-sat)*bwgt;\n    i = (1.0-sat)*bwgt + sat;\n    mmat[0][0] = a;\n    mmat[0][1] = b;\n    mmat[0][2] = c;\n    mmat[0][3] = 0.0;\n    \n    mmat[1][0] = d;\n    mmat[1][1] = e;\n    mmat[1][2] = f;\n    mmat[1][3] = 0.0;\n    \n    mmat[2][0] = g;\n    mmat[2][1] = h;\n    mmat[2][2] = i;\n    mmat[2][3] = 0.0;\n    \n    mmat[3][0] = 0.0;\n    mmat[3][1] = 0.0;\n    mmat[3][2] = 0.0;\n    mmat[3][3] = 1.0;\n    matrixmult(mmat,mat,mat);\n}\n\n/*\n *\txrotate -\n *\t\trotate about the x (red) axis\n */\nstatic void xrotatemat(mat,rs,rc)\nfloat mat[4][4];\nfloat rs, rc;\n{\n    float mmat[4][4];\n    \n    mmat[0][0] = 1.0;\n    mmat[0][1] = 0.0;\n    mmat[0][2] = 0.0;\n    mmat[0][3] = 0.0;\n    \n    mmat[1][0] = 0.0;\n    mmat[1][1] = rc;\n    mmat[1][2] = rs;\n    mmat[1][3] = 0.0;\n    \n    mmat[2][0] = 0.0;\n    mmat[2][1] = -rs;\n    mmat[2][2] = rc;\n    mmat[2][3] = 0.0;\n    \n    mmat[3][0] = 0.0;\n    mmat[3][1] = 0.0;\n    mmat[3][2] = 0.0;\n    mmat[3][3] = 1.0;\n    
matrixmult(mmat,mat,mat);\n}\n\n/*\n *\tyrotate -\n *\t\trotate about the y (green) axis\n */\nstatic void yrotatemat(mat,rs,rc)\nfloat mat[4][4];\nfloat rs, rc;\n{\n    float mmat[4][4];\n    \n    mmat[0][0] = rc;\n    mmat[0][1] = 0.0;\n    mmat[0][2] = -rs;\n    mmat[0][3] = 0.0;\n    \n    mmat[1][0] = 0.0;\n    mmat[1][1] = 1.0;\n    mmat[1][2] = 0.0;\n    mmat[1][3] = 0.0;\n    \n    mmat[2][0] = rs;\n    mmat[2][1] = 0.0;\n    mmat[2][2] = rc;\n    mmat[2][3] = 0.0;\n    \n    mmat[3][0] = 0.0;\n    mmat[3][1] = 0.0;\n    mmat[3][2] = 0.0;\n    mmat[3][3] = 1.0;\n    matrixmult(mmat,mat,mat);\n}\n\n/*\n *\tzrotate -\n *\t\trotate about the z (blue) axis\n */\nstatic void zrotatemat(mat,rs,rc)\nfloat mat[4][4];\nfloat rs, rc;\n{\n    float mmat[4][4];\n    \n    mmat[0][0] = rc;\n    mmat[0][1] = rs;\n    mmat[0][2] = 0.0;\n    mmat[0][3] = 0.0;\n    \n    mmat[1][0] = -rs;\n    mmat[1][1] = rc;\n    mmat[1][2] = 0.0;\n    mmat[1][3] = 0.0;\n    \n    mmat[2][0] = 0.0;\n    mmat[2][1] = 0.0;\n    mmat[2][2] = 1.0;\n    mmat[2][3] = 0.0;\n    \n    mmat[3][0] = 0.0;\n    mmat[3][1] = 0.0;\n    mmat[3][2] = 0.0;\n    mmat[3][3] = 1.0;\n    matrixmult(mmat,mat,mat);\n}\n\n/*\n *\tzshear -\n *\t\tshear z using x and y.\n */\nstatic void zshearmat(mat,dx,dy)\nfloat mat[4][4];\nfloat dx, dy;\n{\n    float mmat[4][4];\n    \n    mmat[0][0] = 1.0;\n    mmat[0][1] = 0.0;\n    mmat[0][2] = dx;\n    mmat[0][3] = 0.0;\n    \n    mmat[1][0] = 0.0;\n    mmat[1][1] = 1.0;\n    mmat[1][2] = dy;\n    mmat[1][3] = 0.0;\n    \n    mmat[2][0] = 0.0;\n    mmat[2][1] = 0.0;\n    mmat[2][2] = 1.0;\n    mmat[2][3] = 0.0;\n    \n    mmat[3][0] = 0.0;\n    mmat[3][1] = 0.0;\n    mmat[3][2] = 0.0;\n    mmat[3][3] = 1.0;\n    matrixmult(mmat,mat,mat);\n}\n\n/*\n *\tsimplehuerotatemat -\n *\t\tsimple hue rotation. 
This changes luminance\n */\n//static void simplehuerotatemat(mat,rot)\n//float mat[4][4];\n//float rot;\n//{\n//    float mag;\n//    float xrs, xrc;\n//    float yrs, yrc;\n//    float zrs, zrc;\n//    \n//    /* rotate the grey vector into positive Z */\n//    mag = sqrt(2.0);\n//    xrs = 1.0/mag;\n//    xrc = 1.0/mag;\n//    xrotatemat(mat,xrs,xrc);\n//    \n//    mag = sqrt(3.0);\n//    yrs = -1.0/mag;\n//    yrc = sqrt(2.0)/mag;\n//    yrotatemat(mat,yrs,yrc);\n//    \n//    /* rotate the hue */\n//    zrs = sin(rot*M_PI/180.0);\n//    zrc = cos(rot*M_PI/180.0);\n//    zrotatemat(mat,zrs,zrc);\n//    \n//    /* rotate the grey vector back into place */\n//    yrotatemat(mat,-yrs,yrc);\n//    xrotatemat(mat,-xrs,xrc);\n//}\n\n/*\n *\thuerotatemat -\n *\t\trotate the hue, while maintaining luminance.\n */\nstatic void huerotatemat(mat,rot)\nfloat mat[4][4];\nfloat rot;\n{\n    float mmat[4][4];\n    float mag;\n    float lx, ly, lz;\n    float xrs, xrc;\n    float yrs, yrc;\n    float zrs, zrc;\n    float zsx, zsy;\n    \n    identmat(mmat);\n    \n    /* rotate the grey vector into positive Z */\n    mag = sqrt(2.0);\n    xrs = 1.0/mag;\n    xrc = 1.0/mag;\n    xrotatemat(mmat,xrs,xrc);\n    mag = sqrt(3.0);\n    yrs = -1.0/mag;\n    yrc = sqrt(2.0)/mag;\n    yrotatemat(mmat,yrs,yrc);\n    \n    /* shear the space to make the luminance plane horizontal */\n    xformpnt(mmat,RLUM,GLUM,BLUM,&lx,&ly,&lz);\n    zsx = lx/lz;\n    zsy = ly/lz;\n    zshearmat(mmat,zsx,zsy);\n    \n    /* rotate the hue */\n    zrs = sin(rot*M_PI/180.0);\n    zrc = cos(rot*M_PI/180.0);\n    zrotatemat(mmat,zrs,zrc);\n    \n    /* unshear the space to put the luminance plane back */\n    zshearmat(mmat,-zsx,-zsy);\n    \n    /* rotate the grey vector back into place */\n    yrotatemat(mmat,-yrs,yrc);\n    xrotatemat(mmat,-xrs,xrc);\n    \n    matrixmult(mmat,mat,mat);\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHalftoneFilter.h",
    "content": "#import \"GPUImagePixellateFilter.h\"\n\n@interface GPUImageHalftoneFilter : GPUImagePixellateFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHalftoneFilter.m",
    "content": "#import \"GPUImageHalftoneFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageHalftoneFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform highp float fractionalWidthOfPixel;\n uniform highp float aspectRatio;\n uniform highp float dotScaling;\n \n const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n\n void main()\n {\n     highp vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);\n     \n     highp vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor;\n     highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n     highp vec2 adjustedSamplePos = vec2(samplePos.x, (samplePos.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n     highp float distanceFromSamplePoint = distance(adjustedSamplePos, textureCoordinateToUse);\n     \n     lowp vec3 sampledColor = texture2D(inputImageTexture, samplePos ).rgb;\n     highp float dotScaling = 1.0 - dot(sampledColor, W);\n    \n     lowp float checkForPresenceWithinDot = 1.0 - step(distanceFromSamplePoint, (fractionalWidthOfPixel * 0.5) * dotScaling);\n     \n     gl_FragColor = vec4(vec3(checkForPresenceWithinDot), 1.0);\n }\n);\n#else\nNSString *const kGPUImageHalftoneFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform float fractionalWidthOfPixel;\n uniform float aspectRatio;\n uniform float dotScaling;\n \n const vec3 W = vec3(0.2125, 0.7154, 0.0721);\n \n void main()\n {\n     vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);\n     \n     vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor;\n     vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n     vec2 adjustedSamplePos = vec2(samplePos.x, (samplePos.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n     float distanceFromSamplePoint = distance(adjustedSamplePos, textureCoordinateToUse);\n     \n     vec3 sampledColor = texture2D(inputImageTexture, samplePos ).rgb;\n     float dotScaling = 1.0 - dot(sampledColor, W);\n     \n     float checkForPresenceWithinDot = 1.0 - step(distanceFromSamplePoint, (fractionalWidthOfPixel * 0.5) * dotScaling);\n     \n     gl_FragColor = vec4(vec3(checkForPresenceWithinDot), 1.0);\n }\n);\n#endif\n\n@implementation GPUImageHalftoneFilter\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageHalftoneFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    self.fractionalWidthOfAPixel = 0.01;\n    \n    return self;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHardLightBlendFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageHardLightBlendFilter : GPUImageTwoInputFilter\n{\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHardLightBlendFilter.m",
    "content": "#import \"GPUImageHardLightBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageHardLightBlendFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n\n const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n\n void main()\n {\n     mediump vec4 base = texture2D(inputImageTexture, textureCoordinate);\n     mediump vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);\n\n     highp float ra;\n     if (2.0 * overlay.r < overlay.a) {\n         ra = 2.0 * overlay.r * base.r + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);\n     } else {\n         ra = overlay.a * base.a - 2.0 * (base.a - base.r) * (overlay.a - overlay.r) + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);\n     }\n     \n     highp float ga;\n     if (2.0 * overlay.g < overlay.a) {\n         ga = 2.0 * overlay.g * base.g + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);\n     } else {\n         ga = overlay.a * base.a - 2.0 * (base.a - base.g) * (overlay.a - overlay.g) + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);\n     }\n     \n     highp float ba;\n     if (2.0 * overlay.b < overlay.a) {\n         ba = 2.0 * overlay.b * base.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);\n     } else {\n         ba = overlay.a * base.a - 2.0 * (base.a - base.b) * (overlay.a - overlay.b) + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);\n     }\n     \n     gl_FragColor = vec4(ra, ga, ba, 1.0);\n }\n);\n#else\nNSString *const kGPUImageHardLightBlendFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n const vec3 W = vec3(0.2125, 0.7154, 0.0721);\n \n void main()\n {\n     vec4 base = texture2D(inputImageTexture, textureCoordinate);\n     vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     float ra;\n     if (2.0 * overlay.r < overlay.a) {\n         ra = 2.0 * overlay.r * base.r + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);\n     } else {\n         ra = overlay.a * base.a - 2.0 * (base.a - base.r) * (overlay.a - overlay.r) + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);\n     }\n     \n     float ga;\n     if (2.0 * overlay.g < overlay.a) {\n         ga = 2.0 * overlay.g * base.g + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);\n     } else {\n         ga = overlay.a * base.a - 2.0 * (base.a - base.g) * (overlay.a - overlay.g) + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);\n     }\n     \n     float ba;\n     if (2.0 * overlay.b < overlay.a) {\n         ba = 2.0 * overlay.b * base.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);\n     } else {\n         ba = overlay.a * base.a - 2.0 * (base.a - base.b) * (overlay.a - overlay.b) + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);\n     }\n     \n     gl_FragColor = vec4(ra, ga, ba, 1.0);\n }\n);\n#endif\n\n\n@implementation GPUImageHardLightBlendFilter\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageHardLightBlendFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHarrisCornerDetectionFilter.h",
    "content": "#import \"GPUImageFilterGroup.h\"\n\n@class GPUImageGaussianBlurFilter;\n@class GPUImageXYDerivativeFilter;\n@class GPUImageGrayscaleFilter;\n@class GPUImageGaussianBlurFilter;\n@class GPUImageThresholdedNonMaximumSuppressionFilter;\n@class GPUImageColorPackingFilter;\n\n//#define DEBUGFEATUREDETECTION\n\n/** Harris corner detector\n \n First pass: reduce to luminance and take the derivative of the luminance texture (GPUImageXYDerivativeFilter)\n \n Second pass: blur the derivative (GPUImageGaussianBlurFilter)\n \n Third pass: apply the Harris corner detection calculation\n \n This is the Harris corner detector, as described in \n C. Harris and M. Stephens. A Combined Corner and Edge Detector. Proc. Alvey Vision Conf., Univ. Manchester, pp. 147-151, 1988.\n */\n@interface GPUImageHarrisCornerDetectionFilter : GPUImageFilterGroup\n{\n    GPUImageXYDerivativeFilter *derivativeFilter;\n    GPUImageGaussianBlurFilter *blurFilter;\n    GPUImageFilter *harrisCornerDetectionFilter;\n    GPUImageThresholdedNonMaximumSuppressionFilter *nonMaximumSuppressionFilter;\n    GPUImageColorPackingFilter *colorPackingFilter;\n    GLfloat *cornersArray;\n    GLubyte *rawImagePixels;\n}\n\n/** The radius of the underlying Gaussian blur. The default is 2.0.\n */\n@property(readwrite, nonatomic) CGFloat blurRadiusInPixels;\n\n// This changes the dynamic range of the Harris corner detector by amplifying small cornerness values. Default is 5.0.\n@property(readwrite, nonatomic) CGFloat sensitivity;\n\n// A threshold value at which a point is recognized as being a corner after the non-maximum suppression. Default is 0.20.\n@property(readwrite, nonatomic) CGFloat threshold;\n\n// This block is called on the detection of new corner points, usually on every processed frame. A C array containing normalized coordinates in X, Y pairs is passed in, along with a count of the number of corners detected and the current timestamp of the video frame\n@property(nonatomic, copy) void(^cornersDetectedBlock)(GLfloat* cornerArray, NSUInteger cornersDetected, CMTime frameTime);\n\n// These images are only enabled when built with DEBUGFEATUREDETECTION defined, and are used to examine the intermediate states of the feature detector\n@property(nonatomic, readonly, strong) NSMutableArray *intermediateImages;\n\n// Initialization and teardown\n- (id)initWithCornerDetectionFragmentShader:(NSString *)cornerDetectionFragmentShader;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHarrisCornerDetectionFilter.m",
    "content": "#import \"GPUImageHarrisCornerDetectionFilter.h\"\n#import \"GPUImageGaussianBlurFilter.h\"\n#import \"GPUImageXYDerivativeFilter.h\"\n#import \"GPUImageGrayscaleFilter.h\"\n#import \"GPUImageThresholdedNonMaximumSuppressionFilter.h\"\n#import \"GPUImageColorPackingFilter.h\"\n#import \"GPUImageGaussianBlurFilter.h\"\n\n@interface GPUImageHarrisCornerDetectionFilter()\n\n- (void)extractCornerLocationsFromImageAtFrameTime:(CMTime)frameTime;\n\n@end\n\n// This is the Harris corner detector, as described in \n// C. Harris and M. Stephens. A Combined Corner and Edge Detector. Proc. Alvey Vision Conf., Univ. Manchester, pp. 147-151, 1988.\n\n@implementation GPUImageHarrisCornerDetectionFilter\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageHarrisCornerDetectionFragmentShaderString = SHADER_STRING\n( \n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform lowp float sensitivity;\n \n const mediump float harrisConstant = 0.04;\n \n void main()\n {\n     mediump vec3 derivativeElements = texture2D(inputImageTexture, textureCoordinate).rgb;\n     \n     mediump float derivativeSum = derivativeElements.x + derivativeElements.y;\n     \n     mediump float zElement = (derivativeElements.z * 2.0) - 1.0;\n\n     // R = Ix^2 * Iy^2 - Ixy * Ixy - k * (Ix^2 + Iy^2)^2\n     mediump float cornerness = derivativeElements.x * derivativeElements.y - (zElement * zElement) - harrisConstant * derivativeSum * derivativeSum;\n     \n     gl_FragColor = vec4(vec3(cornerness * sensitivity), 1.0);\n }\n);\n#else\nNSString *const kGPUImageHarrisCornerDetectionFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform float sensitivity;\n \n const float harrisConstant = 0.04;\n \n void main()\n {\n     vec3 derivativeElements = texture2D(inputImageTexture, textureCoordinate).rgb;\n     \n     float derivativeSum = derivativeElements.x + derivativeElements.y;\n     \n     float zElement = (derivativeElements.z * 2.0) - 1.0;\n     \n     // R = Ix^2 * Iy^2 - Ixy * Ixy - k * (Ix^2 + Iy^2)^2\n     float cornerness = derivativeElements.x * derivativeElements.y - (zElement * zElement) - harrisConstant * derivativeSum * derivativeSum;\n     \n     gl_FragColor = vec4(vec3(cornerness * sensitivity), 1.0);\n }\n);\n#endif\n\n@synthesize blurRadiusInPixels;\n@synthesize cornersDetectedBlock;\n@synthesize sensitivity = _sensitivity;\n@synthesize threshold = _threshold;\n@synthesize intermediateImages = _intermediateImages;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [self initWithCornerDetectionFragmentShader:kGPUImageHarrisCornerDetectionFragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    return self;\n}\n\n- (id)initWithCornerDetectionFragmentShader:(NSString *)cornerDetectionFragmentShader;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n\n#ifdef DEBUGFEATUREDETECTION\n    _intermediateImages = [[NSMutableArray alloc] init];\n#endif\n    \n    // First pass: reduce to luminance and take the derivative of the luminance texture\n    derivativeFilter = [[GPUImageXYDerivativeFilter alloc] init];\n    [self addFilter:derivativeFilter];\n\n#ifdef DEBUGFEATUREDETECTION\n    __unsafe_unretained NSMutableArray *weakIntermediateImages = _intermediateImages;\n    __unsafe_unretained GPUImageFilter *weakFilter = derivativeFilter;\n    [derivativeFilter setFrameProcessingCompletionBlock:^(GPUImageOutput 
*filter, CMTime frameTime){\n        UIImage *intermediateImage = [weakFilter imageFromCurrentlyProcessedOutput];\n        [weakIntermediateImages addObject:intermediateImage];\n    }];\n#endif\n\n    // Second pass: blur the derivative\n    blurFilter = [[GPUImageGaussianBlurFilter alloc] init];\n    [self addFilter:blurFilter];\n    \n#ifdef DEBUGFEATUREDETECTION\n    weakFilter = blurFilter;\n    [blurFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){\n        UIImage *intermediateImage = [weakFilter imageFromCurrentlyProcessedOutput];\n        [weakIntermediateImages addObject:intermediateImage];\n    }];\n#endif\n    \n    // Third pass: apply the Harris corner detection calculation\n    harrisCornerDetectionFilter = [[GPUImageFilter alloc] initWithFragmentShaderFromString:cornerDetectionFragmentShader];\n    [self addFilter:harrisCornerDetectionFilter];\n\n#ifdef DEBUGFEATUREDETECTION\n    weakFilter = harrisCornerDetectionFilter;\n    [harrisCornerDetectionFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){\n        UIImage *intermediateImage = [weakFilter imageFromCurrentlyProcessedOutput];\n        [weakIntermediateImages addObject:intermediateImage];\n    }];\n#endif\n\n    // Fourth pass: apply non-maximum suppression and thresholding to find the local maxima\n    nonMaximumSuppressionFilter = [[GPUImageThresholdedNonMaximumSuppressionFilter alloc] init];\n    [self addFilter:nonMaximumSuppressionFilter];\n\n    __unsafe_unretained GPUImageHarrisCornerDetectionFilter *weakSelf = self;\n#ifdef DEBUGFEATUREDETECTION\n    weakFilter = nonMaximumSuppressionFilter;\n    [nonMaximumSuppressionFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){\n        UIImage *intermediateImage = [weakFilter imageFromCurrentlyProcessedOutput];\n        [weakIntermediateImages addObject:intermediateImage];\n        \n        [weakSelf extractCornerLocationsFromImageAtFrameTime:frameTime];\n    }];\n#else\n    [nonMaximumSuppressionFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime) {\n        [weakSelf extractCornerLocationsFromImageAtFrameTime:frameTime];\n    }];\n#endif\n    \n// Sixth pass: compress the thresholded points into the RGBA channels\n//    colorPackingFilter = [[GPUImageColorPackingFilter alloc] init];\n//    [self addFilter:colorPackingFilter];\n//\n//    \n//#ifdef DEBUGFEATUREDETECTION\n//    __unsafe_unretained GPUImageHarrisCornerDetectionFilter *weakSelf = self;\n//    weakFilter = colorPackingFilter;\n//    [colorPackingFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){\n//        NSLog(@\"Triggered response from compaction filter\");\n//        \n//        UIImage *intermediateImage = [weakFilter imageFromCurrentlyProcessedOutput];\n//        [weakIntermediateImages addObject:intermediateImage];\n//        \n//        [weakSelf extractCornerLocationsFromImageAtFrameTime:frameTime];\n//    }];\n//#else\n//    __unsafe_unretained GPUImageHarrisCornerDetectionFilter *weakSelf = self;\n//    [colorPackingFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime) {\n//        [weakSelf extractCornerLocationsFromImageAtFrameTime:frameTime];\n//    }];\n//#endif\n    \n    [derivativeFilter addTarget:blurFilter];    \n    [blurFilter addTarget:harrisCornerDetectionFilter];\n    [harrisCornerDetectionFilter addTarget:nonMaximumSuppressionFilter];\n//    [simpleThresholdFilter 
addTarget:colorPackingFilter];\n    \n    self.initialFilters = [NSArray arrayWithObjects:derivativeFilter, nil];\n//    self.terminalFilter = colorPackingFilter;\n    self.terminalFilter = nonMaximumSuppressionFilter;\n    \n    self.blurRadiusInPixels = 2.0;\n    self.sensitivity = 5.0;\n    self.threshold = 0.20;\n    \n    return self;\n}\n     \n- (void)dealloc;\n{\n    free(rawImagePixels);\n    free(cornersArray);\n}\n\n#pragma mark -\n#pragma mark Corner extraction\n\n- (void)extractCornerLocationsFromImageAtFrameTime:(CMTime)frameTime;\n{\n    // we need a normal color texture for this filter\n    NSAssert(self.outputTextureOptions.internalFormat == GL_RGBA, @\"The output texture format for this filter must be GL_RGBA.\");\n    NSAssert(self.outputTextureOptions.type == GL_UNSIGNED_BYTE, @\"The type of the output texture of this filter must be GL_UNSIGNED_BYTE.\");\n\n    NSUInteger numberOfCorners = 0;\n    CGSize imageSize = nonMaximumSuppressionFilter.outputFrameSize;\n    \n    unsigned int imageByteSize = imageSize.width * imageSize.height * 4;\n    \n    if (rawImagePixels == NULL)\n    {\n        rawImagePixels = (GLubyte *)malloc(imageByteSize);\n        cornersArray = calloc(512 * 2, sizeof(GLfloat));\n    }    \n    \n    glReadPixels(0, 0, (int)imageSize.width, (int)imageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels);\n\n    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();\n\n    unsigned int imageWidth = imageSize.width * 4;\n    \n    unsigned int currentByte = 0;\n    unsigned int cornerStorageIndex = 0;\n    while (currentByte < imageByteSize)\n    {\n        GLubyte colorByte = rawImagePixels[currentByte];\n        \n        if (colorByte > 0)\n        {\n            unsigned int xCoordinate = currentByte % imageWidth;\n            unsigned int yCoordinate = currentByte / imageWidth;\n            \n            cornersArray[cornerStorageIndex++] = (CGFloat)(xCoordinate / 4) / imageSize.width;\n            cornersArray[cornerStorageIndex++] = (CGFloat)(yCoordinate) / imageSize.height;\n            numberOfCorners++;\n            \n            numberOfCorners = MIN(numberOfCorners, 511);\n            cornerStorageIndex = MIN(cornerStorageIndex, 1021);\n        }\n        currentByte +=4;\n    }\n    \n    CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);\n    NSLog(@\"Processing time : %f ms\", 1000.0 * currentFrameTime);\n\n    if (cornersDetectedBlock != NULL)\n    {\n        cornersDetectedBlock(cornersArray, numberOfCorners, frameTime);\n    }\n}\n\n- (BOOL)wantsMonochromeInput;\n{\n//    return YES;\n    return NO;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setBlurRadiusInPixels:(CGFloat)newValue;\n{\n    blurFilter.blurRadiusInPixels = newValue;\n}\n\n- (CGFloat)blurRadiusInPixels;\n{\n    return blurFilter.blurRadiusInPixels;\n}\n\n- (void)setSensitivity:(CGFloat)newValue;\n{\n    _sensitivity = newValue;\n    [harrisCornerDetectionFilter setFloat:newValue forUniformName:@\"sensitivity\"];\n}\n\n- (void)setThreshold:(CGFloat)newValue;\n{\n    nonMaximumSuppressionFilter.threshold = newValue;\n}\n\n- (CGFloat)threshold;\n{\n    return nonMaximumSuppressionFilter.threshold;\n}\n\n@end\n"
  },
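A minimal usage sketch for the corner detector above, assuming the upstream GPUImage camera class (GPUImageVideoCamera) is available in this tree; the block signature follows the call made in extractCornerLocationsFromImageAtFrameTime: and the property defaults set in init. Illustrative only, not part of the library:

    GPUImageVideoCamera *videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480
                                                                            cameraPosition:AVCaptureDevicePositionBack];
    GPUImageHarrisCornerDetectionFilter *cornerFilter = [[GPUImageHarrisCornerDetectionFilter alloc] init];
    cornerFilter.threshold = 0.20;
    cornerFilter.sensitivity = 5.0;

    [cornerFilter setCornersDetectedBlock:^(GLfloat *cornerArray, NSUInteger cornersDetected, CMTime frameTime) {
        // cornerArray holds x,y pairs normalized to 0.0-1.0, packed by
        // extractCornerLocationsFromImageAtFrameTime: above (capped at 511 corners).
        for (NSUInteger i = 0; i < cornersDetected; i++)
        {
            NSLog(@"Corner %lu: (%f, %f)", (unsigned long)i, cornerArray[i * 2], cornerArray[i * 2 + 1]);
        }
    }];

    [videoCamera addTarget:cornerFilter];
    [videoCamera startCameraCapture];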
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHazeFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n/*\n * The haze filter can be used to add or remove haze (similar to a UV filter)\n * \n * @author Alaric Cole\n * @creationDate 03/10/12\n *\n */\n\n/** The haze filter can be used to add or remove haze\n \n This is similar to a UV filter\n */\n@interface GPUImageHazeFilter : GPUImageFilter\n{\n    GLint distanceUniform;\n\tGLint slopeUniform;\n}\n\n/** Strength of the color applied. Default 0. Values between -.3 and .3 are best\n */\n@property(readwrite, nonatomic) CGFloat distance; \n\n/** Amount of color change. Default 0. Values between -.3 and .3 are best\n */\n@property(readwrite, nonatomic) CGFloat slope;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHazeFilter.m",
    "content": "#import \"GPUImageHazeFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageHazeFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform lowp float hazeDistance;\n uniform highp float slope;\n \n void main()\n {\n\t//todo reconsider precision modifiers\t \n\t highp vec4 color = vec4(1.0);//todo reimplement as a parameter\n\t \n\t highp float  d = textureCoordinate.y * slope  +  hazeDistance;\n\t \n\t highp vec4 c = texture2D(inputImageTexture, textureCoordinate) ; // consider using unpremultiply\n\t \n\t c = (c - d * color) / (1.0 -d);\n\t \n\t gl_FragColor = c; //consider using premultiply(c);\n }\n);\n#else\nNSString *const kGPUImageHazeFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform float hazeDistance;\n uniform float slope;\n \n void main()\n {\n     //todo reconsider precision modifiers\n\t vec4 color = vec4(1.0);//todo reimplement as a parameter\n\t \n\t float  d = textureCoordinate.y * slope  +  hazeDistance;\n\t \n\t vec4 c = texture2D(inputImageTexture, textureCoordinate) ; // consider using unpremultiply\n\t \t \n\t c = (c - d * color) / (1.0 -d);\n\t \n\t gl_FragColor = c; //consider using premultiply(c);\n }\n);\n#endif\n\n\n\n\n@implementation GPUImageHazeFilter\n\n@synthesize distance = _distance;\n@synthesize slope = _slope;\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageHazeFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    distanceUniform = [filterProgram uniformIndex:@\"hazeDistance\"];\n\tslopeUniform = [filterProgram uniformIndex:@\"slope\"];\n\t\n    self.distance = 0.2;\n    self.slope = 0.0;\n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setDistance:(CGFloat)newValue;\n{\n    _distance = newValue;\n    \n    [self setFloat:_distance forUniform:distanceUniform program:filterProgram];\n}\n\n- (void)setSlope:(CGFloat)newValue;\n{\n    _slope = newValue;\n    \n    [self setFloat:_slope forUniform:slopeUniform program:filterProgram];\n}\n\n@end\n\n"
  },
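As a quick illustration of the distance and slope parameters documented in the header above, a still-image sketch; imageByFilteringImage: is assumed from the upstream GPUImage convenience API, and the asset name is hypothetical:

    UIImage *inputImage = [UIImage imageNamed:@"landscape.jpg"]; // hypothetical asset
    GPUImageHazeFilter *hazeFilter = [[GPUImageHazeFilter alloc] init];
    hazeFilter.distance = 0.2;  // header suggests values between -0.3 and 0.3
    hazeFilter.slope = -0.1;    // varies the correction from top to bottom of the frame
    UIImage *filteredImage = [hazeFilter imageByFilteringImage:inputImage];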
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHighPassFilter.h",
    "content": "#import \"GPUImageFilterGroup.h\"\n#import \"GPUImageLowPassFilter.h\"\n#import \"GPUImageDifferenceBlendFilter.h\"\n\n@interface GPUImageHighPassFilter : GPUImageFilterGroup\n{\n    GPUImageLowPassFilter *lowPassFilter;\n    GPUImageDifferenceBlendFilter *differenceBlendFilter;\n}\n\n// This controls the degree by which the previous accumulated frames are blended and then subtracted from the current one. This ranges from 0.0 to 1.0, with a default of 0.5.\n@property(readwrite, nonatomic) CGFloat filterStrength;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHighPassFilter.m",
    "content": "#import \"GPUImageHighPassFilter.h\"\n\n@implementation GPUImageHighPassFilter\n\n@synthesize filterStrength;\n\n- (id)init;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n    // Start with a low pass filter to define the component to be removed\n    lowPassFilter = [[GPUImageLowPassFilter alloc] init];\n    [self addFilter:lowPassFilter];\n    \n    // Take the difference of the current frame from the low pass filtered result to get the high pass\n    differenceBlendFilter = [[GPUImageDifferenceBlendFilter alloc] init];\n    [self addFilter:differenceBlendFilter];\n    \n    // Texture location 0 needs to be the original image for the difference blend\n    [lowPassFilter addTarget:differenceBlendFilter atTextureLocation:1];\n    \n    self.initialFilters = [NSArray arrayWithObjects:lowPassFilter, differenceBlendFilter, nil];\n    self.terminalFilter = differenceBlendFilter;\n    \n    self.filterStrength = 0.5;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setFilterStrength:(CGFloat)newValue;\n{\n    lowPassFilter.filterStrength = newValue;\n}\n\n- (CGFloat)filterStrength;\n{\n    return lowPassFilter.filterStrength;\n}\n\n@end\n"
  },
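A sketch of the high-pass group in a motion-detection style chain, assuming the upstream GPUImage camera and view classes; filterView is a hypothetical GPUImageView already on screen:

    GPUImageVideoCamera *videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480
                                                                            cameraPosition:AVCaptureDevicePositionBack];
    GPUImageHighPassFilter *highPassFilter = [[GPUImageHighPassFilter alloc] init];
    highPassFilter.filterStrength = 0.5; // 0.0 to 1.0, default 0.5; see the property note in the header

    [videoCamera addTarget:highPassFilter];
    [highPassFilter addTarget:filterView]; // filterView: a GPUImageView in the view hierarchy
    [videoCamera startCameraCapture];

Because the low-pass stage accumulates previous frames before the difference blend, static background tends toward black in the output while moving regions remain visible.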
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHighlightShadowFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageHighlightShadowFilter : GPUImageFilter\n{\n    GLint shadowsUniform, highlightsUniform;\n}\n\n/**\n * 0 - 1, increase to lighten shadows.\n * @default 0\n */\n@property(readwrite, nonatomic) CGFloat shadows;\n\n/**\n * 0 - 1, decrease to darken highlights.\n * @default 1\n */\n@property(readwrite, nonatomic) CGFloat highlights;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHighlightShadowFilter.m",
    "content": "#import \"GPUImageHighlightShadowFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageHighlightShadowFragmentShaderString = SHADER_STRING\n(\nuniform sampler2D inputImageTexture;\nvarying highp vec2 textureCoordinate;\n \nuniform lowp float shadows;\nuniform lowp float highlights;\n\nconst mediump vec3 luminanceWeighting = vec3(0.3, 0.3, 0.3);\n\nvoid main()\n{\n\tlowp vec4 source = texture2D(inputImageTexture, textureCoordinate);\n\tmediump float luminance = dot(source.rgb, luminanceWeighting);\n\n\tmediump float shadow = clamp((pow(luminance, 1.0/(shadows+1.0)) + (-0.76)*pow(luminance, 2.0/(shadows+1.0))) - luminance, 0.0, 1.0);\n\tmediump float highlight = clamp((1.0 - (pow(1.0-luminance, 1.0/(2.0-highlights)) + (-0.8)*pow(1.0-luminance, 2.0/(2.0-highlights)))) - luminance, -1.0, 0.0);\n\tlowp vec3 result = vec3(0.0, 0.0, 0.0) + ((luminance + shadow + highlight) - 0.0) * ((source.rgb - vec3(0.0, 0.0, 0.0))/(luminance - 0.0));\n\n\tgl_FragColor = vec4(result.rgb, source.a);\n}\n);\n#else\nNSString *const kGPUImageHighlightShadowFragmentShaderString = SHADER_STRING\n(\n uniform sampler2D inputImageTexture;\n varying vec2 textureCoordinate;\n \n uniform float shadows;\n uniform float highlights;\n \n const vec3 luminanceWeighting = vec3(0.3, 0.3, 0.3);\n \n void main()\n {\n\tvec4 source = texture2D(inputImageTexture, textureCoordinate);\n\tfloat luminance = dot(source.rgb, luminanceWeighting);\n    \n\tfloat shadow = clamp((pow(luminance, 1.0/(shadows+1.0)) + (-0.76)*pow(luminance, 2.0/(shadows+1.0))) - luminance, 0.0, 1.0);\n\tfloat highlight = clamp((1.0 - (pow(1.0-luminance, 1.0/(2.0-highlights)) + (-0.8)*pow(1.0-luminance, 2.0/(2.0-highlights)))) - luminance, -1.0, 0.0);\n\tvec3 result = vec3(0.0, 0.0, 0.0) + ((luminance + shadow + highlight) - 0.0) * ((source.rgb - vec3(0.0, 0.0, 0.0))/(luminance - 0.0));\n    \n\tgl_FragColor = vec4(result.rgb, source.a);\n }\n);\n#endif\n\n@implementation GPUImageHighlightShadowFilter\n\n@synthesize shadows = _shadows;\n@synthesize highlights = _highlights;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageHighlightShadowFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    shadowsUniform = [filterProgram uniformIndex:@\"shadows\"];\n\thighlightsUniform = [filterProgram uniformIndex:@\"highlights\"];\n\t\n    self.shadows = 0.0;\n\tself.highlights = 1.0;\n\n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setShadows:(CGFloat)newValue;\n{\n    _shadows = newValue;\n\n    [self setFloat:_shadows forUniform:shadowsUniform program:filterProgram];\n}\n\n- (void)setHighlights:(CGFloat)newValue;\n{\n\t_highlights = newValue;\n\n    [self setFloat:_highlights forUniform:highlightsUniform program:filterProgram];\n}\n\n@end\n\n"
  },
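For the shadows/highlights adjustment above, a still-image sketch using the ranges from the header (shadows 0 to 1, default 0; highlights 0 to 1, default 1); sourceImage is a hypothetical UIImage and imageByFilteringImage: is assumed from the upstream GPUImage API:

    GPUImageHighlightShadowFilter *highlightShadowFilter = [[GPUImageHighlightShadowFilter alloc] init];
    highlightShadowFilter.shadows = 0.5;    // raise from 0 to lighten shadows
    highlightShadowFilter.highlights = 0.8; // lower from 1 to darken highlights
    UIImage *adjustedImage = [highlightShadowFilter imageByFilteringImage:sourceImage];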
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHighlightShadowTintFilter.h",
    "content": "//\n//  GPUImageHighlightShadowTintFilter.h\n//\n//\n//  Created by github.com/r3mus on 8/14/15.\n//\n//\n\n#import \"GPUImageFilter.h\"\n\n@interface GPUImageHighlightShadowTintFilter : GPUImageFilter\n{\n    GLint shadowTintIntensityUniform, highlightTintIntensityUniform, shadowTintColorUniform, highlightTintColorUniform;\n}\n\n// The shadowTint and highlightTint colors specify what colors replace the dark and light areas of the image, respectively. The defaults for shadows are black, highlighs white.\n@property(readwrite, nonatomic) GLfloat shadowTintIntensity;\n@property(readwrite, nonatomic) GPUVector4 shadowTintColor;\n@property(readwrite, nonatomic) GLfloat highlightTintIntensity;\n@property(readwrite, nonatomic) GPUVector4 highlightTintColor;\n\n- (void)setShadowTintColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;\n- (void)setHighlightTintColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHighlightShadowTintFilter.m",
    "content": "//\n//  GPUImageHighlightShadowTintFilter.m\n//\n//  Created by github.com/r3mus on 8/14/15.\n//\n//\n\n#import \"GPUImageHighlightShadowTintFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUHighlightShadowTintFragmentShaderString = SHADER_STRING\n(\n precision lowp float;\n \n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform lowp float shadowTintIntensity;\n uniform lowp float highlightTintIntensity;\n uniform highp vec4 shadowTintColor;\n uniform highp vec4 highlightTintColor;\n \n const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);\n \n void main()\n {\n    lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n    highp float luminance = dot(textureColor.rgb, luminanceWeighting);\n     \n    highp vec4 shadowResult = mix(textureColor, max(textureColor, vec4( mix(shadowTintColor.rgb, textureColor.rgb, luminance), textureColor.a)), shadowTintIntensity);\n    highp vec4 highlightResult = mix(textureColor, min(shadowResult, vec4( mix(shadowResult.rgb, highlightTintColor.rgb, luminance), textureColor.a)), highlightTintIntensity);\n\n    gl_FragColor = vec4( mix(shadowResult.rgb, highlightResult.rgb, luminance), textureColor.a);\n }\n );\n#else\nNSString *const kGPUHighlightShadowTintFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform float shadowTintIntensity;\n uniform float highlightTintIntensity;\n uniform vec3 shadowTintColor;\n uniform vec3 highlightTintColor;\n \n const vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);\n \n void main()\n {\n    vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n    float luminance = dot(textureColor.rgb, luminanceWeighting);\n     \n    vec4 shadowResult = mix(textureColor, max(textureColor, vec4( mix(shadowTintColor.rgb, textureColor.rgb, luminance), textureColor.a)), shadowTintIntensity);\n    vec4 highlightResult = mix(textureColor, min(shadowResult, vec4( mix(shadowResult.rgb, highlightTintColor.rgb, luminance), textureColor.a)), highlightTintIntensity);\n     \n    gl_FragColor = vec4( mix(shadowResult.rgb, highlightResult.rgb, luminance), textureColor.a);\n }\n );\n#endif\n\n\n@implementation GPUImageHighlightShadowTintFilter\n\n@synthesize shadowTintIntensity = _shadowTintIntensity;\n@synthesize highlightTintIntensity = _highlightTintIntensity;\n@synthesize shadowTintColor = _shadowTintColor;\n@synthesize highlightTintColor = _highlightTintColor;\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUHighlightShadowTintFragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    shadowTintIntensityUniform = [filterProgram uniformIndex:@\"shadowTintIntensity\"];\n    highlightTintIntensityUniform = [filterProgram uniformIndex:@\"highlightTintIntensity\"];\n    shadowTintColorUniform = [filterProgram uniformIndex:@\"shadowTintColor\"];\n    highlightTintColorUniform = [filterProgram uniformIndex:@\"highlightTintColor\"];\n    \n    self.shadowTintIntensity = 0.0f;\n    self.highlightTintIntensity = 0.0f;\n    self.shadowTintColor = (GPUVector4){1.0f, 0.0f, 0.0f, 1.0f};\n    self.highlightTintColor = (GPUVector4){0.0f, 0.0f, 1.0f, 1.0f};\n    \n    return self;\n}\n\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setShadowTintIntensity:(GLfloat)newValue\n{\n    _shadowTintIntensity = newValue;\n    \n    [self setFloat:_shadowTintIntensity forUniform:shadowTintIntensityUniform 
program:filterProgram];\n}\n\n- (void)setHighlightTintIntensity:(GLfloat)newValue\n{\n    _highlightTintIntensity = newValue;\n    \n    [self setFloat:_highlightTintIntensity forUniform:highlightTintIntensityUniform program:filterProgram];\n}\n\n- (void)setShadowTintColor:(GPUVector4)newValue;\n{\n    _shadowTintColor = newValue;\n    \n    [self setShadowTintColorRed:_shadowTintColor.one green:_shadowTintColor.two blue:_shadowTintColor.three alpha:_shadowTintColor.four];\n}\n\n- (void)setHighlightTintColor:(GPUVector4)newValue;\n{\n    _highlightTintColor = newValue;\n    \n    [self setHighlightTintColorRed:_highlightTintColor.one green:_highlightTintColor.two blue:_highlightTintColor.three alpha:_highlightTintColor.four];\n}\n\n- (void)setShadowTintColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;\n{\n    GPUVector4 shadowTintColor = {redComponent, greenComponent, blueComponent, alphaComponent};\n    \n    [self setVec4:shadowTintColor forUniform:shadowTintColorUniform program:filterProgram];\n}\n\n- (void)setHighlightTintColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;\n{\n    GPUVector4 highlightTintColor = {redComponent, greenComponent, blueComponent, alphaComponent};\n    \n    [self setVec4:highlightTintColor forUniform:highlightTintColorUniform program:filterProgram];\n}\n\n@end"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHistogramEqualizationFilter.h",
    "content": "//\n//  GPUImageHistogramEqualizationFilter.h\n//  FilterShowcase\n//\n//  Created by Adam Marcus on 19/08/2014.\n//  Copyright (c) 2014 Sunset Lake Software LLC. All rights reserved.\n//\n\n#import \"GPUImageFilterGroup.h\"\n#import \"GPUImageHistogramFilter.h\"\n#import \"GPUImageRawDataOutput.h\"\n#import \"GPUImageRawDataInput.h\"\n#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageHistogramEqualizationFilter : GPUImageFilterGroup\n{\n    GPUImageHistogramFilter *histogramFilter;\n    GPUImageRawDataOutput *rawDataOutputFilter;\n    GPUImageRawDataInput *rawDataInputFilter;\n}\n\n@property(readwrite, nonatomic) NSUInteger downsamplingFactor;\n\n- (id)initWithHistogramType:(GPUImageHistogramType)newHistogramType;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHistogramEqualizationFilter.m",
    "content": "//\n//  GPUImageHistogramEqualizationFilter.m\n//  FilterShowcase\n//\n//  Created by Adam Marcus on 19/08/2014.\n//  Copyright (c) 2014 Sunset Lake Software LLC. All rights reserved.\n//\n\n#import \"GPUImageHistogramEqualizationFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageRedHistogramEqualizationFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     lowp float redCurveValue = texture2D(inputImageTexture2, vec2(textureColor.r, 0.0)).r;\n     \n     gl_FragColor = vec4(redCurveValue, textureColor.g, textureColor.b, textureColor.a);\n }\n );\n#else\nNSString *const kGPUImageRedHistogramEqualizationFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     float redCurveValue = texture2D(inputImageTexture2, vec2(textureColor.r, 0.0)).r;\n     \n     gl_FragColor = vec4(redCurveValue, textureColor.g, textureColor.b, textureColor.a);\n }\n );\n#endif\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageGreenHistogramEqualizationFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     lowp float greenCurveValue = texture2D(inputImageTexture2, vec2(textureColor.g, 0.0)).g;\n     \n     gl_FragColor = vec4(textureColor.r, greenCurveValue, textureColor.b, textureColor.a);\n }\n );\n#else\nNSString *const kGPUImageGreenHistogramEqualizationFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     float greenCurveValue = texture2D(inputImageTexture2, vec2(textureColor.g, 0.0)).g;\n     \n     gl_FragColor = vec4(textureColor.r, greenCurveValue, textureColor.b, textureColor.a);\n }\n );\n#endif\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageBlueHistogramEqualizationFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     lowp float blueCurveValue = texture2D(inputImageTexture2, vec2(textureColor.b, 0.0)).b;\n     \n     gl_FragColor = vec4(textureColor.r, textureColor.g, blueCurveValue, textureColor.a);\n }\n );\n#else\nNSString *const kGPUImageBlueHistogramEqualizationFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     float blueCurveValue = texture2D(inputImageTexture2, vec2(textureColor.b, 0.0)).b;\n     \n     gl_FragColor = vec4(textureColor.r, textureColor.g, blueCurveValue, textureColor.a);\n }\n );\n#endif\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const 
kGPUImageRGBHistogramEqualizationFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate; \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     lowp float redCurveValue = texture2D(inputImageTexture2, vec2(textureColor.r, 0.0)).r;\n     lowp float greenCurveValue = texture2D(inputImageTexture2, vec2(textureColor.g, 0.0)).g;\n     lowp float blueCurveValue = texture2D(inputImageTexture2, vec2(textureColor.b, 0.0)).b;\n     \n     gl_FragColor = vec4(redCurveValue, greenCurveValue, blueCurveValue, textureColor.a);\n }\n );\n#else\nNSString *const kGPUImageRGBHistogramEqualizationFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     float redCurveValue = texture2D(inputImageTexture2, vec2(textureColor.r, 0.0)).r;\n     float greenCurveValue = texture2D(inputImageTexture2, vec2(textureColor.g, 0.0)).g;\n     float blueCurveValue = texture2D(inputImageTexture2, vec2(textureColor.b, 0.0)).b;\n     \n     gl_FragColor = vec4(redCurveValue, greenCurveValue, blueCurveValue, textureColor.a);\n }\n );\n#endif\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageLuminanceHistogramEqualizationFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n const lowp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n \n void main()\n {\n     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     lowp float luminance = dot(textureColor.rgb, W);\n     lowp float newLuminance = texture2D(inputImageTexture2, vec2(luminance, 0.0)).r;\n     lowp float deltaLuminance = newLuminance - luminance;\n     \n     lowp float red   = clamp(textureColor.r + deltaLuminance, 0.0, 1.0);\n     lowp float green = clamp(textureColor.g + deltaLuminance, 0.0, 1.0);\n     lowp float blue  = clamp(textureColor.b + deltaLuminance, 0.0, 1.0);\n\n     gl_FragColor = vec4(red, green, blue, textureColor.a);\n }\n );\n#else\nNSString *const kGPUImageLuminanceHistogramEqualizationFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n const vec3 W = vec3(0.2125, 0.7154, 0.0721);\n\n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     float luminance = dot(textureColor.rgb, W);\n     float newLuminance = texture2D(inputImageTexture2, vec2(luminance, 0.0)).r;\n     float deltaLuminance = newLuminance - luminance;\n     \n     float red   = clamp(textureColor.r + deltaLuminance, 0.0, 1.0);\n     float green = clamp(textureColor.g + deltaLuminance, 0.0, 1.0);\n     float blue  = clamp(textureColor.b + deltaLuminance, 0.0, 1.0);\n     \n     gl_FragColor = vec4(red, green, blue, textureColor.a);\n }\n );\n#endif\n\n@implementation GPUImageHistogramEqualizationFilter\n\n@synthesize downsamplingFactor = _downsamplingFactor;\n\n#pragma mark -\n#pragma mark Initialization\n\n- (id)init;\n{\n    if (!(self = [self initWithHistogramType:kGPUImageHistogramRGB]))\n    {\n        return nil;\n    }\n    \n    return self;\n}\n\n- (id)initWithHistogramType:(GPUImageHistogramType)newHistogramType\n{\n    if (!(self = 
[super init]))\n    {\n        return nil;\n    }\n    \n    histogramFilter = [[GPUImageHistogramFilter alloc] initWithHistogramType:newHistogramType];\n    [self addFilter:histogramFilter];\n    \n    GLubyte dummyInput[4 * 256]; // NB: No way to initialise GPUImageRawDataInput without providing bytes\n    rawDataInputFilter = [[GPUImageRawDataInput alloc] initWithBytes:dummyInput size:CGSizeMake(256.0, 1.0) pixelFormat:GPUPixelFormatBGRA type:GPUPixelTypeUByte];\n    rawDataOutputFilter = [[GPUImageRawDataOutput alloc] initWithImageSize:CGSizeMake(256.0, 3.0) resultsInBGRAFormat:YES];\n    \n    __unsafe_unretained GPUImageRawDataOutput *_rawDataOutputFilter = rawDataOutputFilter;\n    __unsafe_unretained GPUImageRawDataInput *_rawDataInputFilter = rawDataInputFilter;\n    [rawDataOutputFilter setNewFrameAvailableBlock:^{\n        \n        unsigned int histogramBins[3][256];\n        \n        [_rawDataOutputFilter lockFramebufferForReading];\n        \n        GLubyte *data  = [_rawDataOutputFilter rawBytesForImage];\n        data += [_rawDataOutputFilter bytesPerRowInOutput];\n\n        histogramBins[0][0] = *data++;\n        histogramBins[1][0] = *data++;\n        histogramBins[2][0] = *data++;\n        data++;\n        \n        for (unsigned int x = 1; x < 256; x++) {\n            histogramBins[0][x] = histogramBins[0][x-1] + *data++;\n            histogramBins[1][x] = histogramBins[1][x-1] + *data++;\n            histogramBins[2][x] = histogramBins[2][x-1] + *data++;\n            data++;\n        }\n        \n        [_rawDataOutputFilter unlockFramebufferAfterReading];\n\n        GLubyte colorMapping[4 * 256];\n        GLubyte *_colorMapping = colorMapping;\n        \n        for (unsigned int x = 0; x < 256; x++) {\n            *_colorMapping++ = (GLubyte) (((histogramBins[0][x] - histogramBins[0][0]) * 255) / histogramBins[0][255]);\n            *_colorMapping++ = (GLubyte) (((histogramBins[1][x] - histogramBins[1][0]) * 255) / histogramBins[1][255]);\n            *_colorMapping++ = (GLubyte) (((histogramBins[2][x] - histogramBins[2][0]) * 255) / histogramBins[2][255]);\n            *_colorMapping++ = 255;\n        }\n        \n        _colorMapping = colorMapping;\n        [_rawDataInputFilter updateDataFromBytes:_colorMapping size:CGSizeMake(256.0, 1.0)];\n        [_rawDataInputFilter processData];\n    }];\n    [histogramFilter addTarget:rawDataOutputFilter];\n    \n    NSString *fragmentShader = nil;\n    switch (newHistogramType) {\n        case kGPUImageHistogramRed:\n            fragmentShader = kGPUImageRedHistogramEqualizationFragmentShaderString;\n            break;\n        case kGPUImageHistogramGreen:\n            fragmentShader = kGPUImageGreenHistogramEqualizationFragmentShaderString;\n            break;\n        case kGPUImageHistogramBlue:\n            fragmentShader = kGPUImageBlueHistogramEqualizationFragmentShaderString;\n            break;\n        default:\n        case kGPUImageHistogramRGB:\n            fragmentShader = kGPUImageRGBHistogramEqualizationFragmentShaderString;\n            break;\n        case kGPUImageHistogramLuminance:\n            fragmentShader = kGPUImageLuminanceHistogramEqualizationFragmentShaderString;\n            break;\n    }\n    GPUImageFilter *equalizationFilter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:fragmentShader];\n    [rawDataInputFilter addTarget:equalizationFilter atTextureLocation:1];\n    \n    [self addFilter:equalizationFilter];\n    \n    self.initialFilters = [NSArray 
arrayWithObjects:histogramFilter, equalizationFilter, nil];\n    self.terminalFilter = equalizationFilter;\n    \n    self.downsamplingFactor = 16;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setDownsamplingFactor:(NSUInteger)newValue;\n{\n    if (_downsamplingFactor != newValue)\n    {\n        _downsamplingFactor = newValue;\n        histogramFilter.downsamplingFactor = newValue;\n    }\n}\n\n@end\n"
  },
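A sketch of using the equalization group above with a specific histogram type; the type constants come from GPUImageHistogramFilter.h below, and sourceImage and imageByFilteringImage: are assumed as in the other examples:

    GPUImageHistogramEqualizationFilter *equalizationFilter =
        [[GPUImageHistogramEqualizationFilter alloc] initWithHistogramType:kGPUImageHistogramLuminance];
    equalizationFilter.downsamplingFactor = 16; // sample only a fraction of the pixels when building the histogram
    UIImage *equalizedImage = [equalizationFilter imageByFilteringImage:sourceImage];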
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHistogramFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\ntypedef enum { kGPUImageHistogramRed, kGPUImageHistogramGreen, kGPUImageHistogramBlue, kGPUImageHistogramRGB, kGPUImageHistogramLuminance} GPUImageHistogramType;\n\n@interface GPUImageHistogramFilter : GPUImageFilter\n{\n    GPUImageHistogramType histogramType;\n    \n    GLubyte *vertexSamplingCoordinates;\n    \n    GLProgram *secondFilterProgram, *thirdFilterProgram;\n    GLint secondFilterPositionAttribute, thirdFilterPositionAttribute;\n}\n\n// Rather than sampling every pixel, this dictates what fraction of the image is sampled. By default, this is 16 with a minimum of 1.\n@property(readwrite, nonatomic) NSUInteger downsamplingFactor;\n\n// Initialization and teardown\n- (id)initWithHistogramType:(GPUImageHistogramType)newHistogramType;\n- (void)initializeSecondaryAttributes;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHistogramFilter.m",
    "content": "#import \"GPUImageHistogramFilter.h\"\n\n// Unlike other filters, this one uses a grid of GL_POINTs to sample the incoming image in a grid. A custom vertex shader reads the color in the texture at its position \n// and outputs a bin position in the final histogram as the vertex position. That point is then written into the image of the histogram using translucent pixels.\n// The degree of translucency is controlled by the scalingFactor, which lets you adjust the dynamic range of the histogram. The histogram can only be generated for one\n// color channel or luminance value at a time.\n//\n// This is based on this implementation: http://www.shaderwrangler.com/publications/histogram/histogram_cameraready.pdf\n//\n// Or at least that's how it would work if iOS could read from textures in a vertex shader, which it can't. Therefore, I read the texture data down from the\n// incoming frame and process the texture colors as vertices.\n\nNSString *const kGPUImageRedHistogramSamplingVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n \n varying vec3 colorFactor;\n\n void main()\n {\n     colorFactor = vec3(1.0, 0.0, 0.0);\n     gl_Position = vec4(-1.0 + (position.x * 0.0078125), 0.0, 0.0, 1.0);\n     gl_PointSize = 1.0;\n }\n);\n\nNSString *const kGPUImageGreenHistogramSamplingVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n \n varying vec3 colorFactor;\n \n void main()\n {\n     colorFactor = vec3(0.0, 1.0, 0.0);\n     gl_Position = vec4(-1.0 + (position.y * 0.0078125), 0.0, 0.0, 1.0);\n     gl_PointSize = 1.0;\n }\n);\n\nNSString *const kGPUImageBlueHistogramSamplingVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n \n varying vec3 colorFactor;\n \n void main()\n {\n     colorFactor = vec3(0.0, 0.0, 1.0);\n     gl_Position = vec4(-1.0 + (position.z * 0.0078125), 0.0, 0.0, 1.0);\n     gl_PointSize = 1.0;\n }\n);\n\nNSString *const kGPUImageLuminanceHistogramSamplingVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n \n varying vec3 colorFactor;\n \n const vec3 W = vec3(0.2125, 0.7154, 0.0721);\n \n void main()\n {\n     float luminance = dot(position.xyz, W);\n\n     colorFactor = vec3(1.0, 1.0, 1.0);\n     gl_Position = vec4(-1.0 + (luminance * 0.0078125), 0.0, 0.0, 1.0);\n     gl_PointSize = 1.0;\n }\n);\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageHistogramAccumulationFragmentShaderString = SHADER_STRING\n(\n const lowp float scalingFactor = 1.0 / 256.0;\n\n varying lowp vec3 colorFactor;\n\n void main()\n {\n     gl_FragColor = vec4(colorFactor * scalingFactor , 1.0);\n }\n);\n#else\nNSString *const kGPUImageHistogramAccumulationFragmentShaderString = SHADER_STRING\n(\n const float scalingFactor = 1.0 / 256.0;\n \n varying vec3 colorFactor;\n \n void main()\n {\n     gl_FragColor = vec4(colorFactor * scalingFactor , 1.0);\n }\n);\n#endif\n\n@implementation GPUImageHistogramFilter\n\n@synthesize downsamplingFactor = _downsamplingFactor;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithHistogramType:(GPUImageHistogramType)newHistogramType;\n{\n    switch (newHistogramType)\n    {\n        case kGPUImageHistogramRed:\n        {\n            if (!(self = [super initWithVertexShaderFromString:kGPUImageRedHistogramSamplingVertexShaderString fragmentShaderFromString:kGPUImageHistogramAccumulationFragmentShaderString]))\n            {\n                return nil;\n            }\n        }; break;\n        case kGPUImageHistogramGreen:\n        {\n            if 
(!(self = [super initWithVertexShaderFromString:kGPUImageGreenHistogramSamplingVertexShaderString fragmentShaderFromString:kGPUImageHistogramAccumulationFragmentShaderString]))\n            {\n                return nil;\n            }\n        }; break;\n        case kGPUImageHistogramBlue:\n        {\n            if (!(self = [super initWithVertexShaderFromString:kGPUImageBlueHistogramSamplingVertexShaderString fragmentShaderFromString:kGPUImageHistogramAccumulationFragmentShaderString]))\n            {\n                return nil;\n            }\n        }; break;\n        case kGPUImageHistogramLuminance:\n        {\n            if (!(self = [super initWithVertexShaderFromString:kGPUImageLuminanceHistogramSamplingVertexShaderString fragmentShaderFromString:kGPUImageHistogramAccumulationFragmentShaderString]))\n            {\n                return nil;\n            }\n        }; break;\n        case kGPUImageHistogramRGB:\n        {\n            if (!(self = [super initWithVertexShaderFromString:kGPUImageRedHistogramSamplingVertexShaderString fragmentShaderFromString:kGPUImageHistogramAccumulationFragmentShaderString]))\n            {\n                return nil;\n            }\n            \n            runSynchronouslyOnVideoProcessingQueue(^{\n                [GPUImageContext useImageProcessingContext];\n                \n                secondFilterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageGreenHistogramSamplingVertexShaderString fragmentShaderString:kGPUImageHistogramAccumulationFragmentShaderString];\n                thirdFilterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageBlueHistogramSamplingVertexShaderString fragmentShaderString:kGPUImageHistogramAccumulationFragmentShaderString];\n                \n                if (!secondFilterProgram.initialized)\n                {\n                    [self initializeSecondaryAttributes];\n                    \n                    if (![secondFilterProgram link])\n                    {\n                        NSString *progLog = [secondFilterProgram programLog];\n                        NSLog(@\"Program link log: %@\", progLog);\n                        NSString *fragLog = [secondFilterProgram fragmentShaderLog];\n                        NSLog(@\"Fragment shader compile log: %@\", fragLog);\n                        NSString *vertLog = [secondFilterProgram vertexShaderLog];\n                        NSLog(@\"Vertex shader compile log: %@\", vertLog);\n                        filterProgram = nil;\n                        NSAssert(NO, @\"Filter shader link failed\");\n\n                    }\n\n                    [GPUImageContext setActiveShaderProgram:secondFilterProgram];\n                    \n                    glEnableVertexAttribArray(secondFilterPositionAttribute);\n                    \n                    if (![thirdFilterProgram link])\n                    {\n                        NSString *progLog = [secondFilterProgram programLog];\n                        NSLog(@\"Program link log: %@\", progLog);\n                        NSString *fragLog = [secondFilterProgram fragmentShaderLog];\n                        NSLog(@\"Fragment shader compile log: %@\", fragLog);\n                        NSString *vertLog = [secondFilterProgram vertexShaderLog];\n                        NSLog(@\"Vertex shader compile log: %@\", vertLog);\n                        filterProgram = nil;\n                        NSAssert(NO, @\"Filter shader 
link failed\");\n                    }\n                }\n                \n                secondFilterPositionAttribute = [secondFilterProgram attributeIndex:@\"position\"];\n                \n                \n                thirdFilterPositionAttribute = [thirdFilterProgram attributeIndex:@\"position\"];\n                [GPUImageContext setActiveShaderProgram:thirdFilterProgram];\n                \n                glEnableVertexAttribArray(thirdFilterPositionAttribute);\n            });\n        }; break;\n    }\n\n    histogramType = newHistogramType;\n    \n    self.downsamplingFactor = 16;\n\n    return self;\n}\n\n- (id)init;\n{\n    if (!(self = [self initWithHistogramType:kGPUImageHistogramRGB]))\n    {\n        return nil;\n    }\n\n    return self;\n}\n\n- (void)initializeSecondaryAttributes;\n{\n    [secondFilterProgram addAttribute:@\"position\"];\n\t[thirdFilterProgram addAttribute:@\"position\"];\n}\n\n- (void)dealloc;\n{\n    if (vertexSamplingCoordinates != NULL && ![GPUImageContext supportsFastTextureUpload])\n    {\n        free(vertexSamplingCoordinates);\n    }\n}\n\n#pragma mark -\n#pragma mark Rendering\n\n- (CGSize)sizeOfFBO;\n{\n    return CGSizeMake(256.0, 3.0);\n}\n\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;\n{\n    [self renderToTextureWithVertices:NULL textureCoordinates:NULL];\n    \n    [self informTargetsAboutNewFrameAtTime:frameTime];\n}\n\n- (CGSize)outputFrameSize;\n{\n    return [self sizeOfFBO];\n}\n\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n{\n    if (self.preventRendering)\n    {\n        return;\n    }\n    \n    inputTextureSize = newSize;\n}\n\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n    inputRotation = kGPUImageNoRotation;\n}\n\n- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;\n{\n    // we need a normal color texture for this filter\n    NSAssert(self.outputTextureOptions.internalFormat == GL_RGBA, @\"The output texture format for this filter must be GL_RGBA.\");\n    NSAssert(self.outputTextureOptions.type == GL_UNSIGNED_BYTE, @\"The type of the output texture of this filter must be GL_UNSIGNED_BYTE.\");\n    \n    if (self.preventRendering)\n    {\n        [firstInputFramebuffer unlock];\n        return;\n    }\n    \n    [GPUImageContext useImageProcessingContext];\n\n    if ([GPUImageContext supportsFastTextureUpload])\n    {\n        glFinish();\n        vertexSamplingCoordinates = [firstInputFramebuffer byteBuffer];\n    } else {\n        if (vertexSamplingCoordinates == NULL)\n        {\n            vertexSamplingCoordinates = calloc(inputTextureSize.width * inputTextureSize.height * 4, sizeof(GLubyte));\n        }\n        glReadPixels(0, 0, inputTextureSize.width, inputTextureSize.height, GL_RGBA, GL_UNSIGNED_BYTE, vertexSamplingCoordinates);\n    }\n    \n    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];\n    [outputFramebuffer activateFramebuffer];\n    if (usingNextFrameForImageCapture)\n    {\n        [outputFramebuffer lock];\n    }\n    \n    [GPUImageContext setActiveShaderProgram:filterProgram];\n    \n    glClearColor(0.0, 0.0, 0.0, 1.0);\n    glClear(GL_COLOR_BUFFER_BIT);\n    \n    glBlendEquation(GL_FUNC_ADD);\n    glBlendFunc(GL_ONE, GL_ONE);\n    glEnable(GL_BLEND);\n    
\n\tglVertexAttribPointer(filterPositionAttribute, 4, GL_UNSIGNED_BYTE, 0, ((unsigned int)_downsamplingFactor - 1) * 4, vertexSamplingCoordinates);\n    glDrawArrays(GL_POINTS, 0, inputTextureSize.width * inputTextureSize.height / (CGFloat)_downsamplingFactor);\n\n    if (histogramType == kGPUImageHistogramRGB)\n    {\n        [GPUImageContext setActiveShaderProgram:secondFilterProgram];\n        \n        glVertexAttribPointer(secondFilterPositionAttribute, 4, GL_UNSIGNED_BYTE, 0, ((unsigned int)_downsamplingFactor - 1) * 4, vertexSamplingCoordinates);\n        glDrawArrays(GL_POINTS, 0, inputTextureSize.width * inputTextureSize.height / (CGFloat)_downsamplingFactor);\n\n        [GPUImageContext setActiveShaderProgram:thirdFilterProgram];\n        \n        glVertexAttribPointer(thirdFilterPositionAttribute, 4, GL_UNSIGNED_BYTE, 0, ((unsigned int)_downsamplingFactor - 1) * 4, vertexSamplingCoordinates);\n        glDrawArrays(GL_POINTS, 0, inputTextureSize.width * inputTextureSize.height / (CGFloat)_downsamplingFactor);\n    }\n    \n    glDisable(GL_BLEND);\n    [firstInputFramebuffer unlock];\n\n    if (usingNextFrameForImageCapture)\n    {\n        dispatch_semaphore_signal(imageCaptureSemaphore);\n    }\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n//- (void)setScalingFactor:(CGFloat)newValue;\n//{\n//    _scalingFactor = newValue;\n//    \n//    [GPUImageContext useImageProcessingContext];\n//    [filterProgram use];\n//    glUniform1f(scalingFactorUniform, _scalingFactor);\n//}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHistogramGenerator.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageHistogramGenerator : GPUImageFilter\n{\n    GLint backgroundColorUniform;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHistogramGenerator.m",
    "content": "#import \"GPUImageHistogramGenerator.h\"\n\nNSString *const kGPUImageHistogramGeneratorVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n \n varying vec2 textureCoordinate;\n varying float height;\n \n void main()\n {\n     gl_Position = position;\n     textureCoordinate = vec2(inputTextureCoordinate.x, 0.5);\n     height = 1.0 - inputTextureCoordinate.y;\n }\n);\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageHistogramGeneratorFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp float height;\n \n uniform sampler2D inputImageTexture;\n uniform lowp vec4 backgroundColor;\n \n void main()\n {\n     lowp vec3 colorChannels = texture2D(inputImageTexture, textureCoordinate).rgb;\n     lowp vec4 heightTest = vec4(step(height, colorChannels), 1.0);\n     gl_FragColor = mix(backgroundColor, heightTest, heightTest.r + heightTest.g + heightTest.b);\n }\n);\n#else\nNSString *const kGPUImageHistogramGeneratorFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying float height;\n \n uniform sampler2D inputImageTexture;\n uniform vec4 backgroundColor;\n \n void main()\n {\n     vec3 colorChannels = texture2D(inputImageTexture, textureCoordinate).rgb;\n     vec4 heightTest = vec4(step(height, colorChannels), 1.0);\n     gl_FragColor = mix(backgroundColor, heightTest, heightTest.r + heightTest.g + heightTest.b);\n }\n);\n#endif\n\n@implementation GPUImageHistogramGenerator\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithVertexShaderFromString:kGPUImageHistogramGeneratorVertexShaderString fragmentShaderFromString:kGPUImageHistogramGeneratorFragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    backgroundColorUniform = [filterProgram uniformIndex:@\"backgroundColor\"];\n\n    [self setBackgroundColorRed:0.0 green:0.0 blue:0.0 alpha:0.0];\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;\n{\n//    GLfloat backgroundColor[4];\n//    backgroundColor[0] = redComponent;\n//    backgroundColor[1] = greenComponent;    \n//    backgroundColor[2] = blueComponent;\n//    backgroundColor[3] = alphaComponent;\n    GPUVector4 backgroundColor = {redComponent, greenComponent, blueComponent, alphaComponent};\n    \n    [self setVec4:backgroundColor forUniform:backgroundColorUniform program:filterProgram];\n}\n\n@end\n"
  },
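To visualize the 256x3 histogram produced by GPUImageHistogramFilter, it is typically fed into the GPUImageHistogramGenerator above; a sketch, assuming the upstream GPUImage forceProcessingAtSize: method and a videoCamera / filterView set up as in the earlier examples:

    GPUImageHistogramFilter *histogramFilter = [[GPUImageHistogramFilter alloc] initWithHistogramType:kGPUImageHistogramRGB];
    histogramFilter.downsamplingFactor = 16;

    GPUImageHistogramGenerator *histogramGraph = [[GPUImageHistogramGenerator alloc] init];
    [histogramGraph forceProcessingAtSize:CGSizeMake(256.0, 330.0)]; // draw the 256 bins as a graph

    [videoCamera addTarget:histogramFilter];
    [histogramFilter addTarget:histogramGraph];
    [histogramGraph addTarget:filterView];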
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHoughTransformLineDetector.h",
    "content": "#import \"GPUImageFilterGroup.h\"\n#import \"GPUImageThresholdEdgeDetectionFilter.h\"\n#import \"GPUImageParallelCoordinateLineTransformFilter.h\"\n#import \"GPUImageThresholdedNonMaximumSuppressionFilter.h\"\n#import \"GPUImageCannyEdgeDetectionFilter.h\"\n\n// This applies a Hough transform to detect lines in a scene. It starts with a thresholded Sobel edge detection pass,\n// then takes those edge points in and applies a Hough transform to convert them to lines. The intersection of these lines\n// is then determined via blending and accumulation, and a non-maximum suppression filter is applied to find local maxima.\n// These local maxima are then converted back into lines in normal space and returned via a callback block.\n//\n// Rather than using one of the standard Hough transform types, this filter uses parallel coordinate space which is far more efficient\n// to rasterize on a GPU.\n//\n// This approach is based entirely on the PC lines process developed by the Graph@FIT research group at the Brno University of Technology\n// and described in their publications:\n//\n// M. Dubská, J. Havel, and A. Herout. Real-Time Detection of Lines using Parallel Coordinates and OpenGL. Proceedings of SCCG 2011, Bratislava, SK, p. 7.\n// http://medusa.fit.vutbr.cz/public/data/papers/2011-SCCG-Dubska-Real-Time-Line-Detection-Using-PC-and-OpenGL.pdf\n// M. Dubská, J. Havel, and A. Herout. PClines — Line detection using parallel coordinates. 2011 IEEE Conference on Computer Vision and Pattern Recognition (CVPR), p. 1489- 1494.\n// http://medusa.fit.vutbr.cz/public/data/papers/2011-CVPR-Dubska-PClines.pdf\n\n//#define DEBUGLINEDETECTION\n\n@interface GPUImageHoughTransformLineDetector : GPUImageFilterGroup\n{\n    GPUImageOutput<GPUImageInput> *thresholdEdgeDetectionFilter;\n    \n//    GPUImageThresholdEdgeDetectionFilter *thresholdEdgeDetectionFilter;\n    GPUImageParallelCoordinateLineTransformFilter *parallelCoordinateLineTransformFilter;\n    GPUImageThresholdedNonMaximumSuppressionFilter *nonMaximumSuppressionFilter;\n    \n    GLfloat *linesArray;\n    GLubyte *rawImagePixels;\n}\n\n// A threshold value for which a point is detected as belonging to an edge for determining lines. Default is 0.9.\n@property(readwrite, nonatomic) CGFloat edgeThreshold;\n\n// A threshold value for which a local maximum is detected as belonging to a line in parallel coordinate space. Default is 0.20.\n@property(readwrite, nonatomic) CGFloat lineDetectionThreshold;\n\n// This block is called on the detection of lines, usually on every processed frame. A C array containing normalized slopes and intercepts in m, b pairs (y=mx+b) is passed in, along with a count of the number of lines detected and the current timestamp of the video frame\n@property(nonatomic, copy) void(^linesDetectedBlock)(GLfloat* lineArray, NSUInteger linesDetected, CMTime frameTime);\n\n// These images are only enabled when built with DEBUGLINEDETECTION defined, and are used to examine the intermediate states of the Hough transform\n@property(nonatomic, readonly, strong) NSMutableArray *intermediateImages;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHoughTransformLineDetector.m",
    "content": "#import \"GPUImageHoughTransformLineDetector.h\"\n\n@interface GPUImageHoughTransformLineDetector()\n\n- (void)extractLineParametersFromImageAtFrameTime:(CMTime)frameTime;\n\n@end\n\n@implementation GPUImageHoughTransformLineDetector\n\n@synthesize linesDetectedBlock;\n@synthesize edgeThreshold;\n@synthesize lineDetectionThreshold;\n@synthesize intermediateImages = _intermediateImages;\n\n- (id)init;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n    // First pass: do edge detection and threshold that to just have white pixels for edges\n//    if ([GPUImageContext deviceSupportsFramebufferReads])\n//    if ([GPUImageContext deviceSupportsFramebufferReads])\n//    {\n//        thresholdEdgeDetectionFilter = [[GPUImageThresholdEdgeDetectionFilter alloc] init];\n//        thresholdEdgeDetectionFilter = [[GPUImageSobelEdgeDetectionFilter alloc] init];\n//        [(GPUImageThresholdEdgeDetectionFilter *)thresholdEdgeDetectionFilter setThreshold:0.07];\n//        [(GPUImageThresholdEdgeDetectionFilter *)thresholdEdgeDetectionFilter setEdgeStrength:0.25];\n//        [(GPUImageThresholdEdgeDetectionFilter *)thresholdEdgeDetectionFilter setEdgeStrength:1.0];\n//        thresholdEdgeDetectionFilter = [[GPUImageCannyEdgeDetectionFilter alloc] init];\n//    }\n//    else\n//    {\n        thresholdEdgeDetectionFilter = [[GPUImageCannyEdgeDetectionFilter alloc] init];\n//    }\n    [self addFilter:thresholdEdgeDetectionFilter];\n    \n    // Second pass: extract the white points and draw representative lines in parallel coordinate space\n    parallelCoordinateLineTransformFilter = [[GPUImageParallelCoordinateLineTransformFilter alloc] init];\n    [self addFilter:parallelCoordinateLineTransformFilter];\n    \n    // Third pass: apply non-maximum suppression\n    if ([GPUImageContext deviceSupportsFramebufferReads])\n    {\n        nonMaximumSuppressionFilter = [[GPUImageThresholdedNonMaximumSuppressionFilter alloc] initWithPackedColorspace:YES];\n    }\n    else\n    {\n        nonMaximumSuppressionFilter = [[GPUImageThresholdedNonMaximumSuppressionFilter alloc] initWithPackedColorspace:NO];\n    }\n    [self addFilter:nonMaximumSuppressionFilter];\n    \n    __unsafe_unretained GPUImageHoughTransformLineDetector *weakSelf = self;\n#ifdef DEBUGLINEDETECTION\n    _intermediateImages = [[NSMutableArray alloc] init];\n    __unsafe_unretained NSMutableArray *weakIntermediateImages = _intermediateImages;\n\n//    __unsafe_unretained GPUImageOutput<GPUImageInput> *weakEdgeDetectionFilter = thresholdEdgeDetectionFilter;\n//    [thresholdEdgeDetectionFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){\n//        [weakIntermediateImages removeAllObjects];\n//        UIImage *intermediateImage = [weakEdgeDetectionFilter imageFromCurrentFramebuffer];\n//        [weakIntermediateImages addObject:intermediateImage];\n//    }];\n//\n//    __unsafe_unretained GPUImageOutput<GPUImageInput> *weakParallelCoordinateLineTransformFilter = parallelCoordinateLineTransformFilter;\n//    [parallelCoordinateLineTransformFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){\n//        UIImage *intermediateImage = [weakParallelCoordinateLineTransformFilter imageFromCurrentFramebuffer];\n//        [weakIntermediateImages addObject:intermediateImage];\n//    }];\n\n    __unsafe_unretained GPUImageOutput<GPUImageInput> *weakNonMaximumSuppressionFilter = nonMaximumSuppressionFilter;\n    [nonMaximumSuppressionFilter 
setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){\n        UIImage *intermediateImage = [weakNonMaximumSuppressionFilter imageFromCurrentFramebuffer];\n        [weakIntermediateImages addObject:intermediateImage];\n\n        [weakSelf extractLineParametersFromImageAtFrameTime:frameTime];\n    }];\n#else\n    [nonMaximumSuppressionFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime) {\n        [weakSelf extractLineParametersFromImageAtFrameTime:frameTime];\n    }];\n#endif\n    \n    [thresholdEdgeDetectionFilter addTarget:parallelCoordinateLineTransformFilter];\n    [parallelCoordinateLineTransformFilter addTarget:nonMaximumSuppressionFilter];\n    \n    self.initialFilters = [NSArray arrayWithObjects:thresholdEdgeDetectionFilter, nil];\n    //    self.terminalFilter = colorPackingFilter;\n    self.terminalFilter = nonMaximumSuppressionFilter;\n    \n//    self.edgeThreshold = 0.95;\n    self.lineDetectionThreshold = 0.12;\n    \n    return self;\n}\n\n- (void)dealloc;\n{\n    free(rawImagePixels);\n    free(linesArray);\n}\n\n#pragma mark -\n#pragma mark Corner extraction\n\n- (void)extractLineParametersFromImageAtFrameTime:(CMTime)frameTime;\n{\n    // we need a normal color texture for this filter\n    NSAssert(self.outputTextureOptions.internalFormat == GL_RGBA, @\"The output texture format for this filter must be GL_RGBA.\");\n    NSAssert(self.outputTextureOptions.type == GL_UNSIGNED_BYTE, @\"The type of the output texture of this filter must be GL_UNSIGNED_BYTE.\");\n    \n    NSUInteger numberOfLines = 0;\n    CGSize imageSize = nonMaximumSuppressionFilter.outputFrameSize;\n    \n    unsigned int imageByteSize = imageSize.width * imageSize.height * 4;\n    \n    if (rawImagePixels == NULL)\n    {\n        rawImagePixels = (GLubyte *)malloc(imageByteSize);\n        linesArray = calloc(1024 * 2, sizeof(GLfloat));\n    }\n    \n    glReadPixels(0, 0, (int)imageSize.width, (int)imageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels);\n    \n//    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();\n    \n    unsigned int imageWidth = imageSize.width * 4;\n    \n    unsigned int currentByte = 0;\n    unsigned int cornerStorageIndex = 0;\n    unsigned long lineStrengthCounter = 0;\n    while (currentByte < imageByteSize)\n    {\n        GLubyte colorByte = rawImagePixels[currentByte];\n//        NSLog(@\"(%d,%d): [%d,%d,%d,%d]\", xCoordinate, yCoordinate, rawImagePixels[currentByte], rawImagePixels[currentByte+1], rawImagePixels[currentByte+2], rawImagePixels[currentByte+3]);\n//        NSLog(@\"[%d,%d,%d,%d]\", rawImagePixels[currentByte], rawImagePixels[currentByte+1], rawImagePixels[currentByte+2], rawImagePixels[currentByte+3]);\n        \n        if (colorByte > 0)\n        {\n            unsigned int xCoordinate = currentByte % imageWidth;\n            unsigned int yCoordinate = currentByte / imageWidth;\n            \n            lineStrengthCounter += colorByte;\n//            NSLog(@\"(%d,%d): [%d,%d,%d,%d]\", xCoordinate, yCoordinate, rawImagePixels[currentByte], rawImagePixels[currentByte+1], rawImagePixels[currentByte+2], rawImagePixels[currentByte+3]);\n          \n            CGFloat normalizedXCoordinate = -1.0 + 2.0 * (CGFloat)(xCoordinate / 4) / imageSize.width;\n            CGFloat normalizedYCoordinate = -1.0 + 2.0 * (CGFloat)(yCoordinate) / imageSize.height;\n            \n            if (normalizedXCoordinate < 0.0)\n            {\n                // T space\n                // m = -1 - d/u\n        
        // b = d * v/u\n                if (normalizedXCoordinate > -0.05) // Test for the case right near the X axis, stamp the X intercept instead of the Y\n                {\n                    linesArray[cornerStorageIndex++] = 100000.0;\n                    linesArray[cornerStorageIndex++] = normalizedYCoordinate;\n                }\n                else\n                {\n                    linesArray[cornerStorageIndex++] = -1.0 - 1.0 / normalizedXCoordinate;\n                    linesArray[cornerStorageIndex++] = 1.0 * normalizedYCoordinate / normalizedXCoordinate;\n                }\n            }\n            else\n            {\n                // S space\n                // m = 1 - d/u\n                // b = d * v/u\n                if (normalizedXCoordinate < 0.05) // Test for the case right near the X axis, stamp the X intercept instead of the Y\n                {\n                    linesArray[cornerStorageIndex++] = 100000.0;\n                    linesArray[cornerStorageIndex++] = normalizedYCoordinate;\n                }\n                else\n                {\n                    linesArray[cornerStorageIndex++] = 1.0 - 1.0 / normalizedXCoordinate;\n                    linesArray[cornerStorageIndex++] = 1.0 * normalizedYCoordinate / normalizedXCoordinate;\n                }\n            }\n            \n            numberOfLines++;\n            \n            numberOfLines = MIN(numberOfLines, 1023);\n            cornerStorageIndex = MIN(cornerStorageIndex, 2040);\n        }\n        currentByte +=4;\n    }\n    \n//    CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);\n//    NSLog(@\"Processing time : %f ms\", 1000.0 * currentFrameTime);\n\n    if (linesDetectedBlock != NULL)\n    {\n        linesDetectedBlock(linesArray, numberOfLines, frameTime);\n    }\n}\n\n- (BOOL)wantsMonochromeInput;\n{\n//    return YES;\n    return NO;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n//- (void)setEdgeThreshold:(CGFloat)newValue;\n//{\n//    [(GPUImageCannyEdgeDetectionFilter *)thresholdEdgeDetectionFilter setThreshold:newValue];\n//}\n//\n//- (CGFloat)edgeThreshold;\n//{\n//    return [(GPUImageCannyEdgeDetectionFilter *)thresholdEdgeDetectionFilter threshold];\n//}\n\n- (void)setLineDetectionThreshold:(CGFloat)newValue;\n{\n    nonMaximumSuppressionFilter.threshold = newValue;\n}\n\n- (CGFloat)lineDetectionThreshold;\n{\n    return nonMaximumSuppressionFilter.threshold;\n}\n\n#ifdef DEBUGLINEDETECTION\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;\n{\n//    [thresholdEdgeDetectionFilter useNextFrameForImageCapture];\n//    [parallelCoordinateLineTransformFilter useNextFrameForImageCapture];\n    [nonMaximumSuppressionFilter useNextFrameForImageCapture];\n    \n    [super newFrameReadyAtTime:frameTime atIndex:textureIndex];\n}\n#endif\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHueBlendFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageHueBlendFilter : GPUImageTwoInputFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHueBlendFilter.m",
    "content": "#import \"GPUImageHueBlendFilter.h\"\n\n/**\n * Hue blend mode based upon pseudo code from the PDF specification.\n */\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageHueBlendFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n highp float lum(lowp vec3 c) {\n     return dot(c, vec3(0.3, 0.59, 0.11));\n }\n \n lowp vec3 clipcolor(lowp vec3 c) {\n     highp float l = lum(c);\n     lowp float n = min(min(c.r, c.g), c.b);\n     lowp float x = max(max(c.r, c.g), c.b);\n     \n     if (n < 0.0) {\n         c.r = l + ((c.r - l) * l) / (l - n);\n         c.g = l + ((c.g - l) * l) / (l - n);\n         c.b = l + ((c.b - l) * l) / (l - n);\n     }\n     if (x > 1.0) {\n         c.r = l + ((c.r - l) * (1.0 - l)) / (x - l);\n         c.g = l + ((c.g - l) * (1.0 - l)) / (x - l);\n         c.b = l + ((c.b - l) * (1.0 - l)) / (x - l);\n     }\n     \n     return c;\n }\n \n lowp vec3 setlum(lowp vec3 c, highp float l) {\n     highp float d = l - lum(c);\n     c = c + vec3(d);\n     return clipcolor(c);\n }\n \n highp float sat(lowp vec3 c) {\n     lowp float n = min(min(c.r, c.g), c.b);\n     lowp float x = max(max(c.r, c.g), c.b);\n     return x - n;\n }\n \n lowp float mid(lowp float cmin, lowp float cmid, lowp float cmax, highp float s) {\n     return ((cmid - cmin) * s) / (cmax - cmin);\n }\n \n lowp vec3 setsat(lowp vec3 c, highp float s) {\n     if (c.r > c.g) {\n         if (c.r > c.b) {\n             if (c.g > c.b) {\n                 /* g is mid, b is min */\n                 c.g = mid(c.b, c.g, c.r, s);\n                 c.b = 0.0;\n             } else {\n                 /* b is mid, g is min */\n                 c.b = mid(c.g, c.b, c.r, s);\n                 c.g = 0.0;\n             }\n             c.r = s;\n         } else {\n             /* b is max, r is mid, g is min */\n             c.r = mid(c.g, c.r, c.b, s);\n             c.b = s;\n             c.r = 0.0;\n         }\n     } else if (c.r > c.b) {\n         /* g is max, r is mid, b is min */\n         c.r = mid(c.b, c.r, c.g, s);\n         c.g = s;\n         c.b = 0.0;\n     } else if (c.g > c.b) {\n         /* g is max, b is mid, r is min */\n         c.b = mid(c.r, c.b, c.g, s);\n         c.g = s;\n         c.r = 0.0;\n     } else if (c.b > c.g) {\n         /* b is max, g is mid, r is min */\n         c.g = mid(c.r, c.g, c.b, s);\n         c.b = s;\n         c.r = 0.0;\n     } else {\n         c = vec3(0.0);\n     }\n     return c;\n }\n \n void main()\n {\n\t highp vec4 baseColor = texture2D(inputImageTexture, textureCoordinate);\n\t highp vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(setsat(overlayColor.rgb, sat(baseColor.rgb)), lum(baseColor.rgb)) * overlayColor.a, baseColor.a);\n }\n);\n#else\nNSString *const kGPUImageHueBlendFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n float lum(vec3 c) {\n     return dot(c, vec3(0.3, 0.59, 0.11));\n }\n \n vec3 clipcolor(vec3 c) {\n     float l = lum(c);\n     float n = min(min(c.r, c.g), c.b);\n     float x = max(max(c.r, c.g), c.b);\n     \n     if (n < 0.0) {\n         c.r = l + ((c.r - l) * l) / (l - n);\n         c.g = l + ((c.g - l) * l) / (l 
- n);\n         c.b = l + ((c.b - l) * l) / (l - n);\n     }\n     if (x > 1.0) {\n         c.r = l + ((c.r - l) * (1.0 - l)) / (x - l);\n         c.g = l + ((c.g - l) * (1.0 - l)) / (x - l);\n         c.b = l + ((c.b - l) * (1.0 - l)) / (x - l);\n     }\n     \n     return c;\n }\n \n vec3 setlum(vec3 c, float l) {\n     float d = l - lum(c);\n     c = c + vec3(d);\n     return clipcolor(c);\n }\n \n float sat(vec3 c) {\n     float n = min(min(c.r, c.g), c.b);\n     float x = max(max(c.r, c.g), c.b);\n     return x - n;\n }\n \n float mid(float cmin, float cmid, float cmax, float s) {\n     return ((cmid - cmin) * s) / (cmax - cmin);\n }\n \n vec3 setsat(vec3 c, float s) {\n     if (c.r > c.g) {\n         if (c.r > c.b) {\n             if (c.g > c.b) {\n                 /* g is mid, b is min */\n                 c.g = mid(c.b, c.g, c.r, s);\n                 c.b = 0.0;\n             } else {\n                 /* b is mid, g is min */\n                 c.b = mid(c.g, c.b, c.r, s);\n                 c.g = 0.0;\n             }\n             c.r = s;\n         } else {\n             /* b is max, r is mid, g is min */\n             c.r = mid(c.g, c.r, c.b, s);\n             c.b = s;\n             c.r = 0.0;\n         }\n     } else if (c.r > c.b) {\n         /* g is max, r is mid, b is min */\n         c.r = mid(c.b, c.r, c.g, s);\n         c.g = s;\n         c.b = 0.0;\n     } else if (c.g > c.b) {\n         /* g is max, b is mid, r is min */\n         c.b = mid(c.r, c.b, c.g, s);\n         c.g = s;\n         c.r = 0.0;\n     } else if (c.b > c.g) {\n         /* b is max, g is mid, r is min */\n         c.g = mid(c.r, c.g, c.b, s);\n         c.b = s;\n         c.r = 0.0;\n     } else {\n         c = vec3(0.0);\n     }\n     return c;\n }\n \n void main()\n {\n\t vec4 baseColor = texture2D(inputImageTexture, textureCoordinate);\n\t vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(setsat(overlayColor.rgb, sat(baseColor.rgb)), lum(baseColor.rgb)) * overlayColor.a, baseColor.a);\n }\n);\n#endif\n\n@implementation GPUImageHueBlendFilter\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageHueBlendFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHueFilter.h",
    "content": "\n#import \"GPUImageFilter.h\"\n\n@interface GPUImageHueFilter : GPUImageFilter\n{\n    GLint hueAdjustUniform;\n    \n}\n@property (nonatomic, readwrite) CGFloat hue;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHueFilter.m",
    "content": "\n#import \"GPUImageHueFilter.h\"\n\n// Adapted from http://stackoverflow.com/questions/9234724/how-to-change-hue-of-a-texture-with-glsl - see for code and discussion\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageHueFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform mediump float hueAdjust;\n const highp  vec4  kRGBToYPrime = vec4 (0.299, 0.587, 0.114, 0.0);\n const highp  vec4  kRGBToI     = vec4 (0.595716, -0.274453, -0.321263, 0.0);\n const highp  vec4  kRGBToQ     = vec4 (0.211456, -0.522591, 0.31135, 0.0);\n \n const highp  vec4  kYIQToR   = vec4 (1.0, 0.9563, 0.6210, 0.0);\n const highp  vec4  kYIQToG   = vec4 (1.0, -0.2721, -0.6474, 0.0);\n const highp  vec4  kYIQToB   = vec4 (1.0, -1.1070, 1.7046, 0.0);\n \n void main ()\n {\n     // Sample the input pixel\n     highp vec4 color   = texture2D(inputImageTexture, textureCoordinate);\n     \n     // Convert to YIQ\n     highp float   YPrime  = dot (color, kRGBToYPrime);\n     highp float   I      = dot (color, kRGBToI);\n     highp float   Q      = dot (color, kRGBToQ);\n     \n     // Calculate the hue and chroma\n     highp float   hue     = atan (Q, I);\n     highp float   chroma  = sqrt (I * I + Q * Q);\n     \n     // Make the user's adjustments\n     hue += (-hueAdjust); //why negative rotation?\n     \n     // Convert back to YIQ\n     Q = chroma * sin (hue);\n     I = chroma * cos (hue);\n     \n     // Convert back to RGB\n     highp vec4    yIQ   = vec4 (YPrime, I, Q, 0.0);\n     color.r = dot (yIQ, kYIQToR);\n     color.g = dot (yIQ, kYIQToG);\n     color.b = dot (yIQ, kYIQToB);\n     \n     // Save the result\n     gl_FragColor = color;\n }\n);\n#else\nNSString *const kGPUImageHueFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform float hueAdjust;\n const vec4  kRGBToYPrime = vec4 (0.299, 0.587, 0.114, 0.0);\n const vec4  kRGBToI     = vec4 (0.595716, -0.274453, -0.321263, 0.0);\n const vec4  kRGBToQ     = vec4 (0.211456, -0.522591, 0.31135, 0.0);\n \n const vec4  kYIQToR   = vec4 (1.0, 0.9563, 0.6210, 0.0);\n const vec4  kYIQToG   = vec4 (1.0, -0.2721, -0.6474, 0.0);\n const vec4  kYIQToB   = vec4 (1.0, -1.1070, 1.7046, 0.0);\n \n void main ()\n {\n     // Sample the input pixel\n     vec4 color   = texture2D(inputImageTexture, textureCoordinate);\n     \n     // Convert to YIQ\n     float   YPrime  = dot (color, kRGBToYPrime);\n     float   I      = dot (color, kRGBToI);\n     float   Q      = dot (color, kRGBToQ);\n     \n     // Calculate the hue and chroma\n     float   hue     = atan (Q, I);\n     float   chroma  = sqrt (I * I + Q * Q);\n     \n     // Make the user's adjustments\n     hue += (-hueAdjust); //why negative rotation?\n     \n     // Convert back to YIQ\n     Q = chroma * sin (hue);\n     I = chroma * cos (hue);\n     \n     // Convert back to RGB\n     vec4    yIQ   = vec4 (YPrime, I, Q, 0.0);\n     color.r = dot (yIQ, kYIQToR);\n     color.g = dot (yIQ, kYIQToG);\n     color.b = dot (yIQ, kYIQToB);\n     \n     // Save the result\n     gl_FragColor = color;\n }\n);\n#endif\n\n@implementation GPUImageHueFilter\n@synthesize hue;\n\n- (id)init\n{\n    if(! 
(self = [super initWithFragmentShaderFromString:kGPUImageHueFragmentShaderString]) )\n    {\n        return nil;\n    }\n    \n    hueAdjustUniform = [filterProgram uniformIndex:@\"hueAdjust\"];\n    self.hue = 90;\n    \n    return self;\n}\n\n- (void)setHue:(CGFloat)newHue\n{\n    // Convert degrees to radians for hue rotation\n    hue = fmodf(newHue, 360.0) * M_PI/180;\n    [self setFloat:hue forUniform:hueAdjustUniform program:filterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageJFAVoronoiFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageJFAVoronoiFilter : GPUImageFilter\n{\n    GLuint secondFilterOutputTexture;\n    GLuint secondFilterFramebuffer;\n    \n    \n    GLint sampleStepUniform;\n    GLint sizeUniform;\n    NSUInteger numPasses;\n    \n}\n\n@property (nonatomic, readwrite) CGSize sizeInPixels;\n\n@end"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageJFAVoronoiFilter.m",
    "content": "//  adapted from unitzeroone - http://unitzeroone.com/labs/jfavoronoi/\n\n#import \"GPUImageJFAVoronoiFilter.h\"\n\n//  The shaders are mostly taken from UnitZeroOne's WebGL example here:\n//  http://unitzeroone.com/blog/2011/03/22/jump-flood-voronoi-for-webgl/\n\nNSString *const kGPUImageJFAVoronoiVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n \n uniform float sampleStep;\n \n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n void main()\n {\n     gl_Position = position;\n     \n     vec2 widthStep = vec2(sampleStep, 0.0);\n     vec2 heightStep = vec2(0.0, sampleStep);\n     vec2 widthHeightStep = vec2(sampleStep);\n     vec2 widthNegativeHeightStep = vec2(sampleStep, -sampleStep);\n     \n     textureCoordinate = inputTextureCoordinate.xy;\n     leftTextureCoordinate = inputTextureCoordinate.xy - widthStep;\n     rightTextureCoordinate = inputTextureCoordinate.xy + widthStep;\n     \n     topTextureCoordinate = inputTextureCoordinate.xy - heightStep;\n     topLeftTextureCoordinate = inputTextureCoordinate.xy - widthHeightStep;\n     topRightTextureCoordinate = inputTextureCoordinate.xy + widthNegativeHeightStep;\n     \n     bottomTextureCoordinate = inputTextureCoordinate.xy + heightStep;\n     bottomLeftTextureCoordinate = inputTextureCoordinate.xy - widthNegativeHeightStep;\n     bottomRightTextureCoordinate = inputTextureCoordinate.xy + widthHeightStep;\n }\n );\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageJFAVoronoiFragmentShaderString = SHADER_STRING\n(\n \n precision highp float;\n \n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform vec2 size;\n //varying vec2 textureCoordinate;\n //uniform float sampleStep;\n \n vec2 getCoordFromColor(vec4 color)\n{\n    float z = color.z * 256.0;\n    float yoff = floor(z / 8.0);\n    float xoff = mod(z, 8.0);\n    float x = color.x*256.0 + xoff*256.0;\n    float y = color.y*256.0 + yoff*256.0;\n    return vec2(x,y) / size;\n}\n \n void main(void) {\n     \n     vec2 sub;\n     vec4 dst;\n     vec4 local = texture2D(inputImageTexture, textureCoordinate);\n     vec4 sam;\n     float l;\n     float smallestDist;\n     if(local.a == 0.0){\n         \n         smallestDist = dot(1.0,1.0);\n     }else{\n         sub = getCoordFromColor(local)-textureCoordinate;\n         smallestDist = dot(sub,sub);\n     }\n     dst = local;\n     \n     \n     sam = texture2D(inputImageTexture, topRightTextureCoordinate);\n     if(sam.a == 1.0){\n         sub = (getCoordFromColor(sam)-textureCoordinate);\n         l = dot(sub,sub);\n         if(l < smallestDist){\n             smallestDist = l;\n             dst = sam;\n         }\n     }\n     \n     sam = texture2D(inputImageTexture, topTextureCoordinate);\n     if(sam.a == 1.0){\n         sub = 
(getCoordFromColor(sam)-textureCoordinate);\n         l = dot(sub,sub);\n         if(l < smallestDist){\n             smallestDist = l;\n             dst = sam;\n         }\n     }\n     \n     sam = texture2D(inputImageTexture, topLeftTextureCoordinate);\n     if(sam.a == 1.0){\n         sub = (getCoordFromColor(sam)-textureCoordinate);\n         l = dot(sub,sub);\n         if(l < smallestDist){\n             smallestDist = l;\n             dst = sam;\n         }\n     }\n     \n     sam = texture2D(inputImageTexture, bottomRightTextureCoordinate);\n     if(sam.a == 1.0){\n         sub = (getCoordFromColor(sam)-textureCoordinate);\n         l = dot(sub,sub);\n         if(l < smallestDist){\n             smallestDist = l;\n             dst = sam;\n         }\n     }\n     \n     sam = texture2D(inputImageTexture, bottomTextureCoordinate);\n     if(sam.a == 1.0){\n         sub = (getCoordFromColor(sam)-textureCoordinate);\n         l = dot(sub,sub);\n         if(l < smallestDist){\n             smallestDist = l;\n             dst = sam;\n         }\n     }\n     \n     sam = texture2D(inputImageTexture, bottomLeftTextureCoordinate);\n     if(sam.a == 1.0){\n         sub = (getCoordFromColor(sam)-textureCoordinate);\n         l = dot(sub,sub);\n         if(l < smallestDist){\n             smallestDist = l;\n             dst = sam;\n         }\n     }\n     \n     sam = texture2D(inputImageTexture, leftTextureCoordinate);\n     if(sam.a == 1.0){\n         sub = (getCoordFromColor(sam)-textureCoordinate);\n         l = dot(sub,sub);\n         if(l < smallestDist){\n             smallestDist = l;\n             dst = sam;\n         }\n     }\n     \n     sam = texture2D(inputImageTexture, rightTextureCoordinate);\n     if(sam.a == 1.0){\n         sub = (getCoordFromColor(sam)-textureCoordinate);\n         l = dot(sub,sub);\n         if(l < smallestDist){\n             smallestDist = l;\n             dst = sam;\n         }\n     }\n     gl_FragColor = dst;\n }\n);\n#else\nNSString *const kGPUImageJFAVoronoiFragmentShaderString = SHADER_STRING\n( \n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform vec2 size;\n //varying vec2 textureCoordinate;\n //uniform float sampleStep;\n \n vec2 getCoordFromColor(vec4 color)\n{\n    float z = color.z * 256.0;\n    float yoff = floor(z / 8.0);\n    float xoff = mod(z, 8.0);\n    float x = color.x*256.0 + xoff*256.0;\n    float y = color.y*256.0 + yoff*256.0;\n    return vec2(x,y) / size;\n}\n \n void main(void) {\n     \n     vec2 sub;\n     vec4 dst;\n     vec4 local = texture2D(inputImageTexture, textureCoordinate);\n     vec4 sam;\n     float l;\n     float smallestDist;\n     if(local.a == 0.0){\n         \n         smallestDist = dot(1.0,1.0);\n     }else{\n         sub = getCoordFromColor(local)-textureCoordinate;\n         smallestDist = dot(sub,sub);\n     }\n     dst = local;\n     \n     \n     sam = texture2D(inputImageTexture, topRightTextureCoordinate);\n     if(sam.a == 1.0){\n         sub = (getCoordFromColor(sam)-textureCoordinate);\n         l = dot(sub,sub);\n         if(l < smallestDist){\n             smallestDist = l;\n             dst = sam;\n         }\n     }\n     \n     sam = 
texture2D(inputImageTexture, topTextureCoordinate);\n     if(sam.a == 1.0){\n         sub = (getCoordFromColor(sam)-textureCoordinate);\n         l = dot(sub,sub);\n         if(l < smallestDist){\n             smallestDist = l;\n             dst = sam;\n         }\n     }\n     \n     sam = texture2D(inputImageTexture, topLeftTextureCoordinate);\n     if(sam.a == 1.0){\n         sub = (getCoordFromColor(sam)-textureCoordinate);\n         l = dot(sub,sub);\n         if(l < smallestDist){\n             smallestDist = l;\n             dst = sam;\n         }\n     }\n     \n     sam = texture2D(inputImageTexture, bottomRightTextureCoordinate);\n     if(sam.a == 1.0){\n         sub = (getCoordFromColor(sam)-textureCoordinate);\n         l = dot(sub,sub);\n         if(l < smallestDist){\n             smallestDist = l;\n             dst = sam;\n         }\n     }\n     \n     sam = texture2D(inputImageTexture, bottomTextureCoordinate);\n     if(sam.a == 1.0){\n         sub = (getCoordFromColor(sam)-textureCoordinate);\n         l = dot(sub,sub);\n         if(l < smallestDist){\n             smallestDist = l;\n             dst = sam;\n         }\n     }\n     \n     sam = texture2D(inputImageTexture, bottomLeftTextureCoordinate);\n     if(sam.a == 1.0){\n         sub = (getCoordFromColor(sam)-textureCoordinate);\n         l = dot(sub,sub);\n         if(l < smallestDist){\n             smallestDist = l;\n             dst = sam;\n         }\n     }\n     \n     sam = texture2D(inputImageTexture, leftTextureCoordinate);\n     if(sam.a == 1.0){\n         sub = (getCoordFromColor(sam)-textureCoordinate);\n         l = dot(sub,sub);\n         if(l < smallestDist){\n             smallestDist = l;\n             dst = sam;\n         }\n     }\n     \n     sam = texture2D(inputImageTexture, rightTextureCoordinate);\n     if(sam.a == 1.0){\n         sub = (getCoordFromColor(sam)-textureCoordinate);\n         l = dot(sub,sub);\n         if(l < smallestDist){\n             smallestDist = l;\n             dst = sam;\n         }\n     }\n     gl_FragColor = dst;\n }\n);\n#endif\n\n@interface GPUImageJFAVoronoiFilter() {\n    int currentPass;\n}\n\n\n@end\n\n@implementation GPUImageJFAVoronoiFilter\n\n@synthesize sizeInPixels = _sizeInPixels;\n\n- (id)init;\n{\n    if (!(self = [super initWithVertexShaderFromString:kGPUImageJFAVoronoiVertexShaderString fragmentShaderFromString:kGPUImageJFAVoronoiFragmentShaderString]))\n    {\n        \n        NSLog(@\"nil returned\");\n\t\treturn nil;\n        \n    }\n    \n    sampleStepUniform = [filterProgram uniformIndex:@\"sampleStep\"];\n    sizeUniform = [filterProgram uniformIndex:@\"size\"];\n    //[self disableSecondFrameCheck];\n    \n    return self;\n}\n\n-(void)setSizeInPixels:(CGSize)sizeInPixels {\n    _sizeInPixels = sizeInPixels;\n    \n    //validate that it's a power of 2\n    \n    float width = log2(sizeInPixels.width);\n    float height = log2(sizeInPixels.height);\n    \n    if (width != height) {\n        NSLog(@\"Voronoi point texture must be square\");\n        return;\n    }\n    if (width != floor(width) || height != floor(height)) {\n        NSLog(@\"Voronoi point texture must be a power of 2.  
Texture size: %f, %f\", sizeInPixels.width, sizeInPixels.height);\n        return;\n    }\n    glUniform2f(sizeUniform, _sizeInPixels.width, _sizeInPixels.height);\n}\n\n#pragma mark -\n#pragma mark Managing the display FBOs\n\n-(NSUInteger)nextPowerOfTwo:(CGPoint)input {\n    NSUInteger val;\n    if (input.x > input.y) {\n        val = (NSUInteger)input.x;\n    } else {\n        val = (NSUInteger)input.y;\n    }\n    \n    val--;\n    val = (val >> 1) | val;\n    val = (val >> 2) | val;\n    val = (val >> 4) | val;\n    val = (val >> 8) | val;\n    val = (val >> 16) | val;\n    val++;\n    return val;\n}\n\n//- (void)setOutputFBO;\n//{\n//    if (currentPass % 2 == 1) {\n//        [self setSecondFilterFBO];\n//    } else {\n//        [self setFilterFBO];\n//    }\n//    \n//}\n\n- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;\n{\n    // Run the first stage of the two-pass filter\n    [GPUImageContext setActiveShaderProgram:filterProgram];\n    currentPass = 0;\n    \n    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];\n    [outputFramebuffer activateFramebuffer];\n    \n    glActiveTexture(GL_TEXTURE2);\n    \n    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);\n    glClear(GL_COLOR_BUFFER_BIT);\n    \n    glUniform1f(sampleStepUniform, 0.5);\n    \n    glUniform2f(sizeUniform, _sizeInPixels.width, _sizeInPixels.height);\n    \n    glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);\n    \n    glUniform1i(filterInputTextureUniform, 2);\n    \n    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);\n    glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);\n    \n    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\n    \n    for (int pass = 1; pass <= numPasses + 1; pass++) {\n        currentPass = pass;\n//        [self setOutputFBO];\n        \n        //glClearColor(0.0f, 0.0f, 0.0f, 1.0f);\n        glClear(GL_COLOR_BUFFER_BIT);\n        \n        glActiveTexture(GL_TEXTURE2);\n        if (pass % 2 == 0) {\n            glBindTexture(GL_TEXTURE_2D, secondFilterOutputTexture);\n        } else {\n            glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);\n        }\n        glUniform1i(filterInputTextureUniform, 2);\n        \n        float step = pow(2.0, numPasses - pass) / pow(2.0, numPasses);\n        glUniform1f(sampleStepUniform, step);\n        glUniform2f(sizeUniform, _sizeInPixels.width, _sizeInPixels.height);\n        \n        glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);\n        glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);\n        \n        glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\n    }\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageKuwaharaFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n/** Kuwahara image abstraction, drawn from the work of Kyprianidis, et. al. in their publication \"Anisotropic Kuwahara Filtering on the GPU\" within the GPU Pro collection. This produces an oil-painting-like image, but it is extremely computationally expensive, so it can take seconds to render a frame on an iPad 2. This might be best used for still images.\n */\n@interface GPUImageKuwaharaFilter : GPUImageFilter\n{\n    GLint radiusUniform;\n}\n\n/// The radius to sample from when creating the brush-stroke effect, with a default of 3. The larger the radius, the slower the filter.\n@property(readwrite, nonatomic) NSUInteger radius;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageKuwaharaFilter.m",
    "content": "#import \"GPUImageKuwaharaFilter.h\"\n\n// Sourced from Kyprianidis, J. E., Kang, H., and Doellner, J. \"Anisotropic Kuwahara Filtering on the GPU,\" GPU Pro p.247 (2010).\n// \n// Original header:\n// \n// Anisotropic Kuwahara Filtering on the GPU\n// by Jan Eric Kyprianidis <www.kyprianidis.com>\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageKuwaharaFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n uniform sampler2D inputImageTexture;\n uniform int radius;\n \n precision highp float;\n \n const vec2 src_size = vec2 (1.0 / 768.0, 1.0 / 1024.0);\n \n void main (void) \n {\n     vec2 uv = textureCoordinate;\n     float n = float((radius + 1) * (radius + 1));\n     int i; int j;\n     vec3 m0 = vec3(0.0); vec3 m1 = vec3(0.0); vec3 m2 = vec3(0.0); vec3 m3 = vec3(0.0);\n     vec3 s0 = vec3(0.0); vec3 s1 = vec3(0.0); vec3 s2 = vec3(0.0); vec3 s3 = vec3(0.0);\n     vec3 c;\n\n     for (j = -radius; j <= 0; ++j)  {\n         for (i = -radius; i <= 0; ++i)  {\n             c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb;\n             m0 += c;\n             s0 += c * c;\n         }\n     }\n\n     for (j = -radius; j <= 0; ++j)  {\n         for (i = 0; i <= radius; ++i)  {\n             c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb;\n             m1 += c;\n             s1 += c * c;\n         }\n     }\n\n     for (j = 0; j <= radius; ++j)  {\n         for (i = 0; i <= radius; ++i)  {\n             c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb;\n             m2 += c;\n             s2 += c * c;\n         }\n     }\n\n     for (j = 0; j <= radius; ++j)  {\n         for (i = -radius; i <= 0; ++i)  {\n             c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb;\n             m3 += c;\n             s3 += c * c;\n         }\n     }\n\n\n     float min_sigma2 = 1e+2;\n     m0 /= n;\n     s0 = abs(s0 / n - m0 * m0);\n\n     float sigma2 = s0.r + s0.g + s0.b;\n     if (sigma2 < min_sigma2) {\n         min_sigma2 = sigma2;\n         gl_FragColor = vec4(m0, 1.0);\n     }\n\n     m1 /= n;\n     s1 = abs(s1 / n - m1 * m1);\n\n     sigma2 = s1.r + s1.g + s1.b;\n     if (sigma2 < min_sigma2) {\n         min_sigma2 = sigma2;\n         gl_FragColor = vec4(m1, 1.0);\n     }\n\n     m2 /= n;\n     s2 = abs(s2 / n - m2 * m2);\n\n     sigma2 = s2.r + s2.g + s2.b;\n     if (sigma2 < min_sigma2) {\n         min_sigma2 = sigma2;\n         gl_FragColor = vec4(m2, 1.0);\n     }\n\n     m3 /= n;\n     s3 = abs(s3 / n - m3 * m3);\n\n     sigma2 = s3.r + s3.g + s3.b;\n     if (sigma2 < min_sigma2) {\n         min_sigma2 = sigma2;\n         gl_FragColor = vec4(m3, 1.0);\n     }\n }\n);\n#else\nNSString *const kGPUImageKuwaharaFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n uniform sampler2D inputImageTexture;\n uniform int radius;\n \n const vec2 src_size = vec2 (1.0 / 768.0, 1.0 / 1024.0);\n \n void main (void)\n {\n     vec2 uv = textureCoordinate;\n     float n = float((radius + 1) * (radius + 1));\n     int i; int j;\n     vec3 m0 = vec3(0.0); vec3 m1 = vec3(0.0); vec3 m2 = vec3(0.0); vec3 m3 = vec3(0.0);\n     vec3 s0 = vec3(0.0); vec3 s1 = vec3(0.0); vec3 s2 = vec3(0.0); vec3 s3 = vec3(0.0);\n     vec3 c;\n     \n     for (j = -radius; j <= 0; ++j)  {\n         for (i = -radius; i <= 0; ++i)  {\n             c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb;\n             m0 += c;\n             s0 += c * c;\n         }\n     }\n     \n  
   for (j = -radius; j <= 0; ++j)  {\n         for (i = 0; i <= radius; ++i)  {\n             c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb;\n             m1 += c;\n             s1 += c * c;\n         }\n     }\n     \n     for (j = 0; j <= radius; ++j)  {\n         for (i = 0; i <= radius; ++i)  {\n             c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb;\n             m2 += c;\n             s2 += c * c;\n         }\n     }\n     \n     for (j = 0; j <= radius; ++j)  {\n         for (i = -radius; i <= 0; ++i)  {\n             c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb;\n             m3 += c;\n             s3 += c * c;\n         }\n     }\n     \n     \n     float min_sigma2 = 1e+2;\n     m0 /= n;\n     s0 = abs(s0 / n - m0 * m0);\n     \n     float sigma2 = s0.r + s0.g + s0.b;\n     if (sigma2 < min_sigma2) {\n         min_sigma2 = sigma2;\n         gl_FragColor = vec4(m0, 1.0);\n     }\n     \n     m1 /= n;\n     s1 = abs(s1 / n - m1 * m1);\n     \n     sigma2 = s1.r + s1.g + s1.b;\n     if (sigma2 < min_sigma2) {\n         min_sigma2 = sigma2;\n         gl_FragColor = vec4(m1, 1.0);\n     }\n     \n     m2 /= n;\n     s2 = abs(s2 / n - m2 * m2);\n     \n     sigma2 = s2.r + s2.g + s2.b;\n     if (sigma2 < min_sigma2) {\n         min_sigma2 = sigma2;\n         gl_FragColor = vec4(m2, 1.0);\n     }\n     \n     m3 /= n;\n     s3 = abs(s3 / n - m3 * m3);\n     \n     sigma2 = s3.r + s3.g + s3.b;\n     if (sigma2 < min_sigma2) {\n         min_sigma2 = sigma2;\n         gl_FragColor = vec4(m3, 1.0);\n     }\n }\n);\n#endif\n\n@implementation GPUImageKuwaharaFilter\n\n@synthesize radius = _radius;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageKuwaharaFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    radiusUniform = [filterProgram uniformIndex:@\"radius\"];\n\n    self.radius = 3;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setRadius:(NSUInteger)newValue;\n{\n    _radius = newValue;\n    \n    [self setInteger:(GLint)_radius forUniform:radiusUniform program:filterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageKuwaharaRadius3Filter.h",
    "content": "//\n//  GPUImageKuwaharaRadius3Filter.h\n\n#import \"GPUImageFilter.h\"\n\n@interface GPUImageKuwaharaRadius3Filter : GPUImageFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageKuwaharaRadius3Filter.m",
    "content": "#import \"GPUImageKuwaharaRadius3Filter.h\"\n\n// Sourced from Kyprianidis, J. E., Kang, H., and Doellner, J. \"Anisotropic Kuwahara Filtering on the GPU,\" GPU Pro p.247 (2010).\n//\n// Original header:\n//\n// Anisotropic Kuwahara Filtering on the GPU\n// by Jan Eric Kyprianidis <www.kyprianidis.com>\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageKuwaharaRadius3FragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n uniform sampler2D inputImageTexture;\n\n precision highp float;\n\n const vec2 src_size = vec2 (1.0 / 768.0, 1.0 / 1024.0);\n\n void main (void)\n {\n     vec2 uv = textureCoordinate;\n     float n = float(16); // radius is assumed to be 3\n     vec3 m0 = vec3(0.0); vec3 m1 = vec3(0.0); vec3 m2 = vec3(0.0); vec3 m3 = vec3(0.0);\n     vec3 s0 = vec3(0.0); vec3 s1 = vec3(0.0); vec3 s2 = vec3(0.0); vec3 s3 = vec3(0.0);\n     vec3 c;\n     vec3 cSq;\n\n     c = texture2D(inputImageTexture, uv + vec2(-3,-3) * src_size).rgb;\n     m0 += c;\n     s0 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-3,-2) * src_size).rgb;\n     m0 += c;\n     s0 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-3,-1) * src_size).rgb;\n     m0 += c;\n     s0 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-3,0) * src_size).rgb;\n     cSq = c * c;\n     m0 += c;\n     s0 += cSq;\n     m1 += c;\n     s1 += cSq;\n\n     c = texture2D(inputImageTexture, uv + vec2(-2,-3) * src_size).rgb;\n     m0 += c;\n     s0 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-2,-2) * src_size).rgb;\n     m0 += c;\n     s0 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-2,-1) * src_size).rgb;\n     m0 += c;\n     s0 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-2,0) * src_size).rgb;\n     cSq = c * c;\n     m0 += c;\n     s0 += cSq;\n     m1 += c;\n     s1 += cSq;\n\n     c = texture2D(inputImageTexture, uv + vec2(-1,-3) * src_size).rgb;\n     m0 += c;\n     s0 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-1,-2) * src_size).rgb;\n     m0 += c;\n     s0 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-1,-1) * src_size).rgb;\n     m0 += c;\n     s0 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-1,0) * src_size).rgb;\n     cSq = c * c;\n     m0 += c;\n     s0 += cSq;\n     m1 += c;\n     s1 += cSq;\n\n     c = texture2D(inputImageTexture, uv + vec2(0,-3) * src_size).rgb;\n     cSq = c * c;\n     m0 += c;\n     s0 += cSq;\n     m3 += c;\n     s3 += cSq;\n     c = texture2D(inputImageTexture, uv + vec2(0,-2) * src_size).rgb;\n     cSq = c * c;\n     m0 += c;\n     s0 += cSq;\n     m3 += c;\n     s3 += cSq;\n     c = texture2D(inputImageTexture, uv + vec2(0,-1) * src_size).rgb;\n     cSq = c * c;\n     m0 += c;\n     s0 += cSq;\n     m3 += c;\n     s3 += cSq;\n     c = texture2D(inputImageTexture, uv + vec2(0,0) * src_size).rgb;\n     cSq = c * c;\n     m0 += c;\n     s0 += cSq;\n     m1 += c;\n     s1 += cSq;\n     m2 += c;\n     s2 += cSq;\n     m3 += c;\n     s3 += cSq;\n\n     c = texture2D(inputImageTexture, uv + vec2(-3,3) * src_size).rgb;\n     m1 += c;\n     s1 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-3,2) * src_size).rgb;\n     m1 += c;\n     s1 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-3,1) * src_size).rgb;\n     m1 += c;\n     s1 += c * c;\n\n     c = texture2D(inputImageTexture, uv + vec2(-2,3) * src_size).rgb;\n     m1 += c;\n     s1 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-2,2) * 
src_size).rgb;\n     m1 += c;\n     s1 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-2,1) * src_size).rgb;\n     m1 += c;\n     s1 += c * c;\n\n     c = texture2D(inputImageTexture, uv + vec2(-1,3) * src_size).rgb;\n     m1 += c;\n     s1 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-1,2) * src_size).rgb;\n     m1 += c;\n     s1 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-1,1) * src_size).rgb;\n     m1 += c;\n     s1 += c * c;\n\n     c = texture2D(inputImageTexture, uv + vec2(0,3) * src_size).rgb;\n     cSq = c * c;\n     m1 += c;\n     s1 += cSq;\n     m2 += c;\n     s2 += cSq;\n     c = texture2D(inputImageTexture, uv + vec2(0,2) * src_size).rgb;\n     cSq = c * c;\n     m1 += c;\n     s1 += cSq;\n     m2 += c;\n     s2 += cSq;\n     c = texture2D(inputImageTexture, uv + vec2(0,1) * src_size).rgb;\n     cSq = c * c;\n     m1 += c;\n     s1 += cSq;\n     m2 += c;\n     s2 += cSq;\n\n     c = texture2D(inputImageTexture, uv + vec2(3,3) * src_size).rgb;\n     m2 += c;\n     s2 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(3,2) * src_size).rgb;\n     m2 += c;\n     s2 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(3,1) * src_size).rgb;\n     m2 += c;\n     s2 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(3,0) * src_size).rgb;\n     cSq = c * c;\n     m2 += c;\n     s2 += cSq;\n     m3 += c;\n     s3 += cSq;\n\n     c = texture2D(inputImageTexture, uv + vec2(2,3) * src_size).rgb;\n     m2 += c;\n     s2 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(2,2) * src_size).rgb;\n     m2 += c;\n     s2 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(2,1) * src_size).rgb;\n     m2 += c;\n     s2 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(2,0) * src_size).rgb;\n     cSq = c * c;\n     m2 += c;\n     s2 += cSq;\n     m3 += c;\n     s3 += cSq;\n\n     c = texture2D(inputImageTexture, uv + vec2(1,3) * src_size).rgb;\n     m2 += c;\n     s2 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(1,2) * src_size).rgb;\n     m2 += c;\n     s2 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(1,1) * src_size).rgb;\n     m2 += c;\n     s2 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(1,0) * src_size).rgb;\n     cSq = c * c;\n     m2 += c;\n     s2 += cSq;\n     m3 += c;\n     s3 += cSq;\n\n     c = texture2D(inputImageTexture, uv + vec2(3,-3) * src_size).rgb;\n     m3 += c;\n     s3 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(3,-2) * src_size).rgb;\n     m3 += c;\n     s3 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(3,-1) * src_size).rgb;\n     m3 += c;\n     s3 += c * c;\n\n     c = texture2D(inputImageTexture, uv + vec2(2,-3) * src_size).rgb;\n     m3 += c;\n     s3 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(2,-2) * src_size).rgb;\n     m3 += c;\n     s3 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(2,-1) * src_size).rgb;\n     m3 += c;\n     s3 += c * c;\n\n     c = texture2D(inputImageTexture, uv + vec2(1,-3) * src_size).rgb;\n     m3 += c;\n     s3 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(1,-2) * src_size).rgb;\n     m3 += c;\n     s3 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(1,-1) * src_size).rgb;\n     m3 += c;\n     s3 += c * c;\n\n     float min_sigma2 = 1e+2;\n     m0 /= n;\n     s0 = abs(s0 / n - m0 * m0);\n\n     float sigma2 = s0.r + s0.g + s0.b;\n     if (sigma2 < min_sigma2) {\n         min_sigma2 = sigma2;\n         gl_FragColor = vec4(m0, 1.0);\n     }\n\n   
  m1 /= n;\n     s1 = abs(s1 / n - m1 * m1);\n\n     sigma2 = s1.r + s1.g + s1.b;\n     if (sigma2 < min_sigma2) {\n         min_sigma2 = sigma2;\n         gl_FragColor = vec4(m1, 1.0);\n     }\n\n     m2 /= n;\n     s2 = abs(s2 / n - m2 * m2);\n\n     sigma2 = s2.r + s2.g + s2.b;\n     if (sigma2 < min_sigma2) {\n         min_sigma2 = sigma2;\n         gl_FragColor = vec4(m2, 1.0);\n     }\n\n     m3 /= n;\n     s3 = abs(s3 / n - m3 * m3);\n\n     sigma2 = s3.r + s3.g + s3.b;\n     if (sigma2 < min_sigma2) {\n         min_sigma2 = sigma2;\n         gl_FragColor = vec4(m3, 1.0);\n     }\n }\n);\n#else\nNSString *const kGPUImageKuwaharaRadius3FragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n uniform sampler2D inputImageTexture;\n \n const vec2 src_size = vec2 (1.0 / 768.0, 1.0 / 1024.0);\n \n void main (void)\n {\n     vec2 uv = textureCoordinate;\n     float n = float(16); // radius is assumed to be 3\n     vec3 m0 = vec3(0.0); vec3 m1 = vec3(0.0); vec3 m2 = vec3(0.0); vec3 m3 = vec3(0.0);\n     vec3 s0 = vec3(0.0); vec3 s1 = vec3(0.0); vec3 s2 = vec3(0.0); vec3 s3 = vec3(0.0);\n     vec3 c;\n     vec3 cSq;\n     \n     c = texture2D(inputImageTexture, uv + vec2(-3,-3) * src_size).rgb;\n     m0 += c;\n     s0 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-3,-2) * src_size).rgb;\n     m0 += c;\n     s0 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-3,-1) * src_size).rgb;\n     m0 += c;\n     s0 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-3,0) * src_size).rgb;\n     cSq = c * c;\n     m0 += c;\n     s0 += cSq;\n     m1 += c;\n     s1 += cSq;\n     \n     c = texture2D(inputImageTexture, uv + vec2(-2,-3) * src_size).rgb;\n     m0 += c;\n     s0 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-2,-2) * src_size).rgb;\n     m0 += c;\n     s0 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-2,-1) * src_size).rgb;\n     m0 += c;\n     s0 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-2,0) * src_size).rgb;\n     cSq = c * c;\n     m0 += c;\n     s0 += cSq;\n     m1 += c;\n     s1 += cSq;\n     \n     c = texture2D(inputImageTexture, uv + vec2(-1,-3) * src_size).rgb;\n     m0 += c;\n     s0 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-1,-2) * src_size).rgb;\n     m0 += c;\n     s0 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-1,-1) * src_size).rgb;\n     m0 += c;\n     s0 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-1,0) * src_size).rgb;\n     cSq = c * c;\n     m0 += c;\n     s0 += cSq;\n     m1 += c;\n     s1 += cSq;\n     \n     c = texture2D(inputImageTexture, uv + vec2(0,-3) * src_size).rgb;\n     cSq = c * c;\n     m0 += c;\n     s0 += cSq;\n     m3 += c;\n     s3 += cSq;\n     c = texture2D(inputImageTexture, uv + vec2(0,-2) * src_size).rgb;\n     cSq = c * c;\n     m0 += c;\n     s0 += cSq;\n     m3 += c;\n     s3 += cSq;\n     c = texture2D(inputImageTexture, uv + vec2(0,-1) * src_size).rgb;\n     cSq = c * c;\n     m0 += c;\n     s0 += cSq;\n     m3 += c;\n     s3 += cSq;\n     c = texture2D(inputImageTexture, uv + vec2(0,0) * src_size).rgb;\n     cSq = c * c;\n     m0 += c;\n     s0 += cSq;\n     m1 += c;\n     s1 += cSq;\n     m2 += c;\n     s2 += cSq;\n     m3 += c;\n     s3 += cSq;\n     \n     c = texture2D(inputImageTexture, uv + vec2(-3,3) * src_size).rgb;\n     m1 += c;\n     s1 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-3,2) * src_size).rgb;\n     m1 += c;\n     s1 += c * c;\n     c = texture2D(inputImageTexture, uv + 
vec2(-3,1) * src_size).rgb;\n     m1 += c;\n     s1 += c * c;\n     \n     c = texture2D(inputImageTexture, uv + vec2(-2,3) * src_size).rgb;\n     m1 += c;\n     s1 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-2,2) * src_size).rgb;\n     m1 += c;\n     s1 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-2,1) * src_size).rgb;\n     m1 += c;\n     s1 += c * c;\n     \n     c = texture2D(inputImageTexture, uv + vec2(-1,3) * src_size).rgb;\n     m1 += c;\n     s1 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-1,2) * src_size).rgb;\n     m1 += c;\n     s1 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(-1,1) * src_size).rgb;\n     m1 += c;\n     s1 += c * c;\n     \n     c = texture2D(inputImageTexture, uv + vec2(0,3) * src_size).rgb;\n     cSq = c * c;\n     m1 += c;\n     s1 += cSq;\n     m2 += c;\n     s2 += cSq;\n     c = texture2D(inputImageTexture, uv + vec2(0,2) * src_size).rgb;\n     cSq = c * c;\n     m1 += c;\n     s1 += cSq;\n     m2 += c;\n     s2 += cSq;\n     c = texture2D(inputImageTexture, uv + vec2(0,1) * src_size).rgb;\n     cSq = c * c;\n     m1 += c;\n     s1 += cSq;\n     m2 += c;\n     s2 += cSq;\n     \n     c = texture2D(inputImageTexture, uv + vec2(3,3) * src_size).rgb;\n     m2 += c;\n     s2 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(3,2) * src_size).rgb;\n     m2 += c;\n     s2 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(3,1) * src_size).rgb;\n     m2 += c;\n     s2 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(3,0) * src_size).rgb;\n     cSq = c * c;\n     m2 += c;\n     s2 += cSq;\n     m3 += c;\n     s3 += cSq;\n     \n     c = texture2D(inputImageTexture, uv + vec2(2,3) * src_size).rgb;\n     m2 += c;\n     s2 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(2,2) * src_size).rgb;\n     m2 += c;\n     s2 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(2,1) * src_size).rgb;\n     m2 += c;\n     s2 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(2,0) * src_size).rgb;\n     cSq = c * c;\n     m2 += c;\n     s2 += cSq;\n     m3 += c;\n     s3 += cSq;\n     \n     c = texture2D(inputImageTexture, uv + vec2(1,3) * src_size).rgb;\n     m2 += c;\n     s2 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(1,2) * src_size).rgb;\n     m2 += c;\n     s2 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(1,1) * src_size).rgb;\n     m2 += c;\n     s2 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(1,0) * src_size).rgb;\n     cSq = c * c;\n     m2 += c;\n     s2 += cSq;\n     m3 += c;\n     s3 += cSq;\n     \n     c = texture2D(inputImageTexture, uv + vec2(3,-3) * src_size).rgb;\n     m3 += c;\n     s3 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(3,-2) * src_size).rgb;\n     m3 += c;\n     s3 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(3,-1) * src_size).rgb;\n     m3 += c;\n     s3 += c * c;\n     \n     c = texture2D(inputImageTexture, uv + vec2(2,-3) * src_size).rgb;\n     m3 += c;\n     s3 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(2,-2) * src_size).rgb;\n     m3 += c;\n     s3 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(2,-1) * src_size).rgb;\n     m3 += c;\n     s3 += c * c;\n     \n     c = texture2D(inputImageTexture, uv + vec2(1,-3) * src_size).rgb;\n     m3 += c;\n     s3 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(1,-2) * src_size).rgb;\n     m3 += c;\n     s3 += c * c;\n     c = texture2D(inputImageTexture, uv + vec2(1,-1) * src_size).rgb;\n    
 m3 += c;\n     s3 += c * c;\n     \n     float min_sigma2 = 1e+2;\n     m0 /= n;\n     s0 = abs(s0 / n - m0 * m0);\n     \n     float sigma2 = s0.r + s0.g + s0.b;\n     if (sigma2 < min_sigma2) {\n         min_sigma2 = sigma2;\n         gl_FragColor = vec4(m0, 1.0);\n     }\n     \n     m1 /= n;\n     s1 = abs(s1 / n - m1 * m1);\n     \n     sigma2 = s1.r + s1.g + s1.b;\n     if (sigma2 < min_sigma2) {\n         min_sigma2 = sigma2;\n         gl_FragColor = vec4(m1, 1.0);\n     }\n     \n     m2 /= n;\n     s2 = abs(s2 / n - m2 * m2);\n     \n     sigma2 = s2.r + s2.g + s2.b;\n     if (sigma2 < min_sigma2) {\n         min_sigma2 = sigma2;\n         gl_FragColor = vec4(m2, 1.0);\n     }\n     \n     m3 /= n;\n     s3 = abs(s3 / n - m3 * m3);\n     \n     sigma2 = s3.r + s3.g + s3.b;\n     if (sigma2 < min_sigma2) {\n         min_sigma2 = sigma2;\n         gl_FragColor = vec4(m3, 1.0);\n     }\n }\n);\n#endif\n\n@implementation GPUImageKuwaharaRadius3Filter\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageKuwaharaRadius3FragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n\n    return self;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLanczosResamplingFilter.h",
    "content": "#import \"GPUImageTwoPassTextureSamplingFilter.h\"\n\n@interface GPUImageLanczosResamplingFilter : GPUImageTwoPassTextureSamplingFilter\n\n@property(readwrite, nonatomic) CGSize originalImageSize;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLanczosResamplingFilter.m",
    "content": "#import \"GPUImageLanczosResamplingFilter.h\"\n\nNSString *const kGPUImageLanczosVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n attribute vec2 inputTextureCoordinate;\n \n uniform float texelWidthOffset;\n uniform float texelHeightOffset;\n \n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepLeftTextureCoordinate;\n varying vec2 twoStepsLeftTextureCoordinate;\n varying vec2 threeStepsLeftTextureCoordinate;\n varying vec2 fourStepsLeftTextureCoordinate;\n varying vec2 oneStepRightTextureCoordinate;\n varying vec2 twoStepsRightTextureCoordinate;\n varying vec2 threeStepsRightTextureCoordinate;\n varying vec2 fourStepsRightTextureCoordinate;\n\n void main()\n {\n     gl_Position = position;\n     \n     vec2 firstOffset = vec2(texelWidthOffset, texelHeightOffset);\n     vec2 secondOffset = vec2(2.0 * texelWidthOffset, 2.0 * texelHeightOffset);\n     vec2 thirdOffset = vec2(3.0 * texelWidthOffset, 3.0 * texelHeightOffset);\n     vec2 fourthOffset = vec2(4.0 * texelWidthOffset, 4.0 * texelHeightOffset);\n     \n     centerTextureCoordinate = inputTextureCoordinate;\n     oneStepLeftTextureCoordinate = inputTextureCoordinate - firstOffset;\n     twoStepsLeftTextureCoordinate = inputTextureCoordinate - secondOffset;\n     threeStepsLeftTextureCoordinate = inputTextureCoordinate - thirdOffset;\n     fourStepsLeftTextureCoordinate = inputTextureCoordinate - fourthOffset;\n     oneStepRightTextureCoordinate = inputTextureCoordinate + firstOffset;\n     twoStepsRightTextureCoordinate = inputTextureCoordinate + secondOffset;\n     threeStepsRightTextureCoordinate = inputTextureCoordinate + thirdOffset;\n     fourStepsRightTextureCoordinate = inputTextureCoordinate + fourthOffset;\n }\n);\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageLanczosFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n uniform sampler2D inputImageTexture;\n \n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepLeftTextureCoordinate;\n varying vec2 twoStepsLeftTextureCoordinate;\n varying vec2 threeStepsLeftTextureCoordinate;\n varying vec2 fourStepsLeftTextureCoordinate;\n varying vec2 oneStepRightTextureCoordinate;\n varying vec2 twoStepsRightTextureCoordinate;\n varying vec2 threeStepsRightTextureCoordinate;\n varying vec2 fourStepsRightTextureCoordinate;\n\n // sinc(x) * sinc(x/a) = (a * sin(pi * x) * sin(pi * x / a)) / (pi^2 * x^2)\n // Assuming a Lanczos constant of 2.0, and scaling values to max out at x = +/- 1.5\n \n void main()\n {\n     lowp vec4 fragmentColor = texture2D(inputImageTexture, centerTextureCoordinate) * 0.38026;\n     \n     fragmentColor += texture2D(inputImageTexture, oneStepLeftTextureCoordinate) * 0.27667;\n     fragmentColor += texture2D(inputImageTexture, oneStepRightTextureCoordinate) * 0.27667;\n     \n     fragmentColor += texture2D(inputImageTexture, twoStepsLeftTextureCoordinate) * 0.08074;\n     fragmentColor += texture2D(inputImageTexture, twoStepsRightTextureCoordinate) * 0.08074;\n\n     fragmentColor += texture2D(inputImageTexture, threeStepsLeftTextureCoordinate) * -0.02612;\n     fragmentColor += texture2D(inputImageTexture, threeStepsRightTextureCoordinate) * -0.02612;\n\n     fragmentColor += texture2D(inputImageTexture, fourStepsLeftTextureCoordinate) * -0.02143;\n     fragmentColor += texture2D(inputImageTexture, fourStepsRightTextureCoordinate) * -0.02143;\n\n     gl_FragColor = fragmentColor;\n }\n);\n#else\nNSString *const kGPUImageLanczosFragmentShaderString = 
SHADER_STRING\n(\n uniform sampler2D inputImageTexture;\n \n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepLeftTextureCoordinate;\n varying vec2 twoStepsLeftTextureCoordinate;\n varying vec2 threeStepsLeftTextureCoordinate;\n varying vec2 fourStepsLeftTextureCoordinate;\n varying vec2 oneStepRightTextureCoordinate;\n varying vec2 twoStepsRightTextureCoordinate;\n varying vec2 threeStepsRightTextureCoordinate;\n varying vec2 fourStepsRightTextureCoordinate;\n \n // sinc(x) * sinc(x/a) = (a * sin(pi * x) * sin(pi * x / a)) / (pi^2 * x^2)\n // Assuming a Lanczos constant of 2.0, and scaling values to max out at x = +/- 1.5\n \n void main()\n {\n     vec4 fragmentColor = texture2D(inputImageTexture, centerTextureCoordinate) * 0.38026;\n     \n     fragmentColor += texture2D(inputImageTexture, oneStepLeftTextureCoordinate) * 0.27667;\n     fragmentColor += texture2D(inputImageTexture, oneStepRightTextureCoordinate) * 0.27667;\n     \n     fragmentColor += texture2D(inputImageTexture, twoStepsLeftTextureCoordinate) * 0.08074;\n     fragmentColor += texture2D(inputImageTexture, twoStepsRightTextureCoordinate) * 0.08074;\n     \n     fragmentColor += texture2D(inputImageTexture, threeStepsLeftTextureCoordinate) * -0.02612;\n     fragmentColor += texture2D(inputImageTexture, threeStepsRightTextureCoordinate) * -0.02612;\n     \n     fragmentColor += texture2D(inputImageTexture, fourStepsLeftTextureCoordinate) * -0.02143;\n     fragmentColor += texture2D(inputImageTexture, fourStepsRightTextureCoordinate) * -0.02143;\n     \n     gl_FragColor = fragmentColor;\n }\n);\n#endif\n\n@implementation GPUImageLanczosResamplingFilter\n\n@synthesize originalImageSize = _originalImageSize;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFirstStageVertexShaderFromString:kGPUImageLanczosVertexShaderString firstStageFragmentShaderFromString:kGPUImageLanczosFragmentShaderString secondStageVertexShaderFromString:kGPUImageLanczosVertexShaderString secondStageFragmentShaderFromString:kGPUImageLanczosFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n        \n    return self;\n}\n\n// Base texture sampling offset on the input image, not the final size\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n{\n    self.originalImageSize = newSize;\n    [super setInputSize:newSize atIndex:textureIndex];\n}\n\n- (void)setupFilterForSize:(CGSize)filterFrameSize;\n{\n    runSynchronouslyOnVideoProcessingQueue(^{\n        // The first pass through the framebuffer may rotate the inbound image, so need to account for that by changing up the kernel ordering for that pass\n        if (GPUImageRotationSwapsWidthAndHeight(inputRotation))\n        {\n            verticalPassTexelWidthOffset = 1.0 / _originalImageSize.height;\n            verticalPassTexelHeightOffset = 0.0;\n        }\n        else\n        {\n            verticalPassTexelWidthOffset = 0.0;\n            verticalPassTexelHeightOffset = 1.0 / _originalImageSize.height;\n        }\n        \n        horizontalPassTexelWidthOffset = 1.0 / _originalImageSize.width;\n        horizontalPassTexelHeightOffset = 0.0;\n    });\n}\n\n\n- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;\n{\n    if (self.preventRendering)\n    {\n        [firstInputFramebuffer unlock];\n        return;\n    }\n    \n    [GPUImageContext setActiveShaderProgram:filterProgram];\n    \n    CGSize currentFBOSize = [self sizeOfFBO];\n    
if (GPUImageRotationSwapsWidthAndHeight(inputRotation))\n    {\n        currentFBOSize.height = self.originalImageSize.height;\n    }\n    else\n    {\n        currentFBOSize.width = self.originalImageSize.width;\n    }\n    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:currentFBOSize textureOptions:self.outputTextureOptions onlyTexture:NO];\n    [outputFramebuffer activateFramebuffer];\n    \n    [self setUniformsForProgramAtIndex:0];\n    \n    glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);\n    glClear(GL_COLOR_BUFFER_BIT);\n    \n\tglActiveTexture(GL_TEXTURE2);\n\tglBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);\n\t\n\tglUniform1i(filterInputTextureUniform, 2);\n    \n    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);\n\tglVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);\n    \n    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\n    \n    [firstInputFramebuffer unlock];\n    \n    // Run the second stage of the two-pass filter\n    [GPUImageContext setActiveShaderProgram:secondFilterProgram];\n    glActiveTexture(GL_TEXTURE2);\n    glBindTexture(GL_TEXTURE_2D, 0);\n    glActiveTexture(GL_TEXTURE3);\n    glBindTexture(GL_TEXTURE_2D, 0);\n    secondOutputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];\n    [secondOutputFramebuffer activateFramebuffer];\n    if (usingNextFrameForImageCapture)\n    {\n        [secondOutputFramebuffer lock];\n    }\n\n    [self setUniformsForProgramAtIndex:1];\n    \n    glActiveTexture(GL_TEXTURE3);\n    glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);\n    glVertexAttribPointer(secondFilterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:kGPUImageNoRotation]);\n    \n\tglUniform1i(secondFilterInputTextureUniform, 3);\n    \n    glVertexAttribPointer(secondFilterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);\n    \n    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);\n    glClear(GL_COLOR_BUFFER_BIT);\n    \n    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\n    [outputFramebuffer unlock];\n    outputFramebuffer = nil;\n    if (usingNextFrameForImageCapture)\n    {\n        dispatch_semaphore_signal(imageCaptureSemaphore);\n    }\n}\n\n@end\n"
  },
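The Lanczos fragment shaders above hard-code nine tap weights (0.38026, 0.27667, 0.08074, -0.02612, -0.02143) derived from the windowed-sinc formula quoted in the shader comment. As a hedged sketch only, not code from the framework, the same kernel can be evaluated in plain C; the tap spacing below (outermost tap at x = +/-1.5, per the comment) is an assumption, so the normalized weights come out close to, but not necessarily identical to, the baked-in constants.

#include <math.h>

// Lanczos-2 windowed sinc from the shader comment:
// sinc(x) * sinc(x/a) = (a * sin(pi*x) * sin(pi*x/a)) / (pi^2 * x^2), with a = 2.0
static double lanczos2(double x)
{
    if (x == 0.0)       return 1.0;   // sinc limit at zero
    if (fabs(x) >= 2.0) return 0.0;   // outside the a = 2 support
    return (2.0 * sin(M_PI * x) * sin(M_PI * x / 2.0)) / (M_PI * M_PI * x * x);
}

// Hypothetical helper: center weight plus four per-side weights, normalized to sum to 1.0,
// with taps spaced so the outermost lands at x = +/-1.5 (assumed spacing).
static void lanczos2Weights(double weights[5])
{
    double sum = lanczos2(0.0);
    weights[0] = lanczos2(0.0);
    for (int i = 1; i <= 4; i++)
    {
        weights[i] = lanczos2(1.5 * (double)i / 4.0);
        sum += 2.0 * weights[i];          // each non-center tap is sampled on both sides
    }
    for (int i = 0; i <= 4; i++)
    {
        weights[i] /= sum;                // normalize so a flat input passes through unchanged
    }
}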
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLaplacianFilter.h",
    "content": "#import \"GPUImage3x3ConvolutionFilter.h\"\n\n@interface GPUImageLaplacianFilter : GPUImage3x3ConvolutionFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLaplacianFilter.m",
    "content": "#import \"GPUImageLaplacianFilter.h\"\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageLaplacianFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n uniform sampler2D inputImageTexture;\n \n uniform mediump mat3 convolutionMatrix;\n \n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n void main()\n {\n     mediump vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;\n     mediump vec3 bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb;\n     mediump vec3 bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb;\n     mediump vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);\n     mediump vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;\n     mediump vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;\n     mediump vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;\n     mediump vec3 topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).rgb;\n     mediump vec3 topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).rgb;\n     \n     mediump vec3 resultColor = topLeftColor * convolutionMatrix[0][0] + topColor * convolutionMatrix[0][1] + topRightColor * convolutionMatrix[0][2];\n     resultColor += leftColor * convolutionMatrix[1][0] + centerColor.rgb * convolutionMatrix[1][1] + rightColor * convolutionMatrix[1][2];\n     resultColor += bottomLeftColor * convolutionMatrix[2][0] + bottomColor * convolutionMatrix[2][1] + bottomRightColor * convolutionMatrix[2][2];\n     \n     // Normalize the results to allow for negative gradients in the 0.0-1.0 colorspace\n     resultColor = resultColor + 0.5;\n     \n     gl_FragColor = vec4(resultColor, centerColor.a);\n }\n);\n#else\nNSString *const kGPUImageLaplacianFragmentShaderString = SHADER_STRING\n(\n uniform sampler2D inputImageTexture;\n \n uniform mat3 convolutionMatrix;\n \n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n void main()\n {\n     vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;\n     vec3 bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb;\n     vec3 bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb;\n     vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);\n     vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;\n     vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;\n     vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;\n     vec3 topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).rgb;\n     vec3 topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).rgb;\n     \n     vec3 resultColor = topLeftColor * convolutionMatrix[0][0] + topColor * 
convolutionMatrix[0][1] + topRightColor * convolutionMatrix[0][2];\n     resultColor += leftColor * convolutionMatrix[1][0] + centerColor.rgb * convolutionMatrix[1][1] + rightColor * convolutionMatrix[1][2];\n     resultColor += bottomLeftColor * convolutionMatrix[2][0] + bottomColor * convolutionMatrix[2][1] + bottomRightColor * convolutionMatrix[2][2];\n     \n     // Normalize the results to allow for negative gradients in the 0.0-1.0 colorspace\n     resultColor = resultColor + 0.5;\n\n     gl_FragColor = vec4(resultColor, centerColor.a);\n }\n);\n#endif\n\n@implementation GPUImageLaplacianFilter\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageLaplacianFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    GPUMatrix3x3 newConvolutionMatrix;\n    newConvolutionMatrix.one.one = 0.5;\n    newConvolutionMatrix.one.two = 1.0;\n    newConvolutionMatrix.one.three = 0.5;\n    \n    newConvolutionMatrix.two.one = 1.0;\n    newConvolutionMatrix.two.two = -6.0;\n    newConvolutionMatrix.two.three = 1.0;\n    \n    newConvolutionMatrix.three.one = 0.5;\n    newConvolutionMatrix.three.two = 1.0;\n    newConvolutionMatrix.three.three = 0.5;\n    \n    self.convolutionKernel = newConvolutionMatrix;\n    \n    return self;\n}\n\n@end\n"
  },
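One-line check of the kernel set up in the init method above: its entries sum to 4 x 0.5 + 4 x 1.0 - 6.0 = 0, so on a constant-color region the convolution produces exactly zero and the shader's "resultColor + 0.5" bias maps "no edge" to mid-gray, with positive and negative edge responses landing above and below 0.5 respectively.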
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLevelsFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n/**\n * Levels like Photoshop.\n *\n * The min, max, minOut and maxOut parameters are floats in the range [0, 1].\n * If you have parameters from Photoshop in the range [0, 255] you must first\n * convert them to be [0, 1].\n * The gamma/mid parameter is a float >= 0. This matches the value from Photoshop.\n *\n * If you want to apply levels to RGB as well as individual channels you need to use\n * this filter twice - first for the individual channels and then for all channels.\n */\n@interface GPUImageLevelsFilter : GPUImageFilter\n{\n    GLint minUniform;\n    GLint midUniform;\n    GLint maxUniform;\n    GLint minOutputUniform;\n    GLint maxOutputUniform;\n    \n    GPUVector3 minVector, midVector, maxVector, minOutputVector, maxOutputVector;\n}\n\n/** Set levels for the red channel */\n- (void)setRedMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut;\n\n- (void)setRedMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max;\n\n/** Set levels for the green channel */\n- (void)setGreenMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut;\n\n- (void)setGreenMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max;\n\n/** Set levels for the blue channel */\n- (void)setBlueMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut;\n\n- (void)setBlueMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max;\n\n/** Set levels for all channels at once */\n- (void)setMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut;\n- (void)setMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max;\n\n@end\n\n"
  },
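A hedged usage sketch of the conversion the header documentation calls for, with made-up Photoshop values (black point 40, gamma 1.2, white point 220) scaled from 0-255 down to the expected [0, 1] range; the gamma value is passed through unchanged.

GPUImageLevelsFilter *levelsFilter = [[GPUImageLevelsFilter alloc] init];

// Photoshop-style levels of 40 / 1.2 / 220: divide the 0-255 endpoints by 255 first.
[levelsFilter setMin:40.0/255.0 gamma:1.2 max:220.0/255.0 minOut:0.0 maxOut:1.0];

// Or set a single channel instead, e.g. lift only the red black point to 20/255.
[levelsFilter setRedMin:20.0/255.0 gamma:1.0 max:1.0 minOut:0.0 maxOut:1.0];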
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLevelsFilter.m",
    "content": "#import \"GPUImageLevelsFilter.h\"\n\n/*\n ** Gamma correction\n ** Details: http://blog.mouaif.org/2009/01/22/photoshop-gamma-correction-shader/\n */\n\n#define GammaCorrection(color, gamma)\t\t\t\t\t\t\t\tpow(color, 1.0 / gamma)\n\n/*\n ** Levels control (input (+gamma), output)\n ** Details: http://blog.mouaif.org/2009/01/28/levels-control-shader/\n */\n\n#define LevelsControlInputRange(color, minInput, maxInput)\t\t\t\tmin(max(color - minInput, vec3(0.0)) / (maxInput - minInput), vec3(1.0))\n#define LevelsControlInput(color, minInput, gamma, maxInput)\t\t\t\tGammaCorrection(LevelsControlInputRange(color, minInput, maxInput), gamma)\n#define LevelsControlOutputRange(color, minOutput, maxOutput) \t\t\tmix(minOutput, maxOutput, color)\n#define LevelsControl(color, minInput, gamma, maxInput, minOutput, maxOutput) \tLevelsControlOutputRange(LevelsControlInput(color, minInput, gamma, maxInput), minOutput, maxOutput)\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageLevelsFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform mediump vec3 levelMinimum;\n uniform mediump vec3 levelMiddle;\n uniform mediump vec3 levelMaximum;\n uniform mediump vec3 minOutput;\n uniform mediump vec3 maxOutput;\n \n void main()\n {\n     mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     \n     gl_FragColor = vec4(LevelsControl(textureColor.rgb, levelMinimum, levelMiddle, levelMaximum, minOutput, maxOutput), textureColor.a);\n }\n);\n#else\nNSString *const kGPUImageLevelsFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform vec3 levelMinimum;\n uniform vec3 levelMiddle;\n uniform vec3 levelMaximum;\n uniform vec3 minOutput;\n uniform vec3 maxOutput;\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     \n     gl_FragColor = vec4(LevelsControl(textureColor.rgb, levelMinimum, levelMiddle, levelMaximum, minOutput, maxOutput), textureColor.a);\n }\n);\n#endif\n\n@implementation GPUImageLevelsFilter\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageLevelsFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    minUniform = [filterProgram uniformIndex:@\"levelMinimum\"];\n    midUniform = [filterProgram uniformIndex:@\"levelMiddle\"];\n    maxUniform = [filterProgram uniformIndex:@\"levelMaximum\"];\n    minOutputUniform = [filterProgram uniformIndex:@\"minOutput\"];\n    maxOutputUniform = [filterProgram uniformIndex:@\"maxOutput\"];\n    \n    [self setRedMin:0.0 gamma:1.0 max:1.0 minOut:0.0 maxOut:1.0];\n    [self setGreenMin:0.0 gamma:1.0 max:1.0 minOut:0.0 maxOut:1.0];\n    [self setBlueMin:0.0 gamma:1.0 max:1.0 minOut:0.0 maxOut:1.0];\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Helpers\n\n- (void)updateUniforms {\n    [self setVec3:minVector forUniform:minUniform program:filterProgram];\n    [self setVec3:midVector forUniform:midUniform program:filterProgram];\n    [self setVec3:maxVector forUniform:maxUniform program:filterProgram];\n    [self setVec3:minOutputVector forUniform:minOutputUniform program:filterProgram];\n    [self setVec3:maxOutputVector forUniform:maxOutputUniform program:filterProgram];\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max 
minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut {\n    [self setRedMin:min gamma:mid max:max minOut:minOut maxOut:maxOut];\n    [self setGreenMin:min gamma:mid max:max minOut:minOut maxOut:maxOut];\n    [self setBlueMin:min gamma:mid max:max minOut:minOut maxOut:maxOut];\n}\n\n- (void)setMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max {\n    [self setMin:min gamma:mid max:max minOut:0.0 maxOut:1.0];\n}\n\n- (void)setRedMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut {\n    minVector.one = min;\n    midVector.one = mid;\n    maxVector.one = max;\n    minOutputVector.one = minOut;\n    maxOutputVector.one = maxOut;\n    \n    [self updateUniforms];\n}\n\n- (void)setRedMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max {\n    [self setRedMin:min gamma:mid max:max minOut:0.0 maxOut:1.0];\n}\n\n- (void)setGreenMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut {\n    minVector.two = min;\n    midVector.two = mid;\n    maxVector.two = max;\n    minOutputVector.two = minOut;\n    maxOutputVector.two = maxOut;\n    \n    [self updateUniforms];\n}\n\n- (void)setGreenMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max {\n    [self setGreenMin:min gamma:mid max:max minOut:0.0 maxOut:1.0];\n}\n\n- (void)setBlueMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut {\n    minVector.three = min;\n    midVector.three = mid;\n    maxVector.three = max;\n    minOutputVector.three = minOut;\n    maxOutputVector.three = maxOut;\n    \n    [self updateUniforms];\n}\n\n- (void)setBlueMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max {\n    [self setBlueMin:min gamma:mid max:max minOut:0.0 maxOut:1.0];\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLightenBlendFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n/// Blends two images by taking the maximum value of each color component between the images\n@interface GPUImageLightenBlendFilter : GPUImageTwoInputFilter\n{\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLightenBlendFilter.m",
    "content": "#import \"GPUImageLightenBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageLightenBlendFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n    lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n    lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);\n    \n    gl_FragColor = max(textureColor, textureColor2);\n }\n);\n#else\nNSString *const kGPUImageLightenBlendFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     gl_FragColor = max(textureColor, textureColor2);\n }\n );\n#endif\n\n@implementation GPUImageLightenBlendFilter\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageLightenBlendFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLineGenerator.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageLineGenerator : GPUImageFilter\n{\n    GLint lineWidthUniform, lineColorUniform;\n    GLfloat *lineCoordinates;\n}\n\n// The width of the displayed lines, in pixels. The default is 1.\n@property(readwrite, nonatomic) CGFloat lineWidth;\n\n// The color of the lines is specified using individual red, green, and blue components (normalized to 1.0). The default is green: (0.0, 1.0, 0.0).\n- (void)setLineColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;\n\n// Rendering\n- (void)renderLinesFromArray:(GLfloat *)lineSlopeAndIntercepts count:(NSUInteger)numberOfLines frameTime:(CMTime)frameTime;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLineGenerator.m",
    "content": "#import \"GPUImageLineGenerator.h\"\n\nNSString *const kGPUImageLineGeneratorVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n \n void main()\n {\n     gl_Position = position;\n }\n);\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageLineGeneratorFragmentShaderString = SHADER_STRING\n(\n uniform lowp vec3 lineColor;\n \n void main()\n {\n     gl_FragColor = vec4(lineColor, 1.0);\n }\n);\n#else\nNSString *const kGPUImageLineGeneratorFragmentShaderString = SHADER_STRING\n(\n uniform vec3 lineColor;\n \n void main()\n {\n     gl_FragColor = vec4(lineColor, 1.0);\n }\n);\n#endif\n\n@interface GPUImageLineGenerator()\n\n- (void)generateLineCoordinates;\n\n@end\n\n@implementation GPUImageLineGenerator\n\n@synthesize lineWidth = _lineWidth;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithVertexShaderFromString:kGPUImageLineGeneratorVertexShaderString fragmentShaderFromString:kGPUImageLineGeneratorFragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    runSynchronouslyOnVideoProcessingQueue(^{\n        lineWidthUniform = [filterProgram uniformIndex:@\"lineWidth\"];\n        lineColorUniform = [filterProgram uniformIndex:@\"lineColor\"];\n        \n        self.lineWidth = 1.0;\n        [self setLineColorRed:0.0 green:1.0 blue:0.0];\n    });\n    \n    return self;\n}\n\n- (void)dealloc\n{\n    if (lineCoordinates)\n    {\n        free(lineCoordinates);\n    }\n}\n\n#pragma mark -\n#pragma mark Rendering\n\n- (void)generateLineCoordinates;\n{\n    lineCoordinates = calloc(1024 * 4, sizeof(GLfloat));\n}\n\n- (void)renderLinesFromArray:(GLfloat *)lineSlopeAndIntercepts count:(NSUInteger)numberOfLines frameTime:(CMTime)frameTime;\n{\n    if (self.preventRendering)\n    {\n        return;\n    }\n    \n    if (lineCoordinates == NULL)\n    {\n        [self generateLineCoordinates];\n    }\n    \n    // Iterate through and generate vertices from the slopes and intercepts\n    NSUInteger currentVertexIndex = 0;\n    NSUInteger currentLineIndex = 0;\n    NSUInteger maxLineIndex = numberOfLines *2;\n    while(currentLineIndex < maxLineIndex)\n    {\n        GLfloat slope = lineSlopeAndIntercepts[currentLineIndex++];\n        GLfloat intercept = lineSlopeAndIntercepts[currentLineIndex++];\n        \n        if (slope > 9000.0) // Vertical line\n        {\n            lineCoordinates[currentVertexIndex++] = intercept;\n            lineCoordinates[currentVertexIndex++] = -1.0;\n            lineCoordinates[currentVertexIndex++] = intercept;\n            lineCoordinates[currentVertexIndex++] = 1.0;\n        }\n        else\n        {\n            lineCoordinates[currentVertexIndex++] = -1.0;\n            lineCoordinates[currentVertexIndex++] = slope * -1.0 + intercept;\n            lineCoordinates[currentVertexIndex++] = 1.0;\n            lineCoordinates[currentVertexIndex++] = slope * 1.0 + intercept;\n        }\n    }\n    \n    runSynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext setActiveShaderProgram:filterProgram];\n        \n        outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];\n        [outputFramebuffer activateFramebuffer];\n        \n        glClearColor(0.0, 0.0, 0.0, 0.0);\n        glClear(GL_COLOR_BUFFER_BIT);\n        \n        glBlendEquation(GL_FUNC_ADD);\n        glBlendFunc(GL_ONE, GL_ONE);\n        glEnable(GL_BLEND);\n        \n 
       glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, lineCoordinates);\n        glDrawArrays(GL_LINES, 0, ((unsigned int)numberOfLines * 2));\n        \n        glDisable(GL_BLEND);\n\n        [self informTargetsAboutNewFrameAtTime:frameTime];\n    });\n}\n\n- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;\n{\n    // Prevent rendering of the frame by normal means\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setLineWidth:(CGFloat)newValue;\n{\n    _lineWidth = newValue;\n    [GPUImageContext setActiveShaderProgram:filterProgram];\n    glLineWidth(newValue);\n}\n\n- (void)setLineColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;\n{\n    GPUVector3 lineColor = {redComponent, greenComponent, blueComponent};\n    \n    [self setVec3:lineColor forUniform:lineColorUniform program:filterProgram];\n}\n\n\n@end\n"
  },
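A hedged usage sketch of renderLinesFromArray:count:frameTime: based on the vertex generation above: each line is a (slope, intercept) pair in normalized -1.0 to 1.0 coordinates, and a slope greater than 9000 marks a vertical line whose intercept is taken as the x position. The lineGenerator variable and the choice of frame time are illustrative only.

GLfloat lines[4] = {
    0.5,    0.0,    // y = 0.5 * x + 0.0, drawn from x = -1.0 to x = 1.0
    9001.0, 0.25    // slope > 9000.0: treated as a vertical line at x = 0.25
};

[lineGenerator setLineColorRed:1.0 green:0.0 blue:0.0];
[lineGenerator renderLinesFromArray:lines count:2 frameTime:kCMTimeIndefinite];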
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLinearBurnBlendFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageLinearBurnBlendFilter : GPUImageTwoInputFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLinearBurnBlendFilter.m",
    "content": "#import \"GPUImageLinearBurnBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageLinearBurnBlendFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     mediump vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     gl_FragColor = vec4(clamp(textureColor.rgb + textureColor2.rgb - vec3(1.0), vec3(0.0), vec3(1.0)), textureColor.a);\n }\n);\n#else\nNSString *const kGPUImageLinearBurnBlendFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     gl_FragColor = vec4(clamp(textureColor.rgb + textureColor2.rgb - vec3(1.0), vec3(0.0), vec3(1.0)), textureColor.a);\n }\n);\n#endif\n\n@implementation GPUImageLinearBurnBlendFilter\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageLinearBurnBlendFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLocalBinaryPatternFilter.h",
    "content": "#import \"GPUImage3x3TextureSamplingFilter.h\"\n\n@interface GPUImageLocalBinaryPatternFilter : GPUImage3x3TextureSamplingFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLocalBinaryPatternFilter.m",
    "content": "#import \"GPUImageLocalBinaryPatternFilter.h\"\n\n// This is based on \"Accelerating image recognition on mobile devices using GPGPU\" by Miguel Bordallo Lopez, Henri Nykanen, Jari Hannuksela, Olli Silven and Markku Vehvilainen\n// http://www.ee.oulu.fi/~jhannuks/publications/SPIE2011a.pdf\n\n// Right pixel is the most significant bit, traveling clockwise to get to the upper right, which is the least significant\n// If the external pixel is greater than or equal to the center, set to 1, otherwise 0\n//\n// 2 1 0\n// 3   7\n// 4 5 6\n\n// 01101101\n// 76543210\n\n@implementation GPUImageLocalBinaryPatternFilter\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageLocalBinaryPatternFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     lowp float centerIntensity = texture2D(inputImageTexture, textureCoordinate).r;\n     lowp float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;\n     lowp float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;\n     lowp float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;\n     lowp float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;\n     lowp float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;\n     lowp float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;\n     lowp float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;\n     lowp float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;\n\n     lowp float byteTally = 1.0 / 255.0 * step(centerIntensity, topRightIntensity);\n     byteTally += 2.0 / 255.0 * step(centerIntensity, topIntensity);\n     byteTally += 4.0 / 255.0 * step(centerIntensity, topLeftIntensity);\n     byteTally += 8.0 / 255.0 * step(centerIntensity, leftIntensity);\n     byteTally += 16.0 / 255.0 * step(centerIntensity, bottomLeftIntensity);\n     byteTally += 32.0 / 255.0 * step(centerIntensity, bottomIntensity);\n     byteTally += 64.0 / 255.0 * step(centerIntensity, bottomRightIntensity);\n     byteTally += 128.0 / 255.0 * step(centerIntensity, rightIntensity);\n          \n     // TODO: Replace the above with a dot product and two vec4s\n     // TODO: Apply step to a matrix, rather than individually\n     \n     gl_FragColor = vec4(byteTally, byteTally, byteTally, 1.0);\n }\n);\n#else\nNSString *const kGPUImageLocalBinaryPatternFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     float centerIntensity = texture2D(inputImageTexture, textureCoordinate).r;\n     float bottomLeftIntensity = texture2D(inputImageTexture, 
bottomLeftTextureCoordinate).r;\n     float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;\n     float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;\n     float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;\n     float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;\n     float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;\n     float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;\n     float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;\n     \n     float byteTally = 1.0 / 255.0 * step(centerIntensity, topRightIntensity);\n     byteTally += 2.0 / 255.0 * step(centerIntensity, topIntensity);\n     byteTally += 4.0 / 255.0 * step(centerIntensity, topLeftIntensity);\n     byteTally += 8.0 / 255.0 * step(centerIntensity, leftIntensity);\n     byteTally += 16.0 / 255.0 * step(centerIntensity, bottomLeftIntensity);\n     byteTally += 32.0 / 255.0 * step(centerIntensity, bottomIntensity);\n     byteTally += 64.0 / 255.0 * step(centerIntensity, bottomRightIntensity);\n     byteTally += 128.0 / 255.0 * step(centerIntensity, rightIntensity);\n     \n     // TODO: Replace the above with a dot product and two vec4s\n     // TODO: Apply step to a matrix, rather than individually\n     \n     gl_FragColor = vec4(byteTally, byteTally, byteTally, 1.0);\n }\n);\n#endif\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageLocalBinaryPatternFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end\n"
  },
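For reference, the bit packing described in the comments above (right pixel as the most significant bit, counting down clockwise until the upper-right pixel supplies the least significant bit) can be written out on the CPU. A hedged C sketch that mirrors what the shader accumulates into byteTally, before the division by 255.0 that stores the code in a color channel:

// Bit layout from the comment above:   2 1 0
//                                      3   7
//                                      4 5 6
static unsigned char localBinaryPattern(float center,
                                        float topLeft,    float top,    float topRight,
                                        float left,                     float right,
                                        float bottomLeft, float bottom, float bottomRight)
{
    unsigned char code = 0;
    code |= (topRight    >= center) << 0;
    code |= (top         >= center) << 1;
    code |= (topLeft     >= center) << 2;
    code |= (left        >= center) << 3;
    code |= (bottomLeft  >= center) << 4;
    code |= (bottom      >= center) << 5;
    code |= (bottomRight >= center) << 6;
    code |= (right       >= center) << 7;
    return code;
}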
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLookupFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageLookupFilter : GPUImageTwoInputFilter\n{\n    GLint intensityUniform;\n}\n\n// How To Use:\n// 1) Use your favourite photo editing application to apply a filter to lookup.png from GPUImage/framework/Resources.\n// For this to work properly each pixel color must not depend on other pixels (e.g. blur will not work).\n// If you need more complex filter you can create as many lookup tables as required.\n// E.g. color_balance_lookup_1.png -> GPUImageGaussianBlurFilter -> color_balance_lookup_2.png\n// 2) Use you new lookup.png file as a second input for GPUImageLookupFilter.\n\n// See GPUImageAmatorkaFilter, GPUImageMissEtikateFilter, and GPUImageSoftEleganceFilter for example.\n\n// Additional Info:\n// Lookup texture is organised as 8x8 quads of 64x64 pixels representing all possible RGB colors:\n//for (int by = 0; by < 8; by++) {\n//    for (int bx = 0; bx < 8; bx++) {\n//        for (int g = 0; g < 64; g++) {\n//            for (int r = 0; r < 64; r++) {\n//                image.setPixel(r + bx * 64, g + by * 64, qRgb((int)(r * 255.0 / 63.0 + 0.5),\n//                                                              (int)(g * 255.0 / 63.0 + 0.5),\n//                                                              (int)((bx + by * 8.0) * 255.0 / 63.0 + 0.5)));\n//            }\n//        }\n//    }\n//}\n\n// Opacity/intensity of lookup filter ranges from 0.0 to 1.0, with 1.0 as the normal setting\n@property(readwrite, nonatomic) CGFloat intensity;\n\n@end\n"
  },
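A hedged wiring sketch for the two-input setup the header describes, assuming the GPUImagePicture source class from the rest of the framework; sourcePicture and the lookup image name are placeholders. The graded lookup image is added as the second input, and intensity blends between the original and remapped colors.

UIImage *lookupImage = [UIImage imageNamed:@"lookup_graded.png"]; // an edited copy of lookup.png
GPUImagePicture *lookupSource = [[GPUImagePicture alloc] initWithImage:lookupImage];

GPUImageLookupFilter *lookupFilter = [[GPUImageLookupFilter alloc] init];
lookupFilter.intensity = 0.8; // 80% toward the graded colors, 20% original

[sourcePicture addTarget:lookupFilter]; // first input: the image being filtered
[lookupSource addTarget:lookupFilter];  // second input: the lookup table
[lookupSource processImage];
[sourcePicture processImage];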
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLookupFilter.m",
    "content": "#import \"GPUImageLookupFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageLookupFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2; // TODO: This is not used\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2; // lookup texture\n \n uniform lowp float intensity;\n\n void main()\n {\n     highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     \n     highp float blueColor = textureColor.b * 63.0;\n     \n     highp vec2 quad1;\n     quad1.y = floor(floor(blueColor) / 8.0);\n     quad1.x = floor(blueColor) - (quad1.y * 8.0);\n     \n     highp vec2 quad2;\n     quad2.y = floor(ceil(blueColor) / 8.0);\n     quad2.x = ceil(blueColor) - (quad2.y * 8.0);\n     \n     highp vec2 texPos1;\n     texPos1.x = (quad1.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.r);\n     texPos1.y = (quad1.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.g);\n     \n     highp vec2 texPos2;\n     texPos2.x = (quad2.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.r);\n     texPos2.y = (quad2.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.g);\n     \n     lowp vec4 newColor1 = texture2D(inputImageTexture2, texPos1);\n     lowp vec4 newColor2 = texture2D(inputImageTexture2, texPos2);\n     \n     lowp vec4 newColor = mix(newColor1, newColor2, fract(blueColor));\n     gl_FragColor = mix(textureColor, vec4(newColor.rgb, textureColor.w), intensity);\n }\n);\n#else\nNSString *const kGPUImageLookupFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2; // TODO: This is not used\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2; // lookup texture\n \n uniform float intensity;\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     \n     float blueColor = textureColor.b * 63.0;\n     \n     vec2 quad1;\n     quad1.y = floor(floor(blueColor) / 8.0);\n     quad1.x = floor(blueColor) - (quad1.y * 8.0);\n     \n     vec2 quad2;\n     quad2.y = floor(ceil(blueColor) / 8.0);\n     quad2.x = ceil(blueColor) - (quad2.y * 8.0);\n     \n     vec2 texPos1;\n     texPos1.x = (quad1.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.r);\n     texPos1.y = (quad1.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.g);\n     \n     vec2 texPos2;\n     texPos2.x = (quad2.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.r);\n     texPos2.y = (quad2.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.g);\n     \n     vec4 newColor1 = texture2D(inputImageTexture2, texPos1);\n     vec4 newColor2 = texture2D(inputImageTexture2, texPos2);\n     \n     vec4 newColor = mix(newColor1, newColor2, fract(blueColor));\n     gl_FragColor = mix(textureColor, vec4(newColor.rgb, textureColor.w), intensity);\n }\n);\n#endif\n\n@implementation GPUImageLookupFilter\n\n@synthesize intensity = _intensity;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageLookupFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n\n    intensityUniform = [filterProgram uniformIndex:@\"intensity\"];\n    self.intensity = 1.0f;\n\n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setIntensity:(CGFloat)intensity\n{\n    _intensity = intensity;\n    \n    [self 
setFloat:_intensity forUniform:intensityUniform program:filterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLowPassFilter.h",
    "content": "#import \"GPUImageFilterGroup.h\"\n#import \"GPUImageBuffer.h\"\n#import \"GPUImageDissolveBlendFilter.h\"\n\n@interface GPUImageLowPassFilter : GPUImageFilterGroup\n{\n    GPUImageBuffer *bufferFilter;\n    GPUImageDissolveBlendFilter *dissolveBlendFilter;\n}\n\n// This controls the degree by which the previous accumulated frames are blended with the current one. This ranges from 0.0 to 1.0, with a default of 0.5.\n@property(readwrite, nonatomic) CGFloat filterStrength;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLowPassFilter.m",
    "content": "#import \"GPUImageLowPassFilter.h\"\n\n@implementation GPUImageLowPassFilter\n\n@synthesize filterStrength;\n\n- (id)init;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n    // Take in the frame and blend it with the previous one\n    dissolveBlendFilter = [[GPUImageDissolveBlendFilter alloc] init];\n    [self addFilter:dissolveBlendFilter];\n    \n    // Buffer the result to be fed back into the blend\n    bufferFilter = [[GPUImageBuffer alloc] init];\n    [self addFilter:bufferFilter];\n    \n    // Texture location 0 needs to be the original image for the dissolve blend\n    [bufferFilter addTarget:dissolveBlendFilter atTextureLocation:1];\n    [dissolveBlendFilter addTarget:bufferFilter];\n    \n    [dissolveBlendFilter disableSecondFrameCheck];\n    \n    // To prevent double updating of this filter, disable updates from the sharp image side\n    //    self.inputFilterToIgnoreForUpdates = unsharpMaskFilter;\n    \n    self.initialFilters = [NSArray arrayWithObject:dissolveBlendFilter];\n    self.terminalFilter = dissolveBlendFilter;\n    \n    self.filterStrength = 0.5;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setFilterStrength:(CGFloat)newValue;\n{\n    dissolveBlendFilter.mix = newValue;\n}\n\n- (CGFloat)filterStrength;\n{\n    return dissolveBlendFilter.mix;\n}\n\n- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;\n{\n    [self.terminalFilter addTarget:newTarget atTextureLocation:textureLocation];\n    //if use GPUImagePipline,will cause self.termainlFilter removeAllTargets,so need add bufferFilter back\n    if (self.terminalFilter == dissolveBlendFilter && ![self.terminalFilter.targets containsObject:bufferFilter]) {\n        [self.terminalFilter addTarget:bufferFilter atTextureLocation:1];\n    }\n}\n\n@end\n"
  },
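Reading the filter graph above together with the dissolve blend it wraps: each incoming frame is mixed against the buffered previous output with weight filterStrength, so (assuming the dissolve mix weights the buffered input by the strength s) the accumulated output behaves like a first-order recursive low pass, y[n] = (1 - s) * x[n] + s * y[n-1], where x[n] is the current frame; s = 0.0 passes frames through untouched and values near 1.0 produce long motion trails.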
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLuminanceRangeFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageLuminanceRangeFilter : GPUImageFilter\n{\n    GLint rangeReductionUniform;\n}\n\n/** The degree to reduce the luminance range, from 0.0 to 1.0. Default is 0.6.\n */\n@property(readwrite, nonatomic) CGFloat rangeReductionFactor;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLuminanceRangeFilter.m",
    "content": "#import \"GPUImageLuminanceRangeFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageLuminanceRangeFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform lowp float rangeReduction;\n \n // Values from \"Graphics Shaders: Theory and Practice\" by Bailey and Cunningham\n const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);\n \n void main()\n {\n     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     mediump float luminance = dot(textureColor.rgb, luminanceWeighting);\n     mediump float luminanceRatio = ((0.5 - luminance) * rangeReduction);\n     \n     gl_FragColor = vec4((textureColor.rgb) + (luminanceRatio), textureColor.w);\n }\n);\n#else\nNSString *const kGPUImageLuminanceRangeFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform float rangeReduction;\n \n // Values from \"Graphics Shaders: Theory and Practice\" by Bailey and Cunningham\n const vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     float luminance = dot(textureColor.rgb, luminanceWeighting);\n     float luminanceRatio = ((0.5 - luminance) * rangeReduction);\n     \n     gl_FragColor = vec4((textureColor.rgb) + (luminanceRatio), textureColor.w);\n }\n);\n#endif\n\n@implementation GPUImageLuminanceRangeFilter\n\n@synthesize rangeReductionFactor = _rangeReductionFactor;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageLuminanceRangeFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    rangeReductionUniform = [filterProgram uniformIndex:@\"rangeReduction\"];\n    self.rangeReductionFactor = 0.6;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setRangeReductionFactor:(CGFloat)newValue;\n{\n    _rangeReductionFactor = newValue;\n    \n    [self setFloat:_rangeReductionFactor forUniform:rangeReductionUniform program:filterProgram];\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLuminanceThresholdFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n/** Pixels with a luminance above the threshold will appear white, and those below will be black\n */\n@interface GPUImageLuminanceThresholdFilter : GPUImageFilter\n{\n    GLint thresholdUniform;\n}\n\n/** Anything above this luminance will be white, and anything below black. Ranges from 0.0 to 1.0, with 0.5 as the default\n */\n@property(readwrite, nonatomic) CGFloat threshold; \n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLuminanceThresholdFilter.m",
    "content": "#import \"GPUImageLuminanceThresholdFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageLuminanceThresholdFragmentShaderString = SHADER_STRING\n( \n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform highp float threshold;\n \n const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n\n void main()\n {\n     highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     highp float luminance = dot(textureColor.rgb, W);\n     highp float thresholdResult = step(threshold, luminance);\n     \n     gl_FragColor = vec4(vec3(thresholdResult), textureColor.w);\n }\n);\n#else\nNSString *const kGPUImageLuminanceThresholdFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform float threshold;\n \n const vec3 W = vec3(0.2125, 0.7154, 0.0721);\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     float luminance = dot(textureColor.rgb, W);\n     float thresholdResult = step(threshold, luminance);\n     \n     gl_FragColor = vec4(vec3(thresholdResult), textureColor.w);\n }\n);\n#endif\n\n@implementation GPUImageLuminanceThresholdFilter\n\n@synthesize threshold = _threshold;\n\n#pragma mark -\n#pragma mark Initialization\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageLuminanceThresholdFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    thresholdUniform = [filterProgram uniformIndex:@\"threshold\"];\n    self.threshold = 0.5;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setThreshold:(CGFloat)newValue;\n{\n    _threshold = newValue;\n    \n    [self setFloat:_threshold forUniform:thresholdUniform program:filterProgram];\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLuminosity.h",
    "content": "#import \"GPUImageAverageColor.h\"\n\n@interface GPUImageLuminosity : GPUImageAverageColor\n{\n    GLProgram *secondFilterProgram;\n    GLint secondFilterPositionAttribute, secondFilterTextureCoordinateAttribute;\n    GLint secondFilterInputTextureUniform, secondFilterInputTextureUniform2;\n    GLint secondFilterTexelWidthUniform, secondFilterTexelHeightUniform;\n}\n\n// This block is called on the completion of color averaging for a frame\n@property(nonatomic, copy) void(^luminosityProcessingFinishedBlock)(CGFloat luminosity, CMTime frameTime);\n\n- (void)extractLuminosityAtFrameTime:(CMTime)frameTime;\n- (void)initializeSecondaryAttributes;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLuminosity.m",
    "content": "#import \"GPUImageLuminosity.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageInitialLuminosityFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n uniform sampler2D inputImageTexture;\n \n varying highp vec2 outputTextureCoordinate;\n \n varying highp vec2 upperLeftInputTextureCoordinate;\n varying highp vec2 upperRightInputTextureCoordinate;\n varying highp vec2 lowerLeftInputTextureCoordinate;\n varying highp vec2 lowerRightInputTextureCoordinate;\n \n const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n\n void main()\n {\n     highp float upperLeftLuminance = dot(texture2D(inputImageTexture, upperLeftInputTextureCoordinate).rgb, W);\n     highp float upperRightLuminance = dot(texture2D(inputImageTexture, upperRightInputTextureCoordinate).rgb, W);\n     highp float lowerLeftLuminance = dot(texture2D(inputImageTexture, lowerLeftInputTextureCoordinate).rgb, W);\n     highp float lowerRightLuminance = dot(texture2D(inputImageTexture, lowerRightInputTextureCoordinate).rgb, W);\n\n     highp float luminosity = 0.25 * (upperLeftLuminance + upperRightLuminance + lowerLeftLuminance + lowerRightLuminance);\n     gl_FragColor = vec4(luminosity, luminosity, luminosity, 1.0);\n }\n);\n\nNSString *const kGPUImageLuminosityFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n uniform sampler2D inputImageTexture;\n \n varying highp vec2 outputTextureCoordinate;\n \n varying highp vec2 upperLeftInputTextureCoordinate;\n varying highp vec2 upperRightInputTextureCoordinate;\n varying highp vec2 lowerLeftInputTextureCoordinate;\n varying highp vec2 lowerRightInputTextureCoordinate;\n \n void main()\n {\n     highp float upperLeftLuminance = texture2D(inputImageTexture, upperLeftInputTextureCoordinate).r;\n     highp float upperRightLuminance = texture2D(inputImageTexture, upperRightInputTextureCoordinate).r;\n     highp float lowerLeftLuminance = texture2D(inputImageTexture, lowerLeftInputTextureCoordinate).r;\n     highp float lowerRightLuminance = texture2D(inputImageTexture, lowerRightInputTextureCoordinate).r;\n     \n     highp float luminosity = 0.25 * (upperLeftLuminance + upperRightLuminance + lowerLeftLuminance + lowerRightLuminance);\n     gl_FragColor = vec4(luminosity, luminosity, luminosity, 1.0);\n }\n);\n#else\nNSString *const kGPUImageInitialLuminosityFragmentShaderString = SHADER_STRING\n(\n uniform sampler2D inputImageTexture;\n \n varying vec2 outputTextureCoordinate;\n \n varying vec2 upperLeftInputTextureCoordinate;\n varying vec2 upperRightInputTextureCoordinate;\n varying vec2 lowerLeftInputTextureCoordinate;\n varying vec2 lowerRightInputTextureCoordinate;\n \n const vec3 W = vec3(0.2125, 0.7154, 0.0721);\n \n void main()\n {\n     float upperLeftLuminance = dot(texture2D(inputImageTexture, upperLeftInputTextureCoordinate).rgb, W);\n     float upperRightLuminance = dot(texture2D(inputImageTexture, upperRightInputTextureCoordinate).rgb, W);\n     float lowerLeftLuminance = dot(texture2D(inputImageTexture, lowerLeftInputTextureCoordinate).rgb, W);\n     float lowerRightLuminance = dot(texture2D(inputImageTexture, lowerRightInputTextureCoordinate).rgb, W);\n     \n     float luminosity = 0.25 * (upperLeftLuminance + upperRightLuminance + lowerLeftLuminance + lowerRightLuminance);\n     gl_FragColor = vec4(luminosity, luminosity, luminosity, 1.0);\n }\n);\n\nNSString *const kGPUImageLuminosityFragmentShaderString = SHADER_STRING\n(\n uniform sampler2D inputImageTexture;\n \n varying vec2 
outputTextureCoordinate;\n \n varying vec2 upperLeftInputTextureCoordinate;\n varying vec2 upperRightInputTextureCoordinate;\n varying vec2 lowerLeftInputTextureCoordinate;\n varying vec2 lowerRightInputTextureCoordinate;\n \n void main()\n {\n     float upperLeftLuminance = texture2D(inputImageTexture, upperLeftInputTextureCoordinate).r;\n     float upperRightLuminance = texture2D(inputImageTexture, upperRightInputTextureCoordinate).r;\n     float lowerLeftLuminance = texture2D(inputImageTexture, lowerLeftInputTextureCoordinate).r;\n     float lowerRightLuminance = texture2D(inputImageTexture, lowerRightInputTextureCoordinate).r;\n     \n     float luminosity = 0.25 * (upperLeftLuminance + upperRightLuminance + lowerLeftLuminance + lowerRightLuminance);\n     gl_FragColor = vec4(luminosity, luminosity, luminosity, 1.0);\n }\n);\n#endif\n\n@implementation GPUImageLuminosity\n\n@synthesize luminosityProcessingFinishedBlock = _luminosityProcessingFinishedBlock;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithVertexShaderFromString:kGPUImageColorAveragingVertexShaderString fragmentShaderFromString:kGPUImageInitialLuminosityFragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    texelWidthUniform = [filterProgram uniformIndex:@\"texelWidth\"];\n    texelHeightUniform = [filterProgram uniformIndex:@\"texelHeight\"];\n        \n    __unsafe_unretained GPUImageLuminosity *weakSelf = self;\n    [self setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime) {\n        [weakSelf extractLuminosityAtFrameTime:frameTime];\n    }];\n\n    runSynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext useImageProcessingContext];\n        \n        secondFilterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageColorAveragingVertexShaderString fragmentShaderString:kGPUImageLuminosityFragmentShaderString];\n        \n        if (!secondFilterProgram.initialized)\n        {\n            [self initializeSecondaryAttributes];\n            \n            if (![secondFilterProgram link])\n            {\n                NSString *progLog = [secondFilterProgram programLog];\n                NSLog(@\"Program link log: %@\", progLog);\n                NSString *fragLog = [secondFilterProgram fragmentShaderLog];\n                NSLog(@\"Fragment shader compile log: %@\", fragLog);\n                NSString *vertLog = [secondFilterProgram vertexShaderLog];\n                NSLog(@\"Vertex shader compile log: %@\", vertLog);\n                filterProgram = nil;\n                NSAssert(NO, @\"Filter shader link failed\");\n            }\n        }\n        \n        secondFilterPositionAttribute = [secondFilterProgram attributeIndex:@\"position\"];\n        secondFilterTextureCoordinateAttribute = [secondFilterProgram attributeIndex:@\"inputTextureCoordinate\"];\n        secondFilterInputTextureUniform = [secondFilterProgram uniformIndex:@\"inputImageTexture\"]; // This does assume a name of \"inputImageTexture\" for the fragment shader\n        secondFilterInputTextureUniform2 = [secondFilterProgram uniformIndex:@\"inputImageTexture2\"]; // This does assume a name of \"inputImageTexture2\" for second input texture in the fragment shader\n        \n        secondFilterTexelWidthUniform = [secondFilterProgram uniformIndex:@\"texelWidth\"];\n        secondFilterTexelHeightUniform = [secondFilterProgram uniformIndex:@\"texelHeight\"];\n\n        [GPUImageContext 
setActiveShaderProgram:secondFilterProgram];\n        \n        glEnableVertexAttribArray(secondFilterPositionAttribute);\n        glEnableVertexAttribArray(secondFilterTextureCoordinateAttribute);\n    });\n\n    return self;\n}\n\n- (void)initializeSecondaryAttributes;\n{\n    [secondFilterProgram addAttribute:@\"position\"];\n\t[secondFilterProgram addAttribute:@\"inputTextureCoordinate\"];\n}\n\n/*\n- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;\n{\n    if (self.preventRendering)\n    {\n        [firstInputFramebuffer unlock];\n        return;\n    }\n    \n    // Do an initial render pass that both convert to luminance and reduces\n    [GPUImageContext setActiveShaderProgram:filterProgram];\n    \n    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);\n    glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);\n\n    GLuint currentFramebuffer = [[stageFramebuffers objectAtIndex:0] intValue];\n    glBindFramebuffer(GL_FRAMEBUFFER, currentFramebuffer);\n    \n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n    CGSize currentStageSize = [[stageSizes objectAtIndex:0] CGSizeValue];\n#else\n    NSSize currentStageSize = [[stageSizes objectAtIndex:0] sizeValue];\n#endif\n    glViewport(0, 0, (int)currentStageSize.width, (int)currentStageSize.height);\n\n    GLuint currentTexture = [firstInputFramebuffer texture];\n\n    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);\n    glClear(GL_COLOR_BUFFER_BIT);\n    \n    glActiveTexture(GL_TEXTURE2);\n    glBindTexture(GL_TEXTURE_2D, currentTexture);\n    \n    glUniform1i(filterInputTextureUniform, 2);\n    \n    glUniform1f(texelWidthUniform, 0.5 / currentStageSize.width);\n    glUniform1f(texelHeightUniform, 0.5 / currentStageSize.height);\n    \n    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\n    \n    currentTexture = [[stageTextures objectAtIndex:0] intValue];\n\n    // Just perform reductions from this point on\n    [GPUImageContext setActiveShaderProgram:secondFilterProgram];\n    glVertexAttribPointer(secondFilterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);\n    glVertexAttribPointer(secondFilterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);\n\n    NSUInteger numberOfStageFramebuffers = [stageFramebuffers count];\n    for (NSUInteger currentStage = 1; currentStage < numberOfStageFramebuffers; currentStage++)\n    {\n        currentFramebuffer = [[stageFramebuffers objectAtIndex:currentStage] intValue];\n        glBindFramebuffer(GL_FRAMEBUFFER, currentFramebuffer);\n        \n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n        currentStageSize = [[stageSizes objectAtIndex:currentStage] CGSizeValue];\n#else\n        currentStageSize = [[stageSizes objectAtIndex:currentStage] sizeValue];\n#endif\n        glViewport(0, 0, (int)currentStageSize.width, (int)currentStageSize.height);\n        \n        glClearColor(0.0f, 0.0f, 0.0f, 1.0f);\n        glClear(GL_COLOR_BUFFER_BIT);\n        \n        glActiveTexture(GL_TEXTURE2);\n        glBindTexture(GL_TEXTURE_2D, currentTexture);\n        \n        glUniform1i(secondFilterInputTextureUniform, 2);\n        \n        glUniform1f(secondFilterTexelWidthUniform, 0.5 / currentStageSize.width);\n        glUniform1f(secondFilterTexelHeightUniform, 0.5 / currentStageSize.height);\n        \n        glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\n        \n        currentTexture = [[stageTextures objectAtIndex:currentStage] intValue];\n        \n//        
NSUInteger totalBytesForImage = (int)currentStageSize.width * (int)currentStageSize.height * 4;\n//        GLubyte *rawImagePixels2 = (GLubyte *)malloc(totalBytesForImage);\n//        glReadPixels(0, 0, (int)currentStageSize.width, (int)currentStageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels2);\n//        CGDataProviderRef dataProvider = CGDataProviderCreateWithData(NULL, rawImagePixels2, totalBytesForImage, NULL);\n//        CGColorSpaceRef defaultRGBColorSpace = CGColorSpaceCreateDeviceRGB();\n//\n//        CGFloat currentRedTotal = 0.0f, currentGreenTotal = 0.0f, currentBlueTotal = 0.0f, currentAlphaTotal = 0.0f;\n//        NSUInteger totalNumberOfPixels = totalBytesForImage / 4;\n//\n//        for (NSUInteger currentPixel = 0; currentPixel < totalNumberOfPixels; currentPixel++)\n//        {\n//            currentRedTotal += (CGFloat)rawImagePixels2[(currentPixel * 4)] / 255.0f;\n//            currentGreenTotal += (CGFloat)rawImagePixels2[(currentPixel * 4) + 1] / 255.0f;\n//            currentBlueTotal += (CGFloat)rawImagePixels2[(currentPixel * 4 + 2)] / 255.0f;\n//            currentAlphaTotal += (CGFloat)rawImagePixels2[(currentPixel * 4) + 3] / 255.0f;\n//        }\n//\n//        NSLog(@\"Stage %d average image red: %f, green: %f, blue: %f, alpha: %f\", currentStage, currentRedTotal / (CGFloat)totalNumberOfPixels, currentGreenTotal / (CGFloat)totalNumberOfPixels, currentBlueTotal / (CGFloat)totalNumberOfPixels, currentAlphaTotal / (CGFloat)totalNumberOfPixels);\n//\n//\n//        CGImageRef cgImageFromBytes = CGImageCreate((int)currentStageSize.width, (int)currentStageSize.height, 8, 32, 4 * (int)currentStageSize.width, defaultRGBColorSpace, kCGBitmapByteOrderDefault | kCGImageAlphaLast, dataProvider, NULL, NO, kCGRenderingIntentDefault);\n//\n//        UIImage *imageToSave = [UIImage imageWithCGImage:cgImageFromBytes];\n//\n//        NSData *dataForPNGFile = UIImagePNGRepresentation(imageToSave);\n//\n//        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);\n//        NSString *documentsDirectory = [paths objectAtIndex:0];\n//\n//        NSString *imageName = [NSString stringWithFormat:@\"AverageLevel%d.png\", currentStage];\n//        NSError *error = nil;\n//        if (![dataForPNGFile writeToFile:[documentsDirectory stringByAppendingPathComponent:imageName] options:NSAtomicWrite error:&error])\n//        {\n//            return;\n//        }\n    }\n    \n    [firstInputFramebuffer unlock];\n}\n */\n\n#pragma mark -\n#pragma mark Callbacks\n\n- (void)extractLuminosityAtFrameTime:(CMTime)frameTime;\n{\n    runSynchronouslyOnVideoProcessingQueue(^{\n\n        // we need a normal color texture for this filter\n        NSAssert(self.outputTextureOptions.internalFormat == GL_RGBA, @\"The output texture format for this filter must be GL_RGBA.\");\n        NSAssert(self.outputTextureOptions.type == GL_UNSIGNED_BYTE, @\"The type of the output texture of this filter must be GL_UNSIGNED_BYTE.\");\n        \n        NSUInteger totalNumberOfPixels = round(finalStageSize.width * finalStageSize.height);\n        \n        if (rawImagePixels == NULL)\n        {\n            rawImagePixels = (GLubyte *)malloc(totalNumberOfPixels * 4);\n        }\n        \n        [GPUImageContext useImageProcessingContext];\n        [outputFramebuffer activateFramebuffer];\n\n        glReadPixels(0, 0, (int)finalStageSize.width, (int)finalStageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels);\n        \n        NSUInteger luminanceTotal = 
0;\n        NSUInteger byteIndex = 0;\n        for (NSUInteger currentPixel = 0; currentPixel < totalNumberOfPixels; currentPixel++)\n        {\n            luminanceTotal += rawImagePixels[byteIndex];\n            byteIndex += 4;\n        }\n        \n        CGFloat normalizedLuminosityTotal = (CGFloat)luminanceTotal / (CGFloat)totalNumberOfPixels / 255.0;\n        \n        if (_luminosityProcessingFinishedBlock != NULL)\n        {\n            _luminosityProcessingFinishedBlock(normalizedLuminosityTotal, frameTime);\n        }\n    });\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLuminosityBlendFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageLuminosityBlendFilter : GPUImageTwoInputFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLuminosityBlendFilter.m",
    "content": "#import \"GPUImageLuminosityBlendFilter.h\"\n\n/**\n * Luminosity blend mode based upon pseudo code from the PDF specification.\n */\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageLuminosityBlendFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n highp float lum(lowp vec3 c) {\n     return dot(c, vec3(0.3, 0.59, 0.11));\n }\n \n lowp vec3 clipcolor(lowp vec3 c) {\n     highp float l = lum(c);\n     lowp float n = min(min(c.r, c.g), c.b);\n     lowp float x = max(max(c.r, c.g), c.b);\n     \n     if (n < 0.0) {\n         c.r = l + ((c.r - l) * l) / (l - n);\n         c.g = l + ((c.g - l) * l) / (l - n);\n         c.b = l + ((c.b - l) * l) / (l - n);\n     }\n     if (x > 1.0) {\n         c.r = l + ((c.r - l) * (1.0 - l)) / (x - l);\n         c.g = l + ((c.g - l) * (1.0 - l)) / (x - l);\n         c.b = l + ((c.b - l) * (1.0 - l)) / (x - l);\n     }\n     \n     return c;\n }\n \n lowp vec3 setlum(lowp vec3 c, highp float l) {\n     highp float d = l - lum(c);\n     c = c + vec3(d);\n     return clipcolor(c);\n }\n \n void main()\n {\n\t highp vec4 baseColor = texture2D(inputImageTexture, textureCoordinate);\n\t highp vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(baseColor.rgb, lum(overlayColor.rgb)) * overlayColor.a, baseColor.a);\n }\n);\n#else\nNSString *const kGPUImageLuminosityBlendFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n float lum(vec3 c) {\n     return dot(c, vec3(0.3, 0.59, 0.11));\n }\n \n vec3 clipcolor(vec3 c) {\n     float l = lum(c);\n     float n = min(min(c.r, c.g), c.b);\n     float x = max(max(c.r, c.g), c.b);\n     \n     if (n < 0.0) {\n         c.r = l + ((c.r - l) * l) / (l - n);\n         c.g = l + ((c.g - l) * l) / (l - n);\n         c.b = l + ((c.b - l) * l) / (l - n);\n     }\n     if (x > 1.0) {\n         c.r = l + ((c.r - l) * (1.0 - l)) / (x - l);\n         c.g = l + ((c.g - l) * (1.0 - l)) / (x - l);\n         c.b = l + ((c.b - l) * (1.0 - l)) / (x - l);\n     }\n     \n     return c;\n }\n \n vec3 setlum(vec3 c, float l) {\n     float d = l - lum(c);\n     c = c + vec3(d);\n     return clipcolor(c);\n }\n \n void main()\n {\n\t vec4 baseColor = texture2D(inputImageTexture, textureCoordinate);\n\t vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(baseColor.rgb, lum(overlayColor.rgb)) * overlayColor.a, baseColor.a);\n }\n);\n#endif\n\n\n@implementation GPUImageLuminosityBlendFilter\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageLuminosityBlendFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageMaskFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageMaskFilter : GPUImageTwoInputFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageMaskFilter.m",
    "content": "#import \"GPUImageMaskFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageMaskShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n\t lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n\t lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);\n\t \n\t //Averages mask's the RGB values, and scales that value by the mask's alpha\n\t //\n\t //The dot product should take fewer cycles than doing an average normally\n\t //\n\t //Typical/ideal case, R,G, and B will be the same, and Alpha will be 1.0\n\t lowp float newAlpha = dot(textureColor2.rgb, vec3(.33333334, .33333334, .33333334)) * textureColor2.a;\n\t \t \n\t gl_FragColor = vec4(textureColor.xyz, newAlpha);\n//\t gl_FragColor = vec4(textureColor2);\n }\n);\n#else\nNSString *const kGPUImageMaskShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n\t vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n\t vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);\n\t \n\t //Averages mask's the RGB values, and scales that value by the mask's alpha\n\t //\n\t //The dot product should take fewer cycles than doing an average normally\n\t //\n\t //Typical/ideal case, R,G, and B will be the same, and Alpha will be 1.0\n\t float newAlpha = dot(textureColor2.rgb, vec3(.33333334, .33333334, .33333334)) * textureColor2.a;\n     \n\t gl_FragColor = vec4(textureColor.xyz, newAlpha);\n     //\t gl_FragColor = vec4(textureColor2);\n }\n);\n#endif\n\n@implementation GPUImageMaskFilter\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageMaskShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;\n{\n    glEnable(GL_BLEND);\n    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);\n    [super renderToTextureWithVertices:vertices textureCoordinates:textureCoordinates];\n    glDisable(GL_BLEND);\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageMedianFilter.h",
    "content": "#import \"GPUImage3x3TextureSamplingFilter.h\"\n\n@interface GPUImageMedianFilter : GPUImage3x3TextureSamplingFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageMedianFilter.m",
    "content": "#import \"GPUImageMedianFilter.h\"\n\n/*\n 3x3 median filter, adapted from \"A Fast, Small-Radius GPU Median Filter\" by Morgan McGuire in ShaderX6\n http://graphics.cs.williams.edu/papers/MedianShaderX6/\n \n Morgan McGuire and Kyle Whitson\n Williams College\n \n Register allocation tips by Victor Huang Xiaohuang\n University of Illinois at Urbana-Champaign\n \n http://graphics.cs.williams.edu\n \n \n Copyright (c) Morgan McGuire and Williams College, 2006\n All rights reserved.\n \n Redistribution and use in source and binary forms, with or without\n modification, are permitted provided that the following conditions are\n met:\n \n Redistributions of source code must retain the above copyright notice,\n this list of conditions and the following disclaimer.\n \n Redistributions in binary form must reproduce the above copyright\n notice, this list of conditions and the following disclaimer in the\n documentation and/or other materials provided with the distribution.\n \n THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n */\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageMedianFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n#define s2(a, b)\t\t\t\ttemp = a; a = min(a, b); b = max(temp, b);\n#define mn3(a, b, c)\t\t\ts2(a, b); s2(a, c);\n#define mx3(a, b, c)\t\t\ts2(b, c); s2(a, c);\n \n#define mnmx3(a, b, c)\t\t\tmx3(a, b, c); s2(a, b);                                   // 3 exchanges\n#define mnmx4(a, b, c, d)\t\ts2(a, b); s2(c, d); s2(a, c); s2(b, d);                   // 4 exchanges\n#define mnmx5(a, b, c, d, e)\ts2(a, b); s2(c, d); mn3(a, c, e); mx3(b, d, e);           // 6 exchanges\n#define mnmx6(a, b, c, d, e, f) s2(a, d); s2(b, e); s2(c, f); mn3(a, b, c); mx3(d, e, f); // 7 exchanges\n\n void main()\n {\n     vec3 v[6];\n\n     v[0] = texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb;\n     v[1] = texture2D(inputImageTexture, topRightTextureCoordinate).rgb;\n     v[2] = texture2D(inputImageTexture, topLeftTextureCoordinate).rgb;\n     v[3] = texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb;\n     v[4] = texture2D(inputImageTexture, leftTextureCoordinate).rgb;\n     v[5] = texture2D(inputImageTexture, rightTextureCoordinate).rgb;\n//     v[6] = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;\n//     v[7] = texture2D(inputImageTexture, topTextureCoordinate).rgb;\n 
    vec3 temp;\n\n     mnmx6(v[0], v[1], v[2], v[3], v[4], v[5]);\n     \n     v[5] = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;\n                  \n     mnmx5(v[1], v[2], v[3], v[4], v[5]);\n                  \n     v[5] = texture2D(inputImageTexture, topTextureCoordinate).rgb;\n                               \n     mnmx4(v[2], v[3], v[4], v[5]);\n                               \n     v[5] = texture2D(inputImageTexture, textureCoordinate).rgb;\n                                            \n     mnmx3(v[3], v[4], v[5]);\n    \n     gl_FragColor = vec4(v[4], 1.0);\n }\n);\n#else\nNSString *const kGPUImageMedianFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n#define s2(a, b)\t\t\t\ttemp = a; a = min(a, b); b = max(temp, b);\n#define mn3(a, b, c)\t\t\ts2(a, b); s2(a, c);\n#define mx3(a, b, c)\t\t\ts2(b, c); s2(a, c);\n \n#define mnmx3(a, b, c)\t\t\tmx3(a, b, c); s2(a, b);                                   // 3 exchanges\n#define mnmx4(a, b, c, d)\t\ts2(a, b); s2(c, d); s2(a, c); s2(b, d);                   // 4 exchanges\n#define mnmx5(a, b, c, d, e)\ts2(a, b); s2(c, d); mn3(a, c, e); mx3(b, d, e);           // 6 exchanges\n#define mnmx6(a, b, c, d, e, f) s2(a, d); s2(b, e); s2(c, f); mn3(a, b, c); mx3(d, e, f); // 7 exchanges\n \n void main()\n {\n     vec3 v[6];\n     \n     v[0] = texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb;\n     v[1] = texture2D(inputImageTexture, topRightTextureCoordinate).rgb;\n     v[2] = texture2D(inputImageTexture, topLeftTextureCoordinate).rgb;\n     v[3] = texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb;\n     v[4] = texture2D(inputImageTexture, leftTextureCoordinate).rgb;\n     v[5] = texture2D(inputImageTexture, rightTextureCoordinate).rgb;\n     //     v[6] = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;\n     //     v[7] = texture2D(inputImageTexture, topTextureCoordinate).rgb;\n     vec3 temp;\n     \n     mnmx6(v[0], v[1], v[2], v[3], v[4], v[5]);\n     \n     v[5] = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;\n     \n     mnmx5(v[1], v[2], v[3], v[4], v[5]);\n     \n     v[5] = texture2D(inputImageTexture, topTextureCoordinate).rgb;\n     \n     mnmx4(v[2], v[3], v[4], v[5]);\n     \n     v[5] = texture2D(inputImageTexture, textureCoordinate).rgb;\n     \n     mnmx3(v[3], v[4], v[5]);\n     \n     gl_FragColor = vec4(v[4], 1.0);\n }\n);\n#endif\n\n@implementation GPUImageMedianFilter\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageMedianFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    hasOverriddenImageSizeFactor = NO;\n    \n    return self;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageMissEtikateFilter.h",
    "content": "#import \"GPUImageFilterGroup.h\"\n\n@class GPUImagePicture;\n\n/** A photo filter based on Photoshop action by Miss Etikate:\n    http://miss-etikate.deviantart.com/art/Photoshop-Action-15-120151961\n */\n\n// Note: If you want to use this effect you have to add lookup_miss_etikate.png\n//       from Resources folder to your application bundle.\n\n@interface GPUImageMissEtikateFilter : GPUImageFilterGroup\n{\n    GPUImagePicture *lookupImageSource;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageMissEtikateFilter.m",
    "content": "#import \"GPUImageMissEtikateFilter.h\"\n#import \"GPUImagePicture.h\"\n#import \"GPUImageLookupFilter.h\"\n\n@implementation GPUImageMissEtikateFilter\n\n- (id)init;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n    UIImage *image = [UIImage imageNamed:@\"lookup_miss_etikate.png\"];\n#else\n    NSImage *image = [NSImage imageNamed:@\"lookup_miss_etikate.png\"];\n#endif\n\n    NSAssert(image, @\"To use GPUImageMissEtikateFilter you need to add lookup_miss_etikate.png from GPUImage/framework/Resources to your application bundle.\");\n\n    lookupImageSource = [[GPUImagePicture alloc] initWithImage:image];\n    GPUImageLookupFilter *lookupFilter = [[GPUImageLookupFilter alloc] init];\n    [self addFilter:lookupFilter];\n\n    [lookupImageSource addTarget:lookupFilter atTextureLocation:1];\n    [lookupImageSource processImage];\n\n    self.initialFilters = [NSArray arrayWithObjects:lookupFilter, nil];\n    self.terminalFilter = lookupFilter;\n\n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageMonochromeFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageMonochromeFilter : GPUImageFilter\n{\n    GLint intensityUniform, filterColorUniform;\n}\n\n@property(readwrite, nonatomic) CGFloat intensity;\n@property(readwrite, nonatomic) GPUVector4 color;\n\n- (void)setColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageMonochromeFilter.m",
    "content": "#import \"GPUImageMonochromeFilter.h\"\r\n\r\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\r\nNSString *const kGPUMonochromeFragmentShaderString = SHADER_STRING\r\n(\r\n precision lowp float;\r\n \r\n varying highp vec2 textureCoordinate;\r\n \r\n uniform sampler2D inputImageTexture;\r\n uniform float intensity;\r\n uniform vec3 filterColor;\r\n \r\n const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);\r\n \r\n void main()\r\n {\r\n\t//desat, then apply overlay blend\r\n\tlowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\r\n\tfloat luminance = dot(textureColor.rgb, luminanceWeighting);\r\n\t\r\n\tlowp vec4 desat = vec4(vec3(luminance), 1.0);\r\n\t\r\n\t//overlay\r\n\tlowp vec4 outputColor = vec4(\r\n                                 (desat.r < 0.5 ? (2.0 * desat.r * filterColor.r) : (1.0 - 2.0 * (1.0 - desat.r) * (1.0 - filterColor.r))),\r\n                                 (desat.g < 0.5 ? (2.0 * desat.g * filterColor.g) : (1.0 - 2.0 * (1.0 - desat.g) * (1.0 - filterColor.g))),\r\n                                 (desat.b < 0.5 ? (2.0 * desat.b * filterColor.b) : (1.0 - 2.0 * (1.0 - desat.b) * (1.0 - filterColor.b))),\r\n                                 1.0\r\n                                 );\r\n\t\r\n\t//which is better, or are they equal?\r\n\tgl_FragColor = vec4( mix(textureColor.rgb, outputColor.rgb, intensity), textureColor.a);\r\n }\r\n);\r\n#else\r\nNSString *const kGPUMonochromeFragmentShaderString = SHADER_STRING\r\n(\r\n varying vec2 textureCoordinate;\r\n \r\n uniform sampler2D inputImageTexture;\r\n uniform float intensity;\r\n uniform vec3 filterColor;\r\n \r\n const vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);\r\n \r\n void main()\r\n {\r\n     //desat, then apply overlay blend\r\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\r\n     float luminance = dot(textureColor.rgb, luminanceWeighting);\r\n     \r\n     vec4 desat = vec4(vec3(luminance), 1.0);\r\n     \r\n     //overlay\r\n     vec4 outputColor = vec4(\r\n                                  (desat.r < 0.5 ? (2.0 * desat.r * filterColor.r) : (1.0 - 2.0 * (1.0 - desat.r) * (1.0 - filterColor.r))),\r\n                                  (desat.g < 0.5 ? (2.0 * desat.g * filterColor.g) : (1.0 - 2.0 * (1.0 - desat.g) * (1.0 - filterColor.g))),\r\n                                  (desat.b < 0.5 ? 
(2.0 * desat.b * filterColor.b) : (1.0 - 2.0 * (1.0 - desat.b) * (1.0 - filterColor.b))),\r\n                                  1.0\r\n                                  );\r\n     \r\n     //which is better, or are they equal?\r\n     gl_FragColor = vec4( mix(textureColor.rgb, outputColor.rgb, intensity), textureColor.a);\r\n }\r\n);\r\n#endif\r\n\r\n@implementation GPUImageMonochromeFilter\r\n\r\n@synthesize intensity = _intensity;\r\n@synthesize color = _color;\r\n\r\n- (id)init;\r\n{\r\n    if (!(self = [super initWithFragmentShaderFromString:kGPUMonochromeFragmentShaderString]))\r\n    {\r\n\t\treturn nil;\r\n    }\r\n    \r\n    intensityUniform = [filterProgram uniformIndex:@\"intensity\"];\r\n    filterColorUniform = [filterProgram uniformIndex:@\"filterColor\"];\r\n    \r\n    self.intensity = 1.0;\r\n\tself.color = (GPUVector4){0.6f, 0.45f, 0.3f, 1.f};\r\n\t//self.color = [CIColor colorWithRed:0.6 green:0.45 blue:0.3 alpha:1.];\r\n    return self;\r\n}\r\n\r\n#pragma mark -\r\n#pragma mark Accessors\r\n\r\n- (void)setColor:(GPUVector4)color;\r\n{    \r\n\t\r\n\t_color = color;\r\n\t\r\n\t[self setColorRed:color.one green:color.two blue:color.three];\r\n}\r\n\r\n- (void)setColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;\r\n{\r\n    GPUVector3 filterColor = {redComponent, greenComponent, blueComponent};\r\n    \r\n    [self setVec3:filterColor forUniform:filterColorUniform program:filterProgram];\r\n}\r\n\r\n- (void)setIntensity:(CGFloat)newValue;\r\n{\r\n    _intensity = newValue;\r\n    \r\n    [self setFloat:_intensity forUniform:intensityUniform program:filterProgram];\r\n}\r\n\r\n@end\r\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageMosaicFilter.h",
    "content": "\n// This needs a little more work, it's rotating the input tileset and there are some artifacts (I think from GL_LINEAR interpolation), but it's working\n\n#import \"GPUImageTwoInputFilter.h\"\n#import \"GPUImagePicture.h\"\n\n@interface GPUImageMosaicFilter : GPUImageTwoInputFilter {\n    GLint inputTileSizeUniform, numTilesUniform, displayTileSizeUniform, colorOnUniform;\n    GPUImagePicture *pic;\n}\n\n//  This filter takes an input tileset, the tiles must ascend in luminance\n//  It looks at the input image and replaces each display tile with an input tile \n//  according to the luminance of that tile.  The idea was to replicate the ASCII\n//  video filters seen in other apps, but the tileset can be anything.\n@property(readwrite, nonatomic) CGSize inputTileSize;\n@property(readwrite, nonatomic) float numTiles;\n@property(readwrite, nonatomic) CGSize displayTileSize;\n@property(readwrite, nonatomic) BOOL colorOn;\n@property(readwrite, nonatomic, copy) NSString *tileSet;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageMosaicFilter.m",
    "content": "//\n//  GPUImageMosaicFilter.m\n\n\n#import \"GPUImageMosaicFilter.h\"\n#import \"GPUImagePicture.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageMosaicFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n uniform vec2 inputTileSize;\n uniform vec2 displayTileSize;\n uniform float numTiles;\n uniform int colorOn;\n \n void main()\n {\n     vec2 xy = textureCoordinate;\n     xy = xy - mod(xy, displayTileSize);\n     \n     vec4 lumcoeff = vec4(0.299,0.587,0.114,0.0);\n     \n     vec4 inputColor = texture2D(inputImageTexture2, xy);\n     float lum = dot(inputColor,lumcoeff);\n     lum = 1.0 - lum;\n     \n     float stepsize = 1.0 / numTiles;\n     float lumStep = (lum - mod(lum, stepsize)) / stepsize; \n  \n     float rowStep = 1.0 / inputTileSize.x;\n     float x = mod(lumStep, rowStep);\n     float y = floor(lumStep / rowStep);\n     \n     vec2 startCoord = vec2(float(x) *  inputTileSize.x, float(y) * inputTileSize.y);\n     vec2 finalCoord = startCoord + ((textureCoordinate - xy) * (inputTileSize / displayTileSize));\n     \n     vec4 color = texture2D(inputImageTexture, finalCoord);   \n     if (colorOn == 1) {\n         color = color * inputColor;\n     }\n     gl_FragColor = color; \n     \n }  \n);\n#else\nNSString *const kGPUImageMosaicFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n uniform vec2 inputTileSize;\n uniform vec2 displayTileSize;\n uniform float numTiles;\n uniform int colorOn;\n \n void main()\n {\n     vec2 xy = textureCoordinate;\n     xy = xy - mod(xy, displayTileSize);\n     \n     vec4 lumcoeff = vec4(0.299,0.587,0.114,0.0);\n     \n     vec4 inputColor = texture2D(inputImageTexture2, xy);\n     float lum = dot(inputColor,lumcoeff);\n     lum = 1.0 - lum;\n     \n     float stepsize = 1.0 / numTiles;\n     float lumStep = (lum - mod(lum, stepsize)) / stepsize;\n     \n     float rowStep = 1.0 / inputTileSize.x;\n     float x = mod(lumStep, rowStep);\n     float y = floor(lumStep / rowStep);\n     \n     vec2 startCoord = vec2(float(x) *  inputTileSize.x, float(y) * inputTileSize.y);\n     vec2 finalCoord = startCoord + ((textureCoordinate - xy) * (inputTileSize / displayTileSize));\n     \n     vec4 color = texture2D(inputImageTexture, finalCoord);\n     if (colorOn == 1) {\n         color = color * inputColor;\n     }\n     gl_FragColor = color;\n }\n);\n#endif\n\n@implementation GPUImageMosaicFilter\n\n@synthesize inputTileSize = _inputTileSize, numTiles = _numTiles, displayTileSize = _displayTileSize, colorOn = _colorOn;\n@synthesize tileSet = _tileSet;\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageMosaicFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    inputTileSizeUniform = [filterProgram uniformIndex:@\"inputTileSize\"];\n    displayTileSizeUniform = [filterProgram uniformIndex:@\"displayTileSize\"];\n    numTilesUniform = [filterProgram uniformIndex:@\"numTiles\"];\n    colorOnUniform = [filterProgram uniformIndex:@\"colorOn\"];\n    \n    CGSize its = CGSizeMake(0.125, 0.125);\n    CGSize dts = CGSizeMake(0.025, 0.025);\n    [self setDisplayTileSize:dts];\n    [self setInputTileSize:its];\n    [self setNumTiles:64.0];\n    [self setColorOn:YES];\n    //[self setTileSet:@\"squares.png\"];\n    return 
self;\n}\n\n- (void)setColorOn:(BOOL)yes\n{\n    // Set the uniform through the filter program so the correct context and shader program are bound\n    _colorOn = yes;\n    [self setInteger:yes forUniform:colorOnUniform program:filterProgram];\n}\n\n- (void)setNumTiles:(float)numTiles\n{\n    _numTiles = numTiles;\n    [self setFloat:_numTiles forUniformName:@\"numTiles\"];\n}\n\n- (void)setInputTileSize:(CGSize)inputTileSize\n{\n    // Clamp the incoming size to the valid 0.0 - 1.0 texture coordinate range before storing it\n    if (inputTileSize.width > 1.0) {\n        inputTileSize.width = 1.0;\n    }\n    if (inputTileSize.height > 1.0) {\n        inputTileSize.height = 1.0;\n    }\n    if (inputTileSize.width < 0.0) {\n        inputTileSize.width = 0.0;\n    }\n    if (inputTileSize.height < 0.0) {\n        inputTileSize.height = 0.0;\n    }\n    \n    _inputTileSize = inputTileSize;\n    \n    [self setSize:_inputTileSize forUniform:inputTileSizeUniform program:filterProgram];\n}\n\n- (void)setDisplayTileSize:(CGSize)displayTileSize\n{\n    // Clamp the incoming size to the valid 0.0 - 1.0 texture coordinate range before storing it\n    if (displayTileSize.width > 1.0) {\n        displayTileSize.width = 1.0;\n    }\n    if (displayTileSize.height > 1.0) {\n        displayTileSize.height = 1.0;\n    }\n    if (displayTileSize.width < 0.0) {\n        displayTileSize.width = 0.0;\n    }\n    if (displayTileSize.height < 0.0) {\n        displayTileSize.height = 0.0;\n    }\n    \n    _displayTileSize = displayTileSize;\n    \n    [self setSize:_displayTileSize forUniform:displayTileSizeUniform program:filterProgram];\n}\n\n- (void)setTileSet:(NSString *)tileSet\n{\n    _tileSet = [tileSet copy];\n    \n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n    UIImage *img = [UIImage imageNamed:tileSet];\n#else\n    NSImage *img = [NSImage imageNamed:tileSet];\n#endif\n    pic = [[GPUImagePicture alloc] initWithImage:img smoothlyScaleOutput:YES];\n    [pic addTarget:self];\n    [pic processImage];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageMotionBlurFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageMotionBlurFilter : GPUImageFilter\n\n/** A multiplier for the blur size, ranging from 0.0 on up, with a default of 1.0\n */\n@property (readwrite, nonatomic) CGFloat blurSize;\n\n/** The angular direction of the blur, in degrees. 0 degrees by default\n */\n@property (readwrite, nonatomic) CGFloat blurAngle;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageMotionBlurFilter.m",
    "content": "#import \"GPUImageMotionBlurFilter.h\"\n\n// Override vertex shader to remove dependent texture reads\nNSString *const kGPUImageTiltedTexelSamplingVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n \n uniform vec2 directionalTexelStep;\n \n varying vec2 textureCoordinate;\n varying vec2 oneStepBackTextureCoordinate;\n varying vec2 twoStepsBackTextureCoordinate;\n varying vec2 threeStepsBackTextureCoordinate;\n varying vec2 fourStepsBackTextureCoordinate;\n varying vec2 oneStepForwardTextureCoordinate;\n varying vec2 twoStepsForwardTextureCoordinate;\n varying vec2 threeStepsForwardTextureCoordinate;\n varying vec2 fourStepsForwardTextureCoordinate;\n \n void main()\n {\n     gl_Position = position;\n     \n     textureCoordinate = inputTextureCoordinate.xy;\n     oneStepBackTextureCoordinate = inputTextureCoordinate.xy - directionalTexelStep;\n     twoStepsBackTextureCoordinate = inputTextureCoordinate.xy - 2.0 * directionalTexelStep;\n     threeStepsBackTextureCoordinate = inputTextureCoordinate.xy - 3.0 * directionalTexelStep;\n     fourStepsBackTextureCoordinate = inputTextureCoordinate.xy - 4.0 * directionalTexelStep;\n     oneStepForwardTextureCoordinate = inputTextureCoordinate.xy + directionalTexelStep;\n     twoStepsForwardTextureCoordinate = inputTextureCoordinate.xy + 2.0 * directionalTexelStep;\n     threeStepsForwardTextureCoordinate = inputTextureCoordinate.xy + 3.0 * directionalTexelStep;\n     fourStepsForwardTextureCoordinate = inputTextureCoordinate.xy + 4.0 * directionalTexelStep;\n }\n);\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageMotionBlurFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n uniform sampler2D inputImageTexture;\n \n varying vec2 textureCoordinate;\n varying vec2 oneStepBackTextureCoordinate;\n varying vec2 twoStepsBackTextureCoordinate;\n varying vec2 threeStepsBackTextureCoordinate;\n varying vec2 fourStepsBackTextureCoordinate;\n varying vec2 oneStepForwardTextureCoordinate;\n varying vec2 twoStepsForwardTextureCoordinate;\n varying vec2 threeStepsForwardTextureCoordinate;\n varying vec2 fourStepsForwardTextureCoordinate;\n \n void main()\n {\n     // Box weights\n//     lowp vec4 fragmentColor = texture2D(inputImageTexture, textureCoordinate) * 0.1111111;\n//     fragmentColor += texture2D(inputImageTexture, oneStepBackTextureCoordinate) * 0.1111111;\n//     fragmentColor += texture2D(inputImageTexture, twoStepsBackTextureCoordinate) * 0.1111111;\n//     fragmentColor += texture2D(inputImageTexture, threeStepsBackTextureCoordinate) * 0.1111111;\n//     fragmentColor += texture2D(inputImageTexture, fourStepsBackTextureCoordinate) * 0.1111111;\n//     fragmentColor += texture2D(inputImageTexture, oneStepForwardTextureCoordinate) * 0.1111111;\n//     fragmentColor += texture2D(inputImageTexture, twoStepsForwardTextureCoordinate) * 0.1111111;\n//     fragmentColor += texture2D(inputImageTexture, threeStepsForwardTextureCoordinate) * 0.1111111;\n//     fragmentColor += texture2D(inputImageTexture, fourStepsForwardTextureCoordinate) * 0.1111111;\n\n     lowp vec4 fragmentColor = texture2D(inputImageTexture, textureCoordinate) * 0.18;\n     fragmentColor += texture2D(inputImageTexture, oneStepBackTextureCoordinate) * 0.15;\n     fragmentColor += texture2D(inputImageTexture, twoStepsBackTextureCoordinate) *  0.12;\n     fragmentColor += texture2D(inputImageTexture, threeStepsBackTextureCoordinate) * 0.09;\n     fragmentColor += 
texture2D(inputImageTexture, fourStepsBackTextureCoordinate) * 0.05;\n     fragmentColor += texture2D(inputImageTexture, oneStepForwardTextureCoordinate) * 0.15;\n     fragmentColor += texture2D(inputImageTexture, twoStepsForwardTextureCoordinate) *  0.12;\n     fragmentColor += texture2D(inputImageTexture, threeStepsForwardTextureCoordinate) * 0.09;\n     fragmentColor += texture2D(inputImageTexture, fourStepsForwardTextureCoordinate) * 0.05;\n\n     gl_FragColor = fragmentColor;\n }\n);\n#else\nNSString *const kGPUImageMotionBlurFragmentShaderString = SHADER_STRING\n(\n uniform sampler2D inputImageTexture;\n \n varying vec2 textureCoordinate;\n varying vec2 oneStepBackTextureCoordinate;\n varying vec2 twoStepsBackTextureCoordinate;\n varying vec2 threeStepsBackTextureCoordinate;\n varying vec2 fourStepsBackTextureCoordinate;\n varying vec2 oneStepForwardTextureCoordinate;\n varying vec2 twoStepsForwardTextureCoordinate;\n varying vec2 threeStepsForwardTextureCoordinate;\n varying vec2 fourStepsForwardTextureCoordinate;\n \n void main()\n {\n     // Box weights\n     //     vec4 fragmentColor = texture2D(inputImageTexture, textureCoordinate) * 0.1111111;\n     //     fragmentColor += texture2D(inputImageTexture, oneStepBackTextureCoordinate) * 0.1111111;\n     //     fragmentColor += texture2D(inputImageTexture, twoStepsBackTextureCoordinate) * 0.1111111;\n     //     fragmentColor += texture2D(inputImageTexture, threeStepsBackTextureCoordinate) * 0.1111111;\n     //     fragmentColor += texture2D(inputImageTexture, fourStepsBackTextureCoordinate) * 0.1111111;\n     //     fragmentColor += texture2D(inputImageTexture, oneStepForwardTextureCoordinate) * 0.1111111;\n     //     fragmentColor += texture2D(inputImageTexture, twoStepsForwardTextureCoordinate) * 0.1111111;\n     //     fragmentColor += texture2D(inputImageTexture, threeStepsForwardTextureCoordinate) * 0.1111111;\n     //     fragmentColor += texture2D(inputImageTexture, fourStepsForwardTextureCoordinate) * 0.1111111;\n     \n     vec4 fragmentColor = texture2D(inputImageTexture, textureCoordinate) * 0.18;\n     fragmentColor += texture2D(inputImageTexture, oneStepBackTextureCoordinate) * 0.15;\n     fragmentColor += texture2D(inputImageTexture, twoStepsBackTextureCoordinate) *  0.12;\n     fragmentColor += texture2D(inputImageTexture, threeStepsBackTextureCoordinate) * 0.09;\n     fragmentColor += texture2D(inputImageTexture, fourStepsBackTextureCoordinate) * 0.05;\n     fragmentColor += texture2D(inputImageTexture, oneStepForwardTextureCoordinate) * 0.15;\n     fragmentColor += texture2D(inputImageTexture, twoStepsForwardTextureCoordinate) *  0.12;\n     fragmentColor += texture2D(inputImageTexture, threeStepsForwardTextureCoordinate) * 0.09;\n     fragmentColor += texture2D(inputImageTexture, fourStepsForwardTextureCoordinate) * 0.05;\n     \n     gl_FragColor = fragmentColor;\n }\n);\n#endif\n\n@interface GPUImageMotionBlurFilter()\n{\n    GLint directionalTexelStepUniform;\n}\n\n- (void)recalculateTexelOffsets;\n\n@end\n\n@implementation GPUImageMotionBlurFilter\n\n@synthesize blurSize = _blurSize;\n@synthesize blurAngle = _blurAngle;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithVertexShaderFromString:kGPUImageTiltedTexelSamplingVertexShaderString fragmentShaderFromString:kGPUImageMotionBlurFragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    directionalTexelStepUniform = [filterProgram uniformIndex:@\"directionalTexelStep\"];\n    \n    
self.blurSize = 2.5;\n    self.blurAngle = 0.0;\n    \n    return self;\n}\n\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n{\n    CGSize oldInputSize = inputTextureSize;\n    [super setInputSize:newSize atIndex:textureIndex];\n    \n    if (!CGSizeEqualToSize(oldInputSize, inputTextureSize) && (!CGSizeEqualToSize(newSize, CGSizeZero)) )\n    {\n        [self recalculateTexelOffsets];\n    }\n}\n\n- (void)recalculateTexelOffsets;\n{\n    CGFloat aspectRatio = 1.0;\n    CGPoint texelOffsets;\n    \n    if (GPUImageRotationSwapsWidthAndHeight(inputRotation))\n    {\n        aspectRatio = (inputTextureSize.width / inputTextureSize.height);\n        texelOffsets.x = _blurSize * sin(_blurAngle * M_PI / 180.0) * aspectRatio / inputTextureSize.height;\n        texelOffsets.y = _blurSize * cos(_blurAngle * M_PI / 180.0) / inputTextureSize.height;\n    }\n    else\n    {\n        aspectRatio = (inputTextureSize.height / inputTextureSize.width);\n        texelOffsets.x = _blurSize * cos(_blurAngle * M_PI / 180.0) * aspectRatio / inputTextureSize.width;\n        texelOffsets.y = _blurSize * sin(_blurAngle * M_PI / 180.0) / inputTextureSize.width;\n    }\n    \n    [self setPoint:texelOffsets forUniform:directionalTexelStepUniform program:filterProgram];\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n    [super setInputRotation:newInputRotation atIndex:textureIndex];\n    [self recalculateTexelOffsets];\n}\n\n- (void)setBlurAngle:(CGFloat)newValue;\n{\n    _blurAngle = newValue;\n    [self recalculateTexelOffsets];\n}\n\n- (void)setBlurSize:(CGFloat)newValue;\n{\n    _blurSize = newValue;\n    [self recalculateTexelOffsets];\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageMotionDetector.h",
    "content": "#import \"GPUImageFilterGroup.h\"\n#import \"GPUImageLowPassFilter.h\"\n#import \"GPUImageAverageColor.h\"\n\n@interface GPUImageMotionDetector : GPUImageFilterGroup\n{\n    GPUImageLowPassFilter *lowPassFilter;\n    GPUImageTwoInputFilter *frameComparisonFilter;\n    GPUImageAverageColor *averageColor;\n}\n\n// This controls the low pass filter strength used to compare the current frame with previous ones to detect motion. This ranges from 0.0 to 1.0, with a default of 0.5.\n@property(readwrite, nonatomic) CGFloat lowPassFilterStrength;\n\n// For every frame, this will feed back the calculated centroid of the motion, as well as a relative intensity.\n@property(nonatomic, copy) void(^motionDetectionBlock)(CGPoint motionCentroid, CGFloat motionIntensity, CMTime frameTime);\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageMotionDetector.m",
    "content": "#import \"GPUImageMotionDetector.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageMotionComparisonFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n uniform highp float intensity;\n \n void main()\n {\n     lowp vec3 currentImageColor = texture2D(inputImageTexture, textureCoordinate).rgb;\n     lowp vec3 lowPassImageColor = texture2D(inputImageTexture2, textureCoordinate2).rgb;\n     \n     mediump float colorDistance = distance(currentImageColor, lowPassImageColor); // * 0.57735\n     lowp float movementThreshold = step(0.2, colorDistance);\n     \n     gl_FragColor = movementThreshold * vec4(textureCoordinate2.x, textureCoordinate2.y, 1.0, 1.0);\n }\n);\n#else\nNSString *const kGPUImageMotionComparisonFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n uniform float intensity;\n \n void main()\n {\n     vec3 currentImageColor = texture2D(inputImageTexture, textureCoordinate).rgb;\n     vec3 lowPassImageColor = texture2D(inputImageTexture2, textureCoordinate2).rgb;\n     \n     float colorDistance = distance(currentImageColor, lowPassImageColor); // * 0.57735\n     float movementThreshold = step(0.2, colorDistance);\n     \n     gl_FragColor = movementThreshold * vec4(textureCoordinate2.x, textureCoordinate2.y, 1.0, 1.0);\n }\n);\n#endif\n\n\n@implementation GPUImageMotionDetector\n\n@synthesize lowPassFilterStrength, motionDetectionBlock;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n    // Start with a low pass filter to define the component to be removed\n    lowPassFilter = [[GPUImageLowPassFilter alloc] init];\n    [self addFilter:lowPassFilter];\n    \n    // Take the difference of the current frame from the low pass filtered result to get the high pass\n    frameComparisonFilter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:kGPUImageMotionComparisonFragmentShaderString];\n    [self addFilter:frameComparisonFilter];\n    \n    // Texture location 0 needs to be the original image for the difference blend\n    [lowPassFilter addTarget:frameComparisonFilter atTextureLocation:1];\n    \n    // End with the average color for the scene to determine the centroid\n    averageColor = [[GPUImageAverageColor alloc] init];\n    \n    __unsafe_unretained GPUImageMotionDetector *weakSelf = self;\n\n    [averageColor setColorAverageProcessingFinishedBlock:^(CGFloat redComponent, CGFloat greenComponent, CGFloat blueComponent, CGFloat alphaComponent, CMTime frameTime) {\n        if (weakSelf.motionDetectionBlock != NULL)\n        {\n            weakSelf.motionDetectionBlock(CGPointMake(redComponent / alphaComponent, greenComponent / alphaComponent), alphaComponent, frameTime);\n        }\n//        NSLog(@\"Average X: %f, Y: %f total: %f\", redComponent / alphaComponent, greenComponent / alphaComponent, alphaComponent);\n    }];\n    \n    [frameComparisonFilter addTarget:averageColor];\n    \n    self.initialFilters = [NSArray arrayWithObjects:lowPassFilter, frameComparisonFilter, nil];\n    self.terminalFilter = frameComparisonFilter;\n    \n    self.lowPassFilterStrength = 0.5;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma 
mark Accessors\n\n- (void)setLowPassFilterStrength:(CGFloat)newValue;\n{\n    lowPassFilter.filterStrength = newValue;\n}\n\n- (CGFloat)lowPassFilterStrength;\n{\n    return lowPassFilter.filterStrength;\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageMultiplyBlendFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageMultiplyBlendFilter : GPUImageTwoInputFilter\n{\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageMultiplyBlendFilter.m",
    "content": "#import \"GPUImageMultiplyBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageMultiplyBlendFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     lowp vec4 base = texture2D(inputImageTexture, textureCoordinate);\n     lowp vec4 overlayer = texture2D(inputImageTexture2, textureCoordinate2);\n          \n     gl_FragColor = overlayer * base + overlayer * (1.0 - base.a) + base * (1.0 - overlayer.a);\n }\n);\n#else\nNSString *const kGPUImageMultiplyBlendFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     vec4 base = texture2D(inputImageTexture, textureCoordinate);\n     vec4 overlayer = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     gl_FragColor = overlayer * base + overlayer * (1.0 - base.a) + base * (1.0 - overlayer.a);\n }\n);\n#endif\n\n@implementation GPUImageMultiplyBlendFilter\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageMultiplyBlendFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageNobleCornerDetectionFilter.h",
    "content": "#import \"GPUImageHarrisCornerDetectionFilter.h\"\n\n/** Noble corner detector\n \n This is the Noble variant on the Harris detector, from \n Alison Noble, \"Descriptions of Image Surfaces\", PhD thesis, Department of Engineering Science, Oxford University 1989, p45.  \n*/\n\n\n@interface GPUImageNobleCornerDetectionFilter : GPUImageHarrisCornerDetectionFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageNobleCornerDetectionFilter.m",
    "content": "#import \"GPUImageNobleCornerDetectionFilter.h\"\n\n@implementation GPUImageNobleCornerDetectionFilter\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageNobleCornerDetectionFragmentShaderString = SHADER_STRING\n( \n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform lowp float sensitivity;\n \n void main()\n {\n     mediump vec3 derivativeElements = texture2D(inputImageTexture, textureCoordinate).rgb;\n     \n     mediump float derivativeSum = derivativeElements.x + derivativeElements.y;\n     \n     // R = (Ix^2 * Iy^2 - Ixy * Ixy) / (Ix^2 + Iy^2)\n     mediump float zElement = (derivativeElements.z * 2.0) - 1.0;\n     //     mediump float harrisIntensity = (derivativeElements.x * derivativeElements.y - (derivativeElements.z * derivativeElements.z)) / (derivativeSum);\n     mediump float cornerness = (derivativeElements.x * derivativeElements.y - (zElement * zElement)) / (derivativeSum);\n     \n     // Original Harris detector\n     // R = Ix^2 * Iy^2 - Ixy * Ixy - k * (Ix^2 + Iy^2)^2\n     //     highp float harrisIntensity = derivativeElements.x * derivativeElements.y - (derivativeElements.z * derivativeElements.z) - harrisConstant * derivativeSum * derivativeSum;\n     \n     //     gl_FragColor = vec4(vec3(harrisIntensity * 7.0), 1.0);\n     gl_FragColor = vec4(vec3(cornerness * sensitivity), 1.0);\n }\n);\n#else\nNSString *const kGPUImageNobleCornerDetectionFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform float sensitivity;\n \n void main()\n {\n     vec3 derivativeElements = texture2D(inputImageTexture, textureCoordinate).rgb;\n     \n     float derivativeSum = derivativeElements.x + derivativeElements.y;\n     \n     // R = (Ix^2 * Iy^2 - Ixy * Ixy) / (Ix^2 + Iy^2)\n     float zElement = (derivativeElements.z * 2.0) - 1.0;\n     //     mediump float harrisIntensity = (derivativeElements.x * derivativeElements.y - (derivativeElements.z * derivativeElements.z)) / (derivativeSum);\n     float cornerness = (derivativeElements.x * derivativeElements.y - (zElement * zElement)) / (derivativeSum);\n     \n     // Original Harris detector\n     // R = Ix^2 * Iy^2 - Ixy * Ixy - k * (Ix^2 + Iy^2)^2\n     //     highp float harrisIntensity = derivativeElements.x * derivativeElements.y - (derivativeElements.z * derivativeElements.z) - harrisConstant * derivativeSum * derivativeSum;\n     \n     //     gl_FragColor = vec4(vec3(harrisIntensity * 7.0), 1.0);\n     gl_FragColor = vec4(vec3(cornerness * sensitivity), 1.0);\n }\n);\n#endif\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [self initWithCornerDetectionFragmentShader:kGPUImageNobleCornerDetectionFragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    return self;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageNonMaximumSuppressionFilter.h",
    "content": "#import \"GPUImage3x3TextureSamplingFilter.h\"\n\n@interface GPUImageNonMaximumSuppressionFilter : GPUImage3x3TextureSamplingFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageNonMaximumSuppressionFilter.m",
    "content": "#import \"GPUImageNonMaximumSuppressionFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageNonMaximumSuppressionFragmentShaderString = SHADER_STRING\n(\n uniform sampler2D inputImageTexture;\n \n varying highp vec2 textureCoordinate;\n varying highp vec2 leftTextureCoordinate;\n varying highp vec2 rightTextureCoordinate;\n \n varying highp vec2 topTextureCoordinate;\n varying highp vec2 topLeftTextureCoordinate;\n varying highp vec2 topRightTextureCoordinate;\n \n varying highp vec2 bottomTextureCoordinate;\n varying highp vec2 bottomLeftTextureCoordinate;\n varying highp vec2 bottomRightTextureCoordinate;\n \n void main()\n {\n     lowp float bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).r;\n     lowp float bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;\n     lowp float bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;\n     lowp vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);\n     lowp float leftColor = texture2D(inputImageTexture, leftTextureCoordinate).r;\n     lowp float rightColor = texture2D(inputImageTexture, rightTextureCoordinate).r;\n     lowp float topColor = texture2D(inputImageTexture, topTextureCoordinate).r;\n     lowp float topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).r;\n     lowp float topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).r;\n     \n     // Use a tiebreaker for pixels to the left and immediately above this one\n     lowp float multiplier = 1.0 - step(centerColor.r, topColor);\n     multiplier = multiplier * (1.0 - step(centerColor.r, topLeftColor));\n     multiplier = multiplier * (1.0 - step(centerColor.r, leftColor));\n     multiplier = multiplier * (1.0 - step(centerColor.r, bottomLeftColor));\n     \n     lowp float maxValue = max(centerColor.r, bottomColor);\n     maxValue = max(maxValue, bottomRightColor);\n     maxValue = max(maxValue, rightColor);\n     maxValue = max(maxValue, topRightColor);\n     \n     gl_FragColor = vec4((centerColor.rgb * step(maxValue, centerColor.r) * multiplier), 1.0);\n }\n);\n#else\nNSString *const kGPUImageNonMaximumSuppressionFragmentShaderString = SHADER_STRING\n(\n uniform sampler2D inputImageTexture;\n \n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n void main()\n {\n     float bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).r;\n     float bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;\n     float bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;\n     vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);\n     float leftColor = texture2D(inputImageTexture, leftTextureCoordinate).r;\n     float rightColor = texture2D(inputImageTexture, rightTextureCoordinate).r;\n     float topColor = texture2D(inputImageTexture, topTextureCoordinate).r;\n     float topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).r;\n     float topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).r;\n     \n     // Use a tiebreaker for pixels to the left and immediately above this one\n     float 
multiplier = 1.0 - step(centerColor.r, topColor);\n     multiplier = multiplier * (1.0 - step(centerColor.r, topLeftColor));\n     multiplier = multiplier * (1.0 - step(centerColor.r, leftColor));\n     multiplier = multiplier * (1.0 - step(centerColor.r, bottomLeftColor));\n     \n     float maxValue = max(centerColor.r, bottomColor);\n     maxValue = max(maxValue, bottomRightColor);\n     maxValue = max(maxValue, rightColor);\n     maxValue = max(maxValue, topRightColor);\n     \n     gl_FragColor = vec4((centerColor.rgb * step(maxValue, centerColor.r) * multiplier), 1.0);\n }\n);\n#endif\n\n@implementation GPUImageNonMaximumSuppressionFilter\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageNonMaximumSuppressionFragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    return self;\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageNormalBlendFilter.h",
    "content": "//  Created by Jorge Garcia on 9/5/12.\n//\n\n#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageNormalBlendFilter : GPUImageTwoInputFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageNormalBlendFilter.m",
    "content": "//  Created by Jorge Garcia on 9/5/12.\n\n#import \"GPUImageNormalBlendFilter.h\"\n/*\n This equation is a simplification of the general blending equation. It assumes the destination color is opaque, and therefore drops the destination color's alpha term.\n \n D = C1 * C1a + C2 * C2a * (1 - C1a)\n where D is the resultant color, C1 is the color of the first element, C1a is the alpha of the first element, C2 is the second element color, C2a is the alpha of the second element. The destination alpha is calculated with:\n \n Da = C1a + C2a * (1 - C1a)\n The resultant color is premultiplied with the alpha. To restore the color to the unmultiplied values, just divide by Da, the resultant alpha.\n \n http://stackoverflow.com/questions/1724946/blend-mode-on-a-transparent-and-semi-transparent-background\n \n For some reason Photoshop behaves \n D = C1 + C2 * C2a * (1 - C1a)\n */\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageNormalBlendFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     lowp vec4 c2 = texture2D(inputImageTexture, textureCoordinate);\n\t lowp vec4 c1 = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     lowp vec4 outputColor;\n     \n//     outputColor.r = c1.r + c2.r * c2.a * (1.0 - c1.a);\n//     outputColor.g = c1.g + c2.g * c2.a * (1.0 - c1.a);\n//     outputColor.b = c1.b + c2.b * c2.a * (1.0 - c1.a);\n//     outputColor.a = c1.a + c2.a * (1.0 - c1.a);\n     \n     lowp float a = c1.a + c2.a * (1.0 - c1.a);\n     lowp float alphaDivisor = a + step(a, 0.0); // Protect against a divide-by-zero blacking out things in the output\n\n     outputColor.r = (c1.r * c1.a + c2.r * c2.a * (1.0 - c1.a))/alphaDivisor;\n     outputColor.g = (c1.g * c1.a + c2.g * c2.a * (1.0 - c1.a))/alphaDivisor;\n     outputColor.b = (c1.b * c1.a + c2.b * c2.a * (1.0 - c1.a))/alphaDivisor;\n     outputColor.a = a;\n\n     gl_FragColor = outputColor;\n }\n);\n#else\nNSString *const kGPUImageNormalBlendFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     vec4 c2 = texture2D(inputImageTexture, textureCoordinate);\n\t vec4 c1 = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     vec4 outputColor;\n     \n     //     outputColor.r = c1.r + c2.r * c2.a * (1.0 - c1.a);\n     //     outputColor.g = c1.g + c2.g * c2.a * (1.0 - c1.a);\n     //     outputColor.b = c1.b + c2.b * c2.a * (1.0 - c1.a);\n     //     outputColor.a = c1.a + c2.a * (1.0 - c1.a);\n     \n     float a = c1.a + c2.a * (1.0 - c1.a);\n     float alphaDivisor = a + step(a, 0.0); // Protect against a divide-by-zero blacking out things in the output\n\n     outputColor.r = (c1.r * c1.a + c2.r * c2.a * (1.0 - c1.a))/alphaDivisor;\n     outputColor.g = (c1.g * c1.a + c2.g * c2.a * (1.0 - c1.a))/alphaDivisor;\n     outputColor.b = (c1.b * c1.a + c2.b * c2.a * (1.0 - c1.a))/alphaDivisor;\n     outputColor.a = a;\n     \n     gl_FragColor = outputColor;\n }\n);\n#endif\n\n@implementation GPUImageNormalBlendFilter\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageNormalBlendFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageOpacityFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageOpacityFilter : GPUImageFilter\n{\n    GLint opacityUniform;\n}\n\n// Opacity ranges from 0.0 to 1.0, with 1.0 as the normal setting\n@property(readwrite, nonatomic) CGFloat opacity;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageOpacityFilter.m",
    "content": "#import \"GPUImageOpacityFilter.h\"\n\n@implementation GPUImageOpacityFilter\n\n@synthesize opacity = _opacity;\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageOpacityFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform lowp float opacity;\n \n void main()\n {\n     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     \n     gl_FragColor = vec4(textureColor.rgb, textureColor.a * opacity);\n }\n);\n#else\nNSString *const kGPUImageOpacityFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform float opacity;\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     \n     gl_FragColor = vec4(textureColor.rgb, textureColor.a * opacity);\n }\n);\n#endif\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageOpacityFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    opacityUniform = [filterProgram uniformIndex:@\"opacity\"];\n    self.opacity = 1.0;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setOpacity:(CGFloat)newValue;\n{\n    _opacity = newValue;\n    \n    [self setFloat:_opacity forUniform:opacityUniform program:filterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageOpeningFilter.h",
    "content": "#import \"GPUImageFilterGroup.h\"\n\n@class GPUImageErosionFilter;\n@class GPUImageDilationFilter;\n\n// A filter that first performs an erosion on the red channel of an image, followed by a dilation of the same radius. \n// This helps to filter out smaller bright elements.\n\n@interface GPUImageOpeningFilter : GPUImageFilterGroup\n{\n    GPUImageErosionFilter *erosionFilter;\n    GPUImageDilationFilter *dilationFilter;\n}\n\n@property(readwrite, nonatomic) CGFloat verticalTexelSpacing, horizontalTexelSpacing;\n\n- (id)initWithRadius:(NSUInteger)radius;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageOpeningFilter.m",
    "content": "#import \"GPUImageOpeningFilter.h\"\n#import \"GPUImageErosionFilter.h\"\n#import \"GPUImageDilationFilter.h\"\n\n@implementation GPUImageOpeningFilter\n\n@synthesize verticalTexelSpacing = _verticalTexelSpacing;\n@synthesize horizontalTexelSpacing = _horizontalTexelSpacing;\n\n- (id)init;\n{\n    if (!(self = [self initWithRadius:1]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n- (id)initWithRadius:(NSUInteger)radius;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n    // First pass: erosion\n    erosionFilter = [[GPUImageErosionFilter alloc] initWithRadius:radius];\n    [self addFilter:erosionFilter];\n    \n    // Second pass: dilation\n    dilationFilter = [[GPUImageDilationFilter alloc] initWithRadius:radius];\n    [self addFilter:dilationFilter];\n    \n    [erosionFilter addTarget:dilationFilter];\n        \n    self.initialFilters = [NSArray arrayWithObjects:erosionFilter, nil];\n    self.terminalFilter = dilationFilter;\n\n    return self;\n}\n\n- (void)setVerticalTexelSpacing:(CGFloat)newValue;\n{\n    _verticalTexelSpacing = newValue;\n    erosionFilter.verticalTexelSpacing = newValue;\n    dilationFilter.verticalTexelSpacing = newValue;\n}\n\n- (void)setHorizontalTexelSpacing:(CGFloat)newValue;\n{\n    _horizontalTexelSpacing = newValue;\n    erosionFilter.horizontalTexelSpacing = newValue;\n    dilationFilter.horizontalTexelSpacing = newValue;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageOverlayBlendFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageOverlayBlendFilter : GPUImageTwoInputFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageOverlayBlendFilter.m",
    "content": "#import \"GPUImageOverlayBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageOverlayBlendFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     mediump vec4 base = texture2D(inputImageTexture, textureCoordinate);\n     mediump vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     mediump float ra;\n     if (2.0 * base.r < base.a) {\n         ra = 2.0 * overlay.r * base.r + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);\n     } else {\n         ra = overlay.a * base.a - 2.0 * (base.a - base.r) * (overlay.a - overlay.r) + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);\n     }\n     \n     mediump float ga;\n     if (2.0 * base.g < base.a) {\n         ga = 2.0 * overlay.g * base.g + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);\n     } else {\n         ga = overlay.a * base.a - 2.0 * (base.a - base.g) * (overlay.a - overlay.g) + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);\n     }\n     \n     mediump float ba;\n     if (2.0 * base.b < base.a) {\n         ba = 2.0 * overlay.b * base.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);\n     } else {\n         ba = overlay.a * base.a - 2.0 * (base.a - base.b) * (overlay.a - overlay.b) + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);\n     }\n     \n     gl_FragColor = vec4(ra, ga, ba, 1.0);\n }\n);\n#else\nNSString *const kGPUImageOverlayBlendFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     vec4 base = texture2D(inputImageTexture, textureCoordinate);\n     vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     float ra;\n     if (2.0 * base.r < base.a) {\n         ra = 2.0 * overlay.r * base.r + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);\n     } else {\n         ra = overlay.a * base.a - 2.0 * (base.a - base.r) * (overlay.a - overlay.r) + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);\n     }\n     \n     float ga;\n     if (2.0 * base.g < base.a) {\n         ga = 2.0 * overlay.g * base.g + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);\n     } else {\n         ga = overlay.a * base.a - 2.0 * (base.a - base.g) * (overlay.a - overlay.g) + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);\n     }\n     \n     float ba;\n     if (2.0 * base.b < base.a) {\n         ba = 2.0 * overlay.b * base.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);\n     } else {\n         ba = overlay.a * base.a - 2.0 * (base.a - base.b) * (overlay.a - overlay.b) + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);\n     }\n     \n     gl_FragColor = vec4(ra, ga, ba, 1.0);\n }\n);\n#endif\n\n@implementation GPUImageOverlayBlendFilter\n\n- (id)init;\n{\n  if (!(self = [super initWithFragmentShaderFromString:kGPUImageOverlayBlendFragmentShaderString]))\n  {\n\t\treturn nil;\n  }\n  \n  return self;\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageParallelCoordinateLineTransformFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n// This is an accumulator that uses a Hough transform in parallel coordinate space to identify probable lines in a scene.\n//\n// It is entirely based on the work of the Graph@FIT research group at the Brno University of Technology and their publications:\n// M. Dubská, J. Havel, and A. Herout. Real-Time Detection of Lines using Parallel Coordinates and OpenGL. Proceedings of SCCG 2011, Bratislava, SK, p. 7.\n// M. Dubská, J. Havel, and A. Herout. PClines — Line detection using parallel coordinates. 2011 IEEE Conference on Computer Vision and Pattern Recognition (CVPR), p. 1489- 1494.\n\n@interface GPUImageParallelCoordinateLineTransformFilter : GPUImageFilter\n{\n    GLubyte *rawImagePixels;\n    GLfloat *lineCoordinates;\n    unsigned int maxLinePairsToRender, linePairsToRender;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageParallelCoordinateLineTransformFilter.m",
    "content": "#import \"GPUImageParallelCoordinateLineTransformFilter.h\"\n\nNSString *const kGPUImageHoughAccumulationVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n \n void main()\n {\n     gl_Position = position;\n }\n);\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageHoughAccumulationFragmentShaderString = SHADER_STRING\n(\n const lowp float scalingFactor = 1.0 / 256.0;\n \n void main()\n {\n     gl_FragColor = vec4(0.004, 0.004, 0.004, 1.0);\n }\n);\n\n// highp - 16-bit, floating point range: -2^62 to 2^62, integer range: -2^16 to 2^16\n// NOTE: See below for where I'm tacking on the required extension as a prefix\nNSString *const kGPUImageHoughAccumulationFBOReadFragmentShaderString = SHADER_STRING\n(\n const lowp float scalingFactor = 0.004;\n// const lowp float scalingFactor = 0.1;\n\n void main()\n {\n     mediump vec4 fragmentData = gl_LastFragData[0];\n     \n     fragmentData.r = fragmentData.r + scalingFactor;\n     fragmentData.g = scalingFactor * floor(fragmentData.r) + fragmentData.g;\n     fragmentData.b = scalingFactor * floor(fragmentData.g) + fragmentData.b;\n     fragmentData.a = scalingFactor * floor(fragmentData.b) + fragmentData.a;\n     \n     fragmentData = fract(fragmentData);\n     \n     gl_FragColor = vec4(fragmentData.rgb, 1.0);\n }\n);\n\n#else\nNSString *const kGPUImageHoughAccumulationFragmentShaderString = SHADER_STRING\n(\n const float scalingFactor = 1.0 / 256.0;\n \n void main()\n {\n     gl_FragColor = vec4(0.004, 0.004, 0.004, 1.0);\n }\n);\n\nNSString *const kGPUImageHoughAccumulationFBOReadFragmentShaderString = SHADER_STRING\n(\n const float scalingFactor = 1.0 / 256.0;\n \n void main()\n {\n     //     gl_FragColor = vec4(scalingFactor, scalingFactor, scalingFactor, 1.0);\n     gl_FragColor = vec4(0.004, 0.004, 0.004, 1.0);\n }\n);\n#endif\n\n@interface GPUImageParallelCoordinateLineTransformFilter()\n// Rendering\n- (void)generateLineCoordinates;\n\n@end\n\n@implementation GPUImageParallelCoordinateLineTransformFilter\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    NSString *fragmentShaderToUse = nil;\n    \n    if ([GPUImageContext deviceSupportsFramebufferReads])\n    {\n        fragmentShaderToUse = [NSString stringWithFormat:@\"#extension GL_EXT_shader_framebuffer_fetch : require\\n %@\",kGPUImageHoughAccumulationFBOReadFragmentShaderString];\n    }\n    else\n    {\n        fragmentShaderToUse = kGPUImageHoughAccumulationFragmentShaderString;\n    }\n\n    if (!(self = [super initWithVertexShaderFromString:kGPUImageHoughAccumulationVertexShaderString fragmentShaderFromString:fragmentShaderToUse]))\n    {\n        return nil;\n    }\n    \n    \n    return self;\n}\n\n// TODO: have this be regenerated on change of image size\n- (void)dealloc;\n{\n    free(rawImagePixels);\n    free(lineCoordinates);\n}\n\n- (void)initializeAttributes;\n{\n    [filterProgram addAttribute:@\"position\"];\n}\n\n#pragma mark -\n#pragma mark Rendering\n\n#define MAXLINESCALINGFACTOR 4\n\n- (void)generateLineCoordinates;\n{\n    unsigned int imageByteSize = inputTextureSize.width * inputTextureSize.height * 4;\n    rawImagePixels = (GLubyte *)malloc(imageByteSize);\n\n    maxLinePairsToRender = (inputTextureSize.width * inputTextureSize.height) / MAXLINESCALINGFACTOR;\n    lineCoordinates = calloc(maxLinePairsToRender * 8, sizeof(GLfloat));\n}\n\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;\n{\n    if (lineCoordinates == NULL)\n    {\n        
[self generateLineCoordinates];\n    }\n    \n    [self renderToTextureWithVertices:NULL textureCoordinates:NULL];\n    \n    [self informTargetsAboutNewFrameAtTime:frameTime];\n}\n\n- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;\n{\n    // we need a normal color texture for this filter\n    NSAssert(self.outputTextureOptions.internalFormat == GL_RGBA, @\"The output texture format for this filter must be GL_RGBA.\");\n    NSAssert(self.outputTextureOptions.type == GL_UNSIGNED_BYTE, @\"The type of the output texture of this filter must be GL_UNSIGNED_BYTE.\");\n    \n    if (self.preventRendering)\n    {\n        [firstInputFramebuffer unlock];\n        return;\n    }\n    \n    // Grab the edge points from the previous frame and create the parallel coordinate lines for them\n    // This would be a great place to have a working histogram pyramid implementation\n    \n    [GPUImageContext useImageProcessingContext];\n    [firstInputFramebuffer activateFramebuffer];\n\n    glFinish();\n    glReadPixels(0, 0, inputTextureSize.width, inputTextureSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels);\n    \n    CGFloat xAspectMultiplier = 1.0, yAspectMultiplier = 1.0;\n    \n//    if (inputTextureSize.width > inputTextureSize.height)\n//    {\n//        yAspectMultiplier = inputTextureSize.height / inputTextureSize.width;\n//    }\n//    else\n//    {\n//        xAspectMultiplier = inputTextureSize.width / inputTextureSize.height;\n//    }\n    \n//    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();\n    \n    unsigned int imageByteSize = inputTextureSize.width * inputTextureSize.height * 4;\n    unsigned int imageWidth = inputTextureSize.width * 4;\n    \n    linePairsToRender = 0;\n    unsigned int currentByte = 0;\n    unsigned int lineStorageIndex = 0;\n    unsigned int maxLineStorageIndex = maxLinePairsToRender * 8 - 8;\n    \n    GLfloat minY = 100, maxY = -100, minX = 100, maxX = -100;\n    while (currentByte < imageByteSize)\n    {\n        GLubyte colorByte = rawImagePixels[currentByte];        \n        \n        if (colorByte > 0)\n        {\n            unsigned int xCoordinate = currentByte % imageWidth;\n            unsigned int yCoordinate = currentByte / imageWidth;\n            \n            CGFloat normalizedXCoordinate = (-1.0 + 2.0 * (CGFloat)(xCoordinate / 4) / inputTextureSize.width) * xAspectMultiplier;\n            CGFloat normalizedYCoordinate = (-1.0 + 2.0 * (CGFloat)(yCoordinate) / inputTextureSize.height) * yAspectMultiplier;\n            \n            minY = MIN(minY, normalizedYCoordinate);\n            maxY = MAX(maxY, normalizedYCoordinate);\n            minX = MIN(minX, normalizedXCoordinate);\n            maxX = MAX(maxX, normalizedXCoordinate);\n            \n//            NSLog(@\"Parallel line coordinates: (%f, %f) - (%f, %f) - (%f, %f)\", -1.0, -normalizedYCoordinate, 0.0, normalizedXCoordinate, 1.0, normalizedYCoordinate);\n            // T space coordinates, (-d, -y) to (0, x)\n            lineCoordinates[lineStorageIndex++] = -1.0;\n            lineCoordinates[lineStorageIndex++] = -normalizedYCoordinate;\n            lineCoordinates[lineStorageIndex++] = 0.0;\n            lineCoordinates[lineStorageIndex++] = normalizedXCoordinate;\n\n            // S space coordinates, (0, x) to (d, y)\n            lineCoordinates[lineStorageIndex++] = 0.0;\n            lineCoordinates[lineStorageIndex++] = normalizedXCoordinate;\n            lineCoordinates[lineStorageIndex++] = 1.0;\n      
      lineCoordinates[lineStorageIndex++] = normalizedYCoordinate;\n\n            linePairsToRender++;\n            \n            linePairsToRender = MIN(linePairsToRender, maxLinePairsToRender);\n            lineStorageIndex = MIN(lineStorageIndex, maxLineStorageIndex);\n        }\n        currentByte +=8;\n    }\n    \n//    NSLog(@\"Line pairs to render: %d out of max: %d\", linePairsToRender, maxLinePairsToRender);\n    \n//    CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);\n//    NSLog(@\"Line generation processing time : %f ms\", 1000.0 * currentFrameTime);\n\n    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];\n    [outputFramebuffer activateFramebuffer];\n\n    if (usingNextFrameForImageCapture)\n    {\n        [outputFramebuffer lock];\n    }\n\n    [GPUImageContext setActiveShaderProgram:filterProgram];\n    [self setUniformsForProgramAtIndex:0];\n\n    glClearColor(0.0, 0.0, 0.0, 1.0);\n    glClear(GL_COLOR_BUFFER_BIT);\n    \n    if (![GPUImageContext deviceSupportsFramebufferReads])\n    {\n        glBlendEquation(GL_FUNC_ADD);\n        glBlendFunc(GL_ONE, GL_ONE);\n        glEnable(GL_BLEND);\n    }\n    else\n    {\n    }\n\n    glLineWidth(1);\n\n\tglVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, lineCoordinates);\n    glDrawArrays(GL_LINES, 0, (linePairsToRender * 4));\n    \n    if (![GPUImageContext deviceSupportsFramebufferReads])\n    {\n        glDisable(GL_BLEND);\n    }\n    [firstInputFramebuffer unlock];\n    if (usingNextFrameForImageCapture)\n    {\n        dispatch_semaphore_signal(imageCaptureSemaphore);\n    }\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImagePerlinNoiseFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImagePerlinNoiseFilter : GPUImageFilter \n{\n    GLint scaleUniform, colorStartUniform, colorFinishUniform;\n}\n\n@property (readwrite, nonatomic) GPUVector4 colorStart;\n@property (readwrite, nonatomic) GPUVector4 colorFinish;\n\n@property (readwrite, nonatomic) float scale;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImagePerlinNoiseFilter.m",
    "content": "#import \"GPUImagePerlinNoiseFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImagePerlinNoiseFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n varying highp vec2 textureCoordinate;\n uniform float scale;\n \n uniform vec4 colorStart;\n uniform vec4 colorFinish;\n \n //\n // Description : Array and textureless GLSL 2D/3D/4D simplex\n // noise functions.\n // Author : Ian McEwan, Ashima Arts.\n // Maintainer : ijm\n // Lastmod : 20110822 (ijm)\n // License : Copyright (C) 2011 Ashima Arts. All rights reserved.\n // Distributed under the MIT License. See LICENSE file.\n // https://github.com/ashima/webgl-noise\n //\n \n vec4 mod289(vec4 x)\n{\n    return x - floor(x * (1.0 / 289.0)) * 289.0;\n}\n \n vec4 permute(vec4 x)\n{\n    return mod289(((x*34.0)+1.0)*x);\n}\n \n vec4 taylorInvSqrt(vec4 r)\n{\n    return 1.79284291400159 - 0.85373472095314 * r;\n}\n \n vec2 fade(vec2 t) {\n     return t*t*t*(t*(t*6.0-15.0)+10.0);\n }\n \n // Classic Perlin noise\n float cnoise(vec2 P)\n{\n    vec4 Pi = floor(P.xyxy) + vec4(0.0, 0.0, 1.0, 1.0);\n    vec4 Pf = fract(P.xyxy) - vec4(0.0, 0.0, 1.0, 1.0);\n    Pi = mod289(Pi); // To avoid truncation effects in permutation\n    vec4 ix = Pi.xzxz;\n    vec4 iy = Pi.yyww;\n    vec4 fx = Pf.xzxz;\n    vec4 fy = Pf.yyww;\n    \n    vec4 i = permute(permute(ix) + iy);\n    \n    vec4 gx = fract(i * (1.0 / 41.0)) * 2.0 - 1.0 ;\n    vec4 gy = abs(gx) - 0.5 ;\n    vec4 tx = floor(gx + 0.5);\n    gx = gx - tx;\n    \n    vec2 g00 = vec2(gx.x,gy.x);\n    vec2 g10 = vec2(gx.y,gy.y);\n    vec2 g01 = vec2(gx.z,gy.z);\n    vec2 g11 = vec2(gx.w,gy.w);\n    \n    vec4 norm = taylorInvSqrt(vec4(dot(g00, g00), dot(g01, g01), dot(g10, g10), dot(g11, g11)));\n    g00 *= norm.x;  \n    g01 *= norm.y;  \n    g10 *= norm.z;  \n    g11 *= norm.w;  \n    \n    float n00 = dot(g00, vec2(fx.x, fy.x));\n    float n10 = dot(g10, vec2(fx.y, fy.y));\n    float n01 = dot(g01, vec2(fx.z, fy.z));\n    float n11 = dot(g11, vec2(fx.w, fy.w));\n    \n    vec2 fade_xy = fade(Pf.xy);\n    vec2 n_x = mix(vec2(n00, n01), vec2(n10, n11), fade_xy.x);\n    float n_xy = mix(n_x.x, n_x.y, fade_xy.y);\n    return 2.3 * n_xy;\n}\n \n \n void main()\n {\n     \n     float n1 = (cnoise(textureCoordinate * scale) + 1.0) / 2.0;\n     \n     vec4 colorDiff = colorFinish - colorStart;\n     vec4 color = colorStart + colorDiff * n1;\n     \n     gl_FragColor = color;\n }\n);\n#else\nNSString *const kGPUImagePerlinNoiseFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n uniform float scale;\n \n uniform vec4 colorStart;\n uniform vec4 colorFinish;\n \n //\n // Description : Array and textureless GLSL 2D/3D/4D simplex\n // noise functions.\n // Author : Ian McEwan, Ashima Arts.\n // Maintainer : ijm\n // Lastmod : 20110822 (ijm)\n // License : Copyright (C) 2011 Ashima Arts. All rights reserved.\n // Distributed under the MIT License. 
See LICENSE file.\n // https://github.com/ashima/webgl-noise\n //\n \n vec4 mod289(vec4 x)\n{\n    return x - floor(x * (1.0 / 289.0)) * 289.0;\n}\n \n vec4 permute(vec4 x)\n{\n    return mod289(((x*34.0)+1.0)*x);\n}\n \n vec4 taylorInvSqrt(vec4 r)\n{\n    return 1.79284291400159 - 0.85373472095314 * r;\n}\n \n vec2 fade(vec2 t) {\n     return t*t*t*(t*(t*6.0-15.0)+10.0);\n }\n \n // Classic Perlin noise\n float cnoise(vec2 P)\n{\n    vec4 Pi = floor(P.xyxy) + vec4(0.0, 0.0, 1.0, 1.0);\n    vec4 Pf = fract(P.xyxy) - vec4(0.0, 0.0, 1.0, 1.0);\n    Pi = mod289(Pi); // To avoid truncation effects in permutation\n    vec4 ix = Pi.xzxz;\n    vec4 iy = Pi.yyww;\n    vec4 fx = Pf.xzxz;\n    vec4 fy = Pf.yyww;\n    \n    vec4 i = permute(permute(ix) + iy);\n    \n    vec4 gx = fract(i * (1.0 / 41.0)) * 2.0 - 1.0 ;\n    vec4 gy = abs(gx) - 0.5 ;\n    vec4 tx = floor(gx + 0.5);\n    gx = gx - tx;\n    \n    vec2 g00 = vec2(gx.x,gy.x);\n    vec2 g10 = vec2(gx.y,gy.y);\n    vec2 g01 = vec2(gx.z,gy.z);\n    vec2 g11 = vec2(gx.w,gy.w);\n    \n    vec4 norm = taylorInvSqrt(vec4(dot(g00, g00), dot(g01, g01), dot(g10, g10), dot(g11, g11)));\n    g00 *= norm.x;\n    g01 *= norm.y;\n    g10 *= norm.z;\n    g11 *= norm.w;\n    \n    float n00 = dot(g00, vec2(fx.x, fy.x));\n    float n10 = dot(g10, vec2(fx.y, fy.y));\n    float n01 = dot(g01, vec2(fx.z, fy.z));\n    float n11 = dot(g11, vec2(fx.w, fy.w));\n    \n    vec2 fade_xy = fade(Pf.xy);\n    vec2 n_x = mix(vec2(n00, n01), vec2(n10, n11), fade_xy.x);\n    float n_xy = mix(n_x.x, n_x.y, fade_xy.y);\n    return 2.3 * n_xy;\n }\n \n void main()\n {\n     \n     float n1 = (cnoise(textureCoordinate * scale) + 1.0) / 2.0;\n     \n     vec4 colorDiff = colorFinish - colorStart;\n     vec4 color = colorStart + colorDiff * n1;\n     \n     gl_FragColor = color;\n }\n);\n#endif\n\n\n@implementation GPUImagePerlinNoiseFilter\n\n@synthesize scale = _scale, colorStart = _colorStart, colorFinish = _colorFinish;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImagePerlinNoiseFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    scaleUniform = [filterProgram uniformIndex:@\"scale\"];\n    \n    colorStartUniform = [filterProgram uniformIndex:@\"colorStart\"];\n    colorFinishUniform = [filterProgram uniformIndex:@\"colorFinish\"];\n    \n    [self setScale:8.0];\n    \n    [self setColorStart:(GPUVector4){0.0, 0.0, 0.0, 1.0}];\n    [self setColorFinish:(GPUVector4){1.0, 1.0, 1.0, 1.0}];    \n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setScale:(float)scale \n{\n    _scale = scale;\n    \n    [self setFloat:_scale forUniform:scaleUniform program:filterProgram];\n}\n\n- (void)setColorStart:(GPUVector4)colorStart \n{\n    _colorStart = colorStart;\n    \n    [self setVec4:_colorStart forUniform:colorStartUniform program:filterProgram];\n}\n\n- (void)setColorFinish:(GPUVector4)colorFinish \n{\n    _colorFinish = colorFinish;\n\n    [self setVec4:_colorFinish forUniform:colorFinishUniform program:filterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImagePinchDistortionFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n/** Creates a pinch distortion of the image\n */\n@interface GPUImagePinchDistortionFilter : GPUImageFilter\n{\n    GLint aspectRatioUniform, radiusUniform, centerUniform, scaleUniform;\n}\n\n/** The center about which to apply the distortion, with a default of (0.5, 0.5)\n */\n@property(readwrite, nonatomic) CGPoint center;\n/** The radius of the distortion, ranging from 0.0 to 2.0, with a default of 1.0\n */\n@property(readwrite, nonatomic) CGFloat radius;\n/** The amount of distortion to apply, from -2.0 to 2.0, with a default of 0.5\n */\n@property(readwrite, nonatomic) CGFloat scale;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImagePinchDistortionFilter.m",
    "content": "#import \"GPUImagePinchDistortionFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImagePinchDistortionFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform highp float aspectRatio;\n uniform highp vec2 center;\n uniform highp float radius;\n uniform highp float scale;\n \n void main()\n {\n     highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n     highp float dist = distance(center, textureCoordinateToUse);\n     textureCoordinateToUse = textureCoordinate;\n     \n     if (dist < radius)\n     {\n         textureCoordinateToUse -= center;\n         highp float percent = 1.0 + ((0.5 - dist) / 0.5) * scale;\n         textureCoordinateToUse = textureCoordinateToUse * percent;\n         textureCoordinateToUse += center;\n         \n         gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );\n     }\n     else\n     {\n         gl_FragColor = texture2D(inputImageTexture, textureCoordinate );\n     }\n }\n);\n#else\nNSString *const kGPUImagePinchDistortionFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform float aspectRatio;\n uniform vec2 center;\n uniform float radius;\n uniform float scale;\n \n void main()\n {\n     vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n     float dist = distance(center, textureCoordinateToUse);\n     textureCoordinateToUse = textureCoordinate;\n     \n     if (dist < radius)\n     {\n         textureCoordinateToUse -= center;\n         float percent = 1.0 + ((0.5 - dist) / 0.5) * scale;\n         textureCoordinateToUse = textureCoordinateToUse * percent;\n         textureCoordinateToUse += center;\n         \n         gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );\n     }\n     else\n     {\n         gl_FragColor = texture2D(inputImageTexture, textureCoordinate );\n     }\n }\n);\n#endif\n\n@interface GPUImagePinchDistortionFilter ()\n\n- (void)adjustAspectRatio;\n\n@property (readwrite, nonatomic) CGFloat aspectRatio;\n\n@end\n\n@implementation GPUImagePinchDistortionFilter\n\n@synthesize aspectRatio = _aspectRatio;\n@synthesize center = _center;\n@synthesize radius = _radius;\n@synthesize scale = _scale;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImagePinchDistortionFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    aspectRatioUniform = [filterProgram uniformIndex:@\"aspectRatio\"];\n    radiusUniform = [filterProgram uniformIndex:@\"radius\"];\n    scaleUniform = [filterProgram uniformIndex:@\"scale\"];\n    centerUniform = [filterProgram uniformIndex:@\"center\"];\n\n    self.radius = 1.0;\n    self.scale = 0.5;\n    self.center = CGPointMake(0.5, 0.5);\n\n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)adjustAspectRatio;\n{\n    if (GPUImageRotationSwapsWidthAndHeight(inputRotation))\n    {\n        [self setAspectRatio:(inputTextureSize.width / inputTextureSize.height)];\n    }\n    else\n    {\n        [self setAspectRatio:(inputTextureSize.height / inputTextureSize.width)];\n    }\n}\n\n- (void)forceProcessingAtSize:(CGSize)frameSize;\n{\n    [super forceProcessingAtSize:frameSize];\n    [self adjustAspectRatio];\n}\n\n- 
(void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n{\n    CGSize oldInputSize = inputTextureSize;\n    [super setInputSize:newSize atIndex:textureIndex];\n    \n    if ( (!CGSizeEqualToSize(oldInputSize, inputTextureSize)) && (!CGSizeEqualToSize(newSize, CGSizeZero)) )\n    {\n        [self adjustAspectRatio];\n    }\n}\n\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n    [super setInputRotation:newInputRotation atIndex:textureIndex];\n    [self setCenter:self.center];\n    [self adjustAspectRatio];\n}\n\n- (void)setAspectRatio:(CGFloat)newValue;\n{\n    _aspectRatio = newValue;\n    \n    [self setFloat:_aspectRatio forUniform:aspectRatioUniform program:filterProgram];\n}\n\n- (void)setRadius:(CGFloat)newValue;\n{\n    _radius = newValue;\n    \n    [self setFloat:_radius forUniform:radiusUniform program:filterProgram];\n}\n\n- (void)setScale:(CGFloat)newValue;\n{\n    _scale = newValue;\n\n    [self setFloat:_scale forUniform:scaleUniform program:filterProgram];\n}\n\n- (void)setCenter:(CGPoint)newValue;\n{\n    _center = newValue;\n    \n    CGPoint rotatedPoint = [self rotatedPoint:_center forRotation:inputRotation];\n    [self setPoint:rotatedPoint forUniform:centerUniform program:filterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImagePixellateFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImagePixellateFilter : GPUImageFilter\n{\n    GLint fractionalWidthOfAPixelUniform, aspectRatioUniform;\n}\n\n// The fractional width of the image to use as a size for the pixels in the resulting image. Values below one pixel width in the source image are ignored.\n@property(readwrite, nonatomic) CGFloat fractionalWidthOfAPixel;\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImagePixellateFilter.m",
    "content": "#import \"GPUImagePixellateFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImagePixellationFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform highp float fractionalWidthOfPixel;\n uniform highp float aspectRatio;\n\n void main()\n {\n     highp vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);\n     \n     highp vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor;\n     gl_FragColor = texture2D(inputImageTexture, samplePos );\n }\n);\n#else\nNSString *const kGPUImagePixellationFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform float fractionalWidthOfPixel;\n uniform float aspectRatio;\n \n void main()\n {\n     vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);\n     \n     vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor;\n     gl_FragColor = texture2D(inputImageTexture, samplePos );\n }\n);\n#endif\n\n@interface GPUImagePixellateFilter ()\n\n@property (readwrite, nonatomic) CGFloat aspectRatio;\n\n- (void)adjustAspectRatio;\n\n@end\n\n@implementation GPUImagePixellateFilter\n\n@synthesize fractionalWidthOfAPixel = _fractionalWidthOfAPixel;\n@synthesize aspectRatio = _aspectRatio;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [self initWithFragmentShaderFromString:kGPUImagePixellationFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;\n{\n    if (!(self = [super initWithFragmentShaderFromString:fragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    fractionalWidthOfAPixelUniform = [filterProgram uniformIndex:@\"fractionalWidthOfPixel\"];\n    aspectRatioUniform = [filterProgram uniformIndex:@\"aspectRatio\"];\n\n    self.fractionalWidthOfAPixel = 0.05;\n    \n    return self;\n}\n\n- (void)adjustAspectRatio;\n{\n    if (GPUImageRotationSwapsWidthAndHeight(inputRotation))\n    {\n        [self setAspectRatio:(inputTextureSize.width / inputTextureSize.height)];\n    }\n    else\n    {\n        [self setAspectRatio:(inputTextureSize.height / inputTextureSize.width)];\n    }\n}\n\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n    [super setInputRotation:newInputRotation atIndex:textureIndex];    \n    [self adjustAspectRatio];\n}\n\n- (void)forceProcessingAtSize:(CGSize)frameSize;\n{\n    [super forceProcessingAtSize:frameSize];\n    [self adjustAspectRatio];\n}\n\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n{\n    CGSize oldInputSize = inputTextureSize;\n    [super setInputSize:newSize atIndex:textureIndex];\n    \n    if ( (!CGSizeEqualToSize(oldInputSize, inputTextureSize)) && (!CGSizeEqualToSize(newSize, CGSizeZero)) )\n    {\n        [self adjustAspectRatio];\n    }\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setFractionalWidthOfAPixel:(CGFloat)newValue;\n{\n    CGFloat singlePixelSpacing;\n    if (inputTextureSize.width != 0.0)\n    {\n        singlePixelSpacing = 1.0 / inputTextureSize.width;\n    }\n    else\n    {\n        singlePixelSpacing = 1.0 / 2048.0;\n    }\n    \n    if (newValue < singlePixelSpacing)\n    {\n        _fractionalWidthOfAPixel = 
singlePixelSpacing;\n    }\n    else\n    {\n        _fractionalWidthOfAPixel = newValue;\n    }\n    \n    [self setFloat:_fractionalWidthOfAPixel forUniform:fractionalWidthOfAPixelUniform program:filterProgram];\n}\n\n- (void)setAspectRatio:(CGFloat)newValue;\n{\n    _aspectRatio = newValue;\n\n    [self setFloat:_aspectRatio forUniform:aspectRatioUniform program:filterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImagePixellatePositionFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImagePixellatePositionFilter : GPUImageFilter\n{\n    GLint fractionalWidthOfAPixelUniform, aspectRatioUniform, centerUniform, radiusUniform;\n}\n\n// The fractional width of the image to use as a size for the pixels in the resulting image. Values below one pixel width in the source image are ignored.\n@property(readwrite, nonatomic) CGFloat fractionalWidthOfAPixel;\n\n// the center point to start pixelation in texture coordinates, default 0.5, 0.5\n@property(readwrite, nonatomic) CGPoint center;\n\n// the radius (0.0 - 1.0) in which to pixelate, default 1.0\n@property(readwrite, nonatomic) CGFloat radius;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImagePixellatePositionFilter.m",
    "content": "#import \"GPUImagePixellatePositionFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImagePixellationPositionFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform highp float fractionalWidthOfPixel;\n uniform highp float aspectRatio;\n uniform lowp vec2 pixelateCenter;\n uniform highp float pixelateRadius;\n \n void main()\n {\n     highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n     highp float dist = distance(pixelateCenter, textureCoordinateToUse);\n\n     if (dist < pixelateRadius)\n     {\n         highp vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);\n         highp vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor;\n         gl_FragColor = texture2D(inputImageTexture, samplePos );\n     }\n     else\n     {\n         gl_FragColor = texture2D(inputImageTexture, textureCoordinate );\n     }\n }\n);\n#else\nNSString *const kGPUImagePixellationPositionFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform float fractionalWidthOfPixel;\n uniform float aspectRatio;\n uniform vec2 pixelateCenter;\n uniform float pixelateRadius;\n \n void main()\n {\n     vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n     float dist = distance(pixelateCenter, textureCoordinateToUse);\n     \n     if (dist < pixelateRadius)\n     {\n         vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);\n         vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor;\n         gl_FragColor = texture2D(inputImageTexture, samplePos );\n     }\n     else\n     {\n         gl_FragColor = texture2D(inputImageTexture, textureCoordinate );\n     }\n }\n);\n#endif\n\n@interface GPUImagePixellatePositionFilter ()\n\n- (void)adjustAspectRatio;\n\n@property (readwrite, nonatomic) CGFloat aspectRatio;\n\n@end\n\n@implementation GPUImagePixellatePositionFilter\n\n@synthesize fractionalWidthOfAPixel = _fractionalWidthOfAPixel;\n@synthesize aspectRatio = _aspectRatio;\n@synthesize center = _center;\n@synthesize radius = _radius;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [self initWithFragmentShaderFromString:kGPUImagePixellationPositionFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;\n{\n    if (!(self = [super initWithFragmentShaderFromString:fragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    fractionalWidthOfAPixelUniform = [filterProgram uniformIndex:@\"fractionalWidthOfPixel\"];\n    aspectRatioUniform = [filterProgram uniformIndex:@\"aspectRatio\"];\n    centerUniform = [filterProgram uniformIndex:@\"pixelateCenter\"];\n    radiusUniform = [filterProgram uniformIndex:@\"pixelateRadius\"];\n    \n    self.fractionalWidthOfAPixel = 0.05;\n    self.center = CGPointMake(0.5f, 0.5f);\n    self.radius = 0.25f;\n    \n    return self;\n}\n\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n{\n    CGSize oldInputSize = inputTextureSize;\n    [super setInputSize:newSize atIndex:textureIndex];\n    \n    if ( 
(!CGSizeEqualToSize(oldInputSize, inputTextureSize)) && (!CGSizeEqualToSize(newSize, CGSizeZero)) )\n    {\n        [self adjustAspectRatio];\n    }\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n    [super setInputRotation:newInputRotation atIndex:textureIndex];\n    [self setCenter:self.center];\n    [self adjustAspectRatio];\n}\n\n- (void)adjustAspectRatio;\n{\n    if (GPUImageRotationSwapsWidthAndHeight(inputRotation))\n    {\n        [self setAspectRatio:(inputTextureSize.width / inputTextureSize.height)];\n    }\n    else\n    {\n        [self setAspectRatio:(inputTextureSize.height / inputTextureSize.width)];\n    }\n}\n\n- (void)forceProcessingAtSize:(CGSize)frameSize;\n{\n    [super forceProcessingAtSize:frameSize];\n    [self adjustAspectRatio];\n}\n\n- (void)setFractionalWidthOfAPixel:(CGFloat)newValue;\n{\n    CGFloat singlePixelSpacing;\n    if (inputTextureSize.width != 0.0)\n    {\n        singlePixelSpacing = 1.0 / inputTextureSize.width;\n    }\n    else\n    {\n        singlePixelSpacing = 1.0 / 2048.0;\n    }\n    \n    if (newValue < singlePixelSpacing)\n    {\n        _fractionalWidthOfAPixel = singlePixelSpacing;\n    }\n    else\n    {\n        _fractionalWidthOfAPixel = newValue;\n    }\n    \n    [self setFloat:_fractionalWidthOfAPixel forUniform:fractionalWidthOfAPixelUniform program:filterProgram];\n}\n\n- (void)setAspectRatio:(CGFloat)newValue;\n{\n    _aspectRatio = newValue;\n\n    [self setFloat:_aspectRatio forUniform:aspectRatioUniform program:filterProgram];\n}\n\n- (void)setCenter:(CGPoint)center\n{\n    _center = center;\n    CGPoint rotatedPoint = [self rotatedPoint:center forRotation:inputRotation];    \n    [self setPoint:rotatedPoint forUniform:centerUniform program:filterProgram];\n}\n\n- (void)setRadius:(CGFloat)radius\n{\n    _radius = radius;\n    \n    [self setFloat:_radius forUniform:radiusUniform program:filterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImagePoissonBlendFilter.h",
    "content": "#import \"GPUImageTwoInputCrossTextureSamplingFilter.h\"\n#import \"GPUImageFilterGroup.h\"\n\n@interface GPUImagePoissonBlendFilter : GPUImageTwoInputCrossTextureSamplingFilter\n{\n    GLint mixUniform;\n    \n    GPUImageFramebuffer *secondOutputFramebuffer;\n}\n\n// Mix ranges from 0.0 (only image 1) to 1.0 (only image 2 gradients), with 1.0 as the normal level\n@property(readwrite, nonatomic) CGFloat mix;\n\n// The number of times to propagate the gradients.\n// Crank this up to 100 or even 1000 if you want to get anywhere near convergence.  Yes, this will be slow.\n@property(readwrite, nonatomic) NSUInteger numIterations;\n\n@end"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImagePoissonBlendFilter.m",
    "content": "#import \"GPUImagePoissonBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImagePoissonBlendFragmentShaderString = SHADER_STRING\n(\n precision mediump float;\n \n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n varying vec2 topTextureCoordinate;\n varying vec2 bottomTextureCoordinate;\n \n varying vec2 textureCoordinate2;\n varying vec2 leftTextureCoordinate2;\n varying vec2 rightTextureCoordinate2;\n varying vec2 topTextureCoordinate2;\n varying vec2 bottomTextureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n uniform lowp float mixturePercent;\n\n void main()\n {\n     vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);\n     vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;\n     vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;\n     vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;\n     vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;\n\n     vec4 centerColor2 = texture2D(inputImageTexture2, textureCoordinate2);\n     vec3 bottomColor2 = texture2D(inputImageTexture2, bottomTextureCoordinate2).rgb;\n     vec3 leftColor2 = texture2D(inputImageTexture2, leftTextureCoordinate2).rgb;\n     vec3 rightColor2 = texture2D(inputImageTexture2, rightTextureCoordinate2).rgb;\n     vec3 topColor2 = texture2D(inputImageTexture2, topTextureCoordinate2).rgb;\n\n     vec3 meanColor = (bottomColor + leftColor + rightColor + topColor) / 4.0;\n     vec3 diffColor = centerColor.rgb - meanColor;\n\n     vec3 meanColor2 = (bottomColor2 + leftColor2 + rightColor2 + topColor2) / 4.0;\n     vec3 diffColor2 = centerColor2.rgb - meanColor2;\n     \n     vec3 gradColor = (meanColor + diffColor2);\n     \n\t gl_FragColor = vec4(mix(centerColor.rgb, gradColor, centerColor2.a * mixturePercent), centerColor.a);\n }\n);\n#else\nNSString *const kGPUImagePoissonBlendFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n varying vec2 topTextureCoordinate;\n varying vec2 bottomTextureCoordinate;\n \n varying vec2 textureCoordinate2;\n varying vec2 leftTextureCoordinate2;\n varying vec2 rightTextureCoordinate2;\n varying vec2 topTextureCoordinate2;\n varying vec2 bottomTextureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n uniform float mixturePercent;\n \n void main()\n {\n     vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);\n     vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;\n     vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;\n     vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;\n     vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;\n     \n     vec4 centerColor2 = texture2D(inputImageTexture2, textureCoordinate2);\n     vec3 bottomColor2 = texture2D(inputImageTexture2, bottomTextureCoordinate2).rgb;\n     vec3 leftColor2 = texture2D(inputImageTexture2, leftTextureCoordinate2).rgb;\n     vec3 rightColor2 = texture2D(inputImageTexture2, rightTextureCoordinate2).rgb;\n     vec3 topColor2 = texture2D(inputImageTexture2, topTextureCoordinate2).rgb;\n     \n     vec3 meanColor = (bottomColor + leftColor + rightColor + topColor) / 4.0;\n     vec3 diffColor = 
centerColor.rgb - meanColor;\n     \n     vec3 meanColor2 = (bottomColor2 + leftColor2 + rightColor2 + topColor2) / 4.0;\n     vec3 diffColor2 = centerColor2.rgb - meanColor2;\n     \n     vec3 gradColor = (meanColor + diffColor2);\n     \n\t gl_FragColor = vec4(mix(centerColor.rgb, gradColor, centerColor2.a * mixturePercent), centerColor.a);\n }\n);\n#endif\n\n@implementation GPUImagePoissonBlendFilter\n\n@synthesize mix = _mix;\n@synthesize numIterations = _numIterations;\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImagePoissonBlendFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    mixUniform = [filterProgram uniformIndex:@\"mixturePercent\"];\n    self.mix = 0.5;\n    \n    self.numIterations = 10;\n    \n    return self;\n}\n\n- (void)setMix:(CGFloat)newValue;\n{\n    _mix = newValue;\n    \n    [self setFloat:_mix forUniform:mixUniform program:filterProgram];\n}\n\n//- (void)setOutputFBO;\n//{\n//    if (self.numIterations % 2 == 1) {\n//        [self setSecondFilterFBO];\n//    } else {\n//        [self setFilterFBO];\n//    }\n//}\n\n- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;\n{\n    // Run the first stage of the two-pass filter\n    [GPUImageContext setActiveShaderProgram:filterProgram];\n    \n    [super renderToTextureWithVertices:vertices textureCoordinates:textureCoordinates];\n    \n    for (int pass = 1; pass < self.numIterations; pass++) {\n        \n        if (pass % 2 == 0) {\n            \n            [GPUImageContext setActiveShaderProgram:filterProgram];\n            \n            // TODO: This will over-unlock the incoming framebuffer\n            [super renderToTextureWithVertices:vertices textureCoordinates:[[self class] textureCoordinatesForRotation:kGPUImageNoRotation]];\n        } else {\n            // Run the second stage of the two-pass filter\n            secondOutputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];\n            [secondOutputFramebuffer activateFramebuffer];\n            \n            [GPUImageContext setActiveShaderProgram:filterProgram];\n            \n            glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);\n            glClear(GL_COLOR_BUFFER_BIT);\n            \n            glActiveTexture(GL_TEXTURE2);\n            glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);\n            glUniform1i(filterInputTextureUniform, 2);\n            \n            glActiveTexture(GL_TEXTURE3);\n            glBindTexture(GL_TEXTURE_2D, [secondInputFramebuffer texture]);\n            glUniform1i(filterInputTextureUniform2, 3);\n            \n            glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);\n            glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:kGPUImageNoRotation]);\n            glVertexAttribPointer(filterSecondTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation2]);\n            \n            glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);            \n        }\n    }\n}\n\n@end"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImagePolarPixellateFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImagePolarPixellateFilter : GPUImageFilter {\n    GLint centerUniform, pixelSizeUniform;\n}\n\n// The center about which to apply the distortion, with a default of (0.5, 0.5)\n@property(readwrite, nonatomic) CGPoint center;\n// The amount of distortion to apply, from (-2.0, -2.0) to (2.0, 2.0), with a default of (0.05, 0.05)\n@property(readwrite, nonatomic) CGSize pixelSize;\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImagePolarPixellateFilter.m",
    "content": "#import \"GPUImagePolarPixellateFilter.h\"\n\n// @fattjake based on vid by toneburst\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImagePolarPixellateFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform highp vec2 center;\n uniform highp vec2 pixelSize;\n\n \n void main()\n {\n     highp vec2 normCoord = 2.0 * textureCoordinate - 1.0;\n     highp vec2 normCenter = 2.0 * center - 1.0;\n     \n     normCoord -= normCenter;\n     \n     highp float r = length(normCoord); // to polar coords \n     highp float phi = atan(normCoord.y, normCoord.x); // to polar coords \n     \n     r = r - mod(r, pixelSize.x) + 0.03;\n     phi = phi - mod(phi, pixelSize.y);\n           \n     normCoord.x = r * cos(phi);\n     normCoord.y = r * sin(phi);\n      \n     normCoord += normCenter;\n     \n     mediump vec2 textureCoordinateToUse = normCoord / 2.0 + 0.5;\n     \n     gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );\n     \n }\n);\n#else\nNSString *const kGPUImagePolarPixellateFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform vec2 center;\n uniform vec2 pixelSize;\n \n \n void main()\n {\n     vec2 normCoord = 2.0 * textureCoordinate - 1.0;\n     vec2 normCenter = 2.0 * center - 1.0;\n     \n     normCoord -= normCenter;\n     \n     float r = length(normCoord); // to polar coords\n     float phi = atan(normCoord.y, normCoord.x); // to polar coords\n     \n     r = r - mod(r, pixelSize.x) + 0.03;\n     phi = phi - mod(phi, pixelSize.y);\n     \n     normCoord.x = r * cos(phi);\n     normCoord.y = r * sin(phi);\n     \n     normCoord += normCenter;\n     \n     vec2 textureCoordinateToUse = normCoord / 2.0 + 0.5;\n     \n     gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );\n     \n }\n);\n#endif\n\n\n@implementation GPUImagePolarPixellateFilter\n\n@synthesize center = _center;\n\n@synthesize pixelSize = _pixelSize;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImagePolarPixellateFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    pixelSizeUniform = [filterProgram uniformIndex:@\"pixelSize\"];\n    centerUniform = [filterProgram uniformIndex:@\"center\"];\n    \n    \n    self.pixelSize = CGSizeMake(0.05, 0.05);\n    self.center = CGPointMake(0.5, 0.5);\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n    [super setInputRotation:newInputRotation atIndex:textureIndex];\n    [self setCenter:self.center];\n}\n\n- (void)setPixelSize:(CGSize)pixelSize \n{\n    _pixelSize = pixelSize;\n    \n    [self setSize:_pixelSize forUniform:pixelSizeUniform program:filterProgram];\n}\n\n- (void)setCenter:(CGPoint)newValue;\n{\n    _center = newValue;\n    \n    CGPoint rotatedPoint = [self rotatedPoint:_center forRotation:inputRotation];\n    [self setPoint:rotatedPoint forUniform:centerUniform program:filterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImagePolkaDotFilter.h",
    "content": "#import \"GPUImagePixellateFilter.h\"\n\n@interface GPUImagePolkaDotFilter : GPUImagePixellateFilter\n{\n    GLint dotScalingUniform;\n}\n\n@property(readwrite, nonatomic) CGFloat dotScaling;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImagePolkaDotFilter.m",
    "content": "#import \"GPUImagePolkaDotFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImagePolkaDotFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform highp float fractionalWidthOfPixel;\n uniform highp float aspectRatio;\n uniform highp float dotScaling;\n \n void main()\n {\n     highp vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);\n     \n     highp vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor;\n     highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n     highp vec2 adjustedSamplePos = vec2(samplePos.x, (samplePos.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n     highp float distanceFromSamplePoint = distance(adjustedSamplePos, textureCoordinateToUse);\n     lowp float checkForPresenceWithinDot = step(distanceFromSamplePoint, (fractionalWidthOfPixel * 0.5) * dotScaling);\n\n     lowp vec4 inputColor = texture2D(inputImageTexture, samplePos);\n     \n     gl_FragColor = vec4(inputColor.rgb * checkForPresenceWithinDot, inputColor.a);\n }\n);\n#else\nNSString *const kGPUImagePolkaDotFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform float fractionalWidthOfPixel;\n uniform float aspectRatio;\n uniform float dotScaling;\n \n void main()\n {\n     vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);\n     \n     vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor;\n     vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n     vec2 adjustedSamplePos = vec2(samplePos.x, (samplePos.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n     float distanceFromSamplePoint = distance(adjustedSamplePos, textureCoordinateToUse);\n     float checkForPresenceWithinDot = step(distanceFromSamplePoint, (fractionalWidthOfPixel * 0.5) * dotScaling);\n     \n     vec4 inputColor = texture2D(inputImageTexture, samplePos);\n\n     gl_FragColor = vec4(inputColor.rgb * checkForPresenceWithinDot, inputColor.a);\n }\n);\n#endif\n\n@implementation GPUImagePolkaDotFilter\n\n@synthesize dotScaling = _dotScaling;\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImagePolkaDotFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    dotScalingUniform = [filterProgram uniformIndex:@\"dotScaling\"];\n    \n    self.dotScaling = 0.90;\n\n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setDotScaling:(CGFloat)newValue;\n{\n    _dotScaling = newValue;\n    \n    [self setFloat:_dotScaling forUniform:dotScalingUniform program:filterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImagePosterizeFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n/** This reduces the color dynamic range into the number of steps specified, leading to a cartoon-like simple shading of the image.\n */\n@interface GPUImagePosterizeFilter : GPUImageFilter\n{\n    GLint colorLevelsUniform;\n}\n\n/** The number of color levels to reduce the image space to. This ranges from 1 to 256, with a default of 10.\n */\n@property(readwrite, nonatomic) NSUInteger colorLevels; \n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImagePosterizeFilter.m",
    "content": "#import \"GPUImagePosterizeFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImagePosterizeFragmentShaderString = SHADER_STRING\n( \n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform highp float colorLevels;\n \n void main()\n {\n     highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     \n     gl_FragColor = floor((textureColor * colorLevels) + vec4(0.5)) / colorLevels;\n }\n);\n#else\nNSString *const kGPUImagePosterizeFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform float colorLevels;\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     \n     gl_FragColor = floor((textureColor * colorLevels) + vec4(0.5)) / colorLevels;\n }\n);\n#endif\n\n@implementation GPUImagePosterizeFilter\n\n@synthesize colorLevels = _colorLevels;\n\n#pragma mark -\n#pragma mark Initialization\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImagePosterizeFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    colorLevelsUniform = [filterProgram uniformIndex:@\"colorLevels\"];\n    self.colorLevels = 10;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setColorLevels:(NSUInteger)newValue;\n{\n    _colorLevels = newValue;\n    \n    [self setFloat:_colorLevels forUniform:colorLevelsUniform program:filterProgram];\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImagePrewittEdgeDetectionFilter.h",
    "content": "#import \"GPUImageSobelEdgeDetectionFilter.h\"\n\n@interface GPUImagePrewittEdgeDetectionFilter : GPUImageSobelEdgeDetectionFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImagePrewittEdgeDetectionFilter.m",
    "content": "#import \"GPUImagePrewittEdgeDetectionFilter.h\"\n\n@implementation GPUImagePrewittEdgeDetectionFilter\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImagePrewittFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform float edgeStrength;\n\n void main()\n {\n     float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;\n     float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;\n     float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;\n     float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;\n     float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;\n     float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;\n     float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;\n     float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;\n     float h = -topLeftIntensity - topIntensity - topRightIntensity + bottomLeftIntensity + bottomIntensity + bottomRightIntensity;\n     float v = -bottomLeftIntensity - leftIntensity - topLeftIntensity + bottomRightIntensity + rightIntensity + topRightIntensity;\n     \n     float mag = length(vec2(h, v)) * edgeStrength;\n     \n     gl_FragColor = vec4(vec3(mag), 1.0);\n }\n);\n#else\nNSString *const kGPUImagePrewittFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform float edgeStrength;\n\n void main()\n {\n     float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;\n     float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;\n     float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;\n     float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;\n     float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;\n     float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;\n     float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;\n     float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;\n     float h = -topLeftIntensity - topIntensity - topRightIntensity + bottomLeftIntensity + bottomIntensity + bottomRightIntensity;\n     float v = -bottomLeftIntensity - leftIntensity - topLeftIntensity + bottomRightIntensity + rightIntensity + topRightIntensity;\n     \n     float mag = length(vec2(h, v)) * edgeStrength;\n     \n     gl_FragColor = vec4(vec3(mag), 1.0);\n }\n);\n#endif\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = 
[self initWithFragmentShaderFromString:kGPUImagePrewittFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n\n    self.edgeStrength = 1.0;\n    \n    return self;\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageRGBClosingFilter.h",
    "content": "#import \"GPUImageFilterGroup.h\"\n\n@class GPUImageRGBErosionFilter;\n@class GPUImageRGBDilationFilter;\n\n// A filter that first performs a dilation on each color channel of an image, followed by an erosion of the same radius. \n// This helps to filter out smaller dark elements.\n\n@interface GPUImageRGBClosingFilter : GPUImageFilterGroup\n{\n    GPUImageRGBErosionFilter *erosionFilter;\n    GPUImageRGBDilationFilter *dilationFilter;\n}\n\n- (id)initWithRadius:(NSUInteger)radius;\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageRGBClosingFilter.m",
    "content": "#import \"GPUImageRGBClosingFilter.h\"\n#import \"GPUImageRGBErosionFilter.h\"\n#import \"GPUImageRGBDilationFilter.h\"\n\n@implementation GPUImageRGBClosingFilter\n\n- (id)init;\n{\n    if (!(self = [self initWithRadius:1]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n- (id)initWithRadius:(NSUInteger)radius;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n    // First pass: dilation\n    dilationFilter = [[GPUImageRGBDilationFilter alloc] initWithRadius:radius];\n    [self addFilter:dilationFilter];\n    \n    // Second pass: erosion\n    erosionFilter = [[GPUImageRGBErosionFilter alloc] initWithRadius:radius];\n    [self addFilter:erosionFilter];\n    \n    [dilationFilter addTarget:erosionFilter];\n    \n    self.initialFilters = [NSArray arrayWithObjects:dilationFilter, nil];\n    self.terminalFilter = erosionFilter;\n    \n    return self;\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageRGBDilationFilter.h",
    "content": "#import \"GPUImageTwoPassTextureSamplingFilter.h\"\n\n// For each pixel, this sets it to the maximum value of each color channel in a rectangular neighborhood extending out dilationRadius pixels from the center.\n// This extends out brighter colors, and can be used for abstraction of color images.\n\n@interface GPUImageRGBDilationFilter : GPUImageTwoPassTextureSamplingFilter\n\n// Acceptable values for dilationRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.\n- (id)initWithRadius:(NSUInteger)dilationRadius;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageRGBDilationFilter.m",
    "content": "#import \"GPUImageRGBDilationFilter.h\"\n#import \"GPUImageDilationFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageRGBDilationRadiusOneFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);\n     lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);\n     lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);\n     \n     lowp vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity);\n     \n     gl_FragColor = max(maxValue, oneStepNegativeIntensity);\n }\n);\n\nNSString *const kGPUImageRGBDilationRadiusTwoFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);\n     lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);\n     lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);\n     lowp vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);\n     lowp vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);\n     \n     lowp vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity);\n     maxValue = max(maxValue, oneStepNegativeIntensity);\n     maxValue = max(maxValue, twoStepsPositiveIntensity);\n     maxValue = max(maxValue, twoStepsNegativeIntensity);\n     \n     gl_FragColor = max(maxValue, twoStepsNegativeIntensity);\n }\n);\n\nNSString *const kGPUImageRGBDilationRadiusThreeFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n varying vec2 threeStepsPositiveTextureCoordinate;\n varying vec2 threeStepsNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);\n     lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);\n     lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);\n     lowp vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);\n     lowp vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);\n     lowp vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate);\n     lowp vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate);\n     \n     lowp vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity);\n     
maxValue = max(maxValue, oneStepNegativeIntensity);\n     maxValue = max(maxValue, twoStepsPositiveIntensity);\n     maxValue = max(maxValue, twoStepsNegativeIntensity);\n     maxValue = max(maxValue, threeStepsPositiveIntensity);\n     \n     gl_FragColor = max(maxValue, threeStepsNegativeIntensity);\n }\n);\n\nNSString *const kGPUImageRGBDilationRadiusFourFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n varying vec2 threeStepsPositiveTextureCoordinate;\n varying vec2 threeStepsNegativeTextureCoordinate;\n varying vec2 fourStepsPositiveTextureCoordinate;\n varying vec2 fourStepsNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);\n     lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);\n     lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);\n     lowp vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);\n     lowp vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);\n     lowp vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate);\n     lowp vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate);\n     lowp vec4 fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate);\n     lowp vec4 fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate);\n     \n     lowp vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity);\n     maxValue = max(maxValue, oneStepNegativeIntensity);\n     maxValue = max(maxValue, twoStepsPositiveIntensity);\n     maxValue = max(maxValue, twoStepsNegativeIntensity);\n     maxValue = max(maxValue, threeStepsPositiveIntensity);\n     maxValue = max(maxValue, threeStepsNegativeIntensity);\n     maxValue = max(maxValue, fourStepsPositiveIntensity);\n     \n     gl_FragColor = max(maxValue, fourStepsNegativeIntensity);\n }\n);\n#else\nNSString *const kGPUImageRGBDilationRadiusOneFragmentShaderString = SHADER_STRING\n(\n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);\n     vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);\n     vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);\n     \n     vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity);\n     \n     gl_FragColor = max(maxValue, oneStepNegativeIntensity);\n }\n );\n\nNSString *const kGPUImageRGBDilationRadiusTwoFragmentShaderString = SHADER_STRING\n(\n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     
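// Non-ES (Mac) variant of the radius-two pass: the same per-channel max over five taps, just without the ES precision qualifiers.\n     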
vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);\n     vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);\n     vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);\n     vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);\n     vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);\n     \n     vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity);\n     maxValue = max(maxValue, oneStepNegativeIntensity);\n     maxValue = max(maxValue, twoStepsPositiveIntensity);\n     maxValue = max(maxValue, twoStepsNegativeIntensity);\n     \n     gl_FragColor = max(maxValue, twoStepsNegativeIntensity);\n }\n );\n\nNSString *const kGPUImageRGBDilationRadiusThreeFragmentShaderString = SHADER_STRING\n(\n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n varying vec2 threeStepsPositiveTextureCoordinate;\n varying vec2 threeStepsNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);\n     vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);\n     vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);\n     vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);\n     vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);\n     vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate);\n     vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate);\n     \n     vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity);\n     maxValue = max(maxValue, oneStepNegativeIntensity);\n     maxValue = max(maxValue, twoStepsPositiveIntensity);\n     maxValue = max(maxValue, twoStepsNegativeIntensity);\n     maxValue = max(maxValue, threeStepsPositiveIntensity);\n     \n     gl_FragColor = max(maxValue, threeStepsNegativeIntensity);\n }\n);\n\nNSString *const kGPUImageRGBDilationRadiusFourFragmentShaderString = SHADER_STRING\n(\n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n varying vec2 threeStepsPositiveTextureCoordinate;\n varying vec2 threeStepsNegativeTextureCoordinate;\n varying vec2 fourStepsPositiveTextureCoordinate;\n varying vec2 fourStepsNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);\n     vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);\n     vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);\n     vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);\n     vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);\n     vec4 threeStepsPositiveIntensity 
= texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate);\n     vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate);\n     vec4 fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate);\n     vec4 fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate);\n     \n     vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity);\n     maxValue = max(maxValue, oneStepNegativeIntensity);\n     maxValue = max(maxValue, twoStepsPositiveIntensity);\n     maxValue = max(maxValue, twoStepsNegativeIntensity);\n     maxValue = max(maxValue, threeStepsPositiveIntensity);\n     maxValue = max(maxValue, threeStepsNegativeIntensity);\n     maxValue = max(maxValue, fourStepsPositiveIntensity);\n     \n     gl_FragColor = max(maxValue, fourStepsNegativeIntensity);\n }\n);\n#endif\n\n@implementation GPUImageRGBDilationFilter\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithRadius:(NSUInteger)dilationRadius;\n{    \n    NSString *fragmentShaderForThisRadius = nil;\n    NSString *vertexShaderForThisRadius = nil;\n    \n    switch (dilationRadius)\n    {\n        case 0:\n        case 1:\n        {\n            vertexShaderForThisRadius = kGPUImageDilationRadiusOneVertexShaderString;\n            fragmentShaderForThisRadius = kGPUImageRGBDilationRadiusOneFragmentShaderString;\n        }; break;\n        case 2:\n        {\n            vertexShaderForThisRadius = kGPUImageDilationRadiusTwoVertexShaderString;\n            fragmentShaderForThisRadius = kGPUImageRGBDilationRadiusTwoFragmentShaderString;\n        }; break;\n        case 3:\n        {\n            vertexShaderForThisRadius = kGPUImageDilationRadiusThreeVertexShaderString;\n            fragmentShaderForThisRadius = kGPUImageRGBDilationRadiusThreeFragmentShaderString;\n        }; break;\n        case 4:\n        {\n            vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString;\n            fragmentShaderForThisRadius = kGPUImageRGBDilationRadiusFourFragmentShaderString;\n        }; break;\n        default:\n        {\n            vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString;\n            fragmentShaderForThisRadius = kGPUImageRGBDilationRadiusFourFragmentShaderString;\n        }; break;\n    }\n    \n    if (!(self = [super initWithFirstStageVertexShaderFromString:vertexShaderForThisRadius firstStageFragmentShaderFromString:fragmentShaderForThisRadius secondStageVertexShaderFromString:vertexShaderForThisRadius secondStageFragmentShaderFromString:fragmentShaderForThisRadius]))\n    {\n        return nil;\n    }\n    \n    return self;\n}\n\n- (id)init;\n{\n    if (!(self = [self initWithRadius:1]))\n    {\n        return nil;\n    }\n    \n    return self;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageRGBErosionFilter.h",
    "content": "#import \"GPUImageTwoPassTextureSamplingFilter.h\"\n\n// For each pixel, this sets it to the minimum value of each color channel in a rectangular neighborhood extending out dilationRadius pixels from the center.\n// This extends out dark features, and can be used for abstraction of color images.\n\n@interface GPUImageRGBErosionFilter : GPUImageTwoPassTextureSamplingFilter\n\n// Acceptable values for erosionRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.\n- (id)initWithRadius:(NSUInteger)erosionRadius;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageRGBErosionFilter.m",
    "content": "#import \"GPUImageRGBErosionFilter.h\"\n#import \"GPUImageDilationFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageRGBErosionRadiusOneFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);\n     lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);\n     lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);\n     \n     lowp vec4 minValue = min(centerIntensity, oneStepPositiveIntensity);\n     \n     gl_FragColor = min(minValue, oneStepNegativeIntensity);\n }\n);\n\nNSString *const kGPUImageRGBErosionRadiusTwoFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);\n     lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);\n     lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);\n     lowp vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);\n     lowp vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);\n     \n     lowp vec4 minValue = min(centerIntensity, oneStepPositiveIntensity);\n     minValue = min(minValue, oneStepNegativeIntensity);\n     minValue = min(minValue, twoStepsPositiveIntensity);\n     \n     gl_FragColor = min(minValue, twoStepsNegativeIntensity);\n }\n );\n\nNSString *const kGPUImageRGBErosionRadiusThreeFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n varying vec2 threeStepsPositiveTextureCoordinate;\n varying vec2 threeStepsNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);\n     lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);\n     lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);\n     lowp vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);\n     lowp vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);\n     lowp vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate);\n     lowp vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate);\n     \n     lowp vec4 minValue = min(centerIntensity, oneStepPositiveIntensity);\n     minValue = min(minValue, oneStepNegativeIntensity);\n     
minValue = min(minValue, twoStepsPositiveIntensity);\n     minValue = min(minValue, twoStepsNegativeIntensity);\n     minValue = min(minValue, threeStepsPositiveIntensity);\n     \n     gl_FragColor = min(minValue, threeStepsNegativeIntensity);\n }\n );\n\nNSString *const kGPUImageRGBErosionRadiusFourFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n varying vec2 threeStepsPositiveTextureCoordinate;\n varying vec2 threeStepsNegativeTextureCoordinate;\n varying vec2 fourStepsPositiveTextureCoordinate;\n varying vec2 fourStepsNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);\n     lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);\n     lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);\n     lowp vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);\n     lowp vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);\n     lowp vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate);\n     lowp vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate);\n     lowp vec4 fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate);\n     lowp vec4 fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate);\n     \n     lowp vec4 minValue = min(centerIntensity, oneStepPositiveIntensity);\n     minValue = min(minValue, oneStepNegativeIntensity);\n     minValue = min(minValue, twoStepsPositiveIntensity);\n     minValue = min(minValue, twoStepsNegativeIntensity);\n     minValue = min(minValue, threeStepsPositiveIntensity);\n     minValue = min(minValue, threeStepsNegativeIntensity);\n     minValue = min(minValue, fourStepsPositiveIntensity);\n     \n     gl_FragColor = min(minValue, fourStepsNegativeIntensity);\n }\n);\n#else\nNSString *const kGPUImageRGBErosionRadiusOneFragmentShaderString = SHADER_STRING\n(\n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);\n     vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);\n     vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);\n     \n     vec4 minValue = min(centerIntensity, oneStepPositiveIntensity);\n     \n     gl_FragColor = min(minValue, oneStepNegativeIntensity);\n }\n);\n\nNSString *const kGPUImageRGBErosionRadiusTwoFragmentShaderString = SHADER_STRING\n(\n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     vec4 centerIntensity = texture2D(inputImageTexture, 
centerTextureCoordinate);\n     vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);\n     vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);\n     vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);\n     vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);\n     \n     vec4 minValue = min(centerIntensity, oneStepPositiveIntensity);\n     minValue = min(minValue, oneStepNegativeIntensity);\n     minValue = min(minValue, twoStepsPositiveIntensity);\n     \n     gl_FragColor = min(minValue, twoStepsNegativeIntensity);\n }\n);\n\nNSString *const kGPUImageRGBErosionRadiusThreeFragmentShaderString = SHADER_STRING\n(\n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n varying vec2 threeStepsPositiveTextureCoordinate;\n varying vec2 threeStepsNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);\n     vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);\n     vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);\n     vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);\n     vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);\n     vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate);\n     vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate);\n     \n     vec4 minValue = min(centerIntensity, oneStepPositiveIntensity);\n     minValue = min(minValue, oneStepNegativeIntensity);\n     minValue = min(minValue, twoStepsPositiveIntensity);\n     minValue = min(minValue, twoStepsNegativeIntensity);\n     minValue = min(minValue, threeStepsPositiveIntensity);\n     \n     gl_FragColor = min(minValue, threeStepsNegativeIntensity);\n }\n);\n\nNSString *const kGPUImageRGBErosionRadiusFourFragmentShaderString = SHADER_STRING\n(\n varying vec2 centerTextureCoordinate;\n varying vec2 oneStepPositiveTextureCoordinate;\n varying vec2 oneStepNegativeTextureCoordinate;\n varying vec2 twoStepsPositiveTextureCoordinate;\n varying vec2 twoStepsNegativeTextureCoordinate;\n varying vec2 threeStepsPositiveTextureCoordinate;\n varying vec2 threeStepsNegativeTextureCoordinate;\n varying vec2 fourStepsPositiveTextureCoordinate;\n varying vec2 fourStepsNegativeTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);\n     vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);\n     vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);\n     vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);\n     vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);\n     vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate);\n     vec4 threeStepsNegativeIntensity = 
texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate);\n     vec4 fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate);\n     vec4 fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate);\n     \n     vec4 minValue = min(centerIntensity, oneStepPositiveIntensity);\n     minValue = min(minValue, oneStepNegativeIntensity);\n     minValue = min(minValue, twoStepsPositiveIntensity);\n     minValue = min(minValue, twoStepsNegativeIntensity);\n     minValue = min(minValue, threeStepsPositiveIntensity);\n     minValue = min(minValue, threeStepsNegativeIntensity);\n     minValue = min(minValue, fourStepsPositiveIntensity);\n     \n     gl_FragColor = min(minValue, fourStepsNegativeIntensity);\n }\n);\n#endif\n\n@implementation GPUImageRGBErosionFilter\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithRadius:(NSUInteger)erosionRadius;\n{    \n    NSString *fragmentShaderForThisRadius = nil;\n    NSString *vertexShaderForThisRadius = nil;\n    \n    switch (erosionRadius)\n    {\n        case 0:\n        case 1:\n        {\n            vertexShaderForThisRadius = kGPUImageDilationRadiusOneVertexShaderString;\n            fragmentShaderForThisRadius = kGPUImageRGBErosionRadiusOneFragmentShaderString;\n        }; break;\n        case 2:\n        {\n            vertexShaderForThisRadius = kGPUImageDilationRadiusTwoVertexShaderString;\n            fragmentShaderForThisRadius = kGPUImageRGBErosionRadiusTwoFragmentShaderString;\n        }; break;\n        case 3:\n        {\n            vertexShaderForThisRadius = kGPUImageDilationRadiusThreeVertexShaderString;\n            fragmentShaderForThisRadius = kGPUImageRGBErosionRadiusThreeFragmentShaderString;\n        }; break;\n        case 4:\n        {\n            vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString;\n            fragmentShaderForThisRadius = kGPUImageRGBErosionRadiusFourFragmentShaderString;\n        }; break;\n        default:\n        {\n            vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString;\n            fragmentShaderForThisRadius = kGPUImageRGBErosionRadiusFourFragmentShaderString;\n        }; break;\n    }\n    \n    if (!(self = [super initWithFirstStageVertexShaderFromString:vertexShaderForThisRadius firstStageFragmentShaderFromString:fragmentShaderForThisRadius secondStageVertexShaderFromString:vertexShaderForThisRadius secondStageFragmentShaderFromString:fragmentShaderForThisRadius]))\n    {\n        return nil;\n    }\n    \n    return self;\n}\n\n- (id)init;\n{\n    if (!(self = [self initWithRadius:1]))\n    {\n        return nil;\n    }\n    \n    return self;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageRGBFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageRGBFilter : GPUImageFilter\n{\n    GLint redUniform;\n    GLint greenUniform;\n    GLint blueUniform;\n}\n\n// Normalized values by which each color channel is multiplied. The range is from 0.0 up, with 1.0 as the default.\n@property (readwrite, nonatomic) CGFloat red; \n@property (readwrite, nonatomic) CGFloat green; \n@property (readwrite, nonatomic) CGFloat blue;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageRGBFilter.m",
    "content": "#import \"GPUImageRGBFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageRGBFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform highp float redAdjustment;\n uniform highp float greenAdjustment;\n uniform highp float blueAdjustment;\n \n void main()\n {\n     highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     \n     gl_FragColor = vec4(textureColor.r * redAdjustment, textureColor.g * greenAdjustment, textureColor.b * blueAdjustment, textureColor.a);\n }\n);\n#else\nNSString *const kGPUImageRGBFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform float redAdjustment;\n uniform float greenAdjustment;\n uniform float blueAdjustment;\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     \n     gl_FragColor = vec4(textureColor.r * redAdjustment, textureColor.g * greenAdjustment, textureColor.b * blueAdjustment, textureColor.a);\n }\n );\n#endif\n\n@implementation GPUImageRGBFilter\n\n@synthesize red = _red, blue = _blue, green = _green;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageRGBFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    redUniform = [filterProgram uniformIndex:@\"redAdjustment\"];\n    self.red = 1.0;\n    \n    greenUniform = [filterProgram uniformIndex:@\"greenAdjustment\"];\n    self.green = 1.0;\n    \n    blueUniform = [filterProgram uniformIndex:@\"blueAdjustment\"];\n    self.blue = 1.0;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setRed:(CGFloat)newValue;\n{\n    _red = newValue;\n    \n    [self setFloat:_red forUniform:redUniform program:filterProgram];\n}\n\n- (void)setGreen:(CGFloat)newValue;\n{\n    _green = newValue;\n\n    [self setFloat:_green forUniform:greenUniform program:filterProgram];\n}\n\n- (void)setBlue:(CGFloat)newValue;\n{\n    _blue = newValue;\n\n    [self setFloat:_blue forUniform:blueUniform program:filterProgram];\n}\n\n@end"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageRGBOpeningFilter.h",
    "content": "#import \"GPUImageFilterGroup.h\"\n\n@class GPUImageRGBErosionFilter;\n@class GPUImageRGBDilationFilter;\n\n// A filter that first performs an erosion on each color channel of an image, followed by a dilation of the same radius. \n// This helps to filter out smaller bright elements.\n\n@interface GPUImageRGBOpeningFilter : GPUImageFilterGroup\n{\n    GPUImageRGBErosionFilter *erosionFilter;\n    GPUImageRGBDilationFilter *dilationFilter;\n}\n\n- (id)initWithRadius:(NSUInteger)radius;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageRGBOpeningFilter.m",
    "content": "#import \"GPUImageRGBOpeningFilter.h\"\n#import \"GPUImageRGBErosionFilter.h\"\n#import \"GPUImageRGBDilationFilter.h\"\n\n@implementation GPUImageRGBOpeningFilter\n\n- (id)init;\n{\n    if (!(self = [self initWithRadius:1]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n- (id)initWithRadius:(NSUInteger)radius;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n    // First pass: erosion\n    erosionFilter = [[GPUImageRGBErosionFilter alloc] initWithRadius:radius];\n    [self addFilter:erosionFilter];\n    \n    // Second pass: dilation\n    dilationFilter = [[GPUImageRGBDilationFilter alloc] initWithRadius:radius];\n    [self addFilter:dilationFilter];\n    \n    [erosionFilter addTarget:dilationFilter];\n    \n    self.initialFilters = [NSArray arrayWithObjects:erosionFilter, nil];\n    self.terminalFilter = dilationFilter;\n    \n    return self;\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageRawDataInput.h",
    "content": "#import \"GPUImageOutput.h\"\n\n// The bytes passed into this input are not copied or retained, but you are free to deallocate them after they are used by this filter.\n// The bytes are uploaded and stored within a texture, so nothing is kept locally.\n// The default format for input bytes is GPUPixelFormatBGRA, unless specified with pixelFormat:\n// The default type for input bytes is GPUPixelTypeUByte, unless specified with pixelType:\n\ntypedef enum {\n\tGPUPixelFormatBGRA = GL_BGRA,\n\tGPUPixelFormatRGBA = GL_RGBA,\n\tGPUPixelFormatRGB = GL_RGB,\n    GPUPixelFormatLuminance = GL_LUMINANCE\n} GPUPixelFormat;\n\ntypedef enum {\n\tGPUPixelTypeUByte = GL_UNSIGNED_BYTE,\n\tGPUPixelTypeFloat = GL_FLOAT\n} GPUPixelType;\n\n@interface GPUImageRawDataInput : GPUImageOutput\n{\n    CGSize uploadedImageSize;\n\t\n\tdispatch_semaphore_t dataUpdateSemaphore;\n}\n\n// Initialization and teardown\n- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize;\n- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize pixelFormat:(GPUPixelFormat)pixelFormat;\n- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize pixelFormat:(GPUPixelFormat)pixelFormat type:(GPUPixelType)pixelType;\n\n/** Input data pixel format\n */\n@property (readwrite, nonatomic) GPUPixelFormat pixelFormat;\n@property (readwrite, nonatomic) GPUPixelType   pixelType;\n\n// Image rendering\n- (void)updateDataFromBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize;\n- (void)processData;\n- (void)processDataForTimestamp:(CMTime)frameTime;\n- (CGSize)outputImageSize;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageRawDataInput.m",
    "content": "#import \"GPUImageRawDataInput.h\"\n\n@interface GPUImageRawDataInput()\n- (void)uploadBytes:(GLubyte *)bytesToUpload;\n@end\n\n@implementation GPUImageRawDataInput\n\n@synthesize pixelFormat = _pixelFormat;\n@synthesize pixelType = _pixelType;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize;\n{\n    if (!(self = [self initWithBytes:bytesToUpload size:imageSize pixelFormat:GPUPixelFormatBGRA type:GPUPixelTypeUByte]))\n    {\n\t\treturn nil;\n    }\n\t\n\treturn self;\n}\n\n- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize pixelFormat:(GPUPixelFormat)pixelFormat;\n{\n    if (!(self = [self initWithBytes:bytesToUpload size:imageSize pixelFormat:pixelFormat type:GPUPixelTypeUByte]))\n    {\n\t\treturn nil;\n    }\n\t\n\treturn self;\n}\n\n- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize pixelFormat:(GPUPixelFormat)pixelFormat type:(GPUPixelType)pixelType;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n\tdataUpdateSemaphore = dispatch_semaphore_create(1);\n\n    uploadedImageSize = imageSize;\n\tself.pixelFormat = pixelFormat;\n\tself.pixelType = pixelType;\n        \n    [self uploadBytes:bytesToUpload];\n    \n    return self;\n}\n\n// ARC forbids explicit message send of 'release'; since iOS 6 even for dispatch_release() calls: stripping it out in that case is required.\n- (void)dealloc;\n{\n#if !OS_OBJECT_USE_OBJC\n    if (dataUpdateSemaphore != NULL)\n    {\n        dispatch_release(dataUpdateSemaphore);\n    }\n#endif\n}\n\n#pragma mark -\n#pragma mark Image rendering\n\n- (void)uploadBytes:(GLubyte *)bytesToUpload;\n{\n    [GPUImageContext useImageProcessingContext];\n\n    // TODO: This probably isn't right, and will need to be corrected\n    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:uploadedImageSize textureOptions:self.outputTextureOptions onlyTexture:YES];\n    \n    glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);\n    glTexImage2D(GL_TEXTURE_2D, 0, _pixelFormat, (int)uploadedImageSize.width, (int)uploadedImageSize.height, 0, (GLint)_pixelFormat, (GLenum)_pixelType, bytesToUpload);\n}\n\n- (void)updateDataFromBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize;\n{\n    uploadedImageSize = imageSize;\n\n    [self uploadBytes:bytesToUpload];\n}\n\n- (void)processData;\n{\n\tif (dispatch_semaphore_wait(dataUpdateSemaphore, DISPATCH_TIME_NOW) != 0)\n    {\n        return;\n    }\n\t\n\trunAsynchronouslyOnVideoProcessingQueue(^{\n\n\t\tCGSize pixelSizeOfImage = [self outputImageSize];\n    \n\t\tfor (id<GPUImageInput> currentTarget in targets)\n\t\t{\n\t\t\tNSInteger indexOfObject = [targets indexOfObject:currentTarget];\n\t\t\tNSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n        \n\t\t\t[currentTarget setInputSize:pixelSizeOfImage atIndex:textureIndexOfTarget];\n            [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];\n\t\t\t[currentTarget newFrameReadyAtTime:kCMTimeInvalid atIndex:textureIndexOfTarget];\n\t\t}\n\t\n\t\tdispatch_semaphore_signal(dataUpdateSemaphore);\n\t});\n}\n\n- (void)processDataForTimestamp:(CMTime)frameTime;\n{\n\tif (dispatch_semaphore_wait(dataUpdateSemaphore, DISPATCH_TIME_NOW) != 0)\n    {\n        return;\n    }\n\t\n\trunAsynchronouslyOnVideoProcessingQueue(^{\n        \n\t\tCGSize pixelSizeOfImage = [self outputImageSize];\n        \n\t\tfor 
(id<GPUImageInput> currentTarget in targets)\n\t\t{\n\t\t\tNSInteger indexOfObject = [targets indexOfObject:currentTarget];\n\t\t\tNSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n            \n\t\t\t[currentTarget setInputSize:pixelSizeOfImage atIndex:textureIndexOfTarget];\n\t\t\t[currentTarget newFrameReadyAtTime:frameTime atIndex:textureIndexOfTarget];\n\t\t}\n        \n\t\tdispatch_semaphore_signal(dataUpdateSemaphore);\n\t});\n}\n\n- (CGSize)outputImageSize;\n{\n    return uploadedImageSize;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageRawDataOutput.h",
    "content": "#import <Foundation/Foundation.h>\n#import \"GPUImageContext.h\"\n\nstruct GPUByteColorVector {\n    GLubyte red;\n    GLubyte green;\n    GLubyte blue;\n    GLubyte alpha;\n};\ntypedef struct GPUByteColorVector GPUByteColorVector;\n\n@protocol GPUImageRawDataProcessor;\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n@interface GPUImageRawDataOutput : NSObject <GPUImageInput> {\n    CGSize imageSize;\n    GPUImageRotationMode inputRotation;\n    BOOL outputBGRA;\n}\n#else\n@interface GPUImageRawDataOutput : NSObject <GPUImageInput> {\n    CGSize imageSize;\n    GPUImageRotationMode inputRotation;\n    BOOL outputBGRA;\n}\n#endif\n\n@property(readonly) GLubyte *rawBytesForImage;\n@property(nonatomic, copy) void(^newFrameAvailableBlock)(void);\n@property(nonatomic) BOOL enabled;\n\n// Initialization and teardown\n- (id)initWithImageSize:(CGSize)newImageSize resultsInBGRAFormat:(BOOL)resultsInBGRAFormat;\n\n// Data access\n- (GPUByteColorVector)colorAtLocation:(CGPoint)locationInImage;\n- (NSUInteger)bytesPerRowInOutput;\n\n- (void)setImageSize:(CGSize)newImageSize;\n\n- (void)lockFramebufferForReading;\n- (void)unlockFramebufferAfterReading;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageRawDataOutput.m",
    "content": "#import \"GPUImageRawDataOutput.h\"\n\n#import \"GPUImageContext.h\"\n#import \"GLProgram.h\"\n#import \"GPUImageFilter.h\"\n#import \"GPUImageMovieWriter.h\"\n\n@interface GPUImageRawDataOutput ()\n{\n    GPUImageFramebuffer *firstInputFramebuffer, *outputFramebuffer, *retainedFramebuffer;\n    \n    BOOL hasReadFromTheCurrentFrame;\n    \n    GLProgram *dataProgram;\n    GLint dataPositionAttribute, dataTextureCoordinateAttribute;\n    GLint dataInputTextureUniform;\n    \n    GLubyte *_rawBytesForImage;\n    \n    BOOL lockNextFramebuffer;\n}\n\n// Frame rendering\n- (void)renderAtInternalSize;\n\n@end\n\n@implementation GPUImageRawDataOutput\n\n@synthesize rawBytesForImage = _rawBytesForImage;\n@synthesize newFrameAvailableBlock = _newFrameAvailableBlock;\n@synthesize enabled;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithImageSize:(CGSize)newImageSize resultsInBGRAFormat:(BOOL)resultsInBGRAFormat;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n\n    self.enabled = YES;\n    lockNextFramebuffer = NO;\n    outputBGRA = resultsInBGRAFormat;\n    imageSize = newImageSize;\n    hasReadFromTheCurrentFrame = NO;\n    _rawBytesForImage = NULL;\n    inputRotation = kGPUImageNoRotation;\n\n    [GPUImageContext useImageProcessingContext];\n    if ( (outputBGRA && ![GPUImageContext supportsFastTextureUpload]) || (!outputBGRA && [GPUImageContext supportsFastTextureUpload]) )\n    {\n        dataProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageColorSwizzlingFragmentShaderString];\n    }\n    else\n    {\n        dataProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImagePassthroughFragmentShaderString];\n    }\n \n    if (!dataProgram.initialized)\n    {\n        [dataProgram addAttribute:@\"position\"];\n        [dataProgram addAttribute:@\"inputTextureCoordinate\"];\n        \n        if (![dataProgram link])\n        {\n            NSString *progLog = [dataProgram programLog];\n            NSLog(@\"Program link log: %@\", progLog);\n            NSString *fragLog = [dataProgram fragmentShaderLog];\n            NSLog(@\"Fragment shader compile log: %@\", fragLog);\n            NSString *vertLog = [dataProgram vertexShaderLog];\n            NSLog(@\"Vertex shader compile log: %@\", vertLog);\n            dataProgram = nil;\n            NSAssert(NO, @\"Filter shader link failed\");\n        }\n    }\n    \n    dataPositionAttribute = [dataProgram attributeIndex:@\"position\"];\n    dataTextureCoordinateAttribute = [dataProgram attributeIndex:@\"inputTextureCoordinate\"];\n    dataInputTextureUniform = [dataProgram uniformIndex:@\"inputImageTexture\"];\n    \n    return self;\n}\n\n- (void)dealloc\n{\n    if (_rawBytesForImage != NULL && (![GPUImageContext supportsFastTextureUpload]))\n    {\n        free(_rawBytesForImage);\n        _rawBytesForImage = NULL;\n    }\n}\n\n#pragma mark -\n#pragma mark Data access\n\n- (void)renderAtInternalSize;\n{\n    [GPUImageContext setActiveShaderProgram:dataProgram];\n\n    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:imageSize onlyTexture:NO];\n    [outputFramebuffer activateFramebuffer];\n\n    if(lockNextFramebuffer)\n    {\n        retainedFramebuffer = outputFramebuffer;\n        [retainedFramebuffer lock];\n        [retainedFramebuffer lockForReading];\n        
lockNextFramebuffer = NO;\n    }\n\n    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);\n    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);\n    \n    static const GLfloat squareVertices[] = {\n        -1.0f, -1.0f,\n        1.0f, -1.0f,\n        -1.0f,  1.0f,\n        1.0f,  1.0f,\n    };\n    \n    static const GLfloat textureCoordinates[] = {\n        0.0f, 0.0f,\n        1.0f, 0.0f,\n        0.0f, 1.0f,\n        1.0f, 1.0f,\n    };\n    \n\tglActiveTexture(GL_TEXTURE4);\n\tglBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);\n\tglUniform1i(dataInputTextureUniform, 4);\t\n    \n    glVertexAttribPointer(dataPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);\n\tglVertexAttribPointer(dataTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);\n    \n    glEnableVertexAttribArray(dataPositionAttribute);\n\tglEnableVertexAttribArray(dataTextureCoordinateAttribute);\n    \n    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\n    [firstInputFramebuffer unlock];\n}\n\n- (GPUByteColorVector)colorAtLocation:(CGPoint)locationInImage;\n{\n    GPUByteColorVector *imageColorBytes = (GPUByteColorVector *)self.rawBytesForImage;\n//    NSLog(@\"Row start\");\n//    for (unsigned int currentXPosition = 0; currentXPosition < (imageSize.width * 2.0); currentXPosition++)\n//    {\n//        GPUByteColorVector byteAtPosition = imageColorBytes[currentXPosition];\n//        NSLog(@\"%d - %d, %d, %d\", currentXPosition, byteAtPosition.red, byteAtPosition.green, byteAtPosition.blue);\n//    }\n//    NSLog(@\"Row end\");\n    \n//    GPUByteColorVector byteAtOne = imageColorBytes[1];\n//    GPUByteColorVector byteAtWidth = imageColorBytes[(int)imageSize.width - 3];\n//    GPUByteColorVector byteAtHeight = imageColorBytes[(int)(imageSize.height - 1) * (int)imageSize.width];\n//    NSLog(@\"Byte 1: %d, %d, %d, byte 2: %d, %d, %d, byte 3: %d, %d, %d\", byteAtOne.red, byteAtOne.green, byteAtOne.blue, byteAtWidth.red, byteAtWidth.green, byteAtWidth.blue, byteAtHeight.red, byteAtHeight.green, byteAtHeight.blue);\n    \n    CGPoint locationToPickFrom = CGPointZero;\n    locationToPickFrom.x = MIN(MAX(locationInImage.x, 0.0), (imageSize.width - 1.0));\n    locationToPickFrom.y = MIN(MAX((imageSize.height - locationInImage.y), 0.0), (imageSize.height - 1.0));\n    \n    if (outputBGRA)    \n    {\n        GPUByteColorVector flippedColor = imageColorBytes[(int)(round((locationToPickFrom.y * imageSize.width) + locationToPickFrom.x))];\n        GLubyte temporaryRed = flippedColor.red;\n        \n        flippedColor.red = flippedColor.blue;\n        flippedColor.blue = temporaryRed;\n\n        return flippedColor;\n    }\n    else\n    {\n        return imageColorBytes[(int)(round((locationToPickFrom.y * imageSize.width) + locationToPickFrom.x))];\n    }\n}\n\n#pragma mark -\n#pragma mark GPUImageInput protocol\n\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;\n{\n    hasReadFromTheCurrentFrame = NO;\n    \n    if (_newFrameAvailableBlock != NULL)\n    {\n        _newFrameAvailableBlock();\n    }\n}\n\n- (NSInteger)nextAvailableTextureIndex;\n{\n    return 0;\n}\n\n- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;\n{\n    firstInputFramebuffer = newInputFramebuffer;\n    [firstInputFramebuffer lock];\n}\n\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n    inputRotation = newInputRotation;\n}\n\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n{\n}\n\n- 
(CGSize)maximumOutputSize;\n{\n    return imageSize;\n}\n\n- (void)endProcessing;\n{\n}\n\n- (BOOL)shouldIgnoreUpdatesToThisTarget;\n{\n    return NO;\n}\n\n- (BOOL)wantsMonochromeInput;\n{\n    return NO;\n}\n\n- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;\n{\n    \n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (GLubyte *)rawBytesForImage;\n{\n    if ( (_rawBytesForImage == NULL) && (![GPUImageContext supportsFastTextureUpload]) )\n    {\n        _rawBytesForImage = (GLubyte *) calloc(imageSize.width * imageSize.height * 4, sizeof(GLubyte));\n        hasReadFromTheCurrentFrame = NO;\n    }\n\n    if (hasReadFromTheCurrentFrame)\n    {\n        return _rawBytesForImage;\n    }\n    else\n    {\n        runSynchronouslyOnVideoProcessingQueue(^{\n            // Note: the fast texture caches speed up 640x480 frame reads from 9.6 ms to 3.1 ms on iPhone 4S\n            \n            [GPUImageContext useImageProcessingContext];\n            [self renderAtInternalSize];\n            \n            if ([GPUImageContext supportsFastTextureUpload])\n            {\n                glFinish();\n                _rawBytesForImage = [outputFramebuffer byteBuffer];\n            }\n            else\n            {\n                glReadPixels(0, 0, imageSize.width, imageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, _rawBytesForImage);\n                // GL_EXT_read_format_bgra\n                //            glReadPixels(0, 0, imageSize.width, imageSize.height, GL_BGRA_EXT, GL_UNSIGNED_BYTE, _rawBytesForImage);\n            }\n          \n            hasReadFromTheCurrentFrame = YES;\n\n        });\n        \n        return _rawBytesForImage;\n    }\n}\n\n- (NSUInteger)bytesPerRowInOutput;\n{\n    return [retainedFramebuffer bytesPerRow];\n}\n\n- (void)setImageSize:(CGSize)newImageSize {\n    imageSize = newImageSize;\n    if (_rawBytesForImage != NULL && (![GPUImageContext supportsFastTextureUpload]))\n    {\n        free(_rawBytesForImage);\n        _rawBytesForImage = NULL;\n    }\n}\n\n- (void)lockFramebufferForReading;\n{\n    lockNextFramebuffer = YES;\n}\n\n- (void)unlockFramebufferAfterReading;\n{\n    [retainedFramebuffer unlockAfterReading];\n    [retainedFramebuffer unlock];\n    retainedFramebuffer = nil;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSaturationBlendFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageSaturationBlendFilter : GPUImageTwoInputFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSaturationBlendFilter.m",
    "content": "#import \"GPUImageSaturationBlendFilter.h\"\n\n/**\n * Saturation blend mode based upon pseudo code from the PDF specification.\n */\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageSaturationBlendFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n highp float lum(lowp vec3 c) {\n     return dot(c, vec3(0.3, 0.59, 0.11));\n }\n \n lowp vec3 clipcolor(lowp vec3 c) {\n     highp float l = lum(c);\n     lowp float n = min(min(c.r, c.g), c.b);\n     lowp float x = max(max(c.r, c.g), c.b);\n     \n     if (n < 0.0) {\n         c.r = l + ((c.r - l) * l) / (l - n);\n         c.g = l + ((c.g - l) * l) / (l - n);\n         c.b = l + ((c.b - l) * l) / (l - n);\n     }\n     if (x > 1.0) {\n         c.r = l + ((c.r - l) * (1.0 - l)) / (x - l);\n         c.g = l + ((c.g - l) * (1.0 - l)) / (x - l);\n         c.b = l + ((c.b - l) * (1.0 - l)) / (x - l);\n     }\n     \n     return c;\n }\n \n lowp vec3 setlum(lowp vec3 c, highp float l) {\n     highp float d = l - lum(c);\n     c = c + vec3(d);\n     return clipcolor(c);\n }\n \n highp float sat(lowp vec3 c) {\n     lowp float n = min(min(c.r, c.g), c.b);\n     lowp float x = max(max(c.r, c.g), c.b);\n     return x - n;\n }\n \n lowp float mid(lowp float cmin, lowp float cmid, lowp float cmax, highp float s) {\n     return ((cmid - cmin) * s) / (cmax - cmin);\n }\n \n lowp vec3 setsat(lowp vec3 c, highp float s) {\n     if (c.r > c.g) {\n         if (c.r > c.b) {\n             if (c.g > c.b) {\n                 /* g is mid, b is min */\n                 c.g = mid(c.b, c.g, c.r, s);\n                 c.b = 0.0;\n             } else {\n                 /* b is mid, g is min */\n                 c.b = mid(c.g, c.b, c.r, s);\n                 c.g = 0.0;\n             }\n             c.r = s;\n         } else {\n             /* b is max, r is mid, g is min */\n             c.r = mid(c.g, c.r, c.b, s);\n             c.b = s;\n             c.r = 0.0;\n         }\n     } else if (c.r > c.b) {\n         /* g is max, r is mid, b is min */\n         c.r = mid(c.b, c.r, c.g, s);\n         c.g = s;\n         c.b = 0.0;\n     } else if (c.g > c.b) {\n         /* g is max, b is mid, r is min */\n         c.b = mid(c.r, c.b, c.g, s);\n         c.g = s;\n         c.r = 0.0;\n     } else if (c.b > c.g) {\n         /* b is max, g is mid, r is min */\n         c.g = mid(c.r, c.g, c.b, s);\n         c.b = s;\n         c.r = 0.0;\n     } else {\n         c = vec3(0.0);\n     }\n     return c;\n }\n \n void main()\n {\n\t highp vec4 baseColor = texture2D(inputImageTexture, textureCoordinate);\n\t highp vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(setsat(baseColor.rgb, sat(overlayColor.rgb)), lum(baseColor.rgb)) * overlayColor.a, baseColor.a);\n }\n);\n#else\nNSString *const kGPUImageSaturationBlendFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n float lum(vec3 c) {\n     return dot(c, vec3(0.3, 0.59, 0.11));\n }\n \n vec3 clipcolor(vec3 c) {\n     float l = lum(c);\n     float n = min(min(c.r, c.g), c.b);\n     float x = max(max(c.r, c.g), c.b);\n     \n     if (n < 0.0) {\n         c.r = l + ((c.r - l) * l) / (l - n);\n         
c.g = l + ((c.g - l) * l) / (l - n);\n         c.b = l + ((c.b - l) * l) / (l - n);\n     }\n     if (x > 1.0) {\n         c.r = l + ((c.r - l) * (1.0 - l)) / (x - l);\n         c.g = l + ((c.g - l) * (1.0 - l)) / (x - l);\n         c.b = l + ((c.b - l) * (1.0 - l)) / (x - l);\n     }\n     \n     return c;\n }\n \n vec3 setlum(vec3 c, float l) {\n     float d = l - lum(c);\n     c = c + vec3(d);\n     return clipcolor(c);\n }\n \n float sat(vec3 c) {\n     float n = min(min(c.r, c.g), c.b);\n     float x = max(max(c.r, c.g), c.b);\n     return x - n;\n }\n \n float mid(float cmin, float cmid, float cmax, float s) {\n     return ((cmid - cmin) * s) / (cmax - cmin);\n }\n \n vec3 setsat(vec3 c, float s) {\n     if (c.r > c.g) {\n         if (c.r > c.b) {\n             if (c.g > c.b) {\n                 /* g is mid, b is min */\n                 c.g = mid(c.b, c.g, c.r, s);\n                 c.b = 0.0;\n             } else {\n                 /* b is mid, g is min */\n                 c.b = mid(c.g, c.b, c.r, s);\n                 c.g = 0.0;\n             }\n             c.r = s;\n         } else {\n             /* b is max, r is mid, g is min */\n             c.r = mid(c.g, c.r, c.b, s);\n             c.b = s;\n             c.r = 0.0;\n         }\n     } else if (c.r > c.b) {\n         /* g is max, r is mid, b is min */\n         c.r = mid(c.b, c.r, c.g, s);\n         c.g = s;\n         c.b = 0.0;\n     } else if (c.g > c.b) {\n         /* g is max, b is mid, r is min */\n         c.b = mid(c.r, c.b, c.g, s);\n         c.g = s;\n         c.r = 0.0;\n     } else if (c.b > c.g) {\n         /* b is max, g is mid, r is min */\n         c.g = mid(c.r, c.g, c.b, s);\n         c.b = s;\n         c.r = 0.0;\n     } else {\n         c = vec3(0.0);\n     }\n     return c;\n }\n \n void main()\n {\n\t vec4 baseColor = texture2D(inputImageTexture, textureCoordinate);\n\t vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(setsat(baseColor.rgb, sat(overlayColor.rgb)), lum(baseColor.rgb)) * overlayColor.a, baseColor.a);\n }\n);\n#endif\n\n\n@implementation GPUImageSaturationBlendFilter\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageSaturationBlendFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSaturationFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n/** Adjusts the saturation of an image\n */\n@interface GPUImageSaturationFilter : GPUImageFilter\n{\n    GLint saturationUniform;\n}\n\n/** Saturation ranges from 0.0 (fully desaturated) to 2.0 (max saturation), with 1.0 as the normal level\n */\n@property(readwrite, nonatomic) CGFloat saturation; \n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSaturationFilter.m",
    "content": "#import \"GPUImageSaturationFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageSaturationFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform lowp float saturation;\n \n // Values from \"Graphics Shaders: Theory and Practice\" by Bailey and Cunningham\n const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);\n \n void main()\n {\n    lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n    lowp float luminance = dot(textureColor.rgb, luminanceWeighting);\n    lowp vec3 greyScaleColor = vec3(luminance);\n    \n\tgl_FragColor = vec4(mix(greyScaleColor, textureColor.rgb, saturation), textureColor.w);\n\t \n }\n);\n#else\nNSString *const kGPUImageSaturationFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform float saturation;\n \n // Values from \"Graphics Shaders: Theory and Practice\" by Bailey and Cunningham\n const vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     float luminance = dot(textureColor.rgb, luminanceWeighting);\n     vec3 greyScaleColor = vec3(luminance);\n     \n     gl_FragColor = vec4(mix(greyScaleColor, textureColor.rgb, saturation), textureColor.w);\n\t \n }\n );\n#endif\n\n@implementation GPUImageSaturationFilter\n\n@synthesize saturation = _saturation;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageSaturationFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    saturationUniform = [filterProgram uniformIndex:@\"saturation\"];\n    self.saturation = 1.0;\n\n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setSaturation:(CGFloat)newValue;\n{\n    _saturation = newValue;\n    \n    [self setFloat:_saturation forUniform:saturationUniform program:filterProgram];\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageScreenBlendFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageScreenBlendFilter : GPUImageTwoInputFilter\n{\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageScreenBlendFilter.m",
    "content": "#import \"GPUImageScreenBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageScreenBlendFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     mediump vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);\n     mediump vec4 whiteColor = vec4(1.0);\n     gl_FragColor = whiteColor - ((whiteColor - textureColor2) * (whiteColor - textureColor));\n }\n);\n#else\nNSString *const kGPUImageScreenBlendFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);\n     vec4 whiteColor = vec4(1.0);\n     gl_FragColor = whiteColor - ((whiteColor - textureColor2) * (whiteColor - textureColor));\n }\n);\n#endif\n\n@implementation GPUImageScreenBlendFilter\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageScreenBlendFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSepiaFilter.h",
    "content": "#import \"GPUImageColorMatrixFilter.h\"\n\n/// Simple sepia tone filter\n@interface GPUImageSepiaFilter : GPUImageColorMatrixFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSepiaFilter.m",
    "content": "#import \"GPUImageSepiaFilter.h\"\n\n@implementation GPUImageSepiaFilter\n\n- (id)init;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n    self.intensity = 1.0;\n    self.colorMatrix = (GPUMatrix4x4){\n        {0.3588, 0.7044, 0.1368, 0.0},\n        {0.2990, 0.5870, 0.1140, 0.0},\n        {0.2392, 0.4696, 0.0912 ,0.0},\n        {0,0,0,1.0},\n    };\n\n    return self;\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSharpenFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageSharpenFilter : GPUImageFilter\n{\n    GLint sharpnessUniform;\n    GLint imageWidthFactorUniform, imageHeightFactorUniform;\n}\n\n// Sharpness ranges from -4.0 to 4.0, with 0.0 as the normal level\n@property(readwrite, nonatomic) CGFloat sharpness; \n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSharpenFilter.m",
    "content": "#import \"GPUImageSharpenFilter.h\"\n\nNSString *const kGPUImageSharpenVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n \n uniform float imageWidthFactor; \n uniform float imageHeightFactor; \n uniform float sharpness;\n \n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate; \n varying vec2 topTextureCoordinate;\n varying vec2 bottomTextureCoordinate;\n \n varying float centerMultiplier;\n varying float edgeMultiplier;\n \n void main()\n {\n     gl_Position = position;\n     \n     vec2 widthStep = vec2(imageWidthFactor, 0.0);\n     vec2 heightStep = vec2(0.0, imageHeightFactor);\n     \n     textureCoordinate = inputTextureCoordinate.xy;\n     leftTextureCoordinate = inputTextureCoordinate.xy - widthStep;\n     rightTextureCoordinate = inputTextureCoordinate.xy + widthStep;\n     topTextureCoordinate = inputTextureCoordinate.xy + heightStep;     \n     bottomTextureCoordinate = inputTextureCoordinate.xy - heightStep;\n     \n     centerMultiplier = 1.0 + 4.0 * sharpness;\n     edgeMultiplier = sharpness;\n }\n);\n\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageSharpenFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n varying highp vec2 textureCoordinate;\n varying highp vec2 leftTextureCoordinate;\n varying highp vec2 rightTextureCoordinate; \n varying highp vec2 topTextureCoordinate;\n varying highp vec2 bottomTextureCoordinate;\n \n varying highp float centerMultiplier;\n varying highp float edgeMultiplier;\n\n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     mediump vec3 textureColor = texture2D(inputImageTexture, textureCoordinate).rgb;\n     mediump vec3 leftTextureColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;\n     mediump vec3 rightTextureColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;\n     mediump vec3 topTextureColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;\n     mediump vec3 bottomTextureColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;\n\n     gl_FragColor = vec4((textureColor * centerMultiplier - (leftTextureColor * edgeMultiplier + rightTextureColor * edgeMultiplier + topTextureColor * edgeMultiplier + bottomTextureColor * edgeMultiplier)), texture2D(inputImageTexture, bottomTextureCoordinate).w);\n }\n);\n#else\nNSString *const kGPUImageSharpenFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n varying vec2 topTextureCoordinate;\n varying vec2 bottomTextureCoordinate;\n \n varying float centerMultiplier;\n varying float edgeMultiplier;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     vec3 textureColor = texture2D(inputImageTexture, textureCoordinate).rgb;\n     vec3 leftTextureColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;\n     vec3 rightTextureColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;\n     vec3 topTextureColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;\n     vec3 bottomTextureColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;\n     \n     gl_FragColor = vec4((textureColor * centerMultiplier - (leftTextureColor * edgeMultiplier + rightTextureColor * edgeMultiplier + topTextureColor * edgeMultiplier + bottomTextureColor * edgeMultiplier)), texture2D(inputImageTexture, bottomTextureCoordinate).w);\n 
}\n);\n#endif\n\n\n@implementation GPUImageSharpenFilter\n\n@synthesize sharpness = _sharpness;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithVertexShaderFromString:kGPUImageSharpenVertexShaderString fragmentShaderFromString:kGPUImageSharpenFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    sharpnessUniform = [filterProgram uniformIndex:@\"sharpness\"];\n    self.sharpness = 0.0;\n    \n    imageWidthFactorUniform = [filterProgram uniformIndex:@\"imageWidthFactor\"];\n    imageHeightFactorUniform = [filterProgram uniformIndex:@\"imageHeightFactor\"];\n    \n    return self;\n}\n\n- (void)setupFilterForSize:(CGSize)filterFrameSize;\n{\n    runSynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext setActiveShaderProgram:filterProgram];\n        \n        if (GPUImageRotationSwapsWidthAndHeight(inputRotation))\n        {\n            glUniform1f(imageWidthFactorUniform, 1.0 / filterFrameSize.height);\n            glUniform1f(imageHeightFactorUniform, 1.0 / filterFrameSize.width);\n        }\n        else\n        {\n            glUniform1f(imageWidthFactorUniform, 1.0 / filterFrameSize.width);\n            glUniform1f(imageHeightFactorUniform, 1.0 / filterFrameSize.height);\n        }\n    });\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setSharpness:(CGFloat)newValue;\n{\n    _sharpness = newValue;\n    \n    [self setFloat:_sharpness forUniform:sharpnessUniform program:filterProgram];\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageShiTomasiFeatureDetectionFilter.h",
    "content": "#import \"GPUImageHarrisCornerDetectionFilter.h\"\n\n/** Shi-Tomasi feature detector\n \n This is the Shi-Tomasi feature detector, as described in  \n J. Shi and C. Tomasi. Good features to track. Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition, pages 593-600, June 1994.\n */\n\n@interface GPUImageShiTomasiFeatureDetectionFilter : GPUImageHarrisCornerDetectionFilter\n\n// Compared to the Harris corner detector, the default sensitivity value for this detector is set to 1.5\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageShiTomasiFeatureDetectionFilter.m",
    "content": "#import \"GPUImageShiTomasiFeatureDetectionFilter.h\"\n\n@implementation GPUImageShiTomasiFeatureDetectionFilter\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageShiTomasiCornerDetectionFragmentShaderString = SHADER_STRING\n( \n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform lowp float sensitivity;\n \n void main()\n {\n     mediump vec3 derivativeElements = texture2D(inputImageTexture, textureCoordinate).rgb;\n     \n     mediump float derivativeDifference = derivativeElements.x - derivativeElements.y;\n     mediump float zElement = (derivativeElements.z * 2.0) - 1.0;\n     \n     // R = Ix^2 + Iy^2 - sqrt( (Ix^2 - Iy^2)^2 + 4 * Ixy * Ixy)\n     mediump float cornerness = derivativeElements.x + derivativeElements.y - sqrt(derivativeDifference * derivativeDifference + 4.0 * zElement * zElement);\n\n     gl_FragColor = vec4(vec3(cornerness * sensitivity), 1.0);\n }\n);\n#else\nNSString *const kGPUImageShiTomasiCornerDetectionFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform float sensitivity;\n \n void main()\n {\n     vec3 derivativeElements = texture2D(inputImageTexture, textureCoordinate).rgb;\n     \n     float derivativeDifference = derivativeElements.x - derivativeElements.y;\n     float zElement = (derivativeElements.z * 2.0) - 1.0;\n     \n     // R = Ix^2 + Iy^2 - sqrt( (Ix^2 - Iy^2)^2 + 4 * Ixy * Ixy)\n     float cornerness = derivativeElements.x + derivativeElements.y - sqrt(derivativeDifference * derivativeDifference + 4.0 * zElement * zElement);\n     \n     gl_FragColor = vec4(vec3(cornerness * sensitivity), 1.0);\n }\n);\n#endif\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [self initWithCornerDetectionFragmentShader:kGPUImageShiTomasiCornerDetectionFragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    self.sensitivity = 1.5;\n    \n    return self;\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSingleComponentGaussianBlurFilter.h",
    "content": "#import \"GPUImageGaussianBlurFilter.h\"\n\n// This filter merely performs the standard Gaussian blur on the red color channel (assuming a luminance image)\n\n@interface GPUImageSingleComponentGaussianBlurFilter : GPUImageGaussianBlurFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSingleComponentGaussianBlurFilter.m",
    "content": "#import \"GPUImageSingleComponentGaussianBlurFilter.h\"\n\n@implementation GPUImageSingleComponentGaussianBlurFilter\n\n+ (NSString *)vertexShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;\n{\n    if (blurRadius < 1)\n    {\n        return kGPUImageVertexShaderString;\n    }\n\n    // First, generate the normal Gaussian weights for a given sigma\n    GLfloat *standardGaussianWeights = calloc(blurRadius + 1, sizeof(GLfloat));\n    GLfloat sumOfWeights = 0.0;\n    for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)\n    {\n        standardGaussianWeights[currentGaussianWeightIndex] = (1.0 / sqrt(2.0 * M_PI * pow(sigma, 2.0))) * exp(-pow(currentGaussianWeightIndex, 2.0) / (2.0 * pow(sigma, 2.0)));\n        \n        if (currentGaussianWeightIndex == 0)\n        {\n            sumOfWeights += standardGaussianWeights[currentGaussianWeightIndex];\n        }\n        else\n        {\n            sumOfWeights += 2.0 * standardGaussianWeights[currentGaussianWeightIndex];\n        }\n    }\n    \n    // Next, normalize these weights to prevent the clipping of the Gaussian curve at the end of the discrete samples from reducing luminance\n    for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)\n    {\n        standardGaussianWeights[currentGaussianWeightIndex] = standardGaussianWeights[currentGaussianWeightIndex] / sumOfWeights;\n    }\n    \n    // From these weights we calculate the offsets to read interpolated values from\n    NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7);\n    GLfloat *optimizedGaussianOffsets = calloc(numberOfOptimizedOffsets, sizeof(GLfloat));\n    \n    for (NSUInteger currentOptimizedOffset = 0; currentOptimizedOffset < numberOfOptimizedOffsets; currentOptimizedOffset++)\n    {\n        GLfloat firstWeight = standardGaussianWeights[currentOptimizedOffset*2 + 1];\n        GLfloat secondWeight = standardGaussianWeights[currentOptimizedOffset*2 + 2];\n        \n        GLfloat optimizedWeight = firstWeight + secondWeight;\n        \n        optimizedGaussianOffsets[currentOptimizedOffset] = (firstWeight * (currentOptimizedOffset*2 + 1) + secondWeight * (currentOptimizedOffset*2 + 2)) / optimizedWeight;\n    }\n    \n    NSMutableString *shaderString = [[NSMutableString alloc] init];\n    // Header\n    [shaderString appendFormat:@\"\\\n     attribute vec4 position;\\n\\\n     attribute vec4 inputTextureCoordinate;\\n\\\n     \\n\\\n     uniform float texelWidthOffset;\\n\\\n     uniform float texelHeightOffset;\\n\\\n     \\n\\\n     varying vec2 blurCoordinates[%lu];\\n\\\n     \\n\\\n     void main()\\n\\\n     {\\n\\\n     gl_Position = position;\\n\\\n     \\n\\\n     vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\\n\", (unsigned long)(1 + (numberOfOptimizedOffsets * 2))];\n    \n    // Inner offset loop\n    [shaderString appendString:@\"blurCoordinates[0] = inputTextureCoordinate.xy;\\n\"];\n    for (NSUInteger currentOptimizedOffset = 0; currentOptimizedOffset < numberOfOptimizedOffsets; currentOptimizedOffset++)\n    {\n        [shaderString appendFormat:@\"\\\n         blurCoordinates[%lu] = inputTextureCoordinate.xy + singleStepOffset * %f;\\n\\\n         blurCoordinates[%lu] = inputTextureCoordinate.xy - singleStepOffset * %f;\\n\", (unsigned long)((currentOptimizedOffset * 2) + 1), optimizedGaussianOffsets[currentOptimizedOffset], 
(unsigned long)((currentOptimizedOffset * 2) + 2), optimizedGaussianOffsets[currentOptimizedOffset]];\n    }\n    \n    // Footer\n    [shaderString appendString:@\"}\\n\"];\n    \n    free(optimizedGaussianOffsets);\n    free(standardGaussianWeights);\n    return shaderString;\n}\n\n+ (NSString *)fragmentShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;\n{\n    if (blurRadius < 1)\n    {\n        return kGPUImagePassthroughFragmentShaderString;\n    }\n\n    // First, generate the normal Gaussian weights for a given sigma\n    GLfloat *standardGaussianWeights = calloc(blurRadius + 1, sizeof(GLfloat));\n    GLfloat sumOfWeights = 0.0;\n    for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)\n    {\n        standardGaussianWeights[currentGaussianWeightIndex] = (1.0 / sqrt(2.0 * M_PI * pow(sigma, 2.0))) * exp(-pow(currentGaussianWeightIndex, 2.0) / (2.0 * pow(sigma, 2.0)));\n        \n        if (currentGaussianWeightIndex == 0)\n        {\n            sumOfWeights += standardGaussianWeights[currentGaussianWeightIndex];\n        }\n        else\n        {\n            sumOfWeights += 2.0 * standardGaussianWeights[currentGaussianWeightIndex];\n        }\n    }\n    \n    // Next, normalize these weights to prevent the clipping of the Gaussian curve at the end of the discrete samples from reducing luminance\n    for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)\n    {\n        standardGaussianWeights[currentGaussianWeightIndex] = standardGaussianWeights[currentGaussianWeightIndex] / sumOfWeights;\n    }\n    \n    // From these weights we calculate the offsets to read interpolated values from\n    NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7);\n    NSUInteger trueNumberOfOptimizedOffsets = blurRadius / 2 + (blurRadius % 2);\n    \n    NSMutableString *shaderString = [[NSMutableString alloc] init];\n    \n    // Header\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n    [shaderString appendFormat:@\"\\\n     uniform sampler2D inputImageTexture;\\n\\\n     uniform highp float texelWidthOffset;\\n\\\n     uniform highp float texelHeightOffset;\\n\\\n     \\n\\\n     varying highp vec2 blurCoordinates[%lu];\\n\\\n     \\n\\\n     void main()\\n\\\n     {\\n\\\n     lowp float sum = 0.0;\\n\", (unsigned long)(1 + (numberOfOptimizedOffsets * 2)) ];\n#else\n    [shaderString appendFormat:@\"\\\n     uniform sampler2D inputImageTexture;\\n\\\n     uniform float texelWidthOffset;\\n\\\n     uniform float texelHeightOffset;\\n\\\n     \\n\\\n     varying vec2 blurCoordinates[%lu];\\n\\\n     \\n\\\n     void main()\\n\\\n     {\\n\\\n     float sum = 0.0;\\n\", 1 + (numberOfOptimizedOffsets * 2) ];\n#endif\n    \n    // Inner texture loop\n    [shaderString appendFormat:@\"sum += texture2D(inputImageTexture, blurCoordinates[0]).r * %f;\\n\", standardGaussianWeights[0]];\n    \n    for (NSUInteger currentBlurCoordinateIndex = 0; currentBlurCoordinateIndex < numberOfOptimizedOffsets; currentBlurCoordinateIndex++)\n    {\n        GLfloat firstWeight = standardGaussianWeights[currentBlurCoordinateIndex * 2 + 1];\n        GLfloat secondWeight = standardGaussianWeights[currentBlurCoordinateIndex * 2 + 2];\n        GLfloat optimizedWeight = firstWeight + secondWeight;\n        \n        [shaderString appendFormat:@\"sum += texture2D(inputImageTexture, blurCoordinates[%lu]).r * %f;\\n\", (unsigned 
long)((currentBlurCoordinateIndex * 2) + 1), optimizedWeight];\n        [shaderString appendFormat:@\"sum += texture2D(inputImageTexture, blurCoordinates[%lu]).r * %f;\\n\", (unsigned long)((currentBlurCoordinateIndex * 2) + 2), optimizedWeight];\n    }\n    \n    // If the number of required samples exceeds the amount we can pass in via varyings, we have to do dependent texture reads in the fragment shader\n    if (trueNumberOfOptimizedOffsets > numberOfOptimizedOffsets)\n    {\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n        [shaderString appendString:@\"highp vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\\n\"];\n#else\n        [shaderString appendString:@\"vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\\n\"];\n#endif\n        \n        for (NSUInteger currentOverlowTextureRead = numberOfOptimizedOffsets; currentOverlowTextureRead < trueNumberOfOptimizedOffsets; currentOverlowTextureRead++)\n        {\n            GLfloat firstWeight = standardGaussianWeights[currentOverlowTextureRead * 2 + 1];\n            GLfloat secondWeight = standardGaussianWeights[currentOverlowTextureRead * 2 + 2];\n            \n            GLfloat optimizedWeight = firstWeight + secondWeight;\n            GLfloat optimizedOffset = (firstWeight * (currentOverlowTextureRead * 2 + 1) + secondWeight * (currentOverlowTextureRead * 2 + 2)) / optimizedWeight;\n            \n            [shaderString appendFormat:@\"sum += texture2D(inputImageTexture, blurCoordinates[0] + singleStepOffset * %f).r * %f;\\n\", optimizedOffset, optimizedWeight];\n            [shaderString appendFormat:@\"sum += texture2D(inputImageTexture, blurCoordinates[0] - singleStepOffset * %f).r * %f;\\n\", optimizedOffset, optimizedWeight];\n        }\n    }\n    \n    // Footer\n    [shaderString appendString:@\"\\\n     gl_FragColor = vec4(sum, sum, sum, 1.0);\\n\\\n     }\\n\"];\n    \n    free(standardGaussianWeights);\n    return shaderString;\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSketchFilter.h",
    "content": "#import \"GPUImageSobelEdgeDetectionFilter.h\"\n\n/** Converts video to look like a sketch.\n \n This is just the Sobel edge detection filter with the colors inverted.\n */\n@interface GPUImageSketchFilter : GPUImageSobelEdgeDetectionFilter\n{\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSketchFilter.m",
    "content": "#import \"GPUImageSketchFilter.h\"\n\n@implementation GPUImageSketchFilter\n\n// Invert the colorspace for a sketch\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageSketchFragmentShaderString = SHADER_STRING\n(\n precision mediump float;\n \n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform float edgeStrength;\n\n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;\n     float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;\n     float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;\n     float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;\n     float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;\n     float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;\n     float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;\n     float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;\n     float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;\n     float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;\n     \n     float mag = 1.0 - (length(vec2(h, v)) * edgeStrength);\n     \n     gl_FragColor = vec4(vec3(mag), 1.0);\n }\n);\n#else\nNSString *const kGPUImageSketchFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform float edgeStrength;\n\n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;\n     float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;\n     float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;\n     float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;\n     float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;\n     float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;\n     float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;\n     float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;\n     float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;\n     float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;\n     \n     float mag = 1.0 - (length(vec2(h, v)) * edgeStrength);\n     \n     gl_FragColor = vec4(vec3(mag), 1.0);\n }\n);\n#endif\n\n#pragma 
mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [self initWithFragmentShaderFromString:kGPUImageSketchFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSkinToneFilter.h",
    "content": "//\n//  GPUImageSkinToneFilter.h\n//\n//\n//  Created by github.com/r3mus on 8/14/15.\n//\n//\n\n#import \"GPUImageTwoInputFilter.h\"\n\ntypedef NS_ENUM(NSUInteger, GPUImageSkinToneUpperColor) {\n    GPUImageSkinToneUpperColorGreen,\n    GPUImageSkinToneUpperColorOrange\n};\n\nextern NSString *const kGPUImageSkinToneFragmentShaderString;\n\n@interface GPUImageSkinToneFilter : GPUImageFilter\n{\n    GLint skinToneAdjustUniform;\n    GLint skinHueUniform;\n    GLint skinHueThresholdUniform;\n    GLint maxHueShiftUniform;\n    GLint maxSaturationShiftUniform;\n    GLint upperSkinToneColorUniform;\n}\n\n// The amount of effect to apply, between -1.0 (pink) and +1.0 (orange OR green). Default is 0.0.\n@property (nonatomic, readwrite) CGFloat skinToneAdjust;\n\n// The initial hue of skin to adjust. Default is 0.05 (a common skin red).\n@property (nonatomic, readwrite) CGFloat skinHue;\n\n// The bell curve \"breadth\" of the skin hue adjustment (i.e. how different from the original skinHue will the modifications effect).\n// Default is 40.0\n@property (nonatomic, readwrite) CGFloat skinHueThreshold;\n\n// The maximum amount of hue shift allowed in the adjustments that affect hue (pink, green). Default = 0.25.\n@property (nonatomic, readwrite) CGFloat maxHueShift;\n\n// The maximum amount of saturation shift allowed in the adjustments that affect saturation (orange). Default = 0.4.\n@property (nonatomic, readwrite) CGFloat maxSaturationShift;\n\n// Defines whether the upper range (> 0.0) will change the skin tone to green (hue) or orange (saturation)\n@property (nonatomic, readwrite) GPUImageSkinToneUpperColor upperSkinToneColor;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSkinToneFilter.m",
    "content": "//\n//  GPUImageSkinToneFilter.m\n//\n//\n//  Created by github.com/r3mus on 8/13/15.\n//\n//\n\n#import \"GPUImageSkinToneFilter.h\"\n\n@implementation GPUImageSkinToneFilter\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageSkinToneFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n // [-1;1] <=> [pink;orange]\n uniform highp float skinToneAdjust; // will make reds more pink\n \n // Other parameters\n uniform mediump float skinHue;\n uniform mediump float skinHueThreshold;\n uniform mediump float maxHueShift;\n uniform mediump float maxSaturationShift;\n uniform int upperSkinToneColor;\n \n // RGB <-> HSV conversion, thanks to http://lolengine.net/blog/2013/07/27/rgb-to-hsv-in-glsl\n highp vec3 rgb2hsv(highp vec3 c)\n{\n    highp vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n    highp vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));\n    highp vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n    \n    highp float d = q.x - min(q.w, q.y);\n    highp float e = 1.0e-10;\n    return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n \n // HSV <-> RGB conversion, thanks to http://lolengine.net/blog/2013/07/27/rgb-to-hsv-in-glsl\n highp vec3 hsv2rgb(highp vec3 c)\n{\n    highp vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n    highp vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n    return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n \n // Main\n void main ()\n{\n    \n    // Sample the input pixel\n    highp vec4 colorRGB = texture2D(inputImageTexture, textureCoordinate);\n    \n    // Convert color to HSV, extract hue\n    highp vec3 colorHSV = rgb2hsv(colorRGB.rgb);\n    highp float hue = colorHSV.x;\n    \n    // check how far from skin hue\n    highp float dist = hue - skinHue;\n    if (dist > 0.5)\n        dist -= 1.0;\n    if (dist < -0.5)\n        dist += 1.0;\n    dist = abs(dist)/0.5; // normalized to [0,1]\n    \n    // Apply Gaussian like filter\n    highp float weight = exp(-dist*dist*skinHueThreshold);\n    weight = clamp(weight, 0.0, 1.0);\n    \n    // Using pink/green, so only adjust hue\n    if (upperSkinToneColor == 0) {\n        colorHSV.x += skinToneAdjust * weight * maxHueShift;\n    // Using pink/orange, so adjust hue < 0 and saturation > 0\n    } else if (upperSkinToneColor == 1) {\n        // We want more orange, so increase saturation\n        if (skinToneAdjust > 0.0)\n            colorHSV.y += skinToneAdjust * weight * maxSaturationShift;\n        // we want more pinks, so decrease hue\n        else\n            colorHSV.x += skinToneAdjust * weight * maxHueShift;\n    }\n\n    // final color\n    highp vec3 finalColorRGB = hsv2rgb(colorHSV.rgb);\n    \n    // display\n    gl_FragColor = vec4(finalColorRGB, 1.0);\n}\n);\n#else\nNSString *const kGPUImageSkinToneFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n // [-1;1] <=> [pink;orange]\n uniform float skinToneAdjust; // will make reds more pink\n \n // Other parameters\n uniform float skinHue;\n uniform float skinHueThreshold;\n uniform float maxHueShift;\n uniform float maxSaturationShift;\n uniform int upperSkinToneColor;\n \n // RGB <-> HSV conversion, thanks to http://lolengine.net/blog/2013/07/27/rgb-to-hsv-in-glsl\n highp vec3 rgb2hsv(highp vec3 c)\n{\n    vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n    vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, 
K.xy), step(c.b, c.g));\n    vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));\n    \n    float d = q.x - min(q.w, q.y);\n    float e = 1.0e-10;\n    return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);\n}\n \n // HSV <-> RGB conversion, thanks to http://lolengine.net/blog/2013/07/27/rgb-to-hsv-in-glsl\n highp vec3 hsv2rgb(highp vec3 c)\n{\n    vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);\n    vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);\n    return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);\n}\n \n // Main\n void main ()\n{\n    \n    // Sample the input pixel\n    vec4 colorRGB = texture2D(inputImageTexture, textureCoordinate);\n    \n    // Convert color to HSV, extract hue\n    vec3 colorHSV = rgb2hsv(colorRGB.rgb);\n    float hue = colorHSV.x;\n    \n    // check how far from skin hue\n    float dist = hue - skinHue;\n    if (dist > 0.5)\n        dist -= 1.0;\n    if (dist < -0.5)\n        dist += 1.0;\n    dist = abs(dist)/0.5; // normalized to [0,1]\n    \n    // Apply Gaussian like filter\n    float weight = exp(-dist*dist*skinHueThreshold);\n    weight = clamp(weight, 0.0, 1.0);\n    \n    // Using pink/green, so only adjust hue\n    if (upperSkinToneColor == 0) {\n        colorHSV.x += skinToneAdjust * weight * maxHueShift;\n        // Using pink/orange, so adjust hue < 0 and saturation > 0\n    } else if (upperSkinToneColor == 1) {\n        // We want more orange, so increase saturation\n        if (skinToneAdjust > 0.0)\n            colorHSV.y += skinToneAdjust * weight * maxSaturationShift;\n        // we want more pinks, so decrease hue\n        else\n            colorHSV.x += skinToneAdjust * weight * maxHueShift;\n    }\n    \n    // final color\n    vec3 finalColorRGB = hsv2rgb(colorHSV.rgb);\n    \n    // display\n    gl_FragColor = vec4(finalColorRGB, 1.0);\n}\n );\n#endif\n\n#pragma mark -\n#pragma mark Initialization and teardown\n@synthesize skinToneAdjust;\n@synthesize skinHue;\n@synthesize skinHueThreshold;\n@synthesize maxHueShift;\n@synthesize maxSaturationShift;\n@synthesize upperSkinToneColor;\n\n- (id)init\n{\n    if(! 
(self = [super initWithFragmentShaderFromString:kGPUImageSkinToneFragmentShaderString]) )\n    {\n        return nil;\n    }\n    \n    skinToneAdjustUniform = [filterProgram uniformIndex:@\"skinToneAdjust\"];\n    skinHueUniform = [filterProgram uniformIndex:@\"skinHue\"];\n    skinHueThresholdUniform = [filterProgram uniformIndex:@\"skinHueThreshold\"];\n    maxHueShiftUniform = [filterProgram uniformIndex:@\"maxHueShift\"];\n    maxSaturationShiftUniform = [filterProgram uniformIndex:@\"maxSaturationShift\"];\n    upperSkinToneColorUniform = [filterProgram uniformIndex:@\"upperSkinToneColor\"];\n    \n    self.skinHue = 0.05;\n    self.skinHueThreshold = 40.0;\n    self.maxHueShift = 0.25;\n    self.maxSaturationShift = 0.4;\n    self.upperSkinToneColor = GPUImageSkinToneUpperColorGreen;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setSkinToneAdjust:(CGFloat)newValue\n{\n    skinToneAdjust = newValue;\n    [self setFloat:newValue forUniform:skinToneAdjustUniform program:filterProgram];\n}\n\n- (void)setSkinHue:(CGFloat)newValue\n{\n    skinHue = newValue;\n    [self setFloat:newValue forUniform:skinHueUniform program:filterProgram];\n}\n\n- (void)setSkinHueThreshold:(CGFloat)newValue\n{\n    skinHueThreshold = newValue;\n    [self setFloat:newValue forUniform:skinHueThresholdUniform program:filterProgram];\n}\n\n- (void)setMaxHueShift:(CGFloat)newValue\n{\n    maxHueShift = newValue;\n    [self setFloat:newValue forUniform:maxHueShiftUniform program:filterProgram];\n}\n\n- (void)setMaxSaturationShift:(CGFloat)newValue\n{\n    maxSaturationShift = newValue;\n    [self setFloat:newValue forUniform:maxSaturationShiftUniform program:filterProgram];\n}\n\n- (void)setUpperSkinToneColor:(GPUImageSkinToneUpperColor)newValue\n{\n    upperSkinToneColor = newValue;\n    [self setInteger:newValue forUniform:upperSkinToneColorUniform program:filterProgram];\n}\n\n@end"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSmoothToonFilter.h",
    "content": "#import \"GPUImageFilterGroup.h\"\n\n@class GPUImageGaussianBlurFilter;\n@class GPUImageToonFilter;\n\n/** This uses a similar process as the GPUImageToonFilter, only it precedes the toon effect with a Gaussian blur to smooth out noise.\n */\n@interface GPUImageSmoothToonFilter : GPUImageFilterGroup\n{\n    GPUImageGaussianBlurFilter *blurFilter;\n    GPUImageToonFilter *toonFilter;\n}\n\n/// The image width and height factors tweak the appearance of the edges. By default, they match the filter size in pixels\n@property(readwrite, nonatomic) CGFloat texelWidth; \n/// The image width and height factors tweak the appearance of the edges. By default, they match the filter size in pixels\n@property(readwrite, nonatomic) CGFloat texelHeight; \n\n/// The radius of the underlying Gaussian blur. The default is 2.0.\n@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;\n\n/// The threshold at which to apply the edges, default of 0.2\n@property(readwrite, nonatomic) CGFloat threshold; \n\n/// The levels of quantization for the posterization of colors within the scene, with a default of 10.0\n@property(readwrite, nonatomic) CGFloat quantizationLevels; \n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSmoothToonFilter.m",
    "content": "#import \"GPUImageSmoothToonFilter.h\"\n#import \"GPUImageGaussianBlurFilter.h\"\n#import \"GPUImageToonFilter.h\"\n\n@implementation GPUImageSmoothToonFilter\n\n@synthesize threshold;\n@synthesize blurRadiusInPixels;\n@synthesize quantizationLevels;\n@synthesize texelWidth;\n@synthesize texelHeight;\n\n- (id)init;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n    // First pass: apply a variable Gaussian blur\n    blurFilter = [[GPUImageGaussianBlurFilter alloc] init];\n    [self addFilter:blurFilter];\n    \n    // Second pass: run the Sobel edge detection on this blurred image, along with a posterization effect\n    toonFilter = [[GPUImageToonFilter alloc] init];\n    [self addFilter:toonFilter];\n    \n    // Texture location 0 needs to be the sharp image for both the blur and the second stage processing\n    [blurFilter addTarget:toonFilter];\n    \n    self.initialFilters = [NSArray arrayWithObject:blurFilter];\n    self.terminalFilter = toonFilter;\n    \n    self.blurRadiusInPixels = 2.0;\n    self.threshold = 0.2;\n    self.quantizationLevels = 10.0;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setBlurRadiusInPixels:(CGFloat)newValue;\n{\n    blurFilter.blurRadiusInPixels = newValue;\n}\n\n- (CGFloat)blurRadiusInPixels;\n{\n    return blurFilter.blurRadiusInPixels;\n}\n\n- (void)setTexelWidth:(CGFloat)newValue;\n{\n    toonFilter.texelWidth = newValue;\n}\n\n- (CGFloat)texelWidth;\n{\n    return toonFilter.texelWidth;\n}\n\n- (void)setTexelHeight:(CGFloat)newValue;\n{\n    toonFilter.texelHeight = newValue;\n}\n\n- (CGFloat)texelHeight;\n{\n    return toonFilter.texelHeight;\n}\n\n- (void)setThreshold:(CGFloat)newValue;\n{\n    toonFilter.threshold = newValue;\n}\n\n- (CGFloat)threshold;\n{\n    return toonFilter.threshold;\n}\n\n- (void)setQuantizationLevels:(CGFloat)newValue;\n{\n    toonFilter.quantizationLevels = newValue;\n}\n\n- (CGFloat)quantizationLevels;\n{\n    return toonFilter.quantizationLevels;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSobelEdgeDetectionFilter.h",
    "content": "#import \"GPUImageTwoPassFilter.h\"\n\n@interface GPUImageSobelEdgeDetectionFilter : GPUImageTwoPassFilter\n{\n    GLint texelWidthUniform, texelHeightUniform, edgeStrengthUniform;\n    BOOL hasOverriddenImageSizeFactor;\n}\n\n// The texel width and height factors tweak the appearance of the edges. By default, they match the inverse of the filter size in pixels\n@property(readwrite, nonatomic) CGFloat texelWidth; \n@property(readwrite, nonatomic) CGFloat texelHeight; \n\n// The filter strength property affects the dynamic range of the filter. High values can make edges more visible, but can lead to saturation. Default of 1.0.\n@property(readwrite, nonatomic) CGFloat edgeStrength;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSobelEdgeDetectionFilter.m",
    "content": "#import \"GPUImageSobelEdgeDetectionFilter.h\"\n#import \"GPUImageGrayscaleFilter.h\"\n#import \"GPUImage3x3ConvolutionFilter.h\"\n\n//   Code from \"Graphics Shaders: Theory and Practice\" by M. Bailey and S. Cunningham \n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageSobelEdgeDetectionFragmentShaderString = SHADER_STRING\n(\n precision mediump float;\n\n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n\n uniform sampler2D inputImageTexture;\n uniform float edgeStrength;\n \n void main()\n {\n    float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;\n    float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;\n    float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;\n    float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;\n    float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;\n    float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;\n    float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;\n    float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;\n    float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;\n    float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;\n    \n    float mag = length(vec2(h, v)) * edgeStrength;\n    \n    gl_FragColor = vec4(vec3(mag), 1.0);\n }\n);\n#else\nNSString *const kGPUImageSobelEdgeDetectionFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform float edgeStrength;\n\n void main()\n {\n     float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;\n     float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;\n     float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;\n     float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;\n     float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;\n     float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;\n     float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;\n     float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;\n     float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;\n     float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;\n     \n     float mag = length(vec2(h, v)) * 
edgeStrength;\n     \n     gl_FragColor = vec4(vec3(mag), 1.0);\n }\n);\n#endif\n\n@implementation GPUImageSobelEdgeDetectionFilter\n\n@synthesize texelWidth = _texelWidth; \n@synthesize texelHeight = _texelHeight; \n@synthesize edgeStrength = _edgeStrength;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [self initWithFragmentShaderFromString:kGPUImageSobelEdgeDetectionFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;\n{\n    // Do a luminance pass first to reduce the calculations performed at each fragment in the edge detection phase\n\n    if (!(self = [super initWithFirstStageVertexShaderFromString:kGPUImageVertexShaderString firstStageFragmentShaderFromString:kGPUImageLuminanceFragmentShaderString secondStageVertexShaderFromString:kGPUImageNearbyTexelSamplingVertexShaderString secondStageFragmentShaderFromString:fragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    hasOverriddenImageSizeFactor = NO;\n    \n    texelWidthUniform = [secondFilterProgram uniformIndex:@\"texelWidth\"];\n    texelHeightUniform = [secondFilterProgram uniformIndex:@\"texelHeight\"];\n    edgeStrengthUniform = [secondFilterProgram uniformIndex:@\"edgeStrength\"];\n    \n    self.edgeStrength = 1.0;\n    return self;\n}\n\n- (void)setupFilterForSize:(CGSize)filterFrameSize;\n{\n    if (!hasOverriddenImageSizeFactor)\n    {\n        _texelWidth = 1.0 / filterFrameSize.width;\n        _texelHeight = 1.0 / filterFrameSize.height;\n        \n        runSynchronouslyOnVideoProcessingQueue(^{\n            GLProgram *previousProgram = [GPUImageContext sharedImageProcessingContext].currentShaderProgram;\n            [GPUImageContext setActiveShaderProgram:secondFilterProgram];\n            glUniform1f(texelWidthUniform, _texelWidth);\n            glUniform1f(texelHeightUniform, _texelHeight);\n            [GPUImageContext setActiveShaderProgram:previousProgram];\n        });\n    }\n}\n\n- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex;\n{\n    [super setUniformsForProgramAtIndex:programIndex];\n    \n    if (programIndex == 1)\n    {\n        glUniform1f(texelWidthUniform, _texelWidth);\n        glUniform1f(texelHeightUniform, _texelHeight);\n    }\n}\n\n- (BOOL)wantsMonochromeInput;\n{\n//    return YES;\n    return NO;\n}\n\n- (BOOL)providesMonochromeOutput;\n{\n//    return YES;\n    return NO;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setTexelWidth:(CGFloat)newValue;\n{\n    hasOverriddenImageSizeFactor = YES;\n    _texelWidth = newValue;\n    \n    [self setFloat:_texelWidth forUniform:texelWidthUniform program:secondFilterProgram];\n}\n\n- (void)setTexelHeight:(CGFloat)newValue;\n{\n    hasOverriddenImageSizeFactor = YES;\n    _texelHeight = newValue;\n\n    [self setFloat:_texelHeight forUniform:texelHeightUniform program:secondFilterProgram];\n}\n\n- (void)setEdgeStrength:(CGFloat)newValue;\n{\n    _edgeStrength = newValue;\n    \n    [self setFloat:_edgeStrength forUniform:edgeStrengthUniform program:secondFilterProgram];\n}\n\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSoftEleganceFilter.h",
    "content": "#import \"GPUImageFilterGroup.h\"\n\n@class GPUImagePicture;\n\n/** A photo filter based on Soft Elegance Photoshop action\n    http://h-d-stock.deviantart.com/art/H-D-A-soft-elegance-70107603\n */\n\n// Note: If you want to use this effect you have to add \n//       lookup_soft_elegance_1.png and lookup_soft_elegance_2.png\n//       from Resources folder to your application bundle.\n\n@interface GPUImageSoftEleganceFilter : GPUImageFilterGroup\n{\n    GPUImagePicture *lookupImageSource1;\n    GPUImagePicture *lookupImageSource2;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSoftEleganceFilter.m",
    "content": "#import \"GPUImageSoftEleganceFilter.h\"\n#import \"GPUImagePicture.h\"\n#import \"GPUImageLookupFilter.h\"\n#import \"GPUImageGaussianBlurFilter.h\"\n#import \"GPUImageAlphaBlendFilter.h\"\n\n@implementation GPUImageSoftEleganceFilter\n\n- (id)init;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n    UIImage *image1 = [UIImage imageNamed:@\"lookup_soft_elegance_1.png\"];\n    UIImage *image2 = [UIImage imageNamed:@\"lookup_soft_elegance_2.png\"];\n#else\n    NSImage *image1 = [NSImage imageNamed:@\"lookup_soft_elegance_1.png\"];\n    NSImage *image2 = [NSImage imageNamed:@\"lookup_soft_elegance_2.png\"];\n#endif\n\n    NSAssert(image1 && image2,\n             @\"To use GPUImageSoftEleganceFilter you need to add lookup_soft_elegance_1.png and lookup_soft_elegance_2.png from GPUImage/framework/Resources to your application bundle.\");\n    \n    lookupImageSource1 = [[GPUImagePicture alloc] initWithImage:image1];\n    GPUImageLookupFilter *lookupFilter1 = [[GPUImageLookupFilter alloc] init];\n    [self addFilter:lookupFilter1];\n\n    [lookupImageSource1 addTarget:lookupFilter1 atTextureLocation:1];\n    [lookupImageSource1 processImage];\n\n    GPUImageGaussianBlurFilter *gaussianBlur = [[GPUImageGaussianBlurFilter alloc] init];\n    gaussianBlur.blurRadiusInPixels = 10.0;\n    [lookupFilter1 addTarget:gaussianBlur];\n    [self addFilter:gaussianBlur];\n    \n    GPUImageAlphaBlendFilter *alphaBlend = [[GPUImageAlphaBlendFilter alloc] init];\n    alphaBlend.mix = 0.14;\n    [lookupFilter1 addTarget:alphaBlend];\n    [gaussianBlur addTarget:alphaBlend];\n    [self addFilter:alphaBlend];\n    \n    lookupImageSource2 = [[GPUImagePicture alloc] initWithImage:image2];\n\n    GPUImageLookupFilter *lookupFilter2 = [[GPUImageLookupFilter alloc] init];\n    [alphaBlend addTarget:lookupFilter2];\n    [lookupImageSource2 addTarget:lookupFilter2];\n    [lookupImageSource2 processImage];\n    [self addFilter:lookupFilter2];\n    \n    self.initialFilters = [NSArray arrayWithObjects:lookupFilter1, nil];\n    self.terminalFilter = lookupFilter2;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSoftLightBlendFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageSoftLightBlendFilter : GPUImageTwoInputFilter\n{\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSoftLightBlendFilter.m",
    "content": "#import \"GPUImageSoftLightBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageSoftLightBlendFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     mediump vec4 base = texture2D(inputImageTexture, textureCoordinate);\n     mediump vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     lowp float alphaDivisor = base.a + step(base.a, 0.0); // Protect against a divide-by-zero blacking out things in the output\n     gl_FragColor = base * (overlay.a * (base / alphaDivisor) + (2.0 * overlay * (1.0 - (base / alphaDivisor)))) + overlay * (1.0 - base.a) + base * (1.0 - overlay.a);\n }\n);\n#else\nNSString *const kGPUImageSoftLightBlendFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     vec4 base = texture2D(inputImageTexture, textureCoordinate);\n     vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     float alphaDivisor = base.a + step(base.a, 0.0); // Protect against a divide-by-zero blacking out things in the output\n     gl_FragColor = base * (overlay.a * (base / alphaDivisor) + (2.0 * overlay * (1.0 - (base / alphaDivisor)))) + overlay * (1.0 - base.a) + base * (1.0 - overlay.a);\n }\n);\n#endif\n\n@implementation GPUImageSoftLightBlendFilter\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageSoftLightBlendFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSolarizeFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n/** Pixels with a luminance above the threshold will invert their color\n */\n@interface GPUImageSolarizeFilter : GPUImageFilter\n{\n    GLint thresholdUniform;\n}\n\n/** Anything above this luminance will be inverted, and anything below normal. Ranges from 0.0 to 1.0, with 0.5 as the default\n */\n@property(readwrite, nonatomic) CGFloat threshold;\n\n@end"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSolarizeFilter.m",
    "content": "#import \"GPUImageSolarizeFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageSolarizeFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform highp float threshold;\n \n const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n \n void main()\n {\n     highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     highp float luminance = dot(textureColor.rgb, W);\n     highp float thresholdResult = step(luminance, threshold);\n     highp vec3 finalColor = abs(thresholdResult - textureColor.rgb);\n     \n     gl_FragColor = vec4(finalColor, textureColor.w);\n }\n);\n#else\nNSString *const kGPUImageSolarizeFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform float threshold;\n \n const vec3 W = vec3(0.2125, 0.7154, 0.0721);\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     float luminance = dot(textureColor.rgb, W);\n     float thresholdResult = step(luminance, threshold);\n     vec3 finalColor = abs(thresholdResult - textureColor.rgb);\n\n     gl_FragColor = vec4(vec3(finalColor), textureColor.w);\n }\n);\n#endif\n\n@implementation GPUImageSolarizeFilter;\n\n@synthesize threshold = _threshold;\n\n#pragma mark -\n#pragma mark Initialization\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageSolarizeFragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    thresholdUniform = [filterProgram uniformIndex:@\"threshold\"];\n    self.threshold = 0.5;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setThreshold:(CGFloat)newValue;\n{\n    _threshold = newValue;\n    \n    [self setFloat:_threshold forUniform:thresholdUniform program:filterProgram];\n}\n\n\n@end"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSolidColorGenerator.h",
    "content": "#import \"GPUImageFilter.h\"\n\n// This outputs an image with a constant color. You need to use -forceProcessingAtSize: in order to set the output image\n// dimensions, or this won't work correctly\n\n\n@interface GPUImageSolidColorGenerator : GPUImageFilter\n{\n    GLint colorUniform;\n    GLint useExistingAlphaUniform;\n}\n\n// This color dictates what the output image will be filled with\n@property(readwrite, nonatomic) GPUVector4 color;\n@property(readwrite, nonatomic, assign) BOOL useExistingAlpha; // whether to use the alpha of the existing image or not, default is NO\n\n- (void)setColorRed:(CGFloat)redComponent green:(CGFloat)greenComponent blue:(CGFloat)blueComponent alpha:(CGFloat)alphaComponent;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSolidColorGenerator.m",
    "content": "#import \"GPUImageSolidColorGenerator.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUSolidColorFragmentShaderString = SHADER_STRING\n(\n precision lowp float;\n\n varying highp vec2 textureCoordinate;\n uniform sampler2D inputImageTexture;\n uniform vec4 color;\n uniform float useExistingAlpha;\n\n void main()\n {\n     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     gl_FragColor = vec4(color.rgb, max(textureColor.a, 1.0 - useExistingAlpha));\n }\n );\n#else\nNSString *const kGPUSolidColorFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n uniform sampler2D inputImageTexture;\n uniform vec4 color;\n uniform float useExistingAlpha;\n\n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     gl_FragColor = vec4(color.rgb, max(textureColor.a, 1.0 - useExistingAlpha));\n }\n );\n#endif\n\n@implementation GPUImageSolidColorGenerator\n\n@synthesize color = _color;\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUSolidColorFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    colorUniform = [filterProgram uniformIndex:@\"color\"];\n    useExistingAlphaUniform = [filterProgram uniformIndex:@\"useExistingAlpha\"];\n    \n\t_color = (GPUVector4){0.0f, 0.0f, 0.5f, 1.0f};\n    self.useExistingAlpha = NO;\n    \n    return self;\n}\n\n- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;\n{\n    if (self.preventRendering)\n    {\n        return;\n    }\n    \n    runSynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext setActiveShaderProgram:filterProgram];\n        \n        outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];\n        [outputFramebuffer activateFramebuffer];\n        \n        glClearColor(_color.one, _color.two, _color.three, _color.four);\n        glClear(GL_COLOR_BUFFER_BIT);\n    });\n}\n\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)forceProcessingAtSize:(CGSize)frameSize;\n{\n    [super forceProcessingAtSize:frameSize];\n\n    if (!CGSizeEqualToSize(inputTextureSize, CGSizeZero))\n    {\n        [self newFrameReadyAtTime:kCMTimeIndefinite atIndex:0];\n    }\n}\n\n- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;\n{\n    [super addTarget:newTarget atTextureLocation:textureLocation];\n\n    if (!CGSizeEqualToSize(inputTextureSize, CGSizeZero))\n    {\n        [newTarget setInputSize:inputTextureSize atIndex:textureLocation];\n        [newTarget newFrameReadyAtTime:kCMTimeIndefinite atIndex:textureLocation];\n    }\n}\n\n- (void)setColor:(GPUVector4)newValue;\n{\n\t[self setColorRed:newValue.one green:newValue.two blue:newValue.three alpha:newValue.four];\n}\n\n- (void)setColorRed:(CGFloat)redComponent green:(CGFloat)greenComponent blue:(CGFloat)blueComponent alpha:(CGFloat)alphaComponent;\n{\n    _color.one = (GLfloat)redComponent;\n    _color.two = (GLfloat)greenComponent;\n    _color.three = (GLfloat)blueComponent;\n    _color.four = (GLfloat)alphaComponent;\n    \n//    [self setVec4:_color forUniform:colorUniform program:filterProgram];\n    runAsynchronouslyOnVideoProcessingQueue(^{\n        [self newFrameReadyAtTime:kCMTimeIndefinite atIndex:0];\n    });\n}\n\n- (void)setUseExistingAlpha:(BOOL)useExistingAlpha;\n{\n    _useExistingAlpha = useExistingAlpha;\n\n    
[self setInteger:(useExistingAlpha ? 1 : 0) forUniform:useExistingAlphaUniform program:filterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSourceOverBlendFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageSourceOverBlendFilter : GPUImageTwoInputFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSourceOverBlendFilter.m",
    "content": "#import \"GPUImageSourceOverBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageSourceOverBlendFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n   lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n   lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate);\n   \n   gl_FragColor = mix(textureColor, textureColor2, textureColor2.a);\n }\n);\n#else\nNSString *const kGPUImageSourceOverBlendFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate);\n     \n     gl_FragColor = mix(textureColor, textureColor2, textureColor2.a);\n }\n );\n#endif\n\n@implementation GPUImageSourceOverBlendFilter\n\n- (id)init;\n{\n  if (!(self = [super initWithFragmentShaderFromString:kGPUImageSourceOverBlendFragmentShaderString]))\n  {\n\t\treturn nil;\n  }\n  \n  return self;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSphereRefractionFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageSphereRefractionFilter : GPUImageFilter\n{\n    GLint radiusUniform, centerUniform, aspectRatioUniform, refractiveIndexUniform;\n}\n\n/// The center about which to apply the distortion, with a default of (0.5, 0.5)\n@property(readwrite, nonatomic) CGPoint center;\n/// The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.25\n@property(readwrite, nonatomic) CGFloat radius;\n/// The index of refraction for the sphere, with a default of 0.71\n@property(readwrite, nonatomic) CGFloat refractiveIndex;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSphereRefractionFilter.m",
    "content": "#import \"GPUImageSphereRefractionFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageSphereRefractionFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform highp vec2 center;\n uniform highp float radius;\n uniform highp float aspectRatio;\n uniform highp float refractiveIndex;\n \n void main()\n {\n     highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n     highp float distanceFromCenter = distance(center, textureCoordinateToUse);\n     lowp float checkForPresenceWithinSphere = step(distanceFromCenter, radius);\n     \n     distanceFromCenter = distanceFromCenter / radius;\n     \n     highp float normalizedDepth = radius * sqrt(1.0 - distanceFromCenter * distanceFromCenter);\n     highp vec3 sphereNormal = normalize(vec3(textureCoordinateToUse - center, normalizedDepth));\n     \n     highp vec3 refractedVector = refract(vec3(0.0, 0.0, -1.0), sphereNormal, refractiveIndex);\n     \n     gl_FragColor = texture2D(inputImageTexture, (refractedVector.xy + 1.0) * 0.5) * checkForPresenceWithinSphere;     \n }\n);\n#else\nNSString *const kGPUImageSphereRefractionFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform vec2 center;\n uniform float radius;\n uniform float aspectRatio;\n uniform float refractiveIndex;\n \n void main()\n {\n     vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n     float distanceFromCenter = distance(center, textureCoordinateToUse);\n     float checkForPresenceWithinSphere = step(distanceFromCenter, radius);\n     \n     distanceFromCenter = distanceFromCenter / radius;\n     \n     float normalizedDepth = radius * sqrt(1.0 - distanceFromCenter * distanceFromCenter);\n     vec3 sphereNormal = normalize(vec3(textureCoordinateToUse - center, normalizedDepth));\n     \n     vec3 refractedVector = refract(vec3(0.0, 0.0, -1.0), sphereNormal, refractiveIndex);\n     \n     gl_FragColor = texture2D(inputImageTexture, (refractedVector.xy + 1.0) * 0.5) * checkForPresenceWithinSphere;\n }\n);\n#endif\n\n@interface GPUImageSphereRefractionFilter ()\n\n- (void)adjustAspectRatio;\n\n@property (readwrite, nonatomic) CGFloat aspectRatio;\n\n@end\n\n\n@implementation GPUImageSphereRefractionFilter\n\n@synthesize center = _center;\n@synthesize radius = _radius;\n@synthesize aspectRatio = _aspectRatio;\n@synthesize refractiveIndex = _refractiveIndex;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [self initWithFragmentShaderFromString:kGPUImageSphereRefractionFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;\n{\n    if (!(self = [super initWithFragmentShaderFromString:fragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    radiusUniform = [filterProgram uniformIndex:@\"radius\"];\n    aspectRatioUniform = [filterProgram uniformIndex:@\"aspectRatio\"];\n    centerUniform = [filterProgram uniformIndex:@\"center\"];\n    refractiveIndexUniform = [filterProgram uniformIndex:@\"refractiveIndex\"];\n    \n    self.radius = 0.25;\n    self.center = CGPointMake(0.5, 0.5);\n    self.refractiveIndex = 0.71;\n    \n    [self setBackgroundColorRed:0.0 green:0.0 blue:0.0 alpha:0.0];\n 
   \n    return self;\n}\n\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n{\n    CGSize oldInputSize = inputTextureSize;\n    [super setInputSize:newSize atIndex:textureIndex];\n\n    if (!CGSizeEqualToSize(oldInputSize, inputTextureSize) && (!CGSizeEqualToSize(newSize, CGSizeZero)) )\n    {\n        [self adjustAspectRatio];\n    }\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)adjustAspectRatio;\n{\n    if (GPUImageRotationSwapsWidthAndHeight(inputRotation))\n    {\n        [self setAspectRatio:(inputTextureSize.width / inputTextureSize.height)];\n    }\n    else\n    {\n        [self setAspectRatio:(inputTextureSize.height / inputTextureSize.width)];\n    }\n}\n\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n    [super setInputRotation:newInputRotation atIndex:textureIndex];\n    [self setCenter:self.center];\n    [self adjustAspectRatio];\n}\n\n- (void)forceProcessingAtSize:(CGSize)frameSize;\n{\n    [super forceProcessingAtSize:frameSize];\n    [self adjustAspectRatio];\n}\n\n- (void)setRadius:(CGFloat)newValue;\n{\n    _radius = newValue;\n    \n    [self setFloat:_radius forUniform:radiusUniform program:filterProgram];\n}\n\n- (void)setCenter:(CGPoint)newValue;\n{\n    _center = newValue;\n    \n    CGPoint rotatedPoint = [self rotatedPoint:_center forRotation:inputRotation];\n    [self setPoint:rotatedPoint forUniform:centerUniform program:filterProgram];\n}\n\n- (void)setAspectRatio:(CGFloat)newValue;\n{\n    _aspectRatio = newValue;\n    \n    [self setFloat:_aspectRatio forUniform:aspectRatioUniform program:filterProgram];\n}\n\n- (void)setRefractiveIndex:(CGFloat)newValue;\n{\n    _refractiveIndex = newValue;\n\n    [self setFloat:_refractiveIndex forUniform:refractiveIndexUniform program:filterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageStillCamera.h",
    "content": "#import \"GPUImageVideoCamera.h\"\n\nvoid stillImageDataReleaseCallback(void *releaseRefCon, const void *baseAddress);\nvoid GPUImageCreateResizedSampleBuffer(CVPixelBufferRef cameraFrame, CGSize finalSize, CMSampleBufferRef *sampleBuffer);\n\n@interface GPUImageStillCamera : GPUImageVideoCamera\n\n/** The JPEG compression quality to use when capturing a photo as a JPEG.\n */\n@property CGFloat jpegCompressionQuality;\n\n// Only reliably set inside the context of the completion handler of one of the capture methods\n@property (readonly) NSDictionary *currentCaptureMetadata;\n\n// Photography controls\n- (void)capturePhotoAsSampleBufferWithCompletionHandler:(void (^)(CMSampleBufferRef imageSampleBuffer, NSError *error))block;\n- (void)capturePhotoAsImageProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(UIImage *processedImage, NSError *error))block;\n- (void)capturePhotoAsImageProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withOrientation:(UIImageOrientation)orientation withCompletionHandler:(void (^)(UIImage *processedImage, NSError *error))block;\n- (void)capturePhotoAsJPEGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedJPEG, NSError *error))block;\n- (void)capturePhotoAsJPEGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withOrientation:(UIImageOrientation)orientation withCompletionHandler:(void (^)(NSData *processedJPEG, NSError *error))block;\n- (void)capturePhotoAsPNGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedPNG, NSError *error))block;\n- (void)capturePhotoAsPNGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withOrientation:(UIImageOrientation)orientation withCompletionHandler:(void (^)(NSData *processedPNG, NSError *error))block;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageStillCamera.m",
    "content": "// 2448x3264 pixel image = 31,961,088 bytes for uncompressed RGBA\n\n#import \"GPUImageStillCamera.h\"\n\nvoid stillImageDataReleaseCallback(void *releaseRefCon, const void *baseAddress)\n{\n    free((void *)baseAddress);\n}\n\nvoid GPUImageCreateResizedSampleBuffer(CVPixelBufferRef cameraFrame, CGSize finalSize, CMSampleBufferRef *sampleBuffer)\n{\n    // CVPixelBufferCreateWithPlanarBytes for YUV input\n    \n    CGSize originalSize = CGSizeMake(CVPixelBufferGetWidth(cameraFrame), CVPixelBufferGetHeight(cameraFrame));\n\n    CVPixelBufferLockBaseAddress(cameraFrame, 0);\n    GLubyte *sourceImageBytes =  CVPixelBufferGetBaseAddress(cameraFrame);\n    CGDataProviderRef dataProvider = CGDataProviderCreateWithData(NULL, sourceImageBytes, CVPixelBufferGetBytesPerRow(cameraFrame) * originalSize.height, NULL);\n    CGColorSpaceRef genericRGBColorspace = CGColorSpaceCreateDeviceRGB();\n    CGImageRef cgImageFromBytes = CGImageCreate((int)originalSize.width, (int)originalSize.height, 8, 32, CVPixelBufferGetBytesPerRow(cameraFrame), genericRGBColorspace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst, dataProvider, NULL, NO, kCGRenderingIntentDefault);\n    \n    GLubyte *imageData = (GLubyte *) calloc(1, (int)finalSize.width * (int)finalSize.height * 4);\n    \n    CGContextRef imageContext = CGBitmapContextCreate(imageData, (int)finalSize.width, (int)finalSize.height, 8, (int)finalSize.width * 4, genericRGBColorspace,  kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);\n    CGContextDrawImage(imageContext, CGRectMake(0.0, 0.0, finalSize.width, finalSize.height), cgImageFromBytes);\n    CGImageRelease(cgImageFromBytes);\n    CGContextRelease(imageContext);\n    CGColorSpaceRelease(genericRGBColorspace);\n    CGDataProviderRelease(dataProvider);\n    \n    CVPixelBufferRef pixel_buffer = NULL;\n    CVPixelBufferCreateWithBytes(kCFAllocatorDefault, finalSize.width, finalSize.height, kCVPixelFormatType_32BGRA, imageData, finalSize.width * 4, stillImageDataReleaseCallback, NULL, NULL, &pixel_buffer);\n    CMVideoFormatDescriptionRef videoInfo = NULL;\n    CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixel_buffer, &videoInfo);\n    \n    CMTime frameTime = CMTimeMake(1, 30);\n    CMSampleTimingInfo timing = {frameTime, frameTime, kCMTimeInvalid};\n    \n    CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixel_buffer, YES, NULL, NULL, videoInfo, &timing, sampleBuffer);\n    CVPixelBufferUnlockBaseAddress(cameraFrame, 0);\n    CFRelease(videoInfo);\n    CVPixelBufferRelease(pixel_buffer);\n}\n\n@interface GPUImageStillCamera ()\n{\n    AVCaptureStillImageOutput *photoOutput;\n}\n\n// Methods calling this are responsible for calling dispatch_semaphore_signal(frameRenderingSemaphore) somewhere inside the block\n- (void)capturePhotoProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withImageOnGPUHandler:(void (^)(NSError *error))block;\n\n@end\n\n@implementation GPUImageStillCamera {\n    BOOL requiresFrontCameraTextureCacheCorruptionWorkaround;\n}\n\n@synthesize currentCaptureMetadata = _currentCaptureMetadata;\n@synthesize jpegCompressionQuality = _jpegCompressionQuality;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition;\n{\n    if (!(self = [super initWithSessionPreset:sessionPreset cameraPosition:cameraPosition]))\n    {\n\t\treturn nil;\n    }\n    \n    /* Detect iOS version < 6 which require a texture 
cache corruption workaround */\n#pragma clang diagnostic push\n#pragma clang diagnostic ignored \"-Wdeprecated-declarations\"\n    requiresFrontCameraTextureCacheCorruptionWorkaround = [[[UIDevice currentDevice] systemVersion] compare:@\"6.0\" options:NSNumericSearch] == NSOrderedAscending;\n#pragma clang diagnostic pop\n    \n    [self.captureSession beginConfiguration];\n    \n    photoOutput = [[AVCaptureStillImageOutput alloc] init];\n   \n    // Having a still photo input set to BGRA and video to YUV doesn't work well, so since I don't have YUV resizing for iPhone 4 yet, kick back to BGRA for that device\n//    if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])\n    if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])\n    {\n        BOOL supportsFullYUVRange = NO;\n        NSArray *supportedPixelFormats = photoOutput.availableImageDataCVPixelFormatTypes;\n        for (NSNumber *currentPixelFormat in supportedPixelFormats)\n        {\n            if ([currentPixelFormat intValue] == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)\n            {\n                supportsFullYUVRange = YES;\n            }\n        }\n        \n        if (supportsFullYUVRange)\n        {\n            [photoOutput setOutputSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];\n        }\n        else\n        {\n            [photoOutput setOutputSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];\n        }\n    }\n    else\n    {\n        captureAsYUV = NO;\n        [photoOutput setOutputSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];\n        [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];\n    }\n    \n    [self.captureSession addOutput:photoOutput];\n    \n    [self.captureSession commitConfiguration];\n    \n    self.jpegCompressionQuality = 0.8;\n    \n    return self;\n}\n\n- (id)init;\n{\n    if (!(self = [self initWithSessionPreset:AVCaptureSessionPresetPhoto cameraPosition:AVCaptureDevicePositionBack]))\n    {\n\t\treturn nil;\n    }\n    return self;\n}\n\n- (void)removeInputsAndOutputs;\n{\n    [self.captureSession removeOutput:photoOutput];\n    [super removeInputsAndOutputs];\n}\n\n#pragma mark -\n#pragma mark Photography controls\n\n- (void)capturePhotoAsSampleBufferWithCompletionHandler:(void (^)(CMSampleBufferRef imageSampleBuffer, NSError *error))block\n{\n    NSLog(@\"If you want to use the method capturePhotoAsSampleBufferWithCompletionHandler:, you must comment out the line in GPUImageStillCamera.m in the method initWithSessionPreset:cameraPosition: which sets the CVPixelBufferPixelFormatTypeKey, as well as uncomment the rest of the method capturePhotoAsSampleBufferWithCompletionHandler:. 
However, if you do this you cannot use any of the photo capture methods to take a photo if you also supply a filter.\");\n    \n    /*dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_FOREVER);\n    \n    [photoOutput captureStillImageAsynchronouslyFromConnection:[[photoOutput connections] objectAtIndex:0] completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {\n        block(imageSampleBuffer, error);\n    }];\n     \n     dispatch_semaphore_signal(frameRenderingSemaphore);\n\n     */\n    \n    return;\n}\n\n- (void)capturePhotoAsImageProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(UIImage *processedImage, NSError *error))block;\n{\n    [self capturePhotoProcessedUpToFilter:finalFilterInChain withImageOnGPUHandler:^(NSError *error) {\n        UIImage *filteredPhoto = nil;\n\n        if(!error){\n            filteredPhoto = [finalFilterInChain imageFromCurrentFramebuffer];\n        }\n        dispatch_semaphore_signal(frameRenderingSemaphore);\n\n        block(filteredPhoto, error);\n    }];\n}\n\n- (void)capturePhotoAsImageProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withOrientation:(UIImageOrientation)orientation withCompletionHandler:(void (^)(UIImage *processedImage, NSError *error))block {\n    [self capturePhotoProcessedUpToFilter:finalFilterInChain withImageOnGPUHandler:^(NSError *error) {\n        UIImage *filteredPhoto = nil;\n        \n        if(!error) {\n            filteredPhoto = [finalFilterInChain imageFromCurrentFramebufferWithOrientation:orientation];\n        }\n        dispatch_semaphore_signal(frameRenderingSemaphore);\n        \n        block(filteredPhoto, error);\n    }];\n}\n\n- (void)capturePhotoAsJPEGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedJPEG, NSError *error))block;\n{\n//    reportAvailableMemoryForGPUImage(@\"Before Capture\");\n\n    [self capturePhotoProcessedUpToFilter:finalFilterInChain withImageOnGPUHandler:^(NSError *error) {\n        NSData *dataForJPEGFile = nil;\n\n        if(!error){\n            @autoreleasepool {\n                UIImage *filteredPhoto = [finalFilterInChain imageFromCurrentFramebuffer];\n                dispatch_semaphore_signal(frameRenderingSemaphore);\n//                reportAvailableMemoryForGPUImage(@\"After UIImage generation\");\n\n                dataForJPEGFile = UIImageJPEGRepresentation(filteredPhoto,self.jpegCompressionQuality);\n//                reportAvailableMemoryForGPUImage(@\"After JPEG generation\");\n            }\n\n//            reportAvailableMemoryForGPUImage(@\"After autorelease pool\");\n        }else{\n            dispatch_semaphore_signal(frameRenderingSemaphore);\n        }\n\n        block(dataForJPEGFile, error);\n    }];\n}\n\n- (void)capturePhotoAsJPEGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withOrientation:(UIImageOrientation)orientation withCompletionHandler:(void (^)(NSData *processedImage, NSError *error))block {\n    [self capturePhotoProcessedUpToFilter:finalFilterInChain withImageOnGPUHandler:^(NSError *error) {\n        NSData *dataForJPEGFile = nil;\n        \n        if(!error) {\n            @autoreleasepool {\n                UIImage *filteredPhoto = [finalFilterInChain imageFromCurrentFramebufferWithOrientation:orientation];\n                dispatch_semaphore_signal(frameRenderingSemaphore);\n                \n                dataForJPEGFile = 
UIImageJPEGRepresentation(filteredPhoto, self.jpegCompressionQuality);\n            }\n        } else {\n            dispatch_semaphore_signal(frameRenderingSemaphore);\n        }\n        \n        block(dataForJPEGFile, error);\n    }];\n}\n\n- (void)capturePhotoAsPNGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedPNG, NSError *error))block;\n{\n\n    [self capturePhotoProcessedUpToFilter:finalFilterInChain withImageOnGPUHandler:^(NSError *error) {\n        NSData *dataForPNGFile = nil;\n\n        if(!error){\n            @autoreleasepool {\n                UIImage *filteredPhoto = [finalFilterInChain imageFromCurrentFramebuffer];\n                dispatch_semaphore_signal(frameRenderingSemaphore);\n                dataForPNGFile = UIImagePNGRepresentation(filteredPhoto);\n            }\n        }else{\n            dispatch_semaphore_signal(frameRenderingSemaphore);\n        }\n        \n        block(dataForPNGFile, error);        \n    }];\n    \n    return;\n}\n\n- (void)capturePhotoAsPNGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withOrientation:(UIImageOrientation)orientation withCompletionHandler:(void (^)(NSData *processedPNG, NSError *error))block;\n{\n    \n    [self capturePhotoProcessedUpToFilter:finalFilterInChain withImageOnGPUHandler:^(NSError *error) {\n        NSData *dataForPNGFile = nil;\n        \n        if(!error){\n            @autoreleasepool {\n                UIImage *filteredPhoto = [finalFilterInChain imageFromCurrentFramebufferWithOrientation:orientation];\n                dispatch_semaphore_signal(frameRenderingSemaphore);\n                dataForPNGFile = UIImagePNGRepresentation(filteredPhoto);\n            }\n        }else{\n            dispatch_semaphore_signal(frameRenderingSemaphore);\n        }\n        \n        block(dataForPNGFile, error);\n    }];\n    \n    return;\n}\n\n#pragma mark - Private Methods\n\n- (void)capturePhotoProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withImageOnGPUHandler:(void (^)(NSError *error))block\n{\n    dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_FOREVER);\n\n    if(photoOutput.isCapturingStillImage){\n        block([NSError errorWithDomain:AVFoundationErrorDomain code:AVErrorMaximumStillImageCaptureRequestsExceeded userInfo:nil]);\n        return;\n    }\n\n    [photoOutput captureStillImageAsynchronouslyFromConnection:[[photoOutput connections] objectAtIndex:0] completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {\n        if(imageSampleBuffer == NULL){\n            block(error);\n            return;\n        }\n\n        // For now, resize photos to fix within the max texture size of the GPU\n        CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(imageSampleBuffer);\n        \n        CGSize sizeOfPhoto = CGSizeMake(CVPixelBufferGetWidth(cameraFrame), CVPixelBufferGetHeight(cameraFrame));\n        CGSize scaledImageSizeToFitOnGPU = [GPUImageContext sizeThatFitsWithinATextureForSize:sizeOfPhoto];\n        if (!CGSizeEqualToSize(sizeOfPhoto, scaledImageSizeToFitOnGPU))\n        {\n            CMSampleBufferRef sampleBuffer = NULL;\n            \n            if (CVPixelBufferGetPlaneCount(cameraFrame) > 0)\n            {\n                NSAssert(NO, @\"Error: no downsampling for YUV input in the framework yet\");\n            }\n            else\n            {\n                GPUImageCreateResizedSampleBuffer(cameraFrame, 
scaledImageSizeToFitOnGPU, &sampleBuffer);\n            }\n\n            dispatch_semaphore_signal(frameRenderingSemaphore);\n            [finalFilterInChain useNextFrameForImageCapture];\n            [self captureOutput:photoOutput didOutputSampleBuffer:sampleBuffer fromConnection:[[photoOutput connections] objectAtIndex:0]];\n            dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_FOREVER);\n            if (sampleBuffer != NULL)\n                CFRelease(sampleBuffer);\n        }\n        else\n        {\n            // This is a workaround for the corrupt images that are sometimes returned when taking a photo with the front camera and using the iOS 5.0 texture caches\n            AVCaptureDevicePosition currentCameraPosition = [[videoInput device] position];\n            if ( (currentCameraPosition != AVCaptureDevicePositionFront) || (![GPUImageContext supportsFastTextureUpload]) || !requiresFrontCameraTextureCacheCorruptionWorkaround)\n            {\n                dispatch_semaphore_signal(frameRenderingSemaphore);\n                [finalFilterInChain useNextFrameForImageCapture];\n                [self captureOutput:photoOutput didOutputSampleBuffer:imageSampleBuffer fromConnection:[[photoOutput connections] objectAtIndex:0]];\n                dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_FOREVER);\n            }\n        }\n        \n        CFDictionaryRef metadata = CMCopyDictionaryOfAttachments(NULL, imageSampleBuffer, kCMAttachmentMode_ShouldPropagate);\n        _currentCaptureMetadata = (__bridge_transfer NSDictionary *)metadata;\n\n        block(nil);\n\n        _currentCaptureMetadata = nil;\n    }];\n}\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageStretchDistortionFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n/** Creates a stretch distortion of the image\n */\n@interface GPUImageStretchDistortionFilter : GPUImageFilter {\n    GLint centerUniform;\n}\n\n/** The center about which to apply the distortion, with a default of (0.5, 0.5)\n */\n@property(readwrite, nonatomic) CGPoint center;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageStretchDistortionFilter.m",
    "content": "#import \"GPUImageStretchDistortionFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageStretchDistortionFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform highp vec2 center;\n \n void main()\n {\n     highp vec2 normCoord = 2.0 * textureCoordinate - 1.0;\n     highp vec2 normCenter = 2.0 * center - 1.0;\n     \n     normCoord -= normCenter;\n     mediump vec2 s = sign(normCoord);\n     normCoord = abs(normCoord);\n     normCoord = 0.5 * normCoord + 0.5 * smoothstep(0.25, 0.5, normCoord) * normCoord;\n     normCoord = s * normCoord;\n     \n     normCoord += normCenter;\n        \n     mediump vec2 textureCoordinateToUse = normCoord / 2.0 + 0.5;\n     \n     \n     gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );\n     \n }\n);\n#else\nNSString *const kGPUImageStretchDistortionFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform vec2 center;\n \n void main()\n {\n     vec2 normCoord = 2.0 * textureCoordinate - 1.0;\n     vec2 normCenter = 2.0 * center - 1.0;\n     \n     normCoord -= normCenter;\n     vec2 s = sign(normCoord);\n     normCoord = abs(normCoord);\n     normCoord = 0.5 * normCoord + 0.5 * smoothstep(0.25, 0.5, normCoord) * normCoord;\n     normCoord = s * normCoord;\n     \n     normCoord += normCenter;\n     \n     vec2 textureCoordinateToUse = normCoord / 2.0 + 0.5;\n     \n     gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse);\n }\n);\n#endif\n\n@implementation GPUImageStretchDistortionFilter\n\n@synthesize center = _center;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageStretchDistortionFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    centerUniform = [filterProgram uniformIndex:@\"center\"];\n    \n    self.center = CGPointMake(0.5, 0.5);\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n    [super setInputRotation:newInputRotation atIndex:textureIndex];\n    [self setCenter:self.center];\n}\n\n- (void)setCenter:(CGPoint)newValue;\n{\n    _center = newValue;\n    \n    CGPoint rotatedPoint = [self rotatedPoint:_center forRotation:inputRotation];\n    [self setPoint:rotatedPoint forUniform:centerUniform program:filterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSubtractBlendFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageSubtractBlendFilter : GPUImageTwoInputFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSubtractBlendFilter.m",
    "content": "#import \"GPUImageSubtractBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageSubtractBlendFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n\t lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n\t lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);\n\t \n\t gl_FragColor = vec4(textureColor.rgb - textureColor2.rgb, textureColor.a);\n }\n);\n#else\nNSString *const kGPUImageSubtractBlendFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n\t vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n\t vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);\n\t \n\t gl_FragColor = vec4(textureColor.rgb - textureColor2.rgb, textureColor.a);\n }\n);\n#endif\n\n@implementation GPUImageSubtractBlendFilter\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageSubtractBlendFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSwirlFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n/** Creates a swirl distortion on the image\n */\n@interface GPUImageSwirlFilter : GPUImageFilter\n{\n    GLint radiusUniform, centerUniform, angleUniform;\n}\n\n/// The center about which to apply the distortion, with a default of (0.5, 0.5)\n@property(readwrite, nonatomic) CGPoint center;\n/// The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.5\n@property(readwrite, nonatomic) CGFloat radius;\n/// The amount of distortion to apply, with a minimum of 0.0 and a default of 1.0\n@property(readwrite, nonatomic) CGFloat angle;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSwirlFilter.m",
    "content": "#import \"GPUImageSwirlFilter.h\"\n\n// Adapted from the shader example here: http://www.geeks3d.com/20110428/shader-library-swirl-post-processing-filter-in-glsl/\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageSwirlFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform highp vec2 center;\n uniform highp float radius;\n uniform highp float angle;\n \n void main()\n {\n     highp vec2 textureCoordinateToUse = textureCoordinate;\n     highp float dist = distance(center, textureCoordinate);\n     if (dist < radius)\n     {\n         textureCoordinateToUse -= center;\n         highp float percent = (radius - dist) / radius;\n         highp float theta = percent * percent * angle * 8.0;\n         highp float s = sin(theta);\n         highp float c = cos(theta);\n         textureCoordinateToUse = vec2(dot(textureCoordinateToUse, vec2(c, -s)), dot(textureCoordinateToUse, vec2(s, c)));\n         textureCoordinateToUse += center;\n     }\n    \n     gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );\n     \n }\n);\n#else\nNSString *const kGPUImageSwirlFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform vec2 center;\n uniform float radius;\n uniform float angle;\n \n void main()\n {\n     vec2 textureCoordinateToUse = textureCoordinate;\n     float dist = distance(center, textureCoordinate);\n     if (dist < radius)\n     {\n         textureCoordinateToUse -= center;\n         float percent = (radius - dist) / radius;\n         float theta = percent * percent * angle * 8.0;\n         float s = sin(theta);\n         float c = cos(theta);\n         textureCoordinateToUse = vec2(dot(textureCoordinateToUse, vec2(c, -s)), dot(textureCoordinateToUse, vec2(s, c)));\n         textureCoordinateToUse += center;\n     }\n     \n     gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );\n }\n);\n#endif\n\n@implementation GPUImageSwirlFilter\n\n@synthesize center = _center;\n@synthesize radius = _radius;\n@synthesize angle = _angle;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageSwirlFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    radiusUniform = [filterProgram uniformIndex:@\"radius\"];\n    angleUniform = [filterProgram uniformIndex:@\"angle\"];\n    centerUniform = [filterProgram uniformIndex:@\"center\"];\n\n    self.radius = 0.5;\n    self.angle = 1.0;\n    self.center = CGPointMake(0.5, 0.5);\n\n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n    [super setInputRotation:newInputRotation atIndex:textureIndex];\n    [self setCenter:self.center];\n}\n\n- (void)setRadius:(CGFloat)newValue;\n{\n    _radius = newValue;\n    \n    [self setFloat:_radius forUniform:radiusUniform program:filterProgram];\n}\n\n- (void)setAngle:(CGFloat)newValue;\n{\n    _angle = newValue;\n\n    [self setFloat:_angle forUniform:angleUniform program:filterProgram];\n}\n\n- (void)setCenter:(CGPoint)newValue;\n{\n    _center = newValue;\n    \n    CGPoint rotatedPoint = [self rotatedPoint:_center forRotation:inputRotation];\n    [self setPoint:rotatedPoint forUniform:centerUniform program:filterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageThresholdEdgeDetectionFilter.h",
    "content": "#import \"GPUImageSobelEdgeDetectionFilter.h\"\n\n@interface GPUImageThresholdEdgeDetectionFilter : GPUImageSobelEdgeDetectionFilter\n{\n    GLint thresholdUniform;\n}\n\n/** Any edge above this threshold will be black, and anything below white. Ranges from 0.0 to 1.0, with 0.8 as the default\n */\n@property(readwrite, nonatomic) CGFloat threshold; \n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageThresholdEdgeDetectionFilter.m",
    "content": "#import \"GPUImageThresholdEdgeDetectionFilter.h\"\n\n@implementation GPUImageThresholdEdgeDetectionFilter\n\n// Invert the colorspace for a sketch\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageThresholdEdgeDetectionFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform lowp float threshold;\n \n uniform float edgeStrength;\n\n void main()\n {\n//     float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;\n//     float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;\n//     float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;\n//     float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;\n     float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;\n     float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;\n     float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;\n     float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;\n     float centerIntensity = texture2D(inputImageTexture, textureCoordinate).r;\n//     float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;\n//     float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;\n//     float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + leftIntensity + 2.0 * centerIntensity + rightIntensity;\n//     float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomIntensity + 2.0 * centerIntensity + topIntensity;\n     float h = (centerIntensity - topIntensity) + (bottomIntensity - centerIntensity);\n     float v = (centerIntensity - leftIntensity) + (rightIntensity - centerIntensity);\n//     float h = (centerIntensity - topIntensity);\n//     float j = (topIntensity - centerIntensity);\n//     h = max(h,j);\n//     j = abs(h);\n//     float v = (centerIntensity - leftIntensity);\n     \n    float mag = length(vec2(h, v)) * edgeStrength;\n     mag = step(threshold, mag);\n     \n//     float mag = abs(h);\n     \n//     gl_FragColor = vec4(h, h, h, 1.0);\n//     gl_FragColor = vec4(texture2D(inputImageTexture, textureCoordinate));\n//     gl_FragColor = vec4(h, centerIntensity, j, 1.0);\n     gl_FragColor = vec4(mag, mag, mag, 1.0);\n }\n);\n#else\nNSString *const kGPUImageThresholdEdgeDetectionFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform float threshold;\n \n uniform float edgeStrength;\n\n void main()\n {\n     float bottomLeftIntensity = 
texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;\n     float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;\n     float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;\n     float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;\n     float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;\n     float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;\n     float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;\n     float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;\n     float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;\n     h = max(0.0, h);\n     float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;\n     v = max(0.0, v);\n     \n     float mag = length(vec2(h, v)) * edgeStrength;\n     mag = step(threshold, mag);\n     \n     gl_FragColor = vec4(vec3(mag), 1.0);\n }\n);\n#endif\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n@synthesize threshold = _threshold;\n\n- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;\n{\n    if (!(self = [super initWithFragmentShaderFromString:fragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    thresholdUniform = [secondFilterProgram uniformIndex:@\"threshold\"];\n    self.threshold = 0.25;\n    self.edgeStrength = 1.0;\n    \n    return self;\n}\n\n\n- (id)init;\n{\n    if (!(self = [self initWithFragmentShaderFromString:kGPUImageThresholdEdgeDetectionFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setThreshold:(CGFloat)newValue;\n{\n    _threshold = newValue;\n    \n    [self setFloat:_threshold forUniform:thresholdUniform program:secondFilterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageThresholdSketchFilter.h",
    "content": "#import \"GPUImageThresholdEdgeDetectionFilter.h\"\n\n@interface GPUImageThresholdSketchFilter : GPUImageThresholdEdgeDetectionFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageThresholdSketchFilter.m",
    "content": "#import \"GPUImageThresholdSketchFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageThresholdSketchFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform lowp float threshold;\n uniform float edgeStrength;\n\n const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n \n void main()\n {\n     float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;\n     float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;\n     float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;\n     float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;\n     float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;\n     float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;\n     float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;\n     float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;\n     float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;\n     float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;\n     \n     float mag = (length(vec2(h, v)) * edgeStrength);\n     mag = step(threshold, mag);\n     mag = 1.0 - mag;\n     \n     gl_FragColor = vec4(vec3(mag), 1.0);\n }\n);\n#else\nNSString *const kGPUImageThresholdSketchFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform float threshold;\n uniform float edgeStrength;\n\n const vec3 W = vec3(0.2125, 0.7154, 0.0721);\n \n void main()\n {\n     float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;\n     float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;\n     float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;\n     float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;\n     float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;\n     float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;\n     float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;\n     float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;\n     float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;\n     float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + 
topRightIntensity;\n     \n     float mag = 1.0 - length(vec2(h, v) * edgeStrength);\n     mag = step(threshold, mag);\n     \n     gl_FragColor = vec4(vec3(mag), 1.0);\n }\n);\n#endif\n\n@implementation GPUImageThresholdSketchFilter\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [self initWithFragmentShaderFromString:kGPUImageThresholdSketchFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageThresholdedNonMaximumSuppressionFilter.h",
    "content": "#import \"GPUImage3x3TextureSamplingFilter.h\"\n\n@interface GPUImageThresholdedNonMaximumSuppressionFilter : GPUImage3x3TextureSamplingFilter\n{\n    GLint thresholdUniform;\n}\n\n/** Any local maximum above this threshold will be white, and anything below black. Ranges from 0.0 to 1.0, with 0.8 as the default\n */\n@property(readwrite, nonatomic) CGFloat threshold;\n\n- (id)initWithPackedColorspace:(BOOL)inputUsesPackedColorspace;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageThresholdedNonMaximumSuppressionFilter.m",
    "content": "#import \"GPUImageThresholdedNonMaximumSuppressionFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageThresholdedNonMaximumSuppressionFragmentShaderString = SHADER_STRING\n(\n uniform sampler2D inputImageTexture;\n \n varying highp vec2 textureCoordinate;\n varying highp vec2 leftTextureCoordinate;\n varying highp vec2 rightTextureCoordinate;\n \n varying highp vec2 topTextureCoordinate;\n varying highp vec2 topLeftTextureCoordinate;\n varying highp vec2 topRightTextureCoordinate;\n \n varying highp vec2 bottomTextureCoordinate;\n varying highp vec2 bottomLeftTextureCoordinate;\n varying highp vec2 bottomRightTextureCoordinate;\n \n uniform lowp float threshold;\n \n void main()\n {\n     lowp float bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).r;\n     lowp float bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;\n     lowp float bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;\n     lowp vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);\n     lowp float leftColor = texture2D(inputImageTexture, leftTextureCoordinate).r;\n     lowp float rightColor = texture2D(inputImageTexture, rightTextureCoordinate).r;\n     lowp float topColor = texture2D(inputImageTexture, topTextureCoordinate).r;\n     lowp float topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).r;\n     lowp float topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).r;\n     \n     // Use a tiebreaker for pixels to the left and immediately above this one\n     lowp float multiplier = 1.0 - step(centerColor.r, topColor);\n     multiplier = multiplier * (1.0 - step(centerColor.r, topLeftColor));\n     multiplier = multiplier * (1.0 - step(centerColor.r, leftColor));\n     multiplier = multiplier * (1.0 - step(centerColor.r, bottomLeftColor));\n     \n     lowp float maxValue = max(centerColor.r, bottomColor);\n     maxValue = max(maxValue, bottomRightColor);\n     maxValue = max(maxValue, rightColor);\n     maxValue = max(maxValue, topRightColor);\n     \n     lowp float finalValue = centerColor.r * step(maxValue, centerColor.r) * multiplier;\n     finalValue = step(threshold, finalValue);\n     \n     gl_FragColor = vec4(finalValue, finalValue, finalValue, 1.0);\n//\n//     gl_FragColor = vec4((centerColor.rgb * step(maxValue, step(threshold, centerColor.r)) * multiplier), 1.0);\n }\n);\n\nNSString *const kGPUImageThresholdedNonMaximumSuppressionPackedColorspaceFragmentShaderString = SHADER_STRING\n(\n uniform sampler2D inputImageTexture;\n \n varying highp vec2 textureCoordinate;\n varying highp vec2 leftTextureCoordinate;\n varying highp vec2 rightTextureCoordinate;\n \n varying highp vec2 topTextureCoordinate;\n varying highp vec2 topLeftTextureCoordinate;\n varying highp vec2 topRightTextureCoordinate;\n \n varying highp vec2 bottomTextureCoordinate;\n varying highp vec2 bottomLeftTextureCoordinate;\n varying highp vec2 bottomRightTextureCoordinate;\n \n uniform lowp float threshold;\n uniform highp float texelWidth;\n uniform highp float texelHeight;\n\n highp float encodedIntensity(highp vec3 sourceColor)\n {\n     return (sourceColor.b * 256.0 * 256.0 + sourceColor.g * 256.0 + sourceColor.r);\n }\n \n void main()\n {\n     highp float bottomColor = encodedIntensity(texture2D(inputImageTexture, bottomTextureCoordinate).rgb);\n     highp float bottomLeftColor = encodedIntensity(texture2D(inputImageTexture, 
bottomLeftTextureCoordinate).rgb);\n     highp float bottomRightColor = encodedIntensity(texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb);\n     highp float centerColor = encodedIntensity(texture2D(inputImageTexture, textureCoordinate).rgb);\n     highp float leftColor = encodedIntensity(texture2D(inputImageTexture, leftTextureCoordinate).rgb);\n     highp float rightColor = encodedIntensity(texture2D(inputImageTexture, rightTextureCoordinate).rgb);\n     highp float topColor = encodedIntensity(texture2D(inputImageTexture, topTextureCoordinate).rgb);\n     highp float topRightColor = encodedIntensity(texture2D(inputImageTexture, topRightTextureCoordinate).rgb);\n     highp float topLeftColor = encodedIntensity(texture2D(inputImageTexture, topLeftTextureCoordinate).rgb);\n     \n     highp float secondStageColor1 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(-2.0 * texelWidth, -2.0 * texelHeight)).rgb);\n     highp float secondStageColor2 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(-2.0 * texelWidth, -1.0 * texelHeight)).rgb);\n     highp float secondStageColor3 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(-2.0 * texelWidth, 0.0)).rgb);\n     highp float secondStageColor4 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(-2.0 * texelWidth, 1.0 * texelHeight)).rgb);\n     highp float secondStageColor5 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(-2.0 * texelWidth, 2.0 * texelHeight)).rgb);\n     highp float secondStageColor6 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(-1.0 * texelWidth, 2.0 * texelHeight)).rgb);\n     highp float secondStageColor7 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(0.0, 2.0 * texelHeight)).rgb);\n     highp float secondStageColor8 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(1.0 * texelWidth, 2.0 * texelHeight)).rgb);\n\n     highp float thirdStageColor1 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(-1.0 * texelWidth, -2.0 * texelHeight)).rgb);\n     highp float thirdStageColor2 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(0.0, -2.0 * texelHeight)).rgb);\n     highp float thirdStageColor3 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(1.0 * texelWidth, -2.0 * texelHeight)).rgb);\n     highp float thirdStageColor4 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(2.0 * texelWidth, -2.0 * texelHeight)).rgb);\n     highp float thirdStageColor5 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(2.0 * texelWidth, -1.0 * texelHeight)).rgb);\n     highp float thirdStageColor6 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(2.0 * texelWidth, 0.0)).rgb);\n     highp float thirdStageColor7 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(2.0 * texelWidth, 1.0 * texelHeight)).rgb);\n     highp float thirdStageColor8 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(2.0 * texelWidth, 2.0 * texelHeight)).rgb);\n     \n     // Use a tiebreaker for pixels to the left and immediately above this one\n     highp float multiplier = 1.0 - step(centerColor, topColor);\n     multiplier = multiplier * (1.0 - step(centerColor, topLeftColor));\n     multiplier = multiplier * (1.0 - step(centerColor, leftColor));\n     multiplier = multiplier * (1.0 - 
step(centerColor, bottomLeftColor));\n\n     multiplier = multiplier * (1.0 - step(centerColor, secondStageColor1));\n     multiplier = multiplier * (1.0 - step(centerColor, secondStageColor2));\n     multiplier = multiplier * (1.0 - step(centerColor, secondStageColor3));\n     multiplier = multiplier * (1.0 - step(centerColor, secondStageColor4));\n     multiplier = multiplier * (1.0 - step(centerColor, secondStageColor5));\n     multiplier = multiplier * (1.0 - step(centerColor, secondStageColor6));\n     multiplier = multiplier * (1.0 - step(centerColor, secondStageColor7));\n     multiplier = multiplier * (1.0 - step(centerColor, secondStageColor8));\n\n     highp float maxValue = max(centerColor, bottomColor);\n     maxValue = max(maxValue, bottomRightColor);\n     maxValue = max(maxValue, rightColor);\n     maxValue = max(maxValue, topRightColor);\n\n     maxValue = max(maxValue, thirdStageColor1);\n     maxValue = max(maxValue, thirdStageColor2);\n     maxValue = max(maxValue, thirdStageColor3);\n     maxValue = max(maxValue, thirdStageColor4);\n     maxValue = max(maxValue, thirdStageColor5);\n     maxValue = max(maxValue, thirdStageColor6);\n     maxValue = max(maxValue, thirdStageColor7);\n     maxValue = max(maxValue, thirdStageColor8);\n\n     highp float midValue = centerColor * step(maxValue, centerColor) * multiplier;\n     highp float finalValue = step(threshold, midValue);\n     \n     gl_FragColor = vec4(finalValue * centerColor, topLeftColor, topRightColor, topColor);\n }\n);\n#else\nNSString *const kGPUImageThresholdedNonMaximumSuppressionFragmentShaderString = SHADER_STRING\n(\n uniform sampler2D inputImageTexture;\n \n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform float threshold;\n \n void main()\n {\n     float bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).r;\n     float bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;\n     float bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;\n     vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);\n     float leftColor = texture2D(inputImageTexture, leftTextureCoordinate).r;\n     float rightColor = texture2D(inputImageTexture, rightTextureCoordinate).r;\n     float topColor = texture2D(inputImageTexture, topTextureCoordinate).r;\n     float topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).r;\n     float topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).r;\n     \n     // Use a tiebreaker for pixels to the left and immediately above this one\n     float multiplier = 1.0 - step(centerColor.r, topColor);\n     multiplier = multiplier * (1.0 - step(centerColor.r, topLeftColor));\n     multiplier = multiplier * (1.0 - step(centerColor.r, leftColor));\n     multiplier = multiplier * (1.0 - step(centerColor.r, bottomLeftColor));\n     \n     float maxValue = max(centerColor.r, bottomColor);\n     maxValue = max(maxValue, bottomRightColor);\n     maxValue = max(maxValue, rightColor);\n     maxValue = max(maxValue, topRightColor);\n     \n     float finalValue = centerColor.r * step(maxValue, centerColor.r) * multiplier;\n     finalValue = step(threshold, finalValue);\n  
   \n     gl_FragColor = vec4(finalValue, finalValue, finalValue, 1.0);\n     //\n     //     gl_FragColor = vec4((centerColor.rgb * step(maxValue, step(threshold, centerColor.r)) * multiplier), 1.0);\n }\n);\n\nNSString *const kGPUImageThresholdedNonMaximumSuppressionPackedColorspaceFragmentShaderString = SHADER_STRING\n(\n uniform sampler2D inputImageTexture;\n \n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform float threshold;\n \n void main()\n {\n     float bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).r;\n     float bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;\n     float bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;\n     vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);\n     float leftColor = texture2D(inputImageTexture, leftTextureCoordinate).r;\n     float rightColor = texture2D(inputImageTexture, rightTextureCoordinate).r;\n     float topColor = texture2D(inputImageTexture, topTextureCoordinate).r;\n     float topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).r;\n     float topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).r;\n     \n     // Use a tiebreaker for pixels to the left and immediately above this one\n     float multiplier = 1.0 - step(centerColor.r, topColor);\n     multiplier = multiplier * (1.0 - step(centerColor.r, topLeftColor));\n     multiplier = multiplier * (1.0 - step(centerColor.r, leftColor));\n     multiplier = multiplier * (1.0 - step(centerColor.r, bottomLeftColor));\n     \n     float maxValue = max(centerColor.r, bottomColor);\n     maxValue = max(maxValue, bottomRightColor);\n     maxValue = max(maxValue, rightColor);\n     maxValue = max(maxValue, topRightColor);\n     \n     float finalValue = centerColor.r * step(maxValue, centerColor.r) * multiplier;\n     finalValue = step(threshold, finalValue);\n     \n     gl_FragColor = vec4(finalValue, finalValue, finalValue, 1.0);\n     //\n     //     gl_FragColor = vec4((centerColor.rgb * step(maxValue, step(threshold, centerColor.r)) * multiplier), 1.0);\n }\n );\n#endif\n\n@implementation GPUImageThresholdedNonMaximumSuppressionFilter\n\n@synthesize threshold = _threshold;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [self initWithPackedColorspace:NO]))\n    {\n        return nil;\n    }\n    \n    return self;\n}\n\n- (id)initWithPackedColorspace:(BOOL)inputUsesPackedColorspace;\n{\n    NSString *shaderString;\n    if (inputUsesPackedColorspace)\n    {\n        shaderString = kGPUImageThresholdedNonMaximumSuppressionPackedColorspaceFragmentShaderString;\n    }\n    else\n    {\n        shaderString = kGPUImageThresholdedNonMaximumSuppressionFragmentShaderString;\n    }\n    \n    \n    if (!(self = [super initWithFragmentShaderFromString:shaderString]))\n    {\n        return nil;\n    }\n    \n    thresholdUniform = [filterProgram uniformIndex:@\"threshold\"];\n    self.threshold = 0.9;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setThreshold:(CGFloat)newValue;\n{\n    _threshold = newValue;\n    \n    [self setFloat:_threshold 
forUniform:thresholdUniform program:filterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageTiltShiftFilter.h",
    "content": "#import \"GPUImageFilterGroup.h\"\n\n@class GPUImageGaussianBlurFilter;\n\n/// A simulated tilt shift lens effect\n@interface GPUImageTiltShiftFilter : GPUImageFilterGroup\n{\n    GPUImageGaussianBlurFilter *blurFilter;\n    GPUImageFilter *tiltShiftFilter;\n}\n\n/// The radius of the underlying blur, in pixels. This is 7.0 by default.\n@property(readwrite, nonatomic) CGFloat blurRadiusInPixels;\n\n/// The normalized location of the top of the in-focus area in the image, this value should be lower than bottomFocusLevel, default 0.4\n@property(readwrite, nonatomic) CGFloat topFocusLevel;\n\n/// The normalized location of the bottom of the in-focus area in the image, this value should be higher than topFocusLevel, default 0.6\n@property(readwrite, nonatomic) CGFloat bottomFocusLevel; \n\n/// The rate at which the image gets blurry away from the in-focus region, default 0.2\n@property(readwrite, nonatomic) CGFloat focusFallOffRate; \n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageTiltShiftFilter.m",
    "content": "#import \"GPUImageTiltShiftFilter.h\"\n#import \"GPUImageFilter.h\"\n#import \"GPUImageTwoInputFilter.h\"\n#import \"GPUImageGaussianBlurFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageTiltShiftFragmentShaderString = SHADER_STRING\n( \n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2; \n \n uniform highp float topFocusLevel;\n uniform highp float bottomFocusLevel;\n uniform highp float focusFallOffRate;\n \n void main()\n {\n     lowp vec4 sharpImageColor = texture2D(inputImageTexture, textureCoordinate);\n     lowp vec4 blurredImageColor = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     lowp float blurIntensity = 1.0 - smoothstep(topFocusLevel - focusFallOffRate, topFocusLevel, textureCoordinate2.y);\n     blurIntensity += smoothstep(bottomFocusLevel, bottomFocusLevel + focusFallOffRate, textureCoordinate2.y);\n     \n     gl_FragColor = mix(sharpImageColor, blurredImageColor, blurIntensity);\n }\n);\n#else\nNSString *const kGPUImageTiltShiftFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n uniform float topFocusLevel;\n uniform float bottomFocusLevel;\n uniform float focusFallOffRate;\n \n void main()\n {\n     vec4 sharpImageColor = texture2D(inputImageTexture, textureCoordinate);\n     vec4 blurredImageColor = texture2D(inputImageTexture2, textureCoordinate2);\n     \n     float blurIntensity = 1.0 - smoothstep(topFocusLevel - focusFallOffRate, topFocusLevel, textureCoordinate2.y);\n     blurIntensity += smoothstep(bottomFocusLevel, bottomFocusLevel + focusFallOffRate, textureCoordinate2.y);\n     \n     gl_FragColor = mix(sharpImageColor, blurredImageColor, blurIntensity);\n }\n);\n#endif\n\n@implementation GPUImageTiltShiftFilter\n\n@synthesize blurRadiusInPixels;\n@synthesize topFocusLevel = _topFocusLevel;\n@synthesize bottomFocusLevel = _bottomFocusLevel;\n@synthesize focusFallOffRate = _focusFallOffRate;\n\n- (id)init;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n    // First pass: apply a variable Gaussian blur\n    blurFilter = [[GPUImageGaussianBlurFilter alloc] init];\n    [self addFilter:blurFilter];\n        \n    // Second pass: combine the blurred image with the original sharp one\n    tiltShiftFilter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:kGPUImageTiltShiftFragmentShaderString];\n    [self addFilter:tiltShiftFilter];\n    \n    // Texture location 0 needs to be the sharp image for both the blur and the second stage processing\n    [blurFilter addTarget:tiltShiftFilter atTextureLocation:1];\n    \n    // To prevent double updating of this filter, disable updates from the sharp image side\n//    self.inputFilterToIgnoreForUpdates = tiltShiftFilter;\n    \n    self.initialFilters = [NSArray arrayWithObjects:blurFilter, tiltShiftFilter, nil];\n    self.terminalFilter = tiltShiftFilter;\n    \n    self.topFocusLevel = 0.4;\n    self.bottomFocusLevel = 0.6;\n    self.focusFallOffRate = 0.2;\n    self.blurRadiusInPixels = 7.0;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setBlurRadiusInPixels:(CGFloat)newValue;\n{\n    blurFilter.blurRadiusInPixels = newValue;\n}\n\n- (CGFloat)blurRadiusInPixels;\n{\n    return blurFilter.blurRadiusInPixels;\n}\n\n- 
(void)setTopFocusLevel:(CGFloat)newValue;\n{\n    _topFocusLevel = newValue;\n    [tiltShiftFilter setFloat:newValue forUniformName:@\"topFocusLevel\"];\n}\n\n- (void)setBottomFocusLevel:(CGFloat)newValue;\n{\n    _bottomFocusLevel = newValue;\n    [tiltShiftFilter setFloat:newValue forUniformName:@\"bottomFocusLevel\"];\n}\n\n- (void)setFocusFallOffRate:(CGFloat)newValue;\n{\n    _focusFallOffRate = newValue;\n    [tiltShiftFilter setFloat:newValue forUniformName:@\"focusFallOffRate\"];\n}\n\n@end"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageToneCurveFilter.h",
    "content": "#import \"GPUImageFilter.h\"\r\n\r\n@interface GPUImageToneCurveFilter : GPUImageFilter\r\n\r\n@property(readwrite, nonatomic, copy) NSArray *redControlPoints;\r\n@property(readwrite, nonatomic, copy) NSArray *greenControlPoints;\r\n@property(readwrite, nonatomic, copy) NSArray *blueControlPoints;\r\n@property(readwrite, nonatomic, copy) NSArray *rgbCompositeControlPoints;\r\n\r\n// Initialization and teardown\r\n- (id)initWithACVData:(NSData*)data;\r\n\r\n- (id)initWithACV:(NSString*)curveFilename;\r\n- (id)initWithACVURL:(NSURL*)curveFileURL;\r\n\r\n// This lets you set all three red, green, and blue tone curves at once.\r\n// NOTE: Deprecated this function because this effect can be accomplished\r\n// using the rgbComposite channel rather then setting all 3 R, G, and B channels.\r\n- (void)setRGBControlPoints:(NSArray *)points DEPRECATED_ATTRIBUTE;\r\n\r\n- (void)setPointsWithACV:(NSString*)curveFilename;\r\n- (void)setPointsWithACVURL:(NSURL*)curveFileURL;\r\n\r\n// Curve calculation\r\n- (NSMutableArray *)getPreparedSplineCurve:(NSArray *)points;\r\n- (NSMutableArray *)splineCurve:(NSArray *)points;\r\n- (NSMutableArray *)secondDerivative:(NSArray *)cgPoints;\r\n- (void)updateToneCurveTexture;\r\n   \r\n@end\r\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageToneCurveFilter.m",
    "content": "#import \"GPUImageToneCurveFilter.h\"\r\n\r\n#pragma mark -\r\n#pragma mark GPUImageACVFile Helper\r\n\r\n//  GPUImageACVFile\r\n//\r\n//  ACV File format Parser\r\n//  Please refer to http://www.adobe.com/devnet-apps/photoshop/fileformatashtml/PhotoshopFileFormats.htm#50577411_pgfId-1056330\r\n//\r\n\r\n@interface GPUImageACVFile : NSObject{\r\n    short version;\r\n    short totalCurves;\r\n    \r\n    NSArray *rgbCompositeCurvePoints;\r\n    NSArray *redCurvePoints;\r\n    NSArray *greenCurvePoints;    \r\n    NSArray *blueCurvePoints;\r\n}\r\n\r\n@property(strong,nonatomic) NSArray *rgbCompositeCurvePoints;\r\n@property(strong,nonatomic) NSArray *redCurvePoints;\r\n@property(strong,nonatomic) NSArray *greenCurvePoints;    \r\n@property(strong,nonatomic) NSArray *blueCurvePoints;\r\n\r\n- (id) initWithACVFileData:(NSData*)data;\r\n\r\n\r\nunsigned short int16WithBytes(Byte* bytes);\r\n@end\r\n\r\n@implementation GPUImageACVFile\r\n\r\n@synthesize rgbCompositeCurvePoints, redCurvePoints, greenCurvePoints, blueCurvePoints;\r\n\r\n- (id) initWithACVFileData:(NSData *)data {\r\n    self = [super init];\r\n    if (self != nil)\r\n    {\r\n        if (data.length == 0)\r\n        {\r\n            NSLog(@\"failed to init ACVFile with data:%@\", data);\r\n            \r\n            return self;\r\n        }\r\n        \r\n        Byte* rawBytes = (Byte*) [data bytes];\r\n        version        = int16WithBytes(rawBytes);\r\n        rawBytes+=2;\r\n        \r\n        totalCurves    = int16WithBytes(rawBytes);\r\n        rawBytes+=2;\r\n        \r\n        NSMutableArray *curves = [NSMutableArray new];\r\n        \r\n        float pointRate = (1.0 / 255);\r\n        // The following is the data for each curve specified by count above\r\n        for (NSInteger x = 0; x<totalCurves; x++)\r\n        {\r\n            unsigned short pointCount = int16WithBytes(rawBytes);\r\n            rawBytes+=2;\r\n            \r\n            NSMutableArray *points = [NSMutableArray new];\r\n            // point count * 4\r\n            // Curve points. Each curve point is a pair of short integers where\r\n            // the first number is the output value (vertical coordinate on the\r\n            // Curves dialog graph) and the second is the input value. 
All coordinates have range 0 to 255.\r\n            for (NSInteger y = 0; y<pointCount; y++)\r\n            {\r\n                unsigned short y = int16WithBytes(rawBytes);\r\n                rawBytes+=2;\r\n                unsigned short x = int16WithBytes(rawBytes);\r\n                rawBytes+=2;\r\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\r\n                [points addObject:[NSValue valueWithCGSize:CGSizeMake(x * pointRate, y * pointRate)]];\r\n#else\r\n                [points addObject:[NSValue valueWithSize:CGSizeMake(x * pointRate, y * pointRate)]];\r\n#endif\r\n            }\r\n            [curves addObject:points];\r\n        }\r\n        rgbCompositeCurvePoints = [curves objectAtIndex:0];\r\n        redCurvePoints = [curves objectAtIndex:1];\r\n        greenCurvePoints = [curves objectAtIndex:2];\r\n        blueCurvePoints = [curves objectAtIndex:3];\r\n    }\r\n    return self;\r\n}\r\n\r\nunsigned short int16WithBytes(Byte* bytes) {\r\n    uint16_t result;\r\n    memcpy(&result, bytes, sizeof(result));\r\n    return CFSwapInt16BigToHost(result);\r\n}\r\n@end\r\n\r\n#pragma mark -\r\n#pragma mark GPUImageToneCurveFilter Implementation\r\n\r\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\r\nNSString *const kGPUImageToneCurveFragmentShaderString = SHADER_STRING\r\n(\r\n varying highp vec2 textureCoordinate;\r\n uniform sampler2D inputImageTexture;\r\n uniform sampler2D toneCurveTexture;\r\n \r\n void main()\r\n {\r\n     lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\r\n     lowp float redCurveValue = texture2D(toneCurveTexture, vec2(textureColor.r, 0.0)).r;\r\n     lowp float greenCurveValue = texture2D(toneCurveTexture, vec2(textureColor.g, 0.0)).g;\r\n     lowp float blueCurveValue = texture2D(toneCurveTexture, vec2(textureColor.b, 0.0)).b;\r\n     \r\n     gl_FragColor = vec4(redCurveValue, greenCurveValue, blueCurveValue, textureColor.a);\r\n }\r\n);\r\n#else\r\nNSString *const kGPUImageToneCurveFragmentShaderString = SHADER_STRING\r\n(\r\n varying vec2 textureCoordinate;\r\n uniform sampler2D inputImageTexture;\r\n uniform sampler2D toneCurveTexture;\r\n \r\n void main()\r\n {\r\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\r\n     float redCurveValue = texture2D(toneCurveTexture, vec2(textureColor.r, 0.0)).r;\r\n     float greenCurveValue = texture2D(toneCurveTexture, vec2(textureColor.g, 0.0)).g;\r\n     float blueCurveValue = texture2D(toneCurveTexture, vec2(textureColor.b, 0.0)).b;\r\n     \r\n     gl_FragColor = vec4(redCurveValue, greenCurveValue, blueCurveValue, textureColor.a);\r\n }\r\n);\r\n#endif\r\n\r\n@interface GPUImageToneCurveFilter()\r\n{\r\n    GLint toneCurveTextureUniform;\r\n    GLuint toneCurveTexture;\r\n    GLubyte *toneCurveByteArray;\r\n    \r\n    NSArray *_redCurve, *_greenCurve, *_blueCurve, *_rgbCompositeCurve;\r\n}\r\n\r\n@end\r\n\r\n@implementation GPUImageToneCurveFilter\r\n\r\n@synthesize rgbCompositeControlPoints = _rgbCompositeControlPoints;\r\n@synthesize redControlPoints = _redControlPoints;\r\n@synthesize greenControlPoints = _greenControlPoints;\r\n@synthesize blueControlPoints = _blueControlPoints;\r\n\r\n#pragma mark -\r\n#pragma mark Initialization and teardown\r\n\r\n- (id)init;\r\n{\r\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageToneCurveFragmentShaderString]))\r\n    {\r\n\t\treturn nil;\r\n    }\r\n    \r\n    toneCurveTextureUniform = [filterProgram uniformIndex:@\"toneCurveTexture\"];\r\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\r\n  
  NSArray *defaultCurve = [NSArray arrayWithObjects:[NSValue valueWithCGPoint:CGPointMake(0.0, 0.0)], [NSValue valueWithCGPoint:CGPointMake(0.5, 0.5)], [NSValue valueWithCGPoint:CGPointMake(1.0, 1.0)], nil];\r\n#else\r\n    NSArray *defaultCurve = [NSArray arrayWithObjects:[NSValue valueWithPoint:NSMakePoint(0.0, 0.0)], [NSValue valueWithPoint:NSMakePoint(0.5, 0.5)], [NSValue valueWithPoint:NSMakePoint(1.0, 1.0)], nil];\r\n#endif\r\n    [self setRgbCompositeControlPoints:defaultCurve];\r\n    [self setRedControlPoints:defaultCurve];\r\n    [self setGreenControlPoints:defaultCurve];\r\n    [self setBlueControlPoints:defaultCurve];\r\n    \r\n    return self;\r\n}\r\n\r\n// This pulls in Adobe ACV curve files to specify the tone curve\r\n- (id)initWithACVData:(NSData *)data {\r\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageToneCurveFragmentShaderString]))\r\n    {\r\n\t\treturn nil;\r\n    }\r\n    \r\n    toneCurveTextureUniform = [filterProgram uniformIndex:@\"toneCurveTexture\"];\r\n    \r\n    GPUImageACVFile *curve = [[GPUImageACVFile alloc] initWithACVFileData:data];\r\n    \r\n    [self setRgbCompositeControlPoints:curve.rgbCompositeCurvePoints];\r\n    [self setRedControlPoints:curve.redCurvePoints];\r\n    [self setGreenControlPoints:curve.greenCurvePoints];\r\n    [self setBlueControlPoints:curve.blueCurvePoints];\r\n    \r\n    curve = nil;\r\n    \r\n    return self;\r\n}\r\n\r\n- (id)initWithACV:(NSString*)curveFilename\r\n{\r\n    return [self initWithACVURL:[[NSBundle mainBundle] URLForResource:curveFilename\r\n                                                        withExtension:@\"acv\"]];\r\n}\r\n\r\n- (id)initWithACVURL:(NSURL*)curveFileURL\r\n{\r\n    NSData* fileData = [NSData dataWithContentsOfURL:curveFileURL];\r\n    return [self initWithACVData:fileData];\r\n}\r\n\r\n- (void)setPointsWithACV:(NSString*)curveFilename\r\n{\r\n    [self setPointsWithACVURL:[[NSBundle mainBundle] URLForResource:curveFilename withExtension:@\"acv\"]];\r\n}\r\n\r\n- (void)setPointsWithACVURL:(NSURL*)curveFileURL\r\n{\r\n    NSData* fileData = [NSData dataWithContentsOfURL:curveFileURL];\r\n    GPUImageACVFile *curve = [[GPUImageACVFile alloc] initWithACVFileData:fileData];\r\n    \r\n    [self setRgbCompositeControlPoints:curve.rgbCompositeCurvePoints];\r\n    [self setRedControlPoints:curve.redCurvePoints];\r\n    [self setGreenControlPoints:curve.greenCurvePoints];\r\n    [self setBlueControlPoints:curve.blueCurvePoints];\r\n    \r\n    curve = nil;\r\n}\r\n\r\n- (void)dealloc\r\n{\r\n    runSynchronouslyOnVideoProcessingQueue(^{\r\n        [GPUImageContext useImageProcessingContext];\r\n\r\n        if (toneCurveTexture)\r\n        {\r\n            glDeleteTextures(1, &toneCurveTexture);\r\n            toneCurveTexture = 0;\r\n            free(toneCurveByteArray);\r\n        }\r\n    });\r\n}\r\n\r\n#pragma mark -\r\n#pragma mark Curve calculation\r\n\r\n- (NSArray *)getPreparedSplineCurve:(NSArray *)points\r\n{\r\n    if (points && [points count] > 0) \r\n    {\r\n        // Sort the array.\r\n        NSArray *sortedPoints = [points sortedArrayUsingComparator:^NSComparisonResult(id a, id b) {\r\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\r\n            float x1 = [(NSValue *)a CGPointValue].x;\r\n            float x2 = [(NSValue *)b CGPointValue].x;\r\n#else\r\n            float x1 = [(NSValue *)a pointValue].x;\r\n            float x2 = [(NSValue *)b pointValue].x;\r\n#endif\r\n            return x1 > x2;\r\n        }];\r\n                \r\n        // 
Convert from (0, 1) to (0, 255).\r\n        NSMutableArray *convertedPoints = [NSMutableArray arrayWithCapacity:[sortedPoints count]];\r\n        for (int i=0; i<[points count]; i++){\r\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\r\n            CGPoint point = [[sortedPoints objectAtIndex:i] CGPointValue];\r\n#else\r\n            NSPoint point = [[sortedPoints objectAtIndex:i] pointValue];\r\n#endif\r\n            point.x = point.x * 255;\r\n            point.y = point.y * 255;\r\n                        \r\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\r\n            [convertedPoints addObject:[NSValue valueWithCGPoint:point]];\r\n#else\r\n            [convertedPoints addObject:[NSValue valueWithPoint:point]];\r\n#endif\r\n        }\r\n        \r\n        \r\n        NSMutableArray *splinePoints = [self splineCurve:convertedPoints];\r\n        \r\n        // If we have a first point like (0.3, 0) we'll be missing some points at the beginning\r\n        // that should be 0.\r\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\r\n        CGPoint firstSplinePoint = [[splinePoints objectAtIndex:0] CGPointValue];\r\n#else\r\n        NSPoint firstSplinePoint = [[splinePoints objectAtIndex:0] pointValue];\r\n#endif\r\n        \r\n        if (firstSplinePoint.x > 0) {\r\n            for (int i=firstSplinePoint.x; i >= 0; i--) {\r\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\r\n                CGPoint newCGPoint = CGPointMake(i, 0);\r\n                [splinePoints insertObject:[NSValue valueWithCGPoint:newCGPoint] atIndex:0];\r\n#else\r\n                NSPoint newNSPoint = NSMakePoint(i, 0);\r\n                [splinePoints insertObject:[NSValue valueWithPoint:newNSPoint] atIndex:0];\r\n#endif\r\n            }\r\n        }\r\n\r\n        // Insert points similarly at the end, if necessary.\r\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\r\n        CGPoint lastSplinePoint = [[splinePoints lastObject] CGPointValue];\r\n\r\n        if (lastSplinePoint.x < 255) {\r\n            for (int i = lastSplinePoint.x + 1; i <= 255; i++) {\r\n                CGPoint newCGPoint = CGPointMake(i, 255);\r\n                [splinePoints addObject:[NSValue valueWithCGPoint:newCGPoint]];\r\n            }\r\n        }\r\n#else\r\n        NSPoint lastSplinePoint = [[splinePoints lastObject] pointValue];\r\n        \r\n        if (lastSplinePoint.x < 255) {\r\n            for (int i = lastSplinePoint.x + 1; i <= 255; i++) {\r\n                NSPoint newNSPoint = NSMakePoint(i, 255);\r\n                [splinePoints addObject:[NSValue valueWithPoint:newNSPoint]];\r\n            }\r\n        }\r\n#endif\r\n        \r\n        // Prepare the spline points.\r\n        NSMutableArray *preparedSplinePoints = [NSMutableArray arrayWithCapacity:[splinePoints count]];\r\n        for (int i=0; i<[splinePoints count]; i++) \r\n        {\r\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\r\n            CGPoint newPoint = [[splinePoints objectAtIndex:i] CGPointValue];\r\n#else\r\n            NSPoint newPoint = [[splinePoints objectAtIndex:i] pointValue];\r\n#endif\r\n            CGPoint origPoint = CGPointMake(newPoint.x, newPoint.x);\r\n            \r\n            float distance = sqrt(pow((origPoint.x - newPoint.x), 2.0) + pow((origPoint.y - newPoint.y), 2.0));\r\n            \r\n            if (origPoint.y > newPoint.y) \r\n            {\r\n                distance = -distance;\r\n            }\r\n            \r\n            [preparedSplinePoints addObject:[NSNumber numberWithFloat:distance]];\r\n        }\r\n        
\r\n        return preparedSplinePoints;\r\n    }\r\n    \r\n    return nil;\r\n}\r\n\r\n\r\n- (NSMutableArray *)splineCurve:(NSArray *)points\r\n{\r\n    NSMutableArray *sdA = [self secondDerivative:points];\r\n    \r\n    // [points count] is equal to [sdA count]\r\n    NSInteger n = [sdA count];\r\n    if (n < 1)\r\n    {\r\n        return nil;\r\n    }\r\n    double sd[n];\r\n    \r\n    // From NSMutableArray to sd[n];\r\n    for (int i=0; i<n; i++) \r\n    {\r\n        sd[i] = [[sdA objectAtIndex:i] doubleValue];\r\n    }\r\n    \r\n    \r\n    NSMutableArray *output = [NSMutableArray arrayWithCapacity:(n+1)];\r\n                              \r\n    for(int i=0; i<n-1 ; i++) \r\n    {\r\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\r\n        CGPoint cur = [[points objectAtIndex:i] CGPointValue];\r\n        CGPoint next = [[points objectAtIndex:(i+1)] CGPointValue];\r\n#else\r\n        NSPoint cur = [[points objectAtIndex:i] pointValue];\r\n        NSPoint next = [[points objectAtIndex:(i+1)] pointValue];\r\n#endif\r\n        \r\n        for(int x=cur.x;x<(int)next.x;x++) \r\n        {\r\n            double t = (double)(x-cur.x)/(next.x-cur.x);\r\n            \r\n            double a = 1-t;\r\n            double b = t;\r\n            double h = next.x-cur.x;\r\n            \r\n            double y= a*cur.y + b*next.y + (h*h/6)*( (a*a*a-a)*sd[i]+ (b*b*b-b)*sd[i+1] );\r\n                        \r\n            if (y > 255.0)\r\n            {\r\n                y = 255.0;   \r\n            }\r\n            else if (y < 0.0)\r\n            {\r\n                y = 0.0;   \r\n            }\r\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\r\n            [output addObject:[NSValue valueWithCGPoint:CGPointMake(x, y)]];\r\n#else\r\n            [output addObject:[NSValue valueWithPoint:NSMakePoint(x, y)]];\r\n#endif\r\n        }\r\n    }\r\n    \r\n    // The above always misses the last point because the last point is the last next, so we approach but don't equal it.\r\n    [output addObject:[points lastObject]];\r\n    return output;\r\n}\r\n\r\n- (NSMutableArray *)secondDerivative:(NSArray *)points\r\n{\r\n    const NSInteger n = [points count];\r\n    if ((n <= 0) || (n == 1))\r\n    {\r\n        return nil;\r\n    }\r\n    \r\n    double matrix[n][3];\r\n    double result[n];\r\n    matrix[0][1]=1;\r\n    // What about matrix[0][1] and matrix[0][0]? Assuming 0 for now (Brad L.)\r\n    matrix[0][0]=0;    \r\n    matrix[0][2]=0;    \r\n    \r\n    for(int i=1;i<n-1;i++) \r\n    {\r\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\r\n        CGPoint P1 = [[points objectAtIndex:(i-1)] CGPointValue];\r\n        CGPoint P2 = [[points objectAtIndex:i] CGPointValue];\r\n        CGPoint P3 = [[points objectAtIndex:(i+1)] CGPointValue];\r\n#else\r\n        NSPoint P1 = [[points objectAtIndex:(i-1)] pointValue];\r\n        NSPoint P2 = [[points objectAtIndex:i] pointValue];\r\n        NSPoint P3 = [[points objectAtIndex:(i+1)] pointValue];\r\n#endif\r\n        \r\n        matrix[i][0]=(double)(P2.x-P1.x)/6;\r\n        matrix[i][1]=(double)(P3.x-P1.x)/3;\r\n        matrix[i][2]=(double)(P3.x-P2.x)/6;\r\n        result[i]=(double)(P3.y-P2.y)/(P3.x-P2.x) - (double)(P2.y-P1.y)/(P2.x-P1.x);\r\n    }\r\n    \r\n    // What about result[0] and result[n-1]? Assuming 0 for now (Brad L.)\r\n    result[0] = 0;\r\n    result[n-1] = 0;\r\n\t\r\n    matrix[n-1][1]=1;\r\n    // What about matrix[n-1][0] and matrix[n-1][2]? 
For now, assuming they are 0 (Brad L.)\r\n    matrix[n-1][0]=0;\r\n    matrix[n-1][2]=0;\r\n    \r\n  \t// solving pass1 (up->down)\r\n  \tfor(int i=1;i<n;i++) \r\n    {\r\n\t\tdouble k = matrix[i][0]/matrix[i-1][1];\r\n\t\tmatrix[i][1] -= k*matrix[i-1][2];\r\n\t\tmatrix[i][0] = 0;\r\n\t\tresult[i] -= k*result[i-1];\r\n    }\r\n\t// solving pass2 (down->up)\r\n\tfor(NSInteger i=n-2;i>=0;i--)\r\n    {\r\n\t\tdouble k = matrix[i][2]/matrix[i+1][1];\r\n\t\tmatrix[i][1] -= k*matrix[i+1][0];\r\n\t\tmatrix[i][2] = 0;\r\n\t\tresult[i] -= k*result[i+1];\r\n\t}\r\n    \r\n    double y2[n];\r\n    for(int i=0;i<n;i++) y2[i]=result[i]/matrix[i][1];\r\n    \r\n    NSMutableArray *output = [NSMutableArray arrayWithCapacity:n];\r\n    for (int i=0;i<n;i++) \r\n    {\r\n        [output addObject:[NSNumber numberWithDouble:y2[i]]];\r\n    }\r\n    \r\n    return output;\r\n}\r\n\r\n- (void)updateToneCurveTexture;\r\n{\r\n    runSynchronouslyOnVideoProcessingQueue(^{\r\n        [GPUImageContext useImageProcessingContext];\r\n        if (!toneCurveTexture)\r\n        {\r\n            glActiveTexture(GL_TEXTURE3);\r\n            glGenTextures(1, &toneCurveTexture);\r\n            glBindTexture(GL_TEXTURE_2D, toneCurveTexture);\r\n            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);\r\n            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);\r\n            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);\r\n            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);\r\n            \r\n            toneCurveByteArray = calloc(256 * 4, sizeof(GLubyte));\r\n        }\r\n        else\r\n        {\r\n            glActiveTexture(GL_TEXTURE3);\r\n            glBindTexture(GL_TEXTURE_2D, toneCurveTexture);\r\n        }\r\n        \r\n        if ( ([_redCurve count] >= 256) && ([_greenCurve count] >= 256) && ([_blueCurve count] >= 256) && ([_rgbCompositeCurve count] >= 256))\r\n        {\r\n            for (unsigned int currentCurveIndex = 0; currentCurveIndex < 256; currentCurveIndex++)\r\n            {\r\n                // BGRA for upload to texture\r\n                GLubyte b = fmin(fmax(currentCurveIndex + [[_blueCurve objectAtIndex:currentCurveIndex] floatValue], 0), 255);\r\n                toneCurveByteArray[currentCurveIndex * 4] = fmin(fmax(b + [[_rgbCompositeCurve objectAtIndex:b] floatValue], 0), 255);\r\n                GLubyte g = fmin(fmax(currentCurveIndex + [[_greenCurve objectAtIndex:currentCurveIndex] floatValue], 0), 255);\r\n                toneCurveByteArray[currentCurveIndex * 4 + 1] = fmin(fmax(g + [[_rgbCompositeCurve objectAtIndex:g] floatValue], 0), 255);\r\n                GLubyte r = fmin(fmax(currentCurveIndex + [[_redCurve objectAtIndex:currentCurveIndex] floatValue], 0), 255);\r\n                toneCurveByteArray[currentCurveIndex * 4 + 2] = fmin(fmax(r + [[_rgbCompositeCurve objectAtIndex:r] floatValue], 0), 255);\r\n                toneCurveByteArray[currentCurveIndex * 4 + 3] = 255;\r\n            }\r\n            \r\n            glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 256 /*width*/, 1 /*height*/, 0, GL_BGRA, GL_UNSIGNED_BYTE, toneCurveByteArray);\r\n        }        \r\n    });\r\n}\r\n\r\n#pragma mark -\r\n#pragma mark Rendering\r\n\r\n- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;\r\n{\r\n    if (self.preventRendering)\r\n    {\r\n        [firstInputFramebuffer unlock];\r\n        return;\r\n    }\r\n    \r\n    
[GPUImageContext setActiveShaderProgram:filterProgram];\r\n    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];\r\n    [outputFramebuffer activateFramebuffer];\r\n    if (usingNextFrameForImageCapture)\r\n    {\r\n        [outputFramebuffer lock];\r\n    }\r\n\r\n    glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);\r\n    glClear(GL_COLOR_BUFFER_BIT);\r\n    \r\n  \tglActiveTexture(GL_TEXTURE2);\r\n  \tglBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);\r\n  \tglUniform1i(filterInputTextureUniform, 2);\t\r\n    \r\n    glActiveTexture(GL_TEXTURE3);\r\n    glBindTexture(GL_TEXTURE_2D, toneCurveTexture);                \r\n    glUniform1i(toneCurveTextureUniform, 3);\t\r\n    \r\n    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);\r\n    glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);\r\n    \r\n    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\r\n    [firstInputFramebuffer unlock];\r\n    if (usingNextFrameForImageCapture)\r\n    {\r\n        dispatch_semaphore_signal(imageCaptureSemaphore);\r\n    }\r\n}\r\n\r\n#pragma mark -\r\n#pragma mark Accessors\r\n\r\n- (void)setRGBControlPoints:(NSArray *)points\r\n{\r\n    _redControlPoints = [points copy];\r\n    _redCurve = [self getPreparedSplineCurve:_redControlPoints];\r\n\r\n    _greenControlPoints = [points copy];\r\n    _greenCurve = [self getPreparedSplineCurve:_greenControlPoints];\r\n\r\n    _blueControlPoints = [points copy];\r\n    _blueCurve = [self getPreparedSplineCurve:_blueControlPoints];\r\n    \r\n    [self updateToneCurveTexture];\r\n}\r\n\r\n\r\n- (void)setRgbCompositeControlPoints:(NSArray *)newValue\r\n{\r\n  _rgbCompositeControlPoints = [newValue copy];\r\n  _rgbCompositeCurve = [self getPreparedSplineCurve:_rgbCompositeControlPoints];\r\n  \r\n  [self updateToneCurveTexture];\r\n}\r\n\r\n\r\n- (void)setRedControlPoints:(NSArray *)newValue;\r\n{  \r\n    _redControlPoints = [newValue copy];\r\n    _redCurve = [self getPreparedSplineCurve:_redControlPoints];\r\n    \r\n    [self updateToneCurveTexture];\r\n}\r\n\r\n\r\n- (void)setGreenControlPoints:(NSArray *)newValue\r\n{\r\n    _greenControlPoints = [newValue copy];\r\n    _greenCurve = [self getPreparedSplineCurve:_greenControlPoints];\r\n    \r\n    [self updateToneCurveTexture];\r\n}\r\n\r\n\r\n- (void)setBlueControlPoints:(NSArray *)newValue\r\n{\r\n    _blueControlPoints = [newValue copy];\r\n    _blueCurve = [self getPreparedSplineCurve:_blueControlPoints];\r\n    \r\n    [self updateToneCurveTexture];\r\n}\r\n\r\n@end\r\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageToonFilter.h",
    "content": "#import \"GPUImage3x3TextureSamplingFilter.h\"\n\n/** This uses Sobel edge detection to place a black border around objects,\n and then it quantizes the colors present in the image to give a cartoon-like quality to the image.\n */\n@interface GPUImageToonFilter : GPUImage3x3TextureSamplingFilter\n{\n    GLint thresholdUniform, quantizationLevelsUniform;\n}\n\n/** The threshold at which to apply the edges, default of 0.2\n */\n@property(readwrite, nonatomic) CGFloat threshold; \n\n/** The levels of quantization for the posterization of colors within the scene, with a default of 10.0\n */\n@property(readwrite, nonatomic) CGFloat quantizationLevels; \n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageToonFilter.m",
    "content": "#import \"GPUImageToonFilter.h\"\n#import \"GPUImageSobelEdgeDetectionFilter.h\"\n#import \"GPUImage3x3ConvolutionFilter.h\"\n\n// Code from \"Graphics Shaders: Theory and Practice\" by M. Bailey and S. Cunningham \n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageToonFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform highp float intensity;\n uniform highp float threshold;\n uniform highp float quantizationLevels;\n \n const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     \n     float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;\n     float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;\n     float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;\n     float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;\n     float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;\n     float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;\n     float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;\n     float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;\n     float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;\n     float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;\n     \n     float mag = length(vec2(h, v));\n\n     vec3 posterizedImageColor = floor((textureColor.rgb * quantizationLevels) + 0.5) / quantizationLevels;\n     \n     float thresholdTest = 1.0 - step(threshold, mag);\n     \n     gl_FragColor = vec4(posterizedImageColor * thresholdTest, textureColor.a);\n }\n);\n#else\nNSString *const kGPUImageToonFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform float intensity;\n uniform float threshold;\n uniform float quantizationLevels;\n \n const vec3 W = vec3(0.2125, 0.7154, 0.0721);\n \n void main()\n {\n     vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n     \n     float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;\n     float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;\n     float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;\n     float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;\n     float leftIntensity = texture2D(inputImageTexture, 
leftTextureCoordinate).r;\n     float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;\n     float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;\n     float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;\n     float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;\n     float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;\n     \n     float mag = length(vec2(h, v));\n     \n     vec3 posterizedImageColor = floor((textureColor.rgb * quantizationLevels) + 0.5) / quantizationLevels;\n     \n     float thresholdTest = 1.0 - step(threshold, mag);\n     \n     gl_FragColor = vec4(posterizedImageColor * thresholdTest, textureColor.a);\n }\n);\n#endif\n\n@implementation GPUImageToonFilter\n\n@synthesize threshold = _threshold; \n@synthesize quantizationLevels = _quantizationLevels; \n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageToonFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    hasOverriddenImageSizeFactor = NO;\n    \n    thresholdUniform = [filterProgram uniformIndex:@\"threshold\"];\n    quantizationLevelsUniform = [filterProgram uniformIndex:@\"quantizationLevels\"];\n    \n    self.threshold = 0.2;\n    self.quantizationLevels = 10.0;    \n\n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setThreshold:(CGFloat)newValue;\n{\n    _threshold = newValue;\n    \n    [self setFloat:_threshold forUniform:thresholdUniform program:filterProgram];\n}\n\n- (void)setQuantizationLevels:(CGFloat)newValue;\n{\n    _quantizationLevels = newValue;\n    \n    [self setFloat:_quantizationLevels forUniform:quantizationLevelsUniform program:filterProgram];\n}\n\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageTransformFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageTransformFilter : GPUImageFilter\n{\n    GLint transformMatrixUniform, orthographicMatrixUniform;\n    GPUMatrix4x4 orthographicMatrix;\n}\n\n// You can either set the transform to apply to be a 2-D affine transform or a 3-D transform. The default is the identity transform (the output image is identical to the input).\n@property(readwrite, nonatomic) CGAffineTransform affineTransform;\n@property(readwrite, nonatomic) CATransform3D transform3D;\n\n// This applies the transform to the raw frame data if set to YES, the default of NO takes the aspect ratio of the image input into account when rotating\n@property(readwrite, nonatomic) BOOL ignoreAspectRatio;\n\n// sets the anchor point to top left corner\n@property(readwrite, nonatomic) BOOL anchorTopLeft;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageTransformFilter.m",
    "content": "#import \"GPUImageTransformFilter.h\"\n\nNSString *const kGPUImageTransformVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n \n uniform mat4 transformMatrix;\n uniform mat4 orthographicMatrix;\n \n varying vec2 textureCoordinate;\n \n void main()\n {\n     gl_Position = transformMatrix * vec4(position.xyz, 1.0) * orthographicMatrix;\n     textureCoordinate = inputTextureCoordinate.xy;\n }\n);\n\n@implementation GPUImageTransformFilter\n\n@synthesize affineTransform;\n@synthesize transform3D = _transform3D;\n@synthesize ignoreAspectRatio = _ignoreAspectRatio;\n@synthesize anchorTopLeft = _anchorTopLeft;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithVertexShaderFromString:kGPUImageTransformVertexShaderString fragmentShaderFromString:kGPUImagePassthroughFragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    transformMatrixUniform = [filterProgram uniformIndex:@\"transformMatrix\"];\n    orthographicMatrixUniform = [filterProgram uniformIndex:@\"orthographicMatrix\"];\n    \n    self.transform3D = CATransform3DIdentity;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Conversion from matrix formats\n\n- (void)loadOrthoMatrix:(GLfloat *)matrix left:(GLfloat)left right:(GLfloat)right bottom:(GLfloat)bottom top:(GLfloat)top near:(GLfloat)near far:(GLfloat)far;\n{\n    GLfloat r_l = right - left;\n    GLfloat t_b = top - bottom;\n    GLfloat f_n = far - near;\n    GLfloat tx = - (right + left) / (right - left);\n    GLfloat ty = - (top + bottom) / (top - bottom);\n    GLfloat tz = - (far + near) / (far - near);\n    \n\tfloat scale = 2.0f;\n\tif (_anchorTopLeft)\n\t{\n\t\tscale = 4.0f;\n\t\ttx=-1.0f;\n\t\tty=-1.0f;\n\t}\n\t\n    matrix[0] = scale / r_l;\n    matrix[1] = 0.0f;\n    matrix[2] = 0.0f;\n    matrix[3] = tx;\n    \n    matrix[4] = 0.0f;\n    matrix[5] = scale / t_b;\n    matrix[6] = 0.0f;\n    matrix[7] = ty;\n    \n    matrix[8] = 0.0f;\n    matrix[9] = 0.0f;\n    matrix[10] = scale / f_n;\n    matrix[11] = tz;\n    \n    matrix[12] = 0.0f;\n    matrix[13] = 0.0f;\n    matrix[14] = 0.0f;\n    matrix[15] = 1.0f;\n}\n\n//- (void)convert3DTransform:(CATransform3D *)transform3D toMatrix:(GLfloat *)matrix;\n//{\n//\t//\tstruct CATransform3D\n//\t//\t{\n//\t//\t\tCGFloat m11, m12, m13, m14;\n//\t//\t\tCGFloat m21, m22, m23, m24;\n//\t//\t\tCGFloat m31, m32, m33, m34;\n//\t//\t\tCGFloat m41, m42, m43, m44;\n//\t//\t};\n//\t\n//\tmatrix[0] = (GLfloat)transform3D->m11;\n//\tmatrix[1] = (GLfloat)transform3D->m12;\n//\tmatrix[2] = (GLfloat)transform3D->m13;\n//\tmatrix[3] = (GLfloat)transform3D->m14;\n//\tmatrix[4] = (GLfloat)transform3D->m21;\n//\tmatrix[5] = (GLfloat)transform3D->m22;\n//\tmatrix[6] = (GLfloat)transform3D->m23;\n//\tmatrix[7] = (GLfloat)transform3D->m24;\n//\tmatrix[8] = (GLfloat)transform3D->m31;\n//\tmatrix[9] = (GLfloat)transform3D->m32;\n//\tmatrix[10] = (GLfloat)transform3D->m33;\n//\tmatrix[11] = (GLfloat)transform3D->m34;\n//\tmatrix[12] = (GLfloat)transform3D->m41;\n//\tmatrix[13] = (GLfloat)transform3D->m42;\n//\tmatrix[14] = (GLfloat)transform3D->m43;\n//\tmatrix[15] = (GLfloat)transform3D->m44;\n//}\n\n- (void)convert3DTransform:(CATransform3D *)transform3D toMatrix:(GPUMatrix4x4 *)matrix;\n{\n\t//\tstruct CATransform3D\n\t//\t{\n\t//\t\tCGFloat m11, m12, m13, m14;\n\t//\t\tCGFloat m21, m22, m23, m24;\n\t//\t\tCGFloat m31, m32, m33, m34;\n\t//\t\tCGFloat m41, m42, m43, m44;\n\t//\t};\n    \n    GLfloat 
*mappedMatrix = (GLfloat *)matrix;\n\t\n\tmappedMatrix[0] = (GLfloat)transform3D->m11;\n\tmappedMatrix[1] = (GLfloat)transform3D->m12;\n\tmappedMatrix[2] = (GLfloat)transform3D->m13;\n\tmappedMatrix[3] = (GLfloat)transform3D->m14;\n\tmappedMatrix[4] = (GLfloat)transform3D->m21;\n\tmappedMatrix[5] = (GLfloat)transform3D->m22;\n\tmappedMatrix[6] = (GLfloat)transform3D->m23;\n\tmappedMatrix[7] = (GLfloat)transform3D->m24;\n\tmappedMatrix[8] = (GLfloat)transform3D->m31;\n\tmappedMatrix[9] = (GLfloat)transform3D->m32;\n\tmappedMatrix[10] = (GLfloat)transform3D->m33;\n\tmappedMatrix[11] = (GLfloat)transform3D->m34;\n\tmappedMatrix[12] = (GLfloat)transform3D->m41;\n\tmappedMatrix[13] = (GLfloat)transform3D->m42;\n\tmappedMatrix[14] = (GLfloat)transform3D->m43;\n\tmappedMatrix[15] = (GLfloat)transform3D->m44;\n}\n\n#pragma mark -\n#pragma mark GPUImageInput\n\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;\n{\n    CGSize currentFBOSize = [self sizeOfFBO];\n    CGFloat normalizedHeight = currentFBOSize.height / currentFBOSize.width;\n    \n    GLfloat adjustedVertices[] = {\n        -1.0f, -normalizedHeight,\n        1.0f, -normalizedHeight,\n        -1.0f,  normalizedHeight,\n        1.0f,  normalizedHeight,\n    };\n    static const GLfloat squareVertices[] = {\n        -1.0f, -1.0f,\n        1.0f, -1.0f,\n        -1.0f,  1.0f,\n        1.0f,  1.0f,\n    };\n\n\tGLfloat adjustedVerticesAnchorTL[] = {\n        0.0f, 0.0f,\n        1.0f, 0.0f,\n        0.0f,  normalizedHeight,\n        1.0f,  normalizedHeight,\n    };\n\n    static const GLfloat squareVerticesAnchorTL[] = {\n        0.0f, 0.0f,\n        1.0f, 0.0f,\n        0.0f,  1.0f,\n        1.0f,  1.0f,\n    };\n\n    if (_ignoreAspectRatio)\n    {\n\t\tif (_anchorTopLeft)\n\t\t{\n\t\t\t[self renderToTextureWithVertices:squareVerticesAnchorTL textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];\n\t\t}\n\t\telse\n\t\t{\n\t\t\t[self renderToTextureWithVertices:squareVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];\n\t\t}\n    }\n    else\n    {\n\t\tif (_anchorTopLeft)\n\t\t{\n\t\t\t[self renderToTextureWithVertices:adjustedVerticesAnchorTL textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];\n\t\t}\n\t\telse\n\t\t{\n\t\t\t[self renderToTextureWithVertices:adjustedVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];\n\t\t}\n    }\n    \n    [self informTargetsAboutNewFrameAtTime:frameTime];\n}\n\n- (void)setupFilterForSize:(CGSize)filterFrameSize;\n{\n    if (!_ignoreAspectRatio)\n    {\n        [self loadOrthoMatrix:(GLfloat *)&orthographicMatrix left:-1.0 right:1.0 bottom:(-1.0 * filterFrameSize.height / filterFrameSize.width) top:(1.0 * filterFrameSize.height / filterFrameSize.width) near:-1.0 far:1.0];\n        //     [self loadOrthoMatrix:orthographicMatrix left:-1.0 right:1.0 bottom:(-1.0 * (GLfloat)backingHeight / (GLfloat)backingWidth) top:(1.0 * (GLfloat)backingHeight / (GLfloat)backingWidth) near:-2.0 far:2.0];\n\n        [self setMatrix4f:orthographicMatrix forUniform:orthographicMatrixUniform program:filterProgram];\n    }\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setAffineTransform:(CGAffineTransform)newValue;\n{\n    self.transform3D = CATransform3DMakeAffineTransform(newValue);\n}\n\n- (CGAffineTransform)affineTransform;\n{\n    return CATransform3DGetAffineTransform(self.transform3D);\n}\n\n- (void)setTransform3D:(CATransform3D)newValue;\n{\n    
_transform3D = newValue;\n        \n    GPUMatrix4x4 temporaryMatrix;\n    \n    [self convert3DTransform:&_transform3D toMatrix:&temporaryMatrix];\n    [self setMatrix4f:temporaryMatrix forUniform:transformMatrixUniform program:filterProgram];\n}\n\n- (void)setIgnoreAspectRatio:(BOOL)newValue;\n{\n    _ignoreAspectRatio = newValue;\n    \n    if (_ignoreAspectRatio)\n    {\n        [self loadOrthoMatrix:(GLfloat *)&orthographicMatrix left:-1.0 right:1.0 bottom:-1.0 top:1.0 near:-1.0 far:1.0];\n        [self setMatrix4f:orthographicMatrix forUniform:orthographicMatrixUniform program:filterProgram];\n    }\n    else\n    {\n        [self setupFilterForSize:[self sizeOfFBO]];\n    }\n}\n\n- (void)setAnchorTopLeft:(BOOL)newValue\n{\n\t_anchorTopLeft = newValue;\n\t[self setIgnoreAspectRatio:_ignoreAspectRatio];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageTwoInputCrossTextureSamplingFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageTwoInputCrossTextureSamplingFilter : GPUImageTwoInputFilter\n{\n    GLint texelWidthUniform, texelHeightUniform;\n    \n    CGFloat texelWidth, texelHeight;\n    BOOL hasOverriddenImageSizeFactor;\n}\n\n// The texel width and height determines how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects.\n@property(readwrite, nonatomic) CGFloat texelWidth;\n@property(readwrite, nonatomic) CGFloat texelHeight;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageTwoInputCrossTextureSamplingFilter.m",
    "content": "#import \"GPUImageTwoInputCrossTextureSamplingFilter.h\"\n\nNSString *const kGPUImageTwoInputNearbyTexelSamplingVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n attribute vec4 inputTextureCoordinate2;\n \n uniform float texelWidth;\n uniform float texelHeight;\n \n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n varying vec2 topTextureCoordinate;\n varying vec2 bottomTextureCoordinate;\n \n varying vec2 textureCoordinate2;\n varying vec2 leftTextureCoordinate2;\n varying vec2 rightTextureCoordinate2;\n varying vec2 topTextureCoordinate2;\n varying vec2 bottomTextureCoordinate2;\n \n void main()\n {\n     gl_Position = position;\n     \n     vec2 widthStep = vec2(texelWidth, 0.0);\n     vec2 heightStep = vec2(0.0, texelHeight);\n     \n     textureCoordinate = inputTextureCoordinate.xy;\n     leftTextureCoordinate = inputTextureCoordinate.xy - widthStep;\n     rightTextureCoordinate = inputTextureCoordinate.xy + widthStep;\n     topTextureCoordinate = inputTextureCoordinate.xy - heightStep;\n     bottomTextureCoordinate = inputTextureCoordinate.xy + heightStep;\n     \n     textureCoordinate2 = inputTextureCoordinate2.xy;\n     leftTextureCoordinate2 = inputTextureCoordinate2.xy - widthStep;\n     rightTextureCoordinate2 = inputTextureCoordinate2.xy + widthStep;\n     topTextureCoordinate2 = inputTextureCoordinate2.xy - heightStep;\n     bottomTextureCoordinate2 = inputTextureCoordinate2.xy + heightStep;\n }\n);\n\n@implementation GPUImageTwoInputCrossTextureSamplingFilter\n\n@synthesize texelWidth = _texelWidth;\n@synthesize texelHeight = _texelHeight;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;\n{\n    if (!(self = [super initWithVertexShaderFromString:kGPUImageTwoInputNearbyTexelSamplingVertexShaderString fragmentShaderFromString:fragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    texelWidthUniform = [filterProgram uniformIndex:@\"texelWidth\"];\n    texelHeightUniform = [filterProgram uniformIndex:@\"texelHeight\"];\n    \n    return self;\n}\n\n- (void)setupFilterForSize:(CGSize)filterFrameSize;\n{\n    if (!hasOverriddenImageSizeFactor)\n    {\n        _texelWidth = 1.0 / filterFrameSize.width;\n        _texelHeight = 1.0 / filterFrameSize.height;\n        \n        runSynchronouslyOnVideoProcessingQueue(^{\n            [GPUImageContext setActiveShaderProgram:filterProgram];\n            if (GPUImageRotationSwapsWidthAndHeight(inputRotation))\n            {\n                glUniform1f(texelWidthUniform, _texelHeight);\n                glUniform1f(texelHeightUniform, _texelWidth);\n            }\n            else\n            {\n                glUniform1f(texelWidthUniform, _texelWidth);\n                glUniform1f(texelHeightUniform, _texelHeight);\n            }\n        });\n    }\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setTexelWidth:(CGFloat)newValue;\n{\n    hasOverriddenImageSizeFactor = YES;\n    _texelWidth = newValue;\n    \n    [self setFloat:_texelWidth forUniform:texelWidthUniform program:filterProgram];\n}\n\n- (void)setTexelHeight:(CGFloat)newValue;\n{\n    hasOverriddenImageSizeFactor = YES;\n    _texelHeight = newValue;\n    \n    [self setFloat:_texelHeight forUniform:texelHeightUniform program:filterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageUIElement.h",
    "content": "#import \"GPUImageOutput.h\"\n\n@interface GPUImageUIElement : GPUImageOutput\n\n// Initialization and teardown\n- (id)initWithView:(UIView *)inputView;\n- (id)initWithLayer:(CALayer *)inputLayer;\n\n// Layer management\n- (CGSize)layerSizeInPixels;\n- (void)update;\n- (void)updateUsingCurrentTime;\n- (void)updateWithTimestamp:(CMTime)frameTime;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageUIElement.m",
    "content": "#import \"GPUImageUIElement.h\"\n\n@interface GPUImageUIElement ()\n{\n    UIView *view;\n    CALayer *layer;\n    \n    CGSize previousLayerSizeInPixels;\n    CMTime time;\n    NSTimeInterval actualTimeOfLastUpdate;\n}\n\n@end\n\n@implementation GPUImageUIElement\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithView:(UIView *)inputView;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n    view = inputView;\n    layer = inputView.layer;\n\n    previousLayerSizeInPixels = CGSizeZero;\n    [self update];\n    \n    return self;\n}\n\n- (id)initWithLayer:(CALayer *)inputLayer;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n    view = nil;\n    layer = inputLayer;\n\n    previousLayerSizeInPixels = CGSizeZero;\n    [self update];\n\n    return self;\n}\n\n#pragma mark -\n#pragma mark Layer management\n\n- (CGSize)layerSizeInPixels;\n{\n    CGSize pointSize = layer.bounds.size;\n    return CGSizeMake(layer.contentsScale * pointSize.width, layer.contentsScale * pointSize.height);\n}\n\n- (void)update;\n{\n    [self updateWithTimestamp:kCMTimeIndefinite];\n}\n\n- (void)updateUsingCurrentTime;\n{\n    if(CMTIME_IS_INVALID(time)) {\n        time = CMTimeMakeWithSeconds(0, 600);\n        actualTimeOfLastUpdate = [NSDate timeIntervalSinceReferenceDate];\n    } else {\n        NSTimeInterval now = [NSDate timeIntervalSinceReferenceDate];\n        NSTimeInterval diff = now - actualTimeOfLastUpdate;\n        time = CMTimeAdd(time, CMTimeMakeWithSeconds(diff, 600));\n        actualTimeOfLastUpdate = now;\n    }\n\n    [self updateWithTimestamp:time];\n}\n\n- (void)updateWithTimestamp:(CMTime)frameTime;\n{\n    [GPUImageContext useImageProcessingContext];\n    \n    CGSize layerPixelSize = [self layerSizeInPixels];\n    \n    GLubyte *imageData = (GLubyte *) calloc(1, (int)layerPixelSize.width * (int)layerPixelSize.height * 4);\n    \n    CGColorSpaceRef genericRGBColorspace = CGColorSpaceCreateDeviceRGB();    \n    CGContextRef imageContext = CGBitmapContextCreate(imageData, (int)layerPixelSize.width, (int)layerPixelSize.height, 8, (int)layerPixelSize.width * 4, genericRGBColorspace,  kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);\n//    CGContextRotateCTM(imageContext, M_PI_2);\n\tCGContextTranslateCTM(imageContext, 0.0f, layerPixelSize.height);\n    CGContextScaleCTM(imageContext, layer.contentsScale, -layer.contentsScale);\n    //        CGContextSetBlendMode(imageContext, kCGBlendModeCopy); // From Technical Q&A QA1708: http://developer.apple.com/library/ios/#qa/qa1708/_index.html\n    \n    [layer renderInContext:imageContext];\n    \n    CGContextRelease(imageContext);\n    CGColorSpaceRelease(genericRGBColorspace);\n    \n    // TODO: This may not work\n    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:layerPixelSize textureOptions:self.outputTextureOptions onlyTexture:YES];\n\n    glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);\n    // no need to use self.outputTextureOptions here, we always need these texture options\n    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (int)layerPixelSize.width, (int)layerPixelSize.height, 0, GL_BGRA, GL_UNSIGNED_BYTE, imageData);\n    \n    free(imageData);\n    \n    for (id<GPUImageInput> currentTarget in targets)\n    {\n        if (currentTarget != self.targetToIgnoreForUpdates)\n        {\n            NSInteger indexOfObject = [targets indexOfObject:currentTarget];\n            NSInteger 
textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n            \n            [currentTarget setInputSize:layerPixelSize atIndex:textureIndexOfTarget];\n            [currentTarget newFrameReadyAtTime:frameTime atIndex:textureIndexOfTarget];\n        }\n    }    \n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageUnsharpMaskFilter.h",
    "content": "#import \"GPUImageFilterGroup.h\"\n\n@class GPUImageGaussianBlurFilter;\n\n@interface GPUImageUnsharpMaskFilter : GPUImageFilterGroup\n{\n    GPUImageGaussianBlurFilter *blurFilter;\n    GPUImageFilter *unsharpMaskFilter;\n}\n// The blur radius of the underlying Gaussian blur. The default is 4.0.\n@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;\n\n// The strength of the sharpening, from 0.0 on up, with a default of 1.0\n@property(readwrite, nonatomic) CGFloat intensity;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageUnsharpMaskFilter.m",
    "content": "#import \"GPUImageUnsharpMaskFilter.h\"\n#import \"GPUImageFilter.h\"\n#import \"GPUImageTwoInputFilter.h\"\n#import \"GPUImageGaussianBlurFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageUnsharpMaskFragmentShaderString = SHADER_STRING\n( \n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2; \n \n uniform highp float intensity;\n \n void main()\n {\n     lowp vec4 sharpImageColor = texture2D(inputImageTexture, textureCoordinate);\n     lowp vec3 blurredImageColor = texture2D(inputImageTexture2, textureCoordinate2).rgb;\n     \n     gl_FragColor = vec4(sharpImageColor.rgb * intensity + blurredImageColor * (1.0 - intensity), sharpImageColor.a);\n//     gl_FragColor = mix(blurredImageColor, sharpImageColor, intensity);\n//     gl_FragColor = vec4(sharpImageColor.rgb - (blurredImageColor.rgb * intensity), 1.0);\n }\n);\n#else\nNSString *const kGPUImageUnsharpMaskFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n uniform float intensity;\n \n void main()\n {\n     vec4 sharpImageColor = texture2D(inputImageTexture, textureCoordinate);\n     vec3 blurredImageColor = texture2D(inputImageTexture2, textureCoordinate2).rgb;\n     \n     gl_FragColor = vec4(sharpImageColor.rgb * intensity + blurredImageColor * (1.0 - intensity), sharpImageColor.a);\n     //     gl_FragColor = mix(blurredImageColor, sharpImageColor, intensity);\n     //     gl_FragColor = vec4(sharpImageColor.rgb - (blurredImageColor.rgb * intensity), 1.0);\n }\n);\n#endif\n\n@implementation GPUImageUnsharpMaskFilter\n\n@synthesize blurRadiusInPixels;\n@synthesize intensity = _intensity;\n\n- (id)init;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n    // First pass: apply a variable Gaussian blur\n    blurFilter = [[GPUImageGaussianBlurFilter alloc] init];\n    [self addFilter:blurFilter];\n        \n    // Second pass: combine the blurred image with the original sharp one\n    unsharpMaskFilter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:kGPUImageUnsharpMaskFragmentShaderString];\n    [self addFilter:unsharpMaskFilter];\n    \n    // Texture location 0 needs to be the sharp image for both the blur and the second stage processing\n    [blurFilter addTarget:unsharpMaskFilter atTextureLocation:1];\n    \n    self.initialFilters = [NSArray arrayWithObjects:blurFilter, unsharpMaskFilter, nil];\n    self.terminalFilter = unsharpMaskFilter;\n    \n    self.intensity = 1.0;\n    self.blurRadiusInPixels = 4.0;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setBlurRadiusInPixels:(CGFloat)newValue;\n{\n    blurFilter.blurRadiusInPixels = newValue;\n}\n\n- (CGFloat)blurRadiusInPixels;\n{\n    return blurFilter.blurRadiusInPixels;\n}\n\n- (void)setIntensity:(CGFloat)newValue;\n{\n    _intensity = newValue;\n    [unsharpMaskFilter setFloat:newValue forUniformName:@\"intensity\"];\n}\n\n@end"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageVibranceFilter.h",
    "content": "//\n//  GPUImageVibranceFilter.h\n//\n//\n//  Created by github.com/r3mus on 8/14/15.\n//\n//\n\n#import \"GPUImageFilter.h\"\n\n@interface GPUImageVibranceFilter : GPUImageFilter\n{\n    GLint vibranceUniform;\n}\n\n// Modifies the saturation of desaturated colors, leaving saturated colors unmodified.\n// Value -1 to 1 (-1 is minimum vibrance, 0 is no change, and 1 is maximum vibrance)\n@property (readwrite, nonatomic) GLfloat vibrance;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageVibranceFilter.m",
    "content": "//\n//  GPUImageVibranceFilter.m\n//  \n//\n//  Created by github.com/r3mus on 8/13/15.\n//\n//\n\n#import \"GPUImageVibranceFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageVibranceFragmentShaderString = SHADER_STRING\n(\n    varying highp vec2 textureCoordinate;\n \n    uniform sampler2D inputImageTexture;\n    uniform lowp float vibrance;\n \n    void main() {\n        lowp vec4 color = texture2D(inputImageTexture, textureCoordinate);\n        lowp float average = (color.r + color.g + color.b) / 3.0;\n        lowp float mx = max(color.r, max(color.g, color.b));\n        lowp float amt = (mx - average) * (-vibrance * 3.0);\n        color.rgb = mix(color.rgb, vec3(mx), amt);\n        gl_FragColor = color;\n    }\n);\n#else\nNSString *const kGPUImageVibranceFragmentShaderString = SHADER_STRING\n(\n    varying vec2 textureCoordinate;\n \n    uniform sampler2D inputImageTexture;\n    uniform float vibrance;\n \n    void main() {\n        vec4 color = texture2D(inputImageTexture, textureCoordinate);\n        float average = (color.r + color.g + color.b) / 3.0;\n        float mx = max(color.r, max(color.g, color.b));\n        float amt = (mx - average) * (-vibrance * 3.0);\n        color.rgb = mix(color.rgb, vec3(mx), amt);\n        gl_FragColor = color;\n    }\n);\n#endif\n\n@implementation GPUImageVibranceFilter\n\n@synthesize vibrance = _vibrance;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageVibranceFragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    vibranceUniform = [filterProgram uniformIndex:@\"vibrance\"];\n    self.vibrance = 0.0;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setVibrance:(GLfloat)vibrance;\n{\n    _vibrance = vibrance;\n    \n    [self setFloat:_vibrance forUniform:vibranceUniform program:filterProgram];\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageVignetteFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n/** Performs a vignetting effect, fading out the image at the edges\n */\n@interface GPUImageVignetteFilter : GPUImageFilter \n{\n    GLint vignetteCenterUniform, vignetteColorUniform, vignetteStartUniform, vignetteEndUniform;\n}\n\n// the center for the vignette in tex coords (defaults to 0.5, 0.5)\n@property (nonatomic, readwrite) CGPoint vignetteCenter;\n\n// The color to use for the Vignette (defaults to black)\n@property (nonatomic, readwrite) GPUVector3 vignetteColor;\n\n// The normalized distance from the center where the vignette effect starts. Default of 0.5.\n@property (nonatomic, readwrite) CGFloat vignetteStart;\n\n// The normalized distance from the center where the vignette effect ends. Default of 0.75.\n@property (nonatomic, readwrite) CGFloat vignetteEnd;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageVignetteFilter.m",
    "content": "#import \"GPUImageVignetteFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageVignetteFragmentShaderString = SHADER_STRING\n(\n uniform sampler2D inputImageTexture;\n varying highp vec2 textureCoordinate;\n \n uniform lowp vec2 vignetteCenter;\n uniform lowp vec3 vignetteColor;\n uniform highp float vignetteStart;\n uniform highp float vignetteEnd;\n \n void main()\n {\n     lowp vec4 sourceImageColor = texture2D(inputImageTexture, textureCoordinate);\n     lowp float d = distance(textureCoordinate, vec2(vignetteCenter.x, vignetteCenter.y));\n     lowp float percent = smoothstep(vignetteStart, vignetteEnd, d);\n     gl_FragColor = vec4(mix(sourceImageColor.rgb, vignetteColor, percent), sourceImageColor.a);\n }\n);\n#else\nNSString *const kGPUImageVignetteFragmentShaderString = SHADER_STRING\n(\n uniform sampler2D inputImageTexture;\n varying vec2 textureCoordinate;\n \n uniform vec2 vignetteCenter;\n uniform vec3 vignetteColor;\n uniform float vignetteStart;\n uniform float vignetteEnd;\n \n void main()\n {\n     vec4 sourceImageColor = texture2D(inputImageTexture, textureCoordinate);\n     float d = distance(textureCoordinate, vec2(vignetteCenter.x, vignetteCenter.y));\n     float percent = smoothstep(vignetteStart, vignetteEnd, d);\n     gl_FragColor = vec4(mix(sourceImageColor.rgb, vignetteColor, percent), sourceImageColor.a);\n }\n);\n#endif\n\n@implementation GPUImageVignetteFilter\n\n@synthesize vignetteCenter = _vignetteCenter;\n@synthesize vignetteColor = _vignetteColor;\n@synthesize vignetteStart =_vignetteStart;\n@synthesize vignetteEnd = _vignetteEnd;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageVignetteFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    vignetteCenterUniform = [filterProgram uniformIndex:@\"vignetteCenter\"];\n    vignetteColorUniform = [filterProgram uniformIndex:@\"vignetteColor\"];\n    vignetteStartUniform = [filterProgram uniformIndex:@\"vignetteStart\"];\n    vignetteEndUniform = [filterProgram uniformIndex:@\"vignetteEnd\"];\n    \n    self.vignetteCenter = (CGPoint){ 0.5f, 0.5f };\n    self.vignetteColor = (GPUVector3){ 0.0f, 0.0f, 0.0f };\n    self.vignetteStart = 0.3;\n    self.vignetteEnd = 0.75;\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setVignetteCenter:(CGPoint)newValue\n{\n    _vignetteCenter = newValue;\n    \n    [self setPoint:newValue forUniform:vignetteCenterUniform program:filterProgram];\n}\n\n- (void)setVignetteColor:(GPUVector3)newValue\n{\n    _vignetteColor = newValue;\n    \n    [self setVec3:newValue forUniform:vignetteColorUniform program:filterProgram];\n}\n\n- (void)setVignetteStart:(CGFloat)newValue;\n{\n    _vignetteStart = newValue;\n    \n    [self setFloat:_vignetteStart forUniform:vignetteStartUniform program:filterProgram];\n}\n\n- (void)setVignetteEnd:(CGFloat)newValue;\n{\n    _vignetteEnd = newValue;\n\n    [self setFloat:_vignetteEnd forUniform:vignetteEndUniform program:filterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageVoronoiConsumerFilter.h",
    "content": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageVoronoiConsumerFilter : GPUImageTwoInputFilter \n{\n    GLint sizeUniform;\n}\n\n@property (nonatomic, readwrite) CGSize sizeInPixels;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageVoronoiConsumerFilter.m",
    "content": "#import \"GPUImageVoronoiConsumerFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageVoronoiConsumerFragmentShaderString = SHADER_STRING\n(\n \n precision highp float;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n uniform vec2 size;\n varying vec2 textureCoordinate;\n \n vec2 getCoordFromColor(vec4 color)\n{\n    float z = color.z * 256.0;\n    float yoff = floor(z / 8.0);\n    float xoff = mod(z, 8.0);\n    float x = color.x*256.0 + xoff*256.0;\n    float y = color.y*256.0 + yoff*256.0;\n    return vec2(x,y) / size;\n}\n \n void main(void) {\n     vec4 colorLoc = texture2D(inputImageTexture2, textureCoordinate);\n     vec4 color = texture2D(inputImageTexture, getCoordFromColor(colorLoc));\n     \n     gl_FragColor = color;\n }\n);\n#else\nNSString *const kGPUImageVoronoiConsumerFragmentShaderString = SHADER_STRING\n(\n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n uniform vec2 size;\n varying vec2 textureCoordinate;\n \n vec2 getCoordFromColor(vec4 color)\n {\n    float z = color.z * 256.0;\n    float yoff = floor(z / 8.0);\n    float xoff = mod(z, 8.0);\n    float x = color.x*256.0 + xoff*256.0;\n    float y = color.y*256.0 + yoff*256.0;\n    return vec2(x,y) / size;\n }\n \n void main(void)\n {\n     vec4 colorLoc = texture2D(inputImageTexture2, textureCoordinate);\n     vec4 color = texture2D(inputImageTexture, getCoordFromColor(colorLoc));\n     \n     gl_FragColor = color;\n }\n);\n#endif\n\n@implementation GPUImageVoronoiConsumerFilter\n\n@synthesize sizeInPixels = _sizeInPixels;\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageVoronoiConsumerFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    sizeUniform = [filterProgram uniformIndex:@\"size\"];\n    \n    return self;\n}\n\n-(void)setSizeInPixels:(CGSize)sizeInPixels {\n    _sizeInPixels = sizeInPixels;\n    \n    //validate that it's a power of 2 and square\n    \n    float width = log2(sizeInPixels.width);\n    float height = log2(sizeInPixels.height);\n    \n    if (width != height) {\n        NSLog(@\"Voronoi point texture must be square\");\n        return;\n    }\n    if (width != floor(width) || height != floor(height)) {\n        NSLog(@\"Voronoi point texture must be a power of 2.  Texture size %f, %f\", sizeInPixels.width, sizeInPixels.height);\n        return;\n    }\n    glUniform2f(sizeUniform, _sizeInPixels.width, _sizeInPixels.height);\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageWeakPixelInclusionFilter.h",
    "content": "#import \"GPUImage3x3TextureSamplingFilter.h\"\n\n@interface GPUImageWeakPixelInclusionFilter : GPUImage3x3TextureSamplingFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageWeakPixelInclusionFilter.m",
    "content": "#import \"GPUImageWeakPixelInclusionFilter.h\"\n\n@implementation GPUImageWeakPixelInclusionFilter\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageWeakPixelInclusionFragmentShaderString = SHADER_STRING\n(\n precision lowp float;\n \n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;\n     float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;\n     float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;\n     float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;\n     float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;\n     float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;\n     float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;\n     float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;\n     float centerIntensity = texture2D(inputImageTexture, textureCoordinate).r;\n     \n     float pixelIntensitySum = bottomLeftIntensity + topRightIntensity + topLeftIntensity + bottomRightIntensity + leftIntensity + rightIntensity + bottomIntensity + topIntensity + centerIntensity;\n     float sumTest = step(1.5, pixelIntensitySum);\n     float pixelTest = step(0.01, centerIntensity);\n          \n     gl_FragColor = vec4(vec3(sumTest * pixelTest), 1.0);\n }\n);\n#else\nNSString *const kGPUImageWeakPixelInclusionFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;\n     float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;\n     float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;\n     float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;\n     float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;\n     float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;\n     float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;\n     float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;\n     float centerIntensity = texture2D(inputImageTexture, textureCoordinate).r;\n     \n     float pixelIntensitySum = bottomLeftIntensity + topRightIntensity + topLeftIntensity + bottomRightIntensity + leftIntensity + rightIntensity + bottomIntensity + topIntensity + centerIntensity;\n     float sumTest = step(1.5, pixelIntensitySum);\n     float pixelTest = step(0.01, centerIntensity);\n     \n     gl_FragColor = vec4(vec3(sumTest * 
pixelTest), 1.0);\n }\n);\n#endif\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [self initWithFragmentShaderFromString:kGPUImageWeakPixelInclusionFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageWhiteBalanceFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n/**\n * Created by Alaric Cole\n * Allows adjustment of color temperature in terms of what an image was effectively shot in. This means higher Kelvin values will warm the image, while lower values will cool it. \n \n */\n@interface GPUImageWhiteBalanceFilter : GPUImageFilter\n{\n    GLint temperatureUniform, tintUniform;\n}\n//choose color temperature, in degrees Kelvin\n@property(readwrite, nonatomic) CGFloat temperature;\n\n//adjust tint to compensate\n@property(readwrite, nonatomic) CGFloat tint;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageWhiteBalanceFilter.m",
    "content": "#import \"GPUImageWhiteBalanceFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageWhiteBalanceFragmentShaderString = SHADER_STRING\n(\nuniform sampler2D inputImageTexture;\nvarying highp vec2 textureCoordinate;\n \nuniform lowp float temperature;\nuniform lowp float tint;\n\nconst lowp vec3 warmFilter = vec3(0.93, 0.54, 0.0);\n\nconst mediump mat3 RGBtoYIQ = mat3(0.299, 0.587, 0.114, 0.596, -0.274, -0.322, 0.212, -0.523, 0.311);\nconst mediump mat3 YIQtoRGB = mat3(1.0, 0.956, 0.621, 1.0, -0.272, -0.647, 1.0, -1.105, 1.702);\n\nvoid main()\n{\n\tlowp vec4 source = texture2D(inputImageTexture, textureCoordinate);\n\t\n\tmediump vec3 yiq = RGBtoYIQ * source.rgb; //adjusting tint\n\tyiq.b = clamp(yiq.b + tint*0.5226*0.1, -0.5226, 0.5226);\n\tlowp vec3 rgb = YIQtoRGB * yiq;\n\n\tlowp vec3 processed = vec3(\n\t\t(rgb.r < 0.5 ? (2.0 * rgb.r * warmFilter.r) : (1.0 - 2.0 * (1.0 - rgb.r) * (1.0 - warmFilter.r))), //adjusting temperature\n\t\t(rgb.g < 0.5 ? (2.0 * rgb.g * warmFilter.g) : (1.0 - 2.0 * (1.0 - rgb.g) * (1.0 - warmFilter.g))), \n\t\t(rgb.b < 0.5 ? (2.0 * rgb.b * warmFilter.b) : (1.0 - 2.0 * (1.0 - rgb.b) * (1.0 - warmFilter.b))));\n\n\tgl_FragColor = vec4(mix(rgb, processed, temperature), source.a);\n}\n);\n#else\nNSString *const kGPUImageWhiteBalanceFragmentShaderString = SHADER_STRING\n(\n uniform sampler2D inputImageTexture;\n varying vec2 textureCoordinate;\n \n uniform float temperature;\n uniform float tint;\n \n const vec3 warmFilter = vec3(0.93, 0.54, 0.0);\n \n const mat3 RGBtoYIQ = mat3(0.299, 0.587, 0.114, 0.596, -0.274, -0.322, 0.212, -0.523, 0.311);\n const mat3 YIQtoRGB = mat3(1.0, 0.956, 0.621, 1.0, -0.272, -0.647, 1.0, -1.105, 1.702);\n \n void main()\n{\n\tvec4 source = texture2D(inputImageTexture, textureCoordinate);\n\t\n\tvec3 yiq = RGBtoYIQ * source.rgb; //adjusting tint\n\tyiq.b = clamp(yiq.b + tint*0.5226*0.1, -0.5226, 0.5226);\n\tvec3 rgb = YIQtoRGB * yiq;\n    \n\tvec3 processed = vec3(\n                               (rgb.r < 0.5 ? (2.0 * rgb.r * warmFilter.r) : (1.0 - 2.0 * (1.0 - rgb.r) * (1.0 - warmFilter.r))), //adjusting temperature\n                               (rgb.g < 0.5 ? (2.0 * rgb.g * warmFilter.g) : (1.0 - 2.0 * (1.0 - rgb.g) * (1.0 - warmFilter.g))),\n                               (rgb.b < 0.5 ? (2.0 * rgb.b * warmFilter.b) : (1.0 - 2.0 * (1.0 - rgb.b) * (1.0 - warmFilter.b))));\n    \n\tgl_FragColor = vec4(mix(rgb, processed, temperature), source.a);\n}\n);\n#endif\n\n@implementation GPUImageWhiteBalanceFilter\n\n@synthesize temperature = _temperature;\n@synthesize tint = _tint;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageWhiteBalanceFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    temperatureUniform = [filterProgram uniformIndex:@\"temperature\"];\n\ttintUniform = [filterProgram uniformIndex:@\"tint\"];\n\t\n    self.temperature = 5000.0;\n\tself.tint = 0.0;\n\n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setTemperature:(CGFloat)newValue;\n{\n    _temperature = newValue;\n    \n    [self setFloat:_temperature < 5000 ? 0.0004 * (_temperature-5000.0) : 0.00006 * (_temperature-5000.0) forUniform:temperatureUniform program:filterProgram];\n}\n\n- (void)setTint:(CGFloat)newValue;\n{\n\t_tint = newValue;\n\t\n\t[self setFloat:_tint / 100.0 forUniform:tintUniform program:filterProgram];\n}\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageXYDerivativeFilter.h",
    "content": "#import \"GPUImageSobelEdgeDetectionFilter.h\"\n\n@interface GPUImageXYDerivativeFilter : GPUImageSobelEdgeDetectionFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageXYDerivativeFilter.m",
    "content": "#import \"GPUImageXYDerivativeFilter.h\"\n\n// I'm using the Prewitt operator to obtain the derivative, then squaring the X and Y components and placing the product of the two in Z.\n// In tests, Prewitt seemed to be tied with Sobel for the best, and it's just a little cheaper to compute.\n// This is primarily intended to be used with corner detection filters.\n\n@implementation GPUImageXYDerivativeFilter\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageGradientFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n \n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform float edgeStrength;\n \n void main()\n {\n     float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;\n     float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;\n     float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;\n     float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;\n     float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;\n     float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;\n     float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;\n     float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;\n     \n     float verticalDerivative = -topLeftIntensity - topIntensity - topRightIntensity + bottomLeftIntensity + bottomIntensity + bottomRightIntensity;\n     float horizontalDerivative = -bottomLeftIntensity - leftIntensity - topLeftIntensity + bottomRightIntensity + rightIntensity + topRightIntensity;\n     verticalDerivative = verticalDerivative * edgeStrength;\n     horizontalDerivative = horizontalDerivative * edgeStrength;\n     \n     // Scaling the X * Y operation so that negative numbers are not clipped in the 0..1 range. 
This will be expanded in the corner detection filter\n     gl_FragColor = vec4(horizontalDerivative * horizontalDerivative, verticalDerivative * verticalDerivative, ((verticalDerivative * horizontalDerivative) + 1.0) / 2.0, 1.0);\n }\n);\n#else\nNSString *const kGPUImageGradientFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n varying vec2 leftTextureCoordinate;\n varying vec2 rightTextureCoordinate;\n \n varying vec2 topTextureCoordinate;\n varying vec2 topLeftTextureCoordinate;\n varying vec2 topRightTextureCoordinate;\n \n varying vec2 bottomTextureCoordinate;\n varying vec2 bottomLeftTextureCoordinate;\n varying vec2 bottomRightTextureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform float edgeStrength;\n\n void main()\n {\n     float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;\n     float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;\n     float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;\n     float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;\n     float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;\n     float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;\n     float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;\n     float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;\n     \n     float verticalDerivative = -topLeftIntensity - topIntensity - topRightIntensity + bottomLeftIntensity + bottomIntensity + bottomRightIntensity;\n     float horizontalDerivative = -bottomLeftIntensity - leftIntensity - topLeftIntensity + bottomRightIntensity + rightIntensity + topRightIntensity;\n     verticalDerivative = verticalDerivative * edgeStrength;\n     horizontalDerivative = horizontalDerivative * edgeStrength;\n     \n     // Scaling the X * Y operation so that negative numbers are not clipped in the 0..1 range. This will be expanded in the corner detection filter\n     gl_FragColor = vec4(horizontalDerivative * horizontalDerivative, verticalDerivative * verticalDerivative, ((verticalDerivative * horizontalDerivative) + 1.0) / 2.0, 1.0);\n }\n);\n#endif\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [self initWithFragmentShaderFromString:kGPUImageGradientFragmentShaderString]))\n    {\n\t\treturn nil;\n    }\n    \n    self.edgeStrength = 1.0;\n    \n    return self;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageZoomBlurFilter.h",
    "content": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageZoomBlurFilter : GPUImageFilter\n\n/** A multiplier for the blur size, ranging from 0.0 on up, with a default of 1.0\n */\n@property (readwrite, nonatomic) CGFloat blurSize;\n\n/** The normalized center of the blur. (0.5, 0.5) by default\n */\n@property (readwrite, nonatomic) CGPoint blurCenter;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageZoomBlurFilter.m",
    "content": "#import \"GPUImageZoomBlurFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageZoomBlurFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform highp vec2 blurCenter;\n uniform highp float blurSize;\n \n void main()\n {\n     // TODO: Do a more intelligent scaling based on resolution here\n     highp vec2 samplingOffset = 1.0/100.0 * (blurCenter - textureCoordinate) * blurSize;\n     \n     lowp vec4 fragmentColor = texture2D(inputImageTexture, textureCoordinate) * 0.18;\n     fragmentColor += texture2D(inputImageTexture, textureCoordinate + samplingOffset) * 0.15;\n     fragmentColor += texture2D(inputImageTexture, textureCoordinate + (2.0 * samplingOffset)) *  0.12;\n     fragmentColor += texture2D(inputImageTexture, textureCoordinate + (3.0 * samplingOffset)) * 0.09;\n     fragmentColor += texture2D(inputImageTexture, textureCoordinate + (4.0 * samplingOffset)) * 0.05;\n     fragmentColor += texture2D(inputImageTexture, textureCoordinate - samplingOffset) * 0.15;\n     fragmentColor += texture2D(inputImageTexture, textureCoordinate - (2.0 * samplingOffset)) *  0.12;\n     fragmentColor += texture2D(inputImageTexture, textureCoordinate - (3.0 * samplingOffset)) * 0.09;\n     fragmentColor += texture2D(inputImageTexture, textureCoordinate - (4.0 * samplingOffset)) * 0.05;\n     \n     gl_FragColor = fragmentColor;\n }\n);\n#else\nNSString *const kGPUImageZoomBlurFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform vec2 blurCenter;\n uniform float blurSize;\n \n void main()\n {\n     // TODO: Do a more intelligent scaling based on resolution here\n     vec2 samplingOffset = 1.0/100.0 * (blurCenter - textureCoordinate) * blurSize;\n     \n     vec4 fragmentColor = texture2D(inputImageTexture, textureCoordinate) * 0.18;\n     fragmentColor += texture2D(inputImageTexture, textureCoordinate + samplingOffset) * 0.15;\n     fragmentColor += texture2D(inputImageTexture, textureCoordinate + (2.0 * samplingOffset)) *  0.12;\n     fragmentColor += texture2D(inputImageTexture, textureCoordinate + (3.0 * samplingOffset)) * 0.09;\n     fragmentColor += texture2D(inputImageTexture, textureCoordinate + (4.0 * samplingOffset)) * 0.05;\n     fragmentColor += texture2D(inputImageTexture, textureCoordinate - samplingOffset) * 0.15;\n     fragmentColor += texture2D(inputImageTexture, textureCoordinate - (2.0 * samplingOffset)) *  0.12;\n     fragmentColor += texture2D(inputImageTexture, textureCoordinate - (3.0 * samplingOffset)) * 0.09;\n     fragmentColor += texture2D(inputImageTexture, textureCoordinate - (4.0 * samplingOffset)) * 0.05;\n     \n     gl_FragColor = fragmentColor;\n }\n);\n#endif\n\n@interface GPUImageZoomBlurFilter()\n{\n    GLint blurSizeUniform, blurCenterUniform;\n}\n@end\n\n@implementation GPUImageZoomBlurFilter\n\n@synthesize blurSize = _blurSize;\n@synthesize blurCenter = _blurCenter;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGPUImageZoomBlurFragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    blurSizeUniform = [filterProgram uniformIndex:@\"blurSize\"];\n    blurCenterUniform = [filterProgram uniformIndex:@\"blurCenter\"];\n    \n    self.blurSize = 1.0;\n    self.blurCenter = CGPointMake(0.5, 0.5);\n    \n    return self;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- 
(void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n    [super setInputRotation:newInputRotation atIndex:textureIndex];\n    [self setBlurCenter:self.blurCenter];\n}\n\n- (void)setBlurSize:(CGFloat)newValue;\n{\n    _blurSize = newValue;\n    \n    [self setFloat:_blurSize forUniform:blurSizeUniform program:filterProgram];\n}\n\n- (void)setBlurCenter:(CGPoint)newValue;\n{\n    _blurCenter = newValue;\n    \n    CGPoint rotatedPoint = [self rotatedPoint:_blurCenter forRotation:inputRotation];\n    [self setPoint:rotatedPoint forUniform:blurCenterUniform program:filterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageiOSBlurFilter.h",
    "content": "#import \"GPUImageFilterGroup.h\"\n\n@class GPUImageSaturationFilter;\n@class GPUImageGaussianBlurFilter;\n@class GPUImageLuminanceRangeFilter;\n\n@interface GPUImageiOSBlurFilter : GPUImageFilterGroup\n{\n    GPUImageSaturationFilter *saturationFilter;\n    GPUImageGaussianBlurFilter *blurFilter;\n    GPUImageLuminanceRangeFilter *luminanceRangeFilter;\n}\n\n/** A radius in pixels to use for the blur, with a default of 12.0. This adjusts the sigma variable in the Gaussian distribution function.\n */\n@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;\n\n/** Saturation ranges from 0.0 (fully desaturated) to 2.0 (max saturation), with 0.8 as the normal level\n */\n@property (readwrite, nonatomic) CGFloat saturation;\n\n/** The degree to which to downsample, then upsample the incoming image to minimize computations within the Gaussian blur, default of 4.0\n */\n@property (readwrite, nonatomic) CGFloat downsampling;\n\n\n/** The degree to reduce the luminance range, from 0.0 to 1.0. Default is 0.6.\n */\n@property (readwrite, nonatomic) CGFloat rangeReductionFactor;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageiOSBlurFilter.m",
    "content": "#import \"GPUImageiOSBlurFilter.h\"\n#import \"GPUImageSaturationFilter.h\"\n#import \"GPUImageGaussianBlurFilter.h\"\n#import \"GPUImageLuminanceRangeFilter.h\"\n\n@implementation GPUImageiOSBlurFilter\n\n@synthesize blurRadiusInPixels;\n@synthesize saturation;\n@synthesize downsampling = _downsampling;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)init;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n    // First pass: downsample and desaturate\n    saturationFilter = [[GPUImageSaturationFilter alloc] init];\n    [self addFilter:saturationFilter];\n    \n    // Second pass: apply a strong Gaussian blur\n    blurFilter = [[GPUImageGaussianBlurFilter alloc] init];\n    [self addFilter:blurFilter];\n    \n    // Third pass: upsample and adjust luminance range\n    luminanceRangeFilter = [[GPUImageLuminanceRangeFilter alloc] init];\n    [self addFilter:luminanceRangeFilter];\n        \n    [saturationFilter addTarget:blurFilter];\n    [blurFilter addTarget:luminanceRangeFilter];\n    \n    self.initialFilters = [NSArray arrayWithObject:saturationFilter];\n    self.terminalFilter = luminanceRangeFilter;\n    \n    self.blurRadiusInPixels = 12.0;\n    self.saturation = 0.8;\n    self.downsampling = 4.0;\n    self.rangeReductionFactor = 0.6;\n\n    return self;\n}\n\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n{\n    if (_downsampling > 1.0)\n    {\n        CGSize rotatedSize = [saturationFilter rotatedSize:newSize forIndex:textureIndex];\n\n        [saturationFilter forceProcessingAtSize:CGSizeMake(rotatedSize.width / _downsampling, rotatedSize.height / _downsampling)];\n        [luminanceRangeFilter forceProcessingAtSize:rotatedSize];\n    }\n    \n    [super setInputSize:newSize atIndex:textureIndex];\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n// From Apple's UIImage+ImageEffects category:\n\n// A description of how to compute the box kernel width from the Gaussian\n// radius (aka standard deviation) appears in the SVG spec:\n// http://www.w3.org/TR/SVG/filters.html#feGaussianBlurElement\n//\n// For larger values of 's' (s >= 2.0), an approximation can be used: Three\n// successive box-blurs build a piece-wise quadratic convolution kernel, which\n// approximates the Gaussian kernel to within roughly 3%.\n//\n// let d = floor(s * 3*sqrt(2*pi)/4 + 0.5)\n//\n// ... if d is odd, use three box-blurs of size 'd', centered on the output pixel.\n\n\n- (void)setBlurRadiusInPixels:(CGFloat)newValue;\n{\n    blurFilter.blurRadiusInPixels = newValue;\n}\n\n- (CGFloat)blurRadiusInPixels;\n{\n    return blurFilter.blurRadiusInPixels;\n}\n\n- (void)setSaturation:(CGFloat)newValue;\n{\n    saturationFilter.saturation = newValue;\n}\n\n- (CGFloat)saturation;\n{\n    return saturationFilter.saturation;\n}\n\n- (void)setDownsampling:(CGFloat)newValue;\n{\n    _downsampling = newValue;\n}\n\n- (void)setRangeReductionFactor:(CGFloat)rangeReductionFactor\n{\n    luminanceRangeFilter.rangeReductionFactor = rangeReductionFactor;\n}\n\n- (CGFloat)rangeReductionFactor\n{\n    return luminanceRangeFilter.rangeReductionFactor;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageAddStickerFilter.h",
    "content": "//\n//  GLImageAddStickerFilter.h\n//  WEOpenGLKit\n//\n//  Created by LHD on 2018/6/4.\n//  Copyright © 2018年 LHD. All rights reserved.\n//\n\n#import \"GPUImageFilterGroup.h\"\n#import \"GLImageStickerFilter.h\"\n#import \"GPUImagePicture.h\"\n\n@interface GLImageAddStickerFilter : GPUImageFilterGroup\n{\n    GLImageStickerFilter *stickerFiler;\n    GPUImagePicture *stickerImageSource;\n}\n\n/** 贴纸大小（需要归一化，如果不设置则取贴纸真实尺寸和super的尺寸比值） */\n@property (nonatomic, assign) CGSize size;\n/** 中心坐标（需要归一化） */\n@property (nonatomic, assign) CGPoint center;\n/** 旋转角（弧度） */\n@property (nonatomic, assign) CGFloat theta;\n/** 透明度 */\n@property (nonatomic, assign) CGFloat alpha;\n/** 混合模式 */\n@property (nonatomic, assign) int blendMode;\n/** 镜像模式 1 开启镜像 0 不开启镜像 */\n@property (nonatomic, assign) int mirrorMode;\n\n\n/** 设置贴纸图片 */\n- (void)setStickerImage:(UIImage *)stickerImage;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageAddStickerFilter.m",
    "content": "//\n//  GLImageAddStickerFilter.m\n//  WEOpenGLKit\n//\n//  Created by LHD on 2018/6/4.\n//  Copyright © 2018年 LHD. All rights reserved.\n//\n\n#import \"GLImageAddStickerFilter.h\"\n#import \"GLImageMixBlendFilter.h\"\n\n@implementation GLImageAddStickerFilter\n{\n    UIImage *_stickerImage;\n}\n\n- (instancetype)init\n{\n    self = [super init];\n    if (self)\n    {\n        stickerFiler = [[GLImageStickerFilter alloc] init];\n        [stickerFiler disableSecondFrameCheck];\n        stickerFiler.theta = 45 * M_PI / 180.0;\n        [self addFilter:stickerFiler];\n        \n        self.initialFilters = @[stickerFiler];\n        self.terminalFilter = stickerFiler;\n    }\n    \n    return self;\n}\n\n- (void)setStickerImage:(UIImage *)stickerImage\n{\n    _stickerImage = stickerImage;\n    GPUImagePicture *picture = [[GPUImagePicture alloc] initWithImage:stickerImage];\n    [picture addTarget:stickerFiler atTextureLocation:1];\n    [picture processImage];\n    \n    if (stickerImageSource)\n    {\n        [stickerImageSource removeTarget:stickerFiler];\n        stickerImageSource = nil;\n    }\n    \n    stickerImageSource = picture;\n}\n\n- (void)setSize:(CGSize)size\n{\n    _size = size;\n    [stickerFiler setSize:size];\n}\n\n- (void)setCenter:(CGPoint)center\n{\n    _center = center;\n    [stickerFiler setCenter:center];\n}\n\n- (void)setTheta:(CGFloat)theta\n{\n    _theta = theta;\n    [stickerFiler setTheta:theta];\n}\n\n- (void)setAlpha:(CGFloat)alpha\n{\n    _alpha = alpha;\n    [stickerFiler setAlpha:alpha];\n}\n\n- (void)setBlendMode:(int)blendMode\n{\n    _blendMode = blendMode;\n    [stickerFiler setBlendMode:blendMode];\n}\n\n- (void)setMirrorMode:(int)mirrorMode\n{\n    _mirrorMode = mirrorMode;\n    [stickerFiler setMirrorMode:mirrorMode];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageAddStickerWithEffectFilter.h",
    "content": "//\n//  GLImageAddStickerWithEffectFilter.h\n//  WEOpenGLKit\n//\n//  Created by LHD on 2018/6/4.\n//  Copyright © 2018年 LHD. All rights reserved.\n//\n\n#import \"GPUImageFilterGroup.h\"\n#import \"GLImageStickerFilter.h\"\n#import \"GPUImageHSBFilter.h\"\n#import \"GLImageGassianBlurMixFilter.h\"\n#import \"GPUImagePicture.h\"\n\n@interface GLImageAddStickerWithEffectFilter : GPUImageFilterGroup\n{\n    GLImageStickerFilter *stickerFiler;\n    GPUImagePicture *stickerImageSource;\n}\n\n/** 贴纸大小（需要归一化，如果不设置则取贴纸真实尺寸和super的尺寸比值） */\n@property (nonatomic, assign) CGSize size;\n/** 中心坐标（需要归一化） */\n@property (nonatomic, assign) CGPoint center;\n/** 旋转角（弧度） */\n@property (nonatomic, assign) CGFloat theta;\n/** 透明度 */\n@property (nonatomic, assign) CGFloat alpha;\n/** 混合模式 */\n@property (nonatomic, assign) int blendMode;\n\n/** 设置贴纸图片 */\n- (void)setStickerImage:(UIImage *)stickerImage;\n\n@end\n\n\n@interface GLImageAddStickerWithEffectFilter (Effect)\n\n/** 打开模糊 */\n@property (nonatomic, assign) BOOL enableBlur;\n/** 模糊半径 */\n@property (nonatomic, assign) CGFloat blurRadiusInPixels;\n/** 模糊程度 */\n@property (nonatomic, assign) CGFloat blur;\n/** 打开色相 */\n@property (nonatomic, assign) BOOL enableHue;\n/** 色相偏差 */\n@property (nonatomic, assign) CGFloat hue;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageAddStickerWithEffectFilter.m",
    "content": "//\n//  GLImageAddStickerWithEffectFilter.m\n//  WEOpenGLKit\n//\n//  Created by LHD on 2018/6/4.\n//  Copyright © 2018年 LHD. All rights reserved.\n//\n\n#import \"GLImageAddStickerWithEffectFilter.h\"\n\n@interface GLImageAddStickerWithEffectFilter ()\n\n/** 打开模糊 */\n@property (nonatomic, assign) BOOL enableBlur;\n/** 模糊半径 */\n@property (nonatomic, assign) CGFloat blurRadiusInPixels;\n/** 模糊程度 */\n@property (nonatomic, assign) CGFloat blur;\n/** 打开色相 */\n@property (nonatomic, assign) BOOL enableHue;\n/** 色相偏差 */\n@property (nonatomic, assign) CGFloat hue;\n\n@property (nonatomic, strong) GPUImageHSBFilter *hsbFilter;\n\n@property (nonatomic, strong) GLImageGassianBlurMixFilter *blurFilter;\n\n- (void)updateEffectFilterChain;\n\n@end\n\n@implementation GLImageAddStickerWithEffectFilter\n{\n    UIImage *_stickerImage;\n}\n\n- (instancetype)init\n{\n    self = [super init];\n    if (self)\n    {\n        stickerFiler = [[GLImageStickerFilter alloc] init];\n        [stickerFiler disableSecondFrameCheck];\n        stickerFiler.theta = 0;\n        [self addFilter:stickerFiler];\n        \n        self.initialFilters = @[stickerFiler];\n        self.terminalFilter = stickerFiler;\n        \n//        self.blurRadiusInPixels = 20.0;\n    }\n    \n    return self;\n}\n\n- (void)setStickerImage:(UIImage *)stickerImage\n{\n    _stickerImage = stickerImage;\n    GPUImagePicture *picture = [[GPUImagePicture alloc] initWithImage:stickerImage];\n    [picture addTarget:stickerFiler atTextureLocation:1];\n    [picture processImage];\n    \n    if (stickerImageSource)\n    {\n        [stickerImageSource removeTarget:stickerFiler];\n        stickerImageSource = nil;\n    }\n    \n    stickerImageSource = picture;\n    [self setBlurRadiusInPixels:stickerImage.size.width / 50.0];\n    [self updateEffectFilterChain];\n    [self stickerProcessImageIfNeeded];\n}\n\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex\n{\n    __weak typeof(self) weakSelf = self;\n    \n    runSynchronouslyOnVideoProcessingQueue(^{\n        [weakSelf stickerProcessImageIfNeeded];\n    });\n    \n    [super newFrameReadyAtTime:frameTime atIndex:textureIndex];\n}\n\n- (void)stickerProcessImageIfNeeded\n{\n    if (self.enableBlur || self.enableHue)\n    {\n        [stickerImageSource processImage];\n    }\n}\n\n- (void)setSize:(CGSize)size\n{\n    _size = size;\n    [stickerFiler setSize:size];\n}\n\n- (void)setCenter:(CGPoint)center\n{\n    _center = center;\n    [stickerFiler setCenter:center];\n}\n\n- (void)setTheta:(CGFloat)theta\n{\n    _theta = theta;\n    [stickerFiler setTheta:theta];\n}\n\n- (void)setAlpha:(CGFloat)alpha\n{\n    _alpha = alpha;\n    [stickerFiler setAlpha:alpha];\n}\n\n- (void)setBlendMode:(int)blendMode\n{\n    _blendMode = blendMode;\n    [stickerFiler setBlendMode:blendMode];\n}\n\n- (void)updateEffectFilterChain\n{\n    __weak typeof(self) weakSelf = self;\n    \n    runSynchronouslyOnVideoProcessingQueue(^{\n        NSMutableArray *effectFilters = [[NSMutableArray alloc] init];\n        \n        if (_enableHue)\n        {\n            [effectFilters addObject:weakSelf.hsbFilter];\n        }\n        \n        if (_enableBlur)\n        {\n            [effectFilters addObject:weakSelf.blurFilter];\n        }\n        \n        id preFilter = stickerImageSource;\n        GPUImageOutput<GPUImageInput> *theFilter = nil;\n        \n        for (int i = 0; i < effectFilters.count; i++)\n        {\n            theFilter = [effectFilters objectAtIndex:i];\n            
[preFilter removeAllTargets];\n            [preFilter addTarget:theFilter];\n            preFilter = theFilter;\n        }\n        \n        [preFilter removeAllTargets];\n        [preFilter addTarget:stickerFiler atTextureLocation:1];\n    });\n}\n\n#pragma mark - GLImageAddStickerWithEffectFilter (Effect)\n\n- (void)setEnableBlur:(BOOL)enableBlur\n{\n    _enableBlur = enableBlur;\n    [self updateEffectFilterChain];\n}\n\n- (void)setEnableHue:(BOOL)enableHue\n{\n    _enableHue = enableHue;\n    [self updateEffectFilterChain];\n}\n\n- (void)setHue:(CGFloat)hue\n{\n    [self.hsbFilter reset];\n    [self.hsbFilter rotateHue:hue];\n}\n\n- (GPUImageHSBFilter *)hsbFilter\n{\n    if (!_hsbFilter)\n    {\n        _hsbFilter = [[GPUImageHSBFilter alloc] init];\n    }\n    \n    return _hsbFilter;\n}\n\n- (void)setBlurRadiusInPixels:(CGFloat)blurRadiusInPixels\n{\n    _blurRadiusInPixels = blurRadiusInPixels;\n    [self.blurFilter setBlurRadiusInPixels:blurRadiusInPixels];\n}\n\n- (void)setBlur:(CGFloat)blur\n{\n    _blur = blur;\n    [self.blurFilter setIntensity:blur];\n}\n\n- (GLImageGassianBlurMixFilter *)blurFilter\n{\n    if (!_blurFilter)\n    {\n        _blurFilter = [[GLImageGassianBlurMixFilter alloc] init];\n    }\n    \n    return _blurFilter;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageBlendFilter.h",
    "content": "//\n//  GLImageBlendFilter.h\n//  GLImage\n//\n//  Created by LHD on 2018/5/16.\n//  Copyright © 2018年 LHD. All rights reserved.\n//\n\n#import \"GPUImageTwoInputFilter.h\"\n\n@interface GLImageBlendFilter : GPUImageTwoInputFilter\n{\n    GLint intensityUniform;\n}\n\n@property (nonatomic, assign) CGFloat intensity;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageBlendFilter.m",
    "content": "//\n//  GLImageBlendFilter.m\n//  GLImage\n//\n//  Created by LHD on 2018/5/16.\n//  Copyright © 2018年 LHD. All rights reserved.\n//\n\n#import \"GLImageBlendFilter.h\"\n\n@implementation GLImageBlendFilter\n\n- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString\n{\n    self = [super initWithFragmentShaderFromString:fragmentShaderString];\n    \n    if (self)\n    {\n        intensityUniform = [filterProgram uniformIndex:@\"intensity\"];\n        self.intensity = 0.0;\n    }\n    \n    return self;\n}\n\n- (void)setIntensity:(CGFloat)intensity\n{\n    _intensity = intensity;\n    [self setFloat:intensity forUniform:intensityUniform program:filterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageCircleFilter.h",
    "content": "//\n//  GLImageCircleFilter.h\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/3/18.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import <GPURenderKit/GPURenderKit.h>\n\nNS_ASSUME_NONNULL_BEGIN\n\n@interface GLImageCircleFilter : GPUImageFilter\n{\n    GLint iResolutionUniform;\n}\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageCircleFilter.m",
    "content": "//\n//  GLImageCircleFilter.m\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/3/18.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import \"GLImageCircleFilter.h\"\n#include <math.h>\nNSString *const kGLImageCircleFragmentShaderString = SHADER_STRING\n(\n precision mediump float;\n\n varying highp vec2 textureCoordinate;\n uniform sampler2D inputImageTexture;\n \n uniform highp vec2 iResolution;\n \n \n float roundRect(in vec2 distFromCenter, in vec2 halfSize, in float cornerRadius)\n{\n    float t = length(max(abs(distFromCenter) - (halfSize - cornerRadius), 0.)) - cornerRadius;\n    return smoothstep(-1., 1.,t);\n    \n}\n \n void main()\n {\n     \n     highp vec2 uv = textureCoordinate;\n     \n     \n     \n     vec4 color = texture2D(inputImageTexture, uv);\n     \n          \n     highp vec2 fragCoord = uv * iResolution;\n     vec2 xy = fragCoord.xy - iResolution.xy *0.5;\n     vec2 hsize = iResolution.xy / 2.0;\n     float p = roundRect(xy, hsize, 100.0);\n     p = 1.0-p;\n     gl_FragColor = vec4(color.r*p,color.g*p,color.b*p,p);\n }\n );\n\n@implementation GLImageCircleFilter\n\n\n- (id)init\n{\n    if (!(self = [super initWithFragmentShaderFromString:kGLImageCircleFragmentShaderString]))\n    {\n        return nil;\n    }\n    \n    iResolutionUniform = [filterProgram uniformIndex:@\"iResolution\"];\n    \n    return self;\n}\n\n- (void)setupFilterForSize:(CGSize)filterFrameSize\n{\n\n    NSLog(@\"setupFilterForSize---%@\",NSStringFromCGSize(filterFrameSize));\n    [self setSize:filterFrameSize forUniform:iResolutionUniform program:filterProgram];\n\n}\n\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageGassianBlurMixFilter.h",
    "content": "//\n//  GLImageGassianBlurMixFilter.h\n//  WEOpenGLKit\n//\n//  Created by LHD on 2018/8/2.\n//  Copyright © 2018年 LHD. All rights reserved.\n//\n\n#import \"GPUImageFilterGroup.h\"\n#import \"GPUImageGaussianBlurFilter.h\"\n#import \"GLImageMixBlendFilter.h\"\n\n@interface GLImageGassianBlurMixFilter : GPUImageFilterGroup\n{\n    GPUImageGaussianBlurFilter *blurFilter;\n    GLImageMixBlendFilter *mixFilter;\n}\n\n@property (nonatomic, assign) CGFloat intensity;\n@property (nonatomic, assign) CGFloat blurRadiusInPixels;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageGassianBlurMixFilter.m",
    "content": "//\n//  GLImageGassianBlurMixFilter.m\n//  WEOpenGLKit\n//\n//  Created by LHD on 2018/8/2.\n//  Copyright © 2018年 LHD. All rights reserved.\n//\n\n#import \"GLImageGassianBlurMixFilter.h\"\n\n@implementation GLImageGassianBlurMixFilter\n\n- (instancetype)init\n{\n    self = [super init];\n    if (self)\n    {\n        blurFilter = [[GPUImageGaussianBlurFilter alloc] init];\n        mixFilter = [[GLImageMixBlendFilter alloc] init];\n        [blurFilter addTarget:mixFilter atTextureLocation:1];\n        \n        self.blurRadiusInPixels = 4.0;\n        self.initialFilters = @[blurFilter, mixFilter];\n        self.terminalFilter = mixFilter;\n        \n    }\n    return self;\n}\n\n- (void)setBlurRadiusInPixels:(CGFloat)blurRadiusInPixels\n{\n    _blurRadiusInPixels = blurRadiusInPixels;\n    [blurFilter setBlurRadiusInPixels:blurRadiusInPixels];\n}\n\n- (void)setIntensity:(CGFloat)intensity\n{\n    _intensity = intensity;\n    [mixFilter setIntensity:intensity];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageLutFilter.h",
    "content": "//\n//  GLImageLutFilter.h\n//  WeGPUImage\n//\n//  Created by LHD on 2018/3/1.\n//  Copyright © 2018年 LHD. All rights reserved.\n//\n\n#import \"GPUImageFilterGroup.h\"\n#import \"GPUImagePicture.h\"\n#import \"GPUImageLookupFilter.h\"\n\n@interface GLImageLutFilter : GPUImageFilterGroup\n{\n    GPUImagePicture *lookupImageSource;\n    GPUImageLookupFilter *lookupFilter;\n}\n\n@property (nonatomic, assign) CGFloat intensity;\n\n- (void)setLutImage:(UIImage *)lutImage;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageLutFilter.m",
    "content": "//\n//  GLImageLutFilter.m\n//  WeGPUImage\n//\n//  Created by LHD on 2018/3/1.\n//  Copyright © 2018年 LHD. All rights reserved.\n//\n\n#import \"GLImageLutFilter.h\"\n\n@implementation GLImageLutFilter\n\n- (instancetype)init\n{\n    self = [super init];\n    if (self)\n    {\n        lookupFilter = [[GPUImageLookupFilter alloc] init];\n        [lookupFilter disableSecondFrameCheck];\n        [self addFilter:lookupFilter];\n        \n        self.initialFilters = [NSArray arrayWithObjects:lookupFilter, nil];\n        self.terminalFilter = lookupFilter;\n        \n        self.intensity = 1.0;\n    }\n    \n    return self;\n}\n\n- (void)setLutImage:(UIImage *)lutImage\n{\n    GPUImagePicture *picture = [[GPUImagePicture alloc] initWithImage:lutImage];\n    [picture addTarget:lookupFilter atTextureLocation:1];\n    [picture processImage];\n    \n    if (lookupImageSource)\n    {\n        [lookupImageSource removeTarget:lookupFilter];\n        lookupImageSource = nil;\n    }\n    \n    lookupImageSource = picture;\n}\n\n- (void)setIntensity:(CGFloat)intensity\n{\n    _intensity = intensity;\n    [lookupFilter setIntensity:intensity];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageMixBlendFilter.h",
    "content": "//\n//  GLImageMixBlendFilter.h\n//  GLImage\n//\n//  Created by LHD on 2018/5/18.\n//  Copyright © 2018年 LHD. All rights reserved.\n//\n\n#import \"GLImageBlendFilter.h\"\n\n@interface GLImageMixBlendFilter : GLImageBlendFilter\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageMixBlendFilter.m",
    "content": "//\n//  GLImageMixBlendFilter.m\n//  GLImage\n//\n//  Created by LHD on 2018/5/18.\n//  Copyright © 2018年 LHD. All rights reserved.\n//\n\n#import \"GLImageMixBlendFilter.h\"\n\nNSString *const kGLImageMixBlendFragmentShaderString = SHADER_STRING\n(\n precision mediump float;\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture; // 这个是原图 groupFilter location为0的才能传入原图\n uniform sampler2D inputImageTexture2;\n uniform float intensity;\n \n void main()\n {\n     lowp vec4 originalColor = texture2D(inputImageTexture, textureCoordinate);\n     lowp vec4 blendColor = texture2D(inputImageTexture2, textureCoordinate2);\n     gl_FragColor = mix(originalColor, blendColor, intensity);\n }\n);\n\n@implementation GLImageMixBlendFilter\n\n- (instancetype)init\n{\n    self = [super initWithFragmentShaderFromString:kGLImageMixBlendFragmentShaderString];\n    if (self) {\n    }\n    return self;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageShapeFilter.h",
    "content": "//\n//  GLImageShapeFilter.h\n//  WEOpenGLKit\n//\n//  Created by 刘海东 on 2018/6/14.\n//  Copyright © 2018年 LHD. All rights reserved.\n//\n\n//增高瘦身 Filter\n\n#import <GPURenderKit/GPURenderKit.h>\n\n/**\n 获取处理后的数据\n\n @param squareVertexes 顶点坐标数组\n @param textureCoordinates 纹理坐标数组\n @param changeValue 改变的值\n @param type 0 增高 1瘦身\n */\ntypedef void(^GetVerticesAndTextureCoordinatesHandle)(NSArray *squareVertexes,NSArray *textureCoordinates,float changeValue, NSInteger type);\n\n@interface GLImageShapeFilter : GPUImageFilter\n\n\n@property (nonatomic, assign) float imageWidth;\n@property (nonatomic, assign) float imageHeight;\n/** 屏幕宽高比 */\n@property (nonatomic, assign) float screenRatio;\n/** 归一化的 0.0-1.0*/\n/** 区间值 最小 */\n@property (nonatomic, assign) float minValue;\n/** 区间值 最大 */\n@property (nonatomic, assign) float maxValue;\n/** 0:增高 1:瘦身 */\n@property (nonatomic, assign) NSInteger type;\n\n\n- (void)changeValue:(float)value;\n\n/** 获取处理后的顶点纹理坐标数组 */\n- (void)getVerticesAndTextureCoordinatesHandle:(GetVerticesAndTextureCoordinatesHandle)result;\n\n\n\n\n@end\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageShapeFilter.m",
    "content": "//\n//  GLImageShapeFilter.m\n//  WEOpenGLKit\n//\n//  Created by 刘海东 on 2018/6/14.\n//  Copyright © 2018年 LHD. All rights reserved.\n//\n\n#import \"GLImageShapeFilter.h\"\n/** 增高最多是原图的增高的8% */\n#define kStretchMaxRatio 0.08\n/** 增高的最大值 */\n#define kStretchMax_h (_imageHeight*kStretchMaxRatio)\n/** 0.6是产品定义的 */\n#define kRatio 0.6\n/** 压缩的比例值 */\n#define kCompress_ratio (0.8 *kRatio)\n@interface GLImageShapeFilter ()\n{\n    GLfloat squareVertexes[16];\n    GLfloat textureCoordinates[16];\n\n}\n@property (nonatomic, assign) float changValue;\n@end\n\n\n@implementation GLImageShapeFilter\n\n\n\n\n- (instancetype)init\n{\n    self = [super init];\n    if (self) {\n        \n        \n    }\n    return self;\n}\n\n\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex\n{\n    \n    [self renderToTextureWithVertices:squareVertexes textureCoordinates:textureCoordinates];\n    [self informTargetsAboutNewFrameAtTime:frameTime];\n    \n}\n\n\n- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;\n{\n    if (self.preventRendering)\n    {\n        [firstInputFramebuffer unlock];\n        return;\n    }\n    \n    [GPUImageContext setActiveShaderProgram:filterProgram];\n    \n    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];\n    [outputFramebuffer activateFramebuffer];\n    if (usingNextFrameForImageCapture)\n    {\n        [outputFramebuffer lock];\n    }\n    \n    [self setUniformsForProgramAtIndex:0];\n    \n    glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);\n    glClear(GL_COLOR_BUFFER_BIT);\n    \n    glActiveTexture(GL_TEXTURE2);\n    glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);\n    \n    glUniform1i(filterInputTextureUniform, 2);\n    \n    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);\n    glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);\n    \n    glDrawArrays(GL_TRIANGLE_STRIP, 0, 8);\n    //    glFlush();\n    [firstInputFramebuffer unlock];\n    \n    if (usingNextFrameForImageCapture)\n    {\n        dispatch_semaphore_signal(imageCaptureSemaphore);\n    }\n}\n\n\n#pragma mark 配置竖直方向上面的顶点数据\n- (void )verticalConfigVertex\n{\n    \n    float imageRatio = (float)self.imageWidth/self.imageHeight;\n    float screenRatio = self.screenRatio;\n    float xfactor=1.0;\n    float yfactor=1.0;\n    \n    \n    float tempValue = self.changValue;\n    //负值\n    float xMinus = -xfactor;\n    float yMinus = -yfactor;\n    \n    //正值\n    float xPlus = xfactor;\n    float yPlus = yfactor;\n    \n    \n    float x1,x2,x3,x4,x5,x6,x7,x8 = 0.0;\n    float y1,y2,y3,y4,y5,y6,y7,y8 = 0.0;\n    float tx1,tx2,tx3,tx4,tx5,tx6,tx7,tx8 = 0.0;\n    float ty1,ty2,ty3,ty4,ty5,ty6,ty7,ty8 = 0.0;\n    \n    \n    if (imageRatio > screenRatio) {\n        \n        //宽顶到边\n        yMinus = xMinus*screenRatio/(_imageWidth/(_imageHeight+tempValue*kStretchMax_h));\n        yPlus = xPlus*screenRatio/(_imageWidth/(_imageHeight+tempValue*kStretchMax_h));\n        \n        //原来的比例\n        float originY = xPlus*screenRatio/imageRatio;\n        \n        //高大于宽\n        x1 = xMinus;\n        y1 = yMinus;\n        tx1 = 0;\n        ty1 = 0;\n        \n        x2 = xPlus;\n        y2 = yMinus;\n        tx2 = 1;\n        ty2 = 0;\n        \n        \n        /** 
极限值的判断处理 */\n        if ((yMinus<-1.0000001 || yMinus>-0.0000001) || (yPlus>1.0000001 || yPlus<.0000001)) {\n            \n            //负值\n            yMinus = -yfactor;\n            //正值\n            yPlus = yfactor;\n            \n            xMinus = yMinus*_imageWidth/(_imageHeight+tempValue*kStretchMax_h)/screenRatio;\n            xPlus = xPlus*_imageWidth/(_imageHeight+tempValue*kStretchMax_h)/screenRatio;\n            \n            //原来的比例\n            float originX = yMinus*imageRatio/screenRatio;\n            \n            x1 = xMinus;\n            y1 = yMinus;\n            tx1 = 0;\n            ty1 = 0;\n            \n            x2 = xPlus;\n            y2 = yMinus;\n            tx2 = 1;\n            ty2 = 0;\n            \n            //原来的高\n            CGFloat h = ABS(1-2*_maxValue + 1) *xMinus/originX;\n            \n            x3 = xMinus;\n            y3 = -(1.0-h);\n            tx3 = 0;\n            ty3 = (1-_maxValue);\n            \n            x4 = xPlus;\n            y4 = -(1.0-h);\n            tx4 = 1;\n            ty4 = (1-_maxValue);\n            \n            \n            x5 = xMinus;\n            y5 = (1-2*_minValue*xMinus/originX);\n            tx5 = 0;\n            ty5 = (1-_minValue);\n            \n            \n            x6 = xPlus;\n            y6 = (1-2*_minValue*xMinus/originX);\n            tx6 = 1;\n            ty6 = (1-_minValue);\n            \n            x7 = xMinus;\n            y7 = yPlus;\n            tx7 = 0;\n            ty7 = 1;\n            \n            x8 = xPlus;\n            y8 = yPlus;\n            tx8 = 1;\n            ty8 = 1;\n            \n        }\n        else\n        {\n            //形变\n            CGFloat h = yPlus;\n            CGFloat value = (h - originY);\n            \n            x3 = xMinus;\n            y3 = (1-2*_maxValue)*originY-value;\n            tx3 = 0;\n            ty3 = (1-_maxValue);\n            \n            x4 = xPlus;\n            y4 = (1-2*_maxValue)*originY-value;\n            tx4 = 1;\n            ty4 = (1-_maxValue);\n            \n            x5 = xMinus;\n            y5 =  (1-2*_minValue)*originY+value;\n            tx5 = 0;\n            ty5 = (1-_minValue);\n            \n            x6 = xPlus;\n            y6 = (1-2*_minValue)*originY+value;\n            tx6 = 1;\n            ty6 = (1-_minValue);\n            \n            x7 = xMinus;\n            y7 = yPlus;\n            tx7 = 0;\n            ty7 = 1;\n            \n            x8 = xPlus;\n            y8 = yPlus;\n            tx8 = 1;\n            ty8 = 1;\n            \n            \n        }\n        \n        \n    }\n    else\n    {\n        \n        //高顶到边\n        xMinus = yMinus*_imageWidth/(_imageHeight+tempValue*kStretchMax_h)/screenRatio;\n        xPlus = yPlus*_imageWidth/(_imageHeight+tempValue*kStretchMax_h)/screenRatio;\n        //原来的比例\n        float originX = yMinus*imageRatio/screenRatio;\n        \n        x1 = xMinus;\n        y1 = yMinus;\n        tx1 = 0;\n        ty1 = 0;\n        \n        x2 = xPlus;\n        y2 = yMinus;\n        tx2 = 1;\n        ty2 = 0;\n        \n        //原来的高\n        CGFloat h = ABS(1-2*_maxValue + 1) *xMinus/originX;\n        \n        x3 = xMinus;\n        y3 = -(1.0-h);\n        tx3 = 0;\n        ty3 = (1-_maxValue);\n        \n        \n        x4 = xPlus;\n        y4 = -(1.0-h);\n        tx4 = 1;\n        ty4 = (1-_maxValue);\n        \n        \n        x5 = xMinus;\n        y5 = (1-2*_minValue*xMinus/originX);\n        tx5 = 0;\n        ty5 = (1-_minValue);\n        \n        \n   
     x6 = xPlus;\n        y6 = (1-2*_minValue*xMinus/originX);\n        tx6 = 1;\n        ty6 = (1-_minValue);\n        \n        x7 = xMinus;\n        y7 = yPlus;\n        tx7 = 0;\n        ty7 = 1;\n        \n        x8 = xPlus;\n        y8 = yPlus;\n        tx8 = 1;\n        ty8 = 1;\n    }\n    \n    \n    squareVertexes[0] = x1;\n    squareVertexes[1] = -y1;\n    \n    squareVertexes[2] = x2;\n    squareVertexes[3] = -y2;\n    \n    squareVertexes[4] = x3;\n    squareVertexes[5] = -y3;\n    \n    squareVertexes[6] = x4;\n    squareVertexes[7] = -y4;\n    \n    squareVertexes[8] = x5;\n    squareVertexes[9] = -y5;\n    \n    squareVertexes[10] = x6;\n    squareVertexes[11] = -y6;\n    \n    squareVertexes[12] = x7;\n    squareVertexes[13] = -y7;\n    \n    squareVertexes[14] = x8;\n    squareVertexes[15] = -y8;\n    \n    textureCoordinates[0] = tx1;\n    textureCoordinates[1] = 1-ty1;\n    \n    textureCoordinates[2] = tx2;\n    textureCoordinates[3] = 1-ty2;\n    \n    textureCoordinates[4] = tx3;\n    textureCoordinates[5] = 1-ty3;\n    \n    textureCoordinates[6] = tx4;\n    textureCoordinates[7] = 1-ty4;\n    \n    textureCoordinates[8] = tx5;\n    textureCoordinates[9] = 1-ty5;\n    \n    textureCoordinates[10] = tx6;\n    textureCoordinates[11] = 1-ty6;\n    \n    textureCoordinates[12] = tx7;\n    textureCoordinates[13] = 1-ty7;\n    \n    textureCoordinates[14] = tx8;\n    textureCoordinates[15] = 1-ty8;\n\n}\n\n#pragma mark 配置水平方向上面的顶点数据\n- (void )horizontalConfigVertex\n{\n    float imageRatio = (float)self.imageWidth/self.imageHeight;\n    float screenRatio = self.screenRatio;\n    float xfactor=1.0;\n    float yfactor=1.0;\n    \n    float tempValue = self.changValue;\n    //负值\n    float xMinus = -xfactor;\n    float yMinus = -yfactor;\n    \n    //正值\n    float xPlus = xfactor;\n    float yPlus = yfactor;\n    \n    float x1,x2,x3,x4,x5,x6,x7,x8 = 0.0;\n    float y1,y2,y3,y4,y5,y6,y7,y8 = 0.0;\n    float tx1,tx2,tx3,tx4,tx5,tx6,tx7,tx8 = 0.0;\n    float ty1,ty2,ty3,ty4,ty5,ty6,ty7,ty8 = 0.0;\n    \n    //压缩最大的值域区间的80%\n    float compressMaxValue = (_maxValue - _minValue)*_imageWidth*kCompress_ratio;\n    \n    if (imageRatio > screenRatio) {\n        \n        //宽顶到边\n        yMinus = xMinus*screenRatio/imageRatio;\n        yPlus = xPlus*screenRatio/imageRatio;\n                \n        //改变的比例\n        float neW_xMinus = yMinus*((self.imageWidth-compressMaxValue*tempValue)/self.imageHeight)/screenRatio;\n        //改变的值\n        float w = ABS(xMinus - neW_xMinus);\n        \n        x1 = xMinus+w;\n        y1 = yMinus;\n        tx1 = 0;\n        ty1 = 0;\n        \n        x2 = xMinus+w;\n        y2 = yPlus;\n        tx2 = 0;\n        ty2 = 1;\n        \n        x3 = (1-2*_minValue)/xMinus+w;\n        y3 = yMinus;\n        tx3 = _minValue;\n        ty3 = 0;\n        \n        x4 = (1-2*_minValue)/xMinus+w;\n        y4 = yPlus;\n        tx4 = _minValue;\n        ty4 = 1;\n        \n        x5 =  (1-2*_maxValue)/xMinus-w;\n        y5 = yMinus;\n        tx5 = _maxValue;\n        ty5 = 0;\n        \n        \n        x6 = (1-2*_maxValue)/xMinus-w;\n        y6 = yPlus;\n        tx6 = _maxValue;\n        ty6 = 1;\n        \n        x7 = xPlus-w;\n        y7 = yMinus;\n        tx7 = 1;\n        ty7 = 0;\n        \n        x8 = xPlus-w;\n        y8 = yPlus;\n        tx8 = 1;\n        ty8 = 1;\n    }\n    else\n    {\n        \n        \n        xMinus = yMinus*((_imageWidth+tempValue*compressMaxValue*-1)/_imageHeight)/screenRatio;\n        xPlus = 
yPlus*((_imageWidth+tempValue*compressMaxValue*-1)/_imageHeight)/screenRatio;\n        //        NSLog(@\"高顶到边\");\n        //原来的比例\n        float originX = yPlus*imageRatio/screenRatio;\n        float w = originX - xPlus;\n        //高大于宽\n        x1 = xMinus;\n        y1 = yMinus;\n        tx1 = 0;\n        ty1 = 0;\n        \n        x2 = xMinus;\n        y2 = yPlus;\n        tx2 = 0;\n        ty2 = 1;\n        \n        x3 = -(1-2*_minValue)*originX+w;\n        y3 = yMinus;\n        tx3 = _minValue;\n        ty3 = 0;\n        \n        x4 = -(1-2*_minValue)*originX+w;\n        y4 = yPlus;\n        tx4 = _minValue;\n        ty4 = 1;\n        \n        x5 = -(1-2*_maxValue)*originX-w;\n        y5 = yMinus;\n        tx5 = _maxValue;\n        ty5 = 0;\n        \n        x6 = -(1-2*_maxValue)*originX-w;\n        y6 = yPlus;\n        tx6 = _maxValue;\n        ty6 = 1;\n        \n        x7 = xPlus;\n        y7 = yMinus;\n        tx7 = 1;\n        ty7 = 0;\n        \n        x8 = xPlus;\n        y8 = yPlus;\n        tx8 = 1;\n        ty8 = 1;\n    }\n    \n    squareVertexes[0] = x1;\n    squareVertexes[1] = y1;\n    \n    squareVertexes[2] = x2;\n    squareVertexes[3] = y2;\n    \n    squareVertexes[4] = x3;\n    squareVertexes[5] = y3;\n    \n    squareVertexes[6] = x4;\n    squareVertexes[7] = y4;\n    \n    squareVertexes[8] = x5;\n    squareVertexes[9] = y5;\n    \n    squareVertexes[10] = x6;\n    squareVertexes[11] = y6;\n    \n    squareVertexes[12] = x7;\n    squareVertexes[13] = y7;\n    \n    squareVertexes[14] = x8;\n    squareVertexes[15] = y8;\n    \n    textureCoordinates[0] = tx1;\n    textureCoordinates[1] = ty1;\n    \n    textureCoordinates[2] = tx2;\n    textureCoordinates[3] = ty2;\n    \n    textureCoordinates[4] = tx3;\n    textureCoordinates[5] = ty3;\n    \n    textureCoordinates[6] = tx4;\n    textureCoordinates[7] = ty4;\n    \n    textureCoordinates[8] = tx5;\n    textureCoordinates[9] = ty5;\n    \n    textureCoordinates[10] = tx6;\n    textureCoordinates[11] = ty6;\n    \n    textureCoordinates[12] = tx7;\n    textureCoordinates[13] = ty7;\n    \n    textureCoordinates[14] = tx8;\n    textureCoordinates[15] = ty8;\n    \n}\n\n\n\n- (void)changeValue:(float)value\n{\n    self.changValue = value;\n    if (_type == 0)\n    {\n        //增高\n         [self verticalConfigVertex];\n    }\n    else\n    {\n        //瘦身\n         [self horizontalConfigVertex];\n    }\n}\n\n- (void)getVerticesAndTextureCoordinatesHandle:(GetVerticesAndTextureCoordinatesHandle)result\n{\n    \n    if (result) {\n        \n        NSMutableArray *squareVertexeArray = [NSMutableArray array];\n        NSMutableArray *textureCoordinatesArray = [NSMutableArray array];\n        \n        float changeValue = 0.0;\n        \n        if (_type==0)\n        {\n            NSLog(@\"增高\");\n            //增高\n            for (int i=0; i!=16; i++)\n            {\n                float value = squareVertexes[i];\n                float yNormaliValue = ABS(squareVertexes[1])*1.0;\n                if (i%2==0)\n                {\n                    //x 坐标\n                    if (i == 0 || i == 4 || i == 8 || i == 12) {\n                        value = -1.0;\n                    }\n                    else\n                    {\n                        value = 1.0;\n                    }\n                }\n                else\n                {\n                    //y坐标\n                    value = value/yNormaliValue;\n                }\n                squareVertexeArray[i] = [NSNumber 
numberWithDouble:value];\n            }\n            \n            \n            for (int i=0; i!=16; i++) {\n                float value = textureCoordinates[i];\n                textureCoordinatesArray[i] = [NSNumber numberWithDouble:value];\n            }\n\n            changeValue = kStretchMaxRatio*self.changValue;\n        }\n        else\n        {\n            //瘦身\n            NSLog(@\"瘦身\");\n            for (int i=0; i!=16; i++)\n            {\n                float value = squareVertexes[i];\n                float xNormaliValue = ABS(squareVertexes[0])*1.0;\n                float yNormaliValue = ABS(squareVertexes[1])*1.0;\n                if (i%2==0)\n                {\n                    //x坐标\n                    value = value/xNormaliValue;\n                }\n                else\n                {\n                    //y坐标\n                    value = value/yNormaliValue;\n                }\n                squareVertexeArray[i] = [NSNumber numberWithDouble:value];\n            }\n\n            \n            for (int i=0; i!=16; i++) {\n                float value = textureCoordinates[i];\n                textureCoordinatesArray[i] = [NSNumber numberWithDouble:value];\n            }\n            \n            float compressMaxValue = (_maxValue - _minValue)*_imageWidth*kCompress_ratio*self.changValue;\n            changeValue = compressMaxValue/_imageWidth*(-1.0);\n        }\n        \n        result(squareVertexeArray,textureCoordinatesArray,changeValue,_type);\n    }\n    \n}\n\n- (void)dealloc\n{\n    [[GPUImageContext sharedImageProcessingContext].framebufferCache purgeAllUnassignedFramebuffers];\n    NSLog(@\"GLImageShapeFilter---dealloc\");\n}\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageShapeHighDefinitionFilter.h",
    "content": "//\n//  GLImageShapeHighDefinitionFilter.h\n//  WEOpenGLKit\n//\n//  Created by 刘海东 on 2018/6/26.\n//  Copyright © 2018年 LHD. All rights reserved.\n//\n\n#import <GPURenderKit/GPURenderKit.h>\n//增高瘦身 处理的Filter\n\n@interface GLImageShapeHighDefinitionFilter : GPUImageFilter\n\n\n\n/** 顶点坐标 */\n@property (nonatomic, copy) NSArray *squareVertexeArray;\n/** 纹理坐标 */\n@property (nonatomic, copy) NSArray *textureCoordinateArray;\n/** 处理图片 */\n@property (nonatomic, copy) UIImage *originImage;\n\n\n\n/**\n 初始化GLImageShapeHighDefinitionFilter\n\n @param type 0增高 1瘦身\n @param changeValue 高或者宽的 形变值。\n @return GLImageShapeHighDefinitionFilter\n */\n- (instancetype)initWithType:(NSInteger)type changeValue:(float)changeValue;\n\n\n/** 获取处理后的图片 */\n- (UIImage *)getImageFromCurrentFramebuffer;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageShapeHighDefinitionFilter.m",
    "content": "//\n//  GLImageShapeHighDefinitionFilter.m\n//  WEOpenGLKit\n//\n//  Created by 刘海东 on 2018/6/26.\n//  Copyright © 2018年 LHD. All rights reserved.\n//\n\n#import \"GLImageShapeHighDefinitionFilter.h\"\n\n@interface GLImageShapeHighDefinitionFilter ()\n{\n    GLfloat squareVertexes[16];\n    GLfloat textureCoordinates[16];\n    \n}\n@property (nonatomic, assign) float changValue;\n@property (nonatomic, strong) GPUImagePicture *input;\n/** 改变的fboSize */\n@property (nonatomic, assign) CGSize fboSize;\n/** type  */\n@property (nonatomic, assign) NSInteger type;\n/** 形变值 */\n@property (nonatomic, assign) float value;\n\n\n@end\n\n\n@implementation GLImageShapeHighDefinitionFilter\n\n\n- (instancetype)initWithType:(NSInteger)type changeValue:(float)changeValue\n{\n    self = [super init];\n    if (self) {\n        _type = type;\n        _value = changeValue;\n    }\n    return self;\n}\n\n\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex\n{\n    \n    [self renderToTextureWithVertices:squareVertexes textureCoordinates:textureCoordinates];\n    [self informTargetsAboutNewFrameAtTime:frameTime];\n}\n\n\n- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;\n{\n    if (self.preventRendering)\n    {\n        [firstInputFramebuffer unlock];\n        return;\n    }\n    \n    [GPUImageContext setActiveShaderProgram:filterProgram];\n    \n    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];\n    [outputFramebuffer activateFramebuffer];\n    if (usingNextFrameForImageCapture)\n    {\n        [outputFramebuffer lock];\n    }\n    \n    [self setUniformsForProgramAtIndex:0];\n    \n    glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);\n    glClear(GL_COLOR_BUFFER_BIT);\n    \n    glActiveTexture(GL_TEXTURE2);\n    glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);\n    \n    glUniform1i(filterInputTextureUniform, 2);\n    \n    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);\n    glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);\n    \n    glDrawArrays(GL_TRIANGLE_STRIP, 0, 8);\n    //    glFlush();\n    [firstInputFramebuffer unlock];\n    \n    if (usingNextFrameForImageCapture)\n    {\n        dispatch_semaphore_signal(imageCaptureSemaphore);\n    }\n}\n\n\n- (CGSize)sizeOfFBO\n{\n    return _fboSize;\n}\n\n#pragma mark setFunc\n- (void)setOriginImage:(UIImage *)originImage\n{\n    _originImage = originImage;\n    [self input];\n    \n    GLint maxTextureSize = [GPUImageContext maximumTextureSizeForThisDevice];\n        \n    //计算FBOSize\n    if (_type == 0)\n    {\n        //增高\n        _fboSize = CGSizeMake(originImage.size.width, originImage.size.height*(1.0+_value));\n    }\n    else\n    {\n        //瘦身\n        _fboSize = CGSizeMake(originImage.size.width*(1.0+_value), originImage.size.height);\n    }\n    \n    if ( (_fboSize.width > maxTextureSize) || (_fboSize.height > maxTextureSize) )\n    {\n        \n        CGSize adjustedSize;\n        if (_fboSize.width > _fboSize.height)\n        {\n            adjustedSize.width = (CGFloat)maxTextureSize;\n            adjustedSize.height = ((CGFloat)maxTextureSize / _fboSize.width) * _fboSize.height;\n        }\n        else\n        {\n            adjustedSize.height = (CGFloat)maxTextureSize;\n     
       adjustedSize.width = ((CGFloat)maxTextureSize / _fboSize.height) * _fboSize.width;\n        }\n        \n        _fboSize = adjustedSize;\n    }\n\n}\n\n- (GPUImagePicture *)input\n{\n    if (!_input)\n    {\n        _input = [[GPUImagePicture alloc]initWithImage:_originImage];\n        [_input addTarget:self];\n    }\n    return _input;\n}\n\n\n- (void)setSquareVertexeArray:(NSArray *)squareVertexeArray\n{\n    _squareVertexeArray = squareVertexeArray;\n    for (int i=0; i!=squareVertexeArray.count; i++) {\n        squareVertexes[i] = [self getFloat:squareVertexeArray[i]];\n    }\n    \n}\n\n- (void)setTextureCoordinateArray:(NSArray *)textureCoordinateArray\n{\n    _textureCoordinateArray = textureCoordinateArray;\n    \n    for (int i=0; i!=textureCoordinateArray.count; i++) {\n        textureCoordinates[i] = [self getFloat:textureCoordinateArray[i]];\n    }\n}\n\n- (float)getFloat:(NSNumber *)number\n{\n    return number.floatValue;\n}\n\n\n- (UIImage *)getImageFromCurrentFramebuffer\n{\n\n    [self.input processImage];\n    [self useNextFrameForImageCapture];\n    UIImage *image =  [self imageFromCurrentFramebuffer];\n\n    [self removeOutputFramebuffer];\n    [self removeAllTargets];\n    if (_input) {\n        [_input removeTarget:self];\n        [_input removeAllTargets];\n        [_input removeOutputFramebuffer];\n        _input = nil;\n    }\n    return image;\n}\n\n- (void)dealloc\n{\n    [[GPUImageContext sharedImageProcessingContext].framebufferCache purgeAllUnassignedFramebuffers];\n    NSLog(@\"GLImageShapeHighDefinitionFilter---dealloc\");\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageStickerFilter.h",
    "content": "//\n//  GLImageStickerFilter.h\n//  WEOpenGLKit\n//\n//  Created by LHD on 2018/6/4.\n//  Copyright © 2018年 LHD. All rights reserved.\n//\n\n#import \"GPUImageTwoInputFilter.h\"\n\ntypedef NS_ENUM(NSInteger, GL_IMAGE_BLEND_MODE)\n{\n    GL_IMAGE_BLEND_MODE_NORMAL,\n};\n\n@interface GLImageStickerFilter : GPUImageTwoInputFilter\n{\n    CGSize firstInputSize, secondInputSize;\n    GLint stickerSizeUniform, stickerCenterUniform, stickerThetaUniform, stickerAlphaUniform, stickerBlendModeUniform, aspectRatioUniform, mirrorModeUniform;\n}\n\n/** 贴纸大小 */\n@property (nonatomic, assign) CGSize size;\n/** 中心坐标 */\n@property (nonatomic, assign) CGPoint center;\n/** 旋转角 */\n@property (nonatomic, assign) CGFloat theta;\n/** 透明度 */\n@property (nonatomic, assign) CGFloat alpha;\n/** 混合模式 */\n@property (nonatomic, assign) int blendMode;\n/** 镜像模式 1 开启镜像 0 不开启镜像 */\n@property (nonatomic, assign) int mirrorMode;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageStickerFilter.m",
    "content": "//\n//  GLImageStickerFilter.m\n//  WEOpenGLKit\n//\n//  Created by LHD on 2018/6/4.\n//  Copyright © 2018年 LHD. All rights reserved.\n//\n\n#import \"GLImageStickerFilter.h\"\n\nNSString *const kGLImageStickerFragmentShaderString = SHADER_STRING\n(\n precision mediump float;\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2; // sticker texture;\n \n uniform vec2  size;\n uniform vec2  center;\n uniform float theta;\n uniform float alpha;\n uniform int blendMode;\n uniform int mirrorMode;\n uniform float aspectRatio;\n uniform float s;\n uniform float c;\n \n \n highp float lum(lowp vec3 c)\n {\n     return dot(c, vec3(0.3, 0.59, 0.11));\n }\n \n lowp vec3 clipcolor(lowp vec3 c)\n {\n     highp float l = lum(c);\n     lowp float n = min(min(c.r, c.g), c.b);\n     lowp float x = max(max(c.r, c.g), c.b);\n     \n     if (n < 0.0)\n     {\n         c.r = l + ((c.r - l) * l) / (l - n);\n         c.g = l + ((c.g - l) * l) / (l - n);\n         c.b = l + ((c.b - l) * l) / (l - n);\n     }\n     \n     if (x > 1.0)\n     {\n         c.r = l + ((c.r - l) * (1.0 - l)) / (x - l);\n         c.g = l + ((c.g - l) * (1.0 - l)) / (x - l);\n         c.b = l + ((c.b - l) * (1.0 - l)) / (x - l);\n     }\n     \n     return c;\n }\n \n lowp vec3 setlum(lowp vec3 c, highp float l)\n {\n     highp float d = l - lum(c);\n     c = c + vec3(d);\n     \n     return clipcolor(c);\n }\n \n highp float sat(lowp vec3 c)\n {\n     lowp float n = min(min(c.r, c.g), c.b);\n     lowp float x = max(max(c.r, c.g), c.b);\n     \n     return x - n;\n }\n \n lowp vec3 setsat(lowp vec3 c, highp float s)\n {\n     float minbase = min(min(c.r, c.g), c.b);\n     float sbase = sat(c);\n     vec3 color;\n     \n     if (sbase > 0.0)\n     {\n         color = (c - minbase) * s / sbase;\n     }\n     else\n     {\n         color = vec3(0.0);\n     }\n     \n     return color;\n }\n \n // T = F(S, D); // S -> blend, D -> base, T -> result\n vec4 blend(vec4 S, vec4 D)\n {\n     vec3 T;\n     \n     if (blendMode < 1 || blendMode > 27)\n     {\n         // 其它情况显示base\n         T = D.rgb;\n     }\n     else if (blendMode == 1)\n     {\n         // normal(正常)\n         T = S.rgb;\n         \n     }\n     else if (blendMode == 2)\n     {\n         // dissolve(溶解)\n         // 未实现\n         T = D.rgb;\n     }\n     else if (blendMode == 3)\n     {\n         // darken(变暗)\n         T = min(D.rgb, S.rgb);\n     }\n     else if (blendMode == 4)\n     {\n         // multiply(正片叠底)\n         T = D.rgb * S.rgb;\n     }\n     else if (blendMode == 5)\n     {\n         // color burn(颜色加深)\n         T = 1.0 - min((1.0 - D.rgb) / S.rgb, 1.0);\n     }\n     else if (blendMode == 6)\n     {\n         // linear burn(线性加深)\n         vec3 white = vec3(1.0, 1.0, 1.0);\n         T = S.rgb + D.rgb - white;\n     }\n     else if (blendMode == 7)\n     {\n         // darker color(深色)\n         T = dot(D.rgb, vec3(0.299, 0.587, 0.114)) < dot(S.rgb, vec3(0.299, 0.587, 0.114)) ? 
D.rgb : S.rgb;\n     }\n     else if (blendMode == 8)\n     {\n         // lighten(变亮)\n         T = max(D.rgb, S.rgb);\n     }\n     else if (blendMode == 9)\n     {\n         // screen(滤色)\n         vec3 white = vec3(1.0, 1.0, 1.0);\n         T = white - (white - D.rgb) * (white - S.rgb);\n     }\n     else if (blendMode == 10)\n     {\n         // color dodge(颜色减淡)\n         T = min(D.rgb / (1.0 - S.rgb), 1.0);\n     }\n     else if (blendMode == 11)\n     {\n         // linear dodge (线性减淡)\n         vec3 white = vec3(1.0, 1.0, 1.0);\n         T = D.rgb + S.rgb;\n         T = min(white, T);\n     }\n     else if (blendMode == 12)\n     {\n         // lighter color(浅色)\n         T = dot(D.rgb, vec3(0.299, 0.587, 0.114)) > dot(S.rgb, vec3(0.299, 0.587, 0.114)) ? D.rgb : S.rgb;\n     }\n     else if (blendMode == 13)\n     {\n         // overlay(叠加)\n         T = 2.0 * D.rgb * S.rgb;\n         \n         if (D.r >= 0.5)\n         {\n             T.r = 1.0 - 2.0 * (1.0 - D.r) * (1.0 - S.r);\n         }\n         \n         if (D.g >= 0.5)\n         {\n             T.g = 1.0 - 2.0 * (1.0 - D.g) * (1.0 - S.g);\n         }\n         \n         if (D.b >= 0.5)\n         {\n             T.b = 1.0 - 2.0 * (1.0 - D.b) * (1.0 - S.b);\n         }\n     }\n     else if (blendMode == 14)\n     {\n         // soft light(柔光)\n         vec3 white = vec3(1.0, 1.0, 1.0);\n         T = 2.0 * D.rgb * S.rgb + D.rgb * D.rgb * (white - 2.0 * S.rgb);\n         \n         if (S.r >= 0.5)\n         {\n             T.r = 2.0 * D.r * (1.0 - S.r) + (2.0 * S.r - 1.0) * sqrt(D.r);\n         }\n         \n         if (S.g >= 0.5)\n         {\n             T.g = 2.0 * D.g * (1.0 - S.g) + (2.0 * S.g - 1.0) * sqrt(D.g);\n         }\n         \n         if (S.b >= 0.5)\n         {\n             T.b = 2.0 * D.b * (1.0 - S.b) + (2.0 * S.b - 1.0) * sqrt(D.b);\n         }\n     }\n     else if (blendMode == 15)\n     {\n         // hard light(强光)\n         T = 2.0 * D.rgb * S.rgb;\n         \n         if (S.r >= 0.5)\n         {\n             T.r = 1.0 - 2.0 * (1.0 - D.r) * (1.0 - S.r);\n         }\n         \n         if (S.g >= 0.5)\n         {\n             T.g = 1.0 - 2.0 * (1.0 - D.g) * (1.0 - S.g);\n         }\n         \n         if (S.b >= 0.5)\n         {\n             T.b = 1.0 - 2.0 * (1.0 - D.b) * (1.0 - S.b);\n         }\n         \n     }\n     else if (blendMode == 16)\n     {\n         // vivid light(亮光)\n         T.r = S.r < 0.5 ? 1.0 - (1.0 - D.r) / (S.r * 2.0) : D.r / (1.0 - S.r) * 0.5;\n         T.g = S.g < 0.5 ? 1.0 - (1.0 - D.g) / (S.g * 2.0) : D.g / (1.0 - S.g) * 0.5;\n         T.b = S.b < 0.5 ? 1.0 - (1.0 - D.b) / (S.b * 2.0) : D.b / (1.0 - S.b) * 0.5;\n         T = clamp(T, 0.0, 1.0);\n     }\n     else if (blendMode == 17)\n     {\n         // linear light(线性光)\n         vec3 white = vec3(1.0, 1.0, 1.0);\n         T = 2.0 * S.rgb + D.rgb - white;\n     }\n     else if (blendMode == 18)\n     {\n         // pin light(点光)\n         T.r = S.r < 0.5 ? min(D.r, 2.0 * S.r) : max(D.r, 2.0 * S.r - 1.0);\n         T.g = S.g < 0.5 ? min(D.g, 2.0 * S.g) : max(D.g, 2.0 * S.g - 1.0);\n         T.b = S.b < 0.5 ? 
min(D.b, 2.0 * S.b) : max(D.b, 2.0 * S.b - 1.0);\n     }\n     else if (blendMode == 19)\n     {\n         // hard mix(实色混合)\n         T = floor(S.rgb + D.rgb);\n     }\n     else if (blendMode == 20)\n     {\n         // diff(差值)\n         T = abs(D.rgb - S.rgb);\n     }\n     else if (blendMode == 21)\n     {\n         // exclusion(排除)\n         T = S.rgb + D.rgb - 2.0 * S.rgb * D.rgb;\n     }\n     else if (blendMode == 22)\n     {\n         // substract(减去)\n         vec3 black = vec3(0.0, 0.0, 0.0);\n         T = D.rgb - S.rgb;\n         T = max(black, T);\n     }\n     else if (blendMode == 23)\n     {\n         // divide(划分)\n         vec3 white = vec3(1.0, 1.0, 1.0);\n         T = white;\n         \n         if (S.r > 0.0)\n         {\n             T.r = D.r / S.r;\n         }\n         \n         if (S.g > 0.0)\n         {\n             T.g = D.g / S.g;\n         }\n         \n         if (S.b > 0.0)\n         {\n             T.b = D.b / S.b;\n         }\n         \n         T = min(white, T);\n     }\n     else if (blendMode == 24)\n     {\n         // hue(色相)\n         T = setlum(setsat(S.rgb, sat(D.rgb)), lum(D.rgb));\n     }\n     else if (blendMode == 25)\n     {\n         // saturation(饱和度)\n         T = setlum(setsat(D.rgb, sat(S.rgb)), lum(D.rgb));\n     }\n     else if (blendMode == 26)\n     {\n         // color(颜色)\n         T = S.rgb + dot(D.rgb, vec3(0.299, 0.587, 0.114)) - dot(S.rgb, vec3(0.299, 0.587, 0.114));\n     }\n     else if (blendMode == 27)\n     {\n         // luminosity(明度)\n         T = setlum(D.rgb, lum(S.rgb));\n     }\n     \n     vec4 resultColor = vec4(T, S.a);\n     return resultColor;\n }\n \n void main()\n {\n     lowp vec4 color = texture2D(inputImageTexture, textureCoordinate);\n     \n     vec2 coordinateToUse = vec2(textureCoordinate.x * aspectRatio, textureCoordinate.y);\n     vec2 centerToUse = vec2(center.x * aspectRatio, center.y);\n     vec2 sizeToUse = vec2(size.x * aspectRatio, size.y);\n     mat2 rotateMatrix = mat2(c, s, -s, c);\n     \n     coordinateToUse = (coordinateToUse - centerToUse) * rotateMatrix;\n     coordinateToUse = vec2(coordinateToUse.x / sizeToUse.x, coordinateToUse.y / sizeToUse.y) + vec2(0.5);\n     \n     if (all(greaterThanEqual(coordinateToUse, vec2(0.0))) &&\n         all(lessThanEqual(coordinateToUse, vec2(1.0))))\n     {\n         if (mirrorMode == 1)\n         {\n             //镜像\n             coordinateToUse = vec2(1.0 - coordinateToUse.x, coordinateToUse.y);\n         }\n         \n         lowp vec4 S = texture2D(inputImageTexture2, coordinateToUse);\n         lowp vec4 D = color;\n         \n         vec3 S1 = S.a == 0.0 ? 
S.rgb : S.rgb / S.a;\n         vec4 resultColor = blend(vec4(S1, S.a), D);\n         float opacity = alpha * S.a;\n         \n         resultColor = vec4(resultColor.rgb * opacity + D.rgb * (1.0 - opacity), 1.0);\n         \n         gl_FragColor = resultColor;\n     }\n     else\n     {\n         gl_FragColor = color;\n     }\n }\n);\n\n\n@interface GLImageStickerFilter ()\n{\n    GLint sUniform, cUniform;\n}\n\n/** 父视图的宽高比 */\n@property (nonatomic, assign) float aspectRatio;\n\n@end\n\n@implementation GLImageStickerFilter\n\n- (instancetype)init\n{\n    self = [super initWithFragmentShaderFromString:kGLImageStickerFragmentShaderString];\n    \n    if (self)\n    {\n        aspectRatioUniform = [filterProgram uniformIndex:@\"aspectRatio\"];\n        stickerSizeUniform = [filterProgram uniformIndex:@\"size\"];\n        stickerCenterUniform = [filterProgram uniformIndex:@\"center\"];\n        stickerThetaUniform = [filterProgram uniformIndex:@\"theta\"];\n        stickerAlphaUniform = [filterProgram uniformIndex:@\"alpha\"];\n        stickerBlendModeUniform = [filterProgram uniformIndex:@\"blendMode\"];\n        mirrorModeUniform = [filterProgram uniformIndex:@\"mirrorMode\"];\n        sUniform = [filterProgram uniformIndex:@\"s\"];\n        cUniform = [filterProgram uniformIndex:@\"c\"];\n        self.size = CGSizeZero;\n        self.center = CGPointMake(0.5, 0.5);\n        self.theta = 0.0;\n        self.aspectRatio = 1.0;\n        self.alpha = 1.0;\n        self.blendMode = 1;\n        self.mirrorMode = 0;\n    }\n    \n    return self;\n}\n\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex\n{\n    [super setInputSize:newSize atIndex:textureIndex];\n    \n    if (textureIndex == 0)\n    {\n        self.aspectRatio = newSize.width / newSize.height;\n        firstInputSize = newSize;\n    }\n    else if (textureIndex == 1)\n    {\n        secondInputSize = newSize;\n    }\n    \n    [self updateSize];\n}\n\n- (void)updateSize\n{\n    if ( CGSizeEqualToSize(_size, CGSizeZero) &&\n        !CGSizeEqualToSize(firstInputSize, CGSizeZero) &&\n        !CGSizeEqualToSize(secondInputSize, CGSizeZero))\n    {\n        [self setSize:CGSizeMake(secondInputSize.width / firstInputSize.width, secondInputSize.height / firstInputSize.height) forUniform:stickerSizeUniform program:filterProgram];\n    }\n}\n\n- (void)setSize:(CGSize)size\n{\n    _size = size;\n    [self setSize:self.size forUniform:stickerSizeUniform program:filterProgram];\n}\n\n- (void)setCenter:(CGPoint)center\n{\n    _center = center;\n    [self setPoint:self.center forUniform:stickerCenterUniform program:filterProgram];\n}\n\n- (void)setTheta:(CGFloat)theta\n{\n    _theta = theta;\n    [self setFloat:theta forUniform:stickerThetaUniform program:filterProgram];\n    [self setFloat:sinf(theta) forUniform:sUniform program:filterProgram];\n    [self setFloat:cosf(theta) forUniform:cUniform program:filterProgram];\n}\n\n- (void)setAspectRatio:(float)aspectRatio\n{\n    _aspectRatio = aspectRatio;\n    [self setFloat:aspectRatio forUniform:aspectRatioUniform program:filterProgram];\n}\n\n- (void)setAlpha:(CGFloat)alpha\n{\n    _alpha = alpha;\n    [self setFloat:alpha forUniform:stickerAlphaUniform program:filterProgram];\n}\n\n- (void)setBlendMode:(int)blendMode\n{\n    _blendMode = blendMode;\n    [self setInteger:blendMode forUniform:stickerBlendModeUniform program:filterProgram];\n}\n\n- (void)setMirrorMode:(int)mirrorMode\n{\n    _mirrorMode = mirrorMode;\n    [self setInteger:mirrorMode forUniform:mirrorModeUniform 
program:filterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageTwoLutFilter.h",
    "content": "//\n//  GLImageTwoLutFilter.h\n//  WEOpenGLKit\n//\n//  Created by LHD on 2018/6/30.\n//  Copyright © 2018年 LHD. All rights reserved.\n//\n\n#import \"GPUImageFilterGroup.h\"\n#import \"GLImageLutFilter.h\"\n\n@interface GLImageTwoLutFilter : GPUImageFilterGroup\n{\n    GLImageLutFilter *positiveLutFilter, *negativeLutFilter;\n}\n\n@property (nonatomic, assign) CGFloat intensity;\n\n/** 正向 Lut Image */\n- (void)setPositiveLutImage:(UIImage *)lutImage;\n/** 负向 Lut Image */\n- (void)setNegativeLutImage:(UIImage *)lutImage;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageTwoLutFilter.m",
    "content": "//\n//  GLImageTwoLutFilter.m\n//  WEOpenGLKit\n//\n//  Created by LHD on 2018/6/30.\n//  Copyright © 2018年 LHD. All rights reserved.\n//\n\n#import \"GLImageTwoLutFilter.h\"\n\n@implementation GLImageTwoLutFilter\n\n- (instancetype)init\n{\n    self = [super init];\n    if (self)\n    {\n        positiveLutFilter = [[GLImageLutFilter alloc] init];\n        negativeLutFilter = [[GLImageLutFilter alloc] init];\n        [positiveLutFilter addTarget:negativeLutFilter];\n        self.initialFilters = @[positiveLutFilter];\n        self.terminalFilter = negativeLutFilter;\n        self.intensity = 0.0;\n    }\n    return self;\n}\n\n- (void)setPositiveLutImage:(UIImage *)lutImage\n{\n    [positiveLutFilter setLutImage:lutImage];\n}\n\n- (void)setNegativeLutImage:(UIImage *)lutImage\n{\n    [negativeLutFilter setLutImage:lutImage];\n}\n\n- (void)setIntensity:(CGFloat)intensity\n{\n    _intensity = intensity;\n    \n    if (intensity >= 0.0)\n    {\n        positiveLutFilter.intensity = intensity;\n        negativeLutFilter.intensity = 0.0;\n    }\n    else\n    {\n        positiveLutFilter.intensity = 0.0;\n        negativeLutFilter.intensity = -intensity;\n    }\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/DouYinEffect/GLImageBlurSnapViewFilter.h",
    "content": "//\n//  GLImageBlurSnapViewFilter.h\n//  GPURenderKit\n//\n//  Created by 刘海东 on 2019/4/19.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import <GPURenderKit/GPURenderKit.h>\n\nNS_ASSUME_NONNULL_BEGIN\n\n@interface GLImageBlurSnapViewFilter : GPUImageTwoInputFilter\n{\n    GLuint blurOffsetYUniform,blurTextureScalUniform;\n    \n}\n\n\n/** 模糊位置【0.0-0.5】 default:0.25*/\n@property (nonatomic, assign) float blurOffsetY;\n\n/** 底部纹理缩放系数 【1.0- 5.0】 */\n@property (nonatomic, assign) float blurTextureScal;\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/DouYinEffect/GLImageBlurSnapViewFilter.m",
    "content": "//\n//  GLImageBlurSnapViewFilter.m\n//  GPURenderKit\n//\n//  Created by 刘海东 on 2019/4/19.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import \"GLImageBlurSnapViewFilter.h\"\n\nNSString *const kGLImageBlurSnapViewFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n varying vec2 textureCoordinate;\n uniform float blurOffsetY;//y的偏移值\n uniform float blurTextureScal;//底部模糊视频的缩放值\n\n uniform sampler2D inputImageTexture; //视频\n uniform sampler2D inputImageTexture2; //模糊视频\n\n \n void main()\n {\n     \n     highp vec2 uv = textureCoordinate;\n     \n     vec4 outPutcolor = vec4(0.0,0.0,0.0,1.0);\n     \n     //这里是中间显示不带任何效果的的Texture，根据y方向的偏移进行区域选择\n     if (uv.y >= blurOffsetY && uv.y <= 1.0 - blurOffsetY) {\n         //原视频\n         outPutcolor = texture2D(inputImageTexture, uv);\n     } else {\n         \n         //uv坐标的中心点并非是（0.0，0.0），所以这里进行一次偏移，后面在偏移回来就可以了\n         vec2 center = vec2(0.5, 0.5);\n         //新的uv坐标 uv - center\n         uv = uv -  center;\n         //纹理放大缩小\n         uv = uv / blurTextureScal;\n         uv = uv + center;\n         outPutcolor = texture2D(inputImageTexture2, uv);\n     }     \n     \n     gl_FragColor = outPutcolor;\n }\n );\n\n\n\n@interface GLImageBlurSnapViewFilter ()\n\n@end\n\n@implementation GLImageBlurSnapViewFilter\n\n- (instancetype)init\n{\n    self = [super initWithFragmentShaderFromString:kGLImageBlurSnapViewFragmentShaderString];\n    if (self) {\n        \n        \n        blurOffsetYUniform = [filterProgram uniformIndex:@\"blurOffsetY\"];\n        blurTextureScalUniform = [filterProgram uniformIndex:@\"blurTextureScal\"];\n\n        self.blurOffsetY = 0.25;\n        self.blurTextureScal = 2.0;\n\n    }\n    return self;\n}\n\n- (void)setBlurOffsetY:(float)blurOffsetY{\n    \n    _blurOffsetY = blurOffsetY;\n    if (blurOffsetY<0.0 || blurOffsetY>0.5) {\n        _blurOffsetY = 0.25;\n    }\n    [self setFloat:_blurOffsetY forUniform:blurOffsetYUniform program:filterProgram];\n}\n\n- (void)setBlurTextureScal:(float)blurTextureScal{\n    \n    _blurTextureScal = blurTextureScal;\n    if (blurTextureScal<1.0 || blurTextureScal>5.0) {\n        _blurTextureScal = 2.0;\n    }\n    [self setFloat:_blurTextureScal forUniform:blurTextureScalUniform program:filterProgram];\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/DouYinEffect/GLImageBlurSnapViewFilterGroup.h",
    "content": "//\n//  GLImageBlurSnapViewFilterGroup.h\n//  GPURenderKit\n//\n//  Created by 刘海东 on 2019/4/19.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import <GPURenderKit/GPURenderKit.h>\n\nNS_ASSUME_NONNULL_BEGIN\n\n@interface GLImageBlurSnapViewFilterGroup : GPUImageFilterGroup\n\n/** 模糊半径【0.0 - 30.0】default：20.0*/\n@property (nonatomic, assign) float blurRadiusInPixels;\n\n/** 模糊位置【0.0-0.5】 default:0.25 */\n@property (nonatomic, assign) float blurOffsetY;\n\n/** 底部纹理缩放系数 【1.0- 5.0】 default:2.0 */\n@property (nonatomic, assign) float blurTextureScal;\n\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/DouYinEffect/GLImageBlurSnapViewFilterGroup.m",
    "content": "//\n//  GLImageBlurSnapViewFilterGroup.m\n//  GPURenderKit\n//\n//  Created by 刘海东 on 2019/4/19.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import \"GLImageBlurSnapViewFilterGroup.h\"\n\n@interface GLImageBlurSnapViewFilterGroup ()\n\n@property (nonatomic, strong) GPUImageGaussianBlurFilter *gaussianBlurFilter;\n@property (nonatomic, strong) GLImageBlurSnapViewFilter *blurSnapViewFilter;\n\n\n@end\n\n\n@implementation GLImageBlurSnapViewFilterGroup\n\n- (instancetype)init\n{\n    self = [super init];\n    if (self) {\n        \n        self.gaussianBlurFilter = [[GPUImageGaussianBlurFilter alloc]init];\n        self.gaussianBlurFilter.blurRadiusInPixels = 20.0;\n        [self addFilter:self.gaussianBlurFilter];\n        \n        self.blurSnapViewFilter = [[GLImageBlurSnapViewFilter alloc]init];\n        [self addFilter:self.blurSnapViewFilter];\n        \n        \n        [self.gaussianBlurFilter addTarget:self.blurSnapViewFilter atTextureLocation:1];\n        \n        \n        self.initialFilters = [NSArray arrayWithObjects:self.gaussianBlurFilter, self.blurSnapViewFilter, nil];\n        self.terminalFilter = self.blurSnapViewFilter;\n\n    }\n    return self;\n}\n\n- (void)setBlurRadiusInPixels:(float)blurRadiusInPixels{\n    \n    _blurRadiusInPixels = blurRadiusInPixels;\n    if (blurRadiusInPixels<0.0 || blurRadiusInPixels>30.0) {\n        _blurRadiusInPixels = 30.0;\n    }\n    self.gaussianBlurFilter.blurRadiusInPixels = blurRadiusInPixels;\n}\n\n- (void)setBlurTextureScal:(float)blurTextureScal{\n    self.blurSnapViewFilter.blurTextureScal = blurTextureScal;\n}\n\n- (void)setBlurOffsetY:(float)blurOffsetY{\n    self.blurSnapViewFilter.blurOffsetY = blurOffsetY;\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/DouYinEffect/GLImageFourPointsMirrorFilter.h",
    "content": "//\n//  GLImageFourPointsMirrorFilter.h\n//  WEOpenGLKit\n//\n//  Created by 刘海东 on 2019/2/20.\n//  Copyright © 2019 Leo. All rights reserved.\n//\n\n#import <GPURenderKit/GPURenderKit.h>\n\nNS_ASSUME_NONNULL_BEGIN\n\n@interface GLImageFourPointsMirrorFilter : GPUImageFilter\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/DouYinEffect/GLImageFourPointsMirrorFilter.m",
    "content": "//\n//  GLImageFourPointsMirrorFilter.m\n//  WEOpenGLKit\n//\n//  Created by 刘海东 on 2019/2/20.\n//  Copyright © 2019 Leo. All rights reserved.\n//\n\n#import \"GLImageFourPointsMirrorFilter.h\"\n\nNSString *const kGLImagePointsMirrorFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n varying vec2 textureCoordinate;\n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     \n     highp vec2 uv = textureCoordinate;\n     \n     if (uv.x <= 0.5) {\n         uv.x = uv.x * 2.0;\n     } else {\n         uv.x = (uv.x - 0.5) * 2.0;\n     }\n     \n     if (uv.y <= 0.5) {\n         uv.y = uv.y * 2.0;\n     } else {\n         uv.y = (uv.y - 0.5) * 2.0;\n     }\n     \n     gl_FragColor = texture2D(inputImageTexture, uv);\n }\n );\n\n\n\n\n\n@implementation GLImageFourPointsMirrorFilter\n\n\n- (instancetype)init\n{\n    self = [super initWithFragmentShaderFromString:kGLImagePointsMirrorFragmentShaderString];\n    if (self) {\n        \n    }\n    return self;\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/DouYinEffect/GLImageGlitchEffectGridFilter.h",
    "content": "//\n//  GLImageGlitchEffectGridFilter.h\n//  WEOpenGLKit\n//\n//  Created by 刘海东 on 2018/8/31.\n//  Copyright © 2018年 LHD. All rights reserved.\n//\n\n#import <GPURenderKit/GPURenderKit.h>\n\n//故障效果---格子故障\n@interface GLImageGlitchEffectGridFilter : GPUImageTwoInputFilter\n{\n    GLint intensityUniform;\n    GLint colorIntensityUniform;\n    GLint blendModeUniform;\n    GPUImagePicture *plaidImageSource;\n}\n\n@property (nonatomic, assign) float intensity;\n@property (nonatomic, assign) float colorIntensity;\n@property (nonatomic, assign) int blendMode;\n- (void)setPlaidImage:(UIImage *)plaidImage;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/DouYinEffect/GLImageGlitchEffectGridFilter.m",
    "content": "//\n//  GLImageGlitchEffectGridFilter.m\n//  WEOpenGLKit\n//\n//  Created by 刘海东 on 2018/8/31.\n//  Copyright © 2018年 LHD. All rights reserved.\n//\n\n#import \"GLImageGlitchEffectGridFilter.h\"\n\n@implementation GLImageGlitchEffectGridFilter\n\nNSString *const kGLImageGlitchEffectGridFilterFragmentShaderString = SHADER_STRING\n(\n precision lowp float;\n varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture; // 原图的纹理\n uniform sampler2D inputImageTexture2; //\n\n uniform float colorFloat;\n uniform float intensity;\n uniform int blendMode;\n\n \n //返回小数部分\n float fracFunc (float x)\n{\n    return x - floor(x);\n}\n \n //两者之间插值\n float lerpFunc(float a, float b, float w) {\n     return a + w*(b-a);\n }\n \n highp float lum(lowp vec3 c) {\n     return dot(c, vec3(0.3, 0.59, 0.11));\n }\n \n lowp vec3 clipcolor(lowp vec3 c) {\n     highp float l = lum(c);\n     lowp float n = min(min(c.r, c.g), c.b);\n     lowp float x = max(max(c.r, c.g), c.b);\n     if (n < 0.0) {\n         c.r = l + ((c.r - l) * l) / (l - n);\n         c.g = l + ((c.g - l) * l) / (l - n);\n         c.b = l + ((c.b - l) * l) / (l - n);\n     }\n     if (x > 1.0) {\n         c.r = l + ((c.r - l) * (1.0 - l)) / (x - l);\n         c.g = l + ((c.g - l) * (1.0 - l)) / (x - l);\n         c.b = l + ((c.b - l) * (1.0 - l)) / (x - l);\n     }\n     return c;\n }\n \n lowp vec3 setlum(lowp vec3 c, highp float l) {\n     highp float d = l - lum(c);\n     c = c + vec3(d);\n     return clipcolor(c);\n }\n \n highp float sat(lowp vec3 c) {\n     lowp float n = min(min(c.r, c.g), c.b);\n     lowp float x = max(max(c.r, c.g), c.b);\n     return x - n;\n }\n \n lowp vec3 setsat(lowp vec3 c, highp float s) {\n     float minbase = min(min(c.r, c.g), c.b);\n     float sbase = sat(c);\n     vec3 color;\n     if (sbase > 0.0) {\n         color = (c - minbase) * s / sbase;\n     } else {\n         color = vec3(0.0);\n     }\n     return color;\n }\n\n // T = F(S, D);\n // S -> blend, D -> base, T -> result\n vec4 blend(vec4 S, vec4 D) {\n     vec3 T;\n     if(blendMode == 1)\n     {\n         //差值diff\n         T = abs(D.rgb - S.rgb);\n     }\n     else\n     {\n         // color burn(颜色加深)\n         T = 1.0 - min((1.0 - D.rgb) / S.rgb, 1.0);\n     }\n     vec4 resultColor = vec4(T, S.a);\n     return resultColor;\n }\n \n void main()\n {\n     \n     vec2 uv = textureCoordinate;\n     vec4 glitch = texture2D(inputImageTexture2,uv);\n     \n     float thresh = 1.001 - intensity * 1.001;\n     float w_d = step(thresh, pow(glitch.z, 2.5)); // displacement glitch\n     float w_f = step(thresh, pow(glitch.w, 2.5)); // frame glitch\n     float w_c = step(thresh, pow(glitch.z, 3.5)); // color glitch\n\n     // Displacement.\n     float x = fracFunc(uv.x+glitch.x*w_d);\n     float y = fracFunc(uv.y+glitch.y*w_d);\n     vec2 uv1 = vec2(x,y);\n     vec4 source = texture2D(inputImageTexture, uv1);\n     \n     // Mix with trash frame.\n     float r1  = lerpFunc(source.r, texture2D(inputImageTexture, uv1).r, w_f);\n     float g1  = lerpFunc(source.g, texture2D(inputImageTexture, uv1).g, w_f);\n     float b1  = lerpFunc(source.b, texture2D(inputImageTexture, uv1).b, w_f);\n     vec3 color = vec3(r1,g1,b1);\n     \n     \n     // blend.\n     lowp vec4 S = vec4(color,1.0);\n     lowp vec4 D = glitch;\n     vec3 S1 = S.a == 0.0 ? 
S.rgb : S.rgb / S.a;\n     vec4 resultColor = blend(vec4(S1, S.a), D);\n     float opacity = colorFloat * S.a;\n     resultColor = vec4(resultColor.rgb * opacity + D.rgb * (1.0 - opacity), 1.0);\n     vec4 neg = resultColor;\n     \n     //\n     float r3 = lerpFunc(color.r, neg.r, w_c);\n     float g3 = lerpFunc(color.g, neg.g, w_c);\n     float b3 = lerpFunc(color.b, neg.b, w_c);\n     vec3 color3 = vec3(r3, g3, b3);\n     \n     gl_FragColor = vec4(color3,1.0);\n     \n }\n );\n\n\n- (instancetype)init\n{\n    self = [super initWithFragmentShaderFromString:kGLImageGlitchEffectGridFilterFragmentShaderString];\n    intensityUniform = [filterProgram uniformIndex:@\"intensity\"];\n    colorIntensityUniform = [filterProgram uniformIndex:@\"colorFloat\"];\n    blendModeUniform = [filterProgram uniformIndex:@\"blendMode\"];\n    \n    [self disableSecondFrameCheck];\n    self.intensity = 0.0;\n    self.colorIntensity = 1.0;\n    self.blendMode = 1;\n    return self;\n}\n\n- (void)setIntensity:(float)intensity\n{\n    _intensity = intensity;\n    [self setFloat:intensity forUniform:intensityUniform program:filterProgram];\n    [plaidImageSource processImage];\n\n}\n\n- (void)setColorIntensity:(float)colorIntensity\n{\n    _colorIntensity = colorIntensity;\n    [self setFloat:colorIntensity forUniform:colorIntensityUniform program:filterProgram];\n    [plaidImageSource processImage];\n\n}\n\n- (void)setBlendMode:(int)blendMode\n{\n    _blendMode = blendMode;\n    [self setInteger:blendMode forUniform:blendModeUniform program:filterProgram];\n    [plaidImageSource processImage];\n}\n\n- (void)setPlaidImage:(UIImage *)plaidImage\n{\n    \n    __weak __typeof(self)weakSelf = self;\n    runSynchronouslyOnVideoProcessingQueue(^{\n        [plaidImageSource removeTarget:weakSelf];\n        plaidImageSource = nil;\n        plaidImageSource = [[GPUImagePicture alloc] initWithImage:plaidImage];\n        [plaidImageSource addTarget:weakSelf atTextureLocation:1];\n        [plaidImageSource processImage];\n    });\n    \n\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/DouYinEffect/GLImageGlitchEffectLineFilter.h",
    "content": "//\n//  GLImageGlitchEffectLineFilter.h\n//  WEOpenGLKit\n//\n//  Created by 刘海东 on 2018/9/5.\n//  Copyright © 2018年 LHD. All rights reserved.\n//\n\n#import <GPURenderKit/GPURenderKit.h>\n//故障效果---线条故障\n@interface GLImageGlitchEffectLineFilter : GPUImageFilter\n{\n    GLint intensityUniform;\n    \n}\n\n@property (nonatomic, assign) float intensity;\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/DouYinEffect/GLImageGlitchEffectLineFilter.m",
    "content": "//\n//  GLImageGlitchEffectLineFilter.m\n//  WEOpenGLKit\n//\n//  Created by 刘海东 on 2018/9/5.\n//  Copyright © 2018年 LHD. All rights reserved.\n//\n\n#import \"GLImageGlitchEffectLineFilter.h\"\n\n@implementation GLImageGlitchEffectLineFilter\n\n\nNSString *const kGLImageGlitchEffectLineFilterFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n uniform sampler2D inputImageTexture;\n varying lowp vec2 textureCoordinate;\n uniform float intensity;\n //返回小数部分\n float fracFunc (float x)\n{\n    return x - floor(x);\n}\n //\n float nrand(float x, float y)\n{\n    return fracFunc(sin(dot(vec2(x, y), vec2(12.9898, 78.233))) * 43758.5453);\n}\n //两者之间插值\n float lerpFunc(float a, float b, float w)\n{\n    return a + w*(b-a);\n}\n \n void main()\n {\n     \n     vec2 uv = textureCoordinate;\n     //scanLineJitter\n     //intensity 【0.0 - 1.0】\n     float scanLineJitter = intensity *1.3;\n     float sl_thresh = clamp(1.0-scanLineJitter*1.2,0.0,1.0);\n     float sl_disp = 0.002 + pow(scanLineJitter, 3.0) * 0.05;\n     vec2 slj_uv = vec2(sl_disp,sl_thresh);\n     \n     //colorDrift 颜色偏移\n     //取值【0.0 - 1.0】\n     float amount = 0.032;\n     if(intensity<=0.001)\n     {\n         amount = 0.0;\n     }\n     float time = 1.0 *606.11;\n     vec2 colorDrift = vec2(amount,time);\n     \n     // Scan line jitter 抖动效果\n     float jitter = nrand(uv.y, scanLineJitter) * 2.0 - 1.0;\n     jitter *= step(slj_uv.y, abs(jitter)) * slj_uv.x;\n     \n     // Color drift\n     float drift = sin(colorDrift.y) * colorDrift.x;\n     \n     vec2 src1uv = vec2(fracFunc(uv.x + jitter ),uv.y);\n     vec4 src1 = texture2D(inputImageTexture, src1uv);\n     \n     vec2 src2uv = vec2(fracFunc(uv.x + jitter +drift),uv.y);\n     vec4 src2 = texture2D(inputImageTexture, src2uv);\n     \n     gl_FragColor = vec4(src1.r, src2.g, src1.b, 1.0);\n }\n );\n\n- (instancetype)init\n{\n    self = [super initWithFragmentShaderFromString:kGLImageGlitchEffectLineFilterFragmentShaderString];\n    if (self) {\n        \n        intensityUniform = [filterProgram uniformIndex:@\"intensity\"];\n        self.intensity = 0.0;\n    }\n    return self;\n}\n\n- (void)setIntensity:(float)intensity\n{\n    _intensity = intensity;\n    [self setFloat:intensity forUniform:intensityUniform program:filterProgram];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/DouYinEffect/GLImageSoulOutFilter.h",
    "content": "//\n//  GLImageSoulOutFilter.h\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/4/16.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import <GPURenderKit/GPURenderKit.h>\n\n/** 抖音灵魂出窍demo */\n\nNS_ASSUME_NONNULL_BEGIN\n\n@interface GLImageSoulOutFilter : GPUImageFilter\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/DouYinEffect/GLImageSoulOutFilter.m",
    "content": "//\n//  GLImageSoulOutFilter.m\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/4/16.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import \"GLImageSoulOutFilter.h\"\n#define kMaxResetCount 20\n#define kMinResetCount 12\n\n\nNSString *const kGLImageSoulOutFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n varying vec2 textureCoordinate;\n uniform float scale;//缩放比\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     \n     highp vec2 uv = textureCoordinate;\n     \n     vec4 originColor = texture2D(inputImageTexture, uv);\n     //uv坐标的中心点并非是（0.0，0.0），所以这里进行一次偏移，后面在偏移回来就可以了\n     vec2 center = vec2(0.5, 0.5);\n     uv -= center;\n     uv = uv / scale;\n     uv += center;\n     vec4 overlayColor = texture2D(inputImageTexture, uv);\n     \n     //线性混合\n     vec4 color = mix(originColor,overlayColor,fract(scale)*0.2);\n     gl_FragColor = color;\n     \n }\n );\n\n@interface GLImageSoulOutFilter ()\n\n@property (nonatomic, assign) NSInteger currentFrameCount;\n\n@property (nonatomic, assign) NSInteger resetCount;\n\n@end\n\n\n@implementation GLImageSoulOutFilter\n\n\n- (instancetype)init\n{\n    self = [super initWithFragmentShaderFromString:kGLImageSoulOutFragmentShaderString];\n    if (self) {\n        \n        self.currentFrameCount = 0;\n        self.resetCount = 0;\n        \n        \n    }\n    return self;\n}\n\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex{\n    \n    [super newFrameReadyAtTime:frameTime atIndex:textureIndex];\n    \n    self.currentFrameCount = self.currentFrameCount + 1;\n    \n    \n    if (self.currentFrameCount == kMaxResetCount) {\n        self.currentFrameCount =0;\n    }\n    \n    if (self.currentFrameCount>=kMinResetCount) {\n        self.resetCount = self.resetCount + 12;\n    }else{\n        self.resetCount = 0;\n    }\n    \n    //这里是做灵魂出窍的重点计算\n    NSInteger value = self.resetCount;\n    [self updateForegroundTexture:1.0+(value/100.0)];\n}\n\n\n- (void)updateForegroundTexture:(float)scale{\n    [self setFloat:scale forUniformName:@\"scale\"];\n}\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/DouYinEffect/GLImageThreePartitionFilter.h",
    "content": "//\n//  GLImageThreePartitionFilter.h\n//  WEOpenGLKit\n//\n//  Created by 刘海东 on 2019/2/19.\n//  Copyright © 2019 Leo. All rights reserved.\n//\n\n#import <GPURenderKit/GPURenderKit.h>\n\nNS_ASSUME_NONNULL_BEGIN\n\n@interface GLImageThreePartitionFilter : GPUImageFourInputFilter\n{\n    GLint directionTypeUniform;\n    GLint intensityUniform;    \n}\n\n//@property (nonatomic, assign) GLdirectionType directionType;\n\n/** directionType:0 竖直方向 1:水平方向 */\n@property (nonatomic, assign) int directionType;\n\n@property (nonatomic, assign) float intensity;\n\n\n\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/DouYinEffect/GLImageThreePartitionFilter.m",
    "content": "//\n//  GLImageThreePartitionFilter.m\n//  WEOpenGLKit\n//\n//  Created by 刘海东 on 2019/2/19.\n//  Copyright © 2019 Leo. All rights reserved.\n//\n\n#import \"GLImageThreePartitionFilter.h\"\n\nNSString *const kGLImageThreePartitionFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform float intensity;\n uniform int directionType;\n \n uniform sampler2D inputImageTexture2;\n uniform sampler2D inputImageTexture3;\n uniform sampler2D inputImageTexture4;\n \n \n \n vec4 mixLutColor (vec4 textureColor, sampler2D lutTexture) {\n     \n     highp float blueColor = textureColor.b * 63.0;\n     \n     highp vec2 quad1;\n     quad1.y = floor(floor(blueColor) / 8.0);\n     quad1.x = floor(blueColor) - (quad1.y * 8.0);\n     \n     highp vec2 quad2;\n     quad2.y = floor(ceil(blueColor) / 8.0);\n     quad2.x = ceil(blueColor) - (quad2.y * 8.0);\n     \n     highp vec2 texPos1;\n     texPos1.x = (quad1.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.r);\n     texPos1.y = (quad1.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.g);\n     \n     highp vec2 texPos2;\n     texPos2.x = (quad2.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.r);\n     texPos2.y = (quad2.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.g);\n     \n     lowp vec4 newColor1 = texture2D(lutTexture, texPos1);\n     lowp vec4 newColor2 = texture2D(lutTexture, texPos2);\n     \n     lowp vec4 newColor = mix(newColor1, newColor2, fract(blueColor));\n     \n     return vec4(newColor.rgb,intensity);\n     \n }\n \n \n void main()\n {\n     \n     highp vec2 uv = textureCoordinate;\n     \n     vec4 color;\n     \n     if (directionType == 0) {\n         if (uv.x >= 0.0 && uv.x <= 0.33) { // 上\n             \n             vec2 coordinate = vec2(uv.x+0.33, uv.y);\n             color = texture2D(inputImageTexture, coordinate);\n             color = mixLutColor(color,inputImageTexture2);\n             \n         } else if (uv.x > 0.33 && uv.x <= 0.67) {   // 中\n             \n             color = texture2D(inputImageTexture, uv);\n             color = mixLutColor(color,inputImageTexture3);\n             \n         } else {    // 下\n             \n             vec2 coordinate = vec2(uv.x-0.33, uv.y);\n             color = texture2D(inputImageTexture, coordinate);\n             color = mixLutColor(color,inputImageTexture4);\n         }\n         \n     } else {\n         \n         if (uv.y >= 0.0 && uv.y <= 0.33) { // 上\n             \n             vec2 coordinate = vec2(uv.x, uv.y + 0.33);\n             color = texture2D(inputImageTexture, coordinate);\n             color = mixLutColor(color,inputImageTexture2);\n             \n         } else if (uv.y > 0.33 && uv.y <= 0.67) {   // 中\n             \n             color = texture2D(inputImageTexture, uv);\n             color = mixLutColor(color,inputImageTexture3);\n             \n         } else {    // 下\n             \n             vec2 coordinate = vec2(uv.x, uv.y - 0.33);\n             color = texture2D(inputImageTexture, coordinate);\n             color = mixLutColor(color,inputImageTexture4);\n             \n         }\n     }\n     \n     gl_FragColor = color;\n }\n );\n\n\n@implementation GLImageThreePartitionFilter\n\n- (instancetype)init\n{\n    self = [super initWithFragmentShaderFromString:kGLImageThreePartitionFragmentShaderString];\n    if (self) {\n        \n        directionTypeUniform = [filterProgram 
uniformIndex:@\"directionType\"];\n        intensityUniform = [filterProgram uniformIndex:@\"intensity\"];\n        \n        self.intensity = 0.5;\n        [self setDirectionType:1];\n        \n        \n    }\n    return self;\n}\n\n- (void)setDirectionType:(int)directionType{\n    _directionType = directionType;\n    [self setInteger:directionType forUniform:directionTypeUniform program:filterProgram];\n\n}\n\n- (void)setIntensity:(float)intensity\n{\n    _intensity = intensity;\n    [self setFloat:intensity forUniform:intensityUniform program:filterProgram];\n}\n\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/DouYinEffect/GLImageThreePartitionGroupFilter.h",
    "content": "//\n//  GLImageThreePartitionGroupFilter.h\n//  WEOpenGLKit\n//\n//  Created by 刘海东 on 2019/2/19.\n//  Copyright © 2019 Leo. All rights reserved.\n//\n\n#import <GPURenderKit/GPURenderKit.h>\n\nNS_ASSUME_NONNULL_BEGIN\n\n@interface GLImageThreePartitionGroupFilter : GPUImageFilterGroup\n\n/** directionType:0 竖直方向 1:水平方向 */\n@property (nonatomic, assign) int directionType;\n\n@property (nonatomic, assign) float intensity;\n\n\n- (void)setTopLutImg:(UIImage *)topLutImg;\n- (void)setMidLutImg:(UIImage *)midLutImg;\n- (void)setBottomLutImg:(UIImage *)bottomLutImg;\n\n\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/DouYinEffect/GLImageThreePartitionGroupFilter.m",
    "content": "//\n//  GLImageThreePartitionGroupFilter.m\n//  WEOpenGLKit\n//\n//  Created by 刘海东 on 2019/2/19.\n//  Copyright © 2019 Leo. All rights reserved.\n//\n\n#import \"GLImageThreePartitionGroupFilter.h\"\n\n\n@implementation GLImageThreePartitionGroupFilter\n{\n    GLImageThreePartitionFilter *threePartitionFilter;\n    GPUImagePicture *topLutPic;\n    GPUImagePicture *midLutPic;\n    GPUImagePicture *bottomLutPic;\n\n}\n\n\n- (instancetype)init\n{\n    self = [super init];\n    if (self) {\n        \n        threePartitionFilter = [[GLImageThreePartitionFilter alloc]init];\n        [self addFilter:threePartitionFilter];\n        self.initialFilters = @[threePartitionFilter];\n        self.terminalFilter = threePartitionFilter;\n        threePartitionFilter.intensity = 0.0;\n        \n    }\n    return self;\n}\n\n- (void)setIntensity:(float)intensity\n{\n    _intensity = intensity;\n    threePartitionFilter.intensity = intensity;\n}\n\n- (void)setDirectionType:(int)directionType{\n    _directionType = directionType;\n    threePartitionFilter.directionType = directionType;\n}\n\n- (void)setTopLutImg:(UIImage *)topLutImg\n{\n    \n    GPUImagePicture *picture = [[GPUImagePicture alloc] initWithImage:topLutImg];\n    [picture addTarget:threePartitionFilter atTextureLocation:1];\n    [picture processImage];\n    [threePartitionFilter disableSecondFrameCheck];\n    \n    if (topLutPic)\n    {\n        [topLutPic removeTarget:threePartitionFilter];\n        topLutPic = nil;\n    }\n    \n    topLutPic = picture;\n\n}\n- (void)setMidLutImg:(UIImage *)midLutImg\n{\n\n    GPUImagePicture *picture = [[GPUImagePicture alloc] initWithImage:midLutImg];\n    [picture addTarget:threePartitionFilter atTextureLocation:2];\n    [picture processImage];\n    [threePartitionFilter disableThirdFrameCheck];\n    \n    if (midLutPic)\n    {\n        [midLutPic removeTarget:threePartitionFilter];\n        midLutPic = nil;\n    }\n    \n    midLutPic = picture;\n}\n- (void)setBottomLutImg:(UIImage *)bottomLutImg\n{\n    \n    GPUImagePicture *picture = [[GPUImagePicture alloc] initWithImage:bottomLutImg];\n    [picture addTarget:threePartitionFilter atTextureLocation:3];\n    [picture processImage];\n    [threePartitionFilter disableFourthFrameCheck];\n    \n    if (bottomLutPic)\n    {\n        [bottomLutPic removeTarget:threePartitionFilter];\n        bottomLutPic = nil;\n    }\n    \n    bottomLutPic = picture;\n    \n}\n\n@end\n"
  },
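A minimal usage sketch for the three-partition group filter above, not part of the repo itself: it assumes a GPUImageVideoCamera/GPUImageView pipeline and placeholder LUT image names (`lut_top`, `lut_mid`, `lut_bottom`) standing in for whatever lookup tables an app would ship.

```objc
#import <GPURenderKit/GPURenderKit.h>

// Illustrative only: wires the group filter between a camera and a preview view.
- (void)setupThreePartitionPreviewWithCamera:(GPUImageVideoCamera *)camera
                                 previewView:(GPUImageView *)previewView
{
    GLImageThreePartitionGroupFilter *partitionFilter = [[GLImageThreePartitionGroupFilter alloc] init];

    // One lookup texture per band; the image names are placeholders.
    [partitionFilter setTopLutImg:[UIImage imageNamed:@"lut_top"]];
    [partitionFilter setMidLutImg:[UIImage imageNamed:@"lut_mid"]];
    [partitionFilter setBottomLutImg:[UIImage imageNamed:@"lut_bottom"]];

    partitionFilter.directionType = 1;   // 0 = vertical split, 1 = horizontal split
    partitionFilter.intensity = 0.8;     // how strongly the LUTs are applied

    [camera addTarget:partitionFilter];
    [partitionFilter addTarget:previewView];
    [camera startCameraCapture];
}
```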
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/DouYinEffect/GLImageWaterReflectionFilter.h",
    "content": "//\n//  GLImageWaterReflectionFilter.h\n//  GPURenderKit\n//\n//  Created by 刘海东 on 2019/4/17.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import <GPURenderKit/GPURenderKit.h>\n\nNS_ASSUME_NONNULL_BEGIN\n\n@interface GLImageWaterReflectionFilter : GPUImageFilter\n{\n    GLint iResolutionUniform,timeUniform;\n}\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/DouYinEffect/GLImageWaterReflectionFilter.m",
    "content": "//\n//  GLImageWaterReflectionFilter.m\n//  GPURenderKit\n//\n//  Created by 刘海东 on 2019/4/17.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import \"GLImageWaterReflectionFilter.h\"\n\nNSString *const kGLImageWaterReflectionFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n varying vec2 textureCoordinate;\n uniform sampler2D inputImageTexture;\n \n uniform float time;\n \n uniform vec2 resolution;\n \n \n#define F cos(x-y)*cos(y),sin(x+y)*sin(y)\n //波纹\n vec2 ripple(vec2 point)\n{\n    //d 水波纹的剧烈程度\n    float d= abs(sin(time))*0.98;\n    float x=10.*(point.x+d);\n    float y=3.*(point.y+d);\n    return vec2(F);\n}\n \n void main()\n {\n     \n     highp vec2 uv = textureCoordinate;\n     float aspectRatio = resolution.x/resolution.y;\n\n     float iResolutionX = resolution.x;\n\n     if (uv.y>0.5){\n         \n         uv = uv+2./iResolutionX*(ripple(uv)-ripple(uv+resolution.xy));\n         \n         uv.y = 1.0 - uv.y;\n         \n     }\n     \n     gl_FragColor = texture2D(inputImageTexture, uv);\n\n }\n );\n\n\n@interface GLImageWaterReflectionFilter ()\n\n@property (nonatomic, assign) float time;\n\n@end\n\n@implementation GLImageWaterReflectionFilter\n\n- (instancetype)init\n{\n    self = [super initWithFragmentShaderFromString:kGLImageWaterReflectionFragmentShaderString];\n    if (self) {\n        \n        iResolutionUniform = [filterProgram uniformIndex:@\"resolution\"];\n        timeUniform = [filterProgram uniformIndex:@\"time\"];\n        \n        \n    }\n    return self;\n}\n\n\n- (void)setupFilterForSize:(CGSize)filterFrameSize\n{\n    [self setSize:filterFrameSize forUniform:iResolutionUniform program:filterProgram];\n    \n    self.time  = self.time + 0.05;\n    [self setFloat:self.time forUniform:timeUniform program:filterProgram];\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/DouYinEffect/GLImageZoomFilter.h",
    "content": "//\n//  GLImageZoomFilter.h\n//  GPURenderKit\n//\n//  Created by 刘海东 on 2019/4/17.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import <GPURenderKit/GPURenderKit.h>\n\nNS_ASSUME_NONNULL_BEGIN\n\n@interface GLImageZoomFilter : GPUImageFilter\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/DouYinEffect/GLImageZoomFilter.m",
    "content": "//\n//  GLImageZoomFilter.m\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/4/16.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import \"GLImageZoomFilter.h\"\n#define kMaxResetCount 20\n#define kMinResetCount 15\n\n\nNSString *const kGLImageZoomFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n varying vec2 textureCoordinate;\n uniform float scale;//放大缩小的比例值\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     \n     highp vec2 uv = textureCoordinate;\n     \n     //uv坐标的中心点并非是（0.0，0.0），所以这里进行一次偏移，后面在偏移回来就可以了\n     vec2 center = vec2(0.5, 0.5);\n     uv -= center;\n     uv = uv / scale;\n     uv += center;\n     \n     gl_FragColor = texture2D(inputImageTexture, uv);\n     \n }\n );\n\n@interface GLImageZoomFilter ()\n\n@property (nonatomic, assign) NSInteger currentFrameCount;\n\n@property (nonatomic, assign) NSInteger resetCount;\n\n@end\n\n\n@implementation GLImageZoomFilter\n\n\n- (instancetype)init\n{\n    self = [super initWithFragmentShaderFromString:kGLImageZoomFragmentShaderString];\n    if (self) {\n        \n        self.currentFrameCount = 0;\n        self.resetCount = 0;\n        \n        \n    }\n    return self;\n}\n\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex{\n    \n    [super newFrameReadyAtTime:frameTime atIndex:textureIndex];\n    \n    self.currentFrameCount = self.currentFrameCount + 1;\n    \n    \n    if (self.currentFrameCount == kMaxResetCount) {\n        self.currentFrameCount =0;\n    }\n    \n    if (self.currentFrameCount>=kMinResetCount) {\n        self.resetCount = self.resetCount + 12;\n    }else{\n        self.resetCount = self.resetCount - 12;\n        \n        //不让缩到最小\n        if (self.resetCount<0) {\n            self.resetCount = 0;\n        }\n    }\n\n    //这里做放大的计算\n    NSInteger value = self.resetCount;\n    [self updateForegroundTexture:1.0+(value/100.0)];\n}\n\n\n- (void)updateForegroundTexture:(float)scale{\n    [self setFloat:scale forUniformName:@\"scale\"];\n}\n\n\n\n@end\n"
  },
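For reference, a hedged usage sketch of the zoom filter above (not part of the repo): the filter drives its own pulsing, so it only needs to sit in a chain. With the constants as defined, the `scale` uniform grows during frames 15–19 of every 20-frame cycle and shrinks back toward 1.0 otherwise.

```objc
#import <GPURenderKit/GPURenderKit.h>

// Illustrative only: GLImageZoomFilter takes no parameters; it animates the
// zoom scale itself from the per-frame counter in -newFrameReadyAtTime:atIndex:.
- (void)attachPulsingZoomToCamera:(GPUImageVideoCamera *)camera
                      previewView:(GPUImageView *)previewView
{
    GLImageZoomFilter *zoomFilter = [[GLImageZoomFilter alloc] init];
    [camera addTarget:zoomFilter];
    [zoomFilter addTarget:previewView];
    [camera startCameraCapture];
}
```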
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/FaceFilters/GLImageFaceChangeFilter.h",
    "content": "//\n//  GLImageFaceChangeFilter.h\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/4/16.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n\n\n#import <GPURenderKit/GPURenderKit.h>\n#import \"GPUImageFilter.h\"\nNS_ASSUME_NONNULL_BEGIN\n\n@interface GLImageFaceChangeFilter : GPUImageFilter\n{\n    GLint faceArrayUniform,iResolutionUniform,haveFaceUniform;\n}\n\n/** 是否检测到人脸 */\n@property (nonatomic, assign) BOOL isHaveFace;\n/** 瘦脸调节【-1.0 - 1.0】*/\n@property (nonatomic, assign) float thinFaceParam;\n/** 眼睛调节【-1.0 - 1.0】*/\n@property (nonatomic, assign) float eyeParam;\n/** 鼻子调节【-1.0 - 1.0】*/\n@property (nonatomic, assign) float noseParam;\n\n\n- (void)setFacePointsArray:(NSArray *)pointArrays;\n\n- (void)setCaptureDevicePosition:(AVCaptureDevicePosition)captureDevicePosition;\n\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/FaceFilters/GLImageFaceChangeFilter.m",
    "content": "//\n//  GLImageFaceChangeFilter.m\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/4/16.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n\n/**\n\n 这里面用到的算法参考文章地址 \"http://www.shenyanhao.com/2015/09/眼睛放大美颜算法/\"\n \n */\n\n#import \"GLImageFaceChangeFilter.h\"\n\n#define FACE_POINTS_COUNT 106\n\n\nNSString *const kGLImageFaceChangeFragmentShaderString = SHADER_STRING\n(\n precision mediump float;\n \n varying highp vec2 textureCoordinate;\n uniform sampler2D inputImageTexture;\n \n /** 瘦脸i调节 */\n uniform float thin_face_param;\n /** 大眼调节 */\n uniform float eye_param;\n /** 鼻子调节 */\n uniform float nose_param;\n\n uniform vec2 resolution;\n uniform int haveFaceBool;\n \n uniform mediump vec2 locArray[106];\n \n \n highp vec2 warpPositionToUse1(vec2 currentPoint, vec2 contourPointA,  vec2 contourPointB, float radius, float delta, float aspectRatio)\n{\n    highp vec2 positionToUse = currentPoint;\n    \n    vec2 currentPointToUse = vec2(currentPoint.x, currentPoint.y * aspectRatio + 0.5 - 0.5 * aspectRatio);\n    vec2 contourPointAToUse = vec2(contourPointA.x, contourPointA.y * aspectRatio + 0.5 - 0.5 * aspectRatio);\n    highp float r = distance(currentPointToUse, contourPointAToUse);\n    \n    if(r < radius)\n    {\n        vec2 dir = normalize(contourPointB - contourPointA);\n        float dist = radius * radius - r * r;\n        float alpha = dist / (dist + (r-delta) * (r-delta));\n        alpha = alpha * alpha;\n        \n        positionToUse = positionToUse - alpha * delta * dir;\n    }\n    \n    return positionToUse;\n}\n \n \n //脸部调节\n vec2 adjust_thinFace(vec2 coord, float eye_dist, vec2 dir_up, vec2 dir_right, float aspect_ratio, float intensity)\n{\n    vec2 positionToUse = coord;\n    int arraySize = 3;\n    vec2 leftContourPoints[3];\n    vec2 rightContourPoints[3];\n    \n    float deltaArray[3];\n    \n    leftContourPoints[0] = locArray[4] - dir_right * eye_dist*0.13;\n    leftContourPoints[1] = locArray[9] - dir_right * eye_dist*0.33;\n    leftContourPoints[2] = locArray[13]- dir_right * eye_dist*0.33;\n    \n    \n    rightContourPoints[0] = locArray[28] + dir_right * eye_dist*0.13;\n    rightContourPoints[1] = locArray[23] + dir_right * eye_dist*0.33;\n    rightContourPoints[2] = locArray[19] + dir_right * eye_dist*0.33;\n    \n    float x = 3.14159 / 30.0;\n    float scaleFactor = eye_dist * 2.0;\n    float radius = 0.4 * scaleFactor;\n    \n    \n    deltaArray[0] = sin(x) * intensity * 0.150 * scaleFactor;\n    deltaArray[1] = sin(x*2.0) * intensity * 0.150 * scaleFactor;\n    deltaArray[2] = sin(x*2.0) * intensity * 0.150 * scaleFactor;\n    \n    \n    for(int i = 0; i < arraySize; i++)\n    {\n        positionToUse = warpPositionToUse1(positionToUse, leftContourPoints[i], rightContourPoints[i], radius, deltaArray[i], aspect_ratio);\n        positionToUse = warpPositionToUse1(positionToUse, rightContourPoints[i], leftContourPoints[i], radius, deltaArray[i], aspect_ratio);\n    }\n    \n    return positionToUse;\n}\n \n //大眼\n vec2 adjust_eye(vec2 coord, float eye_dist, vec2 dir_up, vec2 dir_right, float aspect_ratio, float intensity)\n{\n    float eyeEnlarge = intensity * 0.24;\n    \n    float res_ratio = resolution.x/resolution.y;\n    \n    vec2 newCoord = vec2(coord.x*res_ratio,coord.y);\n    \n    vec2 eyea = vec2(locArray[74].x * res_ratio, locArray[74].y);\n    vec2 eyeb = vec2(locArray[77].x * res_ratio, locArray[77].y);\n    \n    vec2 eye_far = vec2(locArray[52].x * res_ratio, locArray[52].y);\n    vec2 eye_near = 
vec2(locArray[55].x * res_ratio, locArray[55].y);\n    \n    float weight = 0.0;\n    float eye_width = distance(eye_far, eye_near);\n    \n    // left eye\n    float eyeRadius = eye_width;\n    float dis_eye1 = distance(newCoord, eyea);\n    if (dis_eye1 < 0.01) {\n        \n        weight = pow((dis_eye1+0.01) / eyeRadius, eyeEnlarge);\n        newCoord = eyea + (newCoord - eyea)*weight;\n        \n    } else if (dis_eye1 <= eyeRadius) {\n        weight = pow(dis_eye1 / eyeRadius, eyeEnlarge);\n        newCoord = eyea + (newCoord - eyea)*weight;\n    }\n    \n    // right eye\n    float dis_eye2 = distance(newCoord, eyeb);\n    if (dis_eye2 < 0.01) {\n        \n        weight = pow((dis_eye2+0.01) / eyeRadius, eyeEnlarge);\n        newCoord = eyeb + (newCoord - eyeb)*weight;\n        \n    } else if (dis_eye2 <= eyeRadius) {\n        weight = pow(dis_eye2 / eyeRadius, eyeEnlarge);\n        newCoord = eyeb + (newCoord - eyeb)*weight;\n    }\n    \n    newCoord = vec2(newCoord.x/res_ratio, newCoord.y);\n    return newCoord;\n}\n \n vec2 newNarrowNose_2(vec2 coord, float eye_dist, vec2 dir_up, vec2 dir_right, float aspect_ratio, float intensity)\n{\n    vec2 positionToUse = coord;\n    float scaleFactor = eye_dist *0.28;\n    float noseMorph = intensity * scaleFactor;\n    \n    int arraySize = 2;\n    float radius = 0.16;\n    float delta = noseMorph *scaleFactor;\n    \n    vec2 left_loca = locArray[48] +dir_up*0.09;\n    vec2 right_loca = locArray[50] +dir_up*0.09;\n    \n    \n    vec2 leftContourPoints[2];\n    leftContourPoints[0] = left_loca;\n    leftContourPoints[1] = locArray[82];\n    \n    vec2 rightContourPoints[2] ;\n    rightContourPoints[0] = right_loca;\n    rightContourPoints[1] = locArray[83];\n    \n    for(int i = 0; i < arraySize; i++)\n    {\n        positionToUse = warpPositionToUse1(positionToUse, leftContourPoints[i], rightContourPoints[i], radius, delta, aspect_ratio);\n        \n        positionToUse = warpPositionToUse1(positionToUse, rightContourPoints[i], leftContourPoints[i], radius, delta, aspect_ratio);\n    }\n    \n    return positionToUse;\n}\n\n \n void main()\n {\n     \n     vec2 newCoord = textureCoordinate;\n     \n     // 眼距\n     highp float eye_dist = distance(locArray[74], locArray[77]);\n     // 屏幕高宽比\n     highp float aspect_ratio = resolution.y / resolution.x;\n     \n     // 面部方向\n     vec2 dir_up     = normalize(locArray[43] - locArray[16]);\n     vec2 dir_right  = normalize(locArray[77] - locArray[74]);\n     \n     if (haveFaceBool == 1)\n     {\n         //瘦脸调节\n         newCoord = adjust_thinFace(newCoord, eye_dist, dir_up, dir_right, aspect_ratio, thin_face_param);\n         //眼部调节\n         newCoord = adjust_eye(newCoord, eye_dist, dir_up, dir_right, aspect_ratio, eye_param);\n         //鼻子调节\n         newCoord = newNarrowNose_2(newCoord, eye_dist, dir_up, dir_right, aspect_ratio, nose_param);\n         \n     }\n     \n     vec3 newColor = texture2D(inputImageTexture, newCoord).rgb;\n     gl_FragColor = vec4(newColor, 1.0);\n }\n );\n\n@interface GLImageFaceChangeFilter ()\n@property (nonatomic, assign) CGSize frameBufferSize;\n/** 是否是前置摄像头 */\n@property (nonatomic, assign) BOOL isFront;\n@end\n\n\n@implementation GLImageFaceChangeFilter\n\n- (instancetype)init\n{\n    self = [super initWithFragmentShaderFromString:kGLImageFaceChangeFragmentShaderString];\n    if (self) {\n        \n        faceArrayUniform = [filterProgram uniformIndex:@\"locArray\"];\n        iResolutionUniform = [filterProgram uniformIndex:@\"resolution\"];\n      
  haveFaceUniform = [filterProgram uniformIndex:@\"haveFaceBool\"];\n    }\n    return self;\n}\n\n- (void)setIsHaveFace:(BOOL)isHaveFace{\n    _isHaveFace = isHaveFace;\n    int value = isHaveFace == YES ? 1:0;\n    [self setInteger:value forUniform:haveFaceUniform program:filterProgram];\n}\n\n\n- (void)setThinFaceParam:(float)thinFaceParam\n{\n    _thinFaceParam = thinFaceParam;\n    [self setFloat:thinFaceParam forUniformName:@\"thin_face_param\"];\n}\n\n- (void)setEyeParam:(float)eyeParam{\n    _eyeParam = eyeParam;\n    [self setFloat:eyeParam forUniformName:@\"eye_param\"];\n}\n\n- (void)setNoseParam:(float)noseParam{\n    _noseParam = noseParam;\n    [self setFloat:noseParam forUniformName:@\"nose_param\"];\n}\n\n- (void)setFacePointsArray:(NSArray *)pointArrays{\n    \n    if (pointArrays.count==0) {\n        return;\n    }\n    \n    static GLfloat facePoints[FACE_POINTS_COUNT * 2] = {0};\n    \n    float width = _frameBufferSize.width;\n    float height = _frameBufferSize.height;\n    \n    for (int index = 0; index < FACE_POINTS_COUNT; index++)\n    {\n        CGPoint point = [pointArrays[index] CGPointValue];\n        \n        if (self.isFront) {\n            facePoints[2 * index + 0] = (point.y / width);\n        }else{\n            facePoints[2 * index + 0] = 1.0 - (point.y / width);\n        }\n        \n        facePoints[2 * index + 1] = (point.x / height);\n    }\n    \n    [self setFloatVec2Array:facePoints length:FACE_POINTS_COUNT*2 forUniform:faceArrayUniform program:filterProgram];\n}\n\n\n- (void)setupFilterForSize:(CGSize)filterFrameSize\n{\n    _frameBufferSize = filterFrameSize;\n    [self setSize:filterFrameSize forUniform:iResolutionUniform program:filterProgram];\n}\n\n- (void)setCaptureDevicePosition:(AVCaptureDevicePosition)captureDevicePosition{\n    \n    if (captureDevicePosition == AVCaptureDevicePositionBack) {\n        self.isFront = NO;\n    }else{\n        self.isFront = YES;\n    }\n}\n\n\n\n@end\n"
  },
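A hedged sketch of how the warp filter above might be driven from a landmark callback, not part of the repo: the 106-point source is an assumption (any face SDK that returns points in the pixel-buffer coordinate space would do), and the slider values are arbitrary examples. The filter normalizes the points against the frame size it receives in -setupFilterForSize:.

```objc
#import <GPURenderKit/GPURenderKit.h>

// Illustrative only: landmarks106 is a hypothetical array of 106 CGPoint NSValues
// from whatever face detector the app integrates.
- (void)applyLandmarks:(NSArray<NSValue *> *)landmarks106
              toFilter:(GLImageFaceChangeFilter *)faceFilter
        devicePosition:(AVCaptureDevicePosition)position
{
    [faceFilter setCaptureDevicePosition:position];    // decides whether x is mirrored
    faceFilter.isHaveFace = (landmarks106.count > 0);  // gates the warp in the shader

    faceFilter.thinFaceParam = 0.4;   // -1.0 ... 1.0
    faceFilter.eyeParam      = 0.3;   // -1.0 ... 1.0
    faceFilter.noseParam     = 0.2;   // -1.0 ... 1.0

    if (landmarks106.count > 0) {
        [faceFilter setFacePointsArray:landmarks106];
    }
}
```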
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/FaceFilters/GLImageFaceChangeFilterGroup.h",
    "content": "//\n//  GLImageFaceChangeFilterGroup.h\n//  GPURenderKit\n//\n//  Created by 刘海东 on 2019/4/26.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import <GPURenderKit/GPURenderKit.h>\n\nNS_ASSUME_NONNULL_BEGIN\n\n@interface GLImageFaceChangeFilterGroup : GPUImageFilterGroup\n\n\n/** 是否检测到人脸 */\n@property (nonatomic, assign) BOOL isHaveFace;\n/** 瘦脸调节【-1.0 - 1.0】 */\n@property (nonatomic, assign) float thinFaceParam;\n/** 眼睛调节【-1.0 - 1.0】*/\n@property (nonatomic, assign) float eyeParam;\n/** 鼻子调节【-1.0 - 1.0】*/\n@property (nonatomic, assign) float noseParam;\n\n/** 人脸检测点显示 默认开启*/\n@property (nonatomic, assign) BOOL isShowFaceDetectPointBool;\n\n- (void)setFacePointsArray:(NSArray *)pointArrays;\n\n- (void)setCaptureDevicePosition:(AVCaptureDevicePosition)captureDevicePosition;\n\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/FaceFilters/GLImageFaceChangeFilterGroup.m",
    "content": "//\n//  GLImageFaceChangeFilterGroup.m\n//  GPURenderKit\n//\n//  Created by 刘海东 on 2019/4/26.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import \"GLImageFaceChangeFilterGroup.h\"\n\n@interface GLImageFaceChangeFilterGroup ()\n\n@property (nonatomic, strong) GLImageFaceChangeFilter *faceChangeFilter;\n@property (nonatomic, strong) GLImageFaceDetectPointFilter *faceDetectPointFilter;\n@property (nonatomic, strong) GPUImageScreenBlendFilter *blendFilter;\n\n@end\n\n\n@implementation GLImageFaceChangeFilterGroup\n\n- (instancetype)init\n{\n    self = [super init];\n    if (self) {\n        \n        self.faceChangeFilter = [[GLImageFaceChangeFilter alloc]init];\n        [self addFilter:self.faceChangeFilter];\n\n        self.faceDetectPointFilter = [[GLImageFaceDetectPointFilter alloc]init];\n        self.faceDetectPointFilter.isShowFaceDetectPointBool = YES;\n        [self addFilter:self.faceDetectPointFilter];\n        \n        // 混合模式\n        self.blendFilter = [[GPUImageScreenBlendFilter alloc] init];\n        [self addFilter:self.blendFilter];\n        \n        [self.faceChangeFilter addTarget:self.blendFilter atTextureLocation:0];\n        [self.faceDetectPointFilter addTarget:self.blendFilter atTextureLocation:1];\n\n        self.initialFilters = [NSArray arrayWithObjects:self.faceChangeFilter, self.faceDetectPointFilter, self.blendFilter,nil];\n        self.terminalFilter = self.blendFilter;\n\n        \n    }\n    return self;\n}\n\n- (void)setFacePointsArray:(NSArray *)pointArrays{\n    \n    [self.faceChangeFilter setFacePointsArray:pointArrays];\n    [self.faceDetectPointFilter setFacePointsArray:pointArrays];\n\n}\n\n- (void)setCaptureDevicePosition:(AVCaptureDevicePosition)captureDevicePosition{\n    \n    [self.faceChangeFilter setCaptureDevicePosition:captureDevicePosition];\n    [self.faceDetectPointFilter setCaptureDevicePosition:captureDevicePosition];\n\n}\n\n\n#pragma mark ------------------------------------------------------ publicFunc ------------------------------------------------------\n- (void)setIsHaveFace:(BOOL)isHaveFace{\n    self.faceChangeFilter.isHaveFace = isHaveFace;\n}\n\n- (void)setThinFaceParam:(float)thinFaceParam{\n    self.faceChangeFilter.thinFaceParam = thinFaceParam;\n}\n\n- (void)setEyeParam:(float)eyeParam{\n    self.faceChangeFilter.eyeParam = eyeParam;\n}\n\n- (void)setNoseParam:(float)noseParam{\n    self.faceChangeFilter.noseParam = noseParam;\n}\n\n- (void)setIsShowFaceDetectPointBool:(BOOL)isShowFaceDetectPointBool{\n    self.faceDetectPointFilter.isShowFaceDetectPointBool = isShowFaceDetectPointBool;\n}\n\n\n@end\n"
  },
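For context, a minimal wiring sketch for the group above (not part of the repo): the group bundles the warp filter with the landmark-point overlay, so a single object sits in the camera chain and the debug points can be toggled at runtime. The camera and view are assumed to exist elsewhere.

```objc
#import <GPURenderKit/GPURenderKit.h>

// Illustrative only: installs the face-change group between a camera and a preview view.
- (GLImageFaceChangeFilterGroup *)installFaceGroupOnCamera:(GPUImageVideoCamera *)camera
                                               previewView:(GPUImageView *)previewView
{
    GLImageFaceChangeFilterGroup *faceGroup = [[GLImageFaceChangeFilterGroup alloc] init];
    faceGroup.isShowFaceDetectPointBool = NO;   // hide the landmark overlay in production
    [faceGroup setCaptureDevicePosition:AVCaptureDevicePositionFront];

    [camera addTarget:faceGroup];
    [faceGroup addTarget:previewView];
    [camera startCameraCapture];
    return faceGroup;   // feed it setFacePointsArray: / isHaveFace from the detector callback
}
```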
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/FaceFilters/GLImageFaceDetectPointFilter.h",
    "content": "//\n//  GLImageFaceDetectPointFilter.h\n//  GPURenderKit\n//\n//  Created by 刘海东 on 2019/4/25.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n\n\n#import <GPURenderKit/GPURenderKit.h>\n\nNS_ASSUME_NONNULL_BEGIN\n\n/** 人脸检检测特征点显示 */\n@interface GLImageFaceDetectPointFilter : GPUImageFilter\n/** 人脸检测点显示 默认开启*/\n@property (nonatomic, assign) BOOL isShowFaceDetectPointBool;\n\n- (void)setFacePointsArray:(NSArray *)pointArrays;\n\n- (void)setCaptureDevicePosition:(AVCaptureDevicePosition)captureDevicePosition;\n\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/FaceFilters/GLImageFaceDetectPointFilter.m",
    "content": "//\n//  GLImageFaceDetectPointFilter.m\n//  GPURenderKit\n//\n//  Created by 刘海东 on 2019/4/25.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import \"GLImageFaceDetectPointFilter.h\"\n\nNSString *const kGLImageFaceDetectPointFragmentShaderString = SHADER_STRING\n(\n precision highp float;\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     \n     highp vec2 uv = textureCoordinate;     \n     gl_FragColor = vec4(0.2, 0.709803922, 0.898039216, 1.0);\n }\n );\n\nNSString *const kGLImageFaceDetectPointVertexShaderString = SHADER_STRING\n(\n attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n varying vec2 textureCoordinate;\n \n void main()\n {\n     gl_Position = position;\n     gl_PointSize = 8.0;\n     textureCoordinate = inputTextureCoordinate.xy;\n }\n );\n\n@interface GLImageFaceDetectPointFilter ()\n\n@property (nonatomic, strong) NSArray *pointArrays;\n\n@property (nonatomic, assign) GLfloat videoFrameW;\n@property (nonatomic, assign) GLfloat videoFrameH;\n/** 是否是前置摄像头 */\n@property (nonatomic, assign) BOOL isFront;\n\n@end\n\n\n@implementation GLImageFaceDetectPointFilter\n\n- (instancetype)init\n{\n    self = [super initWithVertexShaderFromString:kGLImageFaceDetectPointVertexShaderString fragmentShaderFromString:kGLImageFaceDetectPointFragmentShaderString];\n    if (self) {\n        self.isShowFaceDetectPointBool = YES;\n    }\n    return self;\n}\n\n- (void)setFacePointsArray:(NSArray *)pointArrays{\n    _pointArrays = pointArrays;    \n}\n\n- (void)setIsShowFaceDetectPointBool:(BOOL)isShowFaceDetectPointBool{\n    _isShowFaceDetectPointBool = isShowFaceDetectPointBool;\n}\n\n\n- (void)setupFilterForSize:(CGSize)filterFrameSize\n{\n    self.videoFrameW = filterFrameSize.width;\n    self.videoFrameH = filterFrameSize.height;\n}\n\n- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates{\n    \n    \n}\n\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex\n{\n    //是否开启人脸监测点\n    if (!_isShowFaceDetectPointBool) {\n        [super newFrameReadyAtTime:frameTime atIndex:textureIndex];\n        return;\n    }\n    \n    if (self.pointArrays.count==0) {\n        [super newFrameReadyAtTime:frameTime atIndex:textureIndex];\n        return;\n    }\n\n    if (self.preventRendering)\n    {\n        [firstInputFramebuffer unlock];\n        return;\n    }\n    \n    [GPUImageContext setActiveShaderProgram:filterProgram];\n    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];\n    [outputFramebuffer activateFramebuffer];\n    \n    [self setUniformsForProgramAtIndex:0];\n    \n    if (usingNextFrameForImageCapture)\n    {\n        [outputFramebuffer lock];\n    }\n    \n    glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);\n    glClear(GL_COLOR_BUFFER_BIT);\n    \n    glActiveTexture(GL_TEXTURE2);\n    glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);\n    glUniform1i(filterInputTextureUniform, 2);\n    \n    const GLsizei pointCount = (GLsizei)self.pointArrays.count;\n    GLfloat tempPoint[pointCount * 2];\n    \n    for (int i = 0; i < self.pointArrays.count; i ++) {\n        CGPoint pointer = [self.pointArrays[i] CGPointValue];\n        \n        if (self.isFront) {\n            tempPoint[i*2+0]=  (pointer.y/_videoFrameW) *2.0 - 1.0;\n            
tempPoint[i*2+1]=  (pointer.x/_videoFrameH) *2.0 - 1.0;\n        }else{\n            tempPoint[i*2+0]=  1.0 - ((pointer.y/_videoFrameW) * 2.0);\n            tempPoint[i*2+1]=  (pointer.x/_videoFrameH) *2.0 - 1.0;\n        }\n    }\n    \n    glVertexAttribPointer(self->filterPositionAttribute, 2, GL_FLOAT, GL_FALSE, 0, tempPoint);\n    glDrawArrays(GL_POINTS, 0, (GLsizei)self.pointArrays.count);\n\n    [self informTargetsAboutNewFrameAtTime:frameTime];\n    [firstInputFramebuffer unlock];\n    if (usingNextFrameForImageCapture)\n    {\n        dispatch_semaphore_signal(imageCaptureSemaphore);\n    }\n\n}\n\n- (void)setCaptureDevicePosition:(AVCaptureDevicePosition)captureDevicePosition{\n    \n    if (captureDevicePosition == AVCaptureDevicePositionBack) {\n        self.isFront = NO;\n    }else{\n        self.isFront = YES;\n    }\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/FaceFilters/GPUImageBeautifyFilter.h",
    "content": "//\n//  GPUImageBeautifyFilter.h\n//  BeautifyFaceDemo\n//\n//  Created by guikz on 16/4/28.\n//  Copyright © 2016年 guikz. All rights reserved.\n//\n\n#import \"GPUImageFilterGroup.h\"\n#import \"GPUImageBilateralFilter.h\"\n#import \"GPUImageFilterGroup.h\"\n#import \"GPUImageCannyEdgeDetectionFilter.h\"\n#import \"GPUImageHSBFilter.h\"\n#import \"GPUImageThreeInputFilter.h\"\n\n@class GPUImageCombinationFilter;\n\n@interface GPUImageBeautifyFilter : GPUImageFilterGroup {\n    GPUImageBilateralFilter *bilateralFilter;\n    GPUImageCannyEdgeDetectionFilter *cannyEdgeFilter;\n    GPUImageCombinationFilter *combinationFilter;\n    GPUImageHSBFilter *hsbFilter;\n}\n\n\n@property (nonatomic, assign) CGFloat intensity;\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/FaceFilters/GPUImageBeautifyFilter.m",
    "content": "//\n//  GPUImageBeautifyFilter.m\n//  BeautifyFaceDemo\n//\n//  Created by guikz on 16/4/28.\n//  Copyright © 2016年 guikz. All rights reserved.\n//\n\n#import \"GPUImageBeautifyFilter.h\"\n\n// Internal CombinationFilter(It should not be used outside)\n@interface GPUImageCombinationFilter : GPUImageThreeInputFilter\n{\n    GLint smoothDegreeUniform;\n}\n\n@property (nonatomic, assign) CGFloat intensity;\n\n@end\n\nNSString *const kGPUImageBeautifyFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n varying highp vec2 textureCoordinate3;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n uniform sampler2D inputImageTexture3;\n uniform mediump float smoothDegree;\n \n void main()\n {\n     highp vec4 bilateral = texture2D(inputImageTexture, textureCoordinate);\n     highp vec4 canny = texture2D(inputImageTexture2, textureCoordinate2);\n     highp vec4 origin = texture2D(inputImageTexture3,textureCoordinate3);\n     highp vec4 smooth;\n     lowp float r = origin.r;\n     lowp float g = origin.g;\n     lowp float b = origin.b;\n     if (canny.r < 0.2 && r > 0.3725 && g > 0.1568 && b > 0.0784 && r > b && (max(max(r, g), b) - min(min(r, g), b)) > 0.0588 && abs(r-g) > 0.0588) {\n         smooth = (1.0 - smoothDegree) * (origin - bilateral) + bilateral;\n     }\n     else {\n         smooth = origin;\n     }\n     smooth.r = log(1.0 + 0.2 * smooth.r)/log(1.2);\n     smooth.g = log(1.0 + 0.2 * smooth.g)/log(1.2);\n     smooth.b = log(1.0 + 0.2 * smooth.b)/log(1.2);\n     gl_FragColor = smooth;\n }\n );\n\n@implementation GPUImageCombinationFilter\n\n- (id)init {\n    if (self = [super initWithFragmentShaderFromString:kGPUImageBeautifyFragmentShaderString]) {\n        smoothDegreeUniform = [filterProgram uniformIndex:@\"smoothDegree\"];\n    }\n    self.intensity = 0.0;\n    return self;\n}\n\n- (void)setIntensity:(CGFloat)intensity {\n    _intensity = intensity;\n    [self setFloat:intensity forUniform:smoothDegreeUniform program:filterProgram];\n}\n\n@end\n\n@implementation GPUImageBeautifyFilter\n\n- (id)init;\n{\n    if (!(self = [super init]))\n    {\n        return nil;\n    }\n    \n    // First pass: face smoothing filter\n    bilateralFilter = [[GPUImageBilateralFilter alloc] init];\n    bilateralFilter.distanceNormalizationFactor = 4.0;\n    [self addFilter:bilateralFilter];\n    \n    // Second pass: edge detection\n    cannyEdgeFilter = [[GPUImageCannyEdgeDetectionFilter alloc] init];\n    [self addFilter:cannyEdgeFilter];\n    \n    // Third pass: combination bilateral, edge detection and origin\n    combinationFilter = [[GPUImageCombinationFilter alloc] init];\n    [self addFilter:combinationFilter];\n    \n    // Adjust HSB\n    hsbFilter = [[GPUImageHSBFilter alloc] init];\n    [hsbFilter adjustBrightness:1.1];\n    [hsbFilter adjustSaturation:1.1];\n    \n    [bilateralFilter addTarget:combinationFilter];\n    [cannyEdgeFilter addTarget:combinationFilter];\n    \n    [combinationFilter addTarget:hsbFilter];\n    \n    self.initialFilters = [NSArray arrayWithObjects:bilateralFilter,cannyEdgeFilter,combinationFilter,nil];\n    self.terminalFilter = hsbFilter;\n    \n    return self;\n}\n\n- (void)setIntensity:(CGFloat)intensity\n{\n    [combinationFilter setIntensity:intensity];\n}\n\n#pragma mark -\n#pragma mark GPUImageInput protocol\n\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;\n{\n    for (GPUImageOutput<GPUImageInput> 
*currentFilter in self.initialFilters)\n    {\n        if (currentFilter != self.inputFilterToIgnoreForUpdates)\n        {\n            if (currentFilter == combinationFilter) {\n                textureIndex = 2;\n            }\n            [currentFilter newFrameReadyAtTime:frameTime atIndex:textureIndex];\n        }\n    }\n}\n\n- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;\n{\n    for (GPUImageOutput<GPUImageInput> *currentFilter in self.initialFilters)\n    {\n        if (currentFilter == combinationFilter) {\n            textureIndex = 2;\n        }\n        [currentFilter setInputFramebuffer:newInputFramebuffer atIndex:textureIndex];\n    }\n}\n\n@end\n"
  },
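A minimal still-image sketch for the beautify group above, not part of the repo: it assumes a UIImage input and uses the standard GPUImage capture pattern. `intensity` here is the smoothing degree fed to the internal combination shader.

```objc
#import <GPURenderKit/GPURenderKit.h>

// Illustrative only: runs one UIImage through GPUImageBeautifyFilter and captures the result.
- (UIImage *)beautifiedImageFromImage:(UIImage *)sourceImage
{
    GPUImagePicture *picture = [[GPUImagePicture alloc] initWithImage:sourceImage];
    GPUImageBeautifyFilter *beautifyFilter = [[GPUImageBeautifyFilter alloc] init];
    beautifyFilter.intensity = 0.6;   // 0.0 = no smoothing, 1.0 = fully bilateral-smoothed skin

    [picture addTarget:beautifyFilter];
    [beautifyFilter useNextFrameForImageCapture];
    [picture processImage];

    return [beautifyFilter imageFromCurrentFramebuffer];
}
```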
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/GLImageMovie/GLImageMovie.h",
    "content": "//\n//  GLImageMovie.h\n//  WEOpenGLDemo\n//\n//  Created by 刘海东 on 2019/1/4.\n//  Copyright © 2019 Leo. All rights reserved.\n//\n\n#import \"GPUImage.h\"\nNS_ASSUME_NONNULL_BEGIN\n\n@interface GLImageMovie : GPUImageOutput\n\n@property (nonatomic, assign) BOOL runBenchmark;\n\n- (void)processMovieFrameSampleBuffer:(CMSampleBufferRef)sampleBufferRef;\n\n- (void)endProcessing;\n\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/GLImageMovie/GLImageMovie.m",
    "content": "//\n//  GLImageMovie.m\n//  WEOpenGLDemo\n//\n//  Created by 刘海东 on 2019/1/4.\n//  Copyright © 2019 Leo. All rights reserved.\n//\n\n#import \"GLImageMovie.h\"\n\n@interface GLImageMovie ()\n{\n    CMTime previousFrameTime, processingFrameTime;\n    BOOL isFullYUVRange;\n    \n    int imageBufferWidth, imageBufferHeight;\n    const GLfloat *_preferredConversion;\n    \n    GLuint luminanceTexture, chrominanceTexture;\n    \n    GLProgram *yuvConversionProgram;\n    GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute;\n    GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform;\n    GLint yuvConversionMatrixUniform;\n}\n@end\n\n\n@implementation GLImageMovie\n\n- (instancetype)init\n{\n    self = [super init];\n    if (self) {\n        self.runBenchmark = YES;\n        [self yuvConversionSetup];\n    }\n    return self;\n}\n\n- (void)yuvConversionSetup;\n{\n    \n    \n    if ([GPUImageContext supportsFastTextureUpload])\n    {\n        runSynchronouslyOnVideoProcessingQueue(^{\n            [GPUImageContext useImageProcessingContext];\n            \n            self->_preferredConversion = kColorConversion709;\n            self->isFullYUVRange       = YES;\n            self->yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVFullRangeConversionForLAFragmentShaderString];\n            \n            if (!self->yuvConversionProgram.initialized)\n            {\n                [self->yuvConversionProgram addAttribute:@\"position\"];\n                [self->yuvConversionProgram addAttribute:@\"inputTextureCoordinate\"];\n                \n                if (![self->yuvConversionProgram link])\n                {\n                    NSString *progLog = [self->yuvConversionProgram programLog];\n                    NSLog(@\"Program link log: %@\", progLog);\n                    NSString *fragLog = [self->yuvConversionProgram fragmentShaderLog];\n                    NSLog(@\"Fragment shader compile log: %@\", fragLog);\n                    NSString *vertLog = [self->yuvConversionProgram vertexShaderLog];\n                    NSLog(@\"Vertex shader compile log: %@\", vertLog);\n                    self->yuvConversionProgram = nil;\n                    NSAssert(NO, @\"Filter shader link failed\");\n                }\n            }\n            \n            self->yuvConversionPositionAttribute = [self->yuvConversionProgram attributeIndex:@\"position\"];\n            self->yuvConversionTextureCoordinateAttribute = [self->yuvConversionProgram attributeIndex:@\"inputTextureCoordinate\"];\n            self->yuvConversionLuminanceTextureUniform = [self->yuvConversionProgram uniformIndex:@\"luminanceTexture\"];\n            self->yuvConversionChrominanceTextureUniform = [self->yuvConversionProgram uniformIndex:@\"chrominanceTexture\"];\n            self->yuvConversionMatrixUniform = [self->yuvConversionProgram uniformIndex:@\"colorConversionMatrix\"];\n            \n            [GPUImageContext setActiveShaderProgram:self->yuvConversionProgram];\n            \n            glEnableVertexAttribArray(self->yuvConversionPositionAttribute);\n            glEnableVertexAttribArray(self->yuvConversionTextureCoordinateAttribute);\n        });\n    }\n}\n\n\n\n- (void)convertYUVToRGBOutput;\n{\n    [GPUImageContext setActiveShaderProgram:yuvConversionProgram];\n    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] 
fetchFramebufferForSize:CGSizeMake(imageBufferWidth, imageBufferHeight) onlyTexture:NO];\n    [outputFramebuffer activateFramebuffer];\n    \n    glClearColor(1.0f, 1.0f, 1.0f, 1.0f);\n    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);\n    \n    static const GLfloat squareVertices[] = {\n        -1.0f, -1.0f,\n        1.0f, -1.0f,\n        -1.0f,  1.0f,\n        1.0f,  1.0f,\n    };\n    \n    static const GLfloat textureCoordinates[] = {\n        0.0f, 0.0f,\n        1.0f, 0.0f,\n        0.0f, 1.0f,\n        1.0f, 1.0f,\n    };\n    \n    glActiveTexture(GL_TEXTURE4);\n    glBindTexture(GL_TEXTURE_2D, luminanceTexture);\n    glUniform1i(yuvConversionLuminanceTextureUniform, 4);\n    \n    glActiveTexture(GL_TEXTURE5);\n    glBindTexture(GL_TEXTURE_2D, chrominanceTexture);\n    glUniform1i(yuvConversionChrominanceTextureUniform, 5);\n    \n    glUniformMatrix3fv(yuvConversionMatrixUniform, 1, GL_FALSE, _preferredConversion);\n    \n    glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);\n    glVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);\n    \n    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\n    \n}\n\n\n- (void)processMovieFrameSampleBuffer:(CMSampleBufferRef)sampleBufferRef\n{\n    __weak __typeof(self)weakSelf = self;\n    CFRetain(sampleBufferRef);\n    runSynchronouslyOnVideoProcessingQueue(^{\n        [weakSelf processMovieFrame:sampleBufferRef];\n        CMSampleBufferInvalidate(sampleBufferRef);\n        CFRelease(sampleBufferRef);\n    });\n    \n}\n\n- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;\n{\n    //    CMTimeGetSeconds\n    //    CMTimeSubtract\n    CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(movieSampleBuffer);\n    CVImageBufferRef movieFrame = CMSampleBufferGetImageBuffer(movieSampleBuffer);\n    processingFrameTime = currentSampleTime;\n    [self processMovieFrame:movieFrame withSampleTime:currentSampleTime];\n}\n- (void)processMovieFrame:(CVPixelBufferRef)movieFrame withSampleTime:(CMTime)currentSampleTime\n{\n    \n    int bufferHeight = (int) CVPixelBufferGetHeight(movieFrame);\n    int bufferWidth = (int) CVPixelBufferGetWidth(movieFrame);\n    \n    CFTypeRef colorAttachments = CVBufferGetAttachment(movieFrame, kCVImageBufferYCbCrMatrixKey, NULL);\n    if (colorAttachments != NULL)\n    {\n        if(CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo)\n        {\n            if (isFullYUVRange)\n            {\n                _preferredConversion = kColorConversion601FullRange;\n            }\n            else\n            {\n                _preferredConversion = kColorConversion601;\n            }\n        }\n        else\n        {\n            _preferredConversion = kColorConversion709;\n        }\n    }\n    else\n    {\n        if (isFullYUVRange)\n        {\n            _preferredConversion = kColorConversion601FullRange;\n        }\n        else\n        {\n            _preferredConversion = kColorConversion601;\n        }\n        \n    }\n    \n    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();\n    \n    // Fix issue 1580\n    [GPUImageContext useImageProcessingContext];\n    \n    if ([GPUImageContext supportsFastTextureUpload])\n    {\n        \n        CVOpenGLESTextureRef luminanceTextureRef = NULL;\n        CVOpenGLESTextureRef chrominanceTextureRef = NULL;\n        \n        //        if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])\n      
  if (CVPixelBufferGetPlaneCount(movieFrame) > 0) // Check for YUV planar inputs to do RGB conversion\n        {\n            // fix issue 2221\n            CVPixelBufferLockBaseAddress(movieFrame,0);\n            if ( (imageBufferWidth != bufferWidth) && (imageBufferHeight != bufferHeight) )\n            {\n                imageBufferWidth = bufferWidth;\n                imageBufferHeight = bufferHeight;\n            }\n            \n            CVReturn err;\n            // Y-plane\n            glActiveTexture(GL_TEXTURE4);\n            if ([GPUImageContext deviceSupportsRedTextures])\n            {\n                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);\n            }\n            else\n            {\n                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);\n            }\n            if (err)\n            {\n                NSLog(@\"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d\", err);\n            }\n            \n            luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef);\n            glBindTexture(GL_TEXTURE_2D, luminanceTexture);\n            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);\n            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);\n            \n            // UV-plane\n            glActiveTexture(GL_TEXTURE5);\n            if ([GPUImageContext deviceSupportsRedTextures])\n            {\n                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);\n            }\n            else\n            {\n                err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);\n            }\n            if (err)\n            {\n                NSLog(@\"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d\", err);\n            }\n            \n            chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef);\n            glBindTexture(GL_TEXTURE_2D, chrominanceTexture);\n            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);\n            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);\n            \n            //            if (!allTargetsWantMonochromeData)\n            //            {\n            [self convertYUVToRGBOutput];\n            //            }\n            \n            for (id<GPUImageInput> currentTarget in targets)\n            {\n                NSInteger indexOfObject = [targets indexOfObject:currentTarget];\n                NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n                [currentTarget 
setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];\n                [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];\n            }\n            \n            [outputFramebuffer unlock];\n            \n            for (id<GPUImageInput> currentTarget in targets)\n            {\n                NSInteger indexOfObject = [targets indexOfObject:currentTarget];\n                NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n                if(currentSampleTime.value != 0)\n                {\n                    [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];\n                }\n            }\n            \n            CVPixelBufferUnlockBaseAddress(movieFrame, 0);\n            CFRelease(luminanceTextureRef);\n            CFRelease(chrominanceTextureRef);\n        }\n    }\n    else\n    {\n        // Upload to texture\n        CVPixelBufferLockBaseAddress(movieFrame, 0);\n        \n        outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bufferWidth, bufferHeight) textureOptions:self.outputTextureOptions onlyTexture:YES];\n        \n        glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);\n        // Using BGRA extension to pull in video frame data directly\n        glTexImage2D(GL_TEXTURE_2D,\n                     0,\n                     self.outputTextureOptions.internalFormat,\n                     bufferWidth,\n                     bufferHeight,\n                     0,\n                     self.outputTextureOptions.format,\n                     self.outputTextureOptions.type,\n                     CVPixelBufferGetBaseAddress(movieFrame));\n        \n        for (id<GPUImageInput> currentTarget in targets)\n        {\n            NSInteger indexOfObject = [targets indexOfObject:currentTarget];\n            NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n            [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];\n            [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];\n        }\n        \n        [outputFramebuffer unlock];\n        \n        for (id<GPUImageInput> currentTarget in targets)\n        {\n            NSInteger indexOfObject = [targets indexOfObject:currentTarget];\n            NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n            [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];\n        }\n        CVPixelBufferUnlockBaseAddress(movieFrame, 0);\n    }\n    \n    if (_runBenchmark)\n    {\n        CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);\n        NSLog(@\"GLImageMovie -- Current frame time : %f ms\", 1000.0 * currentFrameTime);\n    }\n}\n\n\n- (void)endProcessing\n{\n    for (id<GPUImageInput> currentTarget in targets)\n    {\n        [currentTarget endProcessing];\n    }\n    \n//    if ([self.delegate respondsToSelector:@selector(moiveFrameDidCompletePlaying:)])\n//    {\n//        [self.delegate moiveFrameDidCompletePlaying:self];\n//    }\n}\n\n\n- (void)setRunBenchmark:(BOOL)runBenchmark\n{\n    _runBenchmark = runBenchmark;\n}\n\n\n@end\n"
  },
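For context, a hedged sketch of feeding GLImageMovie above (not part of the repo): the class does not read a file itself, it only turns CMSampleBufferRefs into GPUImage framebuffers. The AVAssetReader source below is an assumption; any sample-buffer producer works, and a real player would pace frames instead of draining the reader in a tight loop.

```objc
#import <GPURenderKit/GPURenderKit.h>
#import <AVFoundation/AVFoundation.h>

// Illustrative only: decodes a movie with AVAssetReader and pushes frames through a filter chain.
- (void)playMovieAtURL:(NSURL *)movieURL throughFilter:(GPUImageFilter *)filter view:(GPUImageView *)view
{
    GLImageMovie *movieSource = [[GLImageMovie alloc] init];
    [movieSource addTarget:filter];
    [filter addTarget:view];

    AVAsset *asset = [AVAsset assetWithURL:movieURL];
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    NSDictionary *settings = @{(id)kCVPixelBufferPixelFormatTypeKey :
                                   @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)};
    AVAssetReaderTrackOutput *output =
        [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:settings];
    AVAssetReader *reader = [AVAssetReader assetReaderWithAsset:asset error:nil];
    [reader addOutput:output];
    [reader startReading];

    CMSampleBufferRef sampleBuffer = NULL;
    while ((sampleBuffer = [output copyNextSampleBuffer]) != NULL) {
        // No frame pacing here; a real player would sleep until each frame's presentation time.
        [movieSource processMovieFrameSampleBuffer:sampleBuffer];
        CFRelease(sampleBuffer);
    }
    [movieSource endProcessing];
}
```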
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/GLImageMovie/GPUImageMovieWriterFix.h",
    "content": "#import <Foundation/Foundation.h>\n#import <AVFoundation/AVFoundation.h>\n#import \"GPUImageContext.h\"\nextern NSString *const kGPUImageColorSwizzlingFragmentShaderStringFix;\n\n@protocol GPUImageMovieWriterFixDelegate <NSObject>\n\n@optional\n- (void)movieRecordingCompleted;\n- (void)movieRecordingFailedWithError:(NSError*)error;\n\n@end\n\n@interface GPUImageMovieWriterFix : NSObject <GPUImageInput>\n{\n    BOOL alreadyFinishedRecording;\n    \n    NSURL *movieURL;\n    NSString *fileType;\n\tAVAssetWriter *assetWriter;\n\tAVAssetWriterInput *assetWriterAudioInput;\n\tAVAssetWriterInput *assetWriterVideoInput;\n    AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferInput;\n    \n    GPUImageContext *_movieWriterContext;\n    CVPixelBufferRef renderTarget;\n    CVOpenGLESTextureRef renderTexture;\n\n    CGSize videoSize;\n    GPUImageRotationMode inputRotation;\n}\n\n@property(readwrite, nonatomic) BOOL hasAudioTrack;\n@property(readwrite, nonatomic) BOOL shouldPassthroughAudio;\n@property(readwrite, nonatomic) BOOL shouldInvalidateAudioSampleWhenDone;\n@property(nonatomic, copy) void(^completionBlock)(void);\n@property(nonatomic, copy) void(^failureBlock)(NSError*);\n@property(nonatomic, assign) id<GPUImageMovieWriterFixDelegate> delegate;\n@property(readwrite, nonatomic) BOOL encodingLiveVideo;\n@property(nonatomic, copy) BOOL(^videoInputReadyCallback)(void);\n@property(nonatomic, copy) BOOL(^audioInputReadyCallback)(void);\n@property(nonatomic, copy) void(^audioProcessingCallback)(SInt16 **samplesRef, CMItemCount numSamplesInBuffer);\n@property(nonatomic) BOOL enabled;\n@property(nonatomic, readonly) AVAssetWriter *assetWriter;\n@property(nonatomic, readonly) CMTime duration;\n@property(nonatomic, assign) CGAffineTransform transform;\n@property(nonatomic, copy) NSArray *metaData;\n@property(nonatomic, assign, getter = isPaused) BOOL paused;\n@property(nonatomic, retain) GPUImageContext *movieWriterContext;\n\n// Initialization and teardown\n- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize;\n- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize fileType:(NSString *)newFileType outputSettings:(NSDictionary *)outputSettings;\n\n- (void)setHasAudioTrack:(BOOL)hasAudioTrack audioSettings:(NSDictionary *)audioOutputSettings;\n\n// Movie recording\n- (void)startRecording;\n- (void)startRecordingInOrientation:(CGAffineTransform)orientationTransform;\n- (void)finishRecording;\n- (void)finishRecordingWithCompletionHandler:(void (^)(void))handler;\n- (void)cancelRecording;\n- (void)processAudioBuffer:(CMSampleBufferRef)audioBuffer;\n- (void)enableSynchronizationCallbacks;\n\n@end\n"
  },
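A brief, hedged recording sketch for the writer declared above (not part of the repo): the output path and 720x1280 size are placeholders, and audio is ignored to keep the example small. The writer conforms to GPUImageInput, so it can be attached like any other target.

```objc
#import <GPURenderKit/GPURenderKit.h>

// Illustrative only: records whatever the given filter outputs into a temporary .mov file.
- (GPUImageMovieWriterFix *)startRecordingFilter:(GPUImageOutput *)filter
{
    NSString *path = [NSTemporaryDirectory() stringByAppendingPathComponent:@"output.mov"];
    [[NSFileManager defaultManager] removeItemAtPath:path error:nil];

    GPUImageMovieWriterFix *writer =
        [[GPUImageMovieWriterFix alloc] initWithMovieURL:[NSURL fileURLWithPath:path]
                                                    size:CGSizeMake(720.0, 1280.0)];
    writer.encodingLiveVideo = YES;

    [filter addTarget:writer];   // writer conforms to GPUImageInput
    [writer startRecording];
    return writer;               // later: [writer finishRecordingWithCompletionHandler:^{ ... }];
}
```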
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GLFilters/GLImageMovie/GPUImageMovieWriterFix.m",
    "content": "#import \"GPUImageMovieWriterFix.h\"\n\n#import \"GPUImageContext.h\"\n#import \"GLProgram.h\"\n#import \"GPUImageFilter.h\"\n\nNSString *const kGPUImageColorSwizzlingFragmentShaderStringFix = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     gl_FragColor = texture2D(inputImageTexture, textureCoordinate).bgra;\n }\n);\n\nstatic BOOL allowWriteAudio = NO;\n\n@interface GPUImageMovieWriterFix ()\n{\n    GLuint movieFramebuffer, movieRenderbuffer;\n    \n    GLProgram *colorSwizzlingProgram;\n    GLint colorSwizzlingPositionAttribute, colorSwizzlingTextureCoordinateAttribute;\n    GLint colorSwizzlingInputTextureUniform;\n\n    GPUImageFramebuffer *firstInputFramebuffer;\n    \n    BOOL discont;\n    CMTime startTime, previousFrameTime, previousAudioTime;\n    CMTime offsetTime;\n    \n    dispatch_queue_t audioQueue, videoQueue;\n    BOOL audioEncodingIsFinished, videoEncodingIsFinished;\n\n    BOOL isRecording;\n}\n\n// Movie recording\n- (void)initializeMovieWithOutputSettings:(NSMutableDictionary *)outputSettings;\n\n// Frame rendering\n- (void)createDataFBO;\n- (void)destroyDataFBO;\n- (void)setFilterFBO;\n\n- (void)renderAtInternalSizeUsingFramebuffer:(GPUImageFramebuffer *)inputFramebufferToUse;\n\n@end\n\n@implementation GPUImageMovieWriterFix\n\n@synthesize hasAudioTrack = _hasAudioTrack;\n@synthesize encodingLiveVideo = _encodingLiveVideo;\n@synthesize shouldPassthroughAudio = _shouldPassthroughAudio;\n@synthesize completionBlock;\n@synthesize failureBlock;\n@synthesize videoInputReadyCallback;\n@synthesize audioInputReadyCallback;\n@synthesize enabled;\n@synthesize shouldInvalidateAudioSampleWhenDone = _shouldInvalidateAudioSampleWhenDone;\n@synthesize paused = _paused;\n@synthesize movieWriterContext = _movieWriterContext;\n\n@synthesize delegate = _delegate;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize;\n{\n    return [self initWithMovieURL:newMovieURL size:newSize fileType:AVFileTypeQuickTimeMovie outputSettings:nil];\n}\n\n- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize fileType:(NSString *)newFileType outputSettings:(NSMutableDictionary *)outputSettings;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n\n    _shouldInvalidateAudioSampleWhenDone = NO;\n    \n    self.enabled = YES;\n    alreadyFinishedRecording = NO;\n    videoEncodingIsFinished = NO;\n    audioEncodingIsFinished = NO;\n\n    discont = NO;\n    videoSize = newSize;\n    movieURL = newMovieURL;\n    fileType = newFileType;\n    startTime = kCMTimeInvalid;\n    _encodingLiveVideo = [[outputSettings objectForKey:@\"EncodingLiveVideo\"] isKindOfClass:[NSNumber class]] ? 
[[outputSettings objectForKey:@\"EncodingLiveVideo\"] boolValue] : YES;\n    previousFrameTime = kCMTimeNegativeInfinity;\n    previousAudioTime = kCMTimeNegativeInfinity;\n    inputRotation = kGPUImageNoRotation;\n    \n    _movieWriterContext = [[GPUImageContext alloc] init];\n    [_movieWriterContext useSharegroup:[[[GPUImageContext sharedImageProcessingContext] context] sharegroup]];\n\n    runSynchronouslyOnContextQueue(_movieWriterContext, ^{\n        [_movieWriterContext useAsCurrentContext];\n        \n        if ([GPUImageContext supportsFastTextureUpload])\n        {\n            colorSwizzlingProgram = [_movieWriterContext programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImagePassthroughFragmentShaderString];\n        }\n        else\n        {\n            colorSwizzlingProgram = [_movieWriterContext programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageColorSwizzlingFragmentShaderStringFix];\n        }\n        \n        if (!colorSwizzlingProgram.initialized)\n        {\n            [colorSwizzlingProgram addAttribute:@\"position\"];\n            [colorSwizzlingProgram addAttribute:@\"inputTextureCoordinate\"];\n            \n            if (![colorSwizzlingProgram link])\n            {\n                NSString *progLog = [colorSwizzlingProgram programLog];\n                NSLog(@\"Program link log: %@\", progLog);\n                NSString *fragLog = [colorSwizzlingProgram fragmentShaderLog];\n                NSLog(@\"Fragment shader compile log: %@\", fragLog);\n                NSString *vertLog = [colorSwizzlingProgram vertexShaderLog];\n                NSLog(@\"Vertex shader compile log: %@\", vertLog);\n                colorSwizzlingProgram = nil;\n                NSAssert(NO, @\"Filter shader link failed\");\n            }\n        }        \n        \n        colorSwizzlingPositionAttribute = [colorSwizzlingProgram attributeIndex:@\"position\"];\n        colorSwizzlingTextureCoordinateAttribute = [colorSwizzlingProgram attributeIndex:@\"inputTextureCoordinate\"];\n        colorSwizzlingInputTextureUniform = [colorSwizzlingProgram uniformIndex:@\"inputImageTexture\"];\n        \n        [_movieWriterContext setContextShaderProgram:colorSwizzlingProgram];\n        \n        glEnableVertexAttribArray(colorSwizzlingPositionAttribute);\n        glEnableVertexAttribArray(colorSwizzlingTextureCoordinateAttribute);\n    });\n        \n    [self initializeMovieWithOutputSettings:outputSettings];\n\n    return self;\n}\n\n- (void)dealloc;\n{\n    [self destroyDataFBO];\n\n#if !OS_OBJECT_USE_OBJC\n    if( audioQueue != NULL )\n    {\n        dispatch_release(audioQueue);\n    }\n    if( videoQueue != NULL )\n    {\n        dispatch_release(videoQueue);\n    }\n#endif\n}\n\n#pragma mark -\n#pragma mark Movie recording\n\n- (void)initializeMovieWithOutputSettings:(NSDictionary *)outputSettings;\n{\n    isRecording = NO;\n    \n    self.enabled = YES;\n    NSError *error = nil;\n    assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL fileType:fileType error:&error];\n    if (error != nil)\n    {\n        NSLog(@\"Error: %@\", error);\n        if (failureBlock) \n        {\n            failureBlock(error);\n        }\n        else \n        {\n            if(self.delegate && [self.delegate respondsToSelector:@selector(movieRecordingFailedWithError:)])\n            {\n                [self.delegate movieRecordingFailedWithError:error];\n            }\n        }\n    }\n    \n    // Set this to make sure that a 
functional movie is produced, even if the recording is cut off mid-stream. Only the last second should be lost in that case.\n    assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1.0, 1000);\n    \n    // use default output settings if none specified\n    if (outputSettings == nil) \n    {\n        NSMutableDictionary *settings = [[NSMutableDictionary alloc] init];\n        [settings setObject:AVVideoCodecH264 forKey:AVVideoCodecKey];\n        [settings setObject:[NSNumber numberWithInt:videoSize.width] forKey:AVVideoWidthKey];\n        [settings setObject:[NSNumber numberWithInt:videoSize.height] forKey:AVVideoHeightKey];\n        outputSettings = settings;\n    }\n    // custom output settings specified\n    else \n    {\n\t\t__unused NSString *videoCodec = [outputSettings objectForKey:AVVideoCodecKey];\n\t\t__unused NSNumber *width = [outputSettings objectForKey:AVVideoWidthKey];\n\t\t__unused NSNumber *height = [outputSettings objectForKey:AVVideoHeightKey];\n\t\t\n\t\tNSAssert(videoCodec && width && height, @\"OutputSettings is missing required parameters.\");\n        \n        if( [outputSettings objectForKey:@\"EncodingLiveVideo\"] ) {\n            NSMutableDictionary *tmp = [outputSettings mutableCopy];\n            [tmp removeObjectForKey:@\"EncodingLiveVideo\"];\n            outputSettings = tmp;\n        }\n    }\n    \n    /*\n    NSDictionary *videoCleanApertureSettings = [NSDictionary dictionaryWithObjectsAndKeys:\n                                                [NSNumber numberWithInt:videoSize.width], AVVideoCleanApertureWidthKey,\n                                                [NSNumber numberWithInt:videoSize.height], AVVideoCleanApertureHeightKey,\n                                                [NSNumber numberWithInt:0], AVVideoCleanApertureHorizontalOffsetKey,\n                                                [NSNumber numberWithInt:0], AVVideoCleanApertureVerticalOffsetKey,\n                                                nil];\n\n    NSDictionary *videoAspectRatioSettings = [NSDictionary dictionaryWithObjectsAndKeys:\n                                              [NSNumber numberWithInt:3], AVVideoPixelAspectRatioHorizontalSpacingKey,\n                                              [NSNumber numberWithInt:3], AVVideoPixelAspectRatioVerticalSpacingKey,\n                                              nil];\n\n    NSMutableDictionary * compressionProperties = [[NSMutableDictionary alloc] init];\n    [compressionProperties setObject:videoCleanApertureSettings forKey:AVVideoCleanApertureKey];\n    [compressionProperties setObject:videoAspectRatioSettings forKey:AVVideoPixelAspectRatioKey];\n    [compressionProperties setObject:[NSNumber numberWithInt: 2000000] forKey:AVVideoAverageBitRateKey];\n    [compressionProperties setObject:[NSNumber numberWithInt: 16] forKey:AVVideoMaxKeyFrameIntervalKey];\n    [compressionProperties setObject:AVVideoProfileLevelH264Main31 forKey:AVVideoProfileLevelKey];\n    \n    [outputSettings setObject:compressionProperties forKey:AVVideoCompressionPropertiesKey];\n    */\n     \n    assetWriterVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];\n    assetWriterVideoInput.expectsMediaDataInRealTime = _encodingLiveVideo;\n    \n    // You need to use BGRA for the video in order to get realtime encoding. 
I use a color-swizzling shader to line up glReadPixels' normal RGBA output with the movie input's BGRA.\n    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,\n                                                           [NSNumber numberWithInt:videoSize.width], kCVPixelBufferWidthKey,\n                                                           [NSNumber numberWithInt:videoSize.height], kCVPixelBufferHeightKey,\n                                                           nil];\n//    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey,\n//                                                           nil];\n        \n    assetWriterPixelBufferInput = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:assetWriterVideoInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];\n    \n    [assetWriter addInput:assetWriterVideoInput];\n}\n\n- (void)setEncodingLiveVideo:(BOOL) value\n{\n    _encodingLiveVideo = value;\n    if (isRecording) {\n        NSAssert(NO, @\"Can not change Encoding Live Video while recording\");\n    }\n    else\n    {\n        assetWriterVideoInput.expectsMediaDataInRealTime = _encodingLiveVideo;\n        assetWriterAudioInput.expectsMediaDataInRealTime = _encodingLiveVideo;\n    }\n}\n\n- (void)startRecording;\n{\n    alreadyFinishedRecording = NO;\n    startTime = kCMTimeInvalid;\n    runSynchronouslyOnContextQueue(_movieWriterContext, ^{\n        if (audioInputReadyCallback == NULL)\n        {\n            [assetWriter startWriting];\n        }\n    });\n    isRecording = YES;\n    allowWriteAudio = NO;\n\t//    [assetWriter startSessionAtSourceTime:kCMTimeZero];\n}\n\n- (void)startRecordingInOrientation:(CGAffineTransform)orientationTransform;\n{\n\tassetWriterVideoInput.transform = orientationTransform;\n\n\t[self startRecording];\n}\n\n- (void)cancelRecording;\n{\n    if (assetWriter.status == AVAssetWriterStatusCompleted)\n    {\n        return;\n    }\n    \n    isRecording = NO;\n    runSynchronouslyOnContextQueue(_movieWriterContext, ^{\n        alreadyFinishedRecording = YES;\n\n        if( assetWriter.status == AVAssetWriterStatusWriting && ! videoEncodingIsFinished )\n        {\n            videoEncodingIsFinished = YES;\n            [assetWriterVideoInput markAsFinished];\n        }\n        if( assetWriter.status == AVAssetWriterStatusWriting && ! audioEncodingIsFinished )\n        {\n            audioEncodingIsFinished = YES;\n            [assetWriterAudioInput markAsFinished];\n        }\n        [assetWriter cancelWriting];\n    });\n}\n\n- (void)finishRecording;\n{\n    [self finishRecordingWithCompletionHandler:NULL];\n}\n\n- (void)finishRecordingWithCompletionHandler:(void (^)(void))handler;\n{\n    runSynchronouslyOnContextQueue(_movieWriterContext, ^{\n        isRecording = NO;\n        \n        if (assetWriter.status == AVAssetWriterStatusCompleted || assetWriter.status == AVAssetWriterStatusCancelled || assetWriter.status == AVAssetWriterStatusUnknown)\n        {\n            if (handler)\n                runAsynchronouslyOnContextQueue(_movieWriterContext, handler);\n            return;\n        }\n        if( assetWriter.status == AVAssetWriterStatusWriting && ! 
videoEncodingIsFinished )\n        {\n            videoEncodingIsFinished = YES;\n            [assetWriterVideoInput markAsFinished];\n        }\n        if( assetWriter.status == AVAssetWriterStatusWriting && ! audioEncodingIsFinished )\n        {\n            audioEncodingIsFinished = YES;\n            [assetWriterAudioInput markAsFinished];\n        }\n#if (!defined(__IPHONE_6_0) || (__IPHONE_OS_VERSION_MAX_ALLOWED < __IPHONE_6_0))\n        // Not iOS 6 SDK\n        [assetWriter finishWriting];\n        if (handler)\n            runAsynchronouslyOnContextQueue(_movieWriterContext,handler);\n#else\n        // iOS 6 SDK\n        if ([assetWriter respondsToSelector:@selector(finishWritingWithCompletionHandler:)]) {\n            // Running iOS 6\n            [assetWriter finishWritingWithCompletionHandler:(handler ?: ^{ })];\n        }\n        else {\n            // Not running iOS 6\n#pragma clang diagnostic push\n#pragma clang diagnostic ignored \"-Wdeprecated-declarations\"\n            [assetWriter finishWriting];\n#pragma clang diagnostic pop\n            if (handler)\n                runAsynchronouslyOnContextQueue(_movieWriterContext, handler);\n        }\n#endif\n    });\n}\n\n- (void)processAudioBuffer:(CMSampleBufferRef)audioBuffer;\n{\n    if (!isRecording || _paused)\n    {\n        return;\n    }\n    \n    if (!allowWriteAudio) {\n        return;\n    }\n\n    \n//    if (_hasAudioTrack && CMTIME_IS_VALID(startTime))\n    if (_hasAudioTrack)\n    {\n        CFRetain(audioBuffer);\n\n        CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(audioBuffer);\n        \n        if (CMTIME_IS_INVALID(startTime))\n        {\n            runSynchronouslyOnContextQueue(_movieWriterContext, ^{\n                if ((audioInputReadyCallback == NULL) && (assetWriter.status != AVAssetWriterStatusWriting))\n                {\n                    [assetWriter startWriting];\n                }\n                [assetWriter startSessionAtSourceTime:currentSampleTime];\n                startTime = currentSampleTime;\n            });\n        }\n\n        if (!assetWriterAudioInput.readyForMoreMediaData && _encodingLiveVideo)\n        {\n            NSLog(@\"1: Had to drop an audio frame: %@\", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));\n            if (_shouldInvalidateAudioSampleWhenDone)\n            {\n                CMSampleBufferInvalidate(audioBuffer);\n            }\n            CFRelease(audioBuffer);\n            return;\n        }\n        \n        if (discont) {\n            discont = NO;\n            \n            CMTime current;\n            if (offsetTime.value > 0) {\n                current = CMTimeSubtract(currentSampleTime, offsetTime);\n            } else {\n                current = currentSampleTime;\n            }\n            \n            CMTime offset = CMTimeSubtract(current, previousAudioTime);\n            \n            if (offsetTime.value == 0) {\n                offsetTime = offset;\n            } else {\n                offsetTime = CMTimeAdd(offsetTime, offset);\n            }\n        }\n        \n        if (offsetTime.value > 0) {\n            CFRelease(audioBuffer);\n            audioBuffer = [self adjustTime:audioBuffer by:offsetTime];\n            CFRetain(audioBuffer);\n        }\n        \n        // record most recent time so we know the length of the pause\n        currentSampleTime = CMSampleBufferGetPresentationTimeStamp(audioBuffer);\n\n        previousAudioTime = currentSampleTime;\n    
    \n        //if the consumer wants to do something with the audio samples before writing, let him.\n        if (self.audioProcessingCallback) {\n            //need to introspect into the opaque CMBlockBuffer structure to find its raw sample buffers.\n            CMBlockBufferRef buffer = CMSampleBufferGetDataBuffer(audioBuffer);\n            CMItemCount numSamplesInBuffer = CMSampleBufferGetNumSamples(audioBuffer);\n            AudioBufferList audioBufferList;\n            \n            CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(audioBuffer,\n                                                                    NULL,\n                                                                    &audioBufferList,\n                                                                    sizeof(audioBufferList),\n                                                                    NULL,\n                                                                    NULL,\n                                                                    kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,\n                                                                    &buffer\n                                                                    );\n            //passing a live pointer to the audio buffers, try to process them in-place or we might have syncing issues.\n            for (int bufferCount=0; bufferCount < audioBufferList.mNumberBuffers; bufferCount++) {\n                SInt16 *samples = (SInt16 *)audioBufferList.mBuffers[bufferCount].mData;\n                self.audioProcessingCallback(&samples, numSamplesInBuffer);\n            }\n        }\n        \n//        NSLog(@\"Recorded audio sample time: %lld, %d, %lld\", currentSampleTime.value, currentSampleTime.timescale, currentSampleTime.epoch);\n        void(^write)(void) = ^() {\n            while( ! assetWriterAudioInput.readyForMoreMediaData && ! _encodingLiveVideo && ! 
audioEncodingIsFinished ) {\n                NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.5];\n                //NSLog(@\"audio waiting...\");\n                [[NSRunLoop currentRunLoop] runUntilDate:maxDate];\n            }\n            if (!assetWriterAudioInput.readyForMoreMediaData)\n            {\n                NSLog(@\"2: Had to drop an audio frame %@\", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));\n            }\n            else if(assetWriter.status == AVAssetWriterStatusWriting)\n            {\n                if (![assetWriterAudioInput appendSampleBuffer:audioBuffer])\n                    NSLog(@\"Problem appending audio buffer at time: %@\", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));\n            }\n            else\n            {\n                //NSLog(@\"Wrote an audio frame %@\", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));\n            }\n\n            if (_shouldInvalidateAudioSampleWhenDone)\n            {\n                CMSampleBufferInvalidate(audioBuffer);\n            }\n            CFRelease(audioBuffer);\n        };\n//        runAsynchronouslyOnContextQueue(_movieWriterContext, write);\n        if( _encodingLiveVideo )\n\n        {\n            runAsynchronouslyOnContextQueue(_movieWriterContext, write);\n        }\n        else\n        {\n            write();\n        }\n    }\n}\n\n- (void)enableSynchronizationCallbacks;\n{\n    if (videoInputReadyCallback != NULL)\n    {\n        if( assetWriter.status != AVAssetWriterStatusWriting )\n        {\n            [assetWriter startWriting];\n        }\n        videoQueue = dispatch_queue_create(\"com.sunsetlakesoftware.GPUImage.videoReadingQueue\", GPUImageDefaultQueueAttribute());\n        [assetWriterVideoInput requestMediaDataWhenReadyOnQueue:videoQueue usingBlock:^{\n            if( _paused )\n            {\n                //NSLog(@\"video requestMediaDataWhenReadyOnQueue paused\");\n                // if we don't sleep, we'll get called back almost immediately, chewing up CPU\n                usleep(10000);\n                return;\n            }\n            //NSLog(@\"video requestMediaDataWhenReadyOnQueue begin\");\n            while( assetWriterVideoInput.readyForMoreMediaData && ! _paused )\n            {\n                if( videoInputReadyCallback && ! videoInputReadyCallback() && ! videoEncodingIsFinished )\n                {\n                    runAsynchronouslyOnContextQueue(_movieWriterContext, ^{\n                        if( assetWriter.status == AVAssetWriterStatusWriting && ! 
videoEncodingIsFinished )\n                        {\n                            videoEncodingIsFinished = YES;\n                            [assetWriterVideoInput markAsFinished];\n                        }\n                    });\n                }\n            }\n            //NSLog(@\"video requestMediaDataWhenReadyOnQueue end\");\n        }];\n    }\n    \n    if (audioInputReadyCallback != NULL)\n    {\n        audioQueue = dispatch_queue_create(\"com.sunsetlakesoftware.GPUImage.audioReadingQueue\", GPUImageDefaultQueueAttribute());\n        [assetWriterAudioInput requestMediaDataWhenReadyOnQueue:audioQueue usingBlock:^{\n            if( _paused )\n            {\n                //NSLog(@\"audio requestMediaDataWhenReadyOnQueue paused\");\n                // if we don't sleep, we'll get called back almost immediately, chewing up CPU\n                usleep(10000);\n                return;\n            }\n            //NSLog(@\"audio requestMediaDataWhenReadyOnQueue begin\");\n            while( assetWriterAudioInput.readyForMoreMediaData && ! _paused )\n            {\n                if( audioInputReadyCallback && ! audioInputReadyCallback() && ! audioEncodingIsFinished )\n                {\n                    runAsynchronouslyOnContextQueue(_movieWriterContext, ^{\n                        if( assetWriter.status == AVAssetWriterStatusWriting && ! audioEncodingIsFinished )\n                        {\n                            audioEncodingIsFinished = YES;\n                            [assetWriterAudioInput markAsFinished];\n                        }\n                    });\n                }\n            }\n            //NSLog(@\"audio requestMediaDataWhenReadyOnQueue end\");\n        }];\n    }        \n    \n}\n\n#pragma mark -\n#pragma mark Frame rendering\n\n- (void)createDataFBO;\n{\n    glActiveTexture(GL_TEXTURE1);\n    glGenFramebuffers(1, &movieFramebuffer);\n    glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);\n    \n    if ([GPUImageContext supportsFastTextureUpload])\n    {\n        // Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/\n        \n\n        CVPixelBufferPoolCreatePixelBuffer (NULL, [assetWriterPixelBufferInput pixelBufferPool], &renderTarget);\n\n        /* AVAssetWriter will use BT.601 conversion matrix for RGB to YCbCr conversion\n         * regardless of the kCVImageBufferYCbCrMatrixKey value.\n         * Tagging the resulting video file as BT.601, is the best option right now.\n         * Creating a proper BT.709 video is not possible at the moment.\n         */\n        CVBufferSetAttachment(renderTarget, kCVImageBufferColorPrimariesKey, kCVImageBufferColorPrimaries_ITU_R_709_2, kCVAttachmentMode_ShouldPropagate);\n        CVBufferSetAttachment(renderTarget, kCVImageBufferYCbCrMatrixKey, kCVImageBufferYCbCrMatrix_ITU_R_601_4, kCVAttachmentMode_ShouldPropagate);\n        CVBufferSetAttachment(renderTarget, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, kCVAttachmentMode_ShouldPropagate);\n        \n        CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault, [_movieWriterContext coreVideoTextureCache], renderTarget,\n                                                      NULL, // texture attributes\n                                                      GL_TEXTURE_2D,\n                                                      GL_RGBA, // opengl format\n                                                      (int)videoSize.width,\n             
                                         (int)videoSize.height,\n                                                      GL_BGRA, // native iOS format\n                                                      GL_UNSIGNED_BYTE,\n                                                      0,\n                                                      &renderTexture);\n        \n        glBindTexture(CVOpenGLESTextureGetTarget(renderTexture), CVOpenGLESTextureGetName(renderTexture));\n        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);\n        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);\n        \n        glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(renderTexture), 0);\n    }\n    else\n    {\n        glGenRenderbuffers(1, &movieRenderbuffer);\n        glBindRenderbuffer(GL_RENDERBUFFER, movieRenderbuffer);\n        glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA8_OES, (int)videoSize.width, (int)videoSize.height);\n        glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, movieRenderbuffer);\t\n    }\n    \n\t\n\t__unused GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);\n    \n    NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @\"Incomplete filter FBO: %d\", status);\n}\n\n- (void)destroyDataFBO;\n{\n    runSynchronouslyOnContextQueue(_movieWriterContext, ^{\n        [_movieWriterContext useAsCurrentContext];\n\n        if (movieFramebuffer)\n        {\n            glDeleteFramebuffers(1, &movieFramebuffer);\n            movieFramebuffer = 0;\n        }\n        \n        if (movieRenderbuffer)\n        {\n            glDeleteRenderbuffers(1, &movieRenderbuffer);\n            movieRenderbuffer = 0;\n        }\n        \n        if ([GPUImageContext supportsFastTextureUpload])\n        {\n            if (renderTexture)\n            {\n                CFRelease(renderTexture);\n            }\n            if (renderTarget)\n            {\n                CVPixelBufferRelease(renderTarget);\n            }\n            \n        }\n    });\n}\n\n- (void)setFilterFBO;\n{\n    if (!movieFramebuffer)\n    {\n        [self createDataFBO];\n    }\n    \n    glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);\n    \n    glViewport(0, 0, (int)videoSize.width, (int)videoSize.height);\n}\n\n- (void)renderAtInternalSizeUsingFramebuffer:(GPUImageFramebuffer *)inputFramebufferToUse;\n{\n    [_movieWriterContext useAsCurrentContext];\n    [self setFilterFBO];\n    \n    [_movieWriterContext setContextShaderProgram:colorSwizzlingProgram];\n    \n    glClearColor(1.0f, 0.0f, 0.0f, 1.0f);\n    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);\n    \n    // This needs to be flipped to write out to video correctly\n    static const GLfloat squareVertices[] = {\n        -1.0f, -1.0f,\n        1.0f, -1.0f,\n        -1.0f,  1.0f,\n        1.0f,  1.0f,\n    };\n    \n    const GLfloat *textureCoordinates = [GPUImageFilter textureCoordinatesForRotation:inputRotation];\n    \n\tglActiveTexture(GL_TEXTURE4);\n\tglBindTexture(GL_TEXTURE_2D, [inputFramebufferToUse texture]);\n\tglUniform1i(colorSwizzlingInputTextureUniform, 4);\n    \n//    NSLog(@\"Movie writer framebuffer: %@\", inputFramebufferToUse);\n    \n    glVertexAttribPointer(colorSwizzlingPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);\n\tglVertexAttribPointer(colorSwizzlingTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);\n    \n    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\n    glFinish();\n}\n\n#pragma mark 
-\n#pragma mark GPUImageInput protocol\n\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;\n{\n    if (!isRecording || _paused)\n    {\n        [firstInputFramebuffer unlock];\n        return;\n    }\n\n    if (discont) {\n        discont = NO;\n        CMTime current;\n        \n        if (offsetTime.value > 0) {\n            current = CMTimeSubtract(frameTime, offsetTime);\n        } else {\n            current = frameTime;\n        }\n        \n        CMTime offset  = CMTimeSubtract(current, previousFrameTime);\n        \n        if (offsetTime.value == 0) {\n            offsetTime = offset;\n        } else {\n            offsetTime = CMTimeAdd(offsetTime, offset);\n        }\n    }\n    \n    if (offsetTime.value > 0) {\n        frameTime = CMTimeSubtract(frameTime, offsetTime);\n    }\n    \n    // Drop frames forced by images and other things with no time constants\n    // Also, if two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case\n    if ( (CMTIME_IS_INVALID(frameTime)) || (CMTIME_COMPARE_INLINE(frameTime, ==, previousFrameTime)) || (CMTIME_IS_INDEFINITE(frameTime)) ) \n    {\n        [firstInputFramebuffer unlock];\n        return;\n    }\n\n    if (CMTIME_IS_INVALID(startTime))\n    {\n        runSynchronouslyOnContextQueue(_movieWriterContext, ^{\n            if ((videoInputReadyCallback == NULL) && (assetWriter.status != AVAssetWriterStatusWriting))\n            {\n                [assetWriter startWriting];\n            }\n            \n            [assetWriter startSessionAtSourceTime:frameTime];\n            startTime = frameTime;\n        });\n    }\n\n    GPUImageFramebuffer *inputFramebufferForBlock = firstInputFramebuffer;\n    glFinish();\n\n    runAsynchronouslyOnContextQueue(_movieWriterContext, ^{\n        if (!assetWriterVideoInput.readyForMoreMediaData && _encodingLiveVideo)\n        {\n            [inputFramebufferForBlock unlock];\n            NSLog(@\"1: Had to drop a video frame: %@\", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime)));\n            return;\n        }\n        \n        // Render the frame with swizzled colors, so that they can be uploaded quickly as BGRA frames\n        [_movieWriterContext useAsCurrentContext];\n        [self renderAtInternalSizeUsingFramebuffer:inputFramebufferForBlock];\n        \n        CVPixelBufferRef pixel_buffer = NULL;\n        \n        if ([GPUImageContext supportsFastTextureUpload])\n        {\n            pixel_buffer = renderTarget;\n            CVPixelBufferLockBaseAddress(pixel_buffer, 0);\n        }\n        else\n        {\n            CVReturn status = CVPixelBufferPoolCreatePixelBuffer (NULL, [assetWriterPixelBufferInput pixelBufferPool], &pixel_buffer);\n            if ((pixel_buffer == NULL) || (status != kCVReturnSuccess))\n            {\n                CVPixelBufferRelease(pixel_buffer);\n                return;\n            }\n            else\n            {\n                CVPixelBufferLockBaseAddress(pixel_buffer, 0);\n                \n                GLubyte *pixelBufferData = (GLubyte *)CVPixelBufferGetBaseAddress(pixel_buffer);\n                glReadPixels(0, 0, videoSize.width, videoSize.height, GL_RGBA, GL_UNSIGNED_BYTE, pixelBufferData);\n            }\n        }\n        \n        void(^write)(void) = ^() {\n            while( ! assetWriterVideoInput.readyForMoreMediaData && ! _encodingLiveVideo && ! 
videoEncodingIsFinished ) {\n                NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];\n                //            NSLog(@\"video waiting...\");\n                [[NSRunLoop currentRunLoop] runUntilDate:maxDate];\n            }\n            if (!assetWriterVideoInput.readyForMoreMediaData)\n            {\n                NSLog(@\"2: Had to drop a video frame: %@\", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime)));\n            }\n            else if(self.assetWriter.status == AVAssetWriterStatusWriting)\n            {\n                \n                if (![assetWriterPixelBufferInput appendPixelBuffer:pixel_buffer withPresentationTime:frameTime])\n                    NSLog(@\"Problem appending pixel buffer at time: %@\", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime)));\n                allowWriteAudio = YES; \n\n            }\n            else\n            {\n                NSLog(@\"Couldn't write a frame\");\n                //NSLog(@\"Wrote a video frame: %@\", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime)));\n            }\n            CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);\n            \n            previousFrameTime = frameTime;\n            \n            if (![GPUImageContext supportsFastTextureUpload])\n            {\n                CVPixelBufferRelease(pixel_buffer);\n            }\n        };\n        \n        write();\n        \n        [inputFramebufferForBlock unlock];\n    });\n}\n\n- (NSInteger)nextAvailableTextureIndex;\n{\n    return 0;\n}\n\n- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;\n{\n    [newInputFramebuffer lock];\n//    runSynchronouslyOnContextQueue(_movieWriterContext, ^{\n        firstInputFramebuffer = newInputFramebuffer;\n//    });\n}\n\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n    inputRotation = newInputRotation;\n}\n\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n{\n}\n\n- (CGSize)maximumOutputSize;\n{\n    return videoSize;\n}\n\n- (void)endProcessing \n{\n    if (completionBlock) \n    {\n        if (!alreadyFinishedRecording)\n        {\n            alreadyFinishedRecording = YES;\n            completionBlock();\n        }        \n    }\n    else \n    {\n        if (_delegate && [_delegate respondsToSelector:@selector(movieRecordingCompleted)])\n        {\n            [_delegate movieRecordingCompleted];\n        }\n    }\n}\n\n- (BOOL)shouldIgnoreUpdatesToThisTarget;\n{\n    return NO;\n}\n\n- (BOOL)wantsMonochromeInput;\n{\n    return NO;\n}\n\n- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;\n{\n    \n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setHasAudioTrack:(BOOL)newValue\n{\n\t[self setHasAudioTrack:newValue audioSettings:nil];\n}\n\n- (void)setHasAudioTrack:(BOOL)newValue audioSettings:(NSDictionary *)audioOutputSettings;\n{\n    _hasAudioTrack = newValue;\n    \n    if (_hasAudioTrack)\n    {\n        if (_shouldPassthroughAudio)\n        {\n\t\t\t// Do not set any settings so audio will be the same as passthrough\n\t\t\taudioOutputSettings = nil;\n        }\n        else if (audioOutputSettings == nil)\n        {\n            AVAudioSession *sharedAudioSession = [AVAudioSession sharedInstance];\n            double preferredHardwareSampleRate;\n            \n            if ([sharedAudioSession respondsToSelector:@selector(sampleRate)])\n            {\n               
 preferredHardwareSampleRate = [sharedAudioSession sampleRate];\n            }\n            else\n            {\n#pragma clang diagnostic push\n#pragma clang diagnostic ignored \"-Wdeprecated-declarations\"\n                preferredHardwareSampleRate = [[AVAudioSession sharedInstance] currentHardwareSampleRate];\n#pragma clang diagnostic pop\n            }\n            \n            AudioChannelLayout acl;\n            bzero( &acl, sizeof(acl));\n            acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;\n            \n            audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:\n                                         [ NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,\n                                         [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,\n                                         [ NSNumber numberWithFloat: preferredHardwareSampleRate ], AVSampleRateKey,\n                                         [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,\n                                         //[ NSNumber numberWithInt:AVAudioQualityLow], AVEncoderAudioQualityKey,\n                                         [ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,\n                                         nil];\n/*\n            AudioChannelLayout acl;\n            bzero( &acl, sizeof(acl));\n            acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;\n            \n            audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:\n                                   [ NSNumber numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey,\n                                   [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,\n                                   [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,\n                                   [ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,\n                                   [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,\n                                   nil];*/\n        }\n        \n        assetWriterAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];\n        [assetWriter addInput:assetWriterAudioInput];\n        assetWriterAudioInput.expectsMediaDataInRealTime = _encodingLiveVideo;\n    }\n    else\n    {\n        // Remove audio track if it exists\n    }\n}\n\n- (NSArray*)metaData {\n    return assetWriter.metadata;\n}\n\n- (void)setMetaData:(NSArray*)metaData {\n    assetWriter.metadata = metaData;\n}\n \n- (CMTime)duration {\n    if( ! CMTIME_IS_VALID(startTime) )\n        return kCMTimeZero;\n    if( ! CMTIME_IS_NEGATIVE_INFINITY(previousFrameTime) )\n        return CMTimeSubtract(previousFrameTime, startTime);\n    if( ! 
CMTIME_IS_NEGATIVE_INFINITY(previousAudioTime) )\n        return CMTimeSubtract(previousAudioTime, startTime);\n    return kCMTimeZero;\n}\n\n- (CGAffineTransform)transform {\n    return assetWriterVideoInput.transform;\n}\n\n- (void)setTransform:(CGAffineTransform)transform {\n    assetWriterVideoInput.transform = transform;\n}\n\n- (AVAssetWriter*)assetWriter {\n    return assetWriter;\n}\n\n- (void)setPaused:(BOOL)newValue {\n    if (_paused != newValue) {\n        _paused = newValue;\n        \n        if (_paused) {\n            discont = YES;\n        }\n    }\n}\n\n- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef) sample by:(CMTime) offset {\n    CMItemCount count;\n    CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);\n    CMSampleTimingInfo* pInfo = malloc(sizeof(CMSampleTimingInfo) * count);\n    CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count);\n    \n    for (CMItemCount i = 0; i < count; i++) {\n        pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset);\n        pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset);\n    }\n    \n    CMSampleBufferRef sout;\n    CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout);\n    free(pInfo);\n    \n    return sout;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/GPUImage.h",
    "content": "#import \"GLProgram.h\"\n\n// Base classes\n#import \"GPUImageContext.h\"\n#import \"GPUImageOutput.h\"\n#import \"GPUImageView.h\"\n#import \"GPUImageVideoCamera.h\"\n#import \"GPUImageStillCamera.h\"\n#import \"GPUImageMovie.h\"\n#import \"GPUImagePicture.h\"\n#import \"GPUImageRawDataInput.h\"\n#import \"GPUImageRawDataOutput.h\"\n#import \"GPUImageMovieWriter.h\"\n#import \"GPUImageFilterPipeline.h\"\n#import \"GPUImageTextureOutput.h\"\n#import \"GPUImageFilterGroup.h\"\n#import \"GPUImageTextureInput.h\"\n#import \"GPUImageUIElement.h\"\n#import \"GPUImageBuffer.h\"\n#import \"GPUImageFramebuffer.h\"\n#import \"GPUImageFramebufferCache.h\"\n\n// Filters\n#import \"GPUImageFilter.h\"\n#import \"GPUImageTwoInputFilter.h\"\n#import \"GPUImagePixellateFilter.h\"\n#import \"GPUImagePixellatePositionFilter.h\"\n#import \"GPUImageSepiaFilter.h\"\n#import \"GPUImageColorInvertFilter.h\"\n#import \"GPUImageSaturationFilter.h\"\n#import \"GPUImageContrastFilter.h\"\n#import \"GPUImageExposureFilter.h\"\n#import \"GPUImageBrightnessFilter.h\"\n#import \"GPUImageLevelsFilter.h\"\n#import \"GPUImageSharpenFilter.h\"\n#import \"GPUImageGammaFilter.h\"\n#import \"GPUImageSobelEdgeDetectionFilter.h\"\n#import \"GPUImageSketchFilter.h\"\n#import \"GPUImageToonFilter.h\"\n#import \"GPUImageSmoothToonFilter.h\"\n#import \"GPUImageMultiplyBlendFilter.h\"\n#import \"GPUImageDissolveBlendFilter.h\"\n#import \"GPUImageKuwaharaFilter.h\"\n#import \"GPUImageKuwaharaRadius3Filter.h\"\n#import \"GPUImageVignetteFilter.h\"\n#import \"GPUImageGaussianBlurFilter.h\"\n#import \"GPUImageGaussianBlurPositionFilter.h\"\n#import \"GPUImageGaussianSelectiveBlurFilter.h\"\n#import \"GPUImageOverlayBlendFilter.h\"\n#import \"GPUImageDarkenBlendFilter.h\"\n#import \"GPUImageLightenBlendFilter.h\"\n#import \"GPUImageSwirlFilter.h\"\n#import \"GPUImageSourceOverBlendFilter.h\"\n#import \"GPUImageColorBurnBlendFilter.h\"\n#import \"GPUImageColorDodgeBlendFilter.h\"\n#import \"GPUImageScreenBlendFilter.h\"\n#import \"GPUImageExclusionBlendFilter.h\"\n#import \"GPUImageDifferenceBlendFilter.h\"\n#import \"GPUImageSubtractBlendFilter.h\"\n#import \"GPUImageHardLightBlendFilter.h\"\n#import \"GPUImageSoftLightBlendFilter.h\"\n#import \"GPUImageColorBlendFilter.h\"\n#import \"GPUImageHueBlendFilter.h\"\n#import \"GPUImageSaturationBlendFilter.h\"\n#import \"GPUImageLuminosityBlendFilter.h\"\n#import \"GPUImageCropFilter.h\"\n#import \"GPUImageGrayscaleFilter.h\"\n#import \"GPUImageTransformFilter.h\"\n#import \"GPUImageChromaKeyBlendFilter.h\"\n#import \"GPUImageHazeFilter.h\"\n#import \"GPUImageLuminanceThresholdFilter.h\"\n#import \"GPUImagePosterizeFilter.h\"\n#import \"GPUImageBoxBlurFilter.h\"\n#import \"GPUImageAdaptiveThresholdFilter.h\"\n#import \"GPUImageSolarizeFilter.h\"\n#import \"GPUImageUnsharpMaskFilter.h\"\n#import \"GPUImageBulgeDistortionFilter.h\"\n#import \"GPUImagePinchDistortionFilter.h\"\n#import \"GPUImageCrosshatchFilter.h\"\n#import \"GPUImageCGAColorspaceFilter.h\"\n#import \"GPUImagePolarPixellateFilter.h\"\n#import \"GPUImageStretchDistortionFilter.h\"\n#import \"GPUImagePerlinNoiseFilter.h\"\n#import \"GPUImageJFAVoronoiFilter.h\"\n#import \"GPUImageVoronoiConsumerFilter.h\"\n#import \"GPUImageMosaicFilter.h\"\n#import \"GPUImageTiltShiftFilter.h\"\n#import \"GPUImage3x3ConvolutionFilter.h\"\n#import \"GPUImageEmbossFilter.h\"\n#import \"GPUImageCannyEdgeDetectionFilter.h\"\n#import \"GPUImageThresholdEdgeDetectionFilter.h\"\n#import \"GPUImageMaskFilter.h\"\n#import 
\"GPUImageHistogramFilter.h\"\n#import \"GPUImageHistogramGenerator.h\"\n#import \"GPUImageHistogramEqualizationFilter.h\"\n#import \"GPUImagePrewittEdgeDetectionFilter.h\"\n#import \"GPUImageXYDerivativeFilter.h\"\n#import \"GPUImageHarrisCornerDetectionFilter.h\"\n#import \"GPUImageAlphaBlendFilter.h\"\n#import \"GPUImageNormalBlendFilter.h\"\n#import \"GPUImageNonMaximumSuppressionFilter.h\"\n#import \"GPUImageRGBFilter.h\"\n#import \"GPUImageMedianFilter.h\"\n#import \"GPUImageBilateralFilter.h\"\n#import \"GPUImageCrosshairGenerator.h\"\n#import \"GPUImageToneCurveFilter.h\"\n#import \"GPUImageNobleCornerDetectionFilter.h\"\n#import \"GPUImageShiTomasiFeatureDetectionFilter.h\"\n#import \"GPUImageErosionFilter.h\"\n#import \"GPUImageRGBErosionFilter.h\"\n#import \"GPUImageDilationFilter.h\"\n#import \"GPUImageRGBDilationFilter.h\"\n#import \"GPUImageOpeningFilter.h\"\n#import \"GPUImageRGBOpeningFilter.h\"\n#import \"GPUImageClosingFilter.h\"\n#import \"GPUImageRGBClosingFilter.h\"\n#import \"GPUImageColorPackingFilter.h\"\n#import \"GPUImageSphereRefractionFilter.h\"\n#import \"GPUImageMonochromeFilter.h\"\n#import \"GPUImageOpacityFilter.h\"\n#import \"GPUImageHighlightShadowFilter.h\"\n#import \"GPUImageFalseColorFilter.h\"\n#import \"GPUImageHSBFilter.h\"\n#import \"GPUImageHueFilter.h\"\n#import \"GPUImageGlassSphereFilter.h\"\n#import \"GPUImageLookupFilter.h\"\n#import \"GPUImageAmatorkaFilter.h\"\n#import \"GPUImageMissEtikateFilter.h\"\n#import \"GPUImageSoftEleganceFilter.h\"\n#import \"GPUImageAddBlendFilter.h\"\n#import \"GPUImageDivideBlendFilter.h\"\n#import \"GPUImagePolkaDotFilter.h\"\n#import \"GPUImageLocalBinaryPatternFilter.h\"\n#import \"GPUImageColorLocalBinaryPatternFilter.h\"\n#import \"GPUImageLanczosResamplingFilter.h\"\n#import \"GPUImageAverageColor.h\"\n#import \"GPUImageSolidColorGenerator.h\"\n#import \"GPUImageLuminosity.h\"\n#import \"GPUImageAverageLuminanceThresholdFilter.h\"\n#import \"GPUImageWhiteBalanceFilter.h\"\n#import \"GPUImageChromaKeyFilter.h\"\n#import \"GPUImageLowPassFilter.h\"\n#import \"GPUImageHighPassFilter.h\"\n#import \"GPUImageMotionDetector.h\"\n#import \"GPUImageHalftoneFilter.h\"\n#import \"GPUImageThresholdedNonMaximumSuppressionFilter.h\"\n#import \"GPUImageHoughTransformLineDetector.h\"\n#import \"GPUImageParallelCoordinateLineTransformFilter.h\"\n#import \"GPUImageThresholdSketchFilter.h\"\n#import \"GPUImageLineGenerator.h\"\n#import \"GPUImageLinearBurnBlendFilter.h\"\n#import \"GPUImageGaussianBlurPositionFilter.h\"\n#import \"GPUImagePixellatePositionFilter.h\"\n#import \"GPUImageTwoInputCrossTextureSamplingFilter.h\"\n#import \"GPUImagePoissonBlendFilter.h\"\n#import \"GPUImageMotionBlurFilter.h\"\n#import \"GPUImageZoomBlurFilter.h\"\n#import \"GPUImageLaplacianFilter.h\"\n#import \"GPUImageiOSBlurFilter.h\"\n#import \"GPUImageLuminanceRangeFilter.h\"\n#import \"GPUImageDirectionalNonMaximumSuppressionFilter.h\"\n#import \"GPUImageDirectionalSobelEdgeDetectionFilter.h\"\n#import \"GPUImageSingleComponentGaussianBlurFilter.h\"\n#import \"GPUImageThreeInputFilter.h\"\n#import \"GPUImageFourInputFilter.h\"\n#import \"GPUImageWeakPixelInclusionFilter.h\"\n#import \"GPUImageColorConversion.h\"\n#import \"GPUImageColourFASTFeatureDetector.h\"\n#import \"GPUImageColourFASTSamplingOperation.h\"\n\n\n\n\n\n\n//GL\n#import \"GLImageLutFilter.h\"\n#import \"GLImageTwoLutFilter.h\"\n#import \"GLImageStickerFilter.h\"\n#import \"GLImageAddStickerFilter.h\"\n#import \"GLImageAddStickerWithEffectFilter.h\"\n#import 
\"GLImageCircleFilter.h\"\n// Blend\n#import \"GLImageBlendFilter.h\" // This is the base class\n#import \"GLImageMixBlendFilter.h\"\n#import \"GPUImageGaussianBlurFilter.h\"\n#import \"GLImageShapeFilter.h\"\n#import \"GLImageShapeHighDefinitionFilter.h\"\n\n//抖音\n#import \"GLImageThreePartitionFilter.h\"\n#import \"GLImageThreePartitionGroupFilter.h\"\n#import \"GLImageFourPointsMirrorFilter.h\"\n#import \"GLImageGlitchEffectGridFilter.h\"\n#import \"GLImageGlitchEffectLineFilter.h\"\n#import \"GLImageSoulOutFilter.h\"\n#import \"GLImageZoomFilter.h\"\n#import \"GLImageWaterReflectionFilter.h\"\n#import \"GLImageBlurSnapViewFilter.h\"\n#import \"GLImageBlurSnapViewFilterGroup.h\"\n\n//Face\n#import \"GPUImageBeautifyFilter.h\"\n#import \"GLImageFaceChangeFilter.h\"\n#import \"GLImageFaceDetectPointFilter.h\"\n#import \"GLImageFaceChangeFilterGroup.h\"\n\n//GLImageMovie\n#import \"GLImageMovie.h\"\n#import \"GPUImageMovieWriterFix.h\"\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Mac/GPUImage.h",
    "content": "#import <Cocoa/Cocoa.h>\n\n// Base classes\n#import <GPUImage/GLProgram.h>\n#import <GPUImage/GPUImageContext.h>\n\n// Sources\n#import <GPUImage/GPUImageMovie.h>\n#import <GPUImage/GPUImageColorConversion.h>\n#import <GPUImage/GPUImageOutput.h>\n#import <GPUImage/GPUImageAVCamera.h>\n#import <GPUImage/GPUImagePicture.h>\n#import <GPUImage/GPUImageRawDataInput.h>\n#import <GPUImage/GPUImageRawDataOutput.h>\n#import <GPUImage/GPUImageTextureInput.h>\n\n// Filters\n#import <GPUImage/GPUImageFilter.h>\n#import <GPUImage/GPUImageTwoPassFilter.h>\n#import <GPUImage/GPUImage3x3TextureSamplingFilter.h>\n#import <GPUImage/GPUImageContrastFilter.h>\n#import <GPUImage/GPUImageSaturationFilter.h>\n#import <GPUImage/GPUImageBrightnessFilter.h>\n#import <GPUImage/GPUImageLevelsFilter.h>\n#import <GPUImage/GPUImageExposureFilter.h>\n#import <GPUImage/GPUImageRGBFilter.h>\n#import <GPUImage/GPUImageHueFilter.h>\n#import <GPUImage/GPUImageWhiteBalanceFilter.h>\n#import <GPUImage/GPUImageMonochromeFilter.h>\n#import <GPUImage/GPUImagePixellateFilter.h>\n#import <GPUImage/GPUImageSobelEdgeDetectionFilter.h>\n#import <GPUImage/GPUImageSketchFilter.h>\n#import <GPUImage/GPUImageToonFilter.h>\n#import <GPUImage/GPUImageGrayscaleFilter.h>\n#import <GPUImage/GPUImageKuwaharaFilter.h>\n#import <GPUImage/GPUImageFalseColorFilter.h>\n#import <GPUImage/GPUImageSharpenFilter.h>\n#import <GPUImage/GPUImageUnsharpMaskFilter.h>\n#import <GPUImage/GPUImageTwoInputFilter.h>\n#import <GPUImage/GPUImageGaussianBlurFilter.h>\n#import <GPUImage/GPUImageTwoPassTextureSamplingFilter.h>\n#import <GPUImage/GPUImageFilterGroup.h>\n#import <GPUImage/GPUImageTransformFilter.h>\n#import <GPUImage/GPUImageCropFilter.h>\n#import <GPUImage/GPUImageGaussianBlurPositionFilter.h>\n#import <GPUImage/GPUImageGaussianSelectiveBlurFilter.h>\n#import <GPUImage/GPUImageBilateralFilter.h>\n#import <GPUImage/GPUImageBoxBlurFilter.h>\n#import <GPUImage/GPUImageSingleComponentGaussianBlurFilter.h>\n#import <GPUImage/GPUImageMedianFilter.h>\n#import <GPUImage/GPUImageMotionBlurFilter.h>\n#import <GPUImage/GPUImageZoomBlurFilter.h>\n#import <GPUImage/GPUImageAddBlendFilter.h>\n#import <GPUImage/GPUImageColorBurnBlendFilter.h>\n#import <GPUImage/GPUImageDarkenBlendFilter.h>\n#import <GPUImage/GPUImageDivideBlendFilter.h>\n#import <GPUImage/GPUImageLightenBlendFilter.h>\n#import <GPUImage/GPUImageMultiplyBlendFilter.h>\n#import <GPUImage/GPUImageOverlayBlendFilter.h>\n#import <GPUImage/GPUImageColorDodgeBlendFilter.h>\n#import <GPUImage/GPUImageLinearBurnBlendFilter.h>\n#import <GPUImage/GPUImageScreenBlendFilter.h>\n#import <GPUImage/GPUImageColorBlendFilter.h>\n#import <GPUImage/GPUImageExclusionBlendFilter.h>\n#import <GPUImage/GPUImageHueBlendFilter.h>\n#import <GPUImage/GPUImageLuminosityBlendFilter.h>\n#import <GPUImage/GPUImageNormalBlendFilter.h>\n#import <GPUImage/GPUImagePoissonBlendFilter.h>\n#import <GPUImage/GPUImageSaturationBlendFilter.h>\n#import <GPUImage/GPUImageSoftLightBlendFilter.h>\n#import <GPUImage/GPUImageHardLightBlendFilter.h>\n#import <GPUImage/GPUImageSubtractBlendFilter.h>\n#import <GPUImage/GPUImageTwoInputCrossTextureSamplingFilter.h>\n#import <GPUImage/GPUImageDifferenceBlendFilter.h>\n#import <GPUImage/GPUImageDissolveBlendFilter.h>\n#import <GPUImage/GPUImageChromaKeyBlendFilter.h>\n#import <GPUImage/GPUImageMaskFilter.h>\n#import <GPUImage/GPUImageOpacityFilter.h>\n#import <GPUImage/GPUImageAlphaBlendFilter.h>\n#import <GPUImage/GPUImageColorMatrixFilter.h>\n#import 
<GPUImage/GPUImageSepiaFilter.h>\n#import <GPUImage/GPUImageGammaFilter.h>\n#import <GPUImage/GPUImageHazeFilter.h>\n#import <GPUImage/GPUImageToneCurveFilter.h>\n#import <GPUImage/GPUImageHighlightShadowFilter.h>\n#import <GPUImage/GPUImageLookupFilter.h>\n#import <GPUImage/GPUImageAmatorkaFilter.h>\n#import <GPUImage/GPUImageMissEtikateFilter.h>\n#import <GPUImage/GPUImageSoftEleganceFilter.h>\n#import <GPUImage/GPUImage3x3ConvolutionFilter.h>\n#import <GPUImage/GPUImageEmbossFilter.h>\n#import <GPUImage/GPUImageLaplacianFilter.h>\n#import <GPUImage/GPUImageLanczosResamplingFilter.h>\n#import <GPUImage/GPUImageThreeInputFilter.h>\n#import <GPUImage/GPUImageFourInputFilter.h>\n#import <GPUImage/GPUImageColorInvertFilter.h>\n#import <GPUImage/GPUImageHistogramFilter.h>\n#import <GPUImage/GPUImageHistogramGenerator.h>\n#import <GPUImage/GPUImageAverageColor.h>\n#import <GPUImage/GPUImageLuminosity.h>\n#import <GPUImage/GPUImageSolidColorGenerator.h>\n#import <GPUImage/GPUImageAdaptiveThresholdFilter.h>\n#import <GPUImage/GPUImageAverageLuminanceThresholdFilter.h>\n#import <GPUImage/GPUImageLuminanceThresholdFilter.h>\n#import <GPUImage/GPUImageSolarizeFilter.h>\n#import <GPUImage/GPUImageHalftoneFilter.h>\n#import <GPUImage/GPUImagePixellatePositionFilter.h>\n#import <GPUImage/GPUImagePolarPixellateFilter.h>\n#import <GPUImage/GPUImagePolkaDotFilter.h>\n#import <GPUImage/GPUImageCrosshatchFilter.h>\n#import <GPUImage/GPUImageXYDerivativeFilter.h>\n#import <GPUImage/GPUImageDirectionalNonMaximumSuppressionFilter.h>\n#import <GPUImage/GPUImageDirectionalSobelEdgeDetectionFilter.h>\n#import <GPUImage/GPUImageCannyEdgeDetectionFilter.h>\n#import <GPUImage/GPUImagePrewittEdgeDetectionFilter.h>\n#import <GPUImage/GPUImageThresholdEdgeDetectionFilter.h>\n#import <GPUImage/GPUImageHarrisCornerDetectionFilter.h>\n#import <GPUImage/GPUImageNobleCornerDetectionFilter.h>\n#import <GPUImage/GPUImageShiTomasiFeatureDetectionFilter.h>\n#import <GPUImage/GPUImageThresholdedNonMaximumSuppressionFilter.h>\n#import <GPUImage/GPUImageColorPackingFilter.h>\n#import <GPUImage/GPUImageHoughTransformLineDetector.h>\n#import <GPUImage/GPUImageParallelCoordinateLineTransformFilter.h>\n#import <GPUImage/GPUImageCrosshairGenerator.h>\n#import <GPUImage/GPUImageLineGenerator.h>\n#import <GPUImage/GPUImageBuffer.h>\n#import <GPUImage/GPUImageLowPassFilter.h>\n#import <GPUImage/GPUImageHighPassFilter.h>\n#import <GPUImage/GPUImageMotionDetector.h>\n#import <GPUImage/GPUImageThresholdSketchFilter.h>\n#import <GPUImage/GPUImageSmoothToonFilter.h>\n#import <GPUImage/GPUImageTiltShiftFilter.h>\n#import <GPUImage/GPUImageCGAColorspaceFilter.h>\n#import <GPUImage/GPUImagePosterizeFilter.h>\n#import <GPUImage/GPUImageKuwaharaRadius3Filter.h>\n#import <GPUImage/GPUImageChromaKeyFilter.h>\n#import <GPUImage/GPUImageVignetteFilter.h>\n#import <GPUImage/GPUImageBulgeDistortionFilter.h>\n#import <GPUImage/GPUImagePinchDistortionFilter.h>\n#import <GPUImage/GPUImageStretchDistortionFilter.h>\n#import <GPUImage/GPUImageClosingFilter.h>\n#import <GPUImage/GPUImageRGBClosingFilter.h>\n#import <GPUImage/GPUImageDilationFilter.h>\n#import <GPUImage/GPUImageRGBDilationFilter.h>\n#import <GPUImage/GPUImageErosionFilter.h>\n#import <GPUImage/GPUImageRGBErosionFilter.h>\n#import <GPUImage/GPUImageOpeningFilter.h>\n#import <GPUImage/GPUImageRGBOpeningFilter.h>\n#import <GPUImage/GPUImageSphereRefractionFilter.h>\n#import <GPUImage/GPUImageGlassSphereFilter.h>\n#import <GPUImage/GPUImageSwirlFilter.h>\n#import 
<GPUImage/GPUImageJFAVoronoiFilter.h>\n#import <GPUImage/GPUImageVoronoiConsumerFilter.h>\n#import <GPUImage/GPUImageLocalBinaryPatternFilter.h>\n#import <GPUImage/GPUImageColorLocalBinaryPatternFilter.h>\n#import <GPUImage/GPUImageMosaicFilter.h>\n#import <GPUImage/GPUImagePerlinNoiseFilter.h>\n#import <GPUImage/GPUImageWeakPixelInclusionFilter.h>\n#import <GPUImage/GPUImageNonMaximumSuppressionFilter.h>\n#import <GPUImage/GPUImageSourceOverBlendFilter.h>\n#import <GPUImage/GPUImageColourFASTFeatureDetector.h>\n#import <GPUImage/GPUImageColourFASTSamplingOperation.h>\n\n// Outputs\n#import <GPUImage/GPUImageView.h>\n#import <GPUImage/GPUImageMovieWriter.h>\n#import <GPUImage/GPUImageTextureOutput.h>\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Mac/GPUImageAVCamera.h",
    "content": "#import <Foundation/Foundation.h>\n#import <AVFoundation/AVFoundation.h>\n#import <CoreMedia/CoreMedia.h>\n#import \"GPUImageContext.h\"\n#import \"GPUImageOutput.h\"\n\n//Delegate Protocal for Face Detection.\n@protocol GPUImageVideoCameraDelegate <NSObject>\n\n@optional\n- (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer;\n@end\n\n\n/**\n A GPUImageOutput that provides frames from either camera\n*/\n@interface GPUImageAVCamera : GPUImageOutput <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate>\n{\n    NSUInteger numberOfFramesCaptured;\n    CGFloat totalFrameTimeDuringCapture;\n    \n    AVCaptureSession *_captureSession;\n    AVCaptureDevice *_inputCamera;\n    AVCaptureDevice *_microphone;\n    AVCaptureDeviceInput *videoInput;\n\tAVCaptureVideoDataOutput *videoOutput;\n\n    BOOL capturePaused;\n    GPUImageRotationMode outputRotation;\n    dispatch_semaphore_t frameRenderingSemaphore;\n        \n    BOOL captureAsYUV;\n    GLuint luminanceTexture, chrominanceTexture;\n\n    __unsafe_unretained id<GPUImageVideoCameraDelegate> _delegate;\n}\n\n/// The AVCaptureSession used to capture from the camera\n@property(readonly, retain, nonatomic) AVCaptureSession *captureSession;\n\n/// This enables the capture session preset to be changed on the fly\n@property (readwrite, nonatomic, copy) NSString *captureSessionPreset;\n\n/// This sets the frame rate of the camera (iOS 5 and above only)\n/**\n Setting this to 0 or below will set the frame rate back to the default setting for a particular preset.\n */\n@property (readwrite) NSInteger frameRate;\n\n/// Easy way to tell if front-facing camera is present on device\n@property (readonly, getter = isFrontFacingCameraPresent) BOOL frontFacingCameraPresent;\n\n/// This enables the benchmarking mode, which logs out instantaneous and average frame times to the console\n@property(readwrite, nonatomic) BOOL runBenchmark;\n\n/// Use this property to manage camera settings. Focus point, exposure point, etc.\n@property(readonly) AVCaptureDevice *inputCamera;\n\n/// These properties determine whether or not the two camera orientations should be mirrored. 
By default, both are NO.\n@property(readwrite, nonatomic) BOOL horizontallyMirrorFrontFacingCamera, horizontallyMirrorRearFacingCamera;\n\n@property(nonatomic, assign) id<GPUImageVideoCameraDelegate> delegate;\n\n/// @name Initialization and teardown\n\n+ (NSArray *)connectedCameraDevices;\n\n/** Begin a capture session\n \n See AVCaptureSession for acceptable values\n \n @param sessionPreset Session preset to use\n @param cameraPosition Camera to capture from\n */\n- (id)initWithDeviceUniqueID:(NSString *)deviceUniqueID;\n- (id)initWithSessionPreset:(NSString *)sessionPreset deviceUniqueID:(NSString *)deviceUniqueID;\n- (id)initWithSessionPreset:(NSString *)sessionPreset cameraDevice:(AVCaptureDevice *)cameraDevice;\n\n/** Tear down the capture session\n */\n- (void)removeInputsAndOutputs;\n\n/// @name Manage the camera video stream\n\n/** Start camera capturing\n */\n- (void)startCameraCapture;\n\n/** Stop camera capturing\n */\n- (void)stopCameraCapture;\n\n/** Pause camera capturing\n */\n- (void)pauseCameraCapture;\n\n/** Resume camera capturing\n */\n- (void)resumeCameraCapture;\n\n/** Process a video sample\n @param sampleBuffer Buffer to process\n */\n- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;\n\n/** Process an audio sample\n @param sampleBuffer Buffer to process\n */\n- (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;\n\n/** Get the position (front, rear) of the source camera\n */\n- (AVCaptureDevicePosition)cameraPosition;\n\n/** Get the AVCaptureConnection of the source camera\n */\n- (AVCaptureConnection *)videoCaptureConnection;\n\n/** This flips between the front and rear cameras\n */\n- (void)rotateCamera;\n\n/// @name Benchmarking\n\n/** When benchmarking is enabled, this will keep a running average of the time from uploading, processing, and final recording or display\n */\n- (CGFloat)averageFrameDurationDuringCapture;\n\n- (void)printSupportedPixelFormats;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Mac/GPUImageAVCamera.m",
    "content": "#import \"GPUImageAVCamera.h\"\n#import \"GPUImageMovieWriter.h\"\n#import \"GPUImageFilter.h\"\n#import \"GPUImageColorConversion.h\"\n\n#pragma mark -\n#pragma mark Private methods and instance variables\n\n@interface GPUImageAVCamera () \n{\n\tAVCaptureDeviceInput *audioInput;\n\tAVCaptureAudioDataOutput *audioOutput;\n    NSDate *startingCaptureTime;\n\t\n\tNSInteger _frameRate;\n    \n    dispatch_queue_t cameraProcessingQueue, audioProcessingQueue;\n    \n    GLProgram *yuvConversionProgram;\n    GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute;\n    GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform;\n    \n    int imageBufferWidth, imageBufferHeight;\n}\n\n- (void)updateOrientationSendToTargets;\n- (void)convertYUVToRGBOutput;\n\n@end\n\n@implementation GPUImageAVCamera\n\n@synthesize captureSessionPreset = _captureSessionPreset;\n@synthesize captureSession = _captureSession;\n@synthesize inputCamera = _inputCamera;\n@synthesize runBenchmark = _runBenchmark;\n@synthesize delegate = _delegate;\n@synthesize horizontallyMirrorFrontFacingCamera = _horizontallyMirrorFrontFacingCamera, horizontallyMirrorRearFacingCamera = _horizontallyMirrorRearFacingCamera;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n+ (NSArray *)connectedCameraDevices;\n{\n    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];\n    return devices;\n}\n\n- (id)init;\n{\n    if (!(self = [self initWithSessionPreset:AVCaptureSessionPreset640x480 cameraDevice:nil]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n- (id)initWithDeviceUniqueID:(NSString *)deviceUniqueID;\n{\n    if (!(self = [self initWithSessionPreset:AVCaptureSessionPreset640x480 deviceUniqueID:deviceUniqueID]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n- (id)initWithSessionPreset:(NSString *)sessionPreset deviceUniqueID:(NSString *)deviceUniqueID;\n{\n    if (!(self = [self initWithSessionPreset:sessionPreset cameraDevice:[AVCaptureDevice deviceWithUniqueID:deviceUniqueID]]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n- (id)initWithSessionPreset:(NSString *)sessionPreset cameraDevice:(AVCaptureDevice *)cameraDevice;\n{\n\tif (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n\tcameraProcessingQueue = dispatch_queue_create(\"com.sunsetlakesoftware.GPUImage.cameraProcessingQueue\", NULL);\n\taudioProcessingQueue = dispatch_queue_create(\"com.sunsetlakesoftware.GPUImage.audioProcessingQueue\", NULL);\n    frameRenderingSemaphore = dispatch_semaphore_create(1);\n\n\t_frameRate = 0; // This will not set frame rate unless this value gets set to 1 or above\n    _runBenchmark = NO;\n    capturePaused = NO;\n    outputRotation = kGPUImageNoRotation;\n//    captureAsYUV = YES;\n    captureAsYUV = NO;\n\n    runSynchronouslyOnVideoProcessingQueue(^{\n        \n        if (captureAsYUV)\n        {\n            [GPUImageContext useImageProcessingContext];\n//            if ([GPUImageContext deviceSupportsRedTextures])\n//            {\n//                yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForRGFragmentShaderString];\n//            }\n//            else\n//            {\n                yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString 
fragmentShaderString:kGPUImageYUVVideoRangeConversionForLAFragmentShaderString];\n//            }\n\n            if (!yuvConversionProgram.initialized)\n            {\n                [yuvConversionProgram addAttribute:@\"position\"];\n                [yuvConversionProgram addAttribute:@\"inputTextureCoordinate\"];\n                \n                if (![yuvConversionProgram link])\n                {\n                    NSString *progLog = [yuvConversionProgram programLog];\n                    NSLog(@\"Program link log: %@\", progLog);\n                    NSString *fragLog = [yuvConversionProgram fragmentShaderLog];\n                    NSLog(@\"Fragment shader compile log: %@\", fragLog);\n                    NSString *vertLog = [yuvConversionProgram vertexShaderLog];\n                    NSLog(@\"Vertex shader compile log: %@\", vertLog);\n                    yuvConversionProgram = nil;\n                    NSAssert(NO, @\"Filter shader link failed\");\n                }\n            }\n            \n            yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@\"position\"];\n            yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@\"inputTextureCoordinate\"];\n            yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@\"luminanceTexture\"];\n            yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@\"chrominanceTexture\"];\n            \n            [GPUImageContext setActiveShaderProgram:yuvConversionProgram];\n            \n            glEnableVertexAttribArray(yuvConversionPositionAttribute);\n            glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute);\n        }\n    });\n    \n\t// Grab the back-facing or front-facing camera\n    _inputCamera = nil;\n    \n    if (cameraDevice == nil)\n    {\n        _inputCamera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];\n    }\n    else\n    {\n        _inputCamera = cameraDevice;\n    }\n    \n    if (!_inputCamera) {\n        return nil;\n    }\n    \n\t// Create the capture session\n\t_captureSession = [[AVCaptureSession alloc] init];\n\t\n    [_captureSession beginConfiguration];\n    \n\t// Add the video input\t\n\tNSError *error = nil;\n\tvideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:_inputCamera error:&error];\n\tif ([_captureSession canAddInput:videoInput]) \n\t{\n\t\t[_captureSession addInput:videoInput];\n\t}\n\t\n\t// Add the video frame output\t\n\tvideoOutput = [[AVCaptureVideoDataOutput alloc] init];\n\t[videoOutput setAlwaysDiscardsLateVideoFrames:NO];\n    \n//    NSLog(@\"Camera: %@\", _inputCamera);\n//    [self printSupportedPixelFormats];\n    \n//    if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])\n    if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])\n    {\n        BOOL supportsFullYUVRange = NO;\n        NSArray *supportedPixelFormats = videoOutput.availableVideoCVPixelFormatTypes;\n        for (NSNumber *currentPixelFormat in supportedPixelFormats)\n        {            \n            if ([currentPixelFormat intValue] == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)\n            {\n                supportsFullYUVRange = YES;\n            }\n        }\n        \n        if (supportsFullYUVRange)\n        {\n            [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];\n        }\n        
else\n        {\n            [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];\n        }\n    }\n    else\n    {\n        // Despite returning a longer list of supported pixel formats, only RGB, RGBA, BGRA, and the YUV 4:2:2 variants seem to return cleanly\n        [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];\n//        [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_422YpCbCr8_yuvs] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];\n    }\n    \n    [videoOutput setSampleBufferDelegate:self queue:cameraProcessingQueue];\n//    [videoOutput setSampleBufferDelegate:self queue:[GPUImageContext sharedContextQueue]];\n\tif ([_captureSession canAddOutput:videoOutput])\n\t{\n\t\t[_captureSession addOutput:videoOutput];\n\t}\n\telse\n\t{\n\t\tNSLog(@\"Couldn't add video output\");\n        return nil;\n\t}\n    \n\t_captureSessionPreset = sessionPreset;\n    [_captureSession setSessionPreset:_captureSessionPreset];\n\n// This will let you get 60 FPS video from the 720p preset on an iPhone 4S, but only that device and that preset\n//    AVCaptureConnection *conn = [videoOutput connectionWithMediaType:AVMediaTypeVideo];\n//    \n//    if (conn.supportsVideoMinFrameDuration)\n//        conn.videoMinFrameDuration = CMTimeMake(1,60);\n//    if (conn.supportsVideoMaxFrameDuration)\n//        conn.videoMaxFrameDuration = CMTimeMake(1,60);\n    \n    [_captureSession commitConfiguration];\n    \n\treturn self;\n}\n\n- (void)dealloc \n{\n    [self stopCameraCapture];\n    [videoOutput setSampleBufferDelegate:nil queue:dispatch_get_main_queue()];\n    [audioOutput setSampleBufferDelegate:nil queue:dispatch_get_main_queue()];\n    \n    [self removeInputsAndOutputs];\n    \n// ARC forbids explicit message send of 'release'; since iOS 6 even for dispatch_release() calls: stripping it out in that case is required.\n//#if ( (__IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_6_0) || (!defined(__IPHONE_6_0)) )\n#if __MAC_OS_X_VERSION_MAX_ALLOWED <= __MAC_10_7\n    if (cameraProcessingQueue != NULL)\n    {\n        dispatch_release(cameraProcessingQueue);\n    }\n\n    if (audioProcessingQueue != NULL)\n    {\n        dispatch_release(audioProcessingQueue);\n    }\n    \n    if (frameRenderingSemaphore != NULL)\n    {\n        dispatch_release(frameRenderingSemaphore);\n    }\n#endif\n}\n\n- (void)removeInputsAndOutputs;\n{\n    [_captureSession removeInput:videoInput];\n    [_captureSession removeOutput:videoOutput];\n    if (_microphone != nil)\n    {\n        [_captureSession removeInput:audioInput];\n        [_captureSession removeOutput:audioOutput];\n    }\n}\n\n#pragma mark -\n#pragma mark Managing targets\n\n- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;\n{\n    [super addTarget:newTarget atTextureLocation:textureLocation];\n    \n    [newTarget setInputRotation:outputRotation atIndex:textureLocation];\n}\n\n#pragma mark -\n#pragma mark Manage the camera video stream\n\n- (void)startCameraCapture;\n{\n    if (![_captureSession isRunning])\n\t{\n        startingCaptureTime = [NSDate date];\n\t\t[_captureSession startRunning];\n\t};\n}\n\n- (void)stopCameraCapture;\n{\n    if ([_captureSession isRunning])\n    {\n        [_captureSession stopRunning];\n    
}\n}\n\n- (void)pauseCameraCapture;\n{\n    capturePaused = YES;\n}\n\n- (void)resumeCameraCapture;\n{\n    capturePaused = NO;\n}\n\n- (void)rotateCamera\n{\n\tif (self.frontFacingCameraPresent == NO)\n\t\treturn;\n\t\n    NSError *error;\n    AVCaptureDeviceInput *newVideoInput;\n    AVCaptureDevicePosition currentCameraPosition = [[videoInput device] position];\n    \n    if (currentCameraPosition == AVCaptureDevicePositionBack)\n    {\n        currentCameraPosition = AVCaptureDevicePositionFront;\n    }\n    else\n    {\n        currentCameraPosition = AVCaptureDevicePositionBack;\n    }\n    \n    AVCaptureDevice *backFacingCamera = nil;\n    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];\n\tfor (AVCaptureDevice *device in devices) \n\t{\n\t\tif ([device position] == currentCameraPosition)\n\t\t{\n\t\t\tbackFacingCamera = device;\n\t\t}\n\t}\n    newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:backFacingCamera error:&error];\n    \n    if (newVideoInput != nil)\n    {\n        [_captureSession beginConfiguration];\n        \n        [_captureSession removeInput:videoInput];\n        if ([_captureSession canAddInput:newVideoInput])\n        {\n            [_captureSession addInput:newVideoInput];\n            videoInput = newVideoInput;\n        }\n        else\n        {\n            [_captureSession addInput:videoInput];\n        }\n        //captureSession.sessionPreset = oriPreset;\n        [_captureSession commitConfiguration];\n    }\n    \n    _inputCamera = backFacingCamera;\n}\n\n- (AVCaptureDevicePosition)cameraPosition \n{\n    return [[videoInput device] position];\n}\n\n- (BOOL)isFrontFacingCameraPresent;\n{\n\tNSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];\n\t\n\tfor (AVCaptureDevice *device in devices)\n\t{\n\t\tif ([device position] == AVCaptureDevicePositionFront)\n\t\t\treturn YES;\n\t}\n\t\n\treturn NO;\n}\n\n- (void)setCaptureSessionPreset:(NSString *)captureSessionPreset;\n{\n\t[_captureSession beginConfiguration];\n\t\n\t_captureSessionPreset = captureSessionPreset;\n\t[_captureSession setSessionPreset:_captureSessionPreset];\n\t\n\t[_captureSession commitConfiguration];\n}\n\n- (void)setFrameRate:(NSInteger)frameRate;\n{\n\t_frameRate = frameRate;\n\t\n\tif (_frameRate > 0)\n\t{\n\t\tfor (AVCaptureConnection *connection in videoOutput.connections)\n\t\t{\n\t\t\tif ([connection respondsToSelector:@selector(setVideoMinFrameDuration:)])\n\t\t\t\tconnection.videoMinFrameDuration = CMTimeMake(1, (int32_t)_frameRate);\n\t\t\t\n\t\t}\n\t}\n\telse\n\t{\n\t\tfor (AVCaptureConnection *connection in videoOutput.connections)\n\t\t{\n\t\t\tif ([connection respondsToSelector:@selector(setVideoMinFrameDuration:)])\n\t\t\t\tconnection.videoMinFrameDuration = kCMTimeInvalid; // This sets videoMinFrameDuration back to default\n\t\t}\n\t}\n}\n\n- (NSInteger)frameRate;\n{\n\treturn _frameRate;\n}\n\n- (AVCaptureConnection *)videoCaptureConnection {\n    for (AVCaptureConnection *connection in [videoOutput connections] ) {\n\t\tfor ( AVCaptureInputPort *port in [connection inputPorts] ) {\n\t\t\tif ( [[port mediaType] isEqual:AVMediaTypeVideo] ) {\n\t\t\t\treturn connection;\n\t\t\t}\n\t\t}\n\t}\n    \n    return nil;\n}\n\n#define INITIALFRAMESTOIGNOREFORBENCHMARK 5\n\n- (void)updateTargetsForVideoCameraUsingCacheTextureAtWidth:(int)bufferWidth height:(int)bufferHeight time:(CMTime)currentTime;\n{\n    // First, update all the framebuffers in the targets\n    for (id<GPUImageInput> currentTarget in targets)\n    
{\n        if ([currentTarget enabled])\n        {\n            NSInteger indexOfObject = [targets indexOfObject:currentTarget];\n            NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n            \n            if (currentTarget != self.targetToIgnoreForUpdates)\n            {\n                [currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];\n                [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:textureIndexOfTarget];\n                \n                if ([currentTarget wantsMonochromeInput] && captureAsYUV)\n                {\n                    [currentTarget setCurrentlyReceivingMonochromeInput:YES];\n                    // TODO: Replace optimization for monochrome output\n                    [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];\n                }\n                else\n                {\n                    [currentTarget setCurrentlyReceivingMonochromeInput:NO];\n                    [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];\n                }\n            }\n            else\n            {\n                [currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];\n                [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];\n            }\n        }\n    }\n    \n    // Then release our hold on the local framebuffer to send it back to the cache as soon as it's no longer needed\n    [outputFramebuffer unlock];\n    \n    // Finally, trigger rendering as needed\n    for (id<GPUImageInput> currentTarget in targets)\n    {\n        if ([currentTarget enabled])\n        {\n            NSInteger indexOfObject = [targets indexOfObject:currentTarget];\n            NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n            \n            if (currentTarget != self.targetToIgnoreForUpdates)\n            {\n                [currentTarget newFrameReadyAtTime:currentTime atIndex:textureIndexOfTarget];\n            }\n        }\n    }\n}\n\n- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;\n{\n    if (capturePaused)\n    {\n        return;\n    }\n    \n    CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();\n    CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer);\n    GLsizei bufferWidth = (GLsizei)CVPixelBufferGetWidth(cameraFrame);\n    GLsizei bufferHeight = (GLsizei)CVPixelBufferGetHeight(cameraFrame);\n    \n\tCMTime currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);\n\n    [GPUImageContext useImageProcessingContext];\n\n    CVPixelBufferLockBaseAddress(cameraFrame, 0);\n    \n    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bufferWidth, bufferHeight) onlyTexture:YES];\n    \n    glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);\n    \n    //        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bufferWidth, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame));\n    \n    // Using BGRA extension to pull in video frame data directly\n//    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, bytesPerRow / 3, bufferHeight, 0, GL_RGB, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame));\n//\tglTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bufferWidth, bufferHeight, 0, GL_YCBCR_422_APPLE, GL_UNSIGNED_SHORT_8_8_REV_APPLE, 
CVPixelBufferGetBaseAddress(cameraFrame));\n    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bufferWidth, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame));\n\n    [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:bufferWidth height:bufferHeight time:currentTime];\n\n//    for (id<GPUImageInput> currentTarget in targets)\n//    {\n//        if ([currentTarget enabled])\n//        {\n//            if (currentTarget != self.targetToIgnoreForUpdates)\n//            {\n//                NSInteger indexOfObject = [targets indexOfObject:currentTarget];\n//                NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n//                \n//                [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:textureIndexOfTarget];\n//                [currentTarget newFrameReadyAtTime:currentTime atIndex:textureIndexOfTarget];\n//            }\n//        }\n//    }\n    \n    CVPixelBufferUnlockBaseAddress(cameraFrame, 0);\n    \n    if (_runBenchmark)\n    {\n        numberOfFramesCaptured++;\n        if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK)\n        {\n            CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);\n            totalFrameTimeDuringCapture += currentFrameTime;\n            NSLog(@\"Average frame time : %f ms\", [self averageFrameDurationDuringCapture]);\n            NSLog(@\"Current frame time : %f ms\", 1000.0 * currentFrameTime);\n        }\n    }\n}\n\n- (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;\n{\n    [self.audioEncodingTarget processAudioBuffer:sampleBuffer]; \n}\n\n- (void)convertYUVToRGBOutput;\n{\n    [GPUImageContext setActiveShaderProgram:yuvConversionProgram];\n\n    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(imageBufferWidth, imageBufferHeight) textureOptions:self.outputTextureOptions onlyTexture:NO];\n    [outputFramebuffer activateFramebuffer];\n    \n    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);\n    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);\n    \n    static const GLfloat squareVertices[] = {\n        -1.0f, -1.0f,\n        1.0f, -1.0f,\n        -1.0f,  1.0f,\n        1.0f,  1.0f,\n    };\n    \n    static const GLfloat textureCoordinates[] = {\n        0.0f, 0.0f,\n        1.0f, 0.0f,\n        0.0f, 1.0f,\n        1.0f, 1.0f,\n    };\n    \n\tglActiveTexture(GL_TEXTURE4);\n\tglBindTexture(GL_TEXTURE_2D, luminanceTexture);\n\tglUniform1i(yuvConversionLuminanceTextureUniform, 4);\n\n    glActiveTexture(GL_TEXTURE5);\n\tglBindTexture(GL_TEXTURE_2D, chrominanceTexture);\n\tglUniform1i(yuvConversionChrominanceTextureUniform, 5);\n\n    glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);\n\tglVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);\n    \n    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\n}\n\n#pragma mark -\n#pragma mark Benchmarking\n\n- (CGFloat)averageFrameDurationDuringCapture;\n{\n    return (totalFrameTimeDuringCapture / (CGFloat)(numberOfFramesCaptured - INITIALFRAMESTOIGNOREFORBENCHMARK)) * 1000.0;\n}\n\n#pragma mark -\n#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate\n\n- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection\n{\n    if (captureOutput == audioOutput)\n    {\n//        if 
(dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0)\n//        {\n//            return;\n//        }\n\n        CFRetain(sampleBuffer);\n        runAsynchronouslyOnVideoProcessingQueue(^{\n            [self processAudioSampleBuffer:sampleBuffer];\n            CFRelease(sampleBuffer);\n//            dispatch_semaphore_signal(frameRenderingSemaphore);\n        });\n    }\n    else\n    {\n        if (dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0)\n        {\n            return;\n        }\n\n        CFRetain(sampleBuffer);\n        runAsynchronouslyOnVideoProcessingQueue(^{\n            //Feature Detection Hook.\n            if (self.delegate && [self.delegate respondsToSelector:@selector(willOutputSampleBuffer:)])\n            {\n                [self.delegate willOutputSampleBuffer:sampleBuffer];\n            }\n            \n            [self processVideoSampleBuffer:sampleBuffer];\n            \n            CFRelease(sampleBuffer);\n            dispatch_semaphore_signal(frameRenderingSemaphore);\n        });\n    }\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setAudioEncodingTarget:(GPUImageMovieWriter *)newValue;\n{\n    runSynchronouslyOnVideoProcessingQueue(^{\n        [_captureSession beginConfiguration];\n        \n        if (newValue == nil)\n        {\n            if (audioOutput)\n            {\n                [_captureSession removeInput:audioInput];\n                [_captureSession removeOutput:audioOutput];\n                audioInput = nil;\n                audioOutput = nil;\n                _microphone = nil;\n            }\n        }\n        else\n        {\n            _microphone = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];\n            audioInput = [AVCaptureDeviceInput deviceInputWithDevice:_microphone error:nil];\n            if ([_captureSession canAddInput:audioInput])\n            {\n                [_captureSession addInput:audioInput];\n            }\n            audioOutput = [[AVCaptureAudioDataOutput alloc] init];\n            \n            if ([_captureSession canAddOutput:audioOutput])\n            {\n                [_captureSession addOutput:audioOutput];\n            }\n            else\n            {\n                NSLog(@\"Couldn't add audio output\");\n            }\n            [audioOutput setSampleBufferDelegate:self queue:audioProcessingQueue];\n        }\n        \n        [_captureSession commitConfiguration];\n        \n        [super setAudioEncodingTarget:newValue];\n    });\n}\n\n- (void)updateOrientationSendToTargets;\n{\n    runSynchronouslyOnVideoProcessingQueue(^{\n        \n        //    From the iOS 5.0 release notes:\n        //    In previous iOS versions, the front-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeLeft and the back-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeRight.\n                \n        outputRotation = kGPUImageNoRotation;\n        for (id<GPUImageInput> currentTarget in targets)\n        {\n            NSInteger indexOfObject = [targets indexOfObject:currentTarget];\n            [currentTarget setInputRotation:outputRotation atIndex:[[targetTextureIndices objectAtIndex:indexOfObject] integerValue]];\n        }\n    });\n}\n\n- (void)setHorizontallyMirrorFrontFacingCamera:(BOOL)newValue\n{\n    _horizontallyMirrorFrontFacingCamera = newValue;\n    [self updateOrientationSendToTargets];\n}\n\n- (void)setHorizontallyMirrorRearFacingCamera:(BOOL)newValue\n{\n    
_horizontallyMirrorRearFacingCamera = newValue;\n    [self updateOrientationSendToTargets];\n}\n\n- (void)printSupportedPixelFormats;\n{\n    NSArray *supportedPixelFormats = videoOutput.availableVideoCVPixelFormatTypes;\n    for (NSNumber *currentPixelFormat in supportedPixelFormats)\n    {\n        NSString *pixelFormatName = nil;\n        \n        switch([currentPixelFormat intValue])\n        {\n            case kCVPixelFormatType_1Monochrome: pixelFormatName = @\"kCVPixelFormatType_1Monochrome\"; break;\n            case kCVPixelFormatType_2Indexed: pixelFormatName = @\"kCVPixelFormatType_2Indexed\"; break;\n            case kCVPixelFormatType_4Indexed: pixelFormatName = @\"kCVPixelFormatType_4Indexed\"; break;\n            case kCVPixelFormatType_8Indexed: pixelFormatName = @\"kCVPixelFormatType_8Indexed\"; break;\n            case kCVPixelFormatType_1IndexedGray_WhiteIsZero: pixelFormatName = @\"kCVPixelFormatType_1IndexedGray_WhiteIsZero\"; break;\n            case kCVPixelFormatType_2IndexedGray_WhiteIsZero: pixelFormatName = @\"kCVPixelFormatType_2IndexedGray_WhiteIsZero\"; break;\n            case kCVPixelFormatType_4IndexedGray_WhiteIsZero: pixelFormatName = @\"kCVPixelFormatType_4IndexedGray_WhiteIsZero\"; break;\n            case kCVPixelFormatType_8IndexedGray_WhiteIsZero: pixelFormatName = @\"kCVPixelFormatType_8IndexedGray_WhiteIsZero\"; break;\n            case kCVPixelFormatType_16BE555: pixelFormatName = @\"kCVPixelFormatType_16BE555\"; break;\n            case kCVPixelFormatType_16LE555: pixelFormatName = @\"kCVPixelFormatType_16LE555\"; break;\n            case kCVPixelFormatType_16LE5551: pixelFormatName = @\"kCVPixelFormatType_16LE5551\"; break;\n            case kCVPixelFormatType_16BE565: pixelFormatName = @\"kCVPixelFormatType_16BE565\"; break;\n            case kCVPixelFormatType_16LE565: pixelFormatName = @\"kCVPixelFormatType_16LE565\"; break;\n            case kCVPixelFormatType_24RGB: pixelFormatName = @\"kCVPixelFormatType_24RGB\"; break;\n            case kCVPixelFormatType_24BGR: pixelFormatName = @\"kCVPixelFormatType_24BGR\"; break;\n            case kCVPixelFormatType_32ARGB: pixelFormatName = @\"kCVPixelFormatType_32ARGB\"; break;\n            case kCVPixelFormatType_32BGRA: pixelFormatName = @\"kCVPixelFormatType_32BGRA\"; break;\n            case kCVPixelFormatType_32ABGR: pixelFormatName = @\"kCVPixelFormatType_32ABGR\"; break;\n            case kCVPixelFormatType_32RGBA: pixelFormatName = @\"kCVPixelFormatType_32RGBA\"; break;\n            case kCVPixelFormatType_64ARGB: pixelFormatName = @\"kCVPixelFormatType_64ARGB\"; break;\n            case kCVPixelFormatType_48RGB: pixelFormatName = @\"kCVPixelFormatType_48RGB\"; break;\n            case kCVPixelFormatType_32AlphaGray: pixelFormatName = @\"kCVPixelFormatType_32AlphaGray\"; break;\n            case kCVPixelFormatType_16Gray: pixelFormatName = @\"kCVPixelFormatType_16Gray\"; break;\n            case kCVPixelFormatType_30RGB: pixelFormatName = @\"kCVPixelFormatType_30RGB\"; break;\n            case kCVPixelFormatType_422YpCbCr8: pixelFormatName = @\"kCVPixelFormatType_422YpCbCr8\"; break;\n            case kCVPixelFormatType_4444YpCbCrA8: pixelFormatName = @\"kCVPixelFormatType_4444YpCbCrA8\"; break;\n            case kCVPixelFormatType_4444YpCbCrA8R: pixelFormatName = @\"kCVPixelFormatType_4444YpCbCrA8R\"; break;\n            case kCVPixelFormatType_4444AYpCbCr8: pixelFormatName = @\"kCVPixelFormatType_4444AYpCbCr8\"; break;\n            case kCVPixelFormatType_4444AYpCbCr16: pixelFormatName = 
@\"kCVPixelFormatType_4444AYpCbCr16\"; break;\n            case kCVPixelFormatType_444YpCbCr8: pixelFormatName = @\"kCVPixelFormatType_444YpCbCr8\"; break;\n            case kCVPixelFormatType_422YpCbCr16: pixelFormatName = @\"kCVPixelFormatType_422YpCbCr16\"; break;\n            case kCVPixelFormatType_422YpCbCr10: pixelFormatName = @\"kCVPixelFormatType_422YpCbCr10\"; break;\n            case kCVPixelFormatType_444YpCbCr10: pixelFormatName = @\"kCVPixelFormatType_444YpCbCr10\"; break;\n            case kCVPixelFormatType_420YpCbCr8Planar: pixelFormatName = @\"kCVPixelFormatType_420YpCbCr8Planar\"; break;\n            case kCVPixelFormatType_420YpCbCr8PlanarFullRange: pixelFormatName = @\"kCVPixelFormatType_420YpCbCr8PlanarFullRange\"; break;\n            case kCVPixelFormatType_422YpCbCr_4A_8BiPlanar: pixelFormatName = @\"kCVPixelFormatType_422YpCbCr_4A_8BiPlanar\"; break;\n            case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: pixelFormatName = @\"kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange\"; break;\n            case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange: pixelFormatName = @\"kCVPixelFormatType_420YpCbCr8BiPlanarFullRange\"; break;\n            case kCVPixelFormatType_422YpCbCr8_yuvs: pixelFormatName = @\"kCVPixelFormatType_422YpCbCr8_yuvs\"; break;\n            case kCVPixelFormatType_422YpCbCr8FullRange: pixelFormatName = @\"kCVPixelFormatType_422YpCbCr8FullRange\"; break;\n            case kCVPixelFormatType_OneComponent8: pixelFormatName = @\"kCVPixelFormatType_OneComponent8\"; break;\n            case kCVPixelFormatType_TwoComponent8: pixelFormatName = @\"kCVPixelFormatType_TwoComponent8\"; break;\n        }\n        NSLog(@\"Supported pixel format: %@\", pixelFormatName);\n    }\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Mac/GPUImageContext.h",
    "content": "#import <Foundation/Foundation.h>\n#import <QuartzCore/QuartzCore.h>\n#import <CoreMedia/CoreMedia.h>\n#import \"GLProgram.h\"\n#import \"GPUImageFramebuffer.h\"\n#import \"GPUImageFramebufferCache.h\"\n\n#define GPUImageRotationSwapsWidthAndHeight(rotation) (((rotation) == kGPUImageRotateLeft) || ((rotation) == kGPUImageRotateRight) || ((rotation) == kGPUImageRotateRightFlipVertical) )\n\ntypedef enum { kGPUImageNoRotation, kGPUImageRotateLeft, kGPUImageRotateRight, kGPUImageFlipVertical, kGPUImageFlipHorizonal, kGPUImageRotateRightFlipVertical, kGPUImageRotateRightFlipHorizontal, kGPUImageRotate180 } GPUImageRotationMode;\n\n@interface GPUImageContext : NSObject\n\n@property(readonly, nonatomic) dispatch_queue_t contextQueue;\n@property(readwrite, retain, nonatomic) GLProgram *currentShaderProgram;\n@property(readonly, retain, nonatomic) NSOpenGLContext *context;\n@property(readonly) CVOpenGLTextureCacheRef coreVideoTextureCache;\n@property(readonly) GPUImageFramebufferCache *framebufferCache;\n\n+ (void *)contextKey;\n+ (GPUImageContext *)sharedImageProcessingContext;\n+ (dispatch_queue_t)sharedContextQueue;\n+ (GPUImageFramebufferCache *)sharedFramebufferCache;\n+ (void)useImageProcessingContext;\n+ (void)setActiveShaderProgram:(GLProgram *)shaderProgram;\n+ (GLint)maximumTextureSizeForThisDevice;\n+ (GLint)maximumTextureUnitsForThisDevice;\n+ (BOOL)deviceSupportsOpenGLESExtension:(NSString *)extension;\n+ (BOOL)deviceSupportsRedTextures;\n+ (BOOL)deviceSupportsFramebufferReads;\n+ (CGSize)sizeThatFitsWithinATextureForSize:(CGSize)inputSize;\n\n- (void)presentBufferForDisplay;\n- (GLProgram *)programForVertexShaderString:(NSString *)vertexShaderString fragmentShaderString:(NSString *)fragmentShaderString;\n\n- (void)useSharegroup:(CGLShareGroupObj *)sharegroup;\n\n// Manage fast texture upload\n+ (BOOL)supportsFastTextureUpload;\n\n@end\n\n@protocol GPUImageInput <NSObject>\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;\n- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;\n- (NSInteger)nextAvailableTextureIndex;\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n- (CGSize)maximumOutputSize;\n- (void)endProcessing;\n- (BOOL)shouldIgnoreUpdatesToThisTarget;\n- (BOOL)enabled;\n- (BOOL)wantsMonochromeInput;\n- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Mac/GPUImageContext.m",
    "content": "#import \"GPUImageContext.h\"\n#import <AVFoundation/AVFoundation.h>\n\n@interface GPUImageContext()\n{\n    NSMutableDictionary *shaderProgramCache;\n    CGLShareGroupObj *_sharegroup;\n    NSOpenGLPixelFormat *_pixelFormat;\n}\n\n@end\n\n@implementation GPUImageContext\n\n@synthesize context = _context;\n@synthesize currentShaderProgram = _currentShaderProgram;\n@synthesize contextQueue = _contextQueue;\n@synthesize coreVideoTextureCache = _coreVideoTextureCache;\n@synthesize framebufferCache = _framebufferCache;\n\nstatic void *openGLESContextQueueKey;\n\n- (id)init;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n\n\topenGLESContextQueueKey = &openGLESContextQueueKey;\n    _contextQueue = dispatch_queue_create(\"com.sunsetlakesoftware.GPUImage.openGLESContextQueue\", NULL);\n\tdispatch_queue_set_specific(_contextQueue, openGLESContextQueueKey, (__bridge void *)self, NULL);\n    shaderProgramCache = [[NSMutableDictionary alloc] init];\n    \n    return self;\n}\n\n+ (void *)contextKey {\n\treturn openGLESContextQueueKey;\n}\n\n// Based on Colin Wheeler's example here: http://cocoasamurai.blogspot.com/2011/04/singletons-your-doing-them-wrong.html\n+ (GPUImageContext *)sharedImageProcessingContext;\n{\n    static dispatch_once_t pred;\n    static GPUImageContext *sharedImageProcessingContext = nil;\n    \n    dispatch_once(&pred, ^{\n        sharedImageProcessingContext = [[[self class] alloc] init];\n    });\n    return sharedImageProcessingContext;\n}\n\n+ (dispatch_queue_t)sharedContextQueue;\n{\n    return [[self sharedImageProcessingContext] contextQueue];\n}\n\n+ (GPUImageFramebufferCache *)sharedFramebufferCache;\n{\n    return [[self sharedImageProcessingContext] framebufferCache];\n}\n\n+ (void)useImageProcessingContext;\n{\n    NSOpenGLContext *imageProcessingContext = [[GPUImageContext sharedImageProcessingContext] context];\n    if ([NSOpenGLContext currentContext] != imageProcessingContext)\n    {\n        [imageProcessingContext makeCurrentContext];\n    }\n}\n\n+ (void)setActiveShaderProgram:(GLProgram *)shaderProgram;\n{\n    GPUImageContext *sharedContext = [GPUImageContext sharedImageProcessingContext];\n    NSOpenGLContext *imageProcessingContext = [sharedContext context];\n    if ([NSOpenGLContext currentContext] != imageProcessingContext)\n    {\n        [imageProcessingContext makeCurrentContext];\n    }\n    \n    if (sharedContext.currentShaderProgram != shaderProgram)\n    {\n        sharedContext.currentShaderProgram = shaderProgram;\n        [shaderProgram use];\n    }\n}\n\n+ (GLint)maximumTextureSizeForThisDevice;\n{\n    static dispatch_once_t pred;\n    static GLint maxTextureSize = 0;\n    \n    dispatch_once(&pred, ^{\n        [self useImageProcessingContext];\n        glGetIntegerv(GL_MAX_TEXTURE_SIZE, &maxTextureSize);\n    });\n    \n    return maxTextureSize;\n}\n\n+ (GLint)maximumTextureUnitsForThisDevice;\n{\n    GLint maxTextureUnits; \n    glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, &maxTextureUnits);\n    return maxTextureUnits;\n}\n\n+ (BOOL)deviceSupportsOpenGLESExtension:(NSString *)extension;\n{\n    static dispatch_once_t pred;\n    static NSArray *extensionNames = nil;\n\n    // Cache extensions for later quick reference, since this won't change for a given device\n    dispatch_once(&pred, ^{\n        [GPUImageContext useImageProcessingContext];\n        NSString *extensionsString = [NSString stringWithCString:(const char *)glGetString(GL_EXTENSIONS) encoding:NSASCIIStringEncoding];\n        extensionNames = 
[extensionsString componentsSeparatedByString:@\" \"];\n    });\n\n    return [extensionNames containsObject:extension];\n}\n\n+ (BOOL)deviceSupportsFramebufferReads;\n{\n    return NO;\n}\n\n// http://www.khronos.org/registry/gles/extensions/EXT/EXT_texture_rg.txt\n\n+ (BOOL)deviceSupportsRedTextures;\n{\n    static dispatch_once_t pred;\n    static BOOL supportsRedTextures = NO;\n    \n    dispatch_once(&pred, ^{\n        supportsRedTextures = [GPUImageContext deviceSupportsOpenGLESExtension:@\"GL_EXT_texture_rg\"];\n    });\n    \n    return supportsRedTextures;\n}\n\n\n+ (CGSize)sizeThatFitsWithinATextureForSize:(CGSize)inputSize;\n{\n    GLint maxTextureSize = [self maximumTextureSizeForThisDevice]; \n    if ( (inputSize.width < maxTextureSize) && (inputSize.height < maxTextureSize) )\n    {\n        return inputSize;\n    }\n    \n    CGSize adjustedSize;\n    if (inputSize.width > inputSize.height)\n    {\n        adjustedSize.width = (CGFloat)maxTextureSize;\n        adjustedSize.height = ((CGFloat)maxTextureSize / inputSize.width) * inputSize.height;\n    }\n    else\n    {\n        adjustedSize.height = (CGFloat)maxTextureSize;\n        adjustedSize.width = ((CGFloat)maxTextureSize / inputSize.height) * inputSize.width;\n    }\n\n    return adjustedSize;\n}\n\n- (void)presentBufferForDisplay;\n{\n    [self.context flushBuffer];\n}\n\n- (GLProgram *)programForVertexShaderString:(NSString *)vertexShaderString fragmentShaderString:(NSString *)fragmentShaderString;\n{\n    NSString *lookupKeyForShaderProgram = [NSString stringWithFormat:@\"V: %@ - F: %@\", vertexShaderString, fragmentShaderString];\n    GLProgram *programFromCache = [shaderProgramCache objectForKey:lookupKeyForShaderProgram];\n\n    if (programFromCache == nil)\n    {\n        programFromCache = [[GLProgram alloc] initWithVertexShaderString:vertexShaderString fragmentShaderString:fragmentShaderString];\n        [shaderProgramCache setObject:programFromCache forKey:lookupKeyForShaderProgram];\n    }\n    \n    return programFromCache;\n}\n\n- (void)useSharegroup:(CGLShareGroupObj *)sharegroup;\n{\n    NSAssert(_context == nil, @\"Unable to use a share group when the context has already been created. Call this method before you use the context for the first time.\");\n    \n    _sharegroup = sharegroup;\n}\n\n- (NSOpenGLContext *)createContext;\n{\n    NSOpenGLPixelFormatAttribute pixelFormatAttributes[] = {\n        NSOpenGLPFADoubleBuffer,\n        NSOpenGLPFAAccelerated, 0,\n        0\n    };\n    \n    _pixelFormat = [[NSOpenGLPixelFormat alloc] initWithAttributes:pixelFormatAttributes];\n\tif (_pixelFormat == nil)\n\t{\n\t\tNSLog(@\"Error: No appropriate pixel format found\");\n\t}\n    // TODO: Take into account the sharegroup\n    NSOpenGLContext *context = [[NSOpenGLContext alloc] initWithFormat:_pixelFormat shareContext:nil];\n\n    NSAssert(context != nil, @\"Unable to create an OpenGL context. 
The GPUImage framework requires OpenGL support to work.\");\n    return context;\n}\n\n- (CVOpenGLTextureCacheRef)coreVideoTextureCache;\n{\n    if (_coreVideoTextureCache == NULL)\n    {\n        // CVOpenGLTextureCacheCreate needs the context's underlying CGLContextObj, not the NSOpenGLContext object itself\n        CVReturn err = CVOpenGLTextureCacheCreate(kCFAllocatorDefault, NULL, [[self context] CGLContextObj], [_pixelFormat CGLPixelFormatObj], NULL, &_coreVideoTextureCache);\n        \n        if (err)\n        {\n            NSAssert(NO, @\"Error at CVOpenGLTextureCacheCreate %d\", err);\n        }\n        \n    }\n    \n    return _coreVideoTextureCache;\n}\n\n\n#pragma mark -\n#pragma mark Manage fast texture upload\n\n+ (BOOL)supportsFastTextureUpload;\n{\n    // This may need to be redone to account for the Mac's accelerated data transfer methods\n    return NO;\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (NSOpenGLContext *)context;\n{\n    if (_context == nil)\n    {\n        _context = [self createContext];\n        [_context makeCurrentContext];\n\n        // Set up a few global settings for the image processing pipeline\n        glDisable(GL_DEPTH_TEST);\n        glEnable(GL_TEXTURE_2D);\n    }\n    \n    return _context;\n}\n\n- (GPUImageFramebufferCache *)framebufferCache;\n{\n    if (_framebufferCache == nil)\n    {\n        _framebufferCache = [[GPUImageFramebufferCache alloc] init];\n    }\n    \n    return _framebufferCache;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Mac/GPUImageMac-Info.plist",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n<plist version=\"1.0\">\n<dict>\n\t<key>CFBundleDevelopmentRegion</key>\n\t<string>English</string>\n\t<key>CFBundleExecutable</key>\n\t<string>${EXECUTABLE_NAME}</string>\n\t<key>CFBundleIconFile</key>\n\t<string></string>\n\t<key>CFBundleIdentifier</key>\n\t<string>com.sunsetlakesoftware.${PRODUCT_NAME:rfc1034identifier}</string>\n\t<key>CFBundleInfoDictionaryVersion</key>\n\t<string>6.0</string>\n\t<key>CFBundleName</key>\n\t<string>${PRODUCT_NAME}</string>\n\t<key>CFBundlePackageType</key>\n\t<string>FMWK</string>\n\t<key>CFBundleShortVersionString</key>\n\t<string>1.0</string>\n\t<key>CFBundleSignature</key>\n\t<string>????</string>\n\t<key>CFBundleVersion</key>\n\t<string>1</string>\n\t<key>NSHumanReadableCopyright</key>\n\t<string>Copyright © 2013 Sunset Lake Software LLC. All rights reserved.</string>\n\t<key>NSPrincipalClass</key>\n\t<string></string>\n</dict>\n</plist>\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Mac/GPUImageMac-Prefix.pch",
    "content": "//\n// Prefix header for all source files of the 'GPUImageMac' target in the 'GPUImageMac' project\n//\n\n#ifdef __OBJC__\n    #import <Cocoa/Cocoa.h>\n#endif\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Mac/GPUImageMovieWriter.h",
    "content": "#import <Foundation/Foundation.h>\n#import <AVFoundation/AVFoundation.h>\n#import \"GPUImageContext.h\"\n\nextern NSString *const kGPUImageColorSwizzlingFragmentShaderString;\n\n@protocol GPUImageMovieWriterDelegate <NSObject>\n\n@optional\n- (void)movieRecordingCompleted;\n- (void)movieRecordingFailedWithError:(NSError*)error;\n\n@end\n\n@interface GPUImageMovieWriter : NSObject <GPUImageInput>\n{\n    CMVideoDimensions videoDimensions;\n\tCMVideoCodecType videoType;\n\n    NSURL *movieURL;\n    NSString *fileType;\n\tAVAssetWriter *assetWriter;\n\tAVAssetWriterInput *assetWriterAudioInput;\n\tAVAssetWriterInput *assetWriterVideoInput;\n    AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferInput;\n\tdispatch_queue_t movieWritingQueue;\n    \n    CGSize videoSize;\n    GPUImageRotationMode inputRotation;\n}\n\n@property(readwrite, nonatomic) BOOL hasAudioTrack;\n@property(readwrite, nonatomic) BOOL shouldPassthroughAudio;\n@property(nonatomic, copy) void(^completionBlock)(void);\n@property(nonatomic, copy) void(^failureBlock)(NSError*);\n@property(nonatomic, assign) id<GPUImageMovieWriterDelegate> delegate;\n@property(readwrite, nonatomic) BOOL encodingLiveVideo;\n@property(nonatomic, copy) void(^videoInputReadyCallback)(void);\n@property(nonatomic, copy) void(^audioInputReadyCallback)(void);\n@property(nonatomic) BOOL enabled;\n\n// Initialization and teardown\n- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize;\n- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize fileType:(NSString *)newFileType outputSettings:(NSMutableDictionary *)outputSettings;\n\n- (void)setHasAudioTrack:(BOOL)hasAudioTrack audioSettings:(NSDictionary *)audioOutputSettings;\n\n// Movie recording\n- (void)startRecording;\n- (void)startRecordingInOrientation:(CGAffineTransform)orientationTransform;\n- (void)finishRecording;\n- (void)finishRecordingWithCompletionHandler:(void (^)(void))handler;\n- (void)cancelRecording;\n- (void)processAudioBuffer:(CMSampleBufferRef)audioBuffer;\n- (void)enableSynchronizationCallbacks;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Mac/GPUImageMovieWriter.m",
    "content": "#import \"GPUImageMovieWriter.h\"\n\n#import \"GPUImageContext.h\"\n#import \"GLProgram.h\"\n#import \"GPUImageFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageColorSwizzlingFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     gl_FragColor = texture2D(inputImageTexture, textureCoordinate).bgra;\n }\n );\n#else\nNSString *const kGPUImageColorSwizzlingFragmentShaderString = SHADER_STRING\n(\n varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     gl_FragColor = texture2D(inputImageTexture, textureCoordinate).bgra;\n }\n );\n#endif\n\n\n@interface GPUImageMovieWriter ()\n{\n    GPUImageFramebuffer *firstInputFramebuffer;\n\n    GLuint movieFramebuffer, movieRenderbuffer;\n    \n    GLProgram *colorSwizzlingProgram;\n    GLint colorSwizzlingPositionAttribute, colorSwizzlingTextureCoordinateAttribute;\n    GLint colorSwizzlingInputTextureUniform;\n\n    GLubyte *frameData;\n    \n    CMTime startTime, previousFrameTime;\n    \n    BOOL isRecording;\n}\n\n// Movie recording\n- (void)initializeMovieWithOutputSettings:(NSMutableDictionary *)outputSettings;\n\n// Frame rendering\n- (void)createDataFBO;\n- (void)destroyDataFBO;\n- (void)setFilterFBO;\n\n- (void)renderAtInternalSize;\n\n@end\n\n@implementation GPUImageMovieWriter\n\n@synthesize hasAudioTrack = _hasAudioTrack;\n@synthesize encodingLiveVideo = _encodingLiveVideo;\n@synthesize shouldPassthroughAudio = _shouldPassthroughAudio;\n@synthesize completionBlock;\n@synthesize failureBlock;\n@synthesize videoInputReadyCallback;\n@synthesize audioInputReadyCallback;\n@synthesize enabled;\n\n@synthesize delegate = _delegate;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize;\n{\n    return [self initWithMovieURL:newMovieURL size:newSize fileType:AVFileTypeQuickTimeMovie outputSettings:nil];\n}\n\n- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize fileType:(NSString *)newFileType outputSettings:(NSMutableDictionary *)outputSettings;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n\n    self.enabled = YES;\n    \n    videoSize = newSize;\n    movieURL = newMovieURL;\n    fileType = newFileType;\n    startTime = kCMTimeInvalid;\n    _encodingLiveVideo = YES;\n    previousFrameTime = kCMTimeNegativeInfinity;\n    inputRotation = kGPUImageNoRotation;\n\n    runSynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext useImageProcessingContext];\n        \n        if ([GPUImageContext supportsFastTextureUpload])\n        {\n            colorSwizzlingProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImagePassthroughFragmentShaderString];\n        }\n        else\n        {\n            colorSwizzlingProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageColorSwizzlingFragmentShaderString];\n        }\n        \n        if (!colorSwizzlingProgram.initialized)\n        {\n            [colorSwizzlingProgram addAttribute:@\"position\"];\n            [colorSwizzlingProgram addAttribute:@\"inputTextureCoordinate\"];\n            \n            if (![colorSwizzlingProgram link])\n            {\n                NSString *progLog = [colorSwizzlingProgram programLog];\n          
      NSLog(@\"Program link log: %@\", progLog);\n                NSString *fragLog = [colorSwizzlingProgram fragmentShaderLog];\n                NSLog(@\"Fragment shader compile log: %@\", fragLog);\n                NSString *vertLog = [colorSwizzlingProgram vertexShaderLog];\n                NSLog(@\"Vertex shader compile log: %@\", vertLog);\n                colorSwizzlingProgram = nil;\n                NSAssert(NO, @\"Filter shader link failed\");\n            }\n        }        \n        \n        colorSwizzlingPositionAttribute = [colorSwizzlingProgram attributeIndex:@\"position\"];\n        colorSwizzlingTextureCoordinateAttribute = [colorSwizzlingProgram attributeIndex:@\"inputTextureCoordinate\"];\n        colorSwizzlingInputTextureUniform = [colorSwizzlingProgram uniformIndex:@\"inputImageTexture\"];\n        \n        // REFACTOR: Wrap this in a block for the image processing queue\n        [GPUImageContext setActiveShaderProgram:colorSwizzlingProgram];\n        \n        glEnableVertexAttribArray(colorSwizzlingPositionAttribute);\n        glEnableVertexAttribArray(colorSwizzlingTextureCoordinateAttribute);\n    });\n        \n    [self initializeMovieWithOutputSettings:outputSettings];\n\n    return self;\n}\n\n- (void)dealloc;\n{\n    [self destroyDataFBO];\n\n    if (frameData != NULL)\n    {\n        free(frameData);\n    }\n}\n\n#pragma mark -\n#pragma mark Movie recording\n\n- (void)initializeMovieWithOutputSettings:(NSMutableDictionary *)outputSettings;\n{\n    isRecording = NO;\n    \n    self.enabled = YES;\n    frameData = (GLubyte *) malloc((int)videoSize.width * (int)videoSize.height * 4);\n\n//    frameData = (GLubyte *) calloc(videoSize.width * videoSize.height * 4, sizeof(GLubyte));\n    NSError *error = nil;\n    assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL fileType:fileType error:&error];\n    if (error != nil)\n    {\n        NSLog(@\"Error: %@\", error);\n        if (failureBlock) \n        {\n            failureBlock(error);\n        }\n        else \n        {\n            if(self.delegate && [self.delegate respondsToSelector:@selector(movieRecordingFailedWithError:)])\n            {\n                [self.delegate movieRecordingFailedWithError:error];\n            }\n        }\n    }\n    \n    // Set this to make sure that a functional movie is produced, even if the recording is cut off mid-stream. 
Only the last second should be lost in that case.\n    assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1.0, 1000);\n    \n    // use default output settings if none specified\n    if (outputSettings == nil) \n    {\n        outputSettings = [[NSMutableDictionary alloc] init];\n        [outputSettings setObject:AVVideoCodecH264 forKey:AVVideoCodecKey];\n        [outputSettings setObject:[NSNumber numberWithInt:videoSize.width] forKey:AVVideoWidthKey];\n        [outputSettings setObject:[NSNumber numberWithInt:videoSize.height] forKey:AVVideoHeightKey];\n    }\n    // custom output settings specified\n    else \n    {\n        #ifndef NS_BLOCK_ASSERTIONS\n\t\tNSString *videoCodec = [outputSettings objectForKey:AVVideoCodecKey];\n\t\tNSNumber *width = [outputSettings objectForKey:AVVideoWidthKey];\n\t\tNSNumber *height = [outputSettings objectForKey:AVVideoHeightKey];\n\t\t\n\t\tNSAssert(videoCodec && width && height, @\"OutputSettings is missing required parameters.\");\n        #endif\n    }\n    \n    /*\n    NSDictionary *videoCleanApertureSettings = [NSDictionary dictionaryWithObjectsAndKeys:\n                                                [NSNumber numberWithInt:videoSize.width], AVVideoCleanApertureWidthKey,\n                                                [NSNumber numberWithInt:videoSize.height], AVVideoCleanApertureHeightKey,\n                                                [NSNumber numberWithInt:0], AVVideoCleanApertureHorizontalOffsetKey,\n                                                [NSNumber numberWithInt:0], AVVideoCleanApertureVerticalOffsetKey,\n                                                nil];\n\n    NSDictionary *videoAspectRatioSettings = [NSDictionary dictionaryWithObjectsAndKeys:\n                                              [NSNumber numberWithInt:3], AVVideoPixelAspectRatioHorizontalSpacingKey,\n                                              [NSNumber numberWithInt:3], AVVideoPixelAspectRatioVerticalSpacingKey,\n                                              nil];\n\n    NSMutableDictionary * compressionProperties = [[NSMutableDictionary alloc] init];\n    [compressionProperties setObject:videoCleanApertureSettings forKey:AVVideoCleanApertureKey];\n    [compressionProperties setObject:videoAspectRatioSettings forKey:AVVideoPixelAspectRatioKey];\n    [compressionProperties setObject:[NSNumber numberWithInt: 2000000] forKey:AVVideoAverageBitRateKey];\n    [compressionProperties setObject:[NSNumber numberWithInt: 16] forKey:AVVideoMaxKeyFrameIntervalKey];\n    [compressionProperties setObject:AVVideoProfileLevelH264Main31 forKey:AVVideoProfileLevelKey];\n    \n    [outputSettings setObject:compressionProperties forKey:AVVideoCompressionPropertiesKey];\n    */\n     \n    assetWriterVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];\n    assetWriterVideoInput.expectsMediaDataInRealTime = _encodingLiveVideo;\n    \n    // You need to use BGRA for the video in order to get realtime encoding. 
I use a color-swizzling shader to line up glReadPixels' normal RGBA output with the movie input's BGRA.\n    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,\n                                                           [NSNumber numberWithInt:videoSize.width], kCVPixelBufferWidthKey,\n                                                           [NSNumber numberWithInt:videoSize.height], kCVPixelBufferHeightKey,\n                                                           nil];\n//    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey,\n//                                                           nil];\n        \n    assetWriterPixelBufferInput = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:assetWriterVideoInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];\n    \n    [assetWriter addInput:assetWriterVideoInput];\n}\n\n- (void)startRecording;\n{\n    isRecording = YES;\n    startTime = kCMTimeInvalid;\n\t//    [assetWriter startWriting];\n    \n\t//    [assetWriter startSessionAtSourceTime:kCMTimeZero];\n}\n\n- (void)startRecordingInOrientation:(CGAffineTransform)orientationTransform;\n{\n\tassetWriterVideoInput.transform = orientationTransform;\n\n\t[self startRecording];\n}\n\n- (void)cancelRecording;\n{\n    if (assetWriter.status == AVAssetWriterStatusCompleted)\n    {\n        return;\n    }\n    \n    isRecording = NO;\n    runOnMainQueueWithoutDeadlocking(^{\n        [assetWriterVideoInput markAsFinished];\n        [assetWriterAudioInput markAsFinished];\n        [assetWriter cancelWriting];\n    });\n}\n\n- (void)finishRecording;\n{\n    [self finishRecordingWithCompletionHandler:nil];\n}\n\n- (void)finishRecordingWithCompletionHandler:(void (^)(void))handler;\n{\n    if (assetWriter.status == AVAssetWriterStatusCompleted)\n    {\n        return;\n    }\n\n    isRecording = NO;\n    runOnMainQueueWithoutDeadlocking(^{\n        [assetWriterVideoInput markAsFinished];\n        [assetWriterAudioInput markAsFinished];\n#if (!defined(__IPHONE_6_0) || (__IPHONE_OS_VERSION_MAX_ALLOWED < __IPHONE_6_0))\n        // Not iOS 6 SDK\n        [assetWriter finishWriting];\n        if (handler) handler();\n#else\n        // iOS 6 SDK\n        if ([assetWriter respondsToSelector:@selector(finishWritingWithCompletionHandler:)]) {\n            // Running iOS 6\n            [assetWriter finishWritingWithCompletionHandler:(handler ?: ^{ })];\n        }\n        else {\n            // Not running iOS 6\n            [assetWriter finishWriting];\n            if (handler) handler();\n        }\n#endif\n    });\n}\n\n- (void)processAudioBuffer:(CMSampleBufferRef)audioBuffer;\n{\n    if (!isRecording)\n    {\n        return;\n    }\n    \n    if (_hasAudioTrack)\n    {\n        CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(audioBuffer);\n        \n        if (CMTIME_IS_INVALID(startTime))\n        {\n            if (audioInputReadyCallback == NULL)\n            {\n                [assetWriter startWriting];\n            }\n            [assetWriter startSessionAtSourceTime:currentSampleTime];\n            startTime = currentSampleTime;\n        }\n\n        if (!assetWriterAudioInput.readyForMoreMediaData)\n        {\n            NSLog(@\"Had to drop an 
audio frame\");\n            return;\n        }\n        \n//        NSLog(@\"Recorded audio sample time: %lld, %d, %lld\", currentSampleTime.value, currentSampleTime.timescale, currentSampleTime.epoch);\n        [assetWriterAudioInput appendSampleBuffer:audioBuffer];\n    }\n}\n\n- (void)enableSynchronizationCallbacks;\n{\n    if (videoInputReadyCallback != NULL)\n    {\n        [assetWriter startWriting];\n        [assetWriterVideoInput requestMediaDataWhenReadyOnQueue:[GPUImageContext sharedContextQueue] usingBlock:videoInputReadyCallback];\n    }\n    \n    if (audioInputReadyCallback != NULL)\n    {\n        [assetWriterAudioInput requestMediaDataWhenReadyOnQueue:[GPUImageContext sharedContextQueue] usingBlock:audioInputReadyCallback];\n    }        \n    \n}\n\n#pragma mark -\n#pragma mark Frame rendering\n\n- (void)createDataFBO;\n{\n    glActiveTexture(GL_TEXTURE1);\n    glGenFramebuffers(1, &movieFramebuffer);\n    glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);\n    \n    glGenRenderbuffers(1, &movieRenderbuffer);\n    glBindRenderbuffer(GL_RENDERBUFFER, movieRenderbuffer);\n    glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA8, (int)videoSize.width, (int)videoSize.height);\n    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, movieRenderbuffer);\t\n\t\n    #ifndef NS_BLOCK_ASSERTIONS\n\tGLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);\n    NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @\"Incomplete filter FBO: %d\", status);\n    #endif\n}\n\n- (void)destroyDataFBO;\n{\n    [GPUImageContext useImageProcessingContext];\n\n    if (movieFramebuffer)\n\t{\n\t\tglDeleteFramebuffers(1, &movieFramebuffer);\n\t\tmovieFramebuffer = 0;\n\t}\t\n    \n    if (movieRenderbuffer)\n\t{\n\t\tglDeleteRenderbuffers(1, &movieRenderbuffer);\n\t\tmovieRenderbuffer = 0;\n\t}\n}\n\n- (void)setFilterFBO;\n{\n    if (!movieFramebuffer)\n    {\n        [self createDataFBO];\n    }\n    \n    glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);\n    \n    glViewport(0, 0, (int)videoSize.width, (int)videoSize.height);\n}\n\n- (void)renderAtInternalSize;\n{\n    [GPUImageContext useImageProcessingContext];\n    [self setFilterFBO];\n    \n    [GPUImageContext setActiveShaderProgram:colorSwizzlingProgram];\n    \n    glClearColor(1.0f, 0.0f, 0.0f, 1.0f);\n    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);\n    \n    // This needs to be flipped to write out to video correctly\n    static const GLfloat squareVertices[] = {\n        -1.0f, -1.0f,\n        1.0f, -1.0f,\n        -1.0f,  1.0f,\n        1.0f,  1.0f,\n    };\n    \n    static const GLfloat textureCoordinates[] = {\n        0.0f, 0.0f,\n        1.0f, 0.0f,\n        0.0f, 1.0f,\n        1.0f, 1.0f,\n    };\n    \n\tglActiveTexture(GL_TEXTURE4);\n\tglBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);\n\tglUniform1i(colorSwizzlingInputTextureUniform, 4);\n    \n    glVertexAttribPointer(colorSwizzlingPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);\n\tglVertexAttribPointer(colorSwizzlingTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);\n    \n    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\n    \n    glFinish();\n    [firstInputFramebuffer unlock];\n}\n\n#pragma mark -\n#pragma mark GPUImageInput protocol\n\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;\n{\n    if (!isRecording)\n    {\n        [firstInputFramebuffer unlock];\n        return;\n    }\n\n    // Drop frames forced by images and other things with no time constants\n    // Also, if 
two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case\n    if ( (CMTIME_IS_INVALID(frameTime)) || (CMTIME_COMPARE_INLINE(frameTime, ==, previousFrameTime)) || (CMTIME_IS_INDEFINITE(frameTime)) ) \n    {\n        [firstInputFramebuffer unlock];\n        return;\n    }\n\n    if (CMTIME_IS_INVALID(startTime))\n    {\n        if (videoInputReadyCallback == NULL)\n        {\n            [assetWriter startWriting];\n        }\n        \n        [assetWriter startSessionAtSourceTime:frameTime];\n        startTime = frameTime;\n    }\n\n    if (!assetWriterVideoInput.readyForMoreMediaData)\n    {\n        [firstInputFramebuffer unlock];\n        NSLog(@\"Had to drop a video frame\");\n        return;\n    }\n    \n    // Render the frame with swizzled colors, so that they can be uploaded quickly as BGRA frames\n    [GPUImageContext useImageProcessingContext];\n    [self renderAtInternalSize];\n\n    CVPixelBufferRef pixel_buffer = NULL;\n\n    CVReturn status = CVPixelBufferPoolCreatePixelBuffer (NULL, [assetWriterPixelBufferInput pixelBufferPool], &pixel_buffer);\n    if ((pixel_buffer == NULL) || (status != kCVReturnSuccess))\n    {\n        return;\n    }\n    else\n    {\n        CVPixelBufferLockBaseAddress(pixel_buffer, 0);\n        \n        GLubyte *pixelBufferData = (GLubyte *)CVPixelBufferGetBaseAddress(pixel_buffer);\n        glReadPixels(0, 0, videoSize.width, videoSize.height, GL_RGBA, GL_UNSIGNED_BYTE, pixelBufferData);\n    }\n    \n//    if(![assetWriterPixelBufferInput appendPixelBuffer:pixel_buffer withPresentationTime:CMTimeSubtract(frameTime, startTime)]) \n    if(![assetWriterPixelBufferInput appendPixelBuffer:pixel_buffer withPresentationTime:frameTime]) \n    {\n        NSLog(@\"Problem appending pixel buffer at time: %lld\", frameTime.value);\n    } \n    else \n    {\n//        NSLog(@\"Recorded video sample time: %lld, %d, %lld\", frameTime.value, frameTime.timescale, frameTime.epoch);\n    }\n    CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);\n    \n    previousFrameTime = frameTime;\n    \n    if (![GPUImageContext supportsFastTextureUpload])\n    {\n        CVPixelBufferRelease(pixel_buffer);\n    }\n}\n\n- (NSInteger)nextAvailableTextureIndex;\n{\n    return 0;\n}\n\n- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;\n{\n    firstInputFramebuffer = newInputFramebuffer;\n    [firstInputFramebuffer lock];\n}\n\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n    inputRotation = newInputRotation;\n}\n\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n{\n}\n\n- (CGSize)maximumOutputSize;\n{\n    return videoSize;\n}\n\n- (void)endProcessing \n{\n    if (completionBlock) \n    {\n        completionBlock();\n    }\n    else \n    {\n        if (_delegate && [_delegate respondsToSelector:@selector(movieRecordingCompleted)])\n        {\n            [_delegate movieRecordingCompleted];\n        }\n    }\n}\n\n- (BOOL)shouldIgnoreUpdatesToThisTarget;\n{\n    return NO;\n}\n\n- (void)conserveMemoryForNextFrame;\n{\n    \n}\n\n- (BOOL)wantsMonochromeInput;\n{\n    return NO;\n}\n\n- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;\n{\n    \n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setHasAudioTrack:(BOOL)newValue\n{\n\t[self setHasAudioTrack:newValue audioSettings:nil];\n}\n\n- (void)setHasAudioTrack:(BOOL)newValue audioSettings:(NSDictionary *)audioOutputSettings;\n{\n    
_hasAudioTrack = newValue;\n    \n    if (_hasAudioTrack)\n    {\n        if (_shouldPassthroughAudio)\n        {\n\t\t\t// Do not set any settings so audio will be the same as passthrough\n\t\t\taudioOutputSettings = nil;\n        }\n        else if (audioOutputSettings == nil)\n        {\n//            double preferredHardwareSampleRate = [[AVAudioSession sharedInstance] currentHardwareSampleRate];\n            double preferredHardwareSampleRate = 48000; // ? - TODO: Fix this, because it's probably broken\n            \n            AudioChannelLayout acl;\n            bzero( &acl, sizeof(acl));\n            acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;\n            \n            audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:\n                                         [ NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,\n                                         [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,\n                                         [ NSNumber numberWithFloat: preferredHardwareSampleRate ], AVSampleRateKey,\n                                         [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,\n                                         //[ NSNumber numberWithInt:AVAudioQualityLow], AVEncoderAudioQualityKey,\n                                         [ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,\n                                         nil];\n/*\n            AudioChannelLayout acl;\n            bzero( &acl, sizeof(acl));\n            acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;\n            \n            audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:\n                                   [ NSNumber numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey,\n                                   [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,\n                                   [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,\n                                   [ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,\n                                   [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,\n                                   nil];*/\n        }\n        \n        assetWriterAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];\n        [assetWriter addInput:assetWriterAudioInput];\n        assetWriterAudioInput.expectsMediaDataInRealTime = _encodingLiveVideo;\n    }\n    else\n    {\n        // Remove audio track if it exists\n    }\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Mac/GPUImagePicture.h",
    "content": "#import <Cocoa/Cocoa.h>\n#import \"GPUImageOutput.h\"\n\n@interface GPUImagePicture : GPUImageOutput\n{\n    CGSize pixelSizeOfImage;\n    BOOL hasProcessedImage;\n    \n    dispatch_semaphore_t imageUpdateSemaphore;\n}\n\n// Initialization and teardown\n- (id)initWithURL:(NSURL *)url;\n- (id)initWithImage:(NSImage *)newImageSource;\n- (id)initWithCGImage:(CGImageRef)newImageSource;\n- (id)initWithImage:(NSImage *)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput;\n- (id)initWithCGImage:(CGImageRef)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput;\n\n// Image rendering\n- (void)processImage;\n- (BOOL)processImageWithCompletionHandler:(void (^)(void))completion;\n- (void)processImageUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(NSImage *processedImage))block;\n- (CGSize)outputImageSize;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Mac/GPUImagePicture.m",
    "content": "#import \"GPUImagePicture.h\"\n\n@implementation GPUImagePicture\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithURL:(NSURL *)url;\n{\n    NSData *imageData = [[NSData alloc] initWithContentsOfURL:url];\n    \n    if (!(self = [self initWithData:imageData]))\n    {\n        return nil;\n    }\n\n    return self;\n}\n\n- (id)initWithData:(NSData *)imageData;\n{\n    NSImage *inputImage = [[NSImage alloc] initWithData:imageData];\n    \n    if (!(self = [self initWithImage:inputImage]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n- (id)initWithImage:(NSImage *)newImageSource;\n{\n    if (!(self = [self initWithImage:newImageSource smoothlyScaleOutput:NO]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n- (id)initWithCGImage:(CGImageRef)newImageSource;\n{\n    if (!(self = [self initWithCGImage:newImageSource smoothlyScaleOutput:NO]))\n    {\n\t\treturn nil;\n    }\n    return self;\n}\n\n- (id)initWithImage:(NSImage *)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput;\n{\n    return [self initWithCGImage:[newImageSource CGImageForProposedRect:NULL context:NULL hints:nil] smoothlyScaleOutput:smoothlyScaleOutput];\n}\n\n- (id)initWithCGImage:(CGImageRef)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n    hasProcessedImage = NO;\n    self.shouldSmoothlyScaleOutput = smoothlyScaleOutput;\n    imageUpdateSemaphore = dispatch_semaphore_create(1);\n    \n    // TODO: Dispatch this whole thing asynchronously to move image loading off main thread\n    CGFloat widthOfImage = CGImageGetWidth(newImageSource);\n    CGFloat heightOfImage = CGImageGetHeight(newImageSource);\n    \n    // If passed an empty image reference, CGContextDrawImage will fail in future versions of the SDK.\n    NSAssert( widthOfImage > 0 && heightOfImage > 0, @\"Passed image must not be empty - it should be at least 1px tall and wide\");\n    \n    pixelSizeOfImage = CGSizeMake(widthOfImage, heightOfImage);\n    CGSize pixelSizeToUseForTexture = pixelSizeOfImage;\n    \n    BOOL shouldRedrawUsingCoreGraphics = NO;\n    \n    // For now, deal with images larger than the maximum texture size by resizing to be within that limit\n    CGSize scaledImageSizeToFitOnGPU = [GPUImageContext sizeThatFitsWithinATextureForSize:pixelSizeOfImage];\n    if (!CGSizeEqualToSize(scaledImageSizeToFitOnGPU, pixelSizeOfImage))\n    {\n        pixelSizeOfImage = scaledImageSizeToFitOnGPU;\n        pixelSizeToUseForTexture = pixelSizeOfImage;\n        shouldRedrawUsingCoreGraphics = YES;\n    }\n    \n    if (self.shouldSmoothlyScaleOutput)\n    {\n        // In order to use mipmaps, you need to provide power-of-two textures, so convert to the next largest power of two and stretch to fill\n        CGFloat powerClosestToWidth = ceil(log2(pixelSizeOfImage.width));\n        CGFloat powerClosestToHeight = ceil(log2(pixelSizeOfImage.height));\n        \n        pixelSizeToUseForTexture = CGSizeMake(pow(2.0, powerClosestToWidth), pow(2.0, powerClosestToHeight));\n        \n        shouldRedrawUsingCoreGraphics = YES;\n    }\n    \n    GLubyte *imageData = NULL;\n    CFDataRef dataFromImageDataProvider;\n    GLenum format = GL_BGRA;\n    \n    if (!shouldRedrawUsingCoreGraphics) {\n        /* Check that the memory layout is compatible with GL, as we cannot use glPixelStore to\n         * tell GL about the memory layout with GLES.\n         */\n        if 
(CGImageGetBytesPerRow(newImageSource) != CGImageGetWidth(newImageSource) * 4 ||\n            CGImageGetBitsPerPixel(newImageSource) != 32 ||\n            CGImageGetBitsPerComponent(newImageSource) != 8)\n        {\n            shouldRedrawUsingCoreGraphics = YES;\n        } else {\n            /* Check that the bitmap pixel format is compatible with GL */\n            CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(newImageSource);\n            if ((bitmapInfo & kCGBitmapFloatComponents) != 0) {\n                /* We don't support float components for use directly in GL */\n                shouldRedrawUsingCoreGraphics = YES;\n            } else {\n                CGBitmapInfo byteOrderInfo = bitmapInfo & kCGBitmapByteOrderMask;\n                if (byteOrderInfo == kCGBitmapByteOrder32Little) {\n                    /* Little endian, for alpha-first we can use this bitmap directly in GL */\n                    CGImageAlphaInfo alphaInfo = bitmapInfo & kCGBitmapAlphaInfoMask;\n                    if (alphaInfo != kCGImageAlphaPremultipliedFirst && alphaInfo != kCGImageAlphaFirst &&\n                        alphaInfo != kCGImageAlphaNoneSkipFirst) {\n                        shouldRedrawUsingCoreGraphics = YES;\n                    }\n                } else if (byteOrderInfo == kCGBitmapByteOrderDefault || byteOrderInfo == kCGBitmapByteOrder32Big) {\n                    /* Big endian, for alpha-last we can use this bitmap directly in GL */\n                    CGImageAlphaInfo alphaInfo = bitmapInfo & kCGBitmapAlphaInfoMask;\n                    if (alphaInfo != kCGImageAlphaPremultipliedLast && alphaInfo != kCGImageAlphaLast &&\n                        alphaInfo != kCGImageAlphaNoneSkipLast) {\n                        shouldRedrawUsingCoreGraphics = YES;\n                    } else {\n                        /* Can access directly using GL_RGBA pixel format */\n                        format = GL_RGBA;\n                    }\n                }\n            }\n        }\n    }\n    \n    //    CFAbsoluteTime elapsedTime, startTime = CFAbsoluteTimeGetCurrent();\n    \n    if (shouldRedrawUsingCoreGraphics)\n    {\n        // For resized or incompatible image: redraw\n        imageData = (GLubyte *) calloc(1, (int)pixelSizeToUseForTexture.width * (int)pixelSizeToUseForTexture.height * 4);\n        \n        CGColorSpaceRef genericRGBColorspace = CGColorSpaceCreateDeviceRGB();\n        \n        CGContextRef imageContext = CGBitmapContextCreate(imageData, (size_t)pixelSizeToUseForTexture.width, (size_t)pixelSizeToUseForTexture.height, 8, (size_t)pixelSizeToUseForTexture.width * 4, genericRGBColorspace,  kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);\n        //        CGContextSetBlendMode(imageContext, kCGBlendModeCopy); // From Technical Q&A QA1708: http://developer.apple.com/library/ios/#qa/qa1708/_index.html\n        CGContextDrawImage(imageContext, CGRectMake(0.0, 0.0, pixelSizeToUseForTexture.width, pixelSizeToUseForTexture.height), newImageSource);\n        CGContextRelease(imageContext);\n        CGColorSpaceRelease(genericRGBColorspace);\n    }\n    else\n    {\n        // Access the raw image bytes directly\n        dataFromImageDataProvider = CGDataProviderCopyData(CGImageGetDataProvider(newImageSource));\n        imageData = (GLubyte *)CFDataGetBytePtr(dataFromImageDataProvider);\n    }\n    \n    //    elapsedTime = (CFAbsoluteTimeGetCurrent() - startTime) * 1000.0;\n    //    NSLog(@\"Core Graphics drawing time: %f\", elapsedTime);\n    \n    //    CGFloat 
currentRedTotal = 0.0f, currentGreenTotal = 0.0f, currentBlueTotal = 0.0f, currentAlphaTotal = 0.0f;\n    //\tNSUInteger totalNumberOfPixels = round(pixelSizeToUseForTexture.width * pixelSizeToUseForTexture.height);\n    //\n    //    for (NSUInteger currentPixel = 0; currentPixel < totalNumberOfPixels; currentPixel++)\n    //    {\n    //        currentBlueTotal += (CGFloat)imageData[(currentPixel * 4)] / 255.0f;\n    //        currentGreenTotal += (CGFloat)imageData[(currentPixel * 4) + 1] / 255.0f;\n    //        currentRedTotal += (CGFloat)imageData[(currentPixel * 4 + 2)] / 255.0f;\n    //        currentAlphaTotal += (CGFloat)imageData[(currentPixel * 4) + 3] / 255.0f;\n    //    }\n    //\n    //    NSLog(@\"Debug, average input image red: %f, green: %f, blue: %f, alpha: %f\", currentRedTotal / (CGFloat)totalNumberOfPixels, currentGreenTotal / (CGFloat)totalNumberOfPixels, currentBlueTotal / (CGFloat)totalNumberOfPixels, currentAlphaTotal / (CGFloat)totalNumberOfPixels);\n    \n    runSynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext useImageProcessingContext];\n        \n        outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:pixelSizeToUseForTexture onlyTexture:YES];\n        [outputFramebuffer disableReferenceCounting];\n        \n        glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);\n        if (self.shouldSmoothlyScaleOutput)\n        {\n            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);\n        }\n        // no need to use self.outputTextureOptions here since pictures need this texture formats and type\n        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (int)pixelSizeToUseForTexture.width, (int)pixelSizeToUseForTexture.height, 0, format, GL_UNSIGNED_BYTE, imageData);\n        \n        if (self.shouldSmoothlyScaleOutput)\n        {\n            glGenerateMipmap(GL_TEXTURE_2D);\n        }\n        glBindTexture(GL_TEXTURE_2D, 0);\n    });\n    \n    if (shouldRedrawUsingCoreGraphics)\n    {\n        free(imageData);\n    }\n    else\n    {\n        CFRelease(dataFromImageDataProvider);\n    }\n    \n    return self;\n}\n\n// ARC forbids explicit message send of 'release' on Mountain Lion, but needs this on Lion and older\n#if ( (MAC_OS_X_VERSION_MIN_REQUIRED < __MAC_10_8) || (!defined(__MAC_10_8)) )\n- (void)dealloc;\n{\n    [outputFramebuffer enableReferenceCounting];\n    [outputFramebuffer unlock];\n\n    if (imageUpdateSemaphore != NULL)\n    {\n        dispatch_release(imageUpdateSemaphore);\n    }\n}\n#endif\n\n#pragma mark -\n#pragma mark Image rendering\n\n- (void)removeAllTargets;\n{\n    [super removeAllTargets];\n    hasProcessedImage = NO;\n}\n\n- (void)processImage;\n{\n    [self processImageWithCompletionHandler:nil];\n}\n\n- (BOOL)processImageWithCompletionHandler:(void (^)(void))completion;\n{\n    hasProcessedImage = YES;\n    \n    //    dispatch_semaphore_wait(imageUpdateSemaphore, DISPATCH_TIME_FOREVER);\n    \n    if (dispatch_semaphore_wait(imageUpdateSemaphore, DISPATCH_TIME_NOW) != 0)\n    {\n        return NO;\n    }\n    \n    runAsynchronouslyOnVideoProcessingQueue(^{\n        for (id<GPUImageInput> currentTarget in targets)\n        {\n            NSInteger indexOfObject = [targets indexOfObject:currentTarget];\n            NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n            \n            [currentTarget setCurrentlyReceivingMonochromeInput:NO];\n            [currentTarget 
setInputSize:pixelSizeOfImage atIndex:textureIndexOfTarget];\n            [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];\n            [currentTarget newFrameReadyAtTime:kCMTimeIndefinite atIndex:textureIndexOfTarget];\n        }\n        \n        dispatch_semaphore_signal(imageUpdateSemaphore);\n        \n        if (completion != nil) {\n            completion();\n        }\n    });\n    \n    return YES;\n}\n\n- (void)processImageUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(NSImage *processedImage))block;\n{\n    [finalFilterInChain useNextFrameForImageCapture];\n    [self processImageWithCompletionHandler:^{\n        NSImage *imageFromFilter = [finalFilterInChain imageFromCurrentFramebuffer];\n        block(imageFromFilter);\n    }];\n}\n\n- (CGSize)outputImageSize;\n{\n    return pixelSizeOfImage;\n}\n\n- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;\n{\n    [super addTarget:newTarget atTextureLocation:textureLocation];\n    \n    if (hasProcessedImage)\n    {\n        [newTarget setInputSize:pixelSizeOfImage atIndex:textureLocation];\n        [newTarget newFrameReadyAtTime:kCMTimeIndefinite atIndex:textureLocation];\n    }\n}\n\n@end"
  },
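  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Mac/Examples/GPUImagePictureUsageSketch.m",
    "content": "//  Hypothetical usage sketch -- not part of GPUImage; the file name and RenderSepiaSketch() are illustrative only.\n//  Shows the GPUImagePicture API declared above: initWithImage:, addTarget: (from GPUImageOutput),\n//  and processImageUpToFilter:withCompletionHandler:.\n\n#import \"GPUImagePicture.h\"\n#import \"GPUImageSepiaFilter.h\"\n\n// Assumes sourceImage is non-empty; GPUImagePicture asserts on zero-sized images.\nstatic void RenderSepiaSketch(NSImage *sourceImage, void (^completion)(NSImage *filteredImage))\n{\n    GPUImagePicture *picture = [[GPUImagePicture alloc] initWithImage:sourceImage];\n    GPUImageSepiaFilter *sepiaFilter = [[GPUImageSepiaFilter alloc] init];\n\n    [picture addTarget:sepiaFilter];\n\n    // processImageUpToFilter: captures the final filter's output and hands it back as an NSImage.\n    [picture processImageUpToFilter:sepiaFilter withCompletionHandler:^(NSImage *processedImage) {\n        completion(processedImage);\n    }];\n}\n"
  },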
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Mac/GPUImageView.h",
    "content": "#import <Cocoa/Cocoa.h>\n#import \"GPUImageContext.h\"\n\ntypedef enum {\n    kGPUImageFillModeStretch,                       // Stretch to fill the full view, which may distort the image outside of its normal aspect ratio\n    kGPUImageFillModePreserveAspectRatio,           // Maintains the aspect ratio of the source image, adding bars of the specified background color\n    kGPUImageFillModePreserveAspectRatioAndFill     // Maintains the aspect ratio of the source image, zooming in on its center to fill the view\n} GPUImageFillModeType; \n\n/**\n UIView subclass to use as an endpoint for displaying GPUImage outputs\n */\n@interface GPUImageView : NSOpenGLView <GPUImageInput>\n{\n    GPUImageRotationMode inputRotation;\n}\n\n/** The fill mode dictates how images are fit in the view, with the default being kGPUImageFillModePreserveAspectRatio\n */\n@property(readwrite, nonatomic) GPUImageFillModeType fillMode;\n\n/** This calculates the current display size, in pixels, taking into account Retina scaling factors\n */\n@property(readonly, nonatomic) CGSize sizeInPixels;\n\n@property(nonatomic) BOOL enabled;\n\n/** Handling fill mode\n \n @param redComponent Red component for background color\n @param greenComponent Green component for background color\n @param blueComponent Blue component for background color\n @param alphaComponent Alpha component for background color\n */\n- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;\n\n- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Mac/GPUImageView.m",
    "content": "#import \"GPUImageView.h\"\n#import <QuartzCore/QuartzCore.h>\n#import \"GPUImageContext.h\"\n#import \"GPUImageFilter.h\"\n#import <AVFoundation/AVFoundation.h>\n\n#pragma mark -\n#pragma mark Private methods and instance variables\n\n@interface GPUImageView () \n{\n    GPUImageFramebuffer *inputFramebufferForDisplay;\n\n    GLProgram *displayProgram;\n    GLint displayPositionAttribute, displayTextureCoordinateAttribute;\n    GLint displayInputTextureUniform;\n    \n    CGSize inputImageSize;\n    GLfloat imageVertices[8];\n    GLfloat backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha;\n}\n\n// Initialization and teardown\n- (void)commonInit;\n\n// Managing the display FBOs\n- (void)createDisplayFramebuffer;\n- (void)destroyDisplayFramebuffer;\n\n// Handling fill mode\n- (void)recalculateViewGeometry;\n\n@end\n\n@implementation GPUImageView\n\n@synthesize sizeInPixels = _sizeInPixels;\n@synthesize fillMode = _fillMode;\n@synthesize enabled;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithFrame:(CGRect)frame\n{\n    if (!(self = [super initWithFrame:frame]))\n    {\n\t\treturn nil;\n    }\n    \n    [self commonInit];\n    \n    return self;\n}\n\n-(id)initWithCoder:(NSCoder *)coder\n{\n\tif (!(self = [super initWithCoder:coder])) \n    {\n        return nil;\n\t}\n\n    [self commonInit];\n\n\treturn self;\n}\n\n- (void)commonInit;\n{\n    [self setOpenGLContext:[[GPUImageContext sharedImageProcessingContext] context]];\n\n    if ([self respondsToSelector:@selector(setWantsBestResolutionOpenGLSurface:)])\n    {\n        [self  setWantsBestResolutionOpenGLSurface:YES];\n    }\n    \n    inputRotation = kGPUImageNoRotation;\n    self.hidden = NO;\n\n    self.enabled = YES;\n    \n    runSynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext useImageProcessingContext];\n        displayProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImagePassthroughFragmentShaderString];\n\n        if (!displayProgram.initialized)\n        {\n            [displayProgram addAttribute:@\"position\"];\n            [displayProgram addAttribute:@\"inputTextureCoordinate\"];\n            \n            if (![displayProgram link])\n            {\n                NSString *progLog = [displayProgram programLog];\n                NSLog(@\"Program link log: %@\", progLog);\n                NSString *fragLog = [displayProgram fragmentShaderLog];\n                NSLog(@\"Fragment shader compile log: %@\", fragLog);\n                NSString *vertLog = [displayProgram vertexShaderLog];\n                NSLog(@\"Vertex shader compile log: %@\", vertLog);\n                displayProgram = nil;\n                NSAssert(NO, @\"Filter shader link failed\");\n            }\n        }\n        \n        displayPositionAttribute = [displayProgram attributeIndex:@\"position\"];\n        displayTextureCoordinateAttribute = [displayProgram attributeIndex:@\"inputTextureCoordinate\"];\n        displayInputTextureUniform = [displayProgram uniformIndex:@\"inputImageTexture\"];\n        \n        [GPUImageContext setActiveShaderProgram:displayProgram];\n\n        glEnableVertexAttribArray(displayPositionAttribute);\n        glEnableVertexAttribArray(displayTextureCoordinateAttribute);\n    \n        [self setBackgroundColorRed:0.0 green:0.0 blue:0.0 alpha:1.0];\n        _fillMode = kGPUImageFillModePreserveAspectRatio;\n        [self 
createDisplayFramebuffer];\n    });\n    \n}\n\n- (void)dealloc\n{\n}\n\n#pragma mark -\n#pragma mark Managing the display FBOs\n\n- (void)createDisplayFramebuffer;\n{\n    // Perhaps I'll use an FBO at some time later, but for now will render directly to the screen\n    if ([self respondsToSelector:@selector(convertSizeToBacking:)])\n    {\n        _sizeInPixels = [self convertSizeToBacking:self.bounds.size];\n    }\n    else\n    {\n        _sizeInPixels = self.bounds.size;\n    }\n}\n\n- (void)destroyDisplayFramebuffer;\n{\n    [self.openGLContext makeCurrentContext];\n}\n\n- (void)setDisplayFramebuffer;\n{\n    glBindFramebuffer(GL_FRAMEBUFFER, 0);\n    glBindRenderbuffer(GL_RENDERBUFFER, 0);\n    \n    glViewport(0, 0, (GLint)_sizeInPixels.width, (GLint)_sizeInPixels.height);\n}\n\n- (void)presentFramebuffer;\n{\n    [self.openGLContext flushBuffer];\n}\n\n- (void)reshape;\n{\n    CGSize viewSize = self.bounds.size;\n    if ([self respondsToSelector:@selector(convertSizeToBacking:)])\n    {\n        viewSize = [self convertSizeToBacking:self.bounds.size];\n    }\n    \n    if ( (_sizeInPixels.width == viewSize.width) && (_sizeInPixels.height == viewSize.height) )\n    {\n        return;\n    }\n    \n    _sizeInPixels = viewSize;\n\n    [self recalculateViewGeometry];\n    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{\n        [self newFrameReadyAtTime:kCMTimeInvalid atIndex:0];\n    });\n}\n\n#pragma mark -\n#pragma mark Handling fill mode\n\n- (void)recalculateViewGeometry;\n{\n    CGFloat heightScaling, widthScaling;\n    \n    CGSize currentViewSize = self.sizeInPixels;\n\n    if ((inputImageSize.width < 1.0) || (inputImageSize.height < 1.0))\n    {\n        return;\n    }\n\n    CGRect insetRect = AVMakeRectWithAspectRatioInsideRect(inputImageSize, CGRectMake(0.0,0.0,currentViewSize.width,currentViewSize.height));\n    if ((insetRect.size.width < 1.0) || (insetRect.size.width < 1.0))\n    {\n        insetRect = CGRectMake(0.0,0.0,currentViewSize.width,currentViewSize.height);\n    }\n    \n    switch(_fillMode)\n    {\n        case kGPUImageFillModeStretch:\n        {\n            widthScaling = 1.0;\n            heightScaling = 1.0;\n        }; break;\n        case kGPUImageFillModePreserveAspectRatio:\n        {\n            widthScaling = insetRect.size.width / currentViewSize.width;\n            heightScaling = insetRect.size.height / currentViewSize.height;\n        }; break;\n        case kGPUImageFillModePreserveAspectRatioAndFill:\n        {\n            widthScaling = currentViewSize.height / insetRect.size.height;\n            heightScaling = currentViewSize.width / insetRect.size.width;\n        }; break;\n    }\n    \n    imageVertices[0] = -widthScaling;\n    imageVertices[1] = -heightScaling;\n    imageVertices[2] = widthScaling;\n    imageVertices[3] = -heightScaling;\n    imageVertices[4] = -widthScaling;\n    imageVertices[5] = heightScaling;\n    imageVertices[6] = widthScaling;\n    imageVertices[7] = heightScaling;\n}\n\n- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;\n{\n    backgroundColorRed = redComponent;\n    backgroundColorGreen = greenComponent;\n    backgroundColorBlue = blueComponent;\n    backgroundColorAlpha = alphaComponent;\n}\n\n+ (const GLfloat *)textureCoordinatesForRotation:(GPUImageRotationMode)rotationMode;\n{\n//    static const GLfloat noRotationTextureCoordinates[] = {\n//        0.0f, 0.0f,\n//        1.0f, 
0.0f,\n//        0.0f, 1.0f,\n//        1.0f, 1.0f,\n//    };\n    \n    static const GLfloat noRotationTextureCoordinates[] = {\n        0.0f, 1.0f,\n        1.0f, 1.0f,\n        0.0f, 0.0f,\n        1.0f, 0.0f,\n    };\n\n    static const GLfloat rotateRightTextureCoordinates[] = {\n        1.0f, 1.0f,\n        1.0f, 0.0f,\n        0.0f, 1.0f,\n        0.0f, 0.0f,\n    };\n\n    static const GLfloat rotateLeftTextureCoordinates[] = {\n        0.0f, 0.0f,\n        0.0f, 1.0f,\n        1.0f, 0.0f,\n        1.0f, 1.0f,\n    };\n        \n    static const GLfloat verticalFlipTextureCoordinates[] = {\n        0.0f, 0.0f,\n        1.0f, 0.0f,\n        0.0f, 1.0f,\n        1.0f, 1.0f,\n    };\n    \n    static const GLfloat horizontalFlipTextureCoordinates[] = {\n        1.0f, 1.0f,\n        0.0f, 1.0f,\n        1.0f, 0.0f,\n        0.0f, 0.0f,\n    };\n    \n    static const GLfloat rotateRightVerticalFlipTextureCoordinates[] = {\n        1.0f, 0.0f,\n        1.0f, 1.0f,\n        0.0f, 0.0f,\n        0.0f, 1.0f,\n    };\n    \n    static const GLfloat rotateRightHorizontalFlipTextureCoordinates[] = {\n        1.0f, 1.0f,\n        1.0f, 0.0f,\n        0.0f, 1.0f,\n        0.0f, 0.0f,\n    };\n    \n    static const GLfloat rotate180TextureCoordinates[] = {\n        1.0f, 0.0f,\n        0.0f, 0.0f,\n        1.0f, 1.0f,\n        0.0f, 1.0f,\n    };\n    \n    switch(rotationMode)\n    {\n        case kGPUImageNoRotation: return noRotationTextureCoordinates;\n        case kGPUImageRotateLeft: return rotateLeftTextureCoordinates;\n        case kGPUImageRotateRight: return rotateRightTextureCoordinates;\n        case kGPUImageFlipVertical: return verticalFlipTextureCoordinates;\n        case kGPUImageFlipHorizonal: return horizontalFlipTextureCoordinates;\n        case kGPUImageRotateRightFlipVertical: return rotateRightVerticalFlipTextureCoordinates;\n        case kGPUImageRotateRightFlipHorizontal: return rotateRightHorizontalFlipTextureCoordinates;\n        case kGPUImageRotate180: return rotate180TextureCoordinates;\n    }\n}\n\n#pragma mark -\n#pragma mark GPUInput protocol\n\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;\n{\n    runSynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext setActiveShaderProgram:displayProgram];\n        [self setDisplayFramebuffer];\n        \n        glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);\n        glClear(GL_COLOR_BUFFER_BIT);\n        \n        // Re-render onscreen, flipped to a normal orientation\n        glBindFramebuffer(GL_FRAMEBUFFER, 0);\n        glBindRenderbuffer(GL_RENDERBUFFER, 0);\n\n        glActiveTexture(GL_TEXTURE4);\n        glBindTexture(GL_TEXTURE_2D, [inputFramebufferForDisplay texture]);\n        glUniform1i(displayInputTextureUniform, 4);\n\n        glVertexAttribPointer(displayPositionAttribute, 2, GL_FLOAT, 0, 0, imageVertices);\n        glVertexAttribPointer(displayTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [GPUImageView textureCoordinatesForRotation:inputRotation]);\n\n        BOOL canLockFocus = YES;\n        if ([self respondsToSelector:@selector(lockFocusIfCanDraw)])\n        {\n            canLockFocus = [self lockFocusIfCanDraw];\n        }\n        else\n        {\n            [self lockFocus];\n        }\n        \n        if (canLockFocus)\n        {\n            glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\n            \n            [self presentFramebuffer];\n            glBindTexture(GL_TEXTURE_2D, 0);\n            [self 
unlockFocus];\n        }\n        \n        [inputFramebufferForDisplay unlock];\n        inputFramebufferForDisplay = nil;\n    });\n}\n\n- (NSInteger)nextAvailableTextureIndex;\n{\n    return 0;\n}\n\n- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;\n{\n    inputFramebufferForDisplay = newInputFramebuffer;\n    [inputFramebufferForDisplay lock];\n}\n\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n    inputRotation = newInputRotation;\n}\n\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n{\n    if ((newSize.width < 1.0) || (newSize.height < 1.0))\n    {\n        return;\n    }\n    \n    runSynchronouslyOnVideoProcessingQueue(^{\n        CGSize rotatedSize = newSize;\n        \n        if (GPUImageRotationSwapsWidthAndHeight(inputRotation))\n        {\n            rotatedSize.width = newSize.height;\n            rotatedSize.height = newSize.width;\n        }\n        \n        if (!CGSizeEqualToSize(inputImageSize, rotatedSize))\n        {\n            inputImageSize = rotatedSize;\n            [self recalculateViewGeometry];\n        }\n    });\n}\n\n- (CGSize)maximumOutputSize;\n{\n    if ([self respondsToSelector:@selector(convertSizeToBacking:)])\n    {\n        return [self convertSizeToBacking:self.bounds.size];\n    }\n    else\n    {\n        return self.bounds.size;\n    }\n}\n\n- (void)endProcessing\n{\n}\n\n- (BOOL)shouldIgnoreUpdatesToThisTarget;\n{\n    return NO;\n}\n\n- (void)conserveMemoryForNextFrame;\n{\n    \n}\n\n- (BOOL)wantsMonochromeInput;\n{\n    return NO;\n}\n\n- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;\n{\n    \n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (CGSize)sizeInPixels;\n{\n    if (CGSizeEqualToSize(_sizeInPixels, CGSizeZero))\n    {\n        return [self maximumOutputSize];\n    }\n    else\n    {\n        return _sizeInPixels;\n    }\n}\n\n- (void)setFillMode:(GPUImageFillModeType)newValue;\n{\n    _fillMode = newValue;\n    [self recalculateViewGeometry];\n}\n\n@end\n"
  },
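  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Mac/Examples/GPUImageViewDisplaySketch.m",
    "content": "//  Hypothetical usage sketch -- not part of GPUImage; the file name and DisplayImageInViewSketch() are illustrative only.\n//  Wires a GPUImagePicture into the GPUImageView above, using the fillMode and\n//  setBackgroundColorRed:green:blue:alpha: API declared in GPUImageView.h.\n\n#import \"GPUImageView.h\"\n#import \"GPUImagePicture.h\"\n\nstatic GPUImagePicture *DisplayImageInViewSketch(GPUImageView *renderView, NSImage *sourceImage)\n{\n    // Letterbox against a black background instead of stretching the image to the view bounds.\n    renderView.fillMode = kGPUImageFillModePreserveAspectRatio;\n    [renderView setBackgroundColorRed:0.0 green:0.0 blue:0.0 alpha:1.0];\n\n    GPUImagePicture *picture = [[GPUImagePicture alloc] initWithImage:sourceImage];\n\n    // GPUImageView conforms to GPUImageInput, so it can be added as a target of any GPUImageOutput.\n    [picture addTarget:renderView];\n    [picture processImage];\n\n    // The caller should keep a strong reference to the picture while it is feeding the view.\n    return picture;\n}\n"
  },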
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/Mac/en.lproj/InfoPlist.strings",
    "content": "/* Localized versions of Info.plist keys */\n\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/iOS/Framework/GPUImageFramework.h",
    "content": "#import <UIKit/UIKit.h>\n\n//! Project version number for GPUImageFramework.\nFOUNDATION_EXPORT double GPUImageFrameworkVersionNumber;\n\n//! Project version string for GPUImageFramework.\nFOUNDATION_EXPORT const unsigned char GPUImageFrameworkVersionString[];\n\n#import <GPUImage/GLProgram.h>\n\n// Base classes\n#import <GPUImage/GPUImageContext.h>\n#import <GPUImage/GPUImageOutput.h>\n#import <GPUImage/GPUImageView.h>\n#import <GPUImage/GPUImageVideoCamera.h>\n#import <GPUImage/GPUImageStillCamera.h>\n#import <GPUImage/GPUImageMovie.h>\n#import <GPUImage/GPUImagePicture.h>\n#import <GPUImage/GPUImageRawDataInput.h>\n#import <GPUImage/GPUImageRawDataOutput.h>\n#import <GPUImage/GPUImageMovieWriter.h>\n#import <GPUImage/GPUImageFilterPipeline.h>\n#import <GPUImage/GPUImageTextureOutput.h>\n#import <GPUImage/GPUImageFilterGroup.h>\n#import <GPUImage/GPUImageTextureInput.h>\n#import <GPUImage/GPUImageUIElement.h>\n#import <GPUImage/GPUImageBuffer.h>\n#import <GPUImage/GPUImageFramebuffer.h>\n#import <GPUImage/GPUImageFramebufferCache.h>\n\n// Filters\n#import <GPUImage/GPUImageFilter.h>\n#import <GPUImage/GPUImageTwoInputFilter.h>\n#import <GPUImage/GPUImagePixellateFilter.h>\n#import <GPUImage/GPUImagePixellatePositionFilter.h>\n#import <GPUImage/GPUImageSepiaFilter.h>\n#import <GPUImage/GPUImageColorInvertFilter.h>\n#import <GPUImage/GPUImageSaturationFilter.h>\n#import <GPUImage/GPUImageContrastFilter.h>\n#import <GPUImage/GPUImageExposureFilter.h>\n#import <GPUImage/GPUImageBrightnessFilter.h>\n#import <GPUImage/GPUImageLevelsFilter.h>\n#import <GPUImage/GPUImageSharpenFilter.h>\n#import <GPUImage/GPUImageGammaFilter.h>\n#import <GPUImage/GPUImageSobelEdgeDetectionFilter.h>\n#import <GPUImage/GPUImageSketchFilter.h>\n#import <GPUImage/GPUImageToonFilter.h>\n#import <GPUImage/GPUImageSmoothToonFilter.h>\n#import <GPUImage/GPUImageMultiplyBlendFilter.h>\n#import <GPUImage/GPUImageDissolveBlendFilter.h>\n#import <GPUImage/GPUImageKuwaharaFilter.h>\n#import <GPUImage/GPUImageKuwaharaRadius3Filter.h>\n#import <GPUImage/GPUImageVignetteFilter.h>\n#import <GPUImage/GPUImageGaussianBlurFilter.h>\n#import <GPUImage/GPUImageGaussianBlurPositionFilter.h>\n#import <GPUImage/GPUImageGaussianSelectiveBlurFilter.h>\n#import <GPUImage/GPUImageOverlayBlendFilter.h>\n#import <GPUImage/GPUImageDarkenBlendFilter.h>\n#import <GPUImage/GPUImageLightenBlendFilter.h>\n#import <GPUImage/GPUImageSwirlFilter.h>\n#import <GPUImage/GPUImageSourceOverBlendFilter.h>\n#import <GPUImage/GPUImageColorBurnBlendFilter.h>\n#import <GPUImage/GPUImageColorDodgeBlendFilter.h>\n#import <GPUImage/GPUImageScreenBlendFilter.h>\n#import <GPUImage/GPUImageExclusionBlendFilter.h>\n#import <GPUImage/GPUImageDifferenceBlendFilter.h>\n#import <GPUImage/GPUImageSubtractBlendFilter.h>\n#import <GPUImage/GPUImageHardLightBlendFilter.h>\n#import <GPUImage/GPUImageSoftLightBlendFilter.h>\n#import <GPUImage/GPUImageColorBlendFilter.h>\n#import <GPUImage/GPUImageHueBlendFilter.h>\n#import <GPUImage/GPUImageSaturationBlendFilter.h>\n#import <GPUImage/GPUImageLuminosityBlendFilter.h>\n#import <GPUImage/GPUImageCropFilter.h>\n#import <GPUImage/GPUImageGrayscaleFilter.h>\n#import <GPUImage/GPUImageTransformFilter.h>\n#import <GPUImage/GPUImageChromaKeyBlendFilter.h>\n#import <GPUImage/GPUImageHazeFilter.h>\n#import <GPUImage/GPUImageLuminanceThresholdFilter.h>\n#import <GPUImage/GPUImagePosterizeFilter.h>\n#import <GPUImage/GPUImageBoxBlurFilter.h>\n#import <GPUImage/GPUImageAdaptiveThresholdFilter.h>\n#import 
<GPUImage/GPUImageUnsharpMaskFilter.h>\n#import <GPUImage/GPUImageBulgeDistortionFilter.h>\n#import <GPUImage/GPUImagePinchDistortionFilter.h>\n#import <GPUImage/GPUImageCrosshatchFilter.h>\n#import <GPUImage/GPUImageCGAColorspaceFilter.h>\n#import <GPUImage/GPUImagePolarPixellateFilter.h>\n#import <GPUImage/GPUImageStretchDistortionFilter.h>\n#import <GPUImage/GPUImagePerlinNoiseFilter.h>\n#import <GPUImage/GPUImageJFAVoronoiFilter.h>\n#import <GPUImage/GPUImageVoronoiConsumerFilter.h>\n#import <GPUImage/GPUImageMosaicFilter.h>\n#import <GPUImage/GPUImageTiltShiftFilter.h>\n#import <GPUImage/GPUImage3x3ConvolutionFilter.h>\n#import <GPUImage/GPUImageEmbossFilter.h>\n#import <GPUImage/GPUImageCannyEdgeDetectionFilter.h>\n#import <GPUImage/GPUImageThresholdEdgeDetectionFilter.h>\n#import <GPUImage/GPUImageMaskFilter.h>\n#import <GPUImage/GPUImageHistogramFilter.h>\n#import <GPUImage/GPUImageHistogramGenerator.h>\n#import <GPUImage/GPUImagePrewittEdgeDetectionFilter.h>\n#import <GPUImage/GPUImageXYDerivativeFilter.h>\n#import <GPUImage/GPUImageHarrisCornerDetectionFilter.h>\n#import <GPUImage/GPUImageAlphaBlendFilter.h>\n#import <GPUImage/GPUImageNormalBlendFilter.h>\n#import <GPUImage/GPUImageNonMaximumSuppressionFilter.h>\n#import <GPUImage/GPUImageRGBFilter.h>\n#import <GPUImage/GPUImageMedianFilter.h>\n#import <GPUImage/GPUImageBilateralFilter.h>\n#import <GPUImage/GPUImageCrosshairGenerator.h>\n#import <GPUImage/GPUImageToneCurveFilter.h>\n#import <GPUImage/GPUImageNobleCornerDetectionFilter.h>\n#import <GPUImage/GPUImageShiTomasiFeatureDetectionFilter.h>\n#import <GPUImage/GPUImageErosionFilter.h>\n#import <GPUImage/GPUImageRGBErosionFilter.h>\n#import <GPUImage/GPUImageDilationFilter.h>\n#import <GPUImage/GPUImageRGBDilationFilter.h>\n#import <GPUImage/GPUImageOpeningFilter.h>\n#import <GPUImage/GPUImageRGBOpeningFilter.h>\n#import <GPUImage/GPUImageClosingFilter.h>\n#import <GPUImage/GPUImageRGBClosingFilter.h>\n#import <GPUImage/GPUImageColorPackingFilter.h>\n#import <GPUImage/GPUImageSphereRefractionFilter.h>\n#import <GPUImage/GPUImageMonochromeFilter.h>\n#import <GPUImage/GPUImageOpacityFilter.h>\n#import <GPUImage/GPUImageHighlightShadowFilter.h>\n#import <GPUImage/GPUImageFalseColorFilter.h>\n#import <GPUImage/GPUImageHSBFilter.h>\n#import <GPUImage/GPUImageHueFilter.h>\n#import <GPUImage/GPUImageGlassSphereFilter.h>\n#import <GPUImage/GPUImageLookupFilter.h>\n#import <GPUImage/GPUImageAmatorkaFilter.h>\n#import <GPUImage/GPUImageMissEtikateFilter.h>\n#import <GPUImage/GPUImageSoftEleganceFilter.h>\n#import <GPUImage/GPUImageAddBlendFilter.h>\n#import <GPUImage/GPUImageDivideBlendFilter.h>\n#import <GPUImage/GPUImagePolkaDotFilter.h>\n#import <GPUImage/GPUImageLocalBinaryPatternFilter.h>\n#import <GPUImage/GPUImageColorLocalBinaryPatternFilter.h>\n#import <GPUImage/GPUImageLanczosResamplingFilter.h>\n#import <GPUImage/GPUImageAverageColor.h>\n#import <GPUImage/GPUImageSolidColorGenerator.h>\n#import <GPUImage/GPUImageLuminosity.h>\n#import <GPUImage/GPUImageAverageLuminanceThresholdFilter.h>\n#import <GPUImage/GPUImageWhiteBalanceFilter.h>\n#import <GPUImage/GPUImageChromaKeyFilter.h>\n#import <GPUImage/GPUImageLowPassFilter.h>\n#import <GPUImage/GPUImageHighPassFilter.h>\n#import <GPUImage/GPUImageMotionDetector.h>\n#import <GPUImage/GPUImageHalftoneFilter.h>\n#import <GPUImage/GPUImageThresholdedNonMaximumSuppressionFilter.h>\n#import <GPUImage/GPUImageHoughTransformLineDetector.h>\n#import <GPUImage/GPUImageParallelCoordinateLineTransformFilter.h>\n#import 
<GPUImage/GPUImageThresholdSketchFilter.h>\n#import <GPUImage/GPUImageLineGenerator.h>\n#import <GPUImage/GPUImageLinearBurnBlendFilter.h>\n#import <GPUImage/GPUImageGaussianBlurPositionFilter.h>\n#import <GPUImage/GPUImagePixellatePositionFilter.h>\n#import <GPUImage/GPUImageTwoInputCrossTextureSamplingFilter.h>\n#import <GPUImage/GPUImagePoissonBlendFilter.h>\n#import <GPUImage/GPUImageMotionBlurFilter.h>\n#import <GPUImage/GPUImageZoomBlurFilter.h>\n#import <GPUImage/GPUImageLaplacianFilter.h>\n#import <GPUImage/GPUImageiOSBlurFilter.h>\n#import <GPUImage/GPUImageLuminanceRangeFilter.h>\n#import <GPUImage/GPUImageDirectionalNonMaximumSuppressionFilter.h>\n#import <GPUImage/GPUImageDirectionalSobelEdgeDetectionFilter.h>\n#import <GPUImage/GPUImageSingleComponentGaussianBlurFilter.h>\n#import <GPUImage/GPUImageThreeInputFilter.h>\n#import <GPUImage/GPUImageFourInputFilter.h>\n#import <GPUImage/GPUImageWeakPixelInclusionFilter.h>\n#import <GPUImage/GPUImageFASTCornerDetectionFilter.h>\n#import <GPUImage/GPUImageMovieComposition.h>\n#import <GPUImage/GPUImageColourFASTFeatureDetector.h>\n#import <GPUImage/GPUImageColourFASTSamplingOperation.h>\n#import <GPUImage/GPUImageSolarizeFilter.h>\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/iOS/Framework/Info.plist",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n<plist version=\"1.0\">\n<dict>\n\t<key>CFBundleDevelopmentRegion</key>\n\t<string>en</string>\n\t<key>CFBundleExecutable</key>\n\t<string>${EXECUTABLE_NAME}</string>\n\t<key>CFBundleIdentifier</key>\n\t<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>\n\t<key>CFBundleInfoDictionaryVersion</key>\n\t<string>6.0</string>\n\t<key>CFBundleName</key>\n\t<string>${PRODUCT_NAME}</string>\n\t<key>CFBundlePackageType</key>\n\t<string>FMWK</string>\n\t<key>CFBundleShortVersionString</key>\n\t<string>0.1.4</string>\n\t<key>CFBundleSignature</key>\n\t<string>????</string>\n\t<key>CFBundleVersion</key>\n\t<string>${CURRENT_PROJECT_VERSION}</string>\n\t<key>NSPrincipalClass</key>\n\t<string></string>\n</dict>\n</plist>\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/iOS/Framework/module.modulemap",
    "content": "framework module GPUImage {\n  umbrella header \"GPUImageFramework.h\"\n\n  export *\n  module * { export * }\n}\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/iOS/GPUImage-Prefix.pch",
    "content": "//\n// Prefix header for all source files of the 'GPUImage' target in the 'GPUImage' project\n//\n\n#ifdef __OBJC__\n    #import <Foundation/Foundation.h>\n#endif\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/iOS/GPUImageContext.h",
    "content": "#import \"GLProgram.h\"\n#import \"GPUImageFramebuffer.h\"\n#import \"GPUImageFramebufferCache.h\"\n\n#define GPUImageRotationSwapsWidthAndHeight(rotation) ((rotation) == kGPUImageRotateLeft || (rotation) == kGPUImageRotateRight || (rotation) == kGPUImageRotateRightFlipVertical || (rotation) == kGPUImageRotateRightFlipHorizontal)\n\ntypedef NS_ENUM(NSUInteger, GPUImageRotationMode) {\n\tkGPUImageNoRotation,\n\tkGPUImageRotateLeft,\n\tkGPUImageRotateRight,\n\tkGPUImageFlipVertical,\n\tkGPUImageFlipHorizonal,\n\tkGPUImageRotateRightFlipVertical,\n\tkGPUImageRotateRightFlipHorizontal,\n\tkGPUImageRotate180\n};\n\n@interface GPUImageContext : NSObject\n\n@property(readonly, nonatomic) dispatch_queue_t contextQueue;\n@property(readwrite, retain, nonatomic) GLProgram *currentShaderProgram;\n@property(readonly, retain, nonatomic) EAGLContext *context;\n@property(readonly) CVOpenGLESTextureCacheRef coreVideoTextureCache;\n@property(readonly) GPUImageFramebufferCache *framebufferCache;\n\n+ (void *)contextKey;\n+ (GPUImageContext *)sharedImageProcessingContext;\n+ (dispatch_queue_t)sharedContextQueue;\n+ (GPUImageFramebufferCache *)sharedFramebufferCache;\n+ (void)useImageProcessingContext;\n- (void)useAsCurrentContext;\n+ (void)setActiveShaderProgram:(GLProgram *)shaderProgram;\n- (void)setContextShaderProgram:(GLProgram *)shaderProgram;\n+ (GLint)maximumTextureSizeForThisDevice;\n+ (GLint)maximumTextureUnitsForThisDevice;\n+ (GLint)maximumVaryingVectorsForThisDevice;\n+ (BOOL)deviceSupportsOpenGLESExtension:(NSString *)extension;\n+ (BOOL)deviceSupportsRedTextures;\n+ (BOOL)deviceSupportsFramebufferReads;\n+ (CGSize)sizeThatFitsWithinATextureForSize:(CGSize)inputSize;\n\n- (void)presentBufferForDisplay;\n- (GLProgram *)programForVertexShaderString:(NSString *)vertexShaderString fragmentShaderString:(NSString *)fragmentShaderString;\n\n- (void)useSharegroup:(EAGLSharegroup *)sharegroup;\n\n// Manage fast texture upload\n+ (BOOL)supportsFastTextureUpload;\n\n@end\n\n@protocol GPUImageInput <NSObject>\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;\n- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;\n- (NSInteger)nextAvailableTextureIndex;\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n- (CGSize)maximumOutputSize;\n- (void)endProcessing;\n- (BOOL)shouldIgnoreUpdatesToThisTarget;\n- (BOOL)enabled;\n- (BOOL)wantsMonochromeInput;\n- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;\n@end\n"
  },
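  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/iOS/Examples/GPUImageInputTargetSketch.m",
    "content": "//  Hypothetical sketch -- not part of GPUImage; the class name is illustrative only.\n//  Shows the minimum a custom render target needs in order to adopt the GPUImageInput protocol\n//  declared above: lock the incoming framebuffer when it is handed over, then unlock it once the\n//  frame has been consumed, mirroring what GPUImageView does.\n\n#import \"GPUImageContext.h\"\n\n@interface GPUImageLoggingTargetSketch : NSObject <GPUImageInput>\n@end\n\n@implementation GPUImageLoggingTargetSketch\n{\n    GPUImageFramebuffer *inputFramebuffer;\n    CGSize inputSize;\n}\n\n- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;\n{\n    inputFramebuffer = newInputFramebuffer;\n    [inputFramebuffer lock];\n}\n\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;\n{\n    NSLog(@\"Frame ready at %.0f x %.0f\", inputSize.width, inputSize.height);\n    [inputFramebuffer unlock];\n    inputFramebuffer = nil;\n}\n\n- (NSInteger)nextAvailableTextureIndex;\n{\n    return 0;\n}\n\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n{\n    inputSize = newSize;\n}\n\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n}\n\n- (CGSize)maximumOutputSize;\n{\n    // CGSizeZero signals that this target does not ask upstream outputs to constrain their size.\n    return CGSizeZero;\n}\n\n- (void)endProcessing;\n{\n}\n\n- (BOOL)shouldIgnoreUpdatesToThisTarget;\n{\n    return NO;\n}\n\n- (BOOL)enabled;\n{\n    return YES;\n}\n\n- (BOOL)wantsMonochromeInput;\n{\n    return NO;\n}\n\n- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;\n{\n}\n\n@end\n"
  },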
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/iOS/GPUImageContext.m",
    "content": "#import \"GPUImageContext.h\"\n#import <OpenGLES/EAGLDrawable.h>\n#import <AVFoundation/AVFoundation.h>\n\n#define MAXSHADERPROGRAMSALLOWEDINCACHE 40\n\nextern dispatch_queue_attr_t GPUImageDefaultQueueAttribute(void);\n\n@interface GPUImageContext()\n{\n    NSMutableDictionary *shaderProgramCache;\n    NSMutableArray *shaderProgramUsageHistory;\n    EAGLSharegroup *_sharegroup;\n}\n\n@end\n\n@implementation GPUImageContext\n\n@synthesize context = _context;\n@synthesize currentShaderProgram = _currentShaderProgram;\n@synthesize contextQueue = _contextQueue;\n@synthesize coreVideoTextureCache = _coreVideoTextureCache;\n@synthesize framebufferCache = _framebufferCache;\n\nstatic void *openGLESContextQueueKey;\n\n- (id)init;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n\n\topenGLESContextQueueKey = &openGLESContextQueueKey;\n    _contextQueue = dispatch_queue_create(\"com.sunsetlakesoftware.GPUImage.openGLESContextQueue\", GPUImageDefaultQueueAttribute());\n    \n#if OS_OBJECT_USE_OBJC\n\tdispatch_queue_set_specific(_contextQueue, openGLESContextQueueKey, (__bridge void *)self, NULL);\n#endif\n    shaderProgramCache = [[NSMutableDictionary alloc] init];\n    shaderProgramUsageHistory = [[NSMutableArray alloc] init];\n    \n    return self;\n}\n\n+ (void *)contextKey {\n\treturn openGLESContextQueueKey;\n}\n\n// Based on Colin Wheeler's example here: http://cocoasamurai.blogspot.com/2011/04/singletons-your-doing-them-wrong.html\n+ (GPUImageContext *)sharedImageProcessingContext;\n{\n    static dispatch_once_t pred;\n    static GPUImageContext *sharedImageProcessingContext = nil;\n    \n    dispatch_once(&pred, ^{\n        sharedImageProcessingContext = [[[self class] alloc] init];\n    });\n    return sharedImageProcessingContext;\n}\n\n+ (dispatch_queue_t)sharedContextQueue;\n{\n    return [[self sharedImageProcessingContext] contextQueue];\n}\n\n+ (GPUImageFramebufferCache *)sharedFramebufferCache;\n{\n    return [[self sharedImageProcessingContext] framebufferCache];\n}\n\n+ (void)useImageProcessingContext;\n{\n    [[GPUImageContext sharedImageProcessingContext] useAsCurrentContext];\n}\n\n- (void)useAsCurrentContext;\n{\n    EAGLContext *imageProcessingContext = [self context];\n    if ([EAGLContext currentContext] != imageProcessingContext)\n    {\n        [EAGLContext setCurrentContext:imageProcessingContext];\n    }\n}\n\n+ (void)setActiveShaderProgram:(GLProgram *)shaderProgram;\n{\n    GPUImageContext *sharedContext = [GPUImageContext sharedImageProcessingContext];\n    [sharedContext setContextShaderProgram:shaderProgram];\n}\n\n- (void)setContextShaderProgram:(GLProgram *)shaderProgram;\n{\n    EAGLContext *imageProcessingContext = [self context];\n    if ([EAGLContext currentContext] != imageProcessingContext)\n    {\n        [EAGLContext setCurrentContext:imageProcessingContext];\n    }\n    \n    if (self.currentShaderProgram != shaderProgram)\n    {\n        self.currentShaderProgram = shaderProgram;\n        [shaderProgram use];\n    }\n}\n\n+ (GLint)maximumTextureSizeForThisDevice;\n{\n    static dispatch_once_t pred;\n    static GLint maxTextureSize = 0;\n    \n    dispatch_once(&pred, ^{\n        [self useImageProcessingContext];\n        glGetIntegerv(GL_MAX_TEXTURE_SIZE, &maxTextureSize);\n    });\n\n    return maxTextureSize;\n}\n\n+ (GLint)maximumTextureUnitsForThisDevice;\n{\n    static dispatch_once_t pred;\n    static GLint maxTextureUnits = 0;\n\n    dispatch_once(&pred, ^{\n        [self useImageProcessingContext];\n        
glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, &maxTextureUnits);\n    });\n    \n    return maxTextureUnits;\n}\n\n+ (GLint)maximumVaryingVectorsForThisDevice;\n{\n    static dispatch_once_t pred;\n    static GLint maxVaryingVectors = 0;\n\n    dispatch_once(&pred, ^{\n        [self useImageProcessingContext];\n        glGetIntegerv(GL_MAX_VARYING_VECTORS, &maxVaryingVectors);\n    });\n\n    return maxVaryingVectors;\n}\n\n+ (BOOL)deviceSupportsOpenGLESExtension:(NSString *)extension;\n{\n    static dispatch_once_t pred;\n    static NSArray *extensionNames = nil;\n\n    // Cache extensions for later quick reference, since this won't change for a given device\n    dispatch_once(&pred, ^{\n        [GPUImageContext useImageProcessingContext];\n        NSString *extensionsString = [NSString stringWithCString:(const char *)glGetString(GL_EXTENSIONS) encoding:NSASCIIStringEncoding];\n        extensionNames = [extensionsString componentsSeparatedByString:@\" \"];\n    });\n\n    return [extensionNames containsObject:extension];\n}\n\n\n// http://www.khronos.org/registry/gles/extensions/EXT/EXT_texture_rg.txt\n\n+ (BOOL)deviceSupportsRedTextures;\n{\n    static dispatch_once_t pred;\n    static BOOL supportsRedTextures = NO;\n    \n    dispatch_once(&pred, ^{\n        supportsRedTextures = [GPUImageContext deviceSupportsOpenGLESExtension:@\"GL_EXT_texture_rg\"];\n    });\n    \n    return supportsRedTextures;\n}\n\n+ (BOOL)deviceSupportsFramebufferReads;\n{\n    static dispatch_once_t pred;\n    static BOOL supportsFramebufferReads = NO;\n    \n    dispatch_once(&pred, ^{\n        supportsFramebufferReads = [GPUImageContext deviceSupportsOpenGLESExtension:@\"GL_EXT_shader_framebuffer_fetch\"];\n    });\n    \n    return supportsFramebufferReads;\n}\n\n+ (CGSize)sizeThatFitsWithinATextureForSize:(CGSize)inputSize;\n{\n    GLint maxTextureSize = [self maximumTextureSizeForThisDevice]; \n    if ( (inputSize.width < maxTextureSize) && (inputSize.height < maxTextureSize) )\n    {\n        return inputSize;\n    }\n    \n    CGSize adjustedSize;\n    if (inputSize.width > inputSize.height)\n    {\n        adjustedSize.width = (CGFloat)maxTextureSize;\n        adjustedSize.height = ((CGFloat)maxTextureSize / inputSize.width) * inputSize.height;\n    }\n    else\n    {\n        adjustedSize.height = (CGFloat)maxTextureSize;\n        adjustedSize.width = ((CGFloat)maxTextureSize / inputSize.height) * inputSize.width;\n    }\n\n    return adjustedSize;\n}\n\n- (void)presentBufferForDisplay;\n{\n    [self.context presentRenderbuffer:GL_RENDERBUFFER];\n}\n\n- (GLProgram *)programForVertexShaderString:(NSString *)vertexShaderString fragmentShaderString:(NSString *)fragmentShaderString;\n{\n    NSString *lookupKeyForShaderProgram = [NSString stringWithFormat:@\"V: %@ - F: %@\", vertexShaderString, fragmentShaderString];\n    GLProgram *programFromCache = [shaderProgramCache objectForKey:lookupKeyForShaderProgram];\n\n    if (programFromCache == nil)\n    {\n        programFromCache = [[GLProgram alloc] initWithVertexShaderString:vertexShaderString fragmentShaderString:fragmentShaderString];\n        [shaderProgramCache setObject:programFromCache forKey:lookupKeyForShaderProgram];\n//        [shaderProgramUsageHistory addObject:lookupKeyForShaderProgram];\n//        if ([shaderProgramUsageHistory count] >= MAXSHADERPROGRAMSALLOWEDINCACHE)\n//        {\n//            for (NSUInteger currentShaderProgramRemovedFromCache = 0; currentShaderProgramRemovedFromCache < 10; currentShaderProgramRemovedFromCache++)\n//        
    {\n//                NSString *shaderProgramToRemoveFromCache = [shaderProgramUsageHistory objectAtIndex:0];\n//                [shaderProgramUsageHistory removeObjectAtIndex:0];\n//                [shaderProgramCache removeObjectForKey:shaderProgramToRemoveFromCache];\n//            }\n//        }\n    }\n    \n    return programFromCache;\n}\n\n- (void)useSharegroup:(EAGLSharegroup *)sharegroup;\n{\n    NSAssert(_context == nil, @\"Unable to use a share group when the context has already been created. Call this method before you use the context for the first time.\");\n    \n    _sharegroup = sharegroup;\n}\n\n- (EAGLContext *)createContext;\n{\n    EAGLContext *context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2 sharegroup:_sharegroup];\n    NSAssert(context != nil, @\"Unable to create an OpenGL ES 2.0 context. The GPUImage framework requires OpenGL ES 2.0 support to work.\");\n    return context;\n}\n\n\n#pragma mark -\n#pragma mark Manage fast texture upload\n\n+ (BOOL)supportsFastTextureUpload;\n{\n#if TARGET_IPHONE_SIMULATOR\n    return NO;\n#else\n    \n#pragma clang diagnostic push\n#pragma clang diagnostic ignored \"-Wtautological-pointer-compare\"\n    return (CVOpenGLESTextureCacheCreate != NULL);\n#pragma clang diagnostic pop\n\n#endif\n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (EAGLContext *)context;\n{\n    if (_context == nil)\n    {\n        _context = [self createContext];\n        [EAGLContext setCurrentContext:_context];\n        \n        // Set up a few global settings for the image processing pipeline\n        glDisable(GL_DEPTH_TEST);\n    }\n    \n    return _context;\n}\n\n- (CVOpenGLESTextureCacheRef)coreVideoTextureCache;\n{\n    if (_coreVideoTextureCache == NULL)\n    {\n#if defined(__IPHONE_6_0)\n        CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, [self context], NULL, &_coreVideoTextureCache);\n#else\n        CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)[self context], NULL, &_coreVideoTextureCache);\n#endif\n        \n        if (err)\n        {\n            NSAssert(NO, @\"Error at CVOpenGLESTextureCacheCreate %d\", err);\n        }\n\n    }\n    \n    return _coreVideoTextureCache;\n}\n\n- (GPUImageFramebufferCache *)framebufferCache;\n{\n    if (_framebufferCache == nil)\n    {\n        _framebufferCache = [[GPUImageFramebufferCache alloc] init];\n    }\n    \n    return _framebufferCache;\n}\n\n@end\n"
  },
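  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/iOS/Examples/GPUImageContextCapabilitySketch.m",
    "content": "//  Hypothetical sketch -- not part of GPUImage; the file name and ClampedTextureSizeSketch() are illustrative only.\n//  Exercises the GPUImageContext class methods implemented above: texture-size clamping and the\n//  fast-texture-upload check.\n\n#import \"GPUImageContext.h\"\n\nstatic CGSize ClampedTextureSizeSketch(CGSize requestedSize)\n{\n    // Scales the size down, preserving aspect ratio, so that both dimensions fit within\n    // [GPUImageContext maximumTextureSizeForThisDevice]; smaller sizes are returned unchanged.\n    CGSize fittedSize = [GPUImageContext sizeThatFitsWithinATextureForSize:requestedSize];\n\n    if (![GPUImageContext supportsFastTextureUpload])\n    {\n        // On the simulator (and devices without CVOpenGLESTextureCache) the slower upload path is used.\n        NSLog(@\"Fast texture upload unavailable on this device\");\n    }\n\n    return fittedSize;\n}\n"
  },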
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/iOS/GPUImageMovieWriter.h",
    "content": "#import <Foundation/Foundation.h>\n#import <AVFoundation/AVFoundation.h>\n#import \"GPUImageContext.h\"\n\nextern NSString *const kGPUImageColorSwizzlingFragmentShaderString;\n\n@protocol GPUImageMovieWriterDelegate <NSObject>\n\n@optional\n- (void)movieRecordingCompleted;\n- (void)movieRecordingFailedWithError:(NSError*)error;\n\n@end\n\n@interface GPUImageMovieWriter : NSObject <GPUImageInput>\n{\n    BOOL alreadyFinishedRecording;\n    \n    NSURL *movieURL;\n    NSString *fileType;\n\tAVAssetWriter *assetWriter;\n\tAVAssetWriterInput *assetWriterAudioInput;\n\tAVAssetWriterInput *assetWriterVideoInput;\n    AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferInput;\n    \n    GPUImageContext *_movieWriterContext;\n    CVPixelBufferRef renderTarget;\n    CVOpenGLESTextureRef renderTexture;\n\n    CGSize videoSize;\n    GPUImageRotationMode inputRotation;\n}\n\n@property(readwrite, nonatomic) BOOL hasAudioTrack;\n@property(readwrite, nonatomic) BOOL shouldPassthroughAudio;\n@property(readwrite, nonatomic) BOOL shouldInvalidateAudioSampleWhenDone;\n@property(nonatomic, copy) void(^completionBlock)(void);\n@property(nonatomic, copy) void(^failureBlock)(NSError*);\n@property(nonatomic, assign) id<GPUImageMovieWriterDelegate> delegate;\n@property(readwrite, nonatomic) BOOL encodingLiveVideo;\n@property(nonatomic, copy) BOOL(^videoInputReadyCallback)(void);\n@property(nonatomic, copy) BOOL(^audioInputReadyCallback)(void);\n@property(nonatomic, copy) void(^audioProcessingCallback)(SInt16 **samplesRef, CMItemCount numSamplesInBuffer);\n@property(nonatomic) BOOL enabled;\n@property(nonatomic, readonly) AVAssetWriter *assetWriter;\n@property(nonatomic, readonly) CMTime duration;\n@property(nonatomic, assign) CGAffineTransform transform;\n@property(nonatomic, copy) NSArray *metaData;\n@property(nonatomic, assign, getter = isPaused) BOOL paused;\n@property(nonatomic, retain) GPUImageContext *movieWriterContext;\n\n// Initialization and teardown\n- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize;\n- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize fileType:(NSString *)newFileType outputSettings:(NSDictionary *)outputSettings;\n\n- (void)setHasAudioTrack:(BOOL)hasAudioTrack audioSettings:(NSDictionary *)audioOutputSettings;\n\n// Movie recording\n- (void)startRecording;\n- (void)startRecordingInOrientation:(CGAffineTransform)orientationTransform;\n- (void)finishRecording;\n- (void)finishRecordingWithCompletionHandler:(void (^)(void))handler;\n- (void)cancelRecording;\n- (void)processAudioBuffer:(CMSampleBufferRef)audioBuffer;\n- (void)enableSynchronizationCallbacks;\n\n@end\n"
  },
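  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/iOS/Examples/GPUImageMovieWriterUsageSketch.m",
    "content": "//  Hypothetical usage sketch -- not part of GPUImage; the file name, functions, and the 1280x720 size are illustrative only.\n//  Shows the GPUImageMovieWriter API declared above: initialization, enabling the audio track, and\n//  starting/finishing a recording. In a full pipeline the writer would also be added as a target of a\n//  camera or filter (it conforms to GPUImageInput) so that it receives rendered frames.\n\n#import \"GPUImageMovieWriter.h\"\n\nstatic GPUImageMovieWriter *StartRecordingSketch(NSURL *outputURL)\n{\n    GPUImageMovieWriter *movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:outputURL size:CGSizeMake(1280.0, 720.0)];\n    movieWriter.encodingLiveVideo = YES;\n\n    // Passing nil lets the writer fall back to its built-in default audio settings.\n    [movieWriter setHasAudioTrack:YES audioSettings:nil];\n\n    [movieWriter startRecording];\n    return movieWriter;\n}\n\nstatic void FinishRecordingSketch(GPUImageMovieWriter *movieWriter)\n{\n    [movieWriter finishRecordingWithCompletionHandler:^{\n        NSLog(@\"Finished writing movie file\");\n    }];\n}\n"
  },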
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/iOS/GPUImageMovieWriter.m",
    "content": "#import \"GPUImageMovieWriter.h\"\n\n#import \"GPUImageContext.h\"\n#import \"GLProgram.h\"\n#import \"GPUImageFilter.h\"\n\nNSString *const kGPUImageColorSwizzlingFragmentShaderString = SHADER_STRING\n(\n varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n     gl_FragColor = texture2D(inputImageTexture, textureCoordinate).bgra;\n }\n);\n\n\n@interface GPUImageMovieWriter ()\n{\n    GLuint movieFramebuffer, movieRenderbuffer;\n    \n    GLProgram *colorSwizzlingProgram;\n    GLint colorSwizzlingPositionAttribute, colorSwizzlingTextureCoordinateAttribute;\n    GLint colorSwizzlingInputTextureUniform;\n\n    GPUImageFramebuffer *firstInputFramebuffer;\n    \n    BOOL discont;\n    CMTime startTime, previousFrameTime, previousAudioTime;\n    CMTime offsetTime;\n    \n    dispatch_queue_t audioQueue, videoQueue;\n    BOOL audioEncodingIsFinished, videoEncodingIsFinished;\n\n    BOOL isRecording;\n}\n\n// Movie recording\n- (void)initializeMovieWithOutputSettings:(NSMutableDictionary *)outputSettings;\n\n// Frame rendering\n- (void)createDataFBO;\n- (void)destroyDataFBO;\n- (void)setFilterFBO;\n\n- (void)renderAtInternalSizeUsingFramebuffer:(GPUImageFramebuffer *)inputFramebufferToUse;\n\n@end\n\n@implementation GPUImageMovieWriter\n\n@synthesize hasAudioTrack = _hasAudioTrack;\n@synthesize encodingLiveVideo = _encodingLiveVideo;\n@synthesize shouldPassthroughAudio = _shouldPassthroughAudio;\n@synthesize completionBlock;\n@synthesize failureBlock;\n@synthesize videoInputReadyCallback;\n@synthesize audioInputReadyCallback;\n@synthesize enabled;\n@synthesize shouldInvalidateAudioSampleWhenDone = _shouldInvalidateAudioSampleWhenDone;\n@synthesize paused = _paused;\n@synthesize movieWriterContext = _movieWriterContext;\n\n@synthesize delegate = _delegate;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize;\n{\n    return [self initWithMovieURL:newMovieURL size:newSize fileType:AVFileTypeQuickTimeMovie outputSettings:nil];\n}\n\n- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize fileType:(NSString *)newFileType outputSettings:(NSMutableDictionary *)outputSettings;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n\n    _shouldInvalidateAudioSampleWhenDone = NO;\n    \n    self.enabled = YES;\n    alreadyFinishedRecording = NO;\n    videoEncodingIsFinished = NO;\n    audioEncodingIsFinished = NO;\n\n    discont = NO;\n    videoSize = newSize;\n    movieURL = newMovieURL;\n    fileType = newFileType;\n    startTime = kCMTimeInvalid;\n    _encodingLiveVideo = [[outputSettings objectForKey:@\"EncodingLiveVideo\"] isKindOfClass:[NSNumber class]] ? 
[[outputSettings objectForKey:@\"EncodingLiveVideo\"] boolValue] : YES;\n    previousFrameTime = kCMTimeNegativeInfinity;\n    previousAudioTime = kCMTimeNegativeInfinity;\n    inputRotation = kGPUImageNoRotation;\n    \n    _movieWriterContext = [[GPUImageContext alloc] init];\n    [_movieWriterContext useSharegroup:[[[GPUImageContext sharedImageProcessingContext] context] sharegroup]];\n\n    runSynchronouslyOnContextQueue(_movieWriterContext, ^{\n        [_movieWriterContext useAsCurrentContext];\n        \n        if ([GPUImageContext supportsFastTextureUpload])\n        {\n            colorSwizzlingProgram = [_movieWriterContext programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImagePassthroughFragmentShaderString];\n        }\n        else\n        {\n            colorSwizzlingProgram = [_movieWriterContext programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageColorSwizzlingFragmentShaderString];\n        }\n        \n        if (!colorSwizzlingProgram.initialized)\n        {\n            [colorSwizzlingProgram addAttribute:@\"position\"];\n            [colorSwizzlingProgram addAttribute:@\"inputTextureCoordinate\"];\n            \n            if (![colorSwizzlingProgram link])\n            {\n                NSString *progLog = [colorSwizzlingProgram programLog];\n                NSLog(@\"Program link log: %@\", progLog);\n                NSString *fragLog = [colorSwizzlingProgram fragmentShaderLog];\n                NSLog(@\"Fragment shader compile log: %@\", fragLog);\n                NSString *vertLog = [colorSwizzlingProgram vertexShaderLog];\n                NSLog(@\"Vertex shader compile log: %@\", vertLog);\n                colorSwizzlingProgram = nil;\n                NSAssert(NO, @\"Filter shader link failed\");\n            }\n        }        \n        \n        colorSwizzlingPositionAttribute = [colorSwizzlingProgram attributeIndex:@\"position\"];\n        colorSwizzlingTextureCoordinateAttribute = [colorSwizzlingProgram attributeIndex:@\"inputTextureCoordinate\"];\n        colorSwizzlingInputTextureUniform = [colorSwizzlingProgram uniformIndex:@\"inputImageTexture\"];\n        \n        [_movieWriterContext setContextShaderProgram:colorSwizzlingProgram];\n        \n        glEnableVertexAttribArray(colorSwizzlingPositionAttribute);\n        glEnableVertexAttribArray(colorSwizzlingTextureCoordinateAttribute);\n    });\n        \n    [self initializeMovieWithOutputSettings:outputSettings];\n\n    return self;\n}\n\n- (void)dealloc;\n{\n    [self destroyDataFBO];\n\n#if !OS_OBJECT_USE_OBJC\n    if( audioQueue != NULL )\n    {\n        dispatch_release(audioQueue);\n    }\n    if( videoQueue != NULL )\n    {\n        dispatch_release(videoQueue);\n    }\n#endif\n}\n\n#pragma mark -\n#pragma mark Movie recording\n\n- (void)initializeMovieWithOutputSettings:(NSDictionary *)outputSettings;\n{\n    isRecording = NO;\n    \n    self.enabled = YES;\n    NSError *error = nil;\n    assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL fileType:fileType error:&error];\n    if (error != nil)\n    {\n        NSLog(@\"Error: %@\", error);\n        if (failureBlock) \n        {\n            failureBlock(error);\n        }\n        else \n        {\n            if(self.delegate && [self.delegate respondsToSelector:@selector(movieRecordingFailedWithError:)])\n            {\n                [self.delegate movieRecordingFailedWithError:error];\n            }\n        }\n    }\n    \n    // Set this to make sure that a 
functional movie is produced, even if the recording is cut off mid-stream. Only the last second should be lost in that case.\n    assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1.0, 1000);\n    \n    // use default output settings if none specified\n    if (outputSettings == nil) \n    {\n        NSMutableDictionary *settings = [[NSMutableDictionary alloc] init];\n        [settings setObject:AVVideoCodecH264 forKey:AVVideoCodecKey];\n        [settings setObject:[NSNumber numberWithInt:videoSize.width] forKey:AVVideoWidthKey];\n        [settings setObject:[NSNumber numberWithInt:videoSize.height] forKey:AVVideoHeightKey];\n        outputSettings = settings;\n    }\n    // custom output settings specified\n    else \n    {\n\t\t__unused NSString *videoCodec = [outputSettings objectForKey:AVVideoCodecKey];\n\t\t__unused NSNumber *width = [outputSettings objectForKey:AVVideoWidthKey];\n\t\t__unused NSNumber *height = [outputSettings objectForKey:AVVideoHeightKey];\n\t\t\n\t\tNSAssert(videoCodec && width && height, @\"OutputSettings is missing required parameters.\");\n        \n        if( [outputSettings objectForKey:@\"EncodingLiveVideo\"] ) {\n            NSMutableDictionary *tmp = [outputSettings mutableCopy];\n            [tmp removeObjectForKey:@\"EncodingLiveVideo\"];\n            outputSettings = tmp;\n        }\n    }\n    \n    /*\n    NSDictionary *videoCleanApertureSettings = [NSDictionary dictionaryWithObjectsAndKeys:\n                                                [NSNumber numberWithInt:videoSize.width], AVVideoCleanApertureWidthKey,\n                                                [NSNumber numberWithInt:videoSize.height], AVVideoCleanApertureHeightKey,\n                                                [NSNumber numberWithInt:0], AVVideoCleanApertureHorizontalOffsetKey,\n                                                [NSNumber numberWithInt:0], AVVideoCleanApertureVerticalOffsetKey,\n                                                nil];\n\n    NSDictionary *videoAspectRatioSettings = [NSDictionary dictionaryWithObjectsAndKeys:\n                                              [NSNumber numberWithInt:3], AVVideoPixelAspectRatioHorizontalSpacingKey,\n                                              [NSNumber numberWithInt:3], AVVideoPixelAspectRatioVerticalSpacingKey,\n                                              nil];\n\n    NSMutableDictionary * compressionProperties = [[NSMutableDictionary alloc] init];\n    [compressionProperties setObject:videoCleanApertureSettings forKey:AVVideoCleanApertureKey];\n    [compressionProperties setObject:videoAspectRatioSettings forKey:AVVideoPixelAspectRatioKey];\n    [compressionProperties setObject:[NSNumber numberWithInt: 2000000] forKey:AVVideoAverageBitRateKey];\n    [compressionProperties setObject:[NSNumber numberWithInt: 16] forKey:AVVideoMaxKeyFrameIntervalKey];\n    [compressionProperties setObject:AVVideoProfileLevelH264Main31 forKey:AVVideoProfileLevelKey];\n    \n    [outputSettings setObject:compressionProperties forKey:AVVideoCompressionPropertiesKey];\n    */\n     \n    assetWriterVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];\n    assetWriterVideoInput.expectsMediaDataInRealTime = _encodingLiveVideo;\n    \n    // You need to use BGRA for the video in order to get realtime encoding. 
I use a color-swizzling shader to line up glReadPixels' normal RGBA output with the movie input's BGRA.\n    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,\n                                                           [NSNumber numberWithInt:videoSize.width], kCVPixelBufferWidthKey,\n                                                           [NSNumber numberWithInt:videoSize.height], kCVPixelBufferHeightKey,\n                                                           nil];\n//    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey,\n//                                                           nil];\n        \n    assetWriterPixelBufferInput = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:assetWriterVideoInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];\n    \n    [assetWriter addInput:assetWriterVideoInput];\n}\n\n- (void)setEncodingLiveVideo:(BOOL) value\n{\n    _encodingLiveVideo = value;\n    if (isRecording) {\n        NSAssert(NO, @\"Can not change Encoding Live Video while recording\");\n    }\n    else\n    {\n        assetWriterVideoInput.expectsMediaDataInRealTime = _encodingLiveVideo;\n        assetWriterAudioInput.expectsMediaDataInRealTime = _encodingLiveVideo;\n    }\n}\n\n- (void)startRecording;\n{\n    alreadyFinishedRecording = NO;\n    startTime = kCMTimeInvalid;\n    runSynchronouslyOnContextQueue(_movieWriterContext, ^{\n        if (audioInputReadyCallback == NULL)\n        {\n            [assetWriter startWriting];\n        }\n    });\n    isRecording = YES;\n\t//    [assetWriter startSessionAtSourceTime:kCMTimeZero];\n}\n\n- (void)startRecordingInOrientation:(CGAffineTransform)orientationTransform;\n{\n\tassetWriterVideoInput.transform = orientationTransform;\n\n\t[self startRecording];\n}\n\n- (void)cancelRecording;\n{\n    if (assetWriter.status == AVAssetWriterStatusCompleted)\n    {\n        return;\n    }\n    \n    isRecording = NO;\n    runSynchronouslyOnContextQueue(_movieWriterContext, ^{\n        alreadyFinishedRecording = YES;\n\n        if( assetWriter.status == AVAssetWriterStatusWriting && ! videoEncodingIsFinished )\n        {\n            videoEncodingIsFinished = YES;\n            [assetWriterVideoInput markAsFinished];\n        }\n        if( assetWriter.status == AVAssetWriterStatusWriting && ! audioEncodingIsFinished )\n        {\n            audioEncodingIsFinished = YES;\n            [assetWriterAudioInput markAsFinished];\n        }\n        [assetWriter cancelWriting];\n    });\n}\n\n- (void)finishRecording;\n{\n    [self finishRecordingWithCompletionHandler:NULL];\n}\n\n- (void)finishRecordingWithCompletionHandler:(void (^)(void))handler;\n{\n    runSynchronouslyOnContextQueue(_movieWriterContext, ^{\n        isRecording = NO;\n        \n        if (assetWriter.status == AVAssetWriterStatusCompleted || assetWriter.status == AVAssetWriterStatusCancelled || assetWriter.status == AVAssetWriterStatusUnknown)\n        {\n            if (handler)\n                runAsynchronouslyOnContextQueue(_movieWriterContext, handler);\n            return;\n        }\n        if( assetWriter.status == AVAssetWriterStatusWriting && ! 
videoEncodingIsFinished )\n        {\n            videoEncodingIsFinished = YES;\n            [assetWriterVideoInput markAsFinished];\n        }\n        if( assetWriter.status == AVAssetWriterStatusWriting && ! audioEncodingIsFinished )\n        {\n            audioEncodingIsFinished = YES;\n            [assetWriterAudioInput markAsFinished];\n        }\n#if (!defined(__IPHONE_6_0) || (__IPHONE_OS_VERSION_MAX_ALLOWED < __IPHONE_6_0))\n        // Not iOS 6 SDK\n        [assetWriter finishWriting];\n        if (handler)\n            runAsynchronouslyOnContextQueue(_movieWriterContext,handler);\n#else\n        // iOS 6 SDK\n        if ([assetWriter respondsToSelector:@selector(finishWritingWithCompletionHandler:)]) {\n            // Running iOS 6\n            [assetWriter finishWritingWithCompletionHandler:(handler ?: ^{ })];\n        }\n        else {\n            // Not running iOS 6\n#pragma clang diagnostic push\n#pragma clang diagnostic ignored \"-Wdeprecated-declarations\"\n            [assetWriter finishWriting];\n#pragma clang diagnostic pop\n            if (handler)\n                runAsynchronouslyOnContextQueue(_movieWriterContext, handler);\n        }\n#endif\n    });\n}\n\n- (void)processAudioBuffer:(CMSampleBufferRef)audioBuffer;\n{\n    if (!isRecording || _paused)\n    {\n        return;\n    }\n    \n//    if (_hasAudioTrack && CMTIME_IS_VALID(startTime))\n    if (_hasAudioTrack)\n    {\n        CFRetain(audioBuffer);\n\n        CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(audioBuffer);\n        \n        if (CMTIME_IS_INVALID(startTime))\n        {\n            runSynchronouslyOnContextQueue(_movieWriterContext, ^{\n                if ((audioInputReadyCallback == NULL) && (assetWriter.status != AVAssetWriterStatusWriting))\n                {\n                    [assetWriter startWriting];\n                }\n                [assetWriter startSessionAtSourceTime:currentSampleTime];\n                startTime = currentSampleTime;\n            });\n        }\n\n        if (!assetWriterAudioInput.readyForMoreMediaData && _encodingLiveVideo)\n        {\n            NSLog(@\"1: Had to drop an audio frame: %@\", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));\n            if (_shouldInvalidateAudioSampleWhenDone)\n            {\n                CMSampleBufferInvalidate(audioBuffer);\n            }\n            CFRelease(audioBuffer);\n            return;\n        }\n        \n        if (discont) {\n            discont = NO;\n            \n            CMTime current;\n            if (offsetTime.value > 0) {\n                current = CMTimeSubtract(currentSampleTime, offsetTime);\n            } else {\n                current = currentSampleTime;\n            }\n            \n            CMTime offset = CMTimeSubtract(current, previousAudioTime);\n            \n            if (offsetTime.value == 0) {\n                offsetTime = offset;\n            } else {\n                offsetTime = CMTimeAdd(offsetTime, offset);\n            }\n        }\n        \n        if (offsetTime.value > 0) {\n            CFRelease(audioBuffer);\n            audioBuffer = [self adjustTime:audioBuffer by:offsetTime];\n            CFRetain(audioBuffer);\n        }\n        \n        // record most recent time so we know the length of the pause\n        currentSampleTime = CMSampleBufferGetPresentationTimeStamp(audioBuffer);\n\n        previousAudioTime = currentSampleTime;\n        \n        //if the consumer wants to do something with 
the audio samples before writing, let him.\n        if (self.audioProcessingCallback) {\n            //need to introspect into the opaque CMBlockBuffer structure to find its raw sample buffers.\n            CMBlockBufferRef buffer = CMSampleBufferGetDataBuffer(audioBuffer);\n            CMItemCount numSamplesInBuffer = CMSampleBufferGetNumSamples(audioBuffer);\n            AudioBufferList audioBufferList;\n            \n            CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(audioBuffer,\n                                                                    NULL,\n                                                                    &audioBufferList,\n                                                                    sizeof(audioBufferList),\n                                                                    NULL,\n                                                                    NULL,\n                                                                    kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,\n                                                                    &buffer\n                                                                    );\n            //passing a live pointer to the audio buffers, try to process them in-place or we might have syncing issues.\n            for (int bufferCount=0; bufferCount < audioBufferList.mNumberBuffers; bufferCount++) {\n                SInt16 *samples = (SInt16 *)audioBufferList.mBuffers[bufferCount].mData;\n                self.audioProcessingCallback(&samples, numSamplesInBuffer);\n            }\n        }\n        \n//        NSLog(@\"Recorded audio sample time: %lld, %d, %lld\", currentSampleTime.value, currentSampleTime.timescale, currentSampleTime.epoch);\n        void(^write)() = ^() {\n            while( ! assetWriterAudioInput.readyForMoreMediaData && ! _encodingLiveVideo && ! 
audioEncodingIsFinished ) {\n                NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.5];\n                //NSLog(@\"audio waiting...\");\n                [[NSRunLoop currentRunLoop] runUntilDate:maxDate];\n            }\n            if (!assetWriterAudioInput.readyForMoreMediaData)\n            {\n                NSLog(@\"2: Had to drop an audio frame %@\", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));\n            }\n            else if(assetWriter.status == AVAssetWriterStatusWriting)\n            {\n                if (![assetWriterAudioInput appendSampleBuffer:audioBuffer])\n                    NSLog(@\"Problem appending audio buffer at time: %@\", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));\n            }\n            else\n            {\n                //NSLog(@\"Wrote an audio frame %@\", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, currentSampleTime)));\n            }\n\n            if (_shouldInvalidateAudioSampleWhenDone)\n            {\n                CMSampleBufferInvalidate(audioBuffer);\n            }\n            CFRelease(audioBuffer);\n        };\n//        runAsynchronouslyOnContextQueue(_movieWriterContext, write);\n        if( _encodingLiveVideo )\n\n        {\n            runAsynchronouslyOnContextQueue(_movieWriterContext, write);\n        }\n        else\n        {\n            write();\n        }\n    }\n}\n\n- (void)enableSynchronizationCallbacks;\n{\n    if (videoInputReadyCallback != NULL)\n    {\n        if( assetWriter.status != AVAssetWriterStatusWriting )\n        {\n            [assetWriter startWriting];\n        }\n        videoQueue = dispatch_queue_create(\"com.sunsetlakesoftware.GPUImage.videoReadingQueue\", GPUImageDefaultQueueAttribute());\n        [assetWriterVideoInput requestMediaDataWhenReadyOnQueue:videoQueue usingBlock:^{\n            if( _paused )\n            {\n                //NSLog(@\"video requestMediaDataWhenReadyOnQueue paused\");\n                // if we don't sleep, we'll get called back almost immediately, chewing up CPU\n                usleep(10000);\n                return;\n            }\n            //NSLog(@\"video requestMediaDataWhenReadyOnQueue begin\");\n            while( assetWriterVideoInput.readyForMoreMediaData && ! _paused )\n            {\n                if( videoInputReadyCallback && ! videoInputReadyCallback() && ! videoEncodingIsFinished )\n                {\n                    runAsynchronouslyOnContextQueue(_movieWriterContext, ^{\n                        if( assetWriter.status == AVAssetWriterStatusWriting && ! 
videoEncodingIsFinished )\n                        {\n                            videoEncodingIsFinished = YES;\n                            [assetWriterVideoInput markAsFinished];\n                        }\n                    });\n                }\n            }\n            //NSLog(@\"video requestMediaDataWhenReadyOnQueue end\");\n        }];\n    }\n    \n    if (audioInputReadyCallback != NULL)\n    {\n        audioQueue = dispatch_queue_create(\"com.sunsetlakesoftware.GPUImage.audioReadingQueue\", GPUImageDefaultQueueAttribute());\n        [assetWriterAudioInput requestMediaDataWhenReadyOnQueue:audioQueue usingBlock:^{\n            if( _paused )\n            {\n                //NSLog(@\"audio requestMediaDataWhenReadyOnQueue paused\");\n                // if we don't sleep, we'll get called back almost immediately, chewing up CPU\n                usleep(10000);\n                return;\n            }\n            //NSLog(@\"audio requestMediaDataWhenReadyOnQueue begin\");\n            while( assetWriterAudioInput.readyForMoreMediaData && ! _paused )\n            {\n                if( audioInputReadyCallback && ! audioInputReadyCallback() && ! audioEncodingIsFinished )\n                {\n                    runAsynchronouslyOnContextQueue(_movieWriterContext, ^{\n                        if( assetWriter.status == AVAssetWriterStatusWriting && ! audioEncodingIsFinished )\n                        {\n                            audioEncodingIsFinished = YES;\n                            [assetWriterAudioInput markAsFinished];\n                        }\n                    });\n                }\n            }\n            //NSLog(@\"audio requestMediaDataWhenReadyOnQueue end\");\n        }];\n    }        \n    \n}\n\n#pragma mark -\n#pragma mark Frame rendering\n\n- (void)createDataFBO;\n{\n    glActiveTexture(GL_TEXTURE1);\n    glGenFramebuffers(1, &movieFramebuffer);\n    glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);\n    \n    if ([GPUImageContext supportsFastTextureUpload])\n    {\n        // Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/\n        \n\n        CVPixelBufferPoolCreatePixelBuffer (NULL, [assetWriterPixelBufferInput pixelBufferPool], &renderTarget);\n\n        /* AVAssetWriter will use BT.601 conversion matrix for RGB to YCbCr conversion\n         * regardless of the kCVImageBufferYCbCrMatrixKey value.\n         * Tagging the resulting video file as BT.601, is the best option right now.\n         * Creating a proper BT.709 video is not possible at the moment.\n         */\n        CVBufferSetAttachment(renderTarget, kCVImageBufferColorPrimariesKey, kCVImageBufferColorPrimaries_ITU_R_709_2, kCVAttachmentMode_ShouldPropagate);\n        CVBufferSetAttachment(renderTarget, kCVImageBufferYCbCrMatrixKey, kCVImageBufferYCbCrMatrix_ITU_R_601_4, kCVAttachmentMode_ShouldPropagate);\n        CVBufferSetAttachment(renderTarget, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, kCVAttachmentMode_ShouldPropagate);\n        \n        CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault, [_movieWriterContext coreVideoTextureCache], renderTarget,\n                                                      NULL, // texture attributes\n                                                      GL_TEXTURE_2D,\n                                                      GL_RGBA, // opengl format\n                                                      (int)videoSize.width,\n             
                                         (int)videoSize.height,\n                                                      GL_BGRA, // native iOS format\n                                                      GL_UNSIGNED_BYTE,\n                                                      0,\n                                                      &renderTexture);\n        \n        glBindTexture(CVOpenGLESTextureGetTarget(renderTexture), CVOpenGLESTextureGetName(renderTexture));\n        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);\n        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);\n        \n        glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(renderTexture), 0);\n    }\n    else\n    {\n        glGenRenderbuffers(1, &movieRenderbuffer);\n        glBindRenderbuffer(GL_RENDERBUFFER, movieRenderbuffer);\n        glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA8_OES, (int)videoSize.width, (int)videoSize.height);\n        glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, movieRenderbuffer);\t\n    }\n    \n\t\n\t__unused GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);\n    \n    NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @\"Incomplete filter FBO: %d\", status);\n}\n\n- (void)destroyDataFBO;\n{\n    runSynchronouslyOnContextQueue(_movieWriterContext, ^{\n        [_movieWriterContext useAsCurrentContext];\n\n        if (movieFramebuffer)\n        {\n            glDeleteFramebuffers(1, &movieFramebuffer);\n            movieFramebuffer = 0;\n        }\n        \n        if (movieRenderbuffer)\n        {\n            glDeleteRenderbuffers(1, &movieRenderbuffer);\n            movieRenderbuffer = 0;\n        }\n        \n        if ([GPUImageContext supportsFastTextureUpload])\n        {\n            if (renderTexture)\n            {\n                CFRelease(renderTexture);\n            }\n            if (renderTarget)\n            {\n                CVPixelBufferRelease(renderTarget);\n            }\n            \n        }\n    });\n}\n\n- (void)setFilterFBO;\n{\n    if (!movieFramebuffer)\n    {\n        [self createDataFBO];\n    }\n    \n    glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);\n    \n    glViewport(0, 0, (int)videoSize.width, (int)videoSize.height);\n}\n\n- (void)renderAtInternalSizeUsingFramebuffer:(GPUImageFramebuffer *)inputFramebufferToUse;\n{\n    [_movieWriterContext useAsCurrentContext];\n    [self setFilterFBO];\n    \n    [_movieWriterContext setContextShaderProgram:colorSwizzlingProgram];\n    \n    glClearColor(1.0f, 0.0f, 0.0f, 1.0f);\n    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);\n    \n    // This needs to be flipped to write out to video correctly\n    static const GLfloat squareVertices[] = {\n        -1.0f, -1.0f,\n        1.0f, -1.0f,\n        -1.0f,  1.0f,\n        1.0f,  1.0f,\n    };\n    \n    const GLfloat *textureCoordinates = [GPUImageFilter textureCoordinatesForRotation:inputRotation];\n    \n\tglActiveTexture(GL_TEXTURE4);\n\tglBindTexture(GL_TEXTURE_2D, [inputFramebufferToUse texture]);\n\tglUniform1i(colorSwizzlingInputTextureUniform, 4);\n    \n//    NSLog(@\"Movie writer framebuffer: %@\", inputFramebufferToUse);\n    \n    glVertexAttribPointer(colorSwizzlingPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);\n\tglVertexAttribPointer(colorSwizzlingTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);\n    \n    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\n    glFinish();\n}\n\n#pragma mark 
-\n#pragma mark GPUImageInput protocol\n\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;\n{\n    if (!isRecording || _paused)\n    {\n        [firstInputFramebuffer unlock];\n        return;\n    }\n\n    if (discont) {\n        discont = NO;\n        CMTime current;\n        \n        if (offsetTime.value > 0) {\n            current = CMTimeSubtract(frameTime, offsetTime);\n        } else {\n            current = frameTime;\n        }\n        \n        CMTime offset  = CMTimeSubtract(current, previousFrameTime);\n        \n        if (offsetTime.value == 0) {\n            offsetTime = offset;\n        } else {\n            offsetTime = CMTimeAdd(offsetTime, offset);\n        }\n    }\n    \n    if (offsetTime.value > 0) {\n        frameTime = CMTimeSubtract(frameTime, offsetTime);\n    }\n    \n    // Drop frames forced by images and other things with no time constants\n    // Also, if two consecutive times with the same value are added to the movie, it aborts recording, so I bail on that case\n    if ( (CMTIME_IS_INVALID(frameTime)) || (CMTIME_COMPARE_INLINE(frameTime, ==, previousFrameTime)) || (CMTIME_IS_INDEFINITE(frameTime)) ) \n    {\n        [firstInputFramebuffer unlock];\n        return;\n    }\n\n    if (CMTIME_IS_INVALID(startTime))\n    {\n        runSynchronouslyOnContextQueue(_movieWriterContext, ^{\n            if ((videoInputReadyCallback == NULL) && (assetWriter.status != AVAssetWriterStatusWriting))\n            {\n                [assetWriter startWriting];\n            }\n            \n            [assetWriter startSessionAtSourceTime:frameTime];\n            startTime = frameTime;\n        });\n    }\n\n    GPUImageFramebuffer *inputFramebufferForBlock = firstInputFramebuffer;\n    glFinish();\n\n    runAsynchronouslyOnContextQueue(_movieWriterContext, ^{\n        if (!assetWriterVideoInput.readyForMoreMediaData && _encodingLiveVideo)\n        {\n            [inputFramebufferForBlock unlock];\n            NSLog(@\"1: Had to drop a video frame: %@\", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime)));\n            return;\n        }\n        \n        // Render the frame with swizzled colors, so that they can be uploaded quickly as BGRA frames\n        [_movieWriterContext useAsCurrentContext];\n        [self renderAtInternalSizeUsingFramebuffer:inputFramebufferForBlock];\n        \n        CVPixelBufferRef pixel_buffer = NULL;\n        \n        if ([GPUImageContext supportsFastTextureUpload])\n        {\n            pixel_buffer = renderTarget;\n            CVPixelBufferLockBaseAddress(pixel_buffer, 0);\n        }\n        else\n        {\n            CVReturn status = CVPixelBufferPoolCreatePixelBuffer (NULL, [assetWriterPixelBufferInput pixelBufferPool], &pixel_buffer);\n            if ((pixel_buffer == NULL) || (status != kCVReturnSuccess))\n            {\n                CVPixelBufferRelease(pixel_buffer);\n                return;\n            }\n            else\n            {\n                CVPixelBufferLockBaseAddress(pixel_buffer, 0);\n                \n                GLubyte *pixelBufferData = (GLubyte *)CVPixelBufferGetBaseAddress(pixel_buffer);\n                glReadPixels(0, 0, videoSize.width, videoSize.height, GL_RGBA, GL_UNSIGNED_BYTE, pixelBufferData);\n            }\n        }\n        \n        void(^write)() = ^() {\n            while( ! assetWriterVideoInput.readyForMoreMediaData && ! _encodingLiveVideo && ! 
videoEncodingIsFinished ) {\n                NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];\n                //            NSLog(@\"video waiting...\");\n                [[NSRunLoop currentRunLoop] runUntilDate:maxDate];\n            }\n            if (!assetWriterVideoInput.readyForMoreMediaData)\n            {\n                NSLog(@\"2: Had to drop a video frame: %@\", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime)));\n            }\n            else if(self.assetWriter.status == AVAssetWriterStatusWriting)\n            {\n                if (![assetWriterPixelBufferInput appendPixelBuffer:pixel_buffer withPresentationTime:frameTime])\n                    NSLog(@\"Problem appending pixel buffer at time: %@\", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime)));\n            }\n            else\n            {\n                NSLog(@\"Couldn't write a frame\");\n                //NSLog(@\"Wrote a video frame: %@\", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, frameTime)));\n            }\n            CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);\n            \n            previousFrameTime = frameTime;\n            \n            if (![GPUImageContext supportsFastTextureUpload])\n            {\n                CVPixelBufferRelease(pixel_buffer);\n            }\n        };\n        \n        write();\n        \n        [inputFramebufferForBlock unlock];\n    });\n}\n\n- (NSInteger)nextAvailableTextureIndex;\n{\n    return 0;\n}\n\n- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;\n{\n    [newInputFramebuffer lock];\n//    runSynchronouslyOnContextQueue(_movieWriterContext, ^{\n        firstInputFramebuffer = newInputFramebuffer;\n//    });\n}\n\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n    inputRotation = newInputRotation;\n}\n\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n{\n}\n\n- (CGSize)maximumOutputSize;\n{\n    return videoSize;\n}\n\n- (void)endProcessing \n{\n    if (completionBlock) \n    {\n        if (!alreadyFinishedRecording)\n        {\n            alreadyFinishedRecording = YES;\n            completionBlock();\n        }        \n    }\n    else \n    {\n        if (_delegate && [_delegate respondsToSelector:@selector(movieRecordingCompleted)])\n        {\n            [_delegate movieRecordingCompleted];\n        }\n    }\n}\n\n- (BOOL)shouldIgnoreUpdatesToThisTarget;\n{\n    return NO;\n}\n\n- (BOOL)wantsMonochromeInput;\n{\n    return NO;\n}\n\n- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;\n{\n    \n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (void)setHasAudioTrack:(BOOL)newValue\n{\n\t[self setHasAudioTrack:newValue audioSettings:nil];\n}\n\n- (void)setHasAudioTrack:(BOOL)newValue audioSettings:(NSDictionary *)audioOutputSettings;\n{\n    _hasAudioTrack = newValue;\n    \n    if (_hasAudioTrack)\n    {\n        if (_shouldPassthroughAudio)\n        {\n\t\t\t// Do not set any settings so audio will be the same as passthrough\n\t\t\taudioOutputSettings = nil;\n        }\n        else if (audioOutputSettings == nil)\n        {\n            AVAudioSession *sharedAudioSession = [AVAudioSession sharedInstance];\n            double preferredHardwareSampleRate;\n            \n            if ([sharedAudioSession respondsToSelector:@selector(sampleRate)])\n            {\n                preferredHardwareSampleRate = [sharedAudioSession 
sampleRate];\n            }\n            else\n            {\n#pragma clang diagnostic push\n#pragma clang diagnostic ignored \"-Wdeprecated-declarations\"\n                preferredHardwareSampleRate = [[AVAudioSession sharedInstance] currentHardwareSampleRate];\n#pragma clang diagnostic pop\n            }\n            \n            AudioChannelLayout acl;\n            bzero( &acl, sizeof(acl));\n            acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;\n            \n            audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:\n                                         [ NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,\n                                         [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,\n                                         [ NSNumber numberWithFloat: preferredHardwareSampleRate ], AVSampleRateKey,\n                                         [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,\n                                         //[ NSNumber numberWithInt:AVAudioQualityLow], AVEncoderAudioQualityKey,\n                                         [ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,\n                                         nil];\n/*\n            AudioChannelLayout acl;\n            bzero( &acl, sizeof(acl));\n            acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;\n            \n            audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:\n                                   [ NSNumber numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey,\n                                   [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,\n                                   [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,\n                                   [ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,\n                                   [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,\n                                   nil];*/\n        }\n        \n        assetWriterAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];\n        [assetWriter addInput:assetWriterAudioInput];\n        assetWriterAudioInput.expectsMediaDataInRealTime = _encodingLiveVideo;\n    }\n    else\n    {\n        // Remove audio track if it exists\n    }\n}\n\n- (NSArray*)metaData {\n    return assetWriter.metadata;\n}\n\n- (void)setMetaData:(NSArray*)metaData {\n    assetWriter.metadata = metaData;\n}\n \n- (CMTime)duration {\n    if( ! CMTIME_IS_VALID(startTime) )\n        return kCMTimeZero;\n    if( ! CMTIME_IS_NEGATIVE_INFINITY(previousFrameTime) )\n        return CMTimeSubtract(previousFrameTime, startTime);\n    if( ! 
CMTIME_IS_NEGATIVE_INFINITY(previousAudioTime) )\n        return CMTimeSubtract(previousAudioTime, startTime);\n    return kCMTimeZero;\n}\n\n- (CGAffineTransform)transform {\n    return assetWriterVideoInput.transform;\n}\n\n- (void)setTransform:(CGAffineTransform)transform {\n    assetWriterVideoInput.transform = transform;\n}\n\n- (AVAssetWriter*)assetWriter {\n    return assetWriter;\n}\n\n- (void)setPaused:(BOOL)newValue {\n    if (_paused != newValue) {\n        _paused = newValue;\n        \n        if (_paused) {\n            discont = YES;\n        }\n    }\n}\n\n- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef) sample by:(CMTime) offset {\n    CMItemCount count;\n    CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);\n    CMSampleTimingInfo* pInfo = malloc(sizeof(CMSampleTimingInfo) * count);\n    CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count);\n    \n    for (CMItemCount i = 0; i < count; i++) {\n        pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset);\n        pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset);\n    }\n    \n    CMSampleBufferRef sout;\n    CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout);\n    free(pInfo);\n    \n    return sout;\n}\n\n@end\n"
  },
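  {
    "path": "GPURenderKit/Examples/MovieWriterUsageSketch.m",
    "content": "//\n//  MovieWriterUsageSketch.m\n//  Illustrative sketch only (hypothetical file, not part of the GPUImage sources).\n//\n//  A minimal example of driving the GPUImageMovieWriter implemented above: create it\n//  with a URL and size, attach it as a target of an upstream GPUImageOutput, start\n//  recording, and finish asynchronously. The upstream source passed in here is an\n//  assumption; any GPUImageOutput that delivers frames to its targets will work.\n//\n\n#import \"GPUImageMovieWriter.h\"\n#import \"GPUImageOutput.h\"\n\nstatic GPUImageMovieWriter *StartRecordingFromSource(GPUImageOutput *source, CGSize videoSize)\n{\n    // Write an H.264 QuickTime movie to a temporary location (the path is illustrative).\n    NSString *moviePath = [NSTemporaryDirectory() stringByAppendingPathComponent:@\"recording.mov\"];\n    NSURL *movieURL = [NSURL fileURLWithPath:moviePath];\n    GPUImageMovieWriter *movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:videoSize];\n\n    // Live capture normally wants an audio track and real-time pacing on the asset writer inputs.\n    [movieWriter setHasAudioTrack:YES audioSettings:nil];\n    movieWriter.encodingLiveVideo = YES;\n\n    // The writer implements the GPUImageInput protocol, so it can be added as a target of any source or filter.\n    [source addTarget:(id<GPUImageInput>)movieWriter];\n    [movieWriter startRecording];\n\n    return movieWriter;\n}\n\nstatic void StopRecording(GPUImageMovieWriter *movieWriter)\n{\n    // The completion handler runs on the writer's context queue once the AVAssetWriter has finished.\n    [movieWriter finishRecordingWithCompletionHandler:^{\n        NSLog(@\"Finished writing movie\");\n    }];\n}\n"
  },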
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/iOS/GPUImagePicture+TextureSubimage.h",
    "content": "//\n//  GPUImagePicture+TextureSubimage.h\n//  GPUImage\n//\n//  Created by Jack Wu on 2014-05-28.\n//  Copyright (c) 2014 Brad Larson. All rights reserved.\n//\n\n#import \"GPUImagePicture.h\"\n\n@interface GPUImagePicture (TextureSubimage)\n\n- (void)replaceTextureWithSubimage:(UIImage*)subimage;\n- (void)replaceTextureWithSubCGImage:(CGImageRef)subimageSource;\n\n- (void)replaceTextureWithSubimage:(UIImage*)subimage inRect:(CGRect)subRect;\n- (void)replaceTextureWithSubCGImage:(CGImageRef)subimageSource inRect:(CGRect)subRect;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/iOS/GPUImagePicture+TextureSubimage.m",
    "content": "//\n//  GPUImagePicture+TextureSubimage.m\n//  GPUImage\n//\n//  Created by Jack Wu on 2014-05-28.\n//  Copyright (c) 2014 Brad Larson. All rights reserved.\n//\n\n#import \"GPUImagePicture+TextureSubimage.h\"\n\n@implementation GPUImagePicture (TextureSubimage)\n\n- (void)replaceTextureWithSubimage:(UIImage*)subimage {\n    return [self replaceTextureWithSubCGImage:[subimage CGImage]];\n}\n\n- (void)replaceTextureWithSubCGImage:(CGImageRef)subimageSource {\n    CGRect rect = (CGRect) {.origin = CGPointZero, .size = (CGSize){.width = CGImageGetWidth(subimageSource), .height = CGImageGetHeight(subimageSource)}};\n    return [self replaceTextureWithSubCGImage:subimageSource inRect:rect];\n}\n\n- (void)replaceTextureWithSubimage:(UIImage*)subimage inRect:(CGRect)subRect {\n    return [self replaceTextureWithSubCGImage:[subimage CGImage] inRect:subRect];\n}\n\n- (void)replaceTextureWithSubCGImage:(CGImageRef)subimageSource inRect:(CGRect)subRect {\n    NSAssert(outputFramebuffer, @\"Picture must be initialized first before replacing subtexture\");\n    NSAssert(self.framebufferForOutput.textureOptions.internalFormat == GL_RGBA, @\"For replacing subtexture the internal texture format must be GL_RGBA.\");\n\n    CGRect subimageRect = (CGRect){.origin = CGPointZero, .size = (CGSize){.width = CGImageGetWidth(subimageSource), .height = CGImageGetHeight(subimageSource)}};\n    NSAssert(!CGRectIsEmpty(subimageRect), @\"Passed sub image must not be empty - it should be at least 1px tall and wide\");\n    NSAssert(!CGRectIsEmpty(subRect), @\"Passed sub rect must not be empty\");\n\n    NSAssert(CGSizeEqualToSize(subimageRect.size, subRect.size), @\"Subimage size must match the size of sub rect\");\n    \n    // We don't have to worry about scaling the subimage or finding a power of two size.\n    // The initialization has taken care of that for us.\n    \n    dispatch_semaphore_signal(imageUpdateSemaphore);\n\n    BOOL shouldRedrawUsingCoreGraphics = NO;\n\n    // Since internal format is always RGBA, we need the input data in RGBA as well.\n    CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(subimageSource);\n    CGBitmapInfo byteOrderInfo = bitmapInfo & kCGBitmapByteOrderMask;\n    if (byteOrderInfo != kCGBitmapByteOrderDefault && byteOrderInfo != kCGBitmapByteOrder32Big) {\n        shouldRedrawUsingCoreGraphics = YES;\n    }\n    else {\n        CGImageAlphaInfo alphaInfo = bitmapInfo & kCGBitmapAlphaInfoMask;\n        if (alphaInfo != kCGImageAlphaPremultipliedLast && alphaInfo != kCGImageAlphaLast && alphaInfo != kCGImageAlphaNoneSkipLast) {\n            shouldRedrawUsingCoreGraphics = YES;\n        }\n    }\n\n    GLubyte *imageData = NULL;\n    CFDataRef dataFromImageDataProvider;\n    if (shouldRedrawUsingCoreGraphics)\n    {\n        // For resized or incompatible image: redraw\n        imageData = (GLubyte *) calloc(1, (int)subimageRect.size.width * (int)subimageRect.size.height * 4);\n        \n        CGColorSpaceRef genericRGBColorspace = CGColorSpaceCreateDeviceRGB();\n        \n        CGContextRef imageContext = CGBitmapContextCreate(imageData, (size_t)subimageRect.size.width, (size_t)subimageRect.size.height, 8, (size_t)subimageRect.size.width * 4, genericRGBColorspace,  kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedLast);\n        \n        CGContextDrawImage(imageContext, CGRectMake(0.0, 0.0, subimageRect.size.width, subimageRect.size.height), subimageSource);\n        CGContextRelease(imageContext);\n        CGColorSpaceRelease(genericRGBColorspace);\n    }\n  
  else\n    {\n        // Access the raw image bytes directly\n        dataFromImageDataProvider = CGDataProviderCopyData(CGImageGetDataProvider(subimageSource));\n        imageData = (GLubyte *)CFDataGetBytePtr(dataFromImageDataProvider);\n    }\n    \n    runSynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext useImageProcessingContext];\n        [outputFramebuffer disableReferenceCounting];\n        \n        glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);\n        \n        // no need to use self.outputTextureOptions here since pictures need this texture formats and type\n        glTexSubImage2D(GL_TEXTURE_2D, 0, subRect.origin.x, subRect.origin.y, (GLint)subRect.size.width, subRect.size.height, GL_RGBA, GL_UNSIGNED_BYTE, imageData);\n        \n        if (self.shouldSmoothlyScaleOutput)\n        {\n            glGenerateMipmap(GL_TEXTURE_2D);\n        }\n        glBindTexture(GL_TEXTURE_2D, 0);\n    });\n\n    if (shouldRedrawUsingCoreGraphics)\n    {\n        free(imageData);\n    }\n    else\n    {\n        CFRelease(dataFromImageDataProvider);\n    }\n}\n@end\n"
  },
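  {
    "path": "GPURenderKit/Examples/TextureSubimageUsageSketch.m",
    "content": "//\n//  TextureSubimageUsageSketch.m\n//  Illustrative sketch only (hypothetical file, not part of the GPUImage sources).\n//\n//  Shows how the TextureSubimage category above might be used to patch a region of an\n//  already-initialized GPUImagePicture without re-uploading the whole texture. The\n//  picture and badge image passed in are assumptions for illustration.\n//\n\n#import \"GPUImagePicture+TextureSubimage.h\"\n\nstatic void PatchPictureWithBadge(GPUImagePicture *picture, UIImage *badge)\n{\n    // The category asserts that the picture is initialized, that its internal texture\n    // format is GL_RGBA, and that the replacement image exactly fills the target rect,\n    // so size the rect from the CGImage's pixel dimensions.\n    CGImageRef badgeImage = badge.CGImage;\n    CGRect targetRect = CGRectMake(0.0, 0.0, CGImageGetWidth(badgeImage), CGImageGetHeight(badgeImage));\n\n    [picture replaceTextureWithSubCGImage:badgeImage inRect:targetRect];\n\n    // Re-run the chain so downstream targets pick up the updated texels.\n    [picture processImage];\n}\n"
  },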
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/iOS/GPUImagePicture.h",
    "content": "#import <UIKit/UIKit.h>\n#import \"GPUImageOutput.h\"\n\n\n@interface GPUImagePicture : GPUImageOutput\n{\n    CGSize pixelSizeOfImage;\n    BOOL hasProcessedImage;\n    \n    dispatch_semaphore_t imageUpdateSemaphore;\n}\n\n// Initialization and teardown\n- (id)initWithURL:(NSURL *)url;\n- (id)initWithImage:(UIImage *)newImageSource;\n- (id)initWithCGImage:(CGImageRef)newImageSource;\n- (id)initWithImage:(UIImage *)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput;\n- (id)initWithCGImage:(CGImageRef)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput;\n- (id)initWithImage:(UIImage *)newImageSource removePremultiplication:(BOOL)removePremultiplication;\n- (id)initWithCGImage:(CGImageRef)newImageSource removePremultiplication:(BOOL)removePremultiplication;\n- (id)initWithImage:(UIImage *)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput removePremultiplication:(BOOL)removePremultiplication;\n- (id)initWithCGImage:(CGImageRef)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput removePremultiplication:(BOOL)removePremultiplication;\n\n// Image rendering\n- (void)processImage;\n- (CGSize)outputImageSize;\n\n/**\n * Process image with all targets and filters asynchronously\n * The completion handler is called after processing finished in the\n * GPU's dispatch queue - and only if this method did not return NO.\n *\n * @returns NO if resource is blocked and processing is discarded, YES otherwise\n */\n- (BOOL)processImageWithCompletionHandler:(void (^)(void))completion;\n- (void)processImageUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(UIImage *processedImage))block;\n\n@end\n"
  },
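  {
    "path": "GPURenderKit/Examples/StillImageProcessingSketch.m",
    "content": "//\n//  StillImageProcessingSketch.m\n//  Illustrative sketch only (hypothetical file, not part of the GPUImage sources).\n//\n//  Filters a still image asynchronously using the completion-handler API declared in\n//  GPUImagePicture.h. Using a plain GPUImageFilter as a passthrough stage is an\n//  assumption for illustration; any GPUImageOutput<GPUImageInput> filter works here.\n//\n\n#import \"GPUImagePicture.h\"\n#import \"GPUImageFilter.h\"\n\nstatic void FilterStillImage(UIImage *sourceImage, void (^completion)(UIImage *result))\n{\n    GPUImagePicture *picture = [[GPUImagePicture alloc] initWithImage:sourceImage];\n    GPUImageFilter *passthroughFilter = [[GPUImageFilter alloc] init];\n\n    [picture addTarget:passthroughFilter];\n\n    // Renders the picture through the chain on the video-processing queue and hands\n    // back a UIImage captured from the final filter's framebuffer.\n    [picture processImageUpToFilter:passthroughFilter withCompletionHandler:^(UIImage *processedImage) {\n        if (completion != nil) {\n            completion(processedImage);\n        }\n    }];\n}\n"
  },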
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/iOS/GPUImagePicture.m",
    "content": "#import \"GPUImagePicture.h\"\n\n@implementation GPUImagePicture\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n- (id)initWithURL:(NSURL *)url;\n{\n    NSData *imageData = [[NSData alloc] initWithContentsOfURL:url];\n    \n    if (!(self = [self initWithData:imageData]))\n    {\n        return nil;\n    }\n    \n    return self;\n}\n\n- (id)initWithData:(NSData *)imageData;\n{\n    UIImage *inputImage = [[UIImage alloc] initWithData:imageData];\n    \n    if (!(self = [self initWithImage:inputImage]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n- (id)initWithImage:(UIImage *)newImageSource;\n{\n    if (!(self = [self initWithImage:newImageSource smoothlyScaleOutput:NO]))\n    {\n\t\treturn nil;\n    }\n    \n    return self;\n}\n\n- (id)initWithCGImage:(CGImageRef)newImageSource;\n{\n    if (!(self = [self initWithCGImage:newImageSource smoothlyScaleOutput:NO]))\n    {\n\t\treturn nil;\n    }\n    return self;\n}\n\n- (id)initWithImage:(UIImage *)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput;\n{\n    return [self initWithCGImage:[newImageSource CGImage] smoothlyScaleOutput:smoothlyScaleOutput];\n}\n\n- (id)initWithCGImage:(CGImageRef)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput;\n{\n    return [self initWithCGImage:newImageSource smoothlyScaleOutput:smoothlyScaleOutput removePremultiplication:NO];\n}\n\n- (id)initWithImage:(UIImage *)newImageSource removePremultiplication:(BOOL)removePremultiplication;\n{\n    return [self initWithCGImage:[newImageSource CGImage] smoothlyScaleOutput:NO removePremultiplication:removePremultiplication];\n}\n\n- (id)initWithCGImage:(CGImageRef)newImageSource removePremultiplication:(BOOL)removePremultiplication;\n{\n    return [self initWithCGImage:newImageSource smoothlyScaleOutput:NO removePremultiplication:removePremultiplication];\n}\n\n- (id)initWithImage:(UIImage *)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput removePremultiplication:(BOOL)removePremultiplication;\n{\n    return [self initWithCGImage:[newImageSource CGImage] smoothlyScaleOutput:smoothlyScaleOutput removePremultiplication:removePremultiplication];\n}\n\n- (id)initWithCGImage:(CGImageRef)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput removePremultiplication:(BOOL)removePremultiplication;\n{\n    if (!(self = [super init]))\n    {\n\t\treturn nil;\n    }\n    \n    hasProcessedImage = NO;\n    self.shouldSmoothlyScaleOutput = smoothlyScaleOutput;\n    imageUpdateSemaphore = dispatch_semaphore_create(0);\n    dispatch_semaphore_signal(imageUpdateSemaphore);\n\n\n    // TODO: Dispatch this whole thing asynchronously to move image loading off main thread\n    CGFloat widthOfImage = CGImageGetWidth(newImageSource);\n    CGFloat heightOfImage = CGImageGetHeight(newImageSource);\n\n    // If passed an empty image reference, CGContextDrawImage will fail in future versions of the SDK.\n    NSAssert( widthOfImage > 0 && heightOfImage > 0, @\"Passed image must not be empty - it should be at least 1px tall and wide\");\n    \n    pixelSizeOfImage = CGSizeMake(widthOfImage, heightOfImage);\n    CGSize pixelSizeToUseForTexture = pixelSizeOfImage;\n    \n    BOOL shouldRedrawUsingCoreGraphics = NO;\n    \n    // For now, deal with images larger than the maximum texture size by resizing to be within that limit\n    CGSize scaledImageSizeToFitOnGPU = [GPUImageContext sizeThatFitsWithinATextureForSize:pixelSizeOfImage];\n    if (!CGSizeEqualToSize(scaledImageSizeToFitOnGPU, pixelSizeOfImage))\n    
{\n        pixelSizeOfImage = scaledImageSizeToFitOnGPU;\n        pixelSizeToUseForTexture = pixelSizeOfImage;\n        shouldRedrawUsingCoreGraphics = YES;\n    }\n    \n    if (self.shouldSmoothlyScaleOutput)\n    {\n        // In order to use mipmaps, you need to provide power-of-two textures, so convert to the next largest power of two and stretch to fill\n        CGFloat powerClosestToWidth = ceil(log2(pixelSizeOfImage.width));\n        CGFloat powerClosestToHeight = ceil(log2(pixelSizeOfImage.height));\n        \n        pixelSizeToUseForTexture = CGSizeMake(pow(2.0, powerClosestToWidth), pow(2.0, powerClosestToHeight));\n        \n        shouldRedrawUsingCoreGraphics = YES;\n    }\n    \n    GLubyte *imageData = NULL;\n    CFDataRef dataFromImageDataProvider = NULL;\n    GLenum format = GL_BGRA;\n    BOOL isLitteEndian = YES;\n    BOOL alphaFirst = NO;\n    BOOL premultiplied = NO;\n\t\n    if (!shouldRedrawUsingCoreGraphics) {\n        /* Check that the memory layout is compatible with GL, as we cannot use glPixelStore to\n         * tell GL about the memory layout with GLES.\n         */\n        if (CGImageGetBytesPerRow(newImageSource) != CGImageGetWidth(newImageSource) * 4 ||\n            CGImageGetBitsPerPixel(newImageSource) != 32 ||\n            CGImageGetBitsPerComponent(newImageSource) != 8)\n        {\n            shouldRedrawUsingCoreGraphics = YES;\n        } else {\n            /* Check that the bitmap pixel format is compatible with GL */\n            CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(newImageSource);\n            if ((bitmapInfo & kCGBitmapFloatComponents) != 0) {\n                /* We don't support float components for use directly in GL */\n                shouldRedrawUsingCoreGraphics = YES;\n            } else {\n                CGBitmapInfo byteOrderInfo = bitmapInfo & kCGBitmapByteOrderMask;\n                if (byteOrderInfo == kCGBitmapByteOrder32Little) {\n                    /* Little endian, for alpha-first we can use this bitmap directly in GL */\n                    CGImageAlphaInfo alphaInfo = bitmapInfo & kCGBitmapAlphaInfoMask;\n                    if (alphaInfo != kCGImageAlphaPremultipliedFirst && alphaInfo != kCGImageAlphaFirst &&\n                        alphaInfo != kCGImageAlphaNoneSkipFirst) {\n                        shouldRedrawUsingCoreGraphics = YES;\n                    }\n                } else if (byteOrderInfo == kCGBitmapByteOrderDefault || byteOrderInfo == kCGBitmapByteOrder32Big) {\n\t\t\t\t\tisLitteEndian = NO;\n                    /* Big endian, for alpha-last we can use this bitmap directly in GL */\n                    CGImageAlphaInfo alphaInfo = bitmapInfo & kCGBitmapAlphaInfoMask;\n                    if (alphaInfo != kCGImageAlphaPremultipliedLast && alphaInfo != kCGImageAlphaLast &&\n                        alphaInfo != kCGImageAlphaNoneSkipLast) {\n                        shouldRedrawUsingCoreGraphics = YES;\n                    } else {\n                        /* Can access directly using GL_RGBA pixel format */\n\t\t\t\t\t\tpremultiplied = alphaInfo == kCGImageAlphaPremultipliedLast || alphaInfo == kCGImageAlphaPremultipliedLast;\n\t\t\t\t\t\talphaFirst = alphaInfo == kCGImageAlphaFirst || alphaInfo == kCGImageAlphaPremultipliedFirst;\n\t\t\t\t\t\tformat = GL_RGBA;\n                    }\n                }\n            }\n        }\n    }\n    \n    //    CFAbsoluteTime elapsedTime, startTime = CFAbsoluteTimeGetCurrent();\n    \n    if (shouldRedrawUsingCoreGraphics)\n    {\n        // For resized or 
incompatible image: redraw\n        imageData = (GLubyte *) calloc(1, (int)pixelSizeToUseForTexture.width * (int)pixelSizeToUseForTexture.height * 4);\n        \n        CGColorSpaceRef genericRGBColorspace = CGColorSpaceCreateDeviceRGB();\n        \n        CGContextRef imageContext = CGBitmapContextCreate(imageData, (size_t)pixelSizeToUseForTexture.width, (size_t)pixelSizeToUseForTexture.height, 8, (size_t)pixelSizeToUseForTexture.width * 4, genericRGBColorspace,  kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);\n        //        CGContextSetBlendMode(imageContext, kCGBlendModeCopy); // From Technical Q&A QA1708: http://developer.apple.com/library/ios/#qa/qa1708/_index.html\n        CGContextDrawImage(imageContext, CGRectMake(0.0, 0.0, pixelSizeToUseForTexture.width, pixelSizeToUseForTexture.height), newImageSource);\n        CGContextRelease(imageContext);\n        CGColorSpaceRelease(genericRGBColorspace);\n\t\tisLitteEndian = YES;\n\t\talphaFirst = YES;\n\t\tpremultiplied = YES;\n    }\n    else\n    {\n        // Access the raw image bytes directly\n        dataFromImageDataProvider = CGDataProviderCopyData(CGImageGetDataProvider(newImageSource));\n        imageData = (GLubyte *)CFDataGetBytePtr(dataFromImageDataProvider);\n    }\n\t\n\tif (removePremultiplication && premultiplied) {\n\t\tNSUInteger\ttotalNumberOfPixels = round(pixelSizeToUseForTexture.width * pixelSizeToUseForTexture.height);\n\t\tuint32_t\t*pixelP = (uint32_t *)imageData;\n\t\tuint32_t\tpixel;\n\t\tCGFloat\t\tsrcR, srcG, srcB, srcA;\n\n\t\tfor (NSUInteger idx=0; idx<totalNumberOfPixels; idx++, pixelP++) {\n\t\t\tpixel = isLitteEndian ? CFSwapInt32LittleToHost(*pixelP) : CFSwapInt32BigToHost(*pixelP);\n\n\t\t\tif (alphaFirst) {\n\t\t\t\tsrcA = (CGFloat)((pixel & 0xff000000) >> 24) / 255.0f;\n\t\t\t}\n\t\t\telse {\n\t\t\t\tsrcA = (CGFloat)(pixel & 0x000000ff) / 255.0f;\n\t\t\t\tpixel >>= 8;\n\t\t\t}\n\n\t\t\tsrcR = (CGFloat)((pixel & 0x00ff0000) >> 16) / 255.0f;\n\t\t\tsrcG = (CGFloat)((pixel & 0x0000ff00) >> 8) / 255.0f;\n\t\t\tsrcB = (CGFloat)(pixel & 0x000000ff) / 255.0f;\n\t\t\t\n\t\t\tsrcR /= srcA; srcG /= srcA; srcB /= srcA;\n\t\t\t\n\t\t\tpixel = (uint32_t)(srcR * 255.0) << 16;\n\t\t\tpixel |= (uint32_t)(srcG * 255.0) << 8;\n\t\t\tpixel |= (uint32_t)(srcB * 255.0);\n\n\t\t\tif (alphaFirst) {\n\t\t\t\tpixel |= (uint32_t)(srcA * 255.0) << 24;\n\t\t\t}\n\t\t\telse {\n\t\t\t\tpixel <<= 8;\n\t\t\t\tpixel |= (uint32_t)(srcA * 255.0);\n\t\t\t}\n\t\t\t*pixelP = isLitteEndian ? 
CFSwapInt32HostToLittle(pixel) : CFSwapInt32HostToBig(pixel);\n\t\t}\n\t}\n\t\n    //    elapsedTime = (CFAbsoluteTimeGetCurrent() - startTime) * 1000.0;\n    //    NSLog(@\"Core Graphics drawing time: %f\", elapsedTime);\n    \n    //    CGFloat currentRedTotal = 0.0f, currentGreenTotal = 0.0f, currentBlueTotal = 0.0f, currentAlphaTotal = 0.0f;\n    //\tNSUInteger totalNumberOfPixels = round(pixelSizeToUseForTexture.width * pixelSizeToUseForTexture.height);\n    //\n    //    for (NSUInteger currentPixel = 0; currentPixel < totalNumberOfPixels; currentPixel++)\n    //    {\n    //        currentBlueTotal += (CGFloat)imageData[(currentPixel * 4)] / 255.0f;\n    //        currentGreenTotal += (CGFloat)imageData[(currentPixel * 4) + 1] / 255.0f;\n    //        currentRedTotal += (CGFloat)imageData[(currentPixel * 4 + 2)] / 255.0f;\n    //        currentAlphaTotal += (CGFloat)imageData[(currentPixel * 4) + 3] / 255.0f;\n    //    }\n    //\n    //    NSLog(@\"Debug, average input image red: %f, green: %f, blue: %f, alpha: %f\", currentRedTotal / (CGFloat)totalNumberOfPixels, currentGreenTotal / (CGFloat)totalNumberOfPixels, currentBlueTotal / (CGFloat)totalNumberOfPixels, currentAlphaTotal / (CGFloat)totalNumberOfPixels);\n    \n    runSynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext useImageProcessingContext];\n        \n        outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:pixelSizeToUseForTexture onlyTexture:YES];\n        [outputFramebuffer disableReferenceCounting];\n\n        glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);\n        if (self.shouldSmoothlyScaleOutput)\n        {\n            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);\n        }\n        // no need to use self.outputTextureOptions here since pictures need this texture formats and type\n        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (int)pixelSizeToUseForTexture.width, (int)pixelSizeToUseForTexture.height, 0, format, GL_UNSIGNED_BYTE, imageData);\n        \n        if (self.shouldSmoothlyScaleOutput)\n        {\n            glGenerateMipmap(GL_TEXTURE_2D);\n        }\n        glBindTexture(GL_TEXTURE_2D, 0);\n    });\n    \n    if (shouldRedrawUsingCoreGraphics)\n    {\n        free(imageData);\n    }\n    else\n    {\n        if (dataFromImageDataProvider)\n        {\n            CFRelease(dataFromImageDataProvider);\n        }\n    }\n    \n    return self;\n}\n\n// ARC forbids explicit message send of 'release'; since iOS 6 even for dispatch_release() calls: stripping it out in that case is required.\n- (void)dealloc;\n{\n    [outputFramebuffer enableReferenceCounting];\n    [outputFramebuffer unlock];\n\n#if !OS_OBJECT_USE_OBJC\n    if (imageUpdateSemaphore != NULL)\n    {\n        dispatch_release(imageUpdateSemaphore);\n    }\n#endif\n}\n\n#pragma mark -\n#pragma mark Image rendering\n\n- (void)removeAllTargets;\n{\n    [super removeAllTargets];\n    hasProcessedImage = NO;\n}\n\n- (void)processImage;\n{\n    [self processImageWithCompletionHandler:nil];\n}\n\n- (BOOL)processImageWithCompletionHandler:(void (^)(void))completion;\n{\n    hasProcessedImage = YES;\n    \n    //    dispatch_semaphore_wait(imageUpdateSemaphore, DISPATCH_TIME_FOREVER);\n    \n    if (dispatch_semaphore_wait(imageUpdateSemaphore, DISPATCH_TIME_NOW) != 0)\n    {\n        return NO;\n    }\n    \n    runAsynchronouslyOnVideoProcessingQueue(^{        \n        for (id<GPUImageInput> currentTarget in targets)\n        {\n            
NSInteger indexOfObject = [targets indexOfObject:currentTarget];\n            NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];\n            \n            [currentTarget setCurrentlyReceivingMonochromeInput:NO];\n            [currentTarget setInputSize:pixelSizeOfImage atIndex:textureIndexOfTarget];\n            [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];\n            [currentTarget newFrameReadyAtTime:kCMTimeIndefinite atIndex:textureIndexOfTarget];\n        }\n        \n        dispatch_semaphore_signal(imageUpdateSemaphore);\n        \n        if (completion != nil) {\n            completion();\n        }\n    });\n    \n    return YES;\n}\n\n- (void)processImageUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(UIImage *processedImage))block;\n{\n    [finalFilterInChain useNextFrameForImageCapture];\n    [self processImageWithCompletionHandler:^{\n        UIImage *imageFromFilter = [finalFilterInChain imageFromCurrentFramebuffer];\n        block(imageFromFilter);\n    }];\n}\n\n- (CGSize)outputImageSize;\n{\n    return pixelSizeOfImage;\n}\n\n- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;\n{\n    [super addTarget:newTarget atTextureLocation:textureLocation];\n    \n    if (hasProcessedImage)\n    {\n        [newTarget setInputSize:pixelSizeOfImage atIndex:textureLocation];\n        [newTarget newFrameReadyAtTime:kCMTimeIndefinite atIndex:textureLocation];\n    }\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/iOS/GPUImageView.h",
    "content": "#import <UIKit/UIKit.h>\n#import \"GPUImageContext.h\"\n\ntypedef NS_ENUM(NSUInteger, GPUImageFillModeType) {\n    kGPUImageFillModeStretch,                       // Stretch to fill the full view, which may distort the image outside of its normal aspect ratio\n    kGPUImageFillModePreserveAspectRatio,           // Maintains the aspect ratio of the source image, adding bars of the specified background color\n    kGPUImageFillModePreserveAspectRatioAndFill     // Maintains the aspect ratio of the source image, zooming in on its center to fill the view\n};\n\n\n\n/**\n UIView subclass to use as an endpoint for displaying GPUImage outputs\n */\n@interface GPUImageView : UIView <GPUImageInput>\n{\n    GPUImageRotationMode inputRotation;\n}\n\n/** The fill mode dictates how images are fit in the view, with the default being kGPUImageFillModePreserveAspectRatio\n */\n@property(readwrite, nonatomic) GPUImageFillModeType fillMode;\n\n/** This calculates the current display size, in pixels, taking into account Retina scaling factors\n */\n@property(readonly, nonatomic) CGSize sizeInPixels;\n\n@property(nonatomic) BOOL enabled;\n\n/** Handling fill mode\n \n @param redComponent Red component for background color\n @param greenComponent Green component for background color\n @param blueComponent Blue component for background color\n @param alphaComponent Alpha component for background color\n */\n- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;\n\n- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPUImage/iOS/GPUImageView.m",
    "content": "#import \"GPUImageView.h\"\n#import <OpenGLES/EAGLDrawable.h>\n#import <QuartzCore/QuartzCore.h>\n#import \"GPUImageContext.h\"\n#import \"GPUImageFilter.h\"\n#import <AVFoundation/AVFoundation.h>\n\n#pragma mark -\n#pragma mark Private methods and instance variables\n\n@interface GPUImageView () \n{\n    GPUImageFramebuffer *inputFramebufferForDisplay;\n    GLuint displayRenderbuffer, displayFramebuffer;\n    \n    GLProgram *displayProgram;\n    GLint displayPositionAttribute, displayTextureCoordinateAttribute;\n    GLint displayInputTextureUniform;\n\n    CGSize inputImageSize;\n    GLfloat imageVertices[8];\n    GLfloat backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha;\n\n    CGSize boundsSizeAtFrameBufferEpoch;\n}\n\n@property (assign, nonatomic) NSUInteger aspectRatio;\n\n// Initialization and teardown\n- (void)commonInit;\n\n// Managing the display FBOs\n- (void)createDisplayFramebuffer;\n- (void)destroyDisplayFramebuffer;\n\n// Handling fill mode\n- (void)recalculateViewGeometry;\n\n@end\n\n@implementation GPUImageView\n\n@synthesize aspectRatio;\n@synthesize sizeInPixels = _sizeInPixels;\n@synthesize fillMode = _fillMode;\n@synthesize enabled;\n\n#pragma mark -\n#pragma mark Initialization and teardown\n\n+ (Class)layerClass \n{\n\treturn [CAEAGLLayer class];\n}\n\n- (id)initWithFrame:(CGRect)frame\n{\n    if (!(self = [super initWithFrame:frame]))\n    {\n\t\treturn nil;\n    }\n    \n    [self commonInit];\n    \n    return self;\n}\n\n-(id)initWithCoder:(NSCoder *)coder\n{\n\tif (!(self = [super initWithCoder:coder])) \n    {\n        return nil;\n\t}\n\n    [self commonInit];\n\n\treturn self;\n}\n\n- (void)commonInit;\n{\n    // Set scaling to account for Retina display\t\n    if ([self respondsToSelector:@selector(setContentScaleFactor:)])\n    {\n        self.contentScaleFactor = [[UIScreen mainScreen] scale];\n    }\n\n    inputRotation = kGPUImageNoRotation;\n    self.opaque = YES;\n    self.hidden = NO;\n    CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;\n    eaglLayer.opaque = YES;\n    eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithBool:NO], kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil];\n\n    self.enabled = YES;\n    \n    runSynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext useImageProcessingContext];\n        \n        displayProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImagePassthroughFragmentShaderString];\n        if (!displayProgram.initialized)\n        {\n            [displayProgram addAttribute:@\"position\"];\n            [displayProgram addAttribute:@\"inputTextureCoordinate\"];\n            \n            if (![displayProgram link])\n            {\n                NSString *progLog = [displayProgram programLog];\n                NSLog(@\"Program link log: %@\", progLog);\n                NSString *fragLog = [displayProgram fragmentShaderLog];\n                NSLog(@\"Fragment shader compile log: %@\", fragLog);\n                NSString *vertLog = [displayProgram vertexShaderLog];\n                NSLog(@\"Vertex shader compile log: %@\", vertLog);\n                displayProgram = nil;\n                NSAssert(NO, @\"Filter shader link failed\");\n            }\n        }\n        \n        displayPositionAttribute = [displayProgram attributeIndex:@\"position\"];\n        
displayTextureCoordinateAttribute = [displayProgram attributeIndex:@\"inputTextureCoordinate\"];\n        displayInputTextureUniform = [displayProgram uniformIndex:@\"inputImageTexture\"]; // This assumes the fragment shader declares its texture uniform as \"inputImageTexture\"\n\n        [GPUImageContext setActiveShaderProgram:displayProgram];\n        glEnableVertexAttribArray(displayPositionAttribute);\n        glEnableVertexAttribArray(displayTextureCoordinateAttribute);\n        \n        [self setBackgroundColorRed:0.0 green:0.0 blue:0.0 alpha:1.0];\n        _fillMode = kGPUImageFillModePreserveAspectRatio;\n        [self createDisplayFramebuffer];\n    });\n}\n\n- (void)layoutSubviews {\n    [super layoutSubviews];\n    \n    // The frame buffer needs to be trashed and re-created when the view size changes.\n    if (!CGSizeEqualToSize(self.bounds.size, boundsSizeAtFrameBufferEpoch) &&\n        !CGSizeEqualToSize(self.bounds.size, CGSizeZero)) {\n        runSynchronouslyOnVideoProcessingQueue(^{\n            [self destroyDisplayFramebuffer];\n            [self createDisplayFramebuffer];\n        });\n    } else if (!CGSizeEqualToSize(self.bounds.size, CGSizeZero)) {\n        [self recalculateViewGeometry];\n    }\n}\n\n- (void)dealloc\n{\n    runSynchronouslyOnVideoProcessingQueue(^{\n        [self destroyDisplayFramebuffer];\n    });\n}\n\n#pragma mark -\n#pragma mark Managing the display FBOs\n\n- (void)createDisplayFramebuffer;\n{\n    [GPUImageContext useImageProcessingContext];\n    \n    glGenFramebuffers(1, &displayFramebuffer);\n    glBindFramebuffer(GL_FRAMEBUFFER, displayFramebuffer);\n\t\n    glGenRenderbuffers(1, &displayRenderbuffer);\n    glBindRenderbuffer(GL_RENDERBUFFER, displayRenderbuffer);\n\t\n    [[[GPUImageContext sharedImageProcessingContext] context] renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer*)self.layer];\n\t\n    GLint backingWidth, backingHeight;\n\n    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &backingWidth);\n    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &backingHeight);\n    \n    if ( (backingWidth == 0) || (backingHeight == 0) )\n    {\n        [self destroyDisplayFramebuffer];\n        return;\n    }\n    \n    _sizeInPixels.width = (CGFloat)backingWidth;\n    _sizeInPixels.height = (CGFloat)backingHeight;\n\n//    NSLog(@\"Backing width: %d, height: %d\", backingWidth, backingHeight);\n\n    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, displayRenderbuffer);\n\t\n    __unused GLuint framebufferCreationStatus = glCheckFramebufferStatus(GL_FRAMEBUFFER);\n    NSAssert(framebufferCreationStatus == GL_FRAMEBUFFER_COMPLETE, @\"Failure with display framebuffer generation for display of size: %f, %f\", self.bounds.size.width, self.bounds.size.height);\n    boundsSizeAtFrameBufferEpoch = self.bounds.size;\n\n    [self recalculateViewGeometry];\n}\n\n- (void)destroyDisplayFramebuffer;\n{\n    [GPUImageContext useImageProcessingContext];\n\n    if (displayFramebuffer)\n\t{\n\t\tglDeleteFramebuffers(1, &displayFramebuffer);\n\t\tdisplayFramebuffer = 0;\n\t}\n\t\n\tif (displayRenderbuffer)\n\t{\n\t\tglDeleteRenderbuffers(1, &displayRenderbuffer);\n\t\tdisplayRenderbuffer = 0;\n\t}\n}\n\n- (void)setDisplayFramebuffer;\n{\n    if (!displayFramebuffer)\n    {\n        [self createDisplayFramebuffer];\n    }\n    \n    glBindFramebuffer(GL_FRAMEBUFFER, displayFramebuffer);\n    \n    glViewport(0, 0, (GLint)_sizeInPixels.width, (GLint)_sizeInPixels.height);\n}\n\n- 
(void)presentFramebuffer;\n{\n    glBindRenderbuffer(GL_RENDERBUFFER, displayRenderbuffer);\n    [[GPUImageContext sharedImageProcessingContext] presentBufferForDisplay];\n}\n\n#pragma mark -\n#pragma mark Handling fill mode\n\n- (void)recalculateViewGeometry;\n{\n    runSynchronouslyOnVideoProcessingQueue(^{\n        CGFloat heightScaling, widthScaling;\n        \n        CGSize currentViewSize = self.bounds.size;\n        \n        //    CGFloat imageAspectRatio = inputImageSize.width / inputImageSize.height;\n        //    CGFloat viewAspectRatio = currentViewSize.width / currentViewSize.height;\n        \n        CGRect insetRect = AVMakeRectWithAspectRatioInsideRect(inputImageSize, self.bounds);\n        \n        switch(_fillMode)\n        {\n            case kGPUImageFillModeStretch:\n            {\n                widthScaling = 1.0;\n                heightScaling = 1.0;\n            }; break;\n            case kGPUImageFillModePreserveAspectRatio:\n            {\n                widthScaling = insetRect.size.width / currentViewSize.width;\n                heightScaling = insetRect.size.height / currentViewSize.height;\n            }; break;\n            case kGPUImageFillModePreserveAspectRatioAndFill:\n            {\n                //            CGFloat widthHolder = insetRect.size.width / currentViewSize.width;\n                widthScaling = currentViewSize.height / insetRect.size.height;\n                heightScaling = currentViewSize.width / insetRect.size.width;\n            }; break;\n        }\n        \n        imageVertices[0] = -widthScaling;\n        imageVertices[1] = -heightScaling;\n        imageVertices[2] = widthScaling;\n        imageVertices[3] = -heightScaling;\n        imageVertices[4] = -widthScaling;\n        imageVertices[5] = heightScaling;\n        imageVertices[6] = widthScaling;\n        imageVertices[7] = heightScaling;\n    });\n    \n//    static const GLfloat imageVertices[] = {\n//        -1.0f, -1.0f,\n//        1.0f, -1.0f,\n//        -1.0f,  1.0f,\n//        1.0f,  1.0f,\n//    };\n}\n\n- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;\n{\n    backgroundColorRed = redComponent;\n    backgroundColorGreen = greenComponent;\n    backgroundColorBlue = blueComponent;\n    backgroundColorAlpha = alphaComponent;\n}\n\n+ (const GLfloat *)textureCoordinatesForRotation:(GPUImageRotationMode)rotationMode;\n{\n//    static const GLfloat noRotationTextureCoordinates[] = {\n//        0.0f, 0.0f,\n//        1.0f, 0.0f,\n//        0.0f, 1.0f,\n//        1.0f, 1.0f,\n//    };\n    \n    static const GLfloat noRotationTextureCoordinates[] = {\n        0.0f, 1.0f,\n        1.0f, 1.0f,\n        0.0f, 0.0f,\n        1.0f, 0.0f,\n    };\n\n    static const GLfloat rotateRightTextureCoordinates[] = {\n        1.0f, 1.0f,\n        1.0f, 0.0f,\n        0.0f, 1.0f,\n        0.0f, 0.0f,\n    };\n\n    static const GLfloat rotateLeftTextureCoordinates[] = {\n        0.0f, 0.0f,\n        0.0f, 1.0f,\n        1.0f, 0.0f,\n        1.0f, 1.0f,\n    };\n        \n    static const GLfloat verticalFlipTextureCoordinates[] = {\n        0.0f, 0.0f,\n        1.0f, 0.0f,\n        0.0f, 1.0f,\n        1.0f, 1.0f,\n    };\n    \n    static const GLfloat horizontalFlipTextureCoordinates[] = {\n        1.0f, 1.0f,\n        0.0f, 1.0f,\n        1.0f, 0.0f,\n        0.0f, 0.0f,\n    };\n    \n    static const GLfloat rotateRightVerticalFlipTextureCoordinates[] = {\n        1.0f, 0.0f,\n        
1.0f, 1.0f,\n        0.0f, 0.0f,\n        0.0f, 1.0f,\n    };\n    \n    static const GLfloat rotateRightHorizontalFlipTextureCoordinates[] = {\n        0.0f, 1.0f,\n        0.0f, 0.0f,\n        1.0f, 1.0f,\n        1.0f, 0.0f,\n    };\n\n    static const GLfloat rotate180TextureCoordinates[] = {\n        1.0f, 0.0f,\n        0.0f, 0.0f,\n        1.0f, 1.0f,\n        0.0f, 1.0f,\n    };\n    \n    switch(rotationMode)\n    {\n        case kGPUImageNoRotation: return noRotationTextureCoordinates;\n        case kGPUImageRotateLeft: return rotateLeftTextureCoordinates;\n        case kGPUImageRotateRight: return rotateRightTextureCoordinates;\n        case kGPUImageFlipVertical: return verticalFlipTextureCoordinates;\n        case kGPUImageFlipHorizonal: return horizontalFlipTextureCoordinates;\n        case kGPUImageRotateRightFlipVertical: return rotateRightVerticalFlipTextureCoordinates;\n        case kGPUImageRotateRightFlipHorizontal: return rotateRightHorizontalFlipTextureCoordinates;\n        case kGPUImageRotate180: return rotate180TextureCoordinates;\n    }\n}\n\n#pragma mark -\n#pragma mark GPUInput protocol\n\n- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;\n{\n    runSynchronouslyOnVideoProcessingQueue(^{\n        [GPUImageContext setActiveShaderProgram:displayProgram];\n        [self setDisplayFramebuffer];\n        \n        glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);\n        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);\n        \n        glActiveTexture(GL_TEXTURE4);\n        glBindTexture(GL_TEXTURE_2D, [inputFramebufferForDisplay texture]);\n        glUniform1i(displayInputTextureUniform, 4);\n        \n        glVertexAttribPointer(displayPositionAttribute, 2, GL_FLOAT, 0, 0, imageVertices);\n        glVertexAttribPointer(displayTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [GPUImageView textureCoordinatesForRotation:inputRotation]);\n        \n        glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);\n        \n        [self presentFramebuffer];\n        [inputFramebufferForDisplay unlock];\n        inputFramebufferForDisplay = nil;\n    });\n}\n\n- (NSInteger)nextAvailableTextureIndex;\n{\n    return 0;\n}\n\n- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;\n{\n    inputFramebufferForDisplay = newInputFramebuffer;\n    [inputFramebufferForDisplay lock];\n}\n\n- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;\n{\n    inputRotation = newInputRotation;\n}\n\n- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;\n{\n    runSynchronouslyOnVideoProcessingQueue(^{\n        CGSize rotatedSize = newSize;\n        \n        if (GPUImageRotationSwapsWidthAndHeight(inputRotation))\n        {\n            rotatedSize.width = newSize.height;\n            rotatedSize.height = newSize.width;\n        }\n        \n        if (!CGSizeEqualToSize(inputImageSize, rotatedSize))\n        {\n            inputImageSize = rotatedSize;\n            [self recalculateViewGeometry];\n        }\n    });\n}\n\n- (CGSize)maximumOutputSize;\n{\n    if ([self respondsToSelector:@selector(setContentScaleFactor:)])\n    {\n        CGSize pointSize = self.bounds.size;\n        return CGSizeMake(self.contentScaleFactor * pointSize.width, self.contentScaleFactor * pointSize.height);\n    }\n    else\n    {\n        return self.bounds.size;\n    }\n}\n\n- (void)endProcessing\n{\n}\n\n- 
(BOOL)shouldIgnoreUpdatesToThisTarget;\n{\n    return NO;\n}\n\n- (BOOL)wantsMonochromeInput;\n{\n    return NO;\n}\n\n- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;\n{\n    \n}\n\n#pragma mark -\n#pragma mark Accessors\n\n- (CGSize)sizeInPixels;\n{\n    if (CGSizeEqualToSize(_sizeInPixels, CGSizeZero))\n    {\n        return [self maximumOutputSize];\n    }\n    else\n    {\n        return _sizeInPixels;\n    }\n}\n\n- (void)setFillMode:(GPUImageFillModeType)newValue;\n{\n    _fillMode = newValue;\n    [self recalculateViewGeometry];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/GPURenderKit.h",
    "content": "//\n//  GPURenderKit.h\n//  GPURenderKit\n//\n//  Created by 刘海东 on 2019/2/22.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n\n//! Project version number for GPURenderKit.\nFOUNDATION_EXPORT double GPURenderKitVersionNumber;\n\n//! Project version string for GPURenderKit.\nFOUNDATION_EXPORT const unsigned char GPURenderKitVersionString[];\n\n// In this header, you should import all the public headers of your framework using statements like #import <GPURenderKit/PublicHeader.h>\n\n\n#import \"GPUImage.h\"\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit/Info.plist",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n<plist version=\"1.0\">\n<dict>\n\t<key>CFBundleDevelopmentRegion</key>\n\t<string>$(DEVELOPMENT_LANGUAGE)</string>\n\t<key>CFBundleExecutable</key>\n\t<string>$(EXECUTABLE_NAME)</string>\n\t<key>CFBundleIdentifier</key>\n\t<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>\n\t<key>CFBundleInfoDictionaryVersion</key>\n\t<string>6.0</string>\n\t<key>CFBundleName</key>\n\t<string>$(PRODUCT_NAME)</string>\n\t<key>CFBundlePackageType</key>\n\t<string>FMWK</string>\n\t<key>CFBundleShortVersionString</key>\n\t<string>1.0</string>\n\t<key>CFBundleVersion</key>\n\t<string>$(CURRENT_PROJECT_VERSION)</string>\n</dict>\n</plist>\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit.xcodeproj/project.pbxproj",
    "content": "// !$*UTF8*$!\n{\n\tarchiveVersion = 1;\n\tclasses = {\n\t};\n\tobjectVersion = 50;\n\tobjects = {\n\n/* Begin PBXBuildFile section */\n\t\tB41317502277277300B4866F /* GLImageShapeFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B413174C2277277200B4866F /* GLImageShapeFilter.m */; };\n\t\tB41317512277277300B4866F /* GLImageShapeFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B413174D2277277300B4866F /* GLImageShapeFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB41317522277277300B4866F /* GLImageShapeHighDefinitionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B413174E2277277300B4866F /* GLImageShapeHighDefinitionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB41317532277277300B4866F /* GLImageShapeHighDefinitionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B413174F2277277300B4866F /* GLImageShapeHighDefinitionFilter.m */; };\n\t\tB41FE41B226A260F008DF295 /* GLImageBlurSnapViewFilterGroup.h in Headers */ = {isa = PBXBuildFile; fileRef = B41FE419226A260F008DF295 /* GLImageBlurSnapViewFilterGroup.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB41FE41C226A260F008DF295 /* GLImageBlurSnapViewFilterGroup.m in Sources */ = {isa = PBXBuildFile; fileRef = B41FE41A226A260F008DF295 /* GLImageBlurSnapViewFilterGroup.m */; };\n\t\tB42D8D62223F8A5600517EA7 /* GLImageCircleFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B42D8D60223F8A5500517EA7 /* GLImageCircleFilter.m */; };\n\t\tB42D8D63223F8A5600517EA7 /* GLImageCircleFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B42D8D61223F8A5600517EA7 /* GLImageCircleFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB42EC08522520B6F00226FA0 /* GLImageThreePartitionGroupFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B42EC07F22520B6F00226FA0 /* GLImageThreePartitionGroupFilter.m */; };\n\t\tB42EC08622520B6F00226FA0 /* GLImageThreePartitionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B42EC08022520B6F00226FA0 /* GLImageThreePartitionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB42EC08722520B6F00226FA0 /* GLImageFourPointsMirrorFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B42EC08122520B6F00226FA0 /* GLImageFourPointsMirrorFilter.m */; };\n\t\tB42EC08822520B6F00226FA0 /* GLImageThreePartitionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B42EC08222520B6F00226FA0 /* GLImageThreePartitionFilter.m */; };\n\t\tB42EC08922520B6F00226FA0 /* GLImageThreePartitionGroupFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B42EC08322520B6F00226FA0 /* GLImageThreePartitionGroupFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB42EC08A22520B6F00226FA0 /* GLImageFourPointsMirrorFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B42EC08422520B6F00226FA0 /* GLImageFourPointsMirrorFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB436FEC72266D28F009C5A0E /* GLImageZoomFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B436FEC52266D28F009C5A0E /* GLImageZoomFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB436FEC82266D28F009C5A0E /* GLImageZoomFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B436FEC62266D28F009C5A0E /* GLImageZoomFilter.m */; };\n\t\tB436FECB2266DA2E009C5A0E /* GLImageWaterReflectionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B436FEC92266DA2E009C5A0E /* GLImageWaterReflectionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB436FECC2266DA2E009C5A0E /* GLImageWaterReflectionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = 
B436FECA2266DA2E009C5A0E /* GLImageWaterReflectionFilter.m */; };\n\t\tB44ADD09222668DD00F17956 /* GLImageAddStickerFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B44ADD03222668DD00F17956 /* GLImageAddStickerFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB44ADD0A222668DD00F17956 /* GLImageAddStickerWithEffectFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B44ADD04222668DD00F17956 /* GLImageAddStickerWithEffectFilter.m */; };\n\t\tB44ADD0B222668DD00F17956 /* GLImageAddStickerFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B44ADD05222668DD00F17956 /* GLImageAddStickerFilter.m */; };\n\t\tB44ADD0C222668DD00F17956 /* GLImageStickerFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B44ADD06222668DD00F17956 /* GLImageStickerFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB44ADD0D222668DD00F17956 /* GLImageStickerFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B44ADD07222668DD00F17956 /* GLImageStickerFilter.m */; };\n\t\tB44ADD0E222668DD00F17956 /* GLImageAddStickerWithEffectFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B44ADD08222668DD00F17956 /* GLImageAddStickerWithEffectFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB44ADD132226695200F17956 /* GLImageMixBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B44ADD0F2226695100F17956 /* GLImageMixBlendFilter.m */; };\n\t\tB44ADD142226695200F17956 /* GLImageBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B44ADD102226695100F17956 /* GLImageBlendFilter.m */; };\n\t\tB44ADD152226695200F17956 /* GLImageBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B44ADD112226695100F17956 /* GLImageBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB44ADD162226695200F17956 /* GLImageMixBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B44ADD122226695200F17956 /* GLImageMixBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB44ADD19222669AB00F17956 /* GLImageGassianBlurMixFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B44ADD17222669AA00F17956 /* GLImageGassianBlurMixFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB44ADD1A222669AB00F17956 /* GLImageGassianBlurMixFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B44ADD18222669AB00F17956 /* GLImageGassianBlurMixFilter.m */; };\n\t\tB4620D4A22203FBE00EE2876 /* GPURenderKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = B4620D4022203FBE00EE2876 /* GPURenderKit.framework */; };\n\t\tB4620D4F22203FBE00EE2876 /* GPURenderKitTests.m in Sources */ = {isa = PBXBuildFile; fileRef = B4620D4E22203FBE00EE2876 /* GPURenderKitTests.m */; };\n\t\tB4620D5122203FBE00EE2876 /* GPURenderKit.h in Headers */ = {isa = PBXBuildFile; fileRef = B4620D4322203FBE00EE2876 /* GPURenderKit.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46212AF2220436300EE2876 /* GPUImage.h in Headers */ = {isa = PBXBuildFile; fileRef = B46211402220435400EE2876 /* GPUImage.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46213382220436300EE2876 /* GPUImageView.h in Headers */ = {isa = PBXBuildFile; fileRef = B46211CA2220436100EE2876 /* GPUImageView.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46213392220436300EE2876 /* GPUImageContext.m in Sources */ = {isa = PBXBuildFile; fileRef = B46211CB2220436100EE2876 /* GPUImageContext.m */; };\n\t\tB462133A2220436300EE2876 /* GPUImageMovieWriter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46211CC2220436100EE2876 /* GPUImageMovieWriter.m */; };\n\t\tB462133B2220436300EE2876 /* GPUImageFramework.h 
in Headers */ = {isa = PBXBuildFile; fileRef = B46211CE2220436100EE2876 /* GPUImageFramework.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462133C2220436300EE2876 /* Info.plist in Resources */ = {isa = PBXBuildFile; fileRef = B46211D02220436100EE2876 /* Info.plist */; };\n\t\tB462133D2220436300EE2876 /* GPUImagePicture.m in Sources */ = {isa = PBXBuildFile; fileRef = B46211D12220436100EE2876 /* GPUImagePicture.m */; };\n\t\tB462133E2220436300EE2876 /* GPUImagePicture+TextureSubimage.m in Sources */ = {isa = PBXBuildFile; fileRef = B46211D22220436100EE2876 /* GPUImagePicture+TextureSubimage.m */; };\n\t\tB462133F2220436300EE2876 /* GPUImageContext.h in Headers */ = {isa = PBXBuildFile; fileRef = B46211D32220436100EE2876 /* GPUImageContext.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46213402220436300EE2876 /* GPUImageView.m in Sources */ = {isa = PBXBuildFile; fileRef = B46211D42220436100EE2876 /* GPUImageView.m */; };\n\t\tB46213412220436300EE2876 /* GPUImageMovieWriter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46211D52220436100EE2876 /* GPUImageMovieWriter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46213422220436300EE2876 /* GPUImagePicture.h in Headers */ = {isa = PBXBuildFile; fileRef = B46211D62220436100EE2876 /* GPUImagePicture.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46213432220436300EE2876 /* GPUImagePicture+TextureSubimage.h in Headers */ = {isa = PBXBuildFile; fileRef = B46211D72220436100EE2876 /* GPUImagePicture+TextureSubimage.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621617222046DF00EE2876 /* GPUImageTwoPassFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46215F5222046DC00EE2876 /* GPUImageTwoPassFilter.m */; };\n\t\tB4621618222046DF00EE2876 /* GPUImageFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46215F6222046DC00EE2876 /* GPUImageFilter.m */; };\n\t\tB4621619222046DF00EE2876 /* GPUImageTextureOutput.h in Headers */ = {isa = PBXBuildFile; fileRef = B46215F7222046DC00EE2876 /* GPUImageTextureOutput.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462161A222046DF00EE2876 /* GPUImageTwoPassTextureSamplingFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46215F8222046DC00EE2876 /* GPUImageTwoPassTextureSamplingFilter.m */; };\n\t\tB462161B222046DF00EE2876 /* GPUImageTwoInputFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46215F9222046DC00EE2876 /* GPUImageTwoInputFilter.m */; };\n\t\tB462161C222046DF00EE2876 /* GPUImageMovie.m in Sources */ = {isa = PBXBuildFile; fileRef = B46215FA222046DC00EE2876 /* GPUImageMovie.m */; };\n\t\tB462161D222046DF00EE2876 /* GLProgram.h in Headers */ = {isa = PBXBuildFile; fileRef = B46215FB222046DC00EE2876 /* GLProgram.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462161E222046DF00EE2876 /* GPUImageMovie.h in Headers */ = {isa = PBXBuildFile; fileRef = B46215FC222046DC00EE2876 /* GPUImageMovie.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462161F222046DF00EE2876 /* GPUImageOutput.h in Headers */ = {isa = PBXBuildFile; fileRef = B46215FD222046DC00EE2876 /* GPUImageOutput.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621620222046DF00EE2876 /* GPUImageMovieComposition.m in Sources */ = {isa = PBXBuildFile; fileRef = B46215FE222046DC00EE2876 /* GPUImageMovieComposition.m */; };\n\t\tB4621621222046DF00EE2876 /* GLProgram.m in Sources */ = {isa = PBXBuildFile; fileRef = B46215FF222046DC00EE2876 /* GLProgram.m */; };\n\t\tB4621622222046DF00EE2876 /* GPUImageTextureInput.m in Sources */ = {isa = PBXBuildFile; fileRef = 
B4621600222046DC00EE2876 /* GPUImageTextureInput.m */; };\n\t\tB4621623222046DF00EE2876 /* GPUImageFourInputFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621601222046DC00EE2876 /* GPUImageFourInputFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621624222046DF00EE2876 /* GPUImageFilterGroup.m in Sources */ = {isa = PBXBuildFile; fileRef = B4621602222046DC00EE2876 /* GPUImageFilterGroup.m */; };\n\t\tB4621625222046DF00EE2876 /* GPUImageOutput.m in Sources */ = {isa = PBXBuildFile; fileRef = B4621603222046DD00EE2876 /* GPUImageOutput.m */; };\n\t\tB4621626222046DF00EE2876 /* GPUImageTwoPassTextureSamplingFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621604222046DD00EE2876 /* GPUImageTwoPassTextureSamplingFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621627222046DF00EE2876 /* GPUImageFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621605222046DD00EE2876 /* GPUImageFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621628222046DF00EE2876 /* GPUImageFilterGroup.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621606222046DD00EE2876 /* GPUImageFilterGroup.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621629222046DF00EE2876 /* GPUImageFramebufferCache.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621607222046DD00EE2876 /* GPUImageFramebufferCache.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462162A222046DF00EE2876 /* GPUImageVideoCamera.m in Sources */ = {isa = PBXBuildFile; fileRef = B4621608222046DD00EE2876 /* GPUImageVideoCamera.m */; };\n\t\tB462162B222046DF00EE2876 /* GPUImageTwoInputFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621609222046DD00EE2876 /* GPUImageTwoInputFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462162C222046DF00EE2876 /* GPUImageFramebuffer.m in Sources */ = {isa = PBXBuildFile; fileRef = B462160A222046DD00EE2876 /* GPUImageFramebuffer.m */; };\n\t\tB462162D222046DF00EE2876 /* GPUImageBuffer.h in Headers */ = {isa = PBXBuildFile; fileRef = B462160B222046DD00EE2876 /* GPUImageBuffer.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462162E222046DF00EE2876 /* GPUImageFramebuffer.h in Headers */ = {isa = PBXBuildFile; fileRef = B462160C222046DD00EE2876 /* GPUImageFramebuffer.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462162F222046DF00EE2876 /* GPUImageTextureInput.h in Headers */ = {isa = PBXBuildFile; fileRef = B462160D222046DE00EE2876 /* GPUImageTextureInput.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621630222046DF00EE2876 /* GPUImageBuffer.m in Sources */ = {isa = PBXBuildFile; fileRef = B462160E222046DE00EE2876 /* GPUImageBuffer.m */; };\n\t\tB4621631222046DF00EE2876 /* GPUImageFramebufferCache.m in Sources */ = {isa = PBXBuildFile; fileRef = B462160F222046DE00EE2876 /* GPUImageFramebufferCache.m */; };\n\t\tB4621632222046DF00EE2876 /* GPUImageTwoPassFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621610222046DE00EE2876 /* GPUImageTwoPassFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621633222046DF00EE2876 /* GPUImageVideoCamera.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621611222046DE00EE2876 /* GPUImageVideoCamera.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621634222046DF00EE2876 /* GPUImageTextureOutput.m in Sources */ = {isa = PBXBuildFile; fileRef = B4621612222046DE00EE2876 /* GPUImageTextureOutput.m */; };\n\t\tB4621635222046DF00EE2876 /* GPUImageFilterPipeline.m in Sources */ = {isa = PBXBuildFile; fileRef = B4621613222046DF00EE2876 /* 
GPUImageFilterPipeline.m */; };\n\t\tB4621636222046DF00EE2876 /* GPUImageMovieComposition.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621614222046DF00EE2876 /* GPUImageMovieComposition.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621637222046DF00EE2876 /* GPUImageFilterPipeline.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621615222046DF00EE2876 /* GPUImageFilterPipeline.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621638222046DF00EE2876 /* GPUImageFourInputFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B4621616222046DF00EE2876 /* GPUImageFourInputFilter.m */; };\n\t\tB46216542220474D00EE2876 /* GPUImage3x3ConvolutionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462163A2220474B00EE2876 /* GPUImage3x3ConvolutionFilter.m */; };\n\t\tB46216552220474D00EE2876 /* GPUImageAverageColor.h in Headers */ = {isa = PBXBuildFile; fileRef = B462163B2220474B00EE2876 /* GPUImageAverageColor.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216562220474D00EE2876 /* GPUImageBulgeDistortionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462163C2220474B00EE2876 /* GPUImageBulgeDistortionFilter.m */; };\n\t\tB46216572220474D00EE2876 /* GPUImageBilateralFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462163D2220474B00EE2876 /* GPUImageBilateralFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216582220474D00EE2876 /* GPUImageBoxBlurFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462163E2220474B00EE2876 /* GPUImageBoxBlurFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216592220474D00EE2876 /* GPUImageBrightnessFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462163F2220474B00EE2876 /* GPUImageBrightnessFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462165A2220474D00EE2876 /* GPUImageAmatorkaFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216402220474B00EE2876 /* GPUImageAmatorkaFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462165B2220474D00EE2876 /* GPUImageBoxBlurFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216412220474B00EE2876 /* GPUImageBoxBlurFilter.m */; };\n\t\tB462165C2220474D00EE2876 /* GPUImageAverageLuminanceThresholdFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216422220474B00EE2876 /* GPUImageAverageLuminanceThresholdFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462165D2220474D00EE2876 /* GPUImageAlphaBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216432220474B00EE2876 /* GPUImageAlphaBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462165E2220474D00EE2876 /* GPUImageAverageLuminanceThresholdFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216442220474C00EE2876 /* GPUImageAverageLuminanceThresholdFilter.m */; };\n\t\tB462165F2220474D00EE2876 /* GPUImageAdaptiveThresholdFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216452220474C00EE2876 /* GPUImageAdaptiveThresholdFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216602220474D00EE2876 /* GPUImageAverageColor.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216462220474C00EE2876 /* GPUImageAverageColor.m */; };\n\t\tB46216612220474D00EE2876 /* GPUImageAddBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216472220474C00EE2876 /* GPUImageAddBlendFilter.m */; };\n\t\tB46216622220474D00EE2876 /* GPUImageCannyEdgeDetectionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216482220474C00EE2876 /* GPUImageCannyEdgeDetectionFilter.m */; 
};\n\t\tB46216632220474D00EE2876 /* GPUImageAlphaBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216492220474C00EE2876 /* GPUImageAlphaBlendFilter.m */; };\n\t\tB46216642220474D00EE2876 /* GPUImageAddBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462164A2220474C00EE2876 /* GPUImageAddBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216652220474D00EE2876 /* GPUImageCannyEdgeDetectionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462164B2220474C00EE2876 /* GPUImageCannyEdgeDetectionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216662220474D00EE2876 /* GPUImageBulgeDistortionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462164C2220474C00EE2876 /* GPUImageBulgeDistortionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216672220474D00EE2876 /* GPUImageAdaptiveThresholdFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462164D2220474C00EE2876 /* GPUImageAdaptiveThresholdFilter.m */; };\n\t\tB46216682220474D00EE2876 /* GPUImageBilateralFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462164E2220474C00EE2876 /* GPUImageBilateralFilter.m */; };\n\t\tB46216692220474D00EE2876 /* GPUImageBrightnessFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462164F2220474C00EE2876 /* GPUImageBrightnessFilter.m */; };\n\t\tB462166A2220474D00EE2876 /* GPUImage3x3ConvolutionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216502220474D00EE2876 /* GPUImage3x3ConvolutionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462166B2220474D00EE2876 /* GPUImage3x3TextureSamplingFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216512220474D00EE2876 /* GPUImage3x3TextureSamplingFilter.m */; };\n\t\tB462166C2220474D00EE2876 /* GPUImage3x3TextureSamplingFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216522220474D00EE2876 /* GPUImage3x3TextureSamplingFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462166D2220474D00EE2876 /* GPUImageAmatorkaFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216532220474D00EE2876 /* GPUImageAmatorkaFilter.m */; };\n\t\tB462168A2220475E00EE2876 /* GPUImageClosingFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462166E2220475C00EE2876 /* GPUImageClosingFilter.m */; };\n\t\tB462168B2220475E00EE2876 /* GPUImageCGAColorspaceFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462166F2220475C00EE2876 /* GPUImageCGAColorspaceFilter.m */; };\n\t\tB462168C2220475E00EE2876 /* GPUImageColorInvertFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216702220475C00EE2876 /* GPUImageColorInvertFilter.m */; };\n\t\tB462168D2220475E00EE2876 /* GPUImageColourFASTFeatureDetector.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216712220475C00EE2876 /* GPUImageColourFASTFeatureDetector.m */; };\n\t\tB462168E2220475E00EE2876 /* GPUImageChromaKeyFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216722220475C00EE2876 /* GPUImageChromaKeyFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462168F2220475E00EE2876 /* GPUImageColorConversion.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216732220475C00EE2876 /* GPUImageColorConversion.m */; };\n\t\tB46216902220475E00EE2876 /* GPUImageColorMatrixFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216742220475C00EE2876 /* GPUImageColorMatrixFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216912220475E00EE2876 /* GPUImageChromaKeyFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216752220475C00EE2876 /* 
GPUImageChromaKeyFilter.m */; };\n\t\tB46216922220475E00EE2876 /* GPUImageCGAColorspaceFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216762220475C00EE2876 /* GPUImageCGAColorspaceFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216932220475E00EE2876 /* GPUImageColorPackingFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216772220475C00EE2876 /* GPUImageColorPackingFilter.m */; };\n\t\tB46216942220475E00EE2876 /* GPUImageColorDodgeBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216782220475D00EE2876 /* GPUImageColorDodgeBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216952220475E00EE2876 /* GPUImageChromaKeyBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216792220475D00EE2876 /* GPUImageChromaKeyBlendFilter.m */; };\n\t\tB46216962220475E00EE2876 /* GPUImageColourFASTSamplingOperation.h in Headers */ = {isa = PBXBuildFile; fileRef = B462167A2220475D00EE2876 /* GPUImageColourFASTSamplingOperation.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216972220475E00EE2876 /* GPUImageColorBurnBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462167B2220475D00EE2876 /* GPUImageColorBurnBlendFilter.m */; };\n\t\tB46216982220475E00EE2876 /* GPUImageColorBurnBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462167C2220475D00EE2876 /* GPUImageColorBurnBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216992220475E00EE2876 /* GPUImageChromaKeyBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462167D2220475D00EE2876 /* GPUImageChromaKeyBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462169A2220475E00EE2876 /* GPUImageColorDodgeBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462167E2220475D00EE2876 /* GPUImageColorDodgeBlendFilter.m */; };\n\t\tB462169B2220475E00EE2876 /* GPUImageColorPackingFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462167F2220475D00EE2876 /* GPUImageColorPackingFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462169C2220475E00EE2876 /* GPUImageClosingFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216802220475D00EE2876 /* GPUImageClosingFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462169D2220475E00EE2876 /* GPUImageColorConversion.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216812220475D00EE2876 /* GPUImageColorConversion.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462169E2220475E00EE2876 /* GPUImageColorBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216822220475D00EE2876 /* GPUImageColorBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462169F2220475E00EE2876 /* GPUImageColorMatrixFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216832220475E00EE2876 /* GPUImageColorMatrixFilter.m */; };\n\t\tB46216A02220475E00EE2876 /* GPUImageColorBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216842220475E00EE2876 /* GPUImageColorBlendFilter.m */; };\n\t\tB46216A12220475E00EE2876 /* GPUImageColorInvertFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216852220475E00EE2876 /* GPUImageColorInvertFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216A22220475E00EE2876 /* GPUImageColorLocalBinaryPatternFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216862220475E00EE2876 /* GPUImageColorLocalBinaryPatternFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216A32220475E00EE2876 /* GPUImageColorLocalBinaryPatternFilter.m in Sources */ = {isa = 
PBXBuildFile; fileRef = B46216872220475E00EE2876 /* GPUImageColorLocalBinaryPatternFilter.m */; };\n\t\tB46216A42220475E00EE2876 /* GPUImageColourFASTFeatureDetector.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216882220475E00EE2876 /* GPUImageColourFASTFeatureDetector.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216A52220475E00EE2876 /* GPUImageColourFASTSamplingOperation.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216892220475E00EE2876 /* GPUImageColourFASTSamplingOperation.m */; };\n\t\tB46216CA2220477200EE2876 /* GPUImageToonFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216A62220476E00EE2876 /* GPUImageToonFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216CB2220477200EE2876 /* GPUImageTwoInputCrossTextureSamplingFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216A72220476F00EE2876 /* GPUImageTwoInputCrossTextureSamplingFilter.m */; };\n\t\tB46216CC2220477200EE2876 /* GPUImageThresholdSketchFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216A82220476F00EE2876 /* GPUImageThresholdSketchFilter.m */; };\n\t\tB46216CD2220477200EE2876 /* GPUImageVibranceFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216A92220476F00EE2876 /* GPUImageVibranceFilter.m */; };\n\t\tB46216CE2220477200EE2876 /* GPUImageXYDerivativeFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216AA2220476F00EE2876 /* GPUImageXYDerivativeFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216CF2220477200EE2876 /* GPUImageToneCurveFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216AB2220476F00EE2876 /* GPUImageToneCurveFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216D02220477200EE2876 /* GPUImageToonFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216AC2220476F00EE2876 /* GPUImageToonFilter.m */; };\n\t\tB46216D12220477200EE2876 /* GPUImageWeakPixelInclusionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216AD2220476F00EE2876 /* GPUImageWeakPixelInclusionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216D22220477200EE2876 /* GPUImageTransformFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216AE2220476F00EE2876 /* GPUImageTransformFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216D32220477200EE2876 /* GPUImageThresholdedNonMaximumSuppressionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216AF2220476F00EE2876 /* GPUImageThresholdedNonMaximumSuppressionFilter.m */; };\n\t\tB46216D42220477200EE2876 /* GPUImageWeakPixelInclusionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216B02220476F00EE2876 /* GPUImageWeakPixelInclusionFilter.m */; };\n\t\tB46216D52220477200EE2876 /* GPUImageZoomBlurFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216B12220477000EE2876 /* GPUImageZoomBlurFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216D62220477200EE2876 /* GPUImageUIElement.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216B22220477000EE2876 /* GPUImageUIElement.m */; };\n\t\tB46216D72220477200EE2876 /* GPUImageTiltShiftFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216B32220477000EE2876 /* GPUImageTiltShiftFilter.m */; };\n\t\tB46216D92220477200EE2876 /* GPUImageToneCurveFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216B52220477000EE2876 /* GPUImageToneCurveFilter.m */; };\n\t\tB46216DA2220477200EE2876 /* GPUImageZoomBlurFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216B62220477000EE2876 /* GPUImageZoomBlurFilter.m */; 
};\n\t\tB46216DB2220477200EE2876 /* GPUImageUnsharpMaskFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216B72220477000EE2876 /* GPUImageUnsharpMaskFilter.m */; };\n\t\tB46216DC2220477200EE2876 /* GPUImageXYDerivativeFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216B82220477000EE2876 /* GPUImageXYDerivativeFilter.m */; };\n\t\tB46216DD2220477200EE2876 /* GPUImageThresholdEdgeDetectionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216B92220477000EE2876 /* GPUImageThresholdEdgeDetectionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216DE2220477200EE2876 /* GPUImageUIElement.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216BA2220477000EE2876 /* GPUImageUIElement.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216DF2220477200EE2876 /* GPUImageVignetteFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216BB2220477000EE2876 /* GPUImageVignetteFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216E12220477200EE2876 /* GPUImageVignetteFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216BD2220477100EE2876 /* GPUImageVignetteFilter.m */; };\n\t\tB46216E22220477200EE2876 /* GPUImageWhiteBalanceFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216BE2220477100EE2876 /* GPUImageWhiteBalanceFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216E32220477200EE2876 /* GPUImageTiltShiftFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216BF2220477100EE2876 /* GPUImageTiltShiftFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216E42220477200EE2876 /* GPUImageThresholdedNonMaximumSuppressionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216C02220477100EE2876 /* GPUImageThresholdedNonMaximumSuppressionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216E52220477200EE2876 /* GPUImageVibranceFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216C12220477100EE2876 /* GPUImageVibranceFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216E62220477200EE2876 /* GPUImageUnsharpMaskFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216C22220477100EE2876 /* GPUImageUnsharpMaskFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216E72220477200EE2876 /* GPUImageThresholdSketchFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216C32220477100EE2876 /* GPUImageThresholdSketchFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216E82220477200EE2876 /* GPUImageVoronoiConsumerFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216C42220477100EE2876 /* GPUImageVoronoiConsumerFilter.m */; };\n\t\tB46216E92220477200EE2876 /* GPUImageThresholdEdgeDetectionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216C52220477100EE2876 /* GPUImageThresholdEdgeDetectionFilter.m */; };\n\t\tB46216EA2220477200EE2876 /* GPUImageVoronoiConsumerFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216C62220477100EE2876 /* GPUImageVoronoiConsumerFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216EB2220477200EE2876 /* GPUImageTwoInputCrossTextureSamplingFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216C72220477100EE2876 /* GPUImageTwoInputCrossTextureSamplingFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46216EC2220477200EE2876 /* GPUImageWhiteBalanceFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216C82220477200EE2876 /* GPUImageWhiteBalanceFilter.m */; };\n\t\tB46216ED2220477200EE2876 /* GPUImageTransformFilter.m in Sources */ = {isa = 
PBXBuildFile; fileRef = B46216C92220477200EE2876 /* GPUImageTransformFilter.m */; };\n\t\tB46217862220479F00EE2876 /* GPUImageHistogramFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216EE2220479100EE2876 /* GPUImageHistogramFilter.m */; };\n\t\tB46217872220479F00EE2876 /* GPUImageExposureFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216EF2220479100EE2876 /* GPUImageExposureFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217882220479F00EE2876 /* GPUImageLaplacianFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216F02220479100EE2876 /* GPUImageLaplacianFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217892220479F00EE2876 /* GPUImageJFAVoronoiFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216F12220479100EE2876 /* GPUImageJFAVoronoiFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462178A2220479F00EE2876 /* GPUImageHalftoneFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216F22220479100EE2876 /* GPUImageHalftoneFilter.m */; };\n\t\tB462178B2220479F00EE2876 /* GPUImageLevelsFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216F32220479100EE2876 /* GPUImageLevelsFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462178C2220479F00EE2876 /* GPUImageNormalBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216F42220479100EE2876 /* GPUImageNormalBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462178D2220479F00EE2876 /* GPUImageMonochromeFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216F52220479100EE2876 /* GPUImageMonochromeFilter.m */; };\n\t\tB462178E2220479F00EE2876 /* GPUImageDivideBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216F62220479200EE2876 /* GPUImageDivideBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462178F2220479F00EE2876 /* GPUImageLookupFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216F72220479200EE2876 /* GPUImageLookupFilter.m */; };\n\t\tB46217902220479F00EE2876 /* GPUImageGlassSphereFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216F82220479200EE2876 /* GPUImageGlassSphereFilter.m */; };\n\t\tB46217912220479F00EE2876 /* GPUImageHSBFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216F92220479200EE2876 /* GPUImageHSBFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217922220479F00EE2876 /* GPUImageHueFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216FA2220479200EE2876 /* GPUImageHueFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217932220479F00EE2876 /* GPUImageiOSBlurFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216FB2220479200EE2876 /* GPUImageiOSBlurFilter.m */; };\n\t\tB46217942220479F00EE2876 /* GPUImageNobleCornerDetectionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216FC2220479200EE2876 /* GPUImageNobleCornerDetectionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217952220479F00EE2876 /* GPUImageNonMaximumSuppressionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216FD2220479200EE2876 /* GPUImageNonMaximumSuppressionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217962220479F00EE2876 /* GPUImageHarrisCornerDetectionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46216FE2220479200EE2876 /* GPUImageHarrisCornerDetectionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217972220479F00EE2876 /* GPUImageHoughTransformLineDetector.m in Sources */ = {isa = PBXBuildFile; fileRef = B46216FF2220479200EE2876 /* 
GPUImageHoughTransformLineDetector.m */; };\n\t\tB46217982220479F00EE2876 /* GPUImageLuminosity.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217002220479200EE2876 /* GPUImageLuminosity.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217992220479F00EE2876 /* GPUImageLowPassFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217012220479300EE2876 /* GPUImageLowPassFilter.m */; };\n\t\tB462179A2220479F00EE2876 /* GPUImagePolkaDotFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217022220479300EE2876 /* GPUImagePolkaDotFilter.m */; };\n\t\tB462179B2220479F00EE2876 /* GPUImageHueBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217032220479300EE2876 /* GPUImageHueBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462179C2220479F00EE2876 /* GPUImageJFAVoronoiFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217042220479300EE2876 /* GPUImageJFAVoronoiFilter.m */; };\n\t\tB462179D2220479F00EE2876 /* GPUImageGrayscaleFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217052220479300EE2876 /* GPUImageGrayscaleFilter.m */; };\n\t\tB462179E2220479F00EE2876 /* GPUImageHistogramEqualizationFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217062220479300EE2876 /* GPUImageHistogramEqualizationFilter.m */; };\n\t\tB462179F2220479F00EE2876 /* GPUImageHardLightBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217072220479300EE2876 /* GPUImageHardLightBlendFilter.m */; };\n\t\tB46217A02220479F00EE2876 /* GPUImageEmbossFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217082220479300EE2876 /* GPUImageEmbossFilter.m */; };\n\t\tB46217A12220479F00EE2876 /* GPUImageLineGenerator.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217092220479300EE2876 /* GPUImageLineGenerator.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217A22220479F00EE2876 /* GPUImageLocalBinaryPatternFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462170A2220479300EE2876 /* GPUImageLocalBinaryPatternFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217A32220479F00EE2876 /* GPUImageHueBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462170B2220479300EE2876 /* GPUImageHueBlendFilter.m */; };\n\t\tB46217A42220479F00EE2876 /* GPUImageDirectionalNonMaximumSuppressionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462170C2220479400EE2876 /* GPUImageDirectionalNonMaximumSuppressionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217A52220479F00EE2876 /* GPUImageMultiplyBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462170D2220479400EE2876 /* GPUImageMultiplyBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217A62220479F00EE2876 /* GPUImageOpeningFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462170E2220479400EE2876 /* GPUImageOpeningFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217A72220479F00EE2876 /* GPUImageHighlightShadowTintFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462170F2220479400EE2876 /* GPUImageHighlightShadowTintFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217A82220479F00EE2876 /* GPUImageCropFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217102220479400EE2876 /* GPUImageCropFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217A92220479F00EE2876 /* GPUImageHistogramGenerator.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217112220479400EE2876 /* GPUImageHistogramGenerator.h */; settings = {ATTRIBUTES = (Public, ); }; 
};\n\t\tB46217AA2220479F00EE2876 /* GPUImageHSBFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217122220479400EE2876 /* GPUImageHSBFilter.m */; };\n\t\tB46217AB2220479F00EE2876 /* GPUImageCrosshatchFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217132220479400EE2876 /* GPUImageCrosshatchFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217AC2220479F00EE2876 /* GPUImageHighPassFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217142220479400EE2876 /* GPUImageHighPassFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217AD2220479F00EE2876 /* GPUImageMotionBlurFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217152220479400EE2876 /* GPUImageMotionBlurFilter.m */; };\n\t\tB46217AE2220479F00EE2876 /* GPUImageDifferenceBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217162220479400EE2876 /* GPUImageDifferenceBlendFilter.m */; };\n\t\tB46217AF2220479F00EE2876 /* GPUImageCrosshatchFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217172220479400EE2876 /* GPUImageCrosshatchFilter.m */; };\n\t\tB46217B02220479F00EE2876 /* GPUImageHueFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217182220479500EE2876 /* GPUImageHueFilter.m */; };\n\t\tB46217B12220479F00EE2876 /* GPUImageGaussianSelectiveBlurFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217192220479500EE2876 /* GPUImageGaussianSelectiveBlurFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217B22220479F00EE2876 /* GPUImageExclusionBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462171A2220479500EE2876 /* GPUImageExclusionBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217B32220479F00EE2876 /* GPUImageHardLightBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462171B2220479500EE2876 /* GPUImageHardLightBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217B42220479F00EE2876 /* GPUImageKuwaharaFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462171C2220479500EE2876 /* GPUImageKuwaharaFilter.m */; };\n\t\tB46217B52220479F00EE2876 /* GPUImageLanczosResamplingFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462171D2220479500EE2876 /* GPUImageLanczosResamplingFilter.m */; };\n\t\tB46217B62220479F00EE2876 /* GPUImageHistogramFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462171E2220479500EE2876 /* GPUImageHistogramFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217B72220479F00EE2876 /* GPUImageMotionDetector.h in Headers */ = {isa = PBXBuildFile; fileRef = B462171F2220479500EE2876 /* GPUImageMotionDetector.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217B82220479F00EE2876 /* GPUImageHazeFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217202220479500EE2876 /* GPUImageHazeFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217B92220479F00EE2876 /* GPUImageCropFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217212220479500EE2876 /* GPUImageCropFilter.m */; };\n\t\tB46217BA2220479F00EE2876 /* GPUImageLinearBurnBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217222220479500EE2876 /* GPUImageLinearBurnBlendFilter.m */; };\n\t\tB46217BB2220479F00EE2876 /* GPUImageGrayscaleFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217232220479600EE2876 /* GPUImageGrayscaleFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217BC2220479F00EE2876 /* GPUImagePoissonBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217242220479600EE2876 /* 
GPUImagePoissonBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217BD2220479F00EE2876 /* GPUImageLaplacianFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217252220479600EE2876 /* GPUImageLaplacianFilter.m */; };\n\t\tB46217BE2220479F00EE2876 /* GPUImageExclusionBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217262220479600EE2876 /* GPUImageExclusionBlendFilter.m */; };\n\t\tB46217BF2220479F00EE2876 /* GPUImageGammaFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217272220479600EE2876 /* GPUImageGammaFilter.m */; };\n\t\tB46217C02220479F00EE2876 /* GPUImageFalseColorFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217282220479600EE2876 /* GPUImageFalseColorFilter.m */; };\n\t\tB46217C12220479F00EE2876 /* GPUImageDarkenBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217292220479600EE2876 /* GPUImageDarkenBlendFilter.m */; };\n\t\tB46217C22220479F00EE2876 /* GPUImageCrosshairGenerator.m in Sources */ = {isa = PBXBuildFile; fileRef = B462172A2220479600EE2876 /* GPUImageCrosshairGenerator.m */; };\n\t\tB46217C32220479F00EE2876 /* GPUImageLineGenerator.m in Sources */ = {isa = PBXBuildFile; fileRef = B462172B2220479600EE2876 /* GPUImageLineGenerator.m */; };\n\t\tB46217C42220479F00EE2876 /* GPUImageDissolveBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462172C2220479600EE2876 /* GPUImageDissolveBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217C52220479F00EE2876 /* GPUImageLowPassFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462172D2220479600EE2876 /* GPUImageLowPassFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217C62220479F00EE2876 /* GPUImageErosionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462172E2220479700EE2876 /* GPUImageErosionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217C72220479F00EE2876 /* GPUImageGammaFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462172F2220479700EE2876 /* GPUImageGammaFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217C82220479F00EE2876 /* GPUImageOverlayBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217302220479700EE2876 /* GPUImageOverlayBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217C92220479F00EE2876 /* GPUImageMissEtikateFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217312220479700EE2876 /* GPUImageMissEtikateFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217CA2220479F00EE2876 /* GPUImageDirectionalSobelEdgeDetectionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217322220479700EE2876 /* GPUImageDirectionalSobelEdgeDetectionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217CB2220479F00EE2876 /* GPUImageLevelsFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217332220479700EE2876 /* GPUImageLevelsFilter.m */; };\n\t\tB46217CC2220479F00EE2876 /* GPUImageKuwaharaFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217342220479700EE2876 /* GPUImageKuwaharaFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217CD2220479F00EE2876 /* GPUImageFASTCornerDetectionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217352220479700EE2876 /* GPUImageFASTCornerDetectionFilter.m */; };\n\t\tB46217CE2220479F00EE2876 /* GPUImageOverlayBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217362220479700EE2876 /* GPUImageOverlayBlendFilter.m */; };\n\t\tB46217CF2220479F00EE2876 /* GPUImageMedianFilter.h in Headers */ = {isa = 
PBXBuildFile; fileRef = B46217372220479700EE2876 /* GPUImageMedianFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217D02220479F00EE2876 /* GPUImagePinchDistortionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217382220479700EE2876 /* GPUImagePinchDistortionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217D12220479F00EE2876 /* GPUImageOpacityFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217392220479800EE2876 /* GPUImageOpacityFilter.m */; };\n\t\tB46217D22220479F00EE2876 /* GPUImageNormalBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462173A2220479800EE2876 /* GPUImageNormalBlendFilter.m */; };\n\t\tB46217D32220479F00EE2876 /* GPUImagePixellateFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462173B2220479800EE2876 /* GPUImagePixellateFilter.m */; };\n\t\tB46217D42220479F00EE2876 /* GPUImageGaussianBlurFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462173C2220479800EE2876 /* GPUImageGaussianBlurFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217D52220479F00EE2876 /* GPUImagePerlinNoiseFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462173D2220479800EE2876 /* GPUImagePerlinNoiseFilter.m */; };\n\t\tB46217D62220479F00EE2876 /* GPUImageContrastFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462173E2220479800EE2876 /* GPUImageContrastFilter.m */; };\n\t\tB46217D72220479F00EE2876 /* GPUImagePixellatePositionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462173F2220479800EE2876 /* GPUImagePixellatePositionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217D82220479F00EE2876 /* GPUImageLocalBinaryPatternFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217402220479800EE2876 /* GPUImageLocalBinaryPatternFilter.m */; };\n\t\tB46217D92220479F00EE2876 /* GPUImageHighlightShadowFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217412220479800EE2876 /* GPUImageHighlightShadowFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217DA2220479F00EE2876 /* GPUImageLuminanceThresholdFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217422220479800EE2876 /* GPUImageLuminanceThresholdFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217DB2220479F00EE2876 /* GPUImageDifferenceBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217432220479800EE2876 /* GPUImageDifferenceBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217DC2220479F00EE2876 /* GPUImagePixellatePositionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217442220479900EE2876 /* GPUImagePixellatePositionFilter.m */; };\n\t\tB46217DD2220479F00EE2876 /* GPUImageHalftoneFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217452220479900EE2876 /* GPUImageHalftoneFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217DE2220479F00EE2876 /* GPUImageMonochromeFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217462220479900EE2876 /* GPUImageMonochromeFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217DF2220479F00EE2876 /* GPUImageNonMaximumSuppressionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217472220479900EE2876 /* GPUImageNonMaximumSuppressionFilter.m */; };\n\t\tB46217E02220479F00EE2876 /* GPUImageGaussianBlurPositionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217482220479900EE2876 /* GPUImageGaussianBlurPositionFilter.m */; };\n\t\tB46217E12220479F00EE2876 /* GPUImageKuwaharaRadius3Filter.h in Headers */ = {isa = PBXBuildFile; fileRef = 
B46217492220479900EE2876 /* GPUImageKuwaharaRadius3Filter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217E22220479F00EE2876 /* GPUImageKuwaharaRadius3Filter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462174A2220479900EE2876 /* GPUImageKuwaharaRadius3Filter.m */; };\n\t\tB46217E32220479F00EE2876 /* GPUImageHoughTransformLineDetector.h in Headers */ = {isa = PBXBuildFile; fileRef = B462174B2220479900EE2876 /* GPUImageHoughTransformLineDetector.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217E42220479F00EE2876 /* GPUImageMaskFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462174C2220479900EE2876 /* GPUImageMaskFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217E52220479F00EE2876 /* GPUImageHazeFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462174D2220479900EE2876 /* GPUImageHazeFilter.m */; };\n\t\tB46217E62220479F00EE2876 /* GPUImageFASTCornerDetectionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462174E2220479A00EE2876 /* GPUImageFASTCornerDetectionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217E72220479F00EE2876 /* GPUImageMosaicFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462174F2220479A00EE2876 /* GPUImageMosaicFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217E82220479F00EE2876 /* GPUImagePolarPixellateFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217502220479A00EE2876 /* GPUImagePolarPixellateFilter.m */; };\n\t\tB46217E92220479F00EE2876 /* GPUImageCrosshairGenerator.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217512220479A00EE2876 /* GPUImageCrosshairGenerator.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217EA2220479F00EE2876 /* GPUImageLuminosityBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217522220479A00EE2876 /* GPUImageLuminosityBlendFilter.m */; };\n\t\tB46217EB2220479F00EE2876 /* GPUImageLightenBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217532220479A00EE2876 /* GPUImageLightenBlendFilter.m */; };\n\t\tB46217EC2220479F00EE2876 /* GPUImageHighlightShadowTintFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217542220479A00EE2876 /* GPUImageHighlightShadowTintFilter.m */; };\n\t\tB46217ED2220479F00EE2876 /* GPUImageLuminanceRangeFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217552220479A00EE2876 /* GPUImageLuminanceRangeFilter.m */; };\n\t\tB46217EE2220479F00EE2876 /* GPUImagePixellateFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217562220479A00EE2876 /* GPUImagePixellateFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217EF2220479F00EE2876 /* GPUImageDilationFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217572220479A00EE2876 /* GPUImageDilationFilter.m */; };\n\t\tB46217F02220479F00EE2876 /* GPUImageLuminanceRangeFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217582220479A00EE2876 /* GPUImageLuminanceRangeFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217F12220479F00EE2876 /* GPUImageMedianFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217592220479B00EE2876 /* GPUImageMedianFilter.m */; };\n\t\tB46217F22220479F00EE2876 /* GPUImageEmbossFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462175A2220479B00EE2876 /* GPUImageEmbossFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217F32220479F00EE2876 /* GPUImageHistogramGenerator.m in Sources */ = {isa = PBXBuildFile; fileRef = B462175B2220479B00EE2876 /* GPUImageHistogramGenerator.m */; 
};\n\t\tB46217F42220479F00EE2876 /* GPUImageMissEtikateFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462175C2220479B00EE2876 /* GPUImageMissEtikateFilter.m */; };\n\t\tB46217F52220479F00EE2876 /* GPUImageHighlightShadowFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462175D2220479B00EE2876 /* GPUImageHighlightShadowFilter.m */; };\n\t\tB46217F62220479F00EE2876 /* GPUImageParallelCoordinateLineTransformFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462175E2220479B00EE2876 /* GPUImageParallelCoordinateLineTransformFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217F72220479F00EE2876 /* GPUImageMotionBlurFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462175F2220479B00EE2876 /* GPUImageMotionBlurFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217F82220479F00EE2876 /* GPUImageNobleCornerDetectionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217602220479B00EE2876 /* GPUImageNobleCornerDetectionFilter.m */; };\n\t\tB46217F92220479F00EE2876 /* GPUImagePoissonBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217612220479B00EE2876 /* GPUImagePoissonBlendFilter.m */; };\n\t\tB46217FA2220479F00EE2876 /* GPUImageGlassSphereFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217622220479B00EE2876 /* GPUImageGlassSphereFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217FB2220479F00EE2876 /* GPUImageiOSBlurFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217632220479C00EE2876 /* GPUImageiOSBlurFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46217FC2220479F00EE2876 /* GPUImageErosionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217642220479C00EE2876 /* GPUImageErosionFilter.m */; };\n\t\tB46217FD2220479F00EE2876 /* GPUImageExposureFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217652220479C00EE2876 /* GPUImageExposureFilter.m */; };\n\t\tB46217FE2220479F00EE2876 /* GPUImageHighPassFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217662220479C00EE2876 /* GPUImageHighPassFilter.m */; };\n\t\tB46217FF2220479F00EE2876 /* GPUImageLuminosityBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217672220479C00EE2876 /* GPUImageLuminosityBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46218002220479F00EE2876 /* GPUImagePinchDistortionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217682220479C00EE2876 /* GPUImagePinchDistortionFilter.m */; };\n\t\tB46218012220479F00EE2876 /* GPUImageDivideBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217692220479C00EE2876 /* GPUImageDivideBlendFilter.m */; };\n\t\tB46218022220479F00EE2876 /* GPUImageLuminosity.m in Sources */ = {isa = PBXBuildFile; fileRef = B462176A2220479C00EE2876 /* GPUImageLuminosity.m */; };\n\t\tB46218032220479F00EE2876 /* GPUImageHarrisCornerDetectionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462176B2220479C00EE2876 /* GPUImageHarrisCornerDetectionFilter.m */; };\n\t\tB46218042220479F00EE2876 /* GPUImageDirectionalSobelEdgeDetectionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462176C2220479C00EE2876 /* GPUImageDirectionalSobelEdgeDetectionFilter.m */; };\n\t\tB46218052220479F00EE2876 /* GPUImageDilationFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462176D2220479C00EE2876 /* GPUImageDilationFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46218062220479F00EE2876 /* GPUImageLinearBurnBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462176E2220479D00EE2876 /* 
GPUImageLinearBurnBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46218072220479F00EE2876 /* GPUImageLanczosResamplingFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462176F2220479D00EE2876 /* GPUImageLanczosResamplingFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46218082220479F00EE2876 /* GPUImageMotionDetector.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217702220479D00EE2876 /* GPUImageMotionDetector.m */; };\n\t\tB46218092220479F00EE2876 /* GPUImageLuminanceThresholdFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217712220479D00EE2876 /* GPUImageLuminanceThresholdFilter.m */; };\n\t\tB462180A2220479F00EE2876 /* GPUImageHistogramEqualizationFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217722220479D00EE2876 /* GPUImageHistogramEqualizationFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462180B2220479F00EE2876 /* GPUImageOpacityFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217732220479D00EE2876 /* GPUImageOpacityFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462180C2220479F00EE2876 /* GPUImageGaussianBlurFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217742220479D00EE2876 /* GPUImageGaussianBlurFilter.m */; };\n\t\tB462180D2220479F00EE2876 /* GPUImageParallelCoordinateLineTransformFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217752220479D00EE2876 /* GPUImageParallelCoordinateLineTransformFilter.m */; };\n\t\tB462180E2220479F00EE2876 /* GPUImageDirectionalNonMaximumSuppressionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217762220479D00EE2876 /* GPUImageDirectionalNonMaximumSuppressionFilter.m */; };\n\t\tB462180F2220479F00EE2876 /* GPUImagePerlinNoiseFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217772220479D00EE2876 /* GPUImagePerlinNoiseFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46218102220479F00EE2876 /* GPUImageMosaicFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217782220479D00EE2876 /* GPUImageMosaicFilter.m */; };\n\t\tB46218112220479F00EE2876 /* GPUImagePolarPixellateFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217792220479E00EE2876 /* GPUImagePolarPixellateFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46218122220479F00EE2876 /* GPUImageGaussianSelectiveBlurFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462177A2220479E00EE2876 /* GPUImageGaussianSelectiveBlurFilter.m */; };\n\t\tB46218132220479F00EE2876 /* GPUImagePolkaDotFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462177B2220479E00EE2876 /* GPUImagePolkaDotFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46218142220479F00EE2876 /* GPUImageFalseColorFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462177C2220479E00EE2876 /* GPUImageFalseColorFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46218152220479F00EE2876 /* GPUImageDarkenBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462177D2220479E00EE2876 /* GPUImageDarkenBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46218162220479F00EE2876 /* GPUImageLookupFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462177E2220479E00EE2876 /* GPUImageLookupFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46218172220479F00EE2876 /* GPUImageContrastFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462177F2220479E00EE2876 /* GPUImageContrastFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB46218182220479F00EE2876 /* 
GPUImageMultiplyBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217802220479E00EE2876 /* GPUImageMultiplyBlendFilter.m */; };\n\t\tB46218192220479F00EE2876 /* GPUImageGaussianBlurPositionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217812220479E00EE2876 /* GPUImageGaussianBlurPositionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462181A2220479F00EE2876 /* GPUImageMaskFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217822220479E00EE2876 /* GPUImageMaskFilter.m */; };\n\t\tB462181B2220479F00EE2876 /* GPUImageLightenBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B46217832220479E00EE2876 /* GPUImageLightenBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462181C2220479F00EE2876 /* GPUImageDissolveBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217842220479F00EE2876 /* GPUImageDissolveBlendFilter.m */; };\n\t\tB462181D2220479F00EE2876 /* GPUImageOpeningFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B46217852220479F00EE2876 /* GPUImageOpeningFilter.m */; };\n\t\tB4621844222047B600EE2876 /* GPUImageRawDataOutput.m in Sources */ = {isa = PBXBuildFile; fileRef = B462181E222047B300EE2876 /* GPUImageRawDataOutput.m */; };\n\t\tB4621845222047B600EE2876 /* GPUImageRGBFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462181F222047B300EE2876 /* GPUImageRGBFilter.m */; };\n\t\tB4621846222047B600EE2876 /* GPUImageSingleComponentGaussianBlurFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B4621820222047B300EE2876 /* GPUImageSingleComponentGaussianBlurFilter.m */; };\n\t\tB4621847222047B600EE2876 /* GPUImageSkinToneFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621821222047B300EE2876 /* GPUImageSkinToneFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621848222047B600EE2876 /* GPUImageSketchFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621822222047B300EE2876 /* GPUImageSketchFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621849222047B600EE2876 /* GPUImageRGBDilationFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B4621823222047B300EE2876 /* GPUImageRGBDilationFilter.m */; };\n\t\tB462184A222047B600EE2876 /* GPUImageRGBFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621824222047B300EE2876 /* GPUImageRGBFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462184B222047B600EE2876 /* GPUImageScreenBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B4621825222047B300EE2876 /* GPUImageScreenBlendFilter.m */; };\n\t\tB462184C222047B600EE2876 /* GPUImageSketchFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B4621826222047B300EE2876 /* GPUImageSketchFilter.m */; };\n\t\tB462184D222047B600EE2876 /* GPUImageSaturationFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B4621827222047B300EE2876 /* GPUImageSaturationFilter.m */; };\n\t\tB462184E222047B600EE2876 /* GPUImagePosterizeFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B4621828222047B400EE2876 /* GPUImagePosterizeFilter.m */; };\n\t\tB462184F222047B600EE2876 /* GPUImageSmoothToonFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621829222047B400EE2876 /* GPUImageSmoothToonFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621850222047B600EE2876 /* GPUImageSaturationBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462182A222047B400EE2876 /* GPUImageSaturationBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621851222047B600EE2876 /* GPUImageRGBOpeningFilter.h in Headers */ = {isa = 
PBXBuildFile; fileRef = B462182B222047B400EE2876 /* GPUImageRGBOpeningFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621852222047B600EE2876 /* GPUImagePrewittEdgeDetectionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462182C222047B400EE2876 /* GPUImagePrewittEdgeDetectionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621853222047B600EE2876 /* GPUImageSmoothToonFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462182D222047B400EE2876 /* GPUImageSmoothToonFilter.m */; };\n\t\tB4621854222047B600EE2876 /* GPUImageRawDataInput.h in Headers */ = {isa = PBXBuildFile; fileRef = B462182E222047B400EE2876 /* GPUImageRawDataInput.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621855222047B600EE2876 /* GPUImagePosterizeFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462182F222047B400EE2876 /* GPUImagePosterizeFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621856222047B600EE2876 /* GPUImageRGBErosionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621830222047B400EE2876 /* GPUImageRGBErosionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621857222047B600EE2876 /* GPUImageSepiaFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621831222047B400EE2876 /* GPUImageSepiaFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621858222047B600EE2876 /* GPUImageSkinToneFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B4621832222047B400EE2876 /* GPUImageSkinToneFilter.m */; };\n\t\tB4621859222047B600EE2876 /* GPUImageSaturationFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621833222047B500EE2876 /* GPUImageSaturationFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462185A222047B600EE2876 /* GPUImageSharpenFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621834222047B500EE2876 /* GPUImageSharpenFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462185B222047B600EE2876 /* GPUImageRawDataInput.m in Sources */ = {isa = PBXBuildFile; fileRef = B4621835222047B500EE2876 /* GPUImageRawDataInput.m */; };\n\t\tB462185C222047B600EE2876 /* GPUImageShiTomasiFeatureDetectionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621836222047B500EE2876 /* GPUImageShiTomasiFeatureDetectionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462185D222047B600EE2876 /* GPUImageRGBErosionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B4621837222047B500EE2876 /* GPUImageRGBErosionFilter.m */; };\n\t\tB462185E222047B600EE2876 /* GPUImageRGBClosingFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621838222047B500EE2876 /* GPUImageRGBClosingFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462185F222047B600EE2876 /* GPUImageSingleComponentGaussianBlurFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621839222047B500EE2876 /* GPUImageSingleComponentGaussianBlurFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621860222047B600EE2876 /* GPUImageRawDataOutput.h in Headers */ = {isa = PBXBuildFile; fileRef = B462183A222047B500EE2876 /* GPUImageRawDataOutput.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621861222047B600EE2876 /* GPUImageSepiaFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462183B222047B500EE2876 /* GPUImageSepiaFilter.m */; };\n\t\tB4621862222047B600EE2876 /* GPUImageSaturationBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462183C222047B500EE2876 /* GPUImageSaturationBlendFilter.m */; };\n\t\tB4621863222047B600EE2876 /* GPUImageRGBOpeningFilter.m in Sources */ = 
{isa = PBXBuildFile; fileRef = B462183D222047B500EE2876 /* GPUImageRGBOpeningFilter.m */; };\n\t\tB4621864222047B600EE2876 /* GPUImageRGBDilationFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462183E222047B600EE2876 /* GPUImageRGBDilationFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621865222047B600EE2876 /* GPUImageSharpenFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462183F222047B600EE2876 /* GPUImageSharpenFilter.m */; };\n\t\tB4621866222047B600EE2876 /* GPUImageRGBClosingFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B4621840222047B600EE2876 /* GPUImageRGBClosingFilter.m */; };\n\t\tB4621867222047B600EE2876 /* GPUImageShiTomasiFeatureDetectionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B4621841222047B600EE2876 /* GPUImageShiTomasiFeatureDetectionFilter.m */; };\n\t\tB4621868222047B600EE2876 /* GPUImageScreenBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621842222047B600EE2876 /* GPUImageScreenBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621869222047B600EE2876 /* GPUImagePrewittEdgeDetectionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B4621843222047B600EE2876 /* GPUImagePrewittEdgeDetectionFilter.m */; };\n\t\tB4621880222047C400EE2876 /* GPUImageSobelEdgeDetectionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462186A222047C200EE2876 /* GPUImageSobelEdgeDetectionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621881222047C400EE2876 /* GPUImageStillCamera.h in Headers */ = {isa = PBXBuildFile; fileRef = B462186B222047C200EE2876 /* GPUImageStillCamera.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621882222047C400EE2876 /* GPUImageSoftLightBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462186C222047C200EE2876 /* GPUImageSoftLightBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621883222047C400EE2876 /* GPUImageSourceOverBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462186D222047C200EE2876 /* GPUImageSourceOverBlendFilter.m */; };\n\t\tB4621884222047C400EE2876 /* GPUImageStillCamera.m in Sources */ = {isa = PBXBuildFile; fileRef = B462186E222047C200EE2876 /* GPUImageStillCamera.m */; };\n\t\tB4621885222047C400EE2876 /* GPUImageSolarizeFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462186F222047C200EE2876 /* GPUImageSolarizeFilter.m */; };\n\t\tB4621886222047C400EE2876 /* GPUImageSphereRefractionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B4621870222047C200EE2876 /* GPUImageSphereRefractionFilter.m */; };\n\t\tB4621887222047C400EE2876 /* GPUImageSwirlFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621871222047C200EE2876 /* GPUImageSwirlFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621888222047C400EE2876 /* GPUImageSoftEleganceFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B4621872222047C200EE2876 /* GPUImageSoftEleganceFilter.m */; };\n\t\tB4621889222047C400EE2876 /* GPUImageSoftEleganceFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621873222047C200EE2876 /* GPUImageSoftEleganceFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462188A222047C400EE2876 /* GPUImageSolidColorGenerator.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621874222047C300EE2876 /* GPUImageSolidColorGenerator.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB462188B222047C400EE2876 /* GPUImageSourceOverBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621875222047C300EE2876 /* GPUImageSourceOverBlendFilter.h */; settings = 
{ATTRIBUTES = (Public, ); }; };\n\t\tB462188C222047C400EE2876 /* GPUImageSobelEdgeDetectionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B4621876222047C300EE2876 /* GPUImageSobelEdgeDetectionFilter.m */; };\n\t\tB462188D222047C400EE2876 /* GPUImageSwirlFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B4621877222047C300EE2876 /* GPUImageSwirlFilter.m */; };\n\t\tB462188E222047C400EE2876 /* GPUImageStretchDistortionFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B4621878222047C300EE2876 /* GPUImageStretchDistortionFilter.m */; };\n\t\tB462188F222047C400EE2876 /* GPUImageSubtractBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B4621879222047C300EE2876 /* GPUImageSubtractBlendFilter.m */; };\n\t\tB4621890222047C400EE2876 /* GPUImageSoftLightBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B462187A222047C300EE2876 /* GPUImageSoftLightBlendFilter.m */; };\n\t\tB4621891222047C400EE2876 /* GPUImageSubtractBlendFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462187B222047C300EE2876 /* GPUImageSubtractBlendFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621892222047C400EE2876 /* GPUImageStretchDistortionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462187C222047C300EE2876 /* GPUImageStretchDistortionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621893222047C400EE2876 /* GPUImageSphereRefractionFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462187D222047C400EE2876 /* GPUImageSphereRefractionFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621894222047C400EE2876 /* GPUImageSolidColorGenerator.m in Sources */ = {isa = PBXBuildFile; fileRef = B462187E222047C400EE2876 /* GPUImageSolidColorGenerator.m */; };\n\t\tB4621895222047C400EE2876 /* GPUImageSolarizeFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B462187F222047C400EE2876 /* GPUImageSolarizeFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4621898222048E500EE2876 /* GPUImageThreeInputFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B4621896222048E400EE2876 /* GPUImageThreeInputFilter.m */; };\n\t\tB4621899222048E500EE2876 /* GPUImageThreeInputFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B4621897222048E500EE2876 /* GPUImageThreeInputFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB47A5EEB22212D57006E07A4 /* GLImageLutFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B47A5EE722212D57006E07A4 /* GLImageLutFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB47A5EEC22212D57006E07A4 /* GLImageTwoLutFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B47A5EE822212D57006E07A4 /* GLImageTwoLutFilter.m */; };\n\t\tB47A5EED22212D57006E07A4 /* GLImageLutFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B47A5EE922212D57006E07A4 /* GLImageLutFilter.m */; };\n\t\tB47A5EEE22212D57006E07A4 /* GLImageTwoLutFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B47A5EEA22212D57006E07A4 /* GLImageTwoLutFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB47A5EF322212EA3006E07A4 /* GPUImageBeautifyFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B47A5EF122212EA3006E07A4 /* GPUImageBeautifyFilter.m */; };\n\t\tB47A5EF422212EA3006E07A4 /* GPUImageBeautifyFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B47A5EF222212EA3006E07A4 /* GPUImageBeautifyFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB49EF6332263521C00C220B0 /* GLImageGlitchEffectGridFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B49EF62F2263521C00C220B0 /* 
GLImageGlitchEffectGridFilter.m */; };\n\t\tB49EF6342263521C00C220B0 /* GLImageGlitchEffectLineFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B49EF6302263521C00C220B0 /* GLImageGlitchEffectLineFilter.m */; };\n\t\tB49EF6352263521C00C220B0 /* GLImageGlitchEffectGridFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B49EF6312263521C00C220B0 /* GLImageGlitchEffectGridFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB49EF6362263521C00C220B0 /* GLImageGlitchEffectLineFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B49EF6322263521C00C220B0 /* GLImageGlitchEffectLineFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4A9FE892265CFDA00BC6E51 /* GLImageFaceChangeFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B4A9FE872265CFDA00BC6E51 /* GLImageFaceChangeFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4A9FE8A2265CFDA00BC6E51 /* GLImageFaceChangeFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B4A9FE882265CFDA00BC6E51 /* GLImageFaceChangeFilter.m */; };\n\t\tB4A9FE992266409F00BC6E51 /* GLImageSoulOutFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B4A9FE972266409F00BC6E51 /* GLImageSoulOutFilter.m */; };\n\t\tB4A9FE9A2266409F00BC6E51 /* GLImageSoulOutFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B4A9FE982266409F00BC6E51 /* GLImageSoulOutFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4C64C1D2271FFF9001BB716 /* GLImageFaceDetectPointFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B4C64C1B2271FFF9001BB716 /* GLImageFaceDetectPointFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4C64C1E2271FFF9001BB716 /* GLImageFaceDetectPointFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B4C64C1C2271FFF9001BB716 /* GLImageFaceDetectPointFilter.m */; };\n\t\tB4C64C22227217C2001BB716 /* GLImageFaceChangeFilterGroup.h in Headers */ = {isa = PBXBuildFile; fileRef = B4C64C20227217C2001BB716 /* GLImageFaceChangeFilterGroup.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4C64C23227217C2001BB716 /* GLImageFaceChangeFilterGroup.m in Sources */ = {isa = PBXBuildFile; fileRef = B4C64C21227217C2001BB716 /* GLImageFaceChangeFilterGroup.m */; };\n\t\tB4D9F70C226A10EE00FD18FC /* GLImageBlurSnapViewFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B4D9F70A226A10EE00FD18FC /* GLImageBlurSnapViewFilter.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4D9F70D226A10EE00FD18FC /* GLImageBlurSnapViewFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B4D9F70B226A10EE00FD18FC /* GLImageBlurSnapViewFilter.m */; };\n\t\tB4DAAC47225724AB0094A3B2 /* GLImageMovie.m in Sources */ = {isa = PBXBuildFile; fileRef = B4DAAC43225724AB0094A3B2 /* GLImageMovie.m */; };\n\t\tB4DAAC48225724AB0094A3B2 /* GPUImageMovieWriterFix.m in Sources */ = {isa = PBXBuildFile; fileRef = B4DAAC44225724AB0094A3B2 /* GPUImageMovieWriterFix.m */; };\n\t\tB4DAAC49225724AB0094A3B2 /* GLImageMovie.h in Headers */ = {isa = PBXBuildFile; fileRef = B4DAAC45225724AB0094A3B2 /* GLImageMovie.h */; settings = {ATTRIBUTES = (Public, ); }; };\n\t\tB4DAAC4A225724AB0094A3B2 /* GPUImageMovieWriterFix.h in Headers */ = {isa = PBXBuildFile; fileRef = B4DAAC46225724AB0094A3B2 /* GPUImageMovieWriterFix.h */; settings = {ATTRIBUTES = (Public, ); }; };\n/* End PBXBuildFile section */\n\n/* Begin PBXContainerItemProxy section */\n\t\tB4620D4B22203FBE00EE2876 /* PBXContainerItemProxy */ = {\n\t\t\tisa = PBXContainerItemProxy;\n\t\t\tcontainerPortal = B4620D3722203FBE00EE2876 /* Project object */;\n\t\t\tproxyType = 
1;\n\t\t\tremoteGlobalIDString = B4620D3F22203FBE00EE2876;\n\t\t\tremoteInfo = GPURenderKit;\n\t\t};\n/* End PBXContainerItemProxy section */\n\n/* Begin PBXFileReference section */\n\t\tB413174C2277277200B4866F /* GLImageShapeFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GLImageShapeFilter.m; sourceTree = \"<group>\"; };\n\t\tB413174D2277277300B4866F /* GLImageShapeFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GLImageShapeFilter.h; sourceTree = \"<group>\"; };\n\t\tB413174E2277277300B4866F /* GLImageShapeHighDefinitionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GLImageShapeHighDefinitionFilter.h; sourceTree = \"<group>\"; };\n\t\tB413174F2277277300B4866F /* GLImageShapeHighDefinitionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GLImageShapeHighDefinitionFilter.m; sourceTree = \"<group>\"; };\n\t\tB41FE419226A260F008DF295 /* GLImageBlurSnapViewFilterGroup.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GLImageBlurSnapViewFilterGroup.h; sourceTree = \"<group>\"; };\n\t\tB41FE41A226A260F008DF295 /* GLImageBlurSnapViewFilterGroup.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = GLImageBlurSnapViewFilterGroup.m; sourceTree = \"<group>\"; };\n\t\tB42D8D60223F8A5500517EA7 /* GLImageCircleFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GLImageCircleFilter.m; sourceTree = \"<group>\"; };\n\t\tB42D8D61223F8A5600517EA7 /* GLImageCircleFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GLImageCircleFilter.h; sourceTree = \"<group>\"; };\n\t\tB42EC07F22520B6F00226FA0 /* GLImageThreePartitionGroupFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GLImageThreePartitionGroupFilter.m; sourceTree = \"<group>\"; };\n\t\tB42EC08022520B6F00226FA0 /* GLImageThreePartitionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GLImageThreePartitionFilter.h; sourceTree = \"<group>\"; };\n\t\tB42EC08122520B6F00226FA0 /* GLImageFourPointsMirrorFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GLImageFourPointsMirrorFilter.m; sourceTree = \"<group>\"; };\n\t\tB42EC08222520B6F00226FA0 /* GLImageThreePartitionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GLImageThreePartitionFilter.m; sourceTree = \"<group>\"; };\n\t\tB42EC08322520B6F00226FA0 /* GLImageThreePartitionGroupFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GLImageThreePartitionGroupFilter.h; sourceTree = \"<group>\"; };\n\t\tB42EC08422520B6F00226FA0 /* GLImageFourPointsMirrorFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GLImageFourPointsMirrorFilter.h; sourceTree = \"<group>\"; };\n\t\tB436FEC52266D28F009C5A0E /* GLImageZoomFilter.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GLImageZoomFilter.h; sourceTree = \"<group>\"; };\n\t\tB436FEC62266D28F009C5A0E /* GLImageZoomFilter.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = GLImageZoomFilter.m; sourceTree = \"<group>\"; };\n\t\tB436FEC92266DA2E009C5A0E /* 
GLImageWaterReflectionFilter.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GLImageWaterReflectionFilter.h; sourceTree = \"<group>\"; };\n\t\tB436FECA2266DA2E009C5A0E /* GLImageWaterReflectionFilter.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = GLImageWaterReflectionFilter.m; sourceTree = \"<group>\"; };\n\t\tB44ADD03222668DD00F17956 /* GLImageAddStickerFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GLImageAddStickerFilter.h; sourceTree = \"<group>\"; };\n\t\tB44ADD04222668DD00F17956 /* GLImageAddStickerWithEffectFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GLImageAddStickerWithEffectFilter.m; sourceTree = \"<group>\"; };\n\t\tB44ADD05222668DD00F17956 /* GLImageAddStickerFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GLImageAddStickerFilter.m; sourceTree = \"<group>\"; };\n\t\tB44ADD06222668DD00F17956 /* GLImageStickerFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GLImageStickerFilter.h; sourceTree = \"<group>\"; };\n\t\tB44ADD07222668DD00F17956 /* GLImageStickerFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GLImageStickerFilter.m; sourceTree = \"<group>\"; };\n\t\tB44ADD08222668DD00F17956 /* GLImageAddStickerWithEffectFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GLImageAddStickerWithEffectFilter.h; sourceTree = \"<group>\"; };\n\t\tB44ADD0F2226695100F17956 /* GLImageMixBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GLImageMixBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB44ADD102226695100F17956 /* GLImageBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GLImageBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB44ADD112226695100F17956 /* GLImageBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GLImageBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB44ADD122226695200F17956 /* GLImageMixBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GLImageMixBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB44ADD17222669AA00F17956 /* GLImageGassianBlurMixFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GLImageGassianBlurMixFilter.h; sourceTree = \"<group>\"; };\n\t\tB44ADD18222669AB00F17956 /* GLImageGassianBlurMixFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GLImageGassianBlurMixFilter.m; sourceTree = \"<group>\"; };\n\t\tB4620D4022203FBE00EE2876 /* GPURenderKit.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = GPURenderKit.framework; sourceTree = BUILT_PRODUCTS_DIR; };\n\t\tB4620D4322203FBE00EE2876 /* GPURenderKit.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GPURenderKit.h; sourceTree = \"<group>\"; };\n\t\tB4620D4422203FBE00EE2876 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = \"<group>\"; };\n\t\tB4620D4922203FBE00EE2876 /* GPURenderKitTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; 
path = GPURenderKitTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };\n\t\tB4620D4E22203FBE00EE2876 /* GPURenderKitTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = GPURenderKitTests.m; sourceTree = \"<group>\"; };\n\t\tB4620D5022203FBE00EE2876 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = \"<group>\"; };\n\t\tB46211402220435400EE2876 /* GPUImage.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImage.h; sourceTree = \"<group>\"; };\n\t\tB46211CA2220436100EE2876 /* GPUImageView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageView.h; sourceTree = \"<group>\"; };\n\t\tB46211CB2220436100EE2876 /* GPUImageContext.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageContext.m; sourceTree = \"<group>\"; };\n\t\tB46211CC2220436100EE2876 /* GPUImageMovieWriter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageMovieWriter.m; sourceTree = \"<group>\"; };\n\t\tB46211CE2220436100EE2876 /* GPUImageFramework.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageFramework.h; sourceTree = \"<group>\"; };\n\t\tB46211CF2220436100EE2876 /* module.modulemap */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = \"sourcecode.module-map\"; path = module.modulemap; sourceTree = \"<group>\"; };\n\t\tB46211D02220436100EE2876 /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = \"<group>\"; };\n\t\tB46211D12220436100EE2876 /* GPUImagePicture.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImagePicture.m; sourceTree = \"<group>\"; };\n\t\tB46211D22220436100EE2876 /* GPUImagePicture+TextureSubimage.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = \"GPUImagePicture+TextureSubimage.m\"; sourceTree = \"<group>\"; };\n\t\tB46211D32220436100EE2876 /* GPUImageContext.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageContext.h; sourceTree = \"<group>\"; };\n\t\tB46211D42220436100EE2876 /* GPUImageView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageView.m; sourceTree = \"<group>\"; };\n\t\tB46211D52220436100EE2876 /* GPUImageMovieWriter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageMovieWriter.h; sourceTree = \"<group>\"; };\n\t\tB46211D62220436100EE2876 /* GPUImagePicture.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImagePicture.h; sourceTree = \"<group>\"; };\n\t\tB46211D72220436100EE2876 /* GPUImagePicture+TextureSubimage.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = \"GPUImagePicture+TextureSubimage.h\"; sourceTree = \"<group>\"; };\n\t\tB46215F5222046DC00EE2876 /* GPUImageTwoPassFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageTwoPassFilter.m; sourceTree = \"<group>\"; };\n\t\tB46215F6222046DC00EE2876 /* GPUImageFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageFilter.m; sourceTree = \"<group>\"; 
};\n\t\tB46215F7222046DC00EE2876 /* GPUImageTextureOutput.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageTextureOutput.h; sourceTree = \"<group>\"; };\n\t\tB46215F8222046DC00EE2876 /* GPUImageTwoPassTextureSamplingFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageTwoPassTextureSamplingFilter.m; sourceTree = \"<group>\"; };\n\t\tB46215F9222046DC00EE2876 /* GPUImageTwoInputFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageTwoInputFilter.m; sourceTree = \"<group>\"; };\n\t\tB46215FA222046DC00EE2876 /* GPUImageMovie.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageMovie.m; sourceTree = \"<group>\"; };\n\t\tB46215FB222046DC00EE2876 /* GLProgram.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GLProgram.h; sourceTree = \"<group>\"; };\n\t\tB46215FC222046DC00EE2876 /* GPUImageMovie.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageMovie.h; sourceTree = \"<group>\"; };\n\t\tB46215FD222046DC00EE2876 /* GPUImageOutput.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageOutput.h; sourceTree = \"<group>\"; };\n\t\tB46215FE222046DC00EE2876 /* GPUImageMovieComposition.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageMovieComposition.m; sourceTree = \"<group>\"; };\n\t\tB46215FF222046DC00EE2876 /* GLProgram.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GLProgram.m; sourceTree = \"<group>\"; };\n\t\tB4621600222046DC00EE2876 /* GPUImageTextureInput.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageTextureInput.m; sourceTree = \"<group>\"; };\n\t\tB4621601222046DC00EE2876 /* GPUImageFourInputFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageFourInputFilter.h; sourceTree = \"<group>\"; };\n\t\tB4621602222046DC00EE2876 /* GPUImageFilterGroup.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageFilterGroup.m; sourceTree = \"<group>\"; };\n\t\tB4621603222046DD00EE2876 /* GPUImageOutput.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageOutput.m; sourceTree = \"<group>\"; };\n\t\tB4621604222046DD00EE2876 /* GPUImageTwoPassTextureSamplingFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageTwoPassTextureSamplingFilter.h; sourceTree = \"<group>\"; };\n\t\tB4621605222046DD00EE2876 /* GPUImageFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageFilter.h; sourceTree = \"<group>\"; };\n\t\tB4621606222046DD00EE2876 /* GPUImageFilterGroup.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageFilterGroup.h; sourceTree = \"<group>\"; };\n\t\tB4621607222046DD00EE2876 /* GPUImageFramebufferCache.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageFramebufferCache.h; sourceTree = \"<group>\"; };\n\t\tB4621608222046DD00EE2876 /* GPUImageVideoCamera.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path 
= GPUImageVideoCamera.m; sourceTree = \"<group>\"; };\n\t\tB4621609222046DD00EE2876 /* GPUImageTwoInputFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageTwoInputFilter.h; sourceTree = \"<group>\"; };\n\t\tB462160A222046DD00EE2876 /* GPUImageFramebuffer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageFramebuffer.m; sourceTree = \"<group>\"; };\n\t\tB462160B222046DD00EE2876 /* GPUImageBuffer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageBuffer.h; sourceTree = \"<group>\"; };\n\t\tB462160C222046DD00EE2876 /* GPUImageFramebuffer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageFramebuffer.h; sourceTree = \"<group>\"; };\n\t\tB462160D222046DE00EE2876 /* GPUImageTextureInput.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageTextureInput.h; sourceTree = \"<group>\"; };\n\t\tB462160E222046DE00EE2876 /* GPUImageBuffer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageBuffer.m; sourceTree = \"<group>\"; };\n\t\tB462160F222046DE00EE2876 /* GPUImageFramebufferCache.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageFramebufferCache.m; sourceTree = \"<group>\"; };\n\t\tB4621610222046DE00EE2876 /* GPUImageTwoPassFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageTwoPassFilter.h; sourceTree = \"<group>\"; };\n\t\tB4621611222046DE00EE2876 /* GPUImageVideoCamera.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageVideoCamera.h; sourceTree = \"<group>\"; };\n\t\tB4621612222046DE00EE2876 /* GPUImageTextureOutput.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageTextureOutput.m; sourceTree = \"<group>\"; };\n\t\tB4621613222046DF00EE2876 /* GPUImageFilterPipeline.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageFilterPipeline.m; sourceTree = \"<group>\"; };\n\t\tB4621614222046DF00EE2876 /* GPUImageMovieComposition.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageMovieComposition.h; sourceTree = \"<group>\"; };\n\t\tB4621615222046DF00EE2876 /* GPUImageFilterPipeline.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageFilterPipeline.h; sourceTree = \"<group>\"; };\n\t\tB4621616222046DF00EE2876 /* GPUImageFourInputFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageFourInputFilter.m; sourceTree = \"<group>\"; };\n\t\tB462163A2220474B00EE2876 /* GPUImage3x3ConvolutionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImage3x3ConvolutionFilter.m; sourceTree = \"<group>\"; };\n\t\tB462163B2220474B00EE2876 /* GPUImageAverageColor.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageAverageColor.h; sourceTree = \"<group>\"; };\n\t\tB462163C2220474B00EE2876 /* GPUImageBulgeDistortionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageBulgeDistortionFilter.m; sourceTree = \"<group>\"; };\n\t\tB462163D2220474B00EE2876 /* 
GPUImageBilateralFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageBilateralFilter.h; sourceTree = \"<group>\"; };\n\t\tB462163E2220474B00EE2876 /* GPUImageBoxBlurFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageBoxBlurFilter.h; sourceTree = \"<group>\"; };\n\t\tB462163F2220474B00EE2876 /* GPUImageBrightnessFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageBrightnessFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216402220474B00EE2876 /* GPUImageAmatorkaFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageAmatorkaFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216412220474B00EE2876 /* GPUImageBoxBlurFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageBoxBlurFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216422220474B00EE2876 /* GPUImageAverageLuminanceThresholdFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageAverageLuminanceThresholdFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216432220474B00EE2876 /* GPUImageAlphaBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageAlphaBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216442220474C00EE2876 /* GPUImageAverageLuminanceThresholdFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageAverageLuminanceThresholdFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216452220474C00EE2876 /* GPUImageAdaptiveThresholdFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageAdaptiveThresholdFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216462220474C00EE2876 /* GPUImageAverageColor.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageAverageColor.m; sourceTree = \"<group>\"; };\n\t\tB46216472220474C00EE2876 /* GPUImageAddBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageAddBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216482220474C00EE2876 /* GPUImageCannyEdgeDetectionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageCannyEdgeDetectionFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216492220474C00EE2876 /* GPUImageAlphaBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageAlphaBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB462164A2220474C00EE2876 /* GPUImageAddBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageAddBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB462164B2220474C00EE2876 /* GPUImageCannyEdgeDetectionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageCannyEdgeDetectionFilter.h; sourceTree = \"<group>\"; };\n\t\tB462164C2220474C00EE2876 /* GPUImageBulgeDistortionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageBulgeDistortionFilter.h; sourceTree = \"<group>\"; };\n\t\tB462164D2220474C00EE2876 /* GPUImageAdaptiveThresholdFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path 
= GPUImageAdaptiveThresholdFilter.m; sourceTree = \"<group>\"; };\n\t\tB462164E2220474C00EE2876 /* GPUImageBilateralFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageBilateralFilter.m; sourceTree = \"<group>\"; };\n\t\tB462164F2220474C00EE2876 /* GPUImageBrightnessFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageBrightnessFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216502220474D00EE2876 /* GPUImage3x3ConvolutionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImage3x3ConvolutionFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216512220474D00EE2876 /* GPUImage3x3TextureSamplingFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImage3x3TextureSamplingFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216522220474D00EE2876 /* GPUImage3x3TextureSamplingFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImage3x3TextureSamplingFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216532220474D00EE2876 /* GPUImageAmatorkaFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageAmatorkaFilter.m; sourceTree = \"<group>\"; };\n\t\tB462166E2220475C00EE2876 /* GPUImageClosingFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageClosingFilter.m; sourceTree = \"<group>\"; };\n\t\tB462166F2220475C00EE2876 /* GPUImageCGAColorspaceFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageCGAColorspaceFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216702220475C00EE2876 /* GPUImageColorInvertFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageColorInvertFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216712220475C00EE2876 /* GPUImageColourFASTFeatureDetector.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageColourFASTFeatureDetector.m; sourceTree = \"<group>\"; };\n\t\tB46216722220475C00EE2876 /* GPUImageChromaKeyFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageChromaKeyFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216732220475C00EE2876 /* GPUImageColorConversion.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageColorConversion.m; sourceTree = \"<group>\"; };\n\t\tB46216742220475C00EE2876 /* GPUImageColorMatrixFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageColorMatrixFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216752220475C00EE2876 /* GPUImageChromaKeyFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageChromaKeyFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216762220475C00EE2876 /* GPUImageCGAColorspaceFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageCGAColorspaceFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216772220475C00EE2876 /* GPUImageColorPackingFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageColorPackingFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216782220475D00EE2876 /* GPUImageColorDodgeBlendFilter.h */ 
= {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageColorDodgeBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216792220475D00EE2876 /* GPUImageChromaKeyBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageChromaKeyBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB462167A2220475D00EE2876 /* GPUImageColourFASTSamplingOperation.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageColourFASTSamplingOperation.h; sourceTree = \"<group>\"; };\n\t\tB462167B2220475D00EE2876 /* GPUImageColorBurnBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageColorBurnBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB462167C2220475D00EE2876 /* GPUImageColorBurnBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageColorBurnBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB462167D2220475D00EE2876 /* GPUImageChromaKeyBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageChromaKeyBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB462167E2220475D00EE2876 /* GPUImageColorDodgeBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageColorDodgeBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB462167F2220475D00EE2876 /* GPUImageColorPackingFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageColorPackingFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216802220475D00EE2876 /* GPUImageClosingFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageClosingFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216812220475D00EE2876 /* GPUImageColorConversion.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageColorConversion.h; sourceTree = \"<group>\"; };\n\t\tB46216822220475D00EE2876 /* GPUImageColorBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageColorBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216832220475E00EE2876 /* GPUImageColorMatrixFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageColorMatrixFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216842220475E00EE2876 /* GPUImageColorBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageColorBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216852220475E00EE2876 /* GPUImageColorInvertFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageColorInvertFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216862220475E00EE2876 /* GPUImageColorLocalBinaryPatternFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageColorLocalBinaryPatternFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216872220475E00EE2876 /* GPUImageColorLocalBinaryPatternFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageColorLocalBinaryPatternFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216882220475E00EE2876 /* GPUImageColourFASTFeatureDetector.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = 
sourcecode.c.h; path = GPUImageColourFASTFeatureDetector.h; sourceTree = \"<group>\"; };\n\t\tB46216892220475E00EE2876 /* GPUImageColourFASTSamplingOperation.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageColourFASTSamplingOperation.m; sourceTree = \"<group>\"; };\n\t\tB46216A62220476E00EE2876 /* GPUImageToonFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageToonFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216A72220476F00EE2876 /* GPUImageTwoInputCrossTextureSamplingFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageTwoInputCrossTextureSamplingFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216A82220476F00EE2876 /* GPUImageThresholdSketchFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageThresholdSketchFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216A92220476F00EE2876 /* GPUImageVibranceFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageVibranceFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216AA2220476F00EE2876 /* GPUImageXYDerivativeFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageXYDerivativeFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216AB2220476F00EE2876 /* GPUImageToneCurveFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageToneCurveFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216AC2220476F00EE2876 /* GPUImageToonFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageToonFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216AD2220476F00EE2876 /* GPUImageWeakPixelInclusionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageWeakPixelInclusionFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216AE2220476F00EE2876 /* GPUImageTransformFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageTransformFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216AF2220476F00EE2876 /* GPUImageThresholdedNonMaximumSuppressionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageThresholdedNonMaximumSuppressionFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216B02220476F00EE2876 /* GPUImageWeakPixelInclusionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageWeakPixelInclusionFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216B12220477000EE2876 /* GPUImageZoomBlurFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageZoomBlurFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216B22220477000EE2876 /* GPUImageUIElement.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageUIElement.m; sourceTree = \"<group>\"; };\n\t\tB46216B32220477000EE2876 /* GPUImageTiltShiftFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageTiltShiftFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216B52220477000EE2876 /* GPUImageToneCurveFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageToneCurveFilter.m; sourceTree = \"<group>\"; 
};\n\t\tB46216B62220477000EE2876 /* GPUImageZoomBlurFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageZoomBlurFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216B72220477000EE2876 /* GPUImageUnsharpMaskFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageUnsharpMaskFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216B82220477000EE2876 /* GPUImageXYDerivativeFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageXYDerivativeFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216B92220477000EE2876 /* GPUImageThresholdEdgeDetectionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageThresholdEdgeDetectionFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216BA2220477000EE2876 /* GPUImageUIElement.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageUIElement.h; sourceTree = \"<group>\"; };\n\t\tB46216BB2220477000EE2876 /* GPUImageVignetteFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageVignetteFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216BD2220477100EE2876 /* GPUImageVignetteFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageVignetteFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216BE2220477100EE2876 /* GPUImageWhiteBalanceFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageWhiteBalanceFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216BF2220477100EE2876 /* GPUImageTiltShiftFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageTiltShiftFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216C02220477100EE2876 /* GPUImageThresholdedNonMaximumSuppressionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageThresholdedNonMaximumSuppressionFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216C12220477100EE2876 /* GPUImageVibranceFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageVibranceFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216C22220477100EE2876 /* GPUImageUnsharpMaskFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageUnsharpMaskFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216C32220477100EE2876 /* GPUImageThresholdSketchFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageThresholdSketchFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216C42220477100EE2876 /* GPUImageVoronoiConsumerFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageVoronoiConsumerFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216C52220477100EE2876 /* GPUImageThresholdEdgeDetectionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageThresholdEdgeDetectionFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216C62220477100EE2876 /* GPUImageVoronoiConsumerFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageVoronoiConsumerFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216C72220477100EE2876 /* GPUImageTwoInputCrossTextureSamplingFilter.h */ = {isa = 
PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageTwoInputCrossTextureSamplingFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216C82220477200EE2876 /* GPUImageWhiteBalanceFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageWhiteBalanceFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216C92220477200EE2876 /* GPUImageTransformFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageTransformFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216EE2220479100EE2876 /* GPUImageHistogramFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageHistogramFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216EF2220479100EE2876 /* GPUImageExposureFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageExposureFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216F02220479100EE2876 /* GPUImageLaplacianFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageLaplacianFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216F12220479100EE2876 /* GPUImageJFAVoronoiFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageJFAVoronoiFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216F22220479100EE2876 /* GPUImageHalftoneFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageHalftoneFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216F32220479100EE2876 /* GPUImageLevelsFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageLevelsFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216F42220479100EE2876 /* GPUImageNormalBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageNormalBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216F52220479100EE2876 /* GPUImageMonochromeFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageMonochromeFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216F62220479200EE2876 /* GPUImageDivideBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageDivideBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216F72220479200EE2876 /* GPUImageLookupFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageLookupFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216F82220479200EE2876 /* GPUImageGlassSphereFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageGlassSphereFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216F92220479200EE2876 /* GPUImageHSBFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageHSBFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216FA2220479200EE2876 /* GPUImageHueFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageHueFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216FB2220479200EE2876 /* GPUImageiOSBlurFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageiOSBlurFilter.m; sourceTree = \"<group>\"; };\n\t\tB46216FC2220479200EE2876 /* GPUImageNobleCornerDetectionFilter.h */ = {isa = PBXFileReference; 
fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageNobleCornerDetectionFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216FD2220479200EE2876 /* GPUImageNonMaximumSuppressionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageNonMaximumSuppressionFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216FE2220479200EE2876 /* GPUImageHarrisCornerDetectionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageHarrisCornerDetectionFilter.h; sourceTree = \"<group>\"; };\n\t\tB46216FF2220479200EE2876 /* GPUImageHoughTransformLineDetector.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageHoughTransformLineDetector.m; sourceTree = \"<group>\"; };\n\t\tB46217002220479200EE2876 /* GPUImageLuminosity.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageLuminosity.h; sourceTree = \"<group>\"; };\n\t\tB46217012220479300EE2876 /* GPUImageLowPassFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageLowPassFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217022220479300EE2876 /* GPUImagePolkaDotFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImagePolkaDotFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217032220479300EE2876 /* GPUImageHueBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageHueBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217042220479300EE2876 /* GPUImageJFAVoronoiFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageJFAVoronoiFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217052220479300EE2876 /* GPUImageGrayscaleFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageGrayscaleFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217062220479300EE2876 /* GPUImageHistogramEqualizationFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageHistogramEqualizationFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217072220479300EE2876 /* GPUImageHardLightBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageHardLightBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217082220479300EE2876 /* GPUImageEmbossFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageEmbossFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217092220479300EE2876 /* GPUImageLineGenerator.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageLineGenerator.h; sourceTree = \"<group>\"; };\n\t\tB462170A2220479300EE2876 /* GPUImageLocalBinaryPatternFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageLocalBinaryPatternFilter.h; sourceTree = \"<group>\"; };\n\t\tB462170B2220479300EE2876 /* GPUImageHueBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageHueBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB462170C2220479400EE2876 /* GPUImageDirectionalNonMaximumSuppressionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = 
GPUImageDirectionalNonMaximumSuppressionFilter.h; sourceTree = \"<group>\"; };\n\t\tB462170D2220479400EE2876 /* GPUImageMultiplyBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageMultiplyBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB462170E2220479400EE2876 /* GPUImageOpeningFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageOpeningFilter.h; sourceTree = \"<group>\"; };\n\t\tB462170F2220479400EE2876 /* GPUImageHighlightShadowTintFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageHighlightShadowTintFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217102220479400EE2876 /* GPUImageCropFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageCropFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217112220479400EE2876 /* GPUImageHistogramGenerator.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageHistogramGenerator.h; sourceTree = \"<group>\"; };\n\t\tB46217122220479400EE2876 /* GPUImageHSBFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageHSBFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217132220479400EE2876 /* GPUImageCrosshatchFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageCrosshatchFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217142220479400EE2876 /* GPUImageHighPassFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageHighPassFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217152220479400EE2876 /* GPUImageMotionBlurFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageMotionBlurFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217162220479400EE2876 /* GPUImageDifferenceBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageDifferenceBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217172220479400EE2876 /* GPUImageCrosshatchFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageCrosshatchFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217182220479500EE2876 /* GPUImageHueFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageHueFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217192220479500EE2876 /* GPUImageGaussianSelectiveBlurFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageGaussianSelectiveBlurFilter.h; sourceTree = \"<group>\"; };\n\t\tB462171A2220479500EE2876 /* GPUImageExclusionBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageExclusionBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB462171B2220479500EE2876 /* GPUImageHardLightBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageHardLightBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB462171C2220479500EE2876 /* GPUImageKuwaharaFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageKuwaharaFilter.m; sourceTree = \"<group>\"; };\n\t\tB462171D2220479500EE2876 /* GPUImageLanczosResamplingFilter.m */ = {isa = PBXFileReference; 
fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageLanczosResamplingFilter.m; sourceTree = \"<group>\"; };\n\t\tB462171E2220479500EE2876 /* GPUImageHistogramFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageHistogramFilter.h; sourceTree = \"<group>\"; };\n\t\tB462171F2220479500EE2876 /* GPUImageMotionDetector.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageMotionDetector.h; sourceTree = \"<group>\"; };\n\t\tB46217202220479500EE2876 /* GPUImageHazeFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageHazeFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217212220479500EE2876 /* GPUImageCropFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageCropFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217222220479500EE2876 /* GPUImageLinearBurnBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageLinearBurnBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217232220479600EE2876 /* GPUImageGrayscaleFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageGrayscaleFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217242220479600EE2876 /* GPUImagePoissonBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImagePoissonBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217252220479600EE2876 /* GPUImageLaplacianFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageLaplacianFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217262220479600EE2876 /* GPUImageExclusionBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageExclusionBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217272220479600EE2876 /* GPUImageGammaFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageGammaFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217282220479600EE2876 /* GPUImageFalseColorFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageFalseColorFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217292220479600EE2876 /* GPUImageDarkenBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageDarkenBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB462172A2220479600EE2876 /* GPUImageCrosshairGenerator.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageCrosshairGenerator.m; sourceTree = \"<group>\"; };\n\t\tB462172B2220479600EE2876 /* GPUImageLineGenerator.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageLineGenerator.m; sourceTree = \"<group>\"; };\n\t\tB462172C2220479600EE2876 /* GPUImageDissolveBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageDissolveBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB462172D2220479600EE2876 /* GPUImageLowPassFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageLowPassFilter.h; sourceTree = \"<group>\"; };\n\t\tB462172E2220479700EE2876 /* GPUImageErosionFilter.h */ = {isa = PBXFileReference; 
fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageErosionFilter.h; sourceTree = \"<group>\"; };\n\t\tB462172F2220479700EE2876 /* GPUImageGammaFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageGammaFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217302220479700EE2876 /* GPUImageOverlayBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageOverlayBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217312220479700EE2876 /* GPUImageMissEtikateFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageMissEtikateFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217322220479700EE2876 /* GPUImageDirectionalSobelEdgeDetectionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageDirectionalSobelEdgeDetectionFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217332220479700EE2876 /* GPUImageLevelsFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageLevelsFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217342220479700EE2876 /* GPUImageKuwaharaFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageKuwaharaFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217352220479700EE2876 /* GPUImageFASTCornerDetectionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageFASTCornerDetectionFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217362220479700EE2876 /* GPUImageOverlayBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageOverlayBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217372220479700EE2876 /* GPUImageMedianFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageMedianFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217382220479700EE2876 /* GPUImagePinchDistortionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImagePinchDistortionFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217392220479800EE2876 /* GPUImageOpacityFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageOpacityFilter.m; sourceTree = \"<group>\"; };\n\t\tB462173A2220479800EE2876 /* GPUImageNormalBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageNormalBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB462173B2220479800EE2876 /* GPUImagePixellateFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImagePixellateFilter.m; sourceTree = \"<group>\"; };\n\t\tB462173C2220479800EE2876 /* GPUImageGaussianBlurFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageGaussianBlurFilter.h; sourceTree = \"<group>\"; };\n\t\tB462173D2220479800EE2876 /* GPUImagePerlinNoiseFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImagePerlinNoiseFilter.m; sourceTree = \"<group>\"; };\n\t\tB462173E2220479800EE2876 /* GPUImageContrastFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageContrastFilter.m; sourceTree = \"<group>\"; };\n\t\tB462173F2220479800EE2876 /* 
GPUImagePixellatePositionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImagePixellatePositionFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217402220479800EE2876 /* GPUImageLocalBinaryPatternFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageLocalBinaryPatternFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217412220479800EE2876 /* GPUImageHighlightShadowFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageHighlightShadowFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217422220479800EE2876 /* GPUImageLuminanceThresholdFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageLuminanceThresholdFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217432220479800EE2876 /* GPUImageDifferenceBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageDifferenceBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217442220479900EE2876 /* GPUImagePixellatePositionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImagePixellatePositionFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217452220479900EE2876 /* GPUImageHalftoneFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageHalftoneFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217462220479900EE2876 /* GPUImageMonochromeFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageMonochromeFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217472220479900EE2876 /* GPUImageNonMaximumSuppressionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageNonMaximumSuppressionFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217482220479900EE2876 /* GPUImageGaussianBlurPositionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageGaussianBlurPositionFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217492220479900EE2876 /* GPUImageKuwaharaRadius3Filter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageKuwaharaRadius3Filter.h; sourceTree = \"<group>\"; };\n\t\tB462174A2220479900EE2876 /* GPUImageKuwaharaRadius3Filter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageKuwaharaRadius3Filter.m; sourceTree = \"<group>\"; };\n\t\tB462174B2220479900EE2876 /* GPUImageHoughTransformLineDetector.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageHoughTransformLineDetector.h; sourceTree = \"<group>\"; };\n\t\tB462174C2220479900EE2876 /* GPUImageMaskFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageMaskFilter.h; sourceTree = \"<group>\"; };\n\t\tB462174D2220479900EE2876 /* GPUImageHazeFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageHazeFilter.m; sourceTree = \"<group>\"; };\n\t\tB462174E2220479A00EE2876 /* GPUImageFASTCornerDetectionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageFASTCornerDetectionFilter.h; sourceTree = \"<group>\"; };\n\t\tB462174F2220479A00EE2876 /* GPUImageMosaicFilter.h */ = {isa = PBXFileReference; 
fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageMosaicFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217502220479A00EE2876 /* GPUImagePolarPixellateFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImagePolarPixellateFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217512220479A00EE2876 /* GPUImageCrosshairGenerator.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageCrosshairGenerator.h; sourceTree = \"<group>\"; };\n\t\tB46217522220479A00EE2876 /* GPUImageLuminosityBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageLuminosityBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217532220479A00EE2876 /* GPUImageLightenBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageLightenBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217542220479A00EE2876 /* GPUImageHighlightShadowTintFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageHighlightShadowTintFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217552220479A00EE2876 /* GPUImageLuminanceRangeFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageLuminanceRangeFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217562220479A00EE2876 /* GPUImagePixellateFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImagePixellateFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217572220479A00EE2876 /* GPUImageDilationFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageDilationFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217582220479A00EE2876 /* GPUImageLuminanceRangeFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageLuminanceRangeFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217592220479B00EE2876 /* GPUImageMedianFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageMedianFilter.m; sourceTree = \"<group>\"; };\n\t\tB462175A2220479B00EE2876 /* GPUImageEmbossFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageEmbossFilter.h; sourceTree = \"<group>\"; };\n\t\tB462175B2220479B00EE2876 /* GPUImageHistogramGenerator.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageHistogramGenerator.m; sourceTree = \"<group>\"; };\n\t\tB462175C2220479B00EE2876 /* GPUImageMissEtikateFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageMissEtikateFilter.m; sourceTree = \"<group>\"; };\n\t\tB462175D2220479B00EE2876 /* GPUImageHighlightShadowFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageHighlightShadowFilter.m; sourceTree = \"<group>\"; };\n\t\tB462175E2220479B00EE2876 /* GPUImageParallelCoordinateLineTransformFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageParallelCoordinateLineTransformFilter.h; sourceTree = \"<group>\"; };\n\t\tB462175F2220479B00EE2876 /* GPUImageMotionBlurFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageMotionBlurFilter.h; sourceTree = 
\"<group>\"; };\n\t\tB46217602220479B00EE2876 /* GPUImageNobleCornerDetectionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageNobleCornerDetectionFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217612220479B00EE2876 /* GPUImagePoissonBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImagePoissonBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217622220479B00EE2876 /* GPUImageGlassSphereFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageGlassSphereFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217632220479C00EE2876 /* GPUImageiOSBlurFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageiOSBlurFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217642220479C00EE2876 /* GPUImageErosionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageErosionFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217652220479C00EE2876 /* GPUImageExposureFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageExposureFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217662220479C00EE2876 /* GPUImageHighPassFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageHighPassFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217672220479C00EE2876 /* GPUImageLuminosityBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageLuminosityBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217682220479C00EE2876 /* GPUImagePinchDistortionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImagePinchDistortionFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217692220479C00EE2876 /* GPUImageDivideBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageDivideBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB462176A2220479C00EE2876 /* GPUImageLuminosity.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageLuminosity.m; sourceTree = \"<group>\"; };\n\t\tB462176B2220479C00EE2876 /* GPUImageHarrisCornerDetectionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageHarrisCornerDetectionFilter.m; sourceTree = \"<group>\"; };\n\t\tB462176C2220479C00EE2876 /* GPUImageDirectionalSobelEdgeDetectionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageDirectionalSobelEdgeDetectionFilter.m; sourceTree = \"<group>\"; };\n\t\tB462176D2220479C00EE2876 /* GPUImageDilationFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageDilationFilter.h; sourceTree = \"<group>\"; };\n\t\tB462176E2220479D00EE2876 /* GPUImageLinearBurnBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageLinearBurnBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB462176F2220479D00EE2876 /* GPUImageLanczosResamplingFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageLanczosResamplingFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217702220479D00EE2876 /* GPUImageMotionDetector.m */ = {isa = 
PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageMotionDetector.m; sourceTree = \"<group>\"; };\n\t\tB46217712220479D00EE2876 /* GPUImageLuminanceThresholdFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageLuminanceThresholdFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217722220479D00EE2876 /* GPUImageHistogramEqualizationFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageHistogramEqualizationFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217732220479D00EE2876 /* GPUImageOpacityFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageOpacityFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217742220479D00EE2876 /* GPUImageGaussianBlurFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageGaussianBlurFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217752220479D00EE2876 /* GPUImageParallelCoordinateLineTransformFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageParallelCoordinateLineTransformFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217762220479D00EE2876 /* GPUImageDirectionalNonMaximumSuppressionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageDirectionalNonMaximumSuppressionFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217772220479D00EE2876 /* GPUImagePerlinNoiseFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImagePerlinNoiseFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217782220479D00EE2876 /* GPUImageMosaicFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageMosaicFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217792220479E00EE2876 /* GPUImagePolarPixellateFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImagePolarPixellateFilter.h; sourceTree = \"<group>\"; };\n\t\tB462177A2220479E00EE2876 /* GPUImageGaussianSelectiveBlurFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageGaussianSelectiveBlurFilter.m; sourceTree = \"<group>\"; };\n\t\tB462177B2220479E00EE2876 /* GPUImagePolkaDotFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImagePolkaDotFilter.h; sourceTree = \"<group>\"; };\n\t\tB462177C2220479E00EE2876 /* GPUImageFalseColorFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageFalseColorFilter.h; sourceTree = \"<group>\"; };\n\t\tB462177D2220479E00EE2876 /* GPUImageDarkenBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageDarkenBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB462177E2220479E00EE2876 /* GPUImageLookupFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageLookupFilter.h; sourceTree = \"<group>\"; };\n\t\tB462177F2220479E00EE2876 /* GPUImageContrastFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageContrastFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217802220479E00EE2876 /* GPUImageMultiplyBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = 
sourcecode.c.objc; path = GPUImageMultiplyBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217812220479E00EE2876 /* GPUImageGaussianBlurPositionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageGaussianBlurPositionFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217822220479E00EE2876 /* GPUImageMaskFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageMaskFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217832220479E00EE2876 /* GPUImageLightenBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageLightenBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB46217842220479F00EE2876 /* GPUImageDissolveBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageDissolveBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB46217852220479F00EE2876 /* GPUImageOpeningFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageOpeningFilter.m; sourceTree = \"<group>\"; };\n\t\tB462181E222047B300EE2876 /* GPUImageRawDataOutput.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageRawDataOutput.m; sourceTree = \"<group>\"; };\n\t\tB462181F222047B300EE2876 /* GPUImageRGBFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageRGBFilter.m; sourceTree = \"<group>\"; };\n\t\tB4621820222047B300EE2876 /* GPUImageSingleComponentGaussianBlurFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageSingleComponentGaussianBlurFilter.m; sourceTree = \"<group>\"; };\n\t\tB4621821222047B300EE2876 /* GPUImageSkinToneFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageSkinToneFilter.h; sourceTree = \"<group>\"; };\n\t\tB4621822222047B300EE2876 /* GPUImageSketchFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageSketchFilter.h; sourceTree = \"<group>\"; };\n\t\tB4621823222047B300EE2876 /* GPUImageRGBDilationFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageRGBDilationFilter.m; sourceTree = \"<group>\"; };\n\t\tB4621824222047B300EE2876 /* GPUImageRGBFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageRGBFilter.h; sourceTree = \"<group>\"; };\n\t\tB4621825222047B300EE2876 /* GPUImageScreenBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageScreenBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB4621826222047B300EE2876 /* GPUImageSketchFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageSketchFilter.m; sourceTree = \"<group>\"; };\n\t\tB4621827222047B300EE2876 /* GPUImageSaturationFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageSaturationFilter.m; sourceTree = \"<group>\"; };\n\t\tB4621828222047B400EE2876 /* GPUImagePosterizeFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImagePosterizeFilter.m; sourceTree = \"<group>\"; };\n\t\tB4621829222047B400EE2876 /* GPUImageSmoothToonFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; 
lastKnownFileType = sourcecode.c.h; path = GPUImageSmoothToonFilter.h; sourceTree = \"<group>\"; };\n\t\tB462182A222047B400EE2876 /* GPUImageSaturationBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageSaturationBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB462182B222047B400EE2876 /* GPUImageRGBOpeningFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageRGBOpeningFilter.h; sourceTree = \"<group>\"; };\n\t\tB462182C222047B400EE2876 /* GPUImagePrewittEdgeDetectionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImagePrewittEdgeDetectionFilter.h; sourceTree = \"<group>\"; };\n\t\tB462182D222047B400EE2876 /* GPUImageSmoothToonFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageSmoothToonFilter.m; sourceTree = \"<group>\"; };\n\t\tB462182E222047B400EE2876 /* GPUImageRawDataInput.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageRawDataInput.h; sourceTree = \"<group>\"; };\n\t\tB462182F222047B400EE2876 /* GPUImagePosterizeFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImagePosterizeFilter.h; sourceTree = \"<group>\"; };\n\t\tB4621830222047B400EE2876 /* GPUImageRGBErosionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageRGBErosionFilter.h; sourceTree = \"<group>\"; };\n\t\tB4621831222047B400EE2876 /* GPUImageSepiaFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageSepiaFilter.h; sourceTree = \"<group>\"; };\n\t\tB4621832222047B400EE2876 /* GPUImageSkinToneFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageSkinToneFilter.m; sourceTree = \"<group>\"; };\n\t\tB4621833222047B500EE2876 /* GPUImageSaturationFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageSaturationFilter.h; sourceTree = \"<group>\"; };\n\t\tB4621834222047B500EE2876 /* GPUImageSharpenFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageSharpenFilter.h; sourceTree = \"<group>\"; };\n\t\tB4621835222047B500EE2876 /* GPUImageRawDataInput.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageRawDataInput.m; sourceTree = \"<group>\"; };\n\t\tB4621836222047B500EE2876 /* GPUImageShiTomasiFeatureDetectionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageShiTomasiFeatureDetectionFilter.h; sourceTree = \"<group>\"; };\n\t\tB4621837222047B500EE2876 /* GPUImageRGBErosionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageRGBErosionFilter.m; sourceTree = \"<group>\"; };\n\t\tB4621838222047B500EE2876 /* GPUImageRGBClosingFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageRGBClosingFilter.h; sourceTree = \"<group>\"; };\n\t\tB4621839222047B500EE2876 /* GPUImageSingleComponentGaussianBlurFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageSingleComponentGaussianBlurFilter.h; sourceTree = \"<group>\"; };\n\t\tB462183A222047B500EE2876 /* GPUImageRawDataOutput.h 
*/ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageRawDataOutput.h; sourceTree = \"<group>\"; };\n\t\tB462183B222047B500EE2876 /* GPUImageSepiaFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageSepiaFilter.m; sourceTree = \"<group>\"; };\n\t\tB462183C222047B500EE2876 /* GPUImageSaturationBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageSaturationBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB462183D222047B500EE2876 /* GPUImageRGBOpeningFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageRGBOpeningFilter.m; sourceTree = \"<group>\"; };\n\t\tB462183E222047B600EE2876 /* GPUImageRGBDilationFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageRGBDilationFilter.h; sourceTree = \"<group>\"; };\n\t\tB462183F222047B600EE2876 /* GPUImageSharpenFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageSharpenFilter.m; sourceTree = \"<group>\"; };\n\t\tB4621840222047B600EE2876 /* GPUImageRGBClosingFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageRGBClosingFilter.m; sourceTree = \"<group>\"; };\n\t\tB4621841222047B600EE2876 /* GPUImageShiTomasiFeatureDetectionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageShiTomasiFeatureDetectionFilter.m; sourceTree = \"<group>\"; };\n\t\tB4621842222047B600EE2876 /* GPUImageScreenBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageScreenBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB4621843222047B600EE2876 /* GPUImagePrewittEdgeDetectionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImagePrewittEdgeDetectionFilter.m; sourceTree = \"<group>\"; };\n\t\tB462186A222047C200EE2876 /* GPUImageSobelEdgeDetectionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageSobelEdgeDetectionFilter.h; sourceTree = \"<group>\"; };\n\t\tB462186B222047C200EE2876 /* GPUImageStillCamera.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageStillCamera.h; sourceTree = \"<group>\"; };\n\t\tB462186C222047C200EE2876 /* GPUImageSoftLightBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageSoftLightBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB462186D222047C200EE2876 /* GPUImageSourceOverBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageSourceOverBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB462186E222047C200EE2876 /* GPUImageStillCamera.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageStillCamera.m; sourceTree = \"<group>\"; };\n\t\tB462186F222047C200EE2876 /* GPUImageSolarizeFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageSolarizeFilter.m; sourceTree = \"<group>\"; };\n\t\tB4621870222047C200EE2876 /* GPUImageSphereRefractionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageSphereRefractionFilter.m; 
sourceTree = \"<group>\"; };\n\t\tB4621871222047C200EE2876 /* GPUImageSwirlFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageSwirlFilter.h; sourceTree = \"<group>\"; };\n\t\tB4621872222047C200EE2876 /* GPUImageSoftEleganceFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageSoftEleganceFilter.m; sourceTree = \"<group>\"; };\n\t\tB4621873222047C200EE2876 /* GPUImageSoftEleganceFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageSoftEleganceFilter.h; sourceTree = \"<group>\"; };\n\t\tB4621874222047C300EE2876 /* GPUImageSolidColorGenerator.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageSolidColorGenerator.h; sourceTree = \"<group>\"; };\n\t\tB4621875222047C300EE2876 /* GPUImageSourceOverBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageSourceOverBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB4621876222047C300EE2876 /* GPUImageSobelEdgeDetectionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageSobelEdgeDetectionFilter.m; sourceTree = \"<group>\"; };\n\t\tB4621877222047C300EE2876 /* GPUImageSwirlFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageSwirlFilter.m; sourceTree = \"<group>\"; };\n\t\tB4621878222047C300EE2876 /* GPUImageStretchDistortionFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageStretchDistortionFilter.m; sourceTree = \"<group>\"; };\n\t\tB4621879222047C300EE2876 /* GPUImageSubtractBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageSubtractBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB462187A222047C300EE2876 /* GPUImageSoftLightBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageSoftLightBlendFilter.m; sourceTree = \"<group>\"; };\n\t\tB462187B222047C300EE2876 /* GPUImageSubtractBlendFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageSubtractBlendFilter.h; sourceTree = \"<group>\"; };\n\t\tB462187C222047C300EE2876 /* GPUImageStretchDistortionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageStretchDistortionFilter.h; sourceTree = \"<group>\"; };\n\t\tB462187D222047C400EE2876 /* GPUImageSphereRefractionFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageSphereRefractionFilter.h; sourceTree = \"<group>\"; };\n\t\tB462187E222047C400EE2876 /* GPUImageSolidColorGenerator.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageSolidColorGenerator.m; sourceTree = \"<group>\"; };\n\t\tB462187F222047C400EE2876 /* GPUImageSolarizeFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageSolarizeFilter.h; sourceTree = \"<group>\"; };\n\t\tB4621896222048E400EE2876 /* GPUImageThreeInputFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageThreeInputFilter.m; sourceTree = \"<group>\"; };\n\t\tB4621897222048E500EE2876 /* GPUImageThreeInputFilter.h */ = {isa = PBXFileReference; 
fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageThreeInputFilter.h; sourceTree = \"<group>\"; };\n\t\tB47A5EE722212D57006E07A4 /* GLImageLutFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GLImageLutFilter.h; sourceTree = \"<group>\"; };\n\t\tB47A5EE822212D57006E07A4 /* GLImageTwoLutFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GLImageTwoLutFilter.m; sourceTree = \"<group>\"; };\n\t\tB47A5EE922212D57006E07A4 /* GLImageLutFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GLImageLutFilter.m; sourceTree = \"<group>\"; };\n\t\tB47A5EEA22212D57006E07A4 /* GLImageTwoLutFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GLImageTwoLutFilter.h; sourceTree = \"<group>\"; };\n\t\tB47A5EF122212EA3006E07A4 /* GPUImageBeautifyFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageBeautifyFilter.m; sourceTree = \"<group>\"; };\n\t\tB47A5EF222212EA3006E07A4 /* GPUImageBeautifyFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageBeautifyFilter.h; sourceTree = \"<group>\"; };\n\t\tB49EF62F2263521C00C220B0 /* GLImageGlitchEffectGridFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GLImageGlitchEffectGridFilter.m; sourceTree = \"<group>\"; };\n\t\tB49EF6302263521C00C220B0 /* GLImageGlitchEffectLineFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GLImageGlitchEffectLineFilter.m; sourceTree = \"<group>\"; };\n\t\tB49EF6312263521C00C220B0 /* GLImageGlitchEffectGridFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GLImageGlitchEffectGridFilter.h; sourceTree = \"<group>\"; };\n\t\tB49EF6322263521C00C220B0 /* GLImageGlitchEffectLineFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GLImageGlitchEffectLineFilter.h; sourceTree = \"<group>\"; };\n\t\tB4A9FE872265CFDA00BC6E51 /* GLImageFaceChangeFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GLImageFaceChangeFilter.h; sourceTree = \"<group>\"; };\n\t\tB4A9FE882265CFDA00BC6E51 /* GLImageFaceChangeFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GLImageFaceChangeFilter.m; sourceTree = \"<group>\"; };\n\t\tB4A9FE972266409F00BC6E51 /* GLImageSoulOutFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GLImageSoulOutFilter.m; sourceTree = \"<group>\"; };\n\t\tB4A9FE982266409F00BC6E51 /* GLImageSoulOutFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GLImageSoulOutFilter.h; sourceTree = \"<group>\"; };\n\t\tB4C64C1B2271FFF9001BB716 /* GLImageFaceDetectPointFilter.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GLImageFaceDetectPointFilter.h; sourceTree = \"<group>\"; };\n\t\tB4C64C1C2271FFF9001BB716 /* GLImageFaceDetectPointFilter.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = GLImageFaceDetectPointFilter.m; sourceTree = \"<group>\"; };\n\t\tB4C64C20227217C2001BB716 /* GLImageFaceChangeFilterGroup.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = 
GLImageFaceChangeFilterGroup.h; sourceTree = \"<group>\"; };\n\t\tB4C64C21227217C2001BB716 /* GLImageFaceChangeFilterGroup.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = GLImageFaceChangeFilterGroup.m; sourceTree = \"<group>\"; };\n\t\tB4D9F70A226A10EE00FD18FC /* GLImageBlurSnapViewFilter.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GLImageBlurSnapViewFilter.h; sourceTree = \"<group>\"; };\n\t\tB4D9F70B226A10EE00FD18FC /* GLImageBlurSnapViewFilter.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = GLImageBlurSnapViewFilter.m; sourceTree = \"<group>\"; };\n\t\tB4DAAC43225724AB0094A3B2 /* GLImageMovie.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GLImageMovie.m; sourceTree = \"<group>\"; };\n\t\tB4DAAC44225724AB0094A3B2 /* GPUImageMovieWriterFix.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GPUImageMovieWriterFix.m; sourceTree = \"<group>\"; };\n\t\tB4DAAC45225724AB0094A3B2 /* GLImageMovie.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GLImageMovie.h; sourceTree = \"<group>\"; };\n\t\tB4DAAC46225724AB0094A3B2 /* GPUImageMovieWriterFix.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPUImageMovieWriterFix.h; sourceTree = \"<group>\"; };\n/* End PBXFileReference section */\n\n/* Begin PBXFrameworksBuildPhase section */\n\t\tB4620D3D22203FBE00EE2876 /* Frameworks */ = {\n\t\t\tisa = PBXFrameworksBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = (\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n\t\tB4620D4622203FBE00EE2876 /* Frameworks */ = {\n\t\t\tisa = PBXFrameworksBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = (\n\t\t\t\tB4620D4A22203FBE00EE2876 /* GPURenderKit.framework in Frameworks */,\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n/* End PBXFrameworksBuildPhase section */\n\n/* Begin PBXGroup section */\n\t\tB42EC07E22520B6F00226FA0 /* DouYinEffect */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB42EC08322520B6F00226FA0 /* GLImageThreePartitionGroupFilter.h */,\n\t\t\t\tB42EC07F22520B6F00226FA0 /* GLImageThreePartitionGroupFilter.m */,\n\t\t\t\tB42EC08022520B6F00226FA0 /* GLImageThreePartitionFilter.h */,\n\t\t\t\tB42EC08222520B6F00226FA0 /* GLImageThreePartitionFilter.m */,\n\t\t\t\tB42EC08422520B6F00226FA0 /* GLImageFourPointsMirrorFilter.h */,\n\t\t\t\tB42EC08122520B6F00226FA0 /* GLImageFourPointsMirrorFilter.m */,\n\t\t\t\tB49EF6312263521C00C220B0 /* GLImageGlitchEffectGridFilter.h */,\n\t\t\t\tB49EF62F2263521C00C220B0 /* GLImageGlitchEffectGridFilter.m */,\n\t\t\t\tB49EF6322263521C00C220B0 /* GLImageGlitchEffectLineFilter.h */,\n\t\t\t\tB49EF6302263521C00C220B0 /* GLImageGlitchEffectLineFilter.m */,\n\t\t\t\tB4A9FE982266409F00BC6E51 /* GLImageSoulOutFilter.h */,\n\t\t\t\tB4A9FE972266409F00BC6E51 /* GLImageSoulOutFilter.m */,\n\t\t\t\tB436FEC52266D28F009C5A0E /* GLImageZoomFilter.h */,\n\t\t\t\tB436FEC62266D28F009C5A0E /* GLImageZoomFilter.m */,\n\t\t\t\tB436FEC92266DA2E009C5A0E /* GLImageWaterReflectionFilter.h */,\n\t\t\t\tB436FECA2266DA2E009C5A0E /* GLImageWaterReflectionFilter.m */,\n\t\t\t\tB4D9F70A226A10EE00FD18FC /* GLImageBlurSnapViewFilter.h */,\n\t\t\t\tB4D9F70B226A10EE00FD18FC /* GLImageBlurSnapViewFilter.m */,\n\t\t\t\tB41FE419226A260F008DF295 /* GLImageBlurSnapViewFilterGroup.h */,\n\t\t\t\tB41FE41A226A260F008DF295 /* 
GLImageBlurSnapViewFilterGroup.m */,\n\t\t\t);\n\t\t\tpath = DouYinEffect;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB4620D3622203FBE00EE2876 = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB4620D4222203FBE00EE2876 /* GPURenderKit */,\n\t\t\t\tB4620D4D22203FBE00EE2876 /* GPURenderKitTests */,\n\t\t\t\tB4620D4122203FBE00EE2876 /* Products */,\n\t\t\t);\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB4620D4122203FBE00EE2876 /* Products */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB4620D4022203FBE00EE2876 /* GPURenderKit.framework */,\n\t\t\t\tB4620D4922203FBE00EE2876 /* GPURenderKitTests.xctest */,\n\t\t\t);\n\t\t\tname = Products;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB4620D4222203FBE00EE2876 /* GPURenderKit */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB4620D4322203FBE00EE2876 /* GPURenderKit.h */,\n\t\t\t\tB4620D982220430300EE2876 /* GPUImage */,\n\t\t\t\tB4620D4422203FBE00EE2876 /* Info.plist */,\n\t\t\t);\n\t\t\tpath = GPURenderKit;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB4620D4D22203FBE00EE2876 /* GPURenderKitTests */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB4620D4E22203FBE00EE2876 /* GPURenderKitTests.m */,\n\t\t\t\tB4620D5022203FBE00EE2876 /* Info.plist */,\n\t\t\t);\n\t\t\tpath = GPURenderKitTests;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB4620D982220430300EE2876 /* GPUImage */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB46211402220435400EE2876 /* GPUImage.h */,\n\t\t\t\tB47A5EE522212D57006E07A4 /* GLFilters */,\n\t\t\t\tB4621353222046C700EE2876 /* BaseClass */,\n\t\t\t\tB4621376222046C700EE2876 /* Filters */,\n\t\t\t\tB46211C92220436100EE2876 /* iOS */,\n\t\t\t);\n\t\t\tpath = GPUImage;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB46211C92220436100EE2876 /* iOS */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB46211CA2220436100EE2876 /* GPUImageView.h */,\n\t\t\t\tB46211CB2220436100EE2876 /* GPUImageContext.m */,\n\t\t\t\tB46211CC2220436100EE2876 /* GPUImageMovieWriter.m */,\n\t\t\t\tB46211CD2220436100EE2876 /* Framework */,\n\t\t\t\tB46211D12220436100EE2876 /* GPUImagePicture.m */,\n\t\t\t\tB46211D22220436100EE2876 /* GPUImagePicture+TextureSubimage.m */,\n\t\t\t\tB46211D32220436100EE2876 /* GPUImageContext.h */,\n\t\t\t\tB46211D42220436100EE2876 /* GPUImageView.m */,\n\t\t\t\tB46211D52220436100EE2876 /* GPUImageMovieWriter.h */,\n\t\t\t\tB46211D62220436100EE2876 /* GPUImagePicture.h */,\n\t\t\t\tB46211D72220436100EE2876 /* GPUImagePicture+TextureSubimage.h */,\n\t\t\t);\n\t\t\tpath = iOS;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB46211CD2220436100EE2876 /* Framework */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB46211CE2220436100EE2876 /* GPUImageFramework.h */,\n\t\t\t\tB46211CF2220436100EE2876 /* module.modulemap */,\n\t\t\t\tB46211D02220436100EE2876 /* Info.plist */,\n\t\t\t);\n\t\t\tpath = Framework;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB4621353222046C700EE2876 /* BaseClass */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB4621897222048E500EE2876 /* GPUImageThreeInputFilter.h */,\n\t\t\t\tB4621896222048E400EE2876 /* GPUImageThreeInputFilter.m */,\n\t\t\t\tB46215FB222046DC00EE2876 /* GLProgram.h */,\n\t\t\t\tB46215FF222046DC00EE2876 /* GLProgram.m */,\n\t\t\t\tB462160B222046DD00EE2876 /* GPUImageBuffer.h */,\n\t\t\t\tB462160E222046DE00EE2876 /* GPUImageBuffer.m */,\n\t\t\t\tB4621605222046DD00EE2876 /* GPUImageFilter.h */,\n\t\t\t\tB46215F6222046DC00EE2876 /* GPUImageFilter.m */,\n\t\t\t\tB4621606222046DD00EE2876 /* GPUImageFilterGroup.h 
*/,\n\t\t\t\tB4621602222046DC00EE2876 /* GPUImageFilterGroup.m */,\n\t\t\t\tB4621615222046DF00EE2876 /* GPUImageFilterPipeline.h */,\n\t\t\t\tB4621613222046DF00EE2876 /* GPUImageFilterPipeline.m */,\n\t\t\t\tB4621601222046DC00EE2876 /* GPUImageFourInputFilter.h */,\n\t\t\t\tB4621616222046DF00EE2876 /* GPUImageFourInputFilter.m */,\n\t\t\t\tB462160C222046DD00EE2876 /* GPUImageFramebuffer.h */,\n\t\t\t\tB462160A222046DD00EE2876 /* GPUImageFramebuffer.m */,\n\t\t\t\tB4621607222046DD00EE2876 /* GPUImageFramebufferCache.h */,\n\t\t\t\tB462160F222046DE00EE2876 /* GPUImageFramebufferCache.m */,\n\t\t\t\tB46215FC222046DC00EE2876 /* GPUImageMovie.h */,\n\t\t\t\tB46215FA222046DC00EE2876 /* GPUImageMovie.m */,\n\t\t\t\tB4621614222046DF00EE2876 /* GPUImageMovieComposition.h */,\n\t\t\t\tB46215FE222046DC00EE2876 /* GPUImageMovieComposition.m */,\n\t\t\t\tB46215FD222046DC00EE2876 /* GPUImageOutput.h */,\n\t\t\t\tB4621603222046DD00EE2876 /* GPUImageOutput.m */,\n\t\t\t\tB462160D222046DE00EE2876 /* GPUImageTextureInput.h */,\n\t\t\t\tB4621600222046DC00EE2876 /* GPUImageTextureInput.m */,\n\t\t\t\tB46215F7222046DC00EE2876 /* GPUImageTextureOutput.h */,\n\t\t\t\tB4621612222046DE00EE2876 /* GPUImageTextureOutput.m */,\n\t\t\t\tB4621609222046DD00EE2876 /* GPUImageTwoInputFilter.h */,\n\t\t\t\tB46215F9222046DC00EE2876 /* GPUImageTwoInputFilter.m */,\n\t\t\t\tB4621610222046DE00EE2876 /* GPUImageTwoPassFilter.h */,\n\t\t\t\tB46215F5222046DC00EE2876 /* GPUImageTwoPassFilter.m */,\n\t\t\t\tB4621604222046DD00EE2876 /* GPUImageTwoPassTextureSamplingFilter.h */,\n\t\t\t\tB46215F8222046DC00EE2876 /* GPUImageTwoPassTextureSamplingFilter.m */,\n\t\t\t\tB4621611222046DE00EE2876 /* GPUImageVideoCamera.h */,\n\t\t\t\tB4621608222046DD00EE2876 /* GPUImageVideoCamera.m */,\n\t\t\t);\n\t\t\tpath = BaseClass;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB4621376222046C700EE2876 /* Filters */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB462186A222047C200EE2876 /* GPUImageSobelEdgeDetectionFilter.h */,\n\t\t\t\tB4621876222047C300EE2876 /* GPUImageSobelEdgeDetectionFilter.m */,\n\t\t\t\tB4621873222047C200EE2876 /* GPUImageSoftEleganceFilter.h */,\n\t\t\t\tB4621872222047C200EE2876 /* GPUImageSoftEleganceFilter.m */,\n\t\t\t\tB462186C222047C200EE2876 /* GPUImageSoftLightBlendFilter.h */,\n\t\t\t\tB462187A222047C300EE2876 /* GPUImageSoftLightBlendFilter.m */,\n\t\t\t\tB462187F222047C400EE2876 /* GPUImageSolarizeFilter.h */,\n\t\t\t\tB462186F222047C200EE2876 /* GPUImageSolarizeFilter.m */,\n\t\t\t\tB4621874222047C300EE2876 /* GPUImageSolidColorGenerator.h */,\n\t\t\t\tB462187E222047C400EE2876 /* GPUImageSolidColorGenerator.m */,\n\t\t\t\tB4621875222047C300EE2876 /* GPUImageSourceOverBlendFilter.h */,\n\t\t\t\tB462186D222047C200EE2876 /* GPUImageSourceOverBlendFilter.m */,\n\t\t\t\tB462187D222047C400EE2876 /* GPUImageSphereRefractionFilter.h */,\n\t\t\t\tB4621870222047C200EE2876 /* GPUImageSphereRefractionFilter.m */,\n\t\t\t\tB462186B222047C200EE2876 /* GPUImageStillCamera.h */,\n\t\t\t\tB462186E222047C200EE2876 /* GPUImageStillCamera.m */,\n\t\t\t\tB462187C222047C300EE2876 /* GPUImageStretchDistortionFilter.h */,\n\t\t\t\tB4621878222047C300EE2876 /* GPUImageStretchDistortionFilter.m */,\n\t\t\t\tB462187B222047C300EE2876 /* GPUImageSubtractBlendFilter.h */,\n\t\t\t\tB4621879222047C300EE2876 /* GPUImageSubtractBlendFilter.m */,\n\t\t\t\tB4621871222047C200EE2876 /* GPUImageSwirlFilter.h */,\n\t\t\t\tB4621877222047C300EE2876 /* GPUImageSwirlFilter.m */,\n\t\t\t\tB462182F222047B400EE2876 /* GPUImagePosterizeFilter.h 
*/,\n\t\t\t\tB4621828222047B400EE2876 /* GPUImagePosterizeFilter.m */,\n\t\t\t\tB462182C222047B400EE2876 /* GPUImagePrewittEdgeDetectionFilter.h */,\n\t\t\t\tB4621843222047B600EE2876 /* GPUImagePrewittEdgeDetectionFilter.m */,\n\t\t\t\tB462182E222047B400EE2876 /* GPUImageRawDataInput.h */,\n\t\t\t\tB4621835222047B500EE2876 /* GPUImageRawDataInput.m */,\n\t\t\t\tB462183A222047B500EE2876 /* GPUImageRawDataOutput.h */,\n\t\t\t\tB462181E222047B300EE2876 /* GPUImageRawDataOutput.m */,\n\t\t\t\tB4621838222047B500EE2876 /* GPUImageRGBClosingFilter.h */,\n\t\t\t\tB4621840222047B600EE2876 /* GPUImageRGBClosingFilter.m */,\n\t\t\t\tB462183E222047B600EE2876 /* GPUImageRGBDilationFilter.h */,\n\t\t\t\tB4621823222047B300EE2876 /* GPUImageRGBDilationFilter.m */,\n\t\t\t\tB4621830222047B400EE2876 /* GPUImageRGBErosionFilter.h */,\n\t\t\t\tB4621837222047B500EE2876 /* GPUImageRGBErosionFilter.m */,\n\t\t\t\tB4621824222047B300EE2876 /* GPUImageRGBFilter.h */,\n\t\t\t\tB462181F222047B300EE2876 /* GPUImageRGBFilter.m */,\n\t\t\t\tB462182B222047B400EE2876 /* GPUImageRGBOpeningFilter.h */,\n\t\t\t\tB462183D222047B500EE2876 /* GPUImageRGBOpeningFilter.m */,\n\t\t\t\tB462182A222047B400EE2876 /* GPUImageSaturationBlendFilter.h */,\n\t\t\t\tB462183C222047B500EE2876 /* GPUImageSaturationBlendFilter.m */,\n\t\t\t\tB4621833222047B500EE2876 /* GPUImageSaturationFilter.h */,\n\t\t\t\tB4621827222047B300EE2876 /* GPUImageSaturationFilter.m */,\n\t\t\t\tB4621842222047B600EE2876 /* GPUImageScreenBlendFilter.h */,\n\t\t\t\tB4621825222047B300EE2876 /* GPUImageScreenBlendFilter.m */,\n\t\t\t\tB4621831222047B400EE2876 /* GPUImageSepiaFilter.h */,\n\t\t\t\tB462183B222047B500EE2876 /* GPUImageSepiaFilter.m */,\n\t\t\t\tB4621834222047B500EE2876 /* GPUImageSharpenFilter.h */,\n\t\t\t\tB462183F222047B600EE2876 /* GPUImageSharpenFilter.m */,\n\t\t\t\tB4621836222047B500EE2876 /* GPUImageShiTomasiFeatureDetectionFilter.h */,\n\t\t\t\tB4621841222047B600EE2876 /* GPUImageShiTomasiFeatureDetectionFilter.m */,\n\t\t\t\tB4621839222047B500EE2876 /* GPUImageSingleComponentGaussianBlurFilter.h */,\n\t\t\t\tB4621820222047B300EE2876 /* GPUImageSingleComponentGaussianBlurFilter.m */,\n\t\t\t\tB4621822222047B300EE2876 /* GPUImageSketchFilter.h */,\n\t\t\t\tB4621826222047B300EE2876 /* GPUImageSketchFilter.m */,\n\t\t\t\tB4621821222047B300EE2876 /* GPUImageSkinToneFilter.h */,\n\t\t\t\tB4621832222047B400EE2876 /* GPUImageSkinToneFilter.m */,\n\t\t\t\tB4621829222047B400EE2876 /* GPUImageSmoothToonFilter.h */,\n\t\t\t\tB462182D222047B400EE2876 /* GPUImageSmoothToonFilter.m */,\n\t\t\t\tB462177F2220479E00EE2876 /* GPUImageContrastFilter.h */,\n\t\t\t\tB462173E2220479800EE2876 /* GPUImageContrastFilter.m */,\n\t\t\t\tB46217102220479400EE2876 /* GPUImageCropFilter.h */,\n\t\t\t\tB46217212220479500EE2876 /* GPUImageCropFilter.m */,\n\t\t\t\tB46217512220479A00EE2876 /* GPUImageCrosshairGenerator.h */,\n\t\t\t\tB462172A2220479600EE2876 /* GPUImageCrosshairGenerator.m */,\n\t\t\t\tB46217132220479400EE2876 /* GPUImageCrosshatchFilter.h */,\n\t\t\t\tB46217172220479400EE2876 /* GPUImageCrosshatchFilter.m */,\n\t\t\t\tB462177D2220479E00EE2876 /* GPUImageDarkenBlendFilter.h */,\n\t\t\t\tB46217292220479600EE2876 /* GPUImageDarkenBlendFilter.m */,\n\t\t\t\tB46217432220479800EE2876 /* GPUImageDifferenceBlendFilter.h */,\n\t\t\t\tB46217162220479400EE2876 /* GPUImageDifferenceBlendFilter.m */,\n\t\t\t\tB462176D2220479C00EE2876 /* GPUImageDilationFilter.h */,\n\t\t\t\tB46217572220479A00EE2876 /* GPUImageDilationFilter.m */,\n\t\t\t\tB462170C2220479400EE2876 /* 
GPUImageDirectionalNonMaximumSuppressionFilter.h */,\n\t\t\t\tB46217762220479D00EE2876 /* GPUImageDirectionalNonMaximumSuppressionFilter.m */,\n\t\t\t\tB46217322220479700EE2876 /* GPUImageDirectionalSobelEdgeDetectionFilter.h */,\n\t\t\t\tB462176C2220479C00EE2876 /* GPUImageDirectionalSobelEdgeDetectionFilter.m */,\n\t\t\t\tB462172C2220479600EE2876 /* GPUImageDissolveBlendFilter.h */,\n\t\t\t\tB46217842220479F00EE2876 /* GPUImageDissolveBlendFilter.m */,\n\t\t\t\tB46216F62220479200EE2876 /* GPUImageDivideBlendFilter.h */,\n\t\t\t\tB46217692220479C00EE2876 /* GPUImageDivideBlendFilter.m */,\n\t\t\t\tB462175A2220479B00EE2876 /* GPUImageEmbossFilter.h */,\n\t\t\t\tB46217082220479300EE2876 /* GPUImageEmbossFilter.m */,\n\t\t\t\tB462172E2220479700EE2876 /* GPUImageErosionFilter.h */,\n\t\t\t\tB46217642220479C00EE2876 /* GPUImageErosionFilter.m */,\n\t\t\t\tB462171A2220479500EE2876 /* GPUImageExclusionBlendFilter.h */,\n\t\t\t\tB46217262220479600EE2876 /* GPUImageExclusionBlendFilter.m */,\n\t\t\t\tB46216EF2220479100EE2876 /* GPUImageExposureFilter.h */,\n\t\t\t\tB46217652220479C00EE2876 /* GPUImageExposureFilter.m */,\n\t\t\t\tB462177C2220479E00EE2876 /* GPUImageFalseColorFilter.h */,\n\t\t\t\tB46217282220479600EE2876 /* GPUImageFalseColorFilter.m */,\n\t\t\t\tB462174E2220479A00EE2876 /* GPUImageFASTCornerDetectionFilter.h */,\n\t\t\t\tB46217352220479700EE2876 /* GPUImageFASTCornerDetectionFilter.m */,\n\t\t\t\tB462172F2220479700EE2876 /* GPUImageGammaFilter.h */,\n\t\t\t\tB46217272220479600EE2876 /* GPUImageGammaFilter.m */,\n\t\t\t\tB462173C2220479800EE2876 /* GPUImageGaussianBlurFilter.h */,\n\t\t\t\tB46217742220479D00EE2876 /* GPUImageGaussianBlurFilter.m */,\n\t\t\t\tB46217812220479E00EE2876 /* GPUImageGaussianBlurPositionFilter.h */,\n\t\t\t\tB46217482220479900EE2876 /* GPUImageGaussianBlurPositionFilter.m */,\n\t\t\t\tB46217192220479500EE2876 /* GPUImageGaussianSelectiveBlurFilter.h */,\n\t\t\t\tB462177A2220479E00EE2876 /* GPUImageGaussianSelectiveBlurFilter.m */,\n\t\t\t\tB46217622220479B00EE2876 /* GPUImageGlassSphereFilter.h */,\n\t\t\t\tB46216F82220479200EE2876 /* GPUImageGlassSphereFilter.m */,\n\t\t\t\tB46217232220479600EE2876 /* GPUImageGrayscaleFilter.h */,\n\t\t\t\tB46217052220479300EE2876 /* GPUImageGrayscaleFilter.m */,\n\t\t\t\tB46217452220479900EE2876 /* GPUImageHalftoneFilter.h */,\n\t\t\t\tB46216F22220479100EE2876 /* GPUImageHalftoneFilter.m */,\n\t\t\t\tB462171B2220479500EE2876 /* GPUImageHardLightBlendFilter.h */,\n\t\t\t\tB46217072220479300EE2876 /* GPUImageHardLightBlendFilter.m */,\n\t\t\t\tB46216FE2220479200EE2876 /* GPUImageHarrisCornerDetectionFilter.h */,\n\t\t\t\tB462176B2220479C00EE2876 /* GPUImageHarrisCornerDetectionFilter.m */,\n\t\t\t\tB46217202220479500EE2876 /* GPUImageHazeFilter.h */,\n\t\t\t\tB462174D2220479900EE2876 /* GPUImageHazeFilter.m */,\n\t\t\t\tB46217412220479800EE2876 /* GPUImageHighlightShadowFilter.h */,\n\t\t\t\tB462175D2220479B00EE2876 /* GPUImageHighlightShadowFilter.m */,\n\t\t\t\tB462170F2220479400EE2876 /* GPUImageHighlightShadowTintFilter.h */,\n\t\t\t\tB46217542220479A00EE2876 /* GPUImageHighlightShadowTintFilter.m */,\n\t\t\t\tB46217142220479400EE2876 /* GPUImageHighPassFilter.h */,\n\t\t\t\tB46217662220479C00EE2876 /* GPUImageHighPassFilter.m */,\n\t\t\t\tB46217722220479D00EE2876 /* GPUImageHistogramEqualizationFilter.h */,\n\t\t\t\tB46217062220479300EE2876 /* GPUImageHistogramEqualizationFilter.m */,\n\t\t\t\tB462171E2220479500EE2876 /* GPUImageHistogramFilter.h */,\n\t\t\t\tB46216EE2220479100EE2876 /* GPUImageHistogramFilter.m 
*/,\n\t\t\t\tB46217112220479400EE2876 /* GPUImageHistogramGenerator.h */,\n\t\t\t\tB462175B2220479B00EE2876 /* GPUImageHistogramGenerator.m */,\n\t\t\t\tB462174B2220479900EE2876 /* GPUImageHoughTransformLineDetector.h */,\n\t\t\t\tB46216FF2220479200EE2876 /* GPUImageHoughTransformLineDetector.m */,\n\t\t\t\tB46216F92220479200EE2876 /* GPUImageHSBFilter.h */,\n\t\t\t\tB46217122220479400EE2876 /* GPUImageHSBFilter.m */,\n\t\t\t\tB46217032220479300EE2876 /* GPUImageHueBlendFilter.h */,\n\t\t\t\tB462170B2220479300EE2876 /* GPUImageHueBlendFilter.m */,\n\t\t\t\tB46216FA2220479200EE2876 /* GPUImageHueFilter.h */,\n\t\t\t\tB46217182220479500EE2876 /* GPUImageHueFilter.m */,\n\t\t\t\tB46217632220479C00EE2876 /* GPUImageiOSBlurFilter.h */,\n\t\t\t\tB46216FB2220479200EE2876 /* GPUImageiOSBlurFilter.m */,\n\t\t\t\tB46216F12220479100EE2876 /* GPUImageJFAVoronoiFilter.h */,\n\t\t\t\tB46217042220479300EE2876 /* GPUImageJFAVoronoiFilter.m */,\n\t\t\t\tB46217342220479700EE2876 /* GPUImageKuwaharaFilter.h */,\n\t\t\t\tB462171C2220479500EE2876 /* GPUImageKuwaharaFilter.m */,\n\t\t\t\tB46217492220479900EE2876 /* GPUImageKuwaharaRadius3Filter.h */,\n\t\t\t\tB462174A2220479900EE2876 /* GPUImageKuwaharaRadius3Filter.m */,\n\t\t\t\tB462176F2220479D00EE2876 /* GPUImageLanczosResamplingFilter.h */,\n\t\t\t\tB462171D2220479500EE2876 /* GPUImageLanczosResamplingFilter.m */,\n\t\t\t\tB46216F02220479100EE2876 /* GPUImageLaplacianFilter.h */,\n\t\t\t\tB46217252220479600EE2876 /* GPUImageLaplacianFilter.m */,\n\t\t\t\tB46216F32220479100EE2876 /* GPUImageLevelsFilter.h */,\n\t\t\t\tB46217332220479700EE2876 /* GPUImageLevelsFilter.m */,\n\t\t\t\tB46217832220479E00EE2876 /* GPUImageLightenBlendFilter.h */,\n\t\t\t\tB46217532220479A00EE2876 /* GPUImageLightenBlendFilter.m */,\n\t\t\t\tB462176E2220479D00EE2876 /* GPUImageLinearBurnBlendFilter.h */,\n\t\t\t\tB46217222220479500EE2876 /* GPUImageLinearBurnBlendFilter.m */,\n\t\t\t\tB46217092220479300EE2876 /* GPUImageLineGenerator.h */,\n\t\t\t\tB462172B2220479600EE2876 /* GPUImageLineGenerator.m */,\n\t\t\t\tB462170A2220479300EE2876 /* GPUImageLocalBinaryPatternFilter.h */,\n\t\t\t\tB46217402220479800EE2876 /* GPUImageLocalBinaryPatternFilter.m */,\n\t\t\t\tB462177E2220479E00EE2876 /* GPUImageLookupFilter.h */,\n\t\t\t\tB46216F72220479200EE2876 /* GPUImageLookupFilter.m */,\n\t\t\t\tB462172D2220479600EE2876 /* GPUImageLowPassFilter.h */,\n\t\t\t\tB46217012220479300EE2876 /* GPUImageLowPassFilter.m */,\n\t\t\t\tB46217582220479A00EE2876 /* GPUImageLuminanceRangeFilter.h */,\n\t\t\t\tB46217552220479A00EE2876 /* GPUImageLuminanceRangeFilter.m */,\n\t\t\t\tB46217422220479800EE2876 /* GPUImageLuminanceThresholdFilter.h */,\n\t\t\t\tB46217712220479D00EE2876 /* GPUImageLuminanceThresholdFilter.m */,\n\t\t\t\tB46217002220479200EE2876 /* GPUImageLuminosity.h */,\n\t\t\t\tB462176A2220479C00EE2876 /* GPUImageLuminosity.m */,\n\t\t\t\tB46217672220479C00EE2876 /* GPUImageLuminosityBlendFilter.h */,\n\t\t\t\tB46217522220479A00EE2876 /* GPUImageLuminosityBlendFilter.m */,\n\t\t\t\tB462174C2220479900EE2876 /* GPUImageMaskFilter.h */,\n\t\t\t\tB46217822220479E00EE2876 /* GPUImageMaskFilter.m */,\n\t\t\t\tB46217372220479700EE2876 /* GPUImageMedianFilter.h */,\n\t\t\t\tB46217592220479B00EE2876 /* GPUImageMedianFilter.m */,\n\t\t\t\tB46217312220479700EE2876 /* GPUImageMissEtikateFilter.h */,\n\t\t\t\tB462175C2220479B00EE2876 /* GPUImageMissEtikateFilter.m */,\n\t\t\t\tB46217462220479900EE2876 /* GPUImageMonochromeFilter.h */,\n\t\t\t\tB46216F52220479100EE2876 /* GPUImageMonochromeFilter.m 
*/,\n\t\t\t\tB462174F2220479A00EE2876 /* GPUImageMosaicFilter.h */,\n\t\t\t\tB46217782220479D00EE2876 /* GPUImageMosaicFilter.m */,\n\t\t\t\tB462175F2220479B00EE2876 /* GPUImageMotionBlurFilter.h */,\n\t\t\t\tB46217152220479400EE2876 /* GPUImageMotionBlurFilter.m */,\n\t\t\t\tB462171F2220479500EE2876 /* GPUImageMotionDetector.h */,\n\t\t\t\tB46217702220479D00EE2876 /* GPUImageMotionDetector.m */,\n\t\t\t\tB462170D2220479400EE2876 /* GPUImageMultiplyBlendFilter.h */,\n\t\t\t\tB46217802220479E00EE2876 /* GPUImageMultiplyBlendFilter.m */,\n\t\t\t\tB46216FC2220479200EE2876 /* GPUImageNobleCornerDetectionFilter.h */,\n\t\t\t\tB46217602220479B00EE2876 /* GPUImageNobleCornerDetectionFilter.m */,\n\t\t\t\tB46216FD2220479200EE2876 /* GPUImageNonMaximumSuppressionFilter.h */,\n\t\t\t\tB46217472220479900EE2876 /* GPUImageNonMaximumSuppressionFilter.m */,\n\t\t\t\tB46216F42220479100EE2876 /* GPUImageNormalBlendFilter.h */,\n\t\t\t\tB462173A2220479800EE2876 /* GPUImageNormalBlendFilter.m */,\n\t\t\t\tB46217732220479D00EE2876 /* GPUImageOpacityFilter.h */,\n\t\t\t\tB46217392220479800EE2876 /* GPUImageOpacityFilter.m */,\n\t\t\t\tB462170E2220479400EE2876 /* GPUImageOpeningFilter.h */,\n\t\t\t\tB46217852220479F00EE2876 /* GPUImageOpeningFilter.m */,\n\t\t\t\tB46217302220479700EE2876 /* GPUImageOverlayBlendFilter.h */,\n\t\t\t\tB46217362220479700EE2876 /* GPUImageOverlayBlendFilter.m */,\n\t\t\t\tB462175E2220479B00EE2876 /* GPUImageParallelCoordinateLineTransformFilter.h */,\n\t\t\t\tB46217752220479D00EE2876 /* GPUImageParallelCoordinateLineTransformFilter.m */,\n\t\t\t\tB46217772220479D00EE2876 /* GPUImagePerlinNoiseFilter.h */,\n\t\t\t\tB462173D2220479800EE2876 /* GPUImagePerlinNoiseFilter.m */,\n\t\t\t\tB46217382220479700EE2876 /* GPUImagePinchDistortionFilter.h */,\n\t\t\t\tB46217682220479C00EE2876 /* GPUImagePinchDistortionFilter.m */,\n\t\t\t\tB46217562220479A00EE2876 /* GPUImagePixellateFilter.h */,\n\t\t\t\tB462173B2220479800EE2876 /* GPUImagePixellateFilter.m */,\n\t\t\t\tB462173F2220479800EE2876 /* GPUImagePixellatePositionFilter.h */,\n\t\t\t\tB46217442220479900EE2876 /* GPUImagePixellatePositionFilter.m */,\n\t\t\t\tB46217242220479600EE2876 /* GPUImagePoissonBlendFilter.h */,\n\t\t\t\tB46217612220479B00EE2876 /* GPUImagePoissonBlendFilter.m */,\n\t\t\t\tB46217792220479E00EE2876 /* GPUImagePolarPixellateFilter.h */,\n\t\t\t\tB46217502220479A00EE2876 /* GPUImagePolarPixellateFilter.m */,\n\t\t\t\tB462177B2220479E00EE2876 /* GPUImagePolkaDotFilter.h */,\n\t\t\t\tB46217022220479300EE2876 /* GPUImagePolkaDotFilter.m */,\n\t\t\t\tB46216B92220477000EE2876 /* GPUImageThresholdEdgeDetectionFilter.h */,\n\t\t\t\tB46216C52220477100EE2876 /* GPUImageThresholdEdgeDetectionFilter.m */,\n\t\t\t\tB46216C02220477100EE2876 /* GPUImageThresholdedNonMaximumSuppressionFilter.h */,\n\t\t\t\tB46216AF2220476F00EE2876 /* GPUImageThresholdedNonMaximumSuppressionFilter.m */,\n\t\t\t\tB46216C32220477100EE2876 /* GPUImageThresholdSketchFilter.h */,\n\t\t\t\tB46216A82220476F00EE2876 /* GPUImageThresholdSketchFilter.m */,\n\t\t\t\tB46216BF2220477100EE2876 /* GPUImageTiltShiftFilter.h */,\n\t\t\t\tB46216B32220477000EE2876 /* GPUImageTiltShiftFilter.m */,\n\t\t\t\tB46216AB2220476F00EE2876 /* GPUImageToneCurveFilter.h */,\n\t\t\t\tB46216B52220477000EE2876 /* GPUImageToneCurveFilter.m */,\n\t\t\t\tB46216A62220476E00EE2876 /* GPUImageToonFilter.h */,\n\t\t\t\tB46216AC2220476F00EE2876 /* GPUImageToonFilter.m */,\n\t\t\t\tB46216AE2220476F00EE2876 /* GPUImageTransformFilter.h */,\n\t\t\t\tB46216C92220477200EE2876 /* 
GPUImageTransformFilter.m */,\n\t\t\t\tB46216C72220477100EE2876 /* GPUImageTwoInputCrossTextureSamplingFilter.h */,\n\t\t\t\tB46216A72220476F00EE2876 /* GPUImageTwoInputCrossTextureSamplingFilter.m */,\n\t\t\t\tB46216BA2220477000EE2876 /* GPUImageUIElement.h */,\n\t\t\t\tB46216B22220477000EE2876 /* GPUImageUIElement.m */,\n\t\t\t\tB46216C22220477100EE2876 /* GPUImageUnsharpMaskFilter.h */,\n\t\t\t\tB46216B72220477000EE2876 /* GPUImageUnsharpMaskFilter.m */,\n\t\t\t\tB46216C12220477100EE2876 /* GPUImageVibranceFilter.h */,\n\t\t\t\tB46216A92220476F00EE2876 /* GPUImageVibranceFilter.m */,\n\t\t\t\tB46216BB2220477000EE2876 /* GPUImageVignetteFilter.h */,\n\t\t\t\tB46216BD2220477100EE2876 /* GPUImageVignetteFilter.m */,\n\t\t\t\tB46216C62220477100EE2876 /* GPUImageVoronoiConsumerFilter.h */,\n\t\t\t\tB46216C42220477100EE2876 /* GPUImageVoronoiConsumerFilter.m */,\n\t\t\t\tB46216AD2220476F00EE2876 /* GPUImageWeakPixelInclusionFilter.h */,\n\t\t\t\tB46216B02220476F00EE2876 /* GPUImageWeakPixelInclusionFilter.m */,\n\t\t\t\tB46216BE2220477100EE2876 /* GPUImageWhiteBalanceFilter.h */,\n\t\t\t\tB46216C82220477200EE2876 /* GPUImageWhiteBalanceFilter.m */,\n\t\t\t\tB46216AA2220476F00EE2876 /* GPUImageXYDerivativeFilter.h */,\n\t\t\t\tB46216B82220477000EE2876 /* GPUImageXYDerivativeFilter.m */,\n\t\t\t\tB46216B12220477000EE2876 /* GPUImageZoomBlurFilter.h */,\n\t\t\t\tB46216B62220477000EE2876 /* GPUImageZoomBlurFilter.m */,\n\t\t\t\tB46216762220475C00EE2876 /* GPUImageCGAColorspaceFilter.h */,\n\t\t\t\tB462166F2220475C00EE2876 /* GPUImageCGAColorspaceFilter.m */,\n\t\t\t\tB462167D2220475D00EE2876 /* GPUImageChromaKeyBlendFilter.h */,\n\t\t\t\tB46216792220475D00EE2876 /* GPUImageChromaKeyBlendFilter.m */,\n\t\t\t\tB46216722220475C00EE2876 /* GPUImageChromaKeyFilter.h */,\n\t\t\t\tB46216752220475C00EE2876 /* GPUImageChromaKeyFilter.m */,\n\t\t\t\tB46216802220475D00EE2876 /* GPUImageClosingFilter.h */,\n\t\t\t\tB462166E2220475C00EE2876 /* GPUImageClosingFilter.m */,\n\t\t\t\tB46216822220475D00EE2876 /* GPUImageColorBlendFilter.h */,\n\t\t\t\tB46216842220475E00EE2876 /* GPUImageColorBlendFilter.m */,\n\t\t\t\tB462167C2220475D00EE2876 /* GPUImageColorBurnBlendFilter.h */,\n\t\t\t\tB462167B2220475D00EE2876 /* GPUImageColorBurnBlendFilter.m */,\n\t\t\t\tB46216812220475D00EE2876 /* GPUImageColorConversion.h */,\n\t\t\t\tB46216732220475C00EE2876 /* GPUImageColorConversion.m */,\n\t\t\t\tB46216782220475D00EE2876 /* GPUImageColorDodgeBlendFilter.h */,\n\t\t\t\tB462167E2220475D00EE2876 /* GPUImageColorDodgeBlendFilter.m */,\n\t\t\t\tB46216852220475E00EE2876 /* GPUImageColorInvertFilter.h */,\n\t\t\t\tB46216702220475C00EE2876 /* GPUImageColorInvertFilter.m */,\n\t\t\t\tB46216862220475E00EE2876 /* GPUImageColorLocalBinaryPatternFilter.h */,\n\t\t\t\tB46216872220475E00EE2876 /* GPUImageColorLocalBinaryPatternFilter.m */,\n\t\t\t\tB46216742220475C00EE2876 /* GPUImageColorMatrixFilter.h */,\n\t\t\t\tB46216832220475E00EE2876 /* GPUImageColorMatrixFilter.m */,\n\t\t\t\tB462167F2220475D00EE2876 /* GPUImageColorPackingFilter.h */,\n\t\t\t\tB46216772220475C00EE2876 /* GPUImageColorPackingFilter.m */,\n\t\t\t\tB46216882220475E00EE2876 /* GPUImageColourFASTFeatureDetector.h */,\n\t\t\t\tB46216712220475C00EE2876 /* GPUImageColourFASTFeatureDetector.m */,\n\t\t\t\tB462167A2220475D00EE2876 /* GPUImageColourFASTSamplingOperation.h */,\n\t\t\t\tB46216892220475E00EE2876 /* GPUImageColourFASTSamplingOperation.m */,\n\t\t\t\tB46216502220474D00EE2876 /* GPUImage3x3ConvolutionFilter.h */,\n\t\t\t\tB462163A2220474B00EE2876 /* 
GPUImage3x3ConvolutionFilter.m */,\n\t\t\t\tB46216522220474D00EE2876 /* GPUImage3x3TextureSamplingFilter.h */,\n\t\t\t\tB46216512220474D00EE2876 /* GPUImage3x3TextureSamplingFilter.m */,\n\t\t\t\tB46216452220474C00EE2876 /* GPUImageAdaptiveThresholdFilter.h */,\n\t\t\t\tB462164D2220474C00EE2876 /* GPUImageAdaptiveThresholdFilter.m */,\n\t\t\t\tB462164A2220474C00EE2876 /* GPUImageAddBlendFilter.h */,\n\t\t\t\tB46216472220474C00EE2876 /* GPUImageAddBlendFilter.m */,\n\t\t\t\tB46216432220474B00EE2876 /* GPUImageAlphaBlendFilter.h */,\n\t\t\t\tB46216492220474C00EE2876 /* GPUImageAlphaBlendFilter.m */,\n\t\t\t\tB46216402220474B00EE2876 /* GPUImageAmatorkaFilter.h */,\n\t\t\t\tB46216532220474D00EE2876 /* GPUImageAmatorkaFilter.m */,\n\t\t\t\tB462163B2220474B00EE2876 /* GPUImageAverageColor.h */,\n\t\t\t\tB46216462220474C00EE2876 /* GPUImageAverageColor.m */,\n\t\t\t\tB46216422220474B00EE2876 /* GPUImageAverageLuminanceThresholdFilter.h */,\n\t\t\t\tB46216442220474C00EE2876 /* GPUImageAverageLuminanceThresholdFilter.m */,\n\t\t\t\tB462163D2220474B00EE2876 /* GPUImageBilateralFilter.h */,\n\t\t\t\tB462164E2220474C00EE2876 /* GPUImageBilateralFilter.m */,\n\t\t\t\tB462163E2220474B00EE2876 /* GPUImageBoxBlurFilter.h */,\n\t\t\t\tB46216412220474B00EE2876 /* GPUImageBoxBlurFilter.m */,\n\t\t\t\tB462163F2220474B00EE2876 /* GPUImageBrightnessFilter.h */,\n\t\t\t\tB462164F2220474C00EE2876 /* GPUImageBrightnessFilter.m */,\n\t\t\t\tB462164C2220474C00EE2876 /* GPUImageBulgeDistortionFilter.h */,\n\t\t\t\tB462163C2220474B00EE2876 /* GPUImageBulgeDistortionFilter.m */,\n\t\t\t\tB462164B2220474C00EE2876 /* GPUImageCannyEdgeDetectionFilter.h */,\n\t\t\t\tB46216482220474C00EE2876 /* GPUImageCannyEdgeDetectionFilter.m */,\n\t\t\t);\n\t\t\tpath = Filters;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB47A5EE522212D57006E07A4 /* GLFilters */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB4DAAC42225724AB0094A3B2 /* GLImageMovie */,\n\t\t\t\tB42EC07E22520B6F00226FA0 /* DouYinEffect */,\n\t\t\t\tB47A5EF022212EA3006E07A4 /* FaceFilters */,\n\t\t\t\tB47A5EE622212D57006E07A4 /* ColorProcessing */,\n\t\t\t);\n\t\t\tpath = GLFilters;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB47A5EE622212D57006E07A4 /* ColorProcessing */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB42D8D61223F8A5600517EA7 /* GLImageCircleFilter.h */,\n\t\t\t\tB42D8D60223F8A5500517EA7 /* GLImageCircleFilter.m */,\n\t\t\t\tB44ADD03222668DD00F17956 /* GLImageAddStickerFilter.h */,\n\t\t\t\tB44ADD05222668DD00F17956 /* GLImageAddStickerFilter.m */,\n\t\t\t\tB44ADD08222668DD00F17956 /* GLImageAddStickerWithEffectFilter.h */,\n\t\t\t\tB44ADD04222668DD00F17956 /* GLImageAddStickerWithEffectFilter.m */,\n\t\t\t\tB44ADD06222668DD00F17956 /* GLImageStickerFilter.h */,\n\t\t\t\tB44ADD07222668DD00F17956 /* GLImageStickerFilter.m */,\n\t\t\t\tB47A5EE722212D57006E07A4 /* GLImageLutFilter.h */,\n\t\t\t\tB47A5EE922212D57006E07A4 /* GLImageLutFilter.m */,\n\t\t\t\tB47A5EE822212D57006E07A4 /* GLImageTwoLutFilter.m */,\n\t\t\t\tB47A5EEA22212D57006E07A4 /* GLImageTwoLutFilter.h */,\n\t\t\t\tB44ADD112226695100F17956 /* GLImageBlendFilter.h */,\n\t\t\t\tB44ADD102226695100F17956 /* GLImageBlendFilter.m */,\n\t\t\t\tB44ADD122226695200F17956 /* GLImageMixBlendFilter.h */,\n\t\t\t\tB44ADD0F2226695100F17956 /* GLImageMixBlendFilter.m */,\n\t\t\t\tB44ADD17222669AA00F17956 /* GLImageGassianBlurMixFilter.h */,\n\t\t\t\tB44ADD18222669AB00F17956 /* GLImageGassianBlurMixFilter.m */,\n\t\t\t\tB413174D2277277300B4866F /* GLImageShapeFilter.h 
*/,\n\t\t\t\tB413174C2277277200B4866F /* GLImageShapeFilter.m */,\n\t\t\t\tB413174E2277277300B4866F /* GLImageShapeHighDefinitionFilter.h */,\n\t\t\t\tB413174F2277277300B4866F /* GLImageShapeHighDefinitionFilter.m */,\n\t\t\t);\n\t\t\tpath = ColorProcessing;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB47A5EF022212EA3006E07A4 /* FaceFilters */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB4A9FE872265CFDA00BC6E51 /* GLImageFaceChangeFilter.h */,\n\t\t\t\tB4A9FE882265CFDA00BC6E51 /* GLImageFaceChangeFilter.m */,\n\t\t\t\tB4C64C1B2271FFF9001BB716 /* GLImageFaceDetectPointFilter.h */,\n\t\t\t\tB4C64C1C2271FFF9001BB716 /* GLImageFaceDetectPointFilter.m */,\n\t\t\t\tB4C64C20227217C2001BB716 /* GLImageFaceChangeFilterGroup.h */,\n\t\t\t\tB4C64C21227217C2001BB716 /* GLImageFaceChangeFilterGroup.m */,\n\t\t\t\tB47A5EF122212EA3006E07A4 /* GPUImageBeautifyFilter.m */,\n\t\t\t\tB47A5EF222212EA3006E07A4 /* GPUImageBeautifyFilter.h */,\n\t\t\t);\n\t\t\tpath = FaceFilters;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB4DAAC42225724AB0094A3B2 /* GLImageMovie */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB4DAAC45225724AB0094A3B2 /* GLImageMovie.h */,\n\t\t\t\tB4DAAC43225724AB0094A3B2 /* GLImageMovie.m */,\n\t\t\t\tB4DAAC46225724AB0094A3B2 /* GPUImageMovieWriterFix.h */,\n\t\t\t\tB4DAAC44225724AB0094A3B2 /* GPUImageMovieWriterFix.m */,\n\t\t\t);\n\t\t\tpath = GLImageMovie;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n/* End PBXGroup section */\n\n/* Begin PBXHeadersBuildPhase section */\n\t\tB4620D3B22203FBE00EE2876 /* Headers */ = {\n\t\t\tisa = PBXHeadersBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = (\n\t\t\t\tB41317512277277300B4866F /* GLImageShapeFilter.h in Headers */,\n\t\t\t\tB4C64C22227217C2001BB716 /* GLImageFaceChangeFilterGroup.h in Headers */,\n\t\t\t\tB4C64C1D2271FFF9001BB716 /* GLImageFaceDetectPointFilter.h in Headers */,\n\t\t\t\tB41317522277277300B4866F /* GLImageShapeHighDefinitionFilter.h in Headers */,\n\t\t\t\tB41FE41B226A260F008DF295 /* GLImageBlurSnapViewFilterGroup.h in Headers */,\n\t\t\t\tB4D9F70C226A10EE00FD18FC /* GLImageBlurSnapViewFilter.h in Headers */,\n\t\t\t\tB436FECB2266DA2E009C5A0E /* GLImageWaterReflectionFilter.h in Headers */,\n\t\t\t\tB436FEC72266D28F009C5A0E /* GLImageZoomFilter.h in Headers */,\n\t\t\t\tB4A9FE9A2266409F00BC6E51 /* GLImageSoulOutFilter.h in Headers */,\n\t\t\t\tB4A9FE892265CFDA00BC6E51 /* GLImageFaceChangeFilter.h in Headers */,\n\t\t\t\tB49EF6352263521C00C220B0 /* GLImageGlitchEffectGridFilter.h in Headers */,\n\t\t\t\tB49EF6362263521C00C220B0 /* GLImageGlitchEffectLineFilter.h in Headers */,\n\t\t\t\tB4DAAC4A225724AB0094A3B2 /* GPUImageMovieWriterFix.h in Headers */,\n\t\t\t\tB42EC08622520B6F00226FA0 /* GLImageThreePartitionFilter.h in Headers */,\n\t\t\t\tB4DAAC49225724AB0094A3B2 /* GLImageMovie.h in Headers */,\n\t\t\t\tB42EC08A22520B6F00226FA0 /* GLImageFourPointsMirrorFilter.h in Headers */,\n\t\t\t\tB42EC08922520B6F00226FA0 /* GLImageThreePartitionGroupFilter.h in Headers */,\n\t\t\t\tB42D8D63223F8A5600517EA7 /* GLImageCircleFilter.h in Headers */,\n\t\t\t\tB44ADD09222668DD00F17956 /* GLImageAddStickerFilter.h in Headers */,\n\t\t\t\tB44ADD0C222668DD00F17956 /* GLImageStickerFilter.h in Headers */,\n\t\t\t\tB44ADD19222669AB00F17956 /* GLImageGassianBlurMixFilter.h in Headers */,\n\t\t\t\tB47A5EF422212EA3006E07A4 /* GPUImageBeautifyFilter.h in Headers */,\n\t\t\t\tB46217C92220479F00EE2876 /* GPUImageMissEtikateFilter.h in Headers */,\n\t\t\t\tB46216E42220477200EE2876 /* 
GPUImageThresholdedNonMaximumSuppressionFilter.h in Headers */,\n\t\t\t\tB47A5EEE22212D57006E07A4 /* GLImageTwoLutFilter.h in Headers */,\n\t\t\t\tB44ADD0E222668DD00F17956 /* GLImageAddStickerWithEffectFilter.h in Headers */,\n\t\t\t\tB47A5EEB22212D57006E07A4 /* GLImageLutFilter.h in Headers */,\n\t\t\t\tB462162E222046DF00EE2876 /* GPUImageFramebuffer.h in Headers */,\n\t\t\t\tB4621637222046DF00EE2876 /* GPUImageFilterPipeline.h in Headers */,\n\t\t\t\tB46217872220479F00EE2876 /* GPUImageExposureFilter.h in Headers */,\n\t\t\t\tB46217952220479F00EE2876 /* GPUImageNonMaximumSuppressionFilter.h in Headers */,\n\t\t\t\tB46217A92220479F00EE2876 /* GPUImageHistogramGenerator.h in Headers */,\n\t\t\t\tB46217FF2220479F00EE2876 /* GPUImageLuminosityBlendFilter.h in Headers */,\n\t\t\t\tB462181B2220479F00EE2876 /* GPUImageLightenBlendFilter.h in Headers */,\n\t\t\t\tB46218142220479F00EE2876 /* GPUImageFalseColorFilter.h in Headers */,\n\t\t\t\tB4621856222047B600EE2876 /* GPUImageRGBErosionFilter.h in Headers */,\n\t\t\t\tB4621892222047C400EE2876 /* GPUImageStretchDistortionFilter.h in Headers */,\n\t\t\t\tB462178C2220479F00EE2876 /* GPUImageNormalBlendFilter.h in Headers */,\n\t\t\t\tB46216552220474D00EE2876 /* GPUImageAverageColor.h in Headers */,\n\t\t\t\tB46216DF2220477200EE2876 /* GPUImageVignetteFilter.h in Headers */,\n\t\t\t\tB4621629222046DF00EE2876 /* GPUImageFramebufferCache.h in Headers */,\n\t\t\t\tB46217942220479F00EE2876 /* GPUImageNobleCornerDetectionFilter.h in Headers */,\n\t\t\t\tB462133B2220436300EE2876 /* GPUImageFramework.h in Headers */,\n\t\t\t\tB4621880222047C400EE2876 /* GPUImageSobelEdgeDetectionFilter.h in Headers */,\n\t\t\t\tB4621847222047B600EE2876 /* GPUImageSkinToneFilter.h in Headers */,\n\t\t\t\tB46216DD2220477200EE2876 /* GPUImageThresholdEdgeDetectionFilter.h in Headers */,\n\t\t\t\tB44ADD162226695200F17956 /* GLImageMixBlendFilter.h in Headers */,\n\t\t\t\tB44ADD152226695200F17956 /* GLImageBlendFilter.h in Headers */,\n\t\t\t\tB4621632222046DF00EE2876 /* GPUImageTwoPassFilter.h in Headers */,\n\t\t\t\tB462161E222046DF00EE2876 /* GPUImageMovie.h in Headers */,\n\t\t\t\tB46213422220436300EE2876 /* GPUImagePicture.h in Headers */,\n\t\t\t\tB462161F222046DF00EE2876 /* GPUImageOutput.h in Headers */,\n\t\t\t\tB46218052220479F00EE2876 /* GPUImageDilationFilter.h in Headers */,\n\t\t\t\tB46218112220479F00EE2876 /* GPUImagePolarPixellateFilter.h in Headers */,\n\t\t\t\tB46213432220436300EE2876 /* GPUImagePicture+TextureSubimage.h in Headers */,\n\t\t\t\tB462178E2220479F00EE2876 /* GPUImageDivideBlendFilter.h in Headers */,\n\t\t\t\tB46217A12220479F00EE2876 /* GPUImageLineGenerator.h in Headers */,\n\t\t\t\tB462166A2220474D00EE2876 /* GPUImage3x3ConvolutionFilter.h in Headers */,\n\t\t\t\tB46217BC2220479F00EE2876 /* GPUImagePoissonBlendFilter.h in Headers */,\n\t\t\t\tB46217B62220479F00EE2876 /* GPUImageHistogramFilter.h in Headers */,\n\t\t\t\tB46218162220479F00EE2876 /* GPUImageLookupFilter.h in Headers */,\n\t\t\t\tB46217CC2220479F00EE2876 /* GPUImageKuwaharaFilter.h in Headers */,\n\t\t\t\tB46216652220474D00EE2876 /* GPUImageCannyEdgeDetectionFilter.h in Headers */,\n\t\t\t\tB46217B82220479F00EE2876 /* GPUImageHazeFilter.h in Headers */,\n\t\t\t\tB46217DE2220479F00EE2876 /* GPUImageMonochromeFilter.h in Headers */,\n\t\t\t\tB4621619222046DF00EE2876 /* GPUImageTextureOutput.h in Headers */,\n\t\t\t\tB46217982220479F00EE2876 /* GPUImageLuminosity.h in Headers */,\n\t\t\t\tB46217E42220479F00EE2876 /* GPUImageMaskFilter.h in Headers */,\n\t\t\t\tB462162D222046DF00EE2876 
/* GPUImageBuffer.h in Headers */,\n\t\t\t\tB462179B2220479F00EE2876 /* GPUImageHueBlendFilter.h in Headers */,\n\t\t\t\tB4621895222047C400EE2876 /* GPUImageSolarizeFilter.h in Headers */,\n\t\t\t\tB46216642220474D00EE2876 /* GPUImageAddBlendFilter.h in Headers */,\n\t\t\t\tB46217E32220479F00EE2876 /* GPUImageHoughTransformLineDetector.h in Headers */,\n\t\t\t\tB46217CF2220479F00EE2876 /* GPUImageMedianFilter.h in Headers */,\n\t\t\t\tB462185F222047B600EE2876 /* GPUImageSingleComponentGaussianBlurFilter.h in Headers */,\n\t\t\t\tB46212AF2220436300EE2876 /* GPUImage.h in Headers */,\n\t\t\t\tB46217F22220479F00EE2876 /* GPUImageEmbossFilter.h in Headers */,\n\t\t\t\tB4621636222046DF00EE2876 /* GPUImageMovieComposition.h in Headers */,\n\t\t\t\tB4621893222047C400EE2876 /* GPUImageSphereRefractionFilter.h in Headers */,\n\t\t\t\tB46218062220479F00EE2876 /* GPUImageLinearBurnBlendFilter.h in Headers */,\n\t\t\t\tB462180B2220479F00EE2876 /* GPUImageOpacityFilter.h in Headers */,\n\t\t\t\tB46217F02220479F00EE2876 /* GPUImageLuminanceRangeFilter.h in Headers */,\n\t\t\t\tB4621891222047C400EE2876 /* GPUImageSubtractBlendFilter.h in Headers */,\n\t\t\t\tB462169D2220475E00EE2876 /* GPUImageColorConversion.h in Headers */,\n\t\t\t\tB4621633222046DF00EE2876 /* GPUImageVideoCamera.h in Headers */,\n\t\t\t\tB46217E62220479F00EE2876 /* GPUImageFASTCornerDetectionFilter.h in Headers */,\n\t\t\t\tB46217882220479F00EE2876 /* GPUImageLaplacianFilter.h in Headers */,\n\t\t\t\tB462185E222047B600EE2876 /* GPUImageRGBClosingFilter.h in Headers */,\n\t\t\t\tB462168E2220475E00EE2876 /* GPUImageChromaKeyFilter.h in Headers */,\n\t\t\t\tB46217AB2220479F00EE2876 /* GPUImageCrosshatchFilter.h in Headers */,\n\t\t\t\tB4621899222048E500EE2876 /* GPUImageThreeInputFilter.h in Headers */,\n\t\t\t\tB46213382220436300EE2876 /* GPUImageView.h in Headers */,\n\t\t\t\tB462180A2220479F00EE2876 /* GPUImageHistogramEqualizationFilter.h in Headers */,\n\t\t\t\tB46218192220479F00EE2876 /* GPUImageGaussianBlurPositionFilter.h in Headers */,\n\t\t\t\tB46216D12220477200EE2876 /* GPUImageWeakPixelInclusionFilter.h in Headers */,\n\t\t\t\tB46217E72220479F00EE2876 /* GPUImageMosaicFilter.h in Headers */,\n\t\t\t\tB4621627222046DF00EE2876 /* GPUImageFilter.h in Headers */,\n\t\t\t\tB46217CA2220479F00EE2876 /* GPUImageDirectionalSobelEdgeDetectionFilter.h in Headers */,\n\t\t\t\tB46216592220474D00EE2876 /* GPUImageBrightnessFilter.h in Headers */,\n\t\t\t\tB462184F222047B600EE2876 /* GPUImageSmoothToonFilter.h in Headers */,\n\t\t\t\tB46216A22220475E00EE2876 /* GPUImageColorLocalBinaryPatternFilter.h in Headers */,\n\t\t\t\tB46216922220475E00EE2876 /* GPUImageCGAColorspaceFilter.h in Headers */,\n\t\t\t\tB462162B222046DF00EE2876 /* GPUImageTwoInputFilter.h in Headers */,\n\t\t\t\tB46217F72220479F00EE2876 /* GPUImageMotionBlurFilter.h in Headers */,\n\t\t\t\tB46217C42220479F00EE2876 /* GPUImageDissolveBlendFilter.h in Headers */,\n\t\t\t\tB46216902220475E00EE2876 /* GPUImageColorMatrixFilter.h in Headers */,\n\t\t\t\tB462169E2220475E00EE2876 /* GPUImageColorBlendFilter.h in Headers */,\n\t\t\t\tB46216EB2220477200EE2876 /* GPUImageTwoInputCrossTextureSamplingFilter.h in Headers */,\n\t\t\t\tB46217962220479F00EE2876 /* GPUImageHarrisCornerDetectionFilter.h in Headers */,\n\t\t\t\tB46216CA2220477200EE2876 /* GPUImageToonFilter.h in Headers */,\n\t\t\t\tB46218172220479F00EE2876 /* GPUImageContrastFilter.h in Headers */,\n\t\t\t\tB46216E22220477200EE2876 /* GPUImageWhiteBalanceFilter.h in Headers */,\n\t\t\t\tB46217892220479F00EE2876 /* 
GPUImageJFAVoronoiFilter.h in Headers */,\n\t\t\t\tB4621851222047B600EE2876 /* GPUImageRGBOpeningFilter.h in Headers */,\n\t\t\t\tB46217AC2220479F00EE2876 /* GPUImageHighPassFilter.h in Headers */,\n\t\t\t\tB46217B72220479F00EE2876 /* GPUImageMotionDetector.h in Headers */,\n\t\t\t\tB46216E52220477200EE2876 /* GPUImageVibranceFilter.h in Headers */,\n\t\t\t\tB462188B222047C400EE2876 /* GPUImageSourceOverBlendFilter.h in Headers */,\n\t\t\t\tB4621852222047B600EE2876 /* GPUImagePrewittEdgeDetectionFilter.h in Headers */,\n\t\t\t\tB462133F2220436300EE2876 /* GPUImageContext.h in Headers */,\n\t\t\t\tB46217A82220479F00EE2876 /* GPUImageCropFilter.h in Headers */,\n\t\t\t\tB46217C72220479F00EE2876 /* GPUImageGammaFilter.h in Headers */,\n\t\t\t\tB46217DD2220479F00EE2876 /* GPUImageHalftoneFilter.h in Headers */,\n\t\t\t\tB46217912220479F00EE2876 /* GPUImageHSBFilter.h in Headers */,\n\t\t\t\tB46218132220479F00EE2876 /* GPUImagePolkaDotFilter.h in Headers */,\n\t\t\t\tB46217DB2220479F00EE2876 /* GPUImageDifferenceBlendFilter.h in Headers */,\n\t\t\t\tB46217F62220479F00EE2876 /* GPUImageParallelCoordinateLineTransformFilter.h in Headers */,\n\t\t\t\tB46217B12220479F00EE2876 /* GPUImageGaussianSelectiveBlurFilter.h in Headers */,\n\t\t\t\tB4621857222047B600EE2876 /* GPUImageSepiaFilter.h in Headers */,\n\t\t\t\tB462165A2220474D00EE2876 /* GPUImageAmatorkaFilter.h in Headers */,\n\t\t\t\tB4621855222047B600EE2876 /* GPUImagePosterizeFilter.h in Headers */,\n\t\t\t\tB4621628222046DF00EE2876 /* GPUImageFilterGroup.h in Headers */,\n\t\t\t\tB4621887222047C400EE2876 /* GPUImageSwirlFilter.h in Headers */,\n\t\t\t\tB46216942220475E00EE2876 /* GPUImageColorDodgeBlendFilter.h in Headers */,\n\t\t\t\tB4621626222046DF00EE2876 /* GPUImageTwoPassTextureSamplingFilter.h in Headers */,\n\t\t\t\tB46216992220475E00EE2876 /* GPUImageChromaKeyBlendFilter.h in Headers */,\n\t\t\t\tB46216D52220477200EE2876 /* GPUImageZoomBlurFilter.h in Headers */,\n\t\t\t\tB46217B22220479F00EE2876 /* GPUImageExclusionBlendFilter.h in Headers */,\n\t\t\t\tB46217A22220479F00EE2876 /* GPUImageLocalBinaryPatternFilter.h in Headers */,\n\t\t\t\tB46218072220479F00EE2876 /* GPUImageLanczosResamplingFilter.h in Headers */,\n\t\t\t\tB46218152220479F00EE2876 /* GPUImageDarkenBlendFilter.h in Headers */,\n\t\t\t\tB462169C2220475E00EE2876 /* GPUImageClosingFilter.h in Headers */,\n\t\t\t\tB46217922220479F00EE2876 /* GPUImageHueFilter.h in Headers */,\n\t\t\t\tB46216572220474D00EE2876 /* GPUImageBilateralFilter.h in Headers */,\n\t\t\t\tB4620D5122203FBE00EE2876 /* GPURenderKit.h in Headers */,\n\t\t\t\tB46217C52220479F00EE2876 /* GPUImageLowPassFilter.h in Headers */,\n\t\t\t\tB46216962220475E00EE2876 /* GPUImageColourFASTSamplingOperation.h in Headers */,\n\t\t\t\tB462184A222047B600EE2876 /* GPUImageRGBFilter.h in Headers */,\n\t\t\t\tB46216CF2220477200EE2876 /* GPUImageToneCurveFilter.h in Headers */,\n\t\t\t\tB46216E72220477200EE2876 /* GPUImageThresholdSketchFilter.h in Headers */,\n\t\t\t\tB46217C62220479F00EE2876 /* GPUImageErosionFilter.h in Headers */,\n\t\t\t\tB46217E92220479F00EE2876 /* GPUImageCrosshairGenerator.h in Headers */,\n\t\t\t\tB46217D72220479F00EE2876 /* GPUImagePixellatePositionFilter.h in Headers */,\n\t\t\t\tB46216DE2220477200EE2876 /* GPUImageUIElement.h in Headers */,\n\t\t\t\tB462165F2220474D00EE2876 /* GPUImageAdaptiveThresholdFilter.h in Headers */,\n\t\t\t\tB4621881222047C400EE2876 /* GPUImageStillCamera.h in Headers */,\n\t\t\t\tB46217A52220479F00EE2876 /* GPUImageMultiplyBlendFilter.h in Headers 
*/,\n\t\t\t\tB46216582220474D00EE2876 /* GPUImageBoxBlurFilter.h in Headers */,\n\t\t\t\tB46217DA2220479F00EE2876 /* GPUImageLuminanceThresholdFilter.h in Headers */,\n\t\t\t\tB46217E12220479F00EE2876 /* GPUImageKuwaharaRadius3Filter.h in Headers */,\n\t\t\t\tB46213412220436300EE2876 /* GPUImageMovieWriter.h in Headers */,\n\t\t\t\tB46217BB2220479F00EE2876 /* GPUImageGrayscaleFilter.h in Headers */,\n\t\t\t\tB462178B2220479F00EE2876 /* GPUImageLevelsFilter.h in Headers */,\n\t\t\t\tB46217D02220479F00EE2876 /* GPUImagePinchDistortionFilter.h in Headers */,\n\t\t\t\tB46216E32220477200EE2876 /* GPUImageTiltShiftFilter.h in Headers */,\n\t\t\t\tB46216E62220477200EE2876 /* GPUImageUnsharpMaskFilter.h in Headers */,\n\t\t\t\tB462165D2220474D00EE2876 /* GPUImageAlphaBlendFilter.h in Headers */,\n\t\t\t\tB46217FA2220479F00EE2876 /* GPUImageGlassSphereFilter.h in Headers */,\n\t\t\t\tB4621623222046DF00EE2876 /* GPUImageFourInputFilter.h in Headers */,\n\t\t\t\tB46217B32220479F00EE2876 /* GPUImageHardLightBlendFilter.h in Headers */,\n\t\t\t\tB4621860222047B600EE2876 /* GPUImageRawDataOutput.h in Headers */,\n\t\t\t\tB46217A42220479F00EE2876 /* GPUImageDirectionalNonMaximumSuppressionFilter.h in Headers */,\n\t\t\t\tB46216D22220477200EE2876 /* GPUImageTransformFilter.h in Headers */,\n\t\t\t\tB4621882222047C400EE2876 /* GPUImageSoftLightBlendFilter.h in Headers */,\n\t\t\t\tB46217D42220479F00EE2876 /* GPUImageGaussianBlurFilter.h in Headers */,\n\t\t\t\tB46217C82220479F00EE2876 /* GPUImageOverlayBlendFilter.h in Headers */,\n\t\t\t\tB46217A72220479F00EE2876 /* GPUImageHighlightShadowTintFilter.h in Headers */,\n\t\t\t\tB46217A62220479F00EE2876 /* GPUImageOpeningFilter.h in Headers */,\n\t\t\t\tB46216EA2220477200EE2876 /* GPUImageVoronoiConsumerFilter.h in Headers */,\n\t\t\t\tB4621848222047B600EE2876 /* GPUImageSketchFilter.h in Headers */,\n\t\t\t\tB46216982220475E00EE2876 /* GPUImageColorBurnBlendFilter.h in Headers */,\n\t\t\t\tB4621864222047B600EE2876 /* GPUImageRGBDilationFilter.h in Headers */,\n\t\t\t\tB46216A42220475E00EE2876 /* GPUImageColourFASTFeatureDetector.h in Headers */,\n\t\t\t\tB462166C2220474D00EE2876 /* GPUImage3x3TextureSamplingFilter.h in Headers */,\n\t\t\t\tB4621854222047B600EE2876 /* GPUImageRawDataInput.h in Headers */,\n\t\t\t\tB462185A222047B600EE2876 /* GPUImageSharpenFilter.h in Headers */,\n\t\t\t\tB462169B2220475E00EE2876 /* GPUImageColorPackingFilter.h in Headers */,\n\t\t\t\tB46216662220474D00EE2876 /* GPUImageBulgeDistortionFilter.h in Headers */,\n\t\t\t\tB46216CE2220477200EE2876 /* GPUImageXYDerivativeFilter.h in Headers */,\n\t\t\t\tB462188A222047C400EE2876 /* GPUImageSolidColorGenerator.h in Headers */,\n\t\t\t\tB462180F2220479F00EE2876 /* GPUImagePerlinNoiseFilter.h in Headers */,\n\t\t\t\tB4621859222047B600EE2876 /* GPUImageSaturationFilter.h in Headers */,\n\t\t\t\tB462162F222046DF00EE2876 /* GPUImageTextureInput.h in Headers */,\n\t\t\t\tB462185C222047B600EE2876 /* GPUImageShiTomasiFeatureDetectionFilter.h in Headers */,\n\t\t\t\tB46217D92220479F00EE2876 /* GPUImageHighlightShadowFilter.h in Headers */,\n\t\t\t\tB46216A12220475E00EE2876 /* GPUImageColorInvertFilter.h in Headers */,\n\t\t\t\tB4621850222047B600EE2876 /* GPUImageSaturationBlendFilter.h in Headers */,\n\t\t\t\tB46217FB2220479F00EE2876 /* GPUImageiOSBlurFilter.h in Headers */,\n\t\t\t\tB462165C2220474D00EE2876 /* GPUImageAverageLuminanceThresholdFilter.h in Headers */,\n\t\t\t\tB4621868222047B600EE2876 /* GPUImageScreenBlendFilter.h in Headers */,\n\t\t\t\tB46217EE2220479F00EE2876 /* 
GPUImagePixellateFilter.h in Headers */,\n\t\t\t\tB4621889222047C400EE2876 /* GPUImageSoftEleganceFilter.h in Headers */,\n\t\t\t\tB462161D222046DF00EE2876 /* GLProgram.h in Headers */,\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n/* End PBXHeadersBuildPhase section */\n\n/* Begin PBXNativeTarget section */\n\t\tB4620D3F22203FBE00EE2876 /* GPURenderKit */ = {\n\t\t\tisa = PBXNativeTarget;\n\t\t\tbuildConfigurationList = B4620D5422203FBE00EE2876 /* Build configuration list for PBXNativeTarget \"GPURenderKit\" */;\n\t\t\tbuildPhases = (\n\t\t\t\tB4620D3B22203FBE00EE2876 /* Headers */,\n\t\t\t\tB4620D3C22203FBE00EE2876 /* Sources */,\n\t\t\t\tB4620D3D22203FBE00EE2876 /* Frameworks */,\n\t\t\t\tB4620D3E22203FBE00EE2876 /* Resources */,\n\t\t\t);\n\t\t\tbuildRules = (\n\t\t\t);\n\t\t\tdependencies = (\n\t\t\t);\n\t\t\tname = GPURenderKit;\n\t\t\tproductName = GPURenderKit;\n\t\t\tproductReference = B4620D4022203FBE00EE2876 /* GPURenderKit.framework */;\n\t\t\tproductType = \"com.apple.product-type.framework\";\n\t\t};\n\t\tB4620D4822203FBE00EE2876 /* GPURenderKitTests */ = {\n\t\t\tisa = PBXNativeTarget;\n\t\t\tbuildConfigurationList = B4620D5722203FBE00EE2876 /* Build configuration list for PBXNativeTarget \"GPURenderKitTests\" */;\n\t\t\tbuildPhases = (\n\t\t\t\tB4620D4522203FBE00EE2876 /* Sources */,\n\t\t\t\tB4620D4622203FBE00EE2876 /* Frameworks */,\n\t\t\t\tB4620D4722203FBE00EE2876 /* Resources */,\n\t\t\t);\n\t\t\tbuildRules = (\n\t\t\t);\n\t\t\tdependencies = (\n\t\t\t\tB4620D4C22203FBE00EE2876 /* PBXTargetDependency */,\n\t\t\t);\n\t\t\tname = GPURenderKitTests;\n\t\t\tproductName = GPURenderKitTests;\n\t\t\tproductReference = B4620D4922203FBE00EE2876 /* GPURenderKitTests.xctest */;\n\t\t\tproductType = \"com.apple.product-type.bundle.unit-test\";\n\t\t};\n/* End PBXNativeTarget section */\n\n/* Begin PBXProject section */\n\t\tB4620D3722203FBE00EE2876 /* Project object */ = {\n\t\t\tisa = PBXProject;\n\t\t\tattributes = {\n\t\t\t\tLastUpgradeCheck = 1010;\n\t\t\t\tORGANIZATIONNAME = \"刘海东\";\n\t\t\t\tTargetAttributes = {\n\t\t\t\t\tB4620D3F22203FBE00EE2876 = {\n\t\t\t\t\t\tCreatedOnToolsVersion = 10.1;\n\t\t\t\t\t};\n\t\t\t\t\tB4620D4822203FBE00EE2876 = {\n\t\t\t\t\t\tCreatedOnToolsVersion = 10.1;\n\t\t\t\t\t};\n\t\t\t\t};\n\t\t\t};\n\t\t\tbuildConfigurationList = B4620D3A22203FBE00EE2876 /* Build configuration list for PBXProject \"GPURenderKit\" */;\n\t\t\tcompatibilityVersion = \"Xcode 9.3\";\n\t\t\tdevelopmentRegion = en;\n\t\t\thasScannedForEncodings = 0;\n\t\t\tknownRegions = (\n\t\t\t\ten,\n\t\t\t);\n\t\t\tmainGroup = B4620D3622203FBE00EE2876;\n\t\t\tproductRefGroup = B4620D4122203FBE00EE2876 /* Products */;\n\t\t\tprojectDirPath = \"\";\n\t\t\tprojectRoot = \"\";\n\t\t\ttargets = (\n\t\t\t\tB4620D3F22203FBE00EE2876 /* GPURenderKit */,\n\t\t\t\tB4620D4822203FBE00EE2876 /* GPURenderKitTests */,\n\t\t\t);\n\t\t};\n/* End PBXProject section */\n\n/* Begin PBXResourcesBuildPhase section */\n\t\tB4620D3E22203FBE00EE2876 /* Resources */ = {\n\t\t\tisa = PBXResourcesBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = (\n\t\t\t\tB462133C2220436300EE2876 /* Info.plist in Resources */,\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n\t\tB4620D4722203FBE00EE2876 /* Resources */ = {\n\t\t\tisa = PBXResourcesBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = (\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n/* End PBXResourcesBuildPhase section */\n\n/* Begin PBXSourcesBuildPhase section 
*/\n\t\tB4620D3C22203FBE00EE2876 /* Sources */ = {\n\t\t\tisa = PBXSourcesBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = (\n\t\t\t\tB46217D82220479F00EE2876 /* GPUImageLocalBinaryPatternFilter.m in Sources */,\n\t\t\t\tB462162A222046DF00EE2876 /* GPUImageVideoCamera.m in Sources */,\n\t\t\t\tB46217F92220479F00EE2876 /* GPUImagePoissonBlendFilter.m in Sources */,\n\t\t\t\tB46217EC2220479F00EE2876 /* GPUImageHighlightShadowTintFilter.m in Sources */,\n\t\t\t\tB42EC08722520B6F00226FA0 /* GLImageFourPointsMirrorFilter.m in Sources */,\n\t\t\t\tB46216D32220477200EE2876 /* GPUImageThresholdedNonMaximumSuppressionFilter.m in Sources */,\n\t\t\t\tB46216A52220475E00EE2876 /* GPUImageColourFASTSamplingOperation.m in Sources */,\n\t\t\t\tB462161B222046DF00EE2876 /* GPUImageTwoInputFilter.m in Sources */,\n\t\t\t\tB46216D92220477200EE2876 /* GPUImageToneCurveFilter.m in Sources */,\n\t\t\t\tB4621888222047C400EE2876 /* GPUImageSoftEleganceFilter.m in Sources */,\n\t\t\t\tB46217D22220479F00EE2876 /* GPUImageNormalBlendFilter.m in Sources */,\n\t\t\t\tB4621894222047C400EE2876 /* GPUImageSolidColorGenerator.m in Sources */,\n\t\t\t\tB46216ED2220477200EE2876 /* GPUImageTransformFilter.m in Sources */,\n\t\t\t\tB46217BA2220479F00EE2876 /* GPUImageLinearBurnBlendFilter.m in Sources */,\n\t\t\t\tB46218002220479F00EE2876 /* GPUImagePinchDistortionFilter.m in Sources */,\n\t\t\t\tB4A9FE8A2265CFDA00BC6E51 /* GLImageFaceChangeFilter.m in Sources */,\n\t\t\t\tB47A5EED22212D57006E07A4 /* GLImageLutFilter.m in Sources */,\n\t\t\t\tB46217902220479F00EE2876 /* GPUImageGlassSphereFilter.m in Sources */,\n\t\t\t\tB462133A2220436300EE2876 /* GPUImageMovieWriter.m in Sources */,\n\t\t\t\tB47A5EF322212EA3006E07A4 /* GPUImageBeautifyFilter.m in Sources */,\n\t\t\t\tB46217EF2220479F00EE2876 /* GPUImageDilationFilter.m in Sources */,\n\t\t\t\tB46217CB2220479F00EE2876 /* GPUImageLevelsFilter.m in Sources */,\n\t\t\t\tB462165E2220474D00EE2876 /* GPUImageAverageLuminanceThresholdFilter.m in Sources */,\n\t\t\t\tB4621844222047B600EE2876 /* GPUImageRawDataOutput.m in Sources */,\n\t\t\t\tB462188C222047C400EE2876 /* GPUImageSobelEdgeDetectionFilter.m in Sources */,\n\t\t\t\tB46216682220474D00EE2876 /* GPUImageBilateralFilter.m in Sources */,\n\t\t\t\tB46217D32220479F00EE2876 /* GPUImagePixellateFilter.m in Sources */,\n\t\t\t\tB46217CD2220479F00EE2876 /* GPUImageFASTCornerDetectionFilter.m in Sources */,\n\t\t\t\tB46216CC2220477200EE2876 /* GPUImageThresholdSketchFilter.m in Sources */,\n\t\t\t\tB462180E2220479F00EE2876 /* GPUImageDirectionalNonMaximumSuppressionFilter.m in Sources */,\n\t\t\t\tB4621866222047B600EE2876 /* GPUImageRGBClosingFilter.m in Sources */,\n\t\t\t\tB4621883222047C400EE2876 /* GPUImageSourceOverBlendFilter.m in Sources */,\n\t\t\t\tB46218032220479F00EE2876 /* GPUImageHarrisCornerDetectionFilter.m in Sources */,\n\t\t\t\tB46217F32220479F00EE2876 /* GPUImageHistogramGenerator.m in Sources */,\n\t\t\t\tB46216CD2220477200EE2876 /* GPUImageVibranceFilter.m in Sources */,\n\t\t\t\tB46216932220475E00EE2876 /* GPUImageColorPackingFilter.m in Sources */,\n\t\t\t\tB46217ED2220479F00EE2876 /* GPUImageLuminanceRangeFilter.m in Sources */,\n\t\t\t\tB4621638222046DF00EE2876 /* GPUImageFourInputFilter.m in Sources */,\n\t\t\t\tB4621617222046DF00EE2876 /* GPUImageTwoPassFilter.m in Sources */,\n\t\t\t\tB462181C2220479F00EE2876 /* GPUImageDissolveBlendFilter.m in Sources */,\n\t\t\t\tB4A9FE992266409F00BC6E51 /* GLImageSoulOutFilter.m in Sources */,\n\t\t\t\tB4621630222046DF00EE2876 /* GPUImageBuffer.m in 
Sources */,\n\t\t\t\tB462133D2220436300EE2876 /* GPUImagePicture.m in Sources */,\n\t\t\t\tB4DAAC48225724AB0094A3B2 /* GPUImageMovieWriterFix.m in Sources */,\n\t\t\t\tB46216E92220477200EE2876 /* GPUImageThresholdEdgeDetectionFilter.m in Sources */,\n\t\t\t\tB46217FE2220479F00EE2876 /* GPUImageHighPassFilter.m in Sources */,\n\t\t\t\tB4621858222047B600EE2876 /* GPUImageSkinToneFilter.m in Sources */,\n\t\t\t\tB46216562220474D00EE2876 /* GPUImageBulgeDistortionFilter.m in Sources */,\n\t\t\t\tB46213402220436300EE2876 /* GPUImageView.m in Sources */,\n\t\t\t\tB462133E2220436300EE2876 /* GPUImagePicture+TextureSubimage.m in Sources */,\n\t\t\t\tB46217E02220479F00EE2876 /* GPUImageGaussianBlurPositionFilter.m in Sources */,\n\t\t\t\tB4621898222048E500EE2876 /* GPUImageThreeInputFilter.m in Sources */,\n\t\t\t\tB4D9F70D226A10EE00FD18FC /* GLImageBlurSnapViewFilter.m in Sources */,\n\t\t\t\tB4621886222047C400EE2876 /* GPUImageSphereRefractionFilter.m in Sources */,\n\t\t\t\tB44ADD0D222668DD00F17956 /* GLImageStickerFilter.m in Sources */,\n\t\t\t\tB4621869222047B600EE2876 /* GPUImagePrewittEdgeDetectionFilter.m in Sources */,\n\t\t\t\tB46217BD2220479F00EE2876 /* GPUImageLaplacianFilter.m in Sources */,\n\t\t\t\tB46216D62220477200EE2876 /* GPUImageUIElement.m in Sources */,\n\t\t\t\tB46217AA2220479F00EE2876 /* GPUImageHSBFilter.m in Sources */,\n\t\t\t\tB4DAAC47225724AB0094A3B2 /* GLImageMovie.m in Sources */,\n\t\t\t\tB46217D52220479F00EE2876 /* GPUImagePerlinNoiseFilter.m in Sources */,\n\t\t\t\tB4621862222047B600EE2876 /* GPUImageSaturationBlendFilter.m in Sources */,\n\t\t\t\tB46218042220479F00EE2876 /* GPUImageDirectionalSobelEdgeDetectionFilter.m in Sources */,\n\t\t\t\tB462161A222046DF00EE2876 /* GPUImageTwoPassTextureSamplingFilter.m in Sources */,\n\t\t\t\tB42D8D62223F8A5600517EA7 /* GLImageCircleFilter.m in Sources */,\n\t\t\t\tB4621853222047B600EE2876 /* GPUImageSmoothToonFilter.m in Sources */,\n\t\t\t\tB462178F2220479F00EE2876 /* GPUImageLookupFilter.m in Sources */,\n\t\t\t\tB462166B2220474D00EE2876 /* GPUImage3x3TextureSamplingFilter.m in Sources */,\n\t\t\t\tB436FECC2266DA2E009C5A0E /* GLImageWaterReflectionFilter.m in Sources */,\n\t\t\t\tB46217A02220479F00EE2876 /* GPUImageEmbossFilter.m in Sources */,\n\t\t\t\tB49EF6342263521C00C220B0 /* GLImageGlitchEffectLineFilter.m in Sources */,\n\t\t\t\tB46218012220479F00EE2876 /* GPUImageDivideBlendFilter.m in Sources */,\n\t\t\t\tB46218022220479F00EE2876 /* GPUImageLuminosity.m in Sources */,\n\t\t\t\tB46217D12220479F00EE2876 /* GPUImageOpacityFilter.m in Sources */,\n\t\t\t\tB46217F82220479F00EE2876 /* GPUImageNobleCornerDetectionFilter.m in Sources */,\n\t\t\t\tB4621634222046DF00EE2876 /* GPUImageTextureOutput.m in Sources */,\n\t\t\t\tB46217DC2220479F00EE2876 /* GPUImagePixellatePositionFilter.m in Sources */,\n\t\t\t\tB46217B52220479F00EE2876 /* GPUImageLanczosResamplingFilter.m in Sources */,\n\t\t\t\tB4621845222047B600EE2876 /* GPUImageRGBFilter.m in Sources */,\n\t\t\t\tB46218182220479F00EE2876 /* GPUImageMultiplyBlendFilter.m in Sources */,\n\t\t\t\tB462184C222047B600EE2876 /* GPUImageSketchFilter.m in Sources */,\n\t\t\t\tB46217862220479F00EE2876 /* GPUImageHistogramFilter.m in Sources */,\n\t\t\t\tB462168D2220475E00EE2876 /* GPUImageColourFASTFeatureDetector.m in Sources */,\n\t\t\t\tB49EF6332263521C00C220B0 /* GLImageGlitchEffectGridFilter.m in Sources */,\n\t\t\t\tB462178A2220479F00EE2876 /* GPUImageHalftoneFilter.m in Sources */,\n\t\t\t\tB46217E22220479F00EE2876 /* GPUImageKuwaharaRadius3Filter.m in Sources 
*/,\n\t\t\t\tB46217D62220479F00EE2876 /* GPUImageContrastFilter.m in Sources */,\n\t\t\t\tB44ADD1A222669AB00F17956 /* GLImageGassianBlurMixFilter.m in Sources */,\n\t\t\t\tB46217992220479F00EE2876 /* GPUImageLowPassFilter.m in Sources */,\n\t\t\t\tB46216622220474D00EE2876 /* GPUImageCannyEdgeDetectionFilter.m in Sources */,\n\t\t\t\tB46216692220474D00EE2876 /* GPUImageBrightnessFilter.m in Sources */,\n\t\t\t\tB46217E52220479F00EE2876 /* GPUImageHazeFilter.m in Sources */,\n\t\t\t\tB4621618222046DF00EE2876 /* GPUImageFilter.m in Sources */,\n\t\t\t\tB462180D2220479F00EE2876 /* GPUImageParallelCoordinateLineTransformFilter.m in Sources */,\n\t\t\t\tB46216CB2220477200EE2876 /* GPUImageTwoInputCrossTextureSamplingFilter.m in Sources */,\n\t\t\t\tB46216A02220475E00EE2876 /* GPUImageColorBlendFilter.m in Sources */,\n\t\t\t\tB4621620222046DF00EE2876 /* GPUImageMovieComposition.m in Sources */,\n\t\t\t\tB46217C02220479F00EE2876 /* GPUImageFalseColorFilter.m in Sources */,\n\t\t\t\tB462168B2220475E00EE2876 /* GPUImageCGAColorspaceFilter.m in Sources */,\n\t\t\t\tB4621865222047B600EE2876 /* GPUImageSharpenFilter.m in Sources */,\n\t\t\t\tB46216DA2220477200EE2876 /* GPUImageZoomBlurFilter.m in Sources */,\n\t\t\t\tB46217B92220479F00EE2876 /* GPUImageCropFilter.m in Sources */,\n\t\t\t\tB4621635222046DF00EE2876 /* GPUImageFilterPipeline.m in Sources */,\n\t\t\t\tB46217AE2220479F00EE2876 /* GPUImageDifferenceBlendFilter.m in Sources */,\n\t\t\t\tB462169F2220475E00EE2876 /* GPUImageColorMatrixFilter.m in Sources */,\n\t\t\t\tB4621849222047B600EE2876 /* GPUImageRGBDilationFilter.m in Sources */,\n\t\t\t\tB4621890222047C400EE2876 /* GPUImageSoftLightBlendFilter.m in Sources */,\n\t\t\t\tB4621884222047C400EE2876 /* GPUImageStillCamera.m in Sources */,\n\t\t\t\tB462169A2220475E00EE2876 /* GPUImageColorDodgeBlendFilter.m in Sources */,\n\t\t\t\tB4621625222046DF00EE2876 /* GPUImageOutput.m in Sources */,\n\t\t\t\tB46217972220479F00EE2876 /* GPUImageHoughTransformLineDetector.m in Sources */,\n\t\t\t\tB42EC08522520B6F00226FA0 /* GLImageThreePartitionGroupFilter.m in Sources */,\n\t\t\t\tB46217932220479F00EE2876 /* GPUImageiOSBlurFilter.m in Sources */,\n\t\t\t\tB44ADD0A222668DD00F17956 /* GLImageAddStickerWithEffectFilter.m in Sources */,\n\t\t\t\tB462166D2220474D00EE2876 /* GPUImageAmatorkaFilter.m in Sources */,\n\t\t\t\tB462179E2220479F00EE2876 /* GPUImageHistogramEqualizationFilter.m in Sources */,\n\t\t\t\tB46217F42220479F00EE2876 /* GPUImageMissEtikateFilter.m in Sources */,\n\t\t\t\tB46217B42220479F00EE2876 /* GPUImageKuwaharaFilter.m in Sources */,\n\t\t\t\tB46217C22220479F00EE2876 /* GPUImageCrosshairGenerator.m in Sources */,\n\t\t\t\tB46218102220479F00EE2876 /* GPUImageMosaicFilter.m in Sources */,\n\t\t\t\tB462165B2220474D00EE2876 /* GPUImageBoxBlurFilter.m in Sources */,\n\t\t\t\tB462162C222046DF00EE2876 /* GPUImageFramebuffer.m in Sources */,\n\t\t\t\tB46216E12220477200EE2876 /* GPUImageVignetteFilter.m in Sources */,\n\t\t\t\tB462180C2220479F00EE2876 /* GPUImageGaussianBlurFilter.m in Sources */,\n\t\t\t\tB462179A2220479F00EE2876 /* GPUImagePolkaDotFilter.m in Sources */,\n\t\t\t\tB4621863222047B600EE2876 /* GPUImageRGBOpeningFilter.m in Sources */,\n\t\t\t\tB462168C2220475E00EE2876 /* GPUImageColorInvertFilter.m in Sources */,\n\t\t\t\tB462184B222047B600EE2876 /* GPUImageScreenBlendFilter.m in Sources */,\n\t\t\t\tB46217CE2220479F00EE2876 /* GPUImageOverlayBlendFilter.m in Sources */,\n\t\t\t\tB462168F2220475E00EE2876 /* GPUImageColorConversion.m in Sources 
*/,\n\t\t\t\tB4621861222047B600EE2876 /* GPUImageSepiaFilter.m in Sources */,\n\t\t\t\tB46213392220436300EE2876 /* GPUImageContext.m in Sources */,\n\t\t\t\tB46217BF2220479F00EE2876 /* GPUImageGammaFilter.m in Sources */,\n\t\t\t\tB46217AD2220479F00EE2876 /* GPUImageMotionBlurFilter.m in Sources */,\n\t\t\t\tB462184E222047B600EE2876 /* GPUImagePosterizeFilter.m in Sources */,\n\t\t\t\tB46217E82220479F00EE2876 /* GPUImagePolarPixellateFilter.m in Sources */,\n\t\t\t\tB41FE41C226A260F008DF295 /* GLImageBlurSnapViewFilterGroup.m in Sources */,\n\t\t\t\tB46216542220474D00EE2876 /* GPUImage3x3ConvolutionFilter.m in Sources */,\n\t\t\t\tB462188D222047C400EE2876 /* GPUImageSwirlFilter.m in Sources */,\n\t\t\t\tB42EC08822520B6F00226FA0 /* GLImageThreePartitionFilter.m in Sources */,\n\t\t\t\tB46217EA2220479F00EE2876 /* GPUImageLuminosityBlendFilter.m in Sources */,\n\t\t\t\tB46216632220474D00EE2876 /* GPUImageAlphaBlendFilter.m in Sources */,\n\t\t\t\tB462181A2220479F00EE2876 /* GPUImageMaskFilter.m in Sources */,\n\t\t\t\tB462168A2220475E00EE2876 /* GPUImageClosingFilter.m in Sources */,\n\t\t\t\tB46217C32220479F00EE2876 /* GPUImageLineGenerator.m in Sources */,\n\t\t\t\tB46217F52220479F00EE2876 /* GPUImageHighlightShadowFilter.m in Sources */,\n\t\t\t\tB46216952220475E00EE2876 /* GPUImageChromaKeyBlendFilter.m in Sources */,\n\t\t\t\tB462178D2220479F00EE2876 /* GPUImageMonochromeFilter.m in Sources */,\n\t\t\t\tB46216972220475E00EE2876 /* GPUImageColorBurnBlendFilter.m in Sources */,\n\t\t\t\tB47A5EEC22212D57006E07A4 /* GLImageTwoLutFilter.m in Sources */,\n\t\t\t\tB46217B02220479F00EE2876 /* GPUImageHueFilter.m in Sources */,\n\t\t\t\tB462179D2220479F00EE2876 /* GPUImageGrayscaleFilter.m in Sources */,\n\t\t\t\tB46216672220474D00EE2876 /* GPUImageAdaptiveThresholdFilter.m in Sources */,\n\t\t\t\tB46217FD2220479F00EE2876 /* GPUImageExposureFilter.m in Sources */,\n\t\t\t\tB46216612220474D00EE2876 /* GPUImageAddBlendFilter.m in Sources */,\n\t\t\t\tB462188E222047C400EE2876 /* GPUImageStretchDistortionFilter.m in Sources */,\n\t\t\t\tB46216A32220475E00EE2876 /* GPUImageColorLocalBinaryPatternFilter.m in Sources */,\n\t\t\t\tB436FEC82266D28F009C5A0E /* GLImageZoomFilter.m in Sources */,\n\t\t\t\tB4621867222047B600EE2876 /* GPUImageShiTomasiFeatureDetectionFilter.m in Sources */,\n\t\t\t\tB46217BE2220479F00EE2876 /* GPUImageExclusionBlendFilter.m in Sources */,\n\t\t\t\tB46218082220479F00EE2876 /* GPUImageMotionDetector.m in Sources */,\n\t\t\t\tB46216D02220477200EE2876 /* GPUImageToonFilter.m in Sources */,\n\t\t\t\tB462184D222047B600EE2876 /* GPUImageSaturationFilter.m in Sources */,\n\t\t\t\tB462185D222047B600EE2876 /* GPUImageRGBErosionFilter.m in Sources */,\n\t\t\t\tB462185B222047B600EE2876 /* GPUImageRawDataInput.m in Sources */,\n\t\t\t\tB4C64C23227217C2001BB716 /* GLImageFaceChangeFilterGroup.m in Sources */,\n\t\t\t\tB46216602220474D00EE2876 /* GPUImageAverageColor.m in Sources */,\n\t\t\t\tB462179C2220479F00EE2876 /* GPUImageJFAVoronoiFilter.m in Sources */,\n\t\t\t\tB46216912220475E00EE2876 /* GPUImageChromaKeyFilter.m in Sources */,\n\t\t\t\tB46217A32220479F00EE2876 /* GPUImageHueBlendFilter.m in Sources */,\n\t\t\t\tB46216DB2220477200EE2876 /* GPUImageUnsharpMaskFilter.m in Sources */,\n\t\t\t\tB41317502277277300B4866F /* GLImageShapeFilter.m in Sources */,\n\t\t\t\tB44ADD0B222668DD00F17956 /* GLImageAddStickerFilter.m in Sources */,\n\t\t\t\tB4621624222046DF00EE2876 /* GPUImageFilterGroup.m in Sources */,\n\t\t\t\tB462188F222047C400EE2876 /* GPUImageSubtractBlendFilter.m in Sources 
*/,\n\t\t\t\tB46216D72220477200EE2876 /* GPUImageTiltShiftFilter.m in Sources */,\n\t\t\t\tB46217AF2220479F00EE2876 /* GPUImageCrosshatchFilter.m in Sources */,\n\t\t\t\tB46217DF2220479F00EE2876 /* GPUImageNonMaximumSuppressionFilter.m in Sources */,\n\t\t\t\tB46217C12220479F00EE2876 /* GPUImageDarkenBlendFilter.m in Sources */,\n\t\t\t\tB46216E82220477200EE2876 /* GPUImageVoronoiConsumerFilter.m in Sources */,\n\t\t\t\tB4621631222046DF00EE2876 /* GPUImageFramebufferCache.m in Sources */,\n\t\t\t\tB4621846222047B600EE2876 /* GPUImageSingleComponentGaussianBlurFilter.m in Sources */,\n\t\t\t\tB462179F2220479F00EE2876 /* GPUImageHardLightBlendFilter.m in Sources */,\n\t\t\t\tB46216EC2220477200EE2876 /* GPUImageWhiteBalanceFilter.m in Sources */,\n\t\t\t\tB46217FC2220479F00EE2876 /* GPUImageErosionFilter.m in Sources */,\n\t\t\t\tB46216D42220477200EE2876 /* GPUImageWeakPixelInclusionFilter.m in Sources */,\n\t\t\t\tB44ADD132226695200F17956 /* GLImageMixBlendFilter.m in Sources */,\n\t\t\t\tB462181D2220479F00EE2876 /* GPUImageOpeningFilter.m in Sources */,\n\t\t\t\tB4621885222047C400EE2876 /* GPUImageSolarizeFilter.m in Sources */,\n\t\t\t\tB462161C222046DF00EE2876 /* GPUImageMovie.m in Sources */,\n\t\t\t\tB4C64C1E2271FFF9001BB716 /* GLImageFaceDetectPointFilter.m in Sources */,\n\t\t\t\tB46218122220479F00EE2876 /* GPUImageGaussianSelectiveBlurFilter.m in Sources */,\n\t\t\t\tB46218092220479F00EE2876 /* GPUImageLuminanceThresholdFilter.m in Sources */,\n\t\t\t\tB46217EB2220479F00EE2876 /* GPUImageLightenBlendFilter.m in Sources */,\n\t\t\t\tB46217F12220479F00EE2876 /* GPUImageMedianFilter.m in Sources */,\n\t\t\t\tB4621621222046DF00EE2876 /* GLProgram.m in Sources */,\n\t\t\t\tB44ADD142226695200F17956 /* GLImageBlendFilter.m in Sources */,\n\t\t\t\tB4621622222046DF00EE2876 /* GPUImageTextureInput.m in Sources */,\n\t\t\t\tB46216DC2220477200EE2876 /* GPUImageXYDerivativeFilter.m in Sources */,\n\t\t\t\tB41317532277277300B4866F /* GLImageShapeHighDefinitionFilter.m in Sources */,\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n\t\tB4620D4522203FBE00EE2876 /* Sources */ = {\n\t\t\tisa = PBXSourcesBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = (\n\t\t\t\tB4620D4F22203FBE00EE2876 /* GPURenderKitTests.m in Sources */,\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n/* End PBXSourcesBuildPhase section */\n\n/* Begin PBXTargetDependency section */\n\t\tB4620D4C22203FBE00EE2876 /* PBXTargetDependency */ = {\n\t\t\tisa = PBXTargetDependency;\n\t\t\ttarget = B4620D3F22203FBE00EE2876 /* GPURenderKit */;\n\t\t\ttargetProxy = B4620D4B22203FBE00EE2876 /* PBXContainerItemProxy */;\n\t\t};\n/* End PBXTargetDependency section */\n\n/* Begin XCBuildConfiguration section */\n\t\tB4620D5222203FBE00EE2876 /* Debug */ = {\n\t\t\tisa = XCBuildConfiguration;\n\t\t\tbuildSettings = {\n\t\t\t\tALWAYS_SEARCH_USER_PATHS = NO;\n\t\t\t\tCLANG_ANALYZER_NONNULL = YES;\n\t\t\t\tCLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;\n\t\t\t\tCLANG_CXX_LANGUAGE_STANDARD = \"gnu++14\";\n\t\t\t\tCLANG_CXX_LIBRARY = \"libc++\";\n\t\t\t\tCLANG_ENABLE_MODULES = YES;\n\t\t\t\tCLANG_ENABLE_OBJC_ARC = YES;\n\t\t\t\tCLANG_ENABLE_OBJC_WEAK = YES;\n\t\t\t\tCLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;\n\t\t\t\tCLANG_WARN_BOOL_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_COMMA = YES;\n\t\t\t\tCLANG_WARN_CONSTANT_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;\n\t\t\t\tCLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;\n\t\t\t\tCLANG_WARN_DOCUMENTATION_COMMENTS = 
YES;\n\t\t\t\tCLANG_WARN_EMPTY_BODY = YES;\n\t\t\t\tCLANG_WARN_ENUM_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_INFINITE_RECURSION = YES;\n\t\t\t\tCLANG_WARN_INT_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;\n\t\t\t\tCLANG_WARN_OBJC_LITERAL_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;\n\t\t\t\tCLANG_WARN_RANGE_LOOP_ANALYSIS = YES;\n\t\t\t\tCLANG_WARN_STRICT_PROTOTYPES = YES;\n\t\t\t\tCLANG_WARN_SUSPICIOUS_MOVE = YES;\n\t\t\t\tCLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;\n\t\t\t\tCLANG_WARN_UNREACHABLE_CODE = YES;\n\t\t\t\tCLANG_WARN__DUPLICATE_METHOD_MATCH = YES;\n\t\t\t\tCODE_SIGN_IDENTITY = \"iPhone Developer\";\n\t\t\t\tCOPY_PHASE_STRIP = NO;\n\t\t\t\tCURRENT_PROJECT_VERSION = 1;\n\t\t\t\tDEBUG_INFORMATION_FORMAT = dwarf;\n\t\t\t\tENABLE_STRICT_OBJC_MSGSEND = YES;\n\t\t\t\tENABLE_TESTABILITY = YES;\n\t\t\t\tGCC_C_LANGUAGE_STANDARD = gnu11;\n\t\t\t\tGCC_DYNAMIC_NO_PIC = NO;\n\t\t\t\tGCC_NO_COMMON_BLOCKS = YES;\n\t\t\t\tGCC_OPTIMIZATION_LEVEL = 0;\n\t\t\t\tGCC_PREPROCESSOR_DEFINITIONS = (\n\t\t\t\t\t\"DEBUG=1\",\n\t\t\t\t\t\"$(inherited)\",\n\t\t\t\t);\n\t\t\t\tGCC_WARN_64_TO_32_BIT_CONVERSION = YES;\n\t\t\t\tGCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;\n\t\t\t\tGCC_WARN_UNDECLARED_SELECTOR = YES;\n\t\t\t\tGCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;\n\t\t\t\tGCC_WARN_UNUSED_FUNCTION = YES;\n\t\t\t\tGCC_WARN_UNUSED_VARIABLE = YES;\n\t\t\t\tIPHONEOS_DEPLOYMENT_TARGET = 12.1;\n\t\t\t\tMTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;\n\t\t\t\tMTL_FAST_MATH = YES;\n\t\t\t\tONLY_ACTIVE_ARCH = YES;\n\t\t\t\tSDKROOT = iphoneos;\n\t\t\t\tVERSIONING_SYSTEM = \"apple-generic\";\n\t\t\t\tVERSION_INFO_PREFIX = \"\";\n\t\t\t};\n\t\t\tname = Debug;\n\t\t};\n\t\tB4620D5322203FBE00EE2876 /* Release */ = {\n\t\t\tisa = XCBuildConfiguration;\n\t\t\tbuildSettings = {\n\t\t\t\tALWAYS_SEARCH_USER_PATHS = NO;\n\t\t\t\tCLANG_ANALYZER_NONNULL = YES;\n\t\t\t\tCLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;\n\t\t\t\tCLANG_CXX_LANGUAGE_STANDARD = \"gnu++14\";\n\t\t\t\tCLANG_CXX_LIBRARY = \"libc++\";\n\t\t\t\tCLANG_ENABLE_MODULES = YES;\n\t\t\t\tCLANG_ENABLE_OBJC_ARC = YES;\n\t\t\t\tCLANG_ENABLE_OBJC_WEAK = YES;\n\t\t\t\tCLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;\n\t\t\t\tCLANG_WARN_BOOL_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_COMMA = YES;\n\t\t\t\tCLANG_WARN_CONSTANT_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;\n\t\t\t\tCLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;\n\t\t\t\tCLANG_WARN_DOCUMENTATION_COMMENTS = YES;\n\t\t\t\tCLANG_WARN_EMPTY_BODY = YES;\n\t\t\t\tCLANG_WARN_ENUM_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_INFINITE_RECURSION = YES;\n\t\t\t\tCLANG_WARN_INT_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;\n\t\t\t\tCLANG_WARN_OBJC_LITERAL_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;\n\t\t\t\tCLANG_WARN_RANGE_LOOP_ANALYSIS = YES;\n\t\t\t\tCLANG_WARN_STRICT_PROTOTYPES = YES;\n\t\t\t\tCLANG_WARN_SUSPICIOUS_MOVE = YES;\n\t\t\t\tCLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;\n\t\t\t\tCLANG_WARN_UNREACHABLE_CODE = YES;\n\t\t\t\tCLANG_WARN__DUPLICATE_METHOD_MATCH = YES;\n\t\t\t\tCODE_SIGN_IDENTITY = \"iPhone Developer\";\n\t\t\t\tCOPY_PHASE_STRIP = NO;\n\t\t\t\tCURRENT_PROJECT_VERSION = 1;\n\t\t\t\tDEBUG_INFORMATION_FORMAT = \"dwarf-with-dsym\";\n\t\t\t\tENABLE_NS_ASSERTIONS = NO;\n\t\t\t\tENABLE_STRICT_OBJC_MSGSEND = YES;\n\t\t\t\tGCC_C_LANGUAGE_STANDARD = 
gnu11;\n\t\t\t\tGCC_NO_COMMON_BLOCKS = YES;\n\t\t\t\tGCC_WARN_64_TO_32_BIT_CONVERSION = YES;\n\t\t\t\tGCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;\n\t\t\t\tGCC_WARN_UNDECLARED_SELECTOR = YES;\n\t\t\t\tGCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;\n\t\t\t\tGCC_WARN_UNUSED_FUNCTION = YES;\n\t\t\t\tGCC_WARN_UNUSED_VARIABLE = YES;\n\t\t\t\tIPHONEOS_DEPLOYMENT_TARGET = 12.1;\n\t\t\t\tMTL_ENABLE_DEBUG_INFO = NO;\n\t\t\t\tMTL_FAST_MATH = YES;\n\t\t\t\tSDKROOT = iphoneos;\n\t\t\t\tVALIDATE_PRODUCT = YES;\n\t\t\t\tVERSIONING_SYSTEM = \"apple-generic\";\n\t\t\t\tVERSION_INFO_PREFIX = \"\";\n\t\t\t};\n\t\t\tname = Release;\n\t\t};\n\t\tB4620D5522203FBE00EE2876 /* Debug */ = {\n\t\t\tisa = XCBuildConfiguration;\n\t\t\tbuildSettings = {\n\t\t\t\tCLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = NO;\n\t\t\t\tCODE_SIGN_IDENTITY = \"iPhone Developer\";\n\t\t\t\tCODE_SIGN_STYLE = Automatic;\n\t\t\t\tDEAD_CODE_STRIPPING = NO;\n\t\t\t\tDEFINES_MODULE = YES;\n\t\t\t\tDEVELOPMENT_TEAM = A9426LDZX7;\n\t\t\t\tDYLIB_COMPATIBILITY_VERSION = 1;\n\t\t\t\tDYLIB_CURRENT_VERSION = 1;\n\t\t\t\tDYLIB_INSTALL_NAME_BASE = \"@rpath\";\n\t\t\t\tENABLE_BITCODE = NO;\n\t\t\t\tGCC_PRECOMPILE_PREFIX_HEADER = NO;\n\t\t\t\tGCC_PREFIX_HEADER = \"\";\n\t\t\t\tINFOPLIST_FILE = GPURenderKit/Info.plist;\n\t\t\t\tINSTALL_PATH = \"$(LOCAL_LIBRARY_DIR)/Frameworks\";\n\t\t\t\tIPHONEOS_DEPLOYMENT_TARGET = 9.0;\n\t\t\t\tLD_RUNPATH_SEARCH_PATHS = (\n\t\t\t\t\t\"$(inherited)\",\n\t\t\t\t\t\"@executable_path/Frameworks\",\n\t\t\t\t\t\"@loader_path/Frameworks\",\n\t\t\t\t);\n\t\t\t\tMACH_O_TYPE = staticlib;\n\t\t\t\tONLY_ACTIVE_ARCH = NO;\n\t\t\t\tPRODUCT_BUNDLE_IDENTIFIER = simon.GPURenderKit;\n\t\t\t\tPRODUCT_NAME = \"$(TARGET_NAME:c99extidentifier)\";\n\t\t\t\tSKIP_INSTALL = YES;\n\t\t\t\tTARGETED_DEVICE_FAMILY = \"1,2\";\n\t\t\t};\n\t\t\tname = Debug;\n\t\t};\n\t\tB4620D5622203FBE00EE2876 /* Release */ = {\n\t\t\tisa = XCBuildConfiguration;\n\t\t\tbuildSettings = {\n\t\t\t\tCLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = NO;\n\t\t\t\tCODE_SIGN_IDENTITY = \"\";\n\t\t\t\tCODE_SIGN_STYLE = Automatic;\n\t\t\t\tDEAD_CODE_STRIPPING = NO;\n\t\t\t\tDEFINES_MODULE = YES;\n\t\t\t\tDEVELOPMENT_TEAM = A9426LDZX7;\n\t\t\t\tDYLIB_COMPATIBILITY_VERSION = 1;\n\t\t\t\tDYLIB_CURRENT_VERSION = 1;\n\t\t\t\tDYLIB_INSTALL_NAME_BASE = \"@rpath\";\n\t\t\t\tENABLE_BITCODE = NO;\n\t\t\t\tGCC_OPTIMIZATION_LEVEL = 0;\n\t\t\t\tGCC_PRECOMPILE_PREFIX_HEADER = NO;\n\t\t\t\tGCC_PREFIX_HEADER = \"\";\n\t\t\t\tINFOPLIST_FILE = GPURenderKit/Info.plist;\n\t\t\t\tINSTALL_PATH = \"$(LOCAL_LIBRARY_DIR)/Frameworks\";\n\t\t\t\tIPHONEOS_DEPLOYMENT_TARGET = 9.0;\n\t\t\t\tLD_RUNPATH_SEARCH_PATHS = (\n\t\t\t\t\t\"$(inherited)\",\n\t\t\t\t\t\"@executable_path/Frameworks\",\n\t\t\t\t\t\"@loader_path/Frameworks\",\n\t\t\t\t);\n\t\t\t\tMACH_O_TYPE = staticlib;\n\t\t\t\tONLY_ACTIVE_ARCH = NO;\n\t\t\t\tPRODUCT_BUNDLE_IDENTIFIER = simon.GPURenderKit;\n\t\t\t\tPRODUCT_NAME = \"$(TARGET_NAME:c99extidentifier)\";\n\t\t\t\tSKIP_INSTALL = YES;\n\t\t\t\tTARGETED_DEVICE_FAMILY = \"1,2\";\n\t\t\t};\n\t\t\tname = Release;\n\t\t};\n\t\tB4620D5822203FBE00EE2876 /* Debug */ = {\n\t\t\tisa = XCBuildConfiguration;\n\t\t\tbuildSettings = {\n\t\t\t\tCODE_SIGN_STYLE = Automatic;\n\t\t\t\tINFOPLIST_FILE = GPURenderKitTests/Info.plist;\n\t\t\t\tLD_RUNPATH_SEARCH_PATHS = (\n\t\t\t\t\t\"$(inherited)\",\n\t\t\t\t\t\"@executable_path/Frameworks\",\n\t\t\t\t\t\"@loader_path/Frameworks\",\n\t\t\t\t);\n\t\t\t\tPRODUCT_BUNDLE_IDENTIFIER = simon.GPURenderKitTests;\n\t\t\t\tPRODUCT_NAME = \"$(TARGET_NAME)\";\n\t\t\t\tTARGETED_DEVICE_FAMILY = 
\"1,2\";\n\t\t\t};\n\t\t\tname = Debug;\n\t\t};\n\t\tB4620D5922203FBE00EE2876 /* Release */ = {\n\t\t\tisa = XCBuildConfiguration;\n\t\t\tbuildSettings = {\n\t\t\t\tCODE_SIGN_STYLE = Automatic;\n\t\t\t\tINFOPLIST_FILE = GPURenderKitTests/Info.plist;\n\t\t\t\tLD_RUNPATH_SEARCH_PATHS = (\n\t\t\t\t\t\"$(inherited)\",\n\t\t\t\t\t\"@executable_path/Frameworks\",\n\t\t\t\t\t\"@loader_path/Frameworks\",\n\t\t\t\t);\n\t\t\t\tPRODUCT_BUNDLE_IDENTIFIER = simon.GPURenderKitTests;\n\t\t\t\tPRODUCT_NAME = \"$(TARGET_NAME)\";\n\t\t\t\tTARGETED_DEVICE_FAMILY = \"1,2\";\n\t\t\t};\n\t\t\tname = Release;\n\t\t};\n/* End XCBuildConfiguration section */\n\n/* Begin XCConfigurationList section */\n\t\tB4620D3A22203FBE00EE2876 /* Build configuration list for PBXProject \"GPURenderKit\" */ = {\n\t\t\tisa = XCConfigurationList;\n\t\t\tbuildConfigurations = (\n\t\t\t\tB4620D5222203FBE00EE2876 /* Debug */,\n\t\t\t\tB4620D5322203FBE00EE2876 /* Release */,\n\t\t\t);\n\t\t\tdefaultConfigurationIsVisible = 0;\n\t\t\tdefaultConfigurationName = Release;\n\t\t};\n\t\tB4620D5422203FBE00EE2876 /* Build configuration list for PBXNativeTarget \"GPURenderKit\" */ = {\n\t\t\tisa = XCConfigurationList;\n\t\t\tbuildConfigurations = (\n\t\t\t\tB4620D5522203FBE00EE2876 /* Debug */,\n\t\t\t\tB4620D5622203FBE00EE2876 /* Release */,\n\t\t\t);\n\t\t\tdefaultConfigurationIsVisible = 0;\n\t\t\tdefaultConfigurationName = Release;\n\t\t};\n\t\tB4620D5722203FBE00EE2876 /* Build configuration list for PBXNativeTarget \"GPURenderKitTests\" */ = {\n\t\t\tisa = XCConfigurationList;\n\t\t\tbuildConfigurations = (\n\t\t\t\tB4620D5822203FBE00EE2876 /* Debug */,\n\t\t\t\tB4620D5922203FBE00EE2876 /* Release */,\n\t\t\t);\n\t\t\tdefaultConfigurationIsVisible = 0;\n\t\t\tdefaultConfigurationName = Release;\n\t\t};\n/* End XCConfigurationList section */\n\t};\n\trootObject = B4620D3722203FBE00EE2876 /* Project object */;\n}\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit.xcodeproj/project.xcworkspace/contents.xcworkspacedata",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<Workspace\n   version = \"1.0\">\n   <FileRef\n      location = \"self:GPURenderKit.xcodeproj\">\n   </FileRef>\n</Workspace>\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n<plist version=\"1.0\">\n<dict>\n\t<key>BuildSystemType</key>\n\t<string>Original</string>\n</dict>\n</plist>\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit.xcodeproj/project.xcworkspace/xcuserdata/liuhaidong.xcuserdatad/WorkspaceSettings.xcsettings",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n<plist version=\"1.0\">\n<dict>\n\t<key>BuildLocationStyle</key>\n\t<string>UseAppPreferences</string>\n\t<key>CustomBuildLocationType</key>\n\t<string>RelativeToDerivedData</string>\n\t<key>DerivedDataLocationStyle</key>\n\t<string>Default</string>\n\t<key>EnabledFullIndexStoreVisibility</key>\n\t<false/>\n\t<key>IssueFilterStyle</key>\n\t<string>ShowActiveSchemeOnly</string>\n\t<key>LiveSourceIssuesEnabled</key>\n\t<true/>\n</dict>\n</plist>\n"
  },
  {
    "path": "GPURenderKit/GPURenderKit.xcodeproj/xcshareddata/xcschemes/GPURenderKit.xcscheme",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<Scheme\n   LastUpgradeVersion = \"1010\"\n   version = \"1.3\">\n   <BuildAction\n      parallelizeBuildables = \"YES\"\n      buildImplicitDependencies = \"YES\">\n      <BuildActionEntries>\n         <BuildActionEntry\n            buildForTesting = \"YES\"\n            buildForRunning = \"YES\"\n            buildForProfiling = \"YES\"\n            buildForArchiving = \"YES\"\n            buildForAnalyzing = \"YES\">\n            <BuildableReference\n               BuildableIdentifier = \"primary\"\n               BlueprintIdentifier = \"B4620D3F22203FBE00EE2876\"\n               BuildableName = \"GPURenderKit.framework\"\n               BlueprintName = \"GPURenderKit\"\n               ReferencedContainer = \"container:GPURenderKit.xcodeproj\">\n            </BuildableReference>\n         </BuildActionEntry>\n      </BuildActionEntries>\n   </BuildAction>\n   <TestAction\n      buildConfiguration = \"Debug\"\n      selectedDebuggerIdentifier = \"Xcode.DebuggerFoundation.Debugger.LLDB\"\n      selectedLauncherIdentifier = \"Xcode.DebuggerFoundation.Launcher.LLDB\"\n      shouldUseLaunchSchemeArgsEnv = \"YES\">\n      <Testables>\n         <TestableReference\n            skipped = \"NO\">\n            <BuildableReference\n               BuildableIdentifier = \"primary\"\n               BlueprintIdentifier = \"B4620D4822203FBE00EE2876\"\n               BuildableName = \"GPURenderKitTests.xctest\"\n               BlueprintName = \"GPURenderKitTests\"\n               ReferencedContainer = \"container:GPURenderKit.xcodeproj\">\n            </BuildableReference>\n         </TestableReference>\n      </Testables>\n      <MacroExpansion>\n         <BuildableReference\n            BuildableIdentifier = \"primary\"\n            BlueprintIdentifier = \"B4620D3F22203FBE00EE2876\"\n            BuildableName = \"GPURenderKit.framework\"\n            BlueprintName = \"GPURenderKit\"\n            ReferencedContainer = \"container:GPURenderKit.xcodeproj\">\n         </BuildableReference>\n      </MacroExpansion>\n      <AdditionalOptions>\n      </AdditionalOptions>\n   </TestAction>\n   <LaunchAction\n      buildConfiguration = \"Debug\"\n      selectedDebuggerIdentifier = \"Xcode.DebuggerFoundation.Debugger.LLDB\"\n      selectedLauncherIdentifier = \"Xcode.DebuggerFoundation.Launcher.LLDB\"\n      launchStyle = \"0\"\n      useCustomWorkingDirectory = \"NO\"\n      ignoresPersistentStateOnLaunch = \"NO\"\n      debugDocumentVersioning = \"YES\"\n      debugServiceExtension = \"internal\"\n      allowLocationSimulation = \"YES\">\n      <MacroExpansion>\n         <BuildableReference\n            BuildableIdentifier = \"primary\"\n            BlueprintIdentifier = \"B4620D3F22203FBE00EE2876\"\n            BuildableName = \"GPURenderKit.framework\"\n            BlueprintName = \"GPURenderKit\"\n            ReferencedContainer = \"container:GPURenderKit.xcodeproj\">\n         </BuildableReference>\n      </MacroExpansion>\n      <AdditionalOptions>\n      </AdditionalOptions>\n   </LaunchAction>\n   <ProfileAction\n      buildConfiguration = \"Release\"\n      shouldUseLaunchSchemeArgsEnv = \"YES\"\n      savedToolIdentifier = \"\"\n      useCustomWorkingDirectory = \"NO\"\n      debugDocumentVersioning = \"YES\">\n      <MacroExpansion>\n         <BuildableReference\n            BuildableIdentifier = \"primary\"\n            BlueprintIdentifier = \"B4620D3F22203FBE00EE2876\"\n            BuildableName = \"GPURenderKit.framework\"\n            
BlueprintName = \"GPURenderKit\"\n            ReferencedContainer = \"container:GPURenderKit.xcodeproj\">\n         </BuildableReference>\n      </MacroExpansion>\n   </ProfileAction>\n   <AnalyzeAction\n      buildConfiguration = \"Debug\">\n   </AnalyzeAction>\n   <ArchiveAction\n      buildConfiguration = \"Release\"\n      revealArchiveInOrganizer = \"YES\">\n   </ArchiveAction>\n</Scheme>\n"
  },
  {
    "path": "GPURenderKit/GPURenderKitTests/GPURenderKitTests.m",
    "content": "//\n//  GPURenderKitTests.m\n//  GPURenderKitTests\n//\n//  Created by 刘海东 on 2019/2/22.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import <XCTest/XCTest.h>\n\n@interface GPURenderKitTests : XCTestCase\n\n@end\n\n@implementation GPURenderKitTests\n\n- (void)setUp {\n    // Put setup code here. This method is called before the invocation of each test method in the class.\n}\n\n- (void)tearDown {\n    // Put teardown code here. This method is called after the invocation of each test method in the class.\n}\n\n- (void)testExample {\n    // This is an example of a functional test case.\n    // Use XCTAssert and related functions to verify your tests produce the correct results.\n}\n\n- (void)testPerformanceExample {\n    // This is an example of a performance test case.\n    [self measureBlock:^{\n        // Put the code you want to measure the time of here.\n    }];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKit/GPURenderKitTests/Info.plist",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n<plist version=\"1.0\">\n<dict>\n\t<key>CFBundleDevelopmentRegion</key>\n\t<string>$(DEVELOPMENT_LANGUAGE)</string>\n\t<key>CFBundleExecutable</key>\n\t<string>$(EXECUTABLE_NAME)</string>\n\t<key>CFBundleIdentifier</key>\n\t<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>\n\t<key>CFBundleInfoDictionaryVersion</key>\n\t<string>6.0</string>\n\t<key>CFBundleName</key>\n\t<string>$(PRODUCT_NAME)</string>\n\t<key>CFBundlePackageType</key>\n\t<string>BNDL</string>\n\t<key>CFBundleShortVersionString</key>\n\t<string>1.0</string>\n\t<key>CFBundleVersion</key>\n\t<string>1</string>\n</dict>\n</plist>\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/AppDelegate.h",
    "content": "//\n//  AppDelegate.h\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/2/22.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n\n@interface AppDelegate : UIResponder <UIApplicationDelegate>\n\n@property (strong, nonatomic) UIWindow *window;\n\n\n@end\n\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/AppDelegate.m",
    "content": "//\n//  AppDelegate.m\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/2/22.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import \"AppDelegate.h\"\n#import \"MainViewController.h\"\n\n@interface AppDelegate ()\n\n@end\n\n@implementation AppDelegate\n\n\n- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {\n    self.window = [[UIWindow alloc] initWithFrame:[UIScreen mainScreen].bounds];\n    self.window.backgroundColor = [UIColor whiteColor];\n    [self.window makeKeyAndVisible];\n    MainViewController *vc = [[MainViewController alloc] init];\n    [self.window setRootViewController:[[UINavigationController alloc] initWithRootViewController:vc]];\n\n    return YES;\n}\n\n\n- (void)applicationWillResignActive:(UIApplication *)application {\n    // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.\n    // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game.\n}\n\n\n- (void)applicationDidEnterBackground:(UIApplication *)application {\n    // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.\n    // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.\n}\n\n\n- (void)applicationWillEnterForeground:(UIApplication *)application {\n    // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background.\n}\n\n\n- (void)applicationDidBecomeActive:(UIApplication *)application {\n    // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.\n}\n\n\n- (void)applicationWillTerminate:(UIApplication *)application {\n    // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Assets.xcassets/AppIcon.appiconset/Contents.json",
    "content": "{\n  \"images\" : [\n    {\n      \"idiom\" : \"iphone\",\n      \"size\" : \"20x20\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"idiom\" : \"iphone\",\n      \"size\" : \"20x20\",\n      \"scale\" : \"3x\"\n    },\n    {\n      \"size\" : \"29x29\",\n      \"idiom\" : \"iphone\",\n      \"filename\" : \"Icon-Small.png\",\n      \"scale\" : \"1x\"\n    },\n    {\n      \"size\" : \"29x29\",\n      \"idiom\" : \"iphone\",\n      \"filename\" : \"Icon-Small@2x.png\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"size\" : \"29x29\",\n      \"idiom\" : \"iphone\",\n      \"filename\" : \"Icon-Small@3x.png\",\n      \"scale\" : \"3x\"\n    },\n    {\n      \"size\" : \"40x40\",\n      \"idiom\" : \"iphone\",\n      \"filename\" : \"Icon-Spotlight-40@2x.png\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"size\" : \"40x40\",\n      \"idiom\" : \"iphone\",\n      \"filename\" : \"Icon-Spotlight-40@3x.png\",\n      \"scale\" : \"3x\"\n    },\n    {\n      \"size\" : \"57x57\",\n      \"idiom\" : \"iphone\",\n      \"filename\" : \"Icon.png\",\n      \"scale\" : \"1x\"\n    },\n    {\n      \"size\" : \"57x57\",\n      \"idiom\" : \"iphone\",\n      \"filename\" : \"Icon@2x.png\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"size\" : \"60x60\",\n      \"idiom\" : \"iphone\",\n      \"filename\" : \"Icon-60@2x.png\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"size\" : \"60x60\",\n      \"idiom\" : \"iphone\",\n      \"filename\" : \"Icon-60@3x.png\",\n      \"scale\" : \"3x\"\n    },\n    {\n      \"idiom\" : \"ipad\",\n      \"size\" : \"20x20\",\n      \"scale\" : \"1x\"\n    },\n    {\n      \"idiom\" : \"ipad\",\n      \"size\" : \"20x20\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"idiom\" : \"ipad\",\n      \"size\" : \"29x29\",\n      \"scale\" : \"1x\"\n    },\n    {\n      \"idiom\" : \"ipad\",\n      \"size\" : \"29x29\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"idiom\" : \"ipad\",\n      \"size\" : \"40x40\",\n      \"scale\" : \"1x\"\n    },\n    {\n      \"idiom\" : \"ipad\",\n      \"size\" : \"40x40\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"idiom\" : \"ipad\",\n      \"size\" : \"76x76\",\n      \"scale\" : \"1x\"\n    },\n    {\n      \"idiom\" : \"ipad\",\n      \"size\" : \"76x76\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"idiom\" : \"ipad\",\n      \"size\" : \"83.5x83.5\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"idiom\" : \"ios-marketing\",\n      \"size\" : \"1024x1024\",\n      \"scale\" : \"1x\"\n    }\n  ],\n  \"info\" : {\n    \"version\" : 1,\n    \"author\" : \"xcode\"\n  }\n}"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Assets.xcassets/Contents.json",
    "content": "{\n  \"info\" : {\n    \"version\" : 1,\n    \"author\" : \"xcode\"\n  }\n}"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Assets.xcassets/EditorLut/Contents.json",
    "content": "{\n  \"info\" : {\n    \"version\" : 1,\n    \"author\" : \"xcode\"\n  }\n}"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Assets.xcassets/EditorLut/exposure_n.imageset/Contents.json",
    "content": "{\n  \"images\" : [\n    {\n      \"idiom\" : \"universal\",\n      \"filename\" : \"曝光-.jpeg\",\n      \"scale\" : \"1x\"\n    },\n    {\n      \"idiom\" : \"universal\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"idiom\" : \"universal\",\n      \"scale\" : \"3x\"\n    }\n  ],\n  \"info\" : {\n    \"version\" : 1,\n    \"author\" : \"xcode\"\n  }\n}"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Assets.xcassets/EditorLut/gaoya.imageset/Contents.json",
    "content": "{\n  \"images\" : [\n    {\n      \"idiom\" : \"universal\",\n      \"filename\" : \"gaoya.png\",\n      \"scale\" : \"1x\"\n    },\n    {\n      \"idiom\" : \"universal\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"idiom\" : \"universal\",\n      \"scale\" : \"3x\"\n    }\n  ],\n  \"info\" : {\n    \"version\" : 1,\n    \"author\" : \"xcode\"\n  }\n}"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Assets.xcassets/EditorLut/heibai.imageset/Contents.json",
    "content": "{\n  \"images\" : [\n    {\n      \"idiom\" : \"universal\",\n      \"filename\" : \"heibai.png\",\n      \"scale\" : \"1x\"\n    },\n    {\n      \"idiom\" : \"universal\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"idiom\" : \"universal\",\n      \"scale\" : \"3x\"\n    }\n  ],\n  \"info\" : {\n    \"version\" : 1,\n    \"author\" : \"xcode\"\n  }\n}"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Assets.xcassets/EditorLut/jingdu.imageset/Contents.json",
    "content": "{\n  \"images\" : [\n    {\n      \"idiom\" : \"universal\",\n      \"filename\" : \"jingdu.png\",\n      \"scale\" : \"1x\"\n    },\n    {\n      \"idiom\" : \"universal\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"idiom\" : \"universal\",\n      \"scale\" : \"3x\"\n    }\n  ],\n  \"info\" : {\n    \"version\" : 1,\n    \"author\" : \"xcode\"\n  }\n}"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Assets.xcassets/EditorLut/meishi.imageset/Contents.json",
    "content": "{\n  \"images\" : [\n    {\n      \"idiom\" : \"universal\",\n      \"filename\" : \"meishi.png\",\n      \"scale\" : \"1x\"\n    },\n    {\n      \"idiom\" : \"universal\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"idiom\" : \"universal\",\n      \"scale\" : \"3x\"\n    }\n  ],\n  \"info\" : {\n    \"version\" : 1,\n    \"author\" : \"xcode\"\n  }\n}"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Assets.xcassets/EditorLut/xiatian.imageset/Contents.json",
    "content": "{\n  \"images\" : [\n    {\n      \"idiom\" : \"universal\",\n      \"filename\" : \"xiatian.png\",\n      \"scale\" : \"1x\"\n    },\n    {\n      \"idiom\" : \"universal\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"idiom\" : \"universal\",\n      \"scale\" : \"3x\"\n    }\n  ],\n  \"info\" : {\n    \"version\" : 1,\n    \"author\" : \"xcode\"\n  }\n}"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Assets.xcassets/bunny.imageset/Contents.json",
    "content": "{\n  \"images\" : [\n    {\n      \"idiom\" : \"universal\",\n      \"filename\" : \"bunny.png\",\n      \"scale\" : \"1x\"\n    },\n    {\n      \"idiom\" : \"universal\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"idiom\" : \"universal\",\n      \"scale\" : \"3x\"\n    }\n  ],\n  \"info\" : {\n    \"version\" : 1,\n    \"author\" : \"xcode\"\n  }\n}"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Assets.xcassets/edit_beautify_rise_move.imageset/Contents.json",
    "content": "{\n  \"images\" : [\n    {\n      \"idiom\" : \"universal\",\n      \"scale\" : \"1x\"\n    },\n    {\n      \"idiom\" : \"universal\",\n      \"filename\" : \"edit_beautify_rise_move@2x.png\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"idiom\" : \"universal\",\n      \"filename\" : \"edit_beautify_rise_move@3x.png\",\n      \"scale\" : \"3x\"\n    }\n  ],\n  \"info\" : {\n    \"version\" : 1,\n    \"author\" : \"xcode\"\n  }\n}"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Assets.xcassets/edit_beautify_slim_move.imageset/Contents.json",
    "content": "{\n  \"images\" : [\n    {\n      \"idiom\" : \"universal\",\n      \"scale\" : \"1x\"\n    },\n    {\n      \"idiom\" : \"universal\",\n      \"filename\" : \"edit_beautify_slim_move@2x.png\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"idiom\" : \"universal\",\n      \"filename\" : \"edit_beautify_slim_move@3x.png\",\n      \"scale\" : \"3x\"\n    }\n  ],\n  \"info\" : {\n    \"version\" : 1,\n    \"author\" : \"xcode\"\n  }\n}"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Assets.xcassets/flower.imageset/Contents.json",
    "content": "{\n  \"images\" : [\n    {\n      \"idiom\" : \"universal\",\n      \"filename\" : \"Pink Lotus Flower.jpg\",\n      \"scale\" : \"1x\"\n    },\n    {\n      \"idiom\" : \"universal\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"idiom\" : \"universal\",\n      \"scale\" : \"3x\"\n    }\n  ],\n  \"info\" : {\n    \"version\" : 1,\n    \"author\" : \"xcode\"\n  }\n}"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Assets.xcassets/rotate.imageset/Contents.json",
    "content": "{\n  \"images\" : [\n    {\n      \"idiom\" : \"universal\",\n      \"scale\" : \"1x\"\n    },\n    {\n      \"idiom\" : \"universal\",\n      \"filename\" : \"rotate@2x.png\",\n      \"scale\" : \"2x\"\n    },\n    {\n      \"idiom\" : \"universal\",\n      \"scale\" : \"3x\"\n    }\n  ],\n  \"info\" : {\n    \"version\" : 1,\n    \"author\" : \"xcode\"\n  }\n}"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Base.lproj/LaunchScreen.storyboard",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n<document type=\"com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB\" version=\"3.0\" toolsVersion=\"13122.16\" targetRuntime=\"iOS.CocoaTouch\" propertyAccessControl=\"none\" useAutolayout=\"YES\" launchScreen=\"YES\" useTraitCollections=\"YES\" useSafeAreas=\"YES\" colorMatched=\"YES\" initialViewController=\"01J-lp-oVM\">\n    <dependencies>\n        <plugIn identifier=\"com.apple.InterfaceBuilder.IBCocoaTouchPlugin\" version=\"13104.12\"/>\n        <capability name=\"Safe area layout guides\" minToolsVersion=\"9.0\"/>\n        <capability name=\"documents saved in the Xcode 8 format\" minToolsVersion=\"8.0\"/>\n    </dependencies>\n    <scenes>\n        <!--View Controller-->\n        <scene sceneID=\"EHf-IW-A2E\">\n            <objects>\n                <viewController id=\"01J-lp-oVM\" sceneMemberID=\"viewController\">\n                    <view key=\"view\" contentMode=\"scaleToFill\" id=\"Ze5-6b-2t3\">\n                        <rect key=\"frame\" x=\"0.0\" y=\"0.0\" width=\"375\" height=\"667\"/>\n                        <autoresizingMask key=\"autoresizingMask\" widthSizable=\"YES\" heightSizable=\"YES\"/>\n                        <color key=\"backgroundColor\" red=\"1\" green=\"1\" blue=\"1\" alpha=\"1\" colorSpace=\"custom\" customColorSpace=\"sRGB\"/>\n                        <viewLayoutGuide key=\"safeArea\" id=\"6Tk-OE-BBY\"/>\n                    </view>\n                </viewController>\n                <placeholder placeholderIdentifier=\"IBFirstResponder\" id=\"iYj-Kq-Ea1\" userLabel=\"First Responder\" sceneMemberID=\"firstResponder\"/>\n            </objects>\n            <point key=\"canvasLocation\" x=\"53\" y=\"375\"/>\n        </scene>\n    </scenes>\n</document>\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/BaseViewController.h",
    "content": "//\n//  BaseViewController.h\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/2/23.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import <GPURenderKit/GPURenderKit.h>\n\nNS_ASSUME_NONNULL_BEGIN\n\n@interface BaseViewController : UIViewController\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/BaseViewController.m",
    "content": "//\n//  BaseViewController.m\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/2/23.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import \"BaseViewController.h\"\n\n@interface BaseViewController ()\n\n@end\n\n@implementation BaseViewController\n\n- (void)viewDidLoad {\n    [super viewDidLoad];\n    self.view.backgroundColor = [UIColor whiteColor];\n}\n- (void)dealloc\n{\n    NSLog(@\"dealloc---->%@\",[self class]);\n}\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DDShapeViewController/DDShapeViewController.h",
    "content": "//\n//  DDShapeViewController.h\n//  WEOpenGLDemo\n//\n//  Created by 刘海东 on 2018/6/14.\n//  Copyright © 2018年 Leo. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import \"BaseViewController.h\"\n@interface DDShapeViewController : BaseViewController\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DDShapeViewController/DDShapeViewController.m",
    "content": "//\n//  ViewController.m\n//  OpenGLStretchDemo\n//\n//  Created by 刘海东 on 2018/5/30.\n//  Copyright © 2018年 刘海东. All rights reserved.\n//\n\n#define kSW [UIScreen mainScreen].bounds.size.width\n#define kSH [UIScreen mainScreen].bounds.size.height\n#define kSelfSize self.frame.size\n\n\n#import \"DDShapeViewController.h\"\n#import \"DDGLShapeView.h\"\n#import \"DDGLShapeSelView.h\"\n#import \"DDGLShapeView.h\"\n#import <Photos/Photos.h>\n#import \"ATRiseViewController.h\"\n@interface DDShapeViewController ()<UIImagePickerControllerDelegate,UINavigationControllerDelegate>\n\n@property (nonatomic, strong) UIImage *image;\n@property (nonatomic, assign) DDGLShapeViewType type;\n\n@end\n\n@implementation DDShapeViewController\n\n- (void)viewDidLoad {\n    [super viewDidLoad];\n    \n    self.view.backgroundColor = [UIColor whiteColor];\n    \n    NSArray *array = [NSArray arrayWithObjects:@\"长图\",@\"宽图\",@\"方图\",nil];\n    // Initialize the UISegmentedControl\n    UISegmentedControl *segment = [[UISegmentedControl alloc]initWithItems:array];\n    // Set the frame\n    segment.frame = CGRectMake(10, 100, self.view.frame.size.width-20, 30);\n    [segment addTarget:self action:@selector(change:) forControlEvents:UIControlEventValueChanged];\n    segment.selectedSegmentIndex = 0;\n    // Add to the view\n    [self.view addSubview:segment];\n    \n    \n    NSArray *array2 = [NSArray arrayWithObjects:@\"增高\",@\"瘦身\",nil];\n    // Initialize the UISegmentedControl\n    UISegmentedControl *segment2 = [[UISegmentedControl alloc]initWithItems:array2];\n    // Set the frame\n    segment2.frame = CGRectMake(10, 200, self.view.frame.size.width-20, 30);\n    [segment2 addTarget:self action:@selector(changeFunc:) forControlEvents:UIControlEventValueChanged];\n    segment2.selectedSegmentIndex = 0;\n    // Add to the view\n    [self.view addSubview:segment2];\n    \n    \n    UIButton *btn = [UIButton buttonWithType:UIButtonTypeSystem];\n    [btn setTitle:@\"跳转\" forState:UIControlStateNormal];\n    btn.frame = CGRectMake(0, self.view.frame.size.height - 300, self.view.frame.size.width, 50);\n    [btn addTarget:self action:@selector(jumpAction:) forControlEvents:UIControlEventTouchUpInside];\n    [self.view addSubview:btn];\n    \n    \n    \n    \n    \n    self.image = [UIImage imageNamed:@\"长图.JPG\"];\n    self.type = 0;\n    \n    \n}\n\n- (void)change:(UISegmentedControl *)seg\n{\n    NSString* filePath = nil;\n    switch (seg.selectedSegmentIndex) {\n        case 0:\n        {\n            filePath = @\"长图.JPG\";\n        }\n            break;\n        case 1:\n        {\n            filePath = @\"宽图.JPG\";\n        }\n            break;\n        case 2:\n        {\n            filePath = @\"方图600*600.JPG\";\n        }\n            break;\n        case 3:\n        {\n            filePath = @\"4032*3024.JPG\";\n        }\n            break;\n            \n        default:\n            filePath = @\"长图.JPG\";\n            break;\n    }\n    self.image = [UIImage imageNamed:filePath];\n}\n\n- (void)changeFunc:(UISegmentedControl *)seg\n{\n    self.type = seg.selectedSegmentIndex;\n}\n\n- (void)jumpAction:(UIButton *)btn\n{\n    \n    ATRiseViewController *vc = [[ATRiseViewController alloc]init];\n    vc.type = self.type;\n    vc.previewImage = self.image;\n    [self presentViewController:vc animated:YES completion:nil];\n    \n}\n\n\n\n\n- (void)didReceiveMemoryWarning {\n    [super didReceiveMemoryWarning];\n}\n\n\n@end\n\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DDShapeViewController/Rise/Controller/ATRiseViewController.h",
    "content": "//\n//  ATRiseViewController.h\n//  Artist\n//\n//  Created by huangjinwen on 2018/6/21.\n//  Copyright © 2018年 wecut. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import \"DDGLShapingView.h\"\n\n@interface ATRiseViewController : UIViewController\n\n@property (nonatomic, strong) UIImage *previewImage;\n@property (nonatomic, assign) DDGLShapeViewType type;\n\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DDShapeViewController/Rise/Controller/ATRiseViewController.m",
    "content": "//\n//  ATRiseViewController.m\n//  Artist\n//\n//  Created by huangjinwen on 2018/6/21.\n//  Copyright © 2018年 wecut. All rights reserved.\n//\n\n#import \"ATRiseViewController.h\"\n#import \"ATRiseMenuView.h\"\n#import <GPURenderKit/GPURenderKit.h>\n\n@interface ATRiseViewController ()<DDGLShapingViewDelegate>\n@property (nonatomic, strong) DDGLShapingView *glShapingView;\n@property (nonatomic, assign) DDGLNormValueRange rangeValue;\n@property (nonatomic, strong) UISlider *slider;\n@end\n\n@implementation ATRiseViewController\n\n- (void)viewDidLoad {\n    [super viewDidLoad];\n    DDGLNormValueRange range = {0,0};\n    range.max = 0.5;\n    range.min = 0.2;\n    _rangeValue = range;\n    self.view.backgroundColor = [UIColor blackColor];\n    \n    \n    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{\n        [self configUI];\n    });\n}\n\n\n- (void)configUI\n{\n    UIButton *backBtn = [UIButton buttonWithType:UIButtonTypeCustom];\n    [backBtn setTitle:@\"返回\" forState:UIControlStateNormal];\n    backBtn.frame = CGRectMake(0, 50, 100, 50);\n    [backBtn addTarget:self action:@selector(backAction) forControlEvents:UIControlEventTouchUpInside];\n    [self.view addSubview:backBtn];\n    \n    \n    UISlider *slider = [[UISlider alloc]initWithFrame:CGRectMake(20, kScreen_H - 100, kScreen_W - 40, 50)];\n    [self.view addSubview:slider];\n    slider.minimumValue = 0.0;\n    slider.maximumValue = 1.0;\n    [slider addTarget:self action:@selector(valueChange:) forControlEvents:UIControlEventValueChanged];\n    self.slider = slider;\n    \n    \n    UIButton *saveBtn = [UIButton buttonWithType:UIButtonTypeCustom];\n    [saveBtn setTitle:@\"保存\" forState:UIControlStateNormal];\n    saveBtn.frame = CGRectMake(kScreen_W-100, 50, 100, 50);\n    [saveBtn addTarget:self action:@selector(saveAction) forControlEvents:UIControlEventTouchUpInside];\n    [self.view addSubview:saveBtn];\n\n}\n\n- (void)viewWillAppear:(BOOL)animated\n{\n    [super viewWillAppear:animated];\n}\n\n- (DDGLShapingView *)glShapingView\n{\n    if (!_glShapingView) {\n        float h = kScreen_H - kATRiseMenuView_h - SafeTopMargin - SafeBottomMargin - 50;\n        _glShapingView = [[DDGLShapingView alloc]initWithFrame:CGRectMake(0, SafeTopMargin + 50, kScreen_W, h) type:_type image:self.previewImage];\n        _glShapingView.delegate = self;\n        [self.view addSubview:_glShapingView];\n        [_glShapingView changeRange:self.rangeValue];\n    }\n    return _glShapingView;\n}\n\n- (void)shapingViewSwiping\n{\n    self.slider.value = 0.0;\n    [self.glShapingView changeValue:self.slider.value];\n    \n}\n- (void)shapingViewGetVertexArray:(NSArray *)vertexArray textureCoordinateArray:(NSArray *)textureCoordinateArray changeValue:(float)changeValue type:(DDGLShapeViewType)type\n{\n    \n}\n- (void)shapingViewSwipEndMaxValue:(float)max minValue:(float)min\n{\n    _rangeValue.max = max;\n    _rangeValue.min = min;\n    [_glShapingView changeRange:self.rangeValue];\n    \n}\n\n\n\n- (void)setType:(DDGLShapeViewType)type\n{\n    _type = type;\n}\n\n- (void)setPreviewImage:(UIImage *)previewImage\n{\n    _previewImage = previewImage;\n    [self glShapingView];\n}\n\n- (void)didReceiveMemoryWarning {\n    [super didReceiveMemoryWarning];\n}\n\n- (void)dealloc\n{\n    [[GPUImageContext sharedImageProcessingContext].framebufferCache purgeAllUnassignedFramebuffers];\n    NSLog(@\"ATRiseViewController-- 增高或瘦身 dealloc\");\n}\n\n- (void)backAction\n{\n    [self 
dismissViewControllerAnimated:YES completion:nil];\n}\n\n- (void)saveAction\n{\n    UIImage *ima = [self.glShapingView getProcessImage];\n    UIImageView *imageView = [[UIImageView alloc]initWithImage:ima];\n    float h = ((kScreen_W-100)/ima.size.width) *ima.size.height;\n    imageView.frame = CGRectMake(50, 200,  kScreen_W-100, h);\n    [self.view addSubview:imageView];\n    \n    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(5.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{\n        [imageView removeFromSuperview];\n    });\n    \n}\n\n\n- (void)valueChange:(UISlider *)slider\n{\n    [self.glShapingView changeValue:slider.value];\n}\n\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DDShapeViewController/Rise/View/ATRiseMenuView.h",
    "content": "//\n//  ATRiseMenuView.h\n//  Artist\n//\n//  Created by 刘海东 on 2018/6/21.\n//  Copyright © 2018年 wecut. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n\ntypedef NS_ENUM(NSInteger,ATRiseMenuViewActionType) {\n    \n    /** Close */\n    ATRiseMenuViewActionType_Close = 0,\n    /** Course tutorial */\n    ATRiseMenuViewActionType_Course,\n    /** Confirm */\n    ATRiseMenuViewActionType_Enter,\n    \n};\n\n@interface ATRiseMenuView : UIView\n\n\n@property (nonatomic, copy) void (^valueDidChangeHandler)(float value);\n@property (nonatomic, copy) void (^touchBeginHandler)(float value);\n@property (nonatomic, copy) void (^touchEndHandler)(float value);\n\n/** Tap event */\n@property (nonatomic, copy) void (^clickActionHandler)(ATRiseMenuViewActionType type);\n\n- (void)hideHelpButton:(BOOL)state;\n\n- (float)getValue;\n\n- (void)setValue:(float)value;\n\n- (void)setTitle:(NSString *)tit;\n\n- (void)minmimValue:(float)value;\n- (void)maxmimValue:(float)value;\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DDShapeViewController/Rise/View/ATRiseMenuView.m",
    "content": "//\n//  ATRiseMenuView.m\n//  Artist\n//\n//  Created by 刘海东 on 2018/6/21.\n//  Copyright © 2018年 wecut. All rights reserved.\n//\n\n#import \"ATRiseMenuView.h\"\n#import \"ATSelectBarView.h\"\n#import \"ATSliderView.h\"\n#define kSliderView_h 26\n#define kSelectBarView_h 48\n\n@interface ATRiseMenuView ()\n@property (nonatomic, strong) ATSelectBarView *selectBarView;\n@property (nonatomic, strong) ATSliderView *sliderView;\n\n@end\n\n@implementation ATRiseMenuView\n\n- (instancetype)initWithFrame:(CGRect)frame\n{\n    self = [super initWithFrame:frame];\n    if (self) {\n        \n        self.backgroundColor = [UIColor blackColor];\n        [self selectBarView];\n        [self sliderView];\n    }\n    return self;\n}\n\n\n- (ATSelectBarView *)selectBarView\n{\n    if (!_selectBarView) {\n        _selectBarView = [[ATSelectBarView alloc]initWithFrame:CGRectMake(0, self.frame.size.height-kSelectBarView_h, kScreen_W, kSelectBarView_h)];\n        _selectBarView.hideHelpButton = NO;\n        kWeakeSelf;\n        _selectBarView.selectBlock = ^(NSInteger index) {\n            kStrongSelf;\n            if (strongSelf.clickActionHandler) {\n                strongSelf.clickActionHandler(index);\n            }\n        };\n        \n        \n        \n        [self addSubview:_selectBarView];\n    }\n    return _selectBarView;\n}\n\n- (ATSliderView *)sliderView\n{\n    if (!_sliderView) {\n        \n        _sliderView = [[ATSliderView alloc]initWithFrame:CGRectMake(20, 47, kScreen_W - 40, kSliderView_h)];\n        [_sliderView configBigFollowView];\n        [self addSubview:_sliderView];\n        kWeakeSelf;\n        \n        _sliderView.valueDidChangeHandler = ^(float value) {\n            kStrongSelf;\n            strongSelf.sliderView.topLabValue = [NSString stringWithFormat:@\"%.0f\",value*100];\n            if (strongSelf.valueDidChangeHandler) {\n                strongSelf.valueDidChangeHandler(value);\n            }\n        };\n        \n        _sliderView.touchBeginHandler = ^(float value) {\n            kStrongSelf;\n            strongSelf.sliderView.topLabValue = [NSString stringWithFormat:@\"%.0f\",value*100];\n            if (strongSelf.touchBeginHandler) {\n                strongSelf.touchBeginHandler(value);\n            }\n        };\n        \n        _sliderView.touchEndHandler = ^(float value) {\n            kStrongSelf;\n            strongSelf.sliderView.topLabValue = [NSString stringWithFormat:@\"%.0f\",value*100];\n            if (strongSelf.touchEndHandler) {\n                strongSelf.touchEndHandler(value);\n            }\n        };\n        \n    }\n    return _sliderView;\n}\n\n- (float)getValue\n{\n    return self.sliderView.value;\n}\n\n\n- (void)setValue:(float)value\n{\n    self.sliderView.value = value;\n    \n}\n\n- (void)setTitle:(NSString *)tit\n{\n    self.selectBarView.title = tit;\n    \n}\n\n- (void)minmimValue:(float)value\n{\n    self.sliderView.minmimValue = value;\n}\n- (void)maxmimValue:(float)value\n{\n    self.sliderView.maxmimValue = value;\n}\n\n- (void)hideHelpButton:(BOOL)state\n{\n    _selectBarView.hideHelpButton = state;\n}\n\n\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DDShapeViewController/Rise/View/ATSelectBarView.h",
    "content": "//\n//  ATSelectBarView.h\n//  Artist\n//\n//  Created by huangjinwen on 2018/6/21.\n//  Copyright © 2018年 wecut. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n\n// Called with 0 when the left button is tapped, 1 for the middle button, and 2 for the right button\ntypedef void(^SelectBlock)(NSInteger index);\n\n@interface ATSelectBarView : UIView\n\n@property (nonatomic, copy) NSString *title;\n@property (nonatomic, copy) SelectBlock selectBlock;\n// Whether to hide the middle help button\n@property (nonatomic, assign) BOOL hideHelpButton;\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DDShapeViewController/Rise/View/ATSelectBarView.m",
    "content": "//\n//  ATSelectBarView.m\n//  Artist\n//\n//  Created by huangjinwen on 2018/6/21.\n//  Copyright © 2018年 wecut. All rights reserved.\n//\n\n#import \"ATSelectBarView.h\"\n#import \"UIView+Xib.h\"\n\n@interface ATSelectBarView ()\n@property (weak, nonatomic) IBOutlet UILabel *titleLabel;\n@property (weak, nonatomic) IBOutlet UIButton *cancelButton;\n@property (weak, nonatomic) IBOutlet UIButton *okButton;\n@property (weak, nonatomic) IBOutlet UIButton *helpButton;\n@end\n\n\n@implementation ATSelectBarView\n\n\n- (void)awakeFromNib\n{\n    [super awakeFromNib];\n    // Load the xib with the same name as this class and add it to self\n    [self setupSelfNameXibOnSelf];\n}\n\n- (instancetype)initWithFrame:(CGRect)frame\n{\n    self = [super initWithFrame:frame];\n    if (self) {\n        UIView *view = [[[NSBundle mainBundle] loadNibNamed:NSStringFromClass(self.class) owner:self options:nil] firstObject];\n        self.frame = frame;\n        view.frame = CGRectMake(0, 0, CGRectGetWidth(frame), CGRectGetHeight(frame));\n        [self addSubview:view];\n        \n    }\n    return self;\n}\n\n\n#pragma mark - setter\n\n-(void)setTitle:(NSString *)title {\n    _title = title;\n    self.titleLabel.text = title;\n}\n\n\n#pragma mark - button event\n\n- (IBAction)buttonAction:(UIButton *)sender {\n    if (self.selectBlock) {\n        if (self.cancelButton == sender) {\n            self.selectBlock(0);\n        } else if (self.okButton == sender) {\n            self.selectBlock(2);\n        } else if (self.helpButton == sender) {\n            self.selectBlock(1);\n        }\n    }\n}\n\n-(void)setHideHelpButton:(BOOL)hideHelpButton {\n    _hideHelpButton = hideHelpButton;\n    self.helpButton.hidden = hideHelpButton;\n}\n\n\n\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DDShapeViewController/Rise/View/ATSelectBarView.xib",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<document type=\"com.apple.InterfaceBuilder3.CocoaTouch.XIB\" version=\"3.0\" toolsVersion=\"14109\" targetRuntime=\"iOS.CocoaTouch\" propertyAccessControl=\"none\" useAutolayout=\"YES\" useTraitCollections=\"YES\" useSafeAreas=\"YES\" colorMatched=\"YES\">\n    <device id=\"retina4_7\" orientation=\"portrait\">\n        <adaptation id=\"fullscreen\"/>\n    </device>\n    <dependencies>\n        <deployment identifier=\"iOS\"/>\n        <plugIn identifier=\"com.apple.InterfaceBuilder.IBCocoaTouchPlugin\" version=\"14088\"/>\n        <capability name=\"Safe area layout guides\" minToolsVersion=\"9.0\"/>\n        <capability name=\"documents saved in the Xcode 8 format\" minToolsVersion=\"8.0\"/>\n    </dependencies>\n    <objects>\n        <placeholder placeholderIdentifier=\"IBFilesOwner\" id=\"-1\" userLabel=\"File's Owner\" customClass=\"ATSelectBarView\">\n            <connections>\n                <outlet property=\"cancelButton\" destination=\"Yei-yK-eoT\" id=\"pBo-S1-dJO\"/>\n                <outlet property=\"helpButton\" destination=\"CId-mP-Phx\" id=\"8JY-4U-pg0\"/>\n                <outlet property=\"okButton\" destination=\"uy1-X2-9hX\" id=\"GLW-JN-vqJ\"/>\n                <outlet property=\"titleLabel\" destination=\"lwd-4c-6b4\" id=\"hQO-rd-lKY\"/>\n            </connections>\n        </placeholder>\n        <placeholder placeholderIdentifier=\"IBFirstResponder\" id=\"-2\" customClass=\"UIResponder\"/>\n        <view contentMode=\"scaleToFill\" id=\"iN0-l3-epB\">\n            <rect key=\"frame\" x=\"0.0\" y=\"0.0\" width=\"375\" height=\"667\"/>\n            <autoresizingMask key=\"autoresizingMask\" widthSizable=\"YES\" heightSizable=\"YES\"/>\n            <subviews>\n                <view contentMode=\"scaleToFill\" translatesAutoresizingMaskIntoConstraints=\"NO\" id=\"Ab5-eL-TDE\">\n                    <rect key=\"frame\" x=\"0.0\" y=\"309.5\" width=\"375\" height=\"48\"/>\n                    <subviews>\n                        <stackView opaque=\"NO\" contentMode=\"scaleToFill\" distribution=\"equalSpacing\" translatesAutoresizingMaskIntoConstraints=\"NO\" id=\"Xkl-Zl-X6s\">\n                            <rect key=\"frame\" x=\"14\" y=\"0.0\" width=\"347\" height=\"48\"/>\n                            <subviews>\n                                <button opaque=\"NO\" contentMode=\"scaleToFill\" contentHorizontalAlignment=\"center\" contentVerticalAlignment=\"center\" lineBreakMode=\"middleTruncation\" translatesAutoresizingMaskIntoConstraints=\"NO\" id=\"Yei-yK-eoT\">\n                                    <rect key=\"frame\" x=\"0.0\" y=\"0.0\" width=\"28\" height=\"48\"/>\n                                    <constraints>\n                                        <constraint firstAttribute=\"width\" constant=\"28\" id=\"c9y-ye-RxI\"/>\n                                        <constraint firstAttribute=\"height\" constant=\"48\" id=\"ucS-Xp-9WC\"/>\n                                    </constraints>\n                                    <state key=\"normal\" image=\"edit_cancel\"/>\n                                    <connections>\n                                        <action selector=\"buttonAction:\" destination=\"-1\" eventType=\"touchUpInside\" id=\"wcE-QZ-EX1\"/>\n                                    </connections>\n                                </button>\n                                <view contentMode=\"scaleToFill\" translatesAutoresizingMaskIntoConstraints=\"NO\" id=\"uN6-1r-2Qk\">\n                     
               <rect key=\"frame\" x=\"73.5\" y=\"0.0\" width=\"200\" height=\"48\"/>\n                                    <subviews>\n                                        <label opaque=\"NO\" userInteractionEnabled=\"NO\" contentMode=\"left\" horizontalHuggingPriority=\"251\" verticalHuggingPriority=\"251\" text=\"请输入title\" textAlignment=\"center\" lineBreakMode=\"tailTruncation\" baselineAdjustment=\"alignBaselines\" adjustsFontSizeToFit=\"NO\" translatesAutoresizingMaskIntoConstraints=\"NO\" id=\"lwd-4c-6b4\">\n                                            <rect key=\"frame\" x=\"63.5\" y=\"15\" width=\"73\" height=\"18\"/>\n                                            <fontDescription key=\"fontDescription\" type=\"system\" weight=\"medium\" pointSize=\"15\"/>\n                                            <color key=\"textColor\" red=\"1\" green=\"1\" blue=\"1\" alpha=\"1\" colorSpace=\"custom\" customColorSpace=\"sRGB\"/>\n                                            <nil key=\"highlightedColor\"/>\n                                            <userDefinedRuntimeAttributes>\n                                                <userDefinedRuntimeAttribute type=\"boolean\" keyPath=\"hiddenText\" value=\"YES\"/>\n                                            </userDefinedRuntimeAttributes>\n                                        </label>\n                                        <button hidden=\"YES\" opaque=\"NO\" contentMode=\"scaleToFill\" contentHorizontalAlignment=\"center\" contentVerticalAlignment=\"center\" lineBreakMode=\"middleTruncation\" translatesAutoresizingMaskIntoConstraints=\"NO\" id=\"CId-mP-Phx\">\n                                            <rect key=\"frame\" x=\"142.5\" y=\"9.5\" width=\"28\" height=\"28\"/>\n                                            <constraints>\n                                                <constraint firstAttribute=\"height\" constant=\"28\" id=\"CF2-zJ-JdQ\"/>\n                                                <constraint firstAttribute=\"width\" constant=\"28\" id=\"Ozw-MW-rdm\"/>\n                                            </constraints>\n                                            <state key=\"normal\" title=\"Button\" image=\"edit_beautify_manual\"/>\n                                            <connections>\n                                                <action selector=\"buttonAction:\" destination=\"-1\" eventType=\"touchUpInside\" id=\"M7c-nT-dDg\"/>\n                                            </connections>\n                                        </button>\n                                    </subviews>\n                                    <color key=\"backgroundColor\" white=\"0.0\" alpha=\"1\" colorSpace=\"custom\" customColorSpace=\"genericGamma22GrayColorSpace\"/>\n                                    <constraints>\n                                        <constraint firstItem=\"CId-mP-Phx\" firstAttribute=\"centerY\" secondItem=\"uN6-1r-2Qk\" secondAttribute=\"centerY\" id=\"2HS-Hz-yEY\"/>\n                                        <constraint firstItem=\"lwd-4c-6b4\" firstAttribute=\"centerY\" secondItem=\"uN6-1r-2Qk\" secondAttribute=\"centerY\" id=\"IYo-m1-Ue0\"/>\n                                        <constraint firstAttribute=\"width\" constant=\"200\" id=\"Ofg-JP-TcS\"/>\n                                        <constraint firstItem=\"lwd-4c-6b4\" firstAttribute=\"centerX\" secondItem=\"uN6-1r-2Qk\" secondAttribute=\"centerX\" id=\"Rul-JI-oIB\"/>\n                                        <constraint firstItem=\"CId-mP-Phx\" 
firstAttribute=\"leading\" secondItem=\"lwd-4c-6b4\" secondAttribute=\"trailing\" constant=\"6\" id=\"c5Y-r2-15Z\"/>\n                                    </constraints>\n                                </view>\n                                <button opaque=\"NO\" contentMode=\"scaleToFill\" contentHorizontalAlignment=\"center\" contentVerticalAlignment=\"center\" lineBreakMode=\"middleTruncation\" translatesAutoresizingMaskIntoConstraints=\"NO\" id=\"uy1-X2-9hX\">\n                                    <rect key=\"frame\" x=\"319\" y=\"0.0\" width=\"28\" height=\"48\"/>\n                                    <constraints>\n                                        <constraint firstAttribute=\"height\" constant=\"48\" id=\"EjT-Nc-caa\"/>\n                                        <constraint firstAttribute=\"width\" constant=\"28\" id=\"m6c-KY-A21\"/>\n                                    </constraints>\n                                    <state key=\"normal\" image=\"edit_achieve\"/>\n                                    <connections>\n                                        <action selector=\"buttonAction:\" destination=\"-1\" eventType=\"touchUpInside\" id=\"zTo-Qw-Dor\"/>\n                                    </connections>\n                                </button>\n                            </subviews>\n                            <constraints>\n                                <constraint firstItem=\"uN6-1r-2Qk\" firstAttribute=\"centerX\" secondItem=\"Xkl-Zl-X6s\" secondAttribute=\"centerX\" id=\"Esl-QP-lPk\"/>\n                                <constraint firstAttribute=\"height\" constant=\"48\" id=\"lhR-hA-1Zb\"/>\n                            </constraints>\n                        </stackView>\n                    </subviews>\n                    <color key=\"backgroundColor\" white=\"0.0\" alpha=\"1\" colorSpace=\"custom\" customColorSpace=\"genericGamma22GrayColorSpace\"/>\n                    <constraints>\n                        <constraint firstAttribute=\"height\" constant=\"48\" id=\"2aY-zH-zLb\"/>\n                        <constraint firstItem=\"Xkl-Zl-X6s\" firstAttribute=\"leading\" secondItem=\"Ab5-eL-TDE\" secondAttribute=\"leading\" constant=\"14\" id=\"AsY-J6-Q9m\"/>\n                        <constraint firstAttribute=\"bottom\" secondItem=\"Xkl-Zl-X6s\" secondAttribute=\"bottom\" id=\"GW9-MQ-ye2\"/>\n                        <constraint firstAttribute=\"trailing\" secondItem=\"Xkl-Zl-X6s\" secondAttribute=\"trailing\" constant=\"14\" id=\"ZFO-Wu-rcf\"/>\n                    </constraints>\n                </view>\n            </subviews>\n            <color key=\"backgroundColor\" red=\"1\" green=\"1\" blue=\"1\" alpha=\"1\" colorSpace=\"custom\" customColorSpace=\"sRGB\"/>\n            <constraints>\n                <constraint firstItem=\"Ab5-eL-TDE\" firstAttribute=\"centerY\" secondItem=\"iN0-l3-epB\" secondAttribute=\"centerY\" id=\"2e9-1u-C8H\"/>\n                <constraint firstItem=\"Ab5-eL-TDE\" firstAttribute=\"leading\" secondItem=\"vUN-kp-3ea\" secondAttribute=\"leading\" id=\"Y1c-0w-zOf\"/>\n                <constraint firstItem=\"vUN-kp-3ea\" firstAttribute=\"trailing\" secondItem=\"Ab5-eL-TDE\" secondAttribute=\"trailing\" id=\"xfT-va-Bat\"/>\n            </constraints>\n            <viewLayoutGuide key=\"safeArea\" id=\"vUN-kp-3ea\"/>\n        </view>\n    </objects>\n    <resources>\n        <image name=\"edit_achieve\" width=\"28\" height=\"28\"/>\n        <image name=\"edit_beautify_manual\" width=\"28\" height=\"28\"/>\n        <image 
name=\"edit_cancel\" width=\"28\" height=\"28\"/>\n    </resources>\n</document>\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DDShapeViewController/Rise/View/DDGLShapeView/DDGLSelectView.h",
    "content": "//\n//  DDGLSelectView.h\n//  Artist\n//\n//  Created by 刘海东 on 2018/7/11.\n//  Copyright © 2018年 wecut. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import \"DDGLShapeView.h\"\n@interface DDGLSelectView : UIView\n\n\n@property (nonatomic, assign) DDGLShapeViewType type;\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DDShapeViewController/Rise/View/DDGLShapeView/DDGLSelectView.m",
    "content": "//\n//  DDGLSelectView.m\n//  Artist\n//\n//  Created by 刘海东 on 2018/7/11.\n//  Copyright © 2018年 wecut. All rights reserved.\n//\n\n#import \"DDGLSelectView.h\"\n\n\n@interface DDGLSelectView ()\n\n@property (nonatomic, strong) UILabel *contentLab;\n\n@end\n\n\n@implementation DDGLSelectView\n\n- (void)layoutSubviews\n{\n    [super layoutSubviews];\n    \n    if (_type == DDGLShapeViewType_Vertical)\n    {\n        float w = self.frame.size.width;\n        float h = 30;\n        float y = (self.frame.size.height - h)/2.0;\n        if (self.frame.size.height<=h+10)\n        {\n            self.contentLab.hidden = YES;\n        }\n        else\n        {\n            self.contentLab.hidden = NO;\n        }\n        self.contentLab.frame = CGRectMake(0, y, w, h);\n    }\n    else\n    {\n        float w = 150;\n        float h = 60;\n        float y = (self.frame.size.height - h)/2.0;\n        float x = (self.frame.size.width - w)/2.0;\n        if (self.frame.size.width<=w+10)\n        {\n            self.contentLab.hidden = YES;\n        }\n        else\n        {\n            self.contentLab.hidden = NO;\n        }\n        self.contentLab.frame = CGRectMake(x, y, w, h);\n    }\n}\n\n\n- (instancetype)initWithFrame:(CGRect)frame\n{\n    self = [super initWithFrame:frame];\n    if (self) {\n        \n        \n        \n    }\n    return self;\n}\n\n\n- (UILabel *)contentLab\n{\n    if (!_contentLab)\n    {\n        _contentLab = [[UILabel alloc]init];\n        _contentLab.font = [UIFont fontWithName:@\"SFUIText-Semibold\" size:14];\n        _contentLab.textColor = [UIColor whiteColor];\n        _contentLab.textAlignment = NSTextAlignmentCenter;\n        [self addSubview:_contentLab];\n    }\n    return _contentLab;\n}\n\n\n- (void)setType:(DDGLShapeViewType)type\n{\n    _type = type;\n    if (_type == DDGLShapeViewType_Vertical)\n    {\n        self.contentLab.text = @\"Drag the line to select the area\";\n    }\n    else\n    {\n        self.contentLab.text = @\"Drag the line to select the area\";\n        self.contentLab.numberOfLines = 2;\n    }\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DDShapeViewController/Rise/View/DDGLShapeView/DDGLShapeControlView.h",
    "content": "//\n//  DDGLShapeControlView.h\n//  OpenGLStretchDemo\n//\n//  Created by 刘海东 on 2018/6/6.\n//  Copyright © 2018年 刘海东. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import \"DDGLShapeView.h\"\n@interface DDGLShapeControlView : UIView\n\n- (instancetype)initWithFrame:(CGRect)frame type:(DDGLShapeViewType)type;\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DDShapeViewController/Rise/View/DDGLShapeView/DDGLShapeControlView.m",
    "content": "//\n//  DDGLShapeControlView.m\n//  OpenGLStretchDemo\n//\n//  Created by 刘海东 on 2018/6/6.\n//  Copyright © 2018年 刘海东. All rights reserved.\n//\n\n#import \"DDGLShapeControlView.h\"\n\n@interface DDGLShapeControlView ()\n\n\n@property (nonatomic, strong) UIView *lineView;\n\n@property (nonatomic, strong) UIImageView *controlView1;\n\n@property (nonatomic, strong) UIImageView *controlView2;\n\n@property (nonatomic, assign) DDGLShapeViewType type;\n\n\n\n@end\n\n@implementation DDGLShapeControlView\n\n\n- (instancetype)initWithFrame:(CGRect)frame type:(DDGLShapeViewType)type\n{\n    self = [super initWithFrame:frame];\n    if (self) {\n        \n        _type = type;\n        \n        [self lineView];\n        [self controlView1];\n        [self controlView2];\n        \n        \n        if (_type == DDGLShapeViewType_Vertical)\n        {\n            self.controlView1.hidden = YES;\n        }\n        else\n        {\n            self.controlView2.hidden = YES;\n\n        }\n\n        \n    }\n    return self;\n}\n\n\n- (UIView *)lineView\n{\n    if (!_lineView)\n    {\n        CGRect rect;\n        if (_type == DDGLShapeViewType_Vertical) {\n            rect = CGRectMake(0, self.frame.size.height/2.0, self.frame.size.width, 1.0);\n        }else\n        {\n            rect = CGRectMake(self.frame.size.width/2.0, 0, 1.0, self.frame.size.height);\n        }\n        _lineView = [[UIView alloc]initWithFrame:rect];\n        _lineView.layer.shadowOffset = CGSizeMake(0, 0);\n        _lineView.layer.shadowOpacity = 0.13;\n        _lineView.layer.shadowRadius = 2;\n        _lineView.layer.shadowColor = [UIColor colorWithRed:0 green:0 blue:0 alpha:1].CGColor;\n        _lineView.backgroundColor = [UIColor whiteColor];\n        [self addSubview:_lineView];\n\n    }\n    return _lineView;\n}\n\n\n- (UIImageView *)controlView1\n{\n    if (!_controlView1) {\n        \n        CGRect rect;\n        UIImage *image;\n        if (_type == DDGLShapeViewType_Vertical)\n        {\n            image = [UIImage imageNamed:@\"edit_beautify_rise_move\"];\n            rect = CGRectMake(0, (self.frame.size.height - image.size.height)/2.0, image.size.width, image.size.height);\n        }\n        else\n        {\n            image = [UIImage imageNamed:@\"edit_beautify_slim_move\"];\n            rect = CGRectMake((self.frame.size.width - image.size.width)/2.0, 0, image.size.width,image.size.height);\n        }\n        _controlView1 = [[UIImageView alloc]initWithFrame:rect];\n        _controlView1.image = image;\n        [self addSubview:_controlView1];\n\n    }\n    return _controlView1;\n}\n\n\n- (UIImageView *)controlView2\n{\n    if (!_controlView2) {\n        CGRect rect;\n        UIImage *image;\n        if (_type == DDGLShapeViewType_Vertical)\n        {\n            image = [UIImage imageNamed:@\"edit_beautify_rise_move\"];\n            rect = CGRectMake(self.frame.size.width - image.size.width, (self.frame.size.height - image.size.height)/2.0, image.size.width,image.size.height);\n        }\n        else\n        {\n            image = [UIImage imageNamed:@\"edit_beautify_slim_move\"];\n            rect = CGRectMake((self.frame.size.width - image.size.width)/2.0, self.frame.size.height-image.size.height, image.size.width,image.size.height);\n        }\n        _controlView2 = [[UIImageView alloc]initWithFrame:rect];\n        _controlView2.image = image;\n        [self addSubview:_controlView2];\n    }\n    return _controlView2;\n}\n\n\n\n- (void)handlePan:(UILongPressGestureRecognizer 
*)sender\n{\n    //Unused stub: dragging of the control views is handled by DDGLShapeSelView's own handlePan: gesture handler\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DDShapeViewController/Rise/View/DDGLShapeView/DDGLShapeSelView.h",
    "content": "//\n//  DDGLShapeSelView.h\n//  OpenGLStretchDemo\n//\n//  Created by 刘海东 on 2018/6/6.\n//  Copyright © 2018年 刘海东. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import \"DDGLShapeView.h\"\n\n\n@protocol DDGLShapeSelViewDelegate <NSObject>\n\n/** 滑动选择区域中 */\n- (void)strectchSelViewSwiping;\n\n/** 滑动结束 */\n- (void)strectchSelViewSwipEndMaxValue:(float)max minValue:(float)min;\n\n@end\n\n@interface DDGLShapeSelView : UIView\n\n\n\n- (instancetype)initWithFrame:(CGRect)frame type:(DDGLShapeViewType)type subFrame:(CGRect)subFrame;\n@property (nonatomic, assign) DDGLNormValueRange valueRange;\n\n@property (nonatomic, weak) id<DDGLShapeSelViewDelegate> delegate;\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DDShapeViewController/Rise/View/DDGLShapeView/DDGLShapeSelView.m",
    "content": "//\n//  DDGLShapeSelView.m\n//  OpenGLStretchDemo\n//\n//  Created by 刘海东 on 2018/6/6.\n//  Copyright © 2018年 刘海东. All rights reserved.\n//\n\n#define kControlViewW 50\n#define kSelfSize self.frame.size\n\n#import \"DDGLShapeSelView.h\"\n#import \"DDGLShapeControlView.h\"\n#import \"DDGLSelectView.h\"\n\n\n@interface DDGLShapeSelView ()\n@property (nonatomic, assign) DDGLShapeViewType type;\n@property (nonatomic, strong) DDGLShapeControlView *controlView1;\n@property (nonatomic, strong) DDGLShapeControlView *controlView2;\n@property (nonatomic, strong) DDGLSelectView *selectView;\n@property (nonatomic, assign) CGRect subFrame;\n@end\n\n\n@implementation DDGLShapeSelView\n\n- (instancetype)initWithFrame:(CGRect)frame type:(DDGLShapeViewType)type subFrame:(CGRect)subFrame\n{\n    self = [super initWithFrame:frame];\n    if (self) {\n        \n        self.userInteractionEnabled = YES;\n        _subFrame = subFrame;\n        \n        _type = type;\n\n    }\n    return self;\n}\n\n- (DDGLShapeControlView *)controlView1\n{\n    if (!_controlView1) {\n        \n        CGRect rect;\n        if (_type == DDGLShapeViewType_Vertical) {\n            rect = CGRectMake(kSelfSize.width-kSelfSize.width, -kControlViewW/2.0+kSelfSize.height*_valueRange.min, kSelfSize.width, kControlViewW);\n        }else\n        {\n            rect = CGRectMake(kSelfSize.width*_valueRange.min-kControlViewW/2.0, 0, kControlViewW, kSelfSize.height);\n        }\n        _controlView1 = [[DDGLShapeControlView alloc]initWithFrame:rect type:_type];\n        _controlView1.userInteractionEnabled = YES;\n        [self addSubview:_controlView1];\n        UILongPressGestureRecognizer *logPressGestureRecognizer = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(handlePan:)];\n        logPressGestureRecognizer.minimumPressDuration = 0.0;\n        [_controlView1 addGestureRecognizer:logPressGestureRecognizer];\n        \n    }\n    return _controlView1;\n}\n\n- (DDGLShapeControlView *)controlView2\n{\n    if (!_controlView2) {\n        \n        CGRect rect;\n        if (_type == DDGLShapeViewType_Vertical) {\n            rect = CGRectMake(kSelfSize.width-kSelfSize.width, kSelfSize.height*_valueRange.max-kControlViewW/2.0, kSelfSize.width, kControlViewW);\n        }else\n        {\n            rect = CGRectMake((kSelfSize.width - kControlViewW/2.0)*_valueRange.max, 0, kControlViewW, kSelfSize.height);\n        }\n        _controlView2 = [[DDGLShapeControlView alloc]initWithFrame:rect type:_type];\n        _controlView2.userInteractionEnabled = YES;\n        [self addSubview:_controlView2];\n        UILongPressGestureRecognizer *logPressGestureRecognizer = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(handlePan:)];\n        logPressGestureRecognizer.minimumPressDuration = 0.0;\n        [_controlView2 addGestureRecognizer:logPressGestureRecognizer];\n    }\n    return _controlView2;\n}\n\n- (DDGLSelectView *)selectView\n{\n    if (!_selectView) {\n        \n        _selectView = [[DDGLSelectView alloc]init];\n        _selectView.backgroundColor = [UIColor colorWithRed:1.0 green:0.0 blue:0.0 alpha:0.2];\n        _selectView.type = _type;\n        [self addSubview:_selectView];\n        [self sendSubviewToBack:_selectView];\n        if (_type == DDGLShapeViewType_Vertical)\n        {\n            _selectView.frame = CGRectMake(0, CGRectGetMidY(self.controlView1.frame), CGRectGetWidth(self.frame), CGRectGetMidY(self.controlView2.frame) - 
CGRectGetMidY(self.controlView1.frame));\n        }else\n        {\n            _selectView.frame = CGRectMake(CGRectGetMidX(self.controlView1.frame), 0, CGRectGetMidX(self.controlView2.frame)-CGRectGetMidX(self.controlView1.frame), CGRectGetHeight(self.frame));\n        }\n        _selectView.hidden = YES;\n        \n    }\n    return _selectView;\n}\n\n\n\n#pragma mark func\n\n- (void)handlePan:(UILongPressGestureRecognizer *)sender {\n    \n    \n    DDGLShapeControlView *tapSuperView = (DDGLShapeControlView *)sender.view;\n    \n    \n    switch (sender.state) {\n        case UIGestureRecognizerStateBegan:\n        {\n            self.selectView.hidden = NO;\n        }\n            break;\n        case UIGestureRecognizerStateChanged:\n        {\n            self.selectView.hidden = NO;\n            if (sender.numberOfTouches <= 0) {\n                return;\n            }\n            CGPoint tapPoint = [sender locationOfTouch:0 inView:self];\n            \n            \n            switch (_type) {\n                case DDGLShapeViewType_Vertical:\n                {\n                    tapPoint = CGPointMake(tapPoint.x, tapPoint.y-kControlViewW/2.0);\n                    \n                    [self verticalConfigPoint:tapPoint tapSuperView:tapSuperView];\n                }\n                    break;\n                case DDGLShapeViewType_Horizontal:\n                {\n                    tapPoint = CGPointMake(tapPoint.x-kControlViewW/2.0, tapPoint.y);\n                    [self horizontalConfigPoint:tapPoint tapSuperView:tapSuperView];\n                }\n                    break;\n                    \n                default:\n                    break;\n            }\n            \n            //滑动中\n            if (self.delegate && [self.delegate respondsToSelector:@selector(strectchSelViewSwiping)]) {\n                [self.delegate strectchSelViewSwiping];\n            }\n            \n            \n        }\n            break;\n        case UIGestureRecognizerStateCancelled:\n        {\n            self.selectView.hidden = YES;\n            [self calculateValue];\n        }\n            break;\n        case UIGestureRecognizerStateFailed:\n        {\n            self.selectView.hidden = YES;\n            [self calculateValue];\n        }\n            break;\n        case UIGestureRecognizerStateEnded:\n        {\n            self.selectView.hidden = YES;\n            [self calculateValue];\n        }\n            break;\n            \n        default:\n            break;\n    }\n    \n}\n\n/** 停止滑动时候 计算最大值 最小值 */\n- (void)calculateValue\n{\n    \n    float max = 1.0;\n    float min = 0.0;\n    \n    if (_type == DDGLShapeViewType_Vertical) {\n        float value1 = CGRectGetMidY(self.controlView1.frame)/self.frame.size.height;\n        float value2 = CGRectGetMidY(self.controlView2.frame)/self.frame.size.height;\n        if (value1>=value2) {\n            max = value1;\n            min = value2;\n        }else\n        {\n            max = value2;\n            min = value1;\n        }\n    }else\n    {\n        float value1 = CGRectGetMidX(self.controlView1.frame)/self.frame.size.width;\n        float value2 = CGRectGetMidX(self.controlView2.frame)/self.frame.size.width;\n        if (value1>=value2) {\n            max = value1;\n            min = value2;\n        }else\n        {\n            max = value2;\n            min = value1;\n        }\n\n    }\n    \n    \n    if ([self.delegate respondsToSelector:@selector(strectchSelViewSwipEndMaxValue:minValue:)]) {\n        
[self.delegate strectchSelViewSwipEndMaxValue:max minValue:min];\n    }\n    \n    \n}\n\n\n#pragma mark 竖\n- (void)verticalConfigPoint:(CGPoint)tapPoint tapSuperView:(DDGLShapeControlView *)tapSuperView\n{\n    \n    CGRect rect;\n    if (tapSuperView == self.controlView1) {\n        rect = self.controlView1.frame;\n    }else{\n        rect = self.controlView2.frame;\n    }\n    \n    if (tapPoint.y<-kControlViewW/2.0)\n    {\n        tapPoint.y = -kControlViewW/2.0;\n    }\n    else if (tapPoint.y>kSelfSize.height-kControlViewW/2.0)\n    {\n        tapPoint.y = kSelfSize.height-kControlViewW/2.0;\n    }\n    \n    rect.origin.y = tapPoint.y;\n    \n    if (tapSuperView == self.controlView1) {\n        self.controlView1.frame = rect;\n    }else{\n        self.controlView2.frame = rect;\n    }\n    \n    CGRect rect1 = self.controlView1.frame;\n    CGRect rect2 = self.controlView2.frame;\n    \n    CGRect selectRect = self.selectView.frame;\n    \n    if (rect1.origin.y<rect2.origin.y) {\n        selectRect.origin.y = CGRectGetMidY(self.controlView1.frame);\n        selectRect.size.height = CGRectGetMidY(self.controlView2.frame) - CGRectGetMidY(self.controlView1.frame);\n    }\n    else\n    {\n        selectRect.origin.y = CGRectGetMidY(self.controlView2.frame);\n        selectRect.size.height = CGRectGetMidY(self.controlView1.frame) - CGRectGetMidY(self.controlView2.frame);\n    }\n    self.selectView.frame = selectRect;\n    \n}\n#pragma mark 横\n- (void)horizontalConfigPoint:(CGPoint)tapPoint tapSuperView:(DDGLShapeControlView *)tapSuperView\n{\n    \n    CGRect rect;\n    if (tapSuperView == self.controlView1) {\n        rect = self.controlView1.frame;\n    }else{\n        rect = self.controlView2.frame;\n    }\n    \n    if (tapPoint.x<-kControlViewW/2.0)\n    {\n        tapPoint.x = -kControlViewW/2.0;\n    }\n    else if (tapPoint.x>kSelfSize.width-kControlViewW/2.0)\n    {\n        tapPoint.x = kSelfSize.width-kControlViewW/2.0;\n    }\n    \n    rect.origin.x = tapPoint.x;\n    \n    if (tapSuperView == self.controlView1) {\n        self.controlView1.frame = rect;\n    }else{\n        self.controlView2.frame = rect;\n    }\n    \n    CGRect rect1 = self.controlView1.frame;\n    CGRect rect2 = self.controlView2.frame;\n    \n    CGRect selectRect = self.selectView.frame;\n    \n    if (rect1.origin.x<rect2.origin.x) {\n        selectRect.origin.x = CGRectGetMidX(self.controlView1.frame);\n        selectRect.size.width = CGRectGetMidX(self.controlView2.frame) - CGRectGetMidX(self.controlView1.frame);\n    }\n    else\n    {\n        selectRect.origin.x = CGRectGetMidX(self.controlView2.frame);\n        selectRect.size.width = CGRectGetMidX(self.controlView1.frame) - CGRectGetMidX(self.controlView2.frame);\n    }\n    self.selectView.frame = selectRect;\n    \n}\n\n-(UIView *)hitTest:(CGPoint)point withEvent:(UIEvent *)event {\n    UIView *view = [super hitTest:point withEvent:event];\n    if (view == nil)\n    {\n        for (UIView*subView in self.subviews)\n        {\n            if ([subView isKindOfClass:[DDGLShapeControlView class]])\n            {\n                CGPoint lowLeftPoint = [subView convertPoint:point fromView:self];\n                if ([subView pointInside:lowLeftPoint withEvent:event])\n                {\n                    view = subView;\n                }\n            }\n        }\n    }\n    return view;\n}\n\n#pragma mark set\n- (void)setValueRange:(DDGLNormValueRange)valueRange\n{\n    _valueRange = valueRange;\n    [self controlView1];\n    [self 
controlView2];\n    [self selectView];\n    [self updateSubViewsFrame];\n}\n\n- (void)updateSubViewsFrame\n{\n    CGRect rect;\n    if (_type == DDGLShapeViewType_Vertical) {\n        rect = CGRectMake(kSelfSize.width-kSelfSize.width, -kControlViewW/2.0+kSelfSize.height*_valueRange.min, kSelfSize.width, kControlViewW);\n    }else\n    {\n        rect = CGRectMake(kSelfSize.width*_valueRange.min-kControlViewW/2.0, 0, kControlViewW, kSelfSize.height);\n    }\n    _controlView1.frame = rect;\n    \n    \n    CGRect rect2;\n    if (_type == DDGLShapeViewType_Vertical) {\n        rect2 = CGRectMake(kSelfSize.width-kSelfSize.width, kSelfSize.height*_valueRange.max-kControlViewW/2.0, kSelfSize.width, kControlViewW);\n    }else\n    {\n        rect2 = CGRectMake((kSelfSize.width - kControlViewW/2.0)*_valueRange.max, 0, kControlViewW, kSelfSize.height);\n    }\n    _controlView2.frame = rect2;\n    \n    if (_type == DDGLShapeViewType_Vertical)\n    {\n        self.selectView.frame = CGRectMake(CGRectGetMinX(self.subFrame), CGRectGetMidY(self.controlView1.frame), CGRectGetWidth(self.subFrame), CGRectGetMidY(self.controlView2.frame) - CGRectGetMidY(self.controlView1.frame));\n    }else\n    {\n        self.selectView.frame = CGRectMake(CGRectGetMidX(self.controlView1.frame), self.subFrame.origin.y, CGRectGetMidX(self.controlView2.frame)-CGRectGetMidX(self.controlView1.frame), CGRectGetHeight(self.subFrame));\n    }\n}\n\n\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DDShapeViewController/Rise/View/DDGLShapeView/DDGLShapeView.h",
    "content": "//\n//  DDGLShapeView.h\n//  OpenGLStretchDemo\n//\n//  Created by 刘海东 on 2018/5/30.\n//  Copyright © 2018年 刘海东. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import <OpenGLES/ES2/gl.h>\n#import <OpenGLES/ES2/glext.h>\n#import <GLKit/GLKit.h>\n\n/** 归一化的 0.0-1.0*/\nstruct DDGLNormValueRange{\n    /** 最大值 */\n    float max;\n    /** 最小值 */\n    float min;\n};\ntypedef struct DDGLNormValueRange DDGLNormValueRange;\n\n\n/** 拉伸图的类型 */\ntypedef NS_ENUM(NSInteger,DDGLShapeViewType)\n{\n    /** 竖直方向 */\n    DDGLShapeViewType_Vertical = 0,\n    /** 水平方向 */\n    DDGLShapeViewType_Horizontal,\n};\n\n@protocol DDGLShapeViewDelegate <NSObject>\n\n- (void)strectchViewGetVertexArray:(NSArray *)vertexArray changeValue:(float)changeValue;\n\n@end\n\n@interface DDGLShapeView : UIView\n\n@property (nonatomic, weak) id<DDGLShapeViewDelegate> delegate;\n- (instancetype)initWithFrame:(CGRect)frame type:(DDGLShapeViewType)type image:(UIImage *)image;\n\n@property (nonatomic, assign) DDGLNormValueRange valueRange;\n- (void)getOriginImageVertexConfig;\n- (void)changeValue:(float)value;\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DDShapeViewController/Rise/View/DDGLShapeView/DDGLShapeView.m",
    "content": "//\n//  DDGLShapeView.m\n//  OpenGLStretchDemo\n//\n//  Created by 刘海东 on 2018/5/30.\n//  Copyright © 2018年 刘海东. All rights reserved.\n//\n\n#define kVertical_h 200\n\n#import \"DDGLShapeView.h\"\n#import \"DDGLShapeSelView.h\"\n#import \"DDGLShapeControlView.h\"\n\n@interface DDGLShapeView ()<GLKViewDelegate>\n\n{\n    GLfloat squareVertexData[40];\n}\n\n@property (nonatomic, strong) GLKView *glkView;\n@property (nonatomic , strong) EAGLContext* mContext;\n@property (nonatomic , strong) GLKBaseEffect* mEffect;\n\n@property (nonatomic, assign) float changValue;\n\n/** 图像宽高 */\n@property (nonatomic, assign) float imageWidth;\n@property (nonatomic, assign) float imageHeight;\n/** 类型 */\n@property (nonatomic, assign) DDGLShapeViewType  type;\n\n@property (nonatomic, copy) NSArray *vertexArray;\n\n\n\n@end\n\n@implementation DDGLShapeView\n- (instancetype)initWithFrame:(CGRect)frame type:(DDGLShapeViewType)type image:(UIImage *)image\n{\n    self = [super initWithFrame:frame];\n    if (self) {\n        _type = type;\n        //新建OpenGLES 上下文\n        self.mContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];\n        self.glkView.drawableColorFormat = GLKViewDrawableColorFormatRGBA8888;  //颜色缓冲区格式\n        [EAGLContext setCurrentContext:self.mContext];\n        \n        self.imageWidth = image.size.width;\n        self.imageHeight = image.size.height;\n        \n        NSDictionary* options = [NSDictionary dictionaryWithObjectsAndKeys:@(1), GLKTextureLoaderOriginBottomLeft, nil];\n        GLKTextureInfo* textureInfo = [GLKTextureLoader textureWithCGImage:image.CGImage options:options error:nil];\n        //着色器\n        self.mEffect = [[GLKBaseEffect alloc] init];\n        self.mEffect.texture2d0.enabled = GL_TRUE;\n        self.mEffect.texture2d0.name = textureInfo.name;\n        \n    }\n    return self;\n}\n\n- (GLKView *)glkView\n{\n    \n    if(!_glkView)\n    {\n        _glkView = [[GLKView alloc]initWithFrame:self.bounds context:self.mContext];\n        _glkView.delegate = self;\n        [self addSubview:_glkView];\n        \n    }\n    \n    return _glkView;\n}\n\n\n- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect\n{\n    \n    glClearColor(1.f, 1.f, 1.0f, 1.0f);\n    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);\n    NSArray * array;\n    \n    switch (_type) {\n        case DDGLShapeViewType_Vertical:\n        {\n            array = [self verticalConfigVertex];\n            _vertexArray = array;\n        }\n            break;\n        case DDGLShapeViewType_Horizontal:\n        {\n            array = [self horizontalConfigVertex];\n            _vertexArray = array;\n        }\n            break;\n        default:\n            break;\n    }\n        \n    for (int i=0; i!=array.count; i++) {\n        NSNumber *value = array[i];\n        squareVertexData[i] = value.floatValue;\n    }\n    \n    GLuint buffer;\n    glGenBuffers(1, &buffer);\n    glBindBuffer(GL_ARRAY_BUFFER, buffer);\n    glBufferData(GL_ARRAY_BUFFER, sizeof(squareVertexData), squareVertexData, GL_STATIC_DRAW);\n    glEnableVertexAttribArray(GLKVertexAttribPosition);\n    glVertexAttribPointer(GLKVertexAttribPosition, 3, GL_FLOAT, GL_FALSE, sizeof(GLfloat) * 5, (GLfloat *)NULL + 0);\n    glEnableVertexAttribArray(GLKVertexAttribTexCoord0);\n    glVertexAttribPointer(GLKVertexAttribTexCoord0, 2, GL_FLOAT, GL_FALSE, sizeof(GLfloat) * 5, (GLfloat *)NULL + 3);\n    [self.mEffect prepareToDraw];\n    glDrawArrays(GL_TRIANGLE_STRIP, 0, 8);\n}\n\n#pragma mark 配置竖直方向上面的顶点数据\n- 
(NSArray *)verticalConfigVertex\n{\n    \n    float imageRatio = (float)self.imageWidth/self.imageHeight;\n    float screenRatio = (float)self.frame.size.width/self.frame.size.height;\n    float xfactor=1.0;\n    float yfactor=1.0;\n    \n    \n    float tempValue = self.changValue;\n    //负值\n    float xMinus = -xfactor;\n    float yMinus = -yfactor;\n    \n    //正值\n    float xPlus = xfactor;\n    float yPlus = yfactor;\n    \n    \n    float x1,x2,x3,x4,x5,x6,x7,x8 = 0.0;\n    float y1,y2,y3,y4,y5,y6,y7,y8 = 0.0;\n    float tx1,tx2,tx3,tx4,tx5,tx6,tx7,tx8 = 0.0;\n    float ty1,ty2,ty3,ty4,ty5,ty6,ty7,ty8 = 0.0;\n    \n    \n    if (imageRatio > screenRatio) {\n        \n        //宽顶到边\n        yMinus = xMinus*screenRatio/(_imageWidth/(_imageHeight+tempValue*kVertical_h));\n        yPlus = xPlus*screenRatio/(_imageWidth/(_imageHeight+tempValue*kVertical_h));\n        \n        //原来的比例\n        float originY = xPlus*screenRatio/imageRatio;\n        \n        //高大于宽\n        x1 = xMinus;\n        y1 = yMinus;\n        tx1 = 0;\n        ty1 = 0;\n        \n        x2 = xPlus;\n        y2 = yMinus;\n        tx2 = 1;\n        ty2 = 0;\n        \n        \n        /** 极限值的判断处理 */\n        if ((yMinus<-1.0000001 || yMinus>-0.0000001) || (yPlus>1.0000001 || yPlus<.0000001)) {\n            \n            //负值\n            yMinus = -yfactor;\n            //正值\n            yPlus = yfactor;\n            \n            xMinus = yMinus*_imageWidth/(_imageHeight+tempValue*kVertical_h)/screenRatio;\n            xPlus = xPlus*_imageWidth/(_imageHeight+tempValue*kVertical_h)/screenRatio;\n            \n            //原来的比例\n            float originX = yMinus*imageRatio/screenRatio;\n            \n            x1 = xMinus;\n            y1 = yMinus;\n            tx1 = 0;\n            ty1 = 0;\n            \n            x2 = xPlus;\n            y2 = yMinus;\n            tx2 = 1;\n            ty2 = 0;\n            \n            //原来的高\n            CGFloat h = ABS(1-2*_valueRange.max + 1) *xMinus/originX;\n            \n            x3 = xMinus;\n            y3 = -(1.0-h);\n            tx3 = 0;\n            ty3 = (1-_valueRange.max);\n            \n            x4 = xPlus;\n            y4 = -(1.0-h);\n            tx4 = 1;\n            ty4 = (1-_valueRange.max);\n            \n            \n            x5 = xMinus;\n            y5 = (1-2*_valueRange.min*xMinus/originX);\n            tx5 = 0;\n            ty5 = (1-_valueRange.min);\n            \n            \n            x6 = xPlus;\n            y6 = (1-2*_valueRange.min*xMinus/originX);\n            tx6 = 1;\n            ty6 = (1-_valueRange.min);\n            \n            x7 = xMinus;\n            y7 = yPlus;\n            tx7 = 0;\n            ty7 = 1;\n            \n            x8 = xPlus;\n            y8 = yPlus;\n            tx8 = 1;\n            ty8 = 1;\n            \n        }\n        else\n        {\n            //形变\n            CGFloat h = yPlus;\n            CGFloat value = (h - originY);\n            \n            x3 = xMinus;\n            y3 = (1-2*_valueRange.max)*originY-value;\n            tx3 = 0;\n            ty3 = (1-_valueRange.max);\n            \n            x4 = xPlus;\n            y4 = (1-2*_valueRange.max)*originY-value;\n            tx4 = 1;\n            ty4 = (1-_valueRange.max);\n            \n            x5 = xMinus;\n            y5 =  (1-2*_valueRange.min)*originY+value;\n            tx5 = 0;\n            ty5 = (1-_valueRange.min);\n            \n            x6 = xPlus;\n            y6 = (1-2*_valueRange.min)*originY+value;\n          
  tx6 = 1;\n            ty6 = (1-_valueRange.min);\n            \n            x7 = xMinus;\n            y7 = yPlus;\n            tx7 = 0;\n            ty7 = 1;\n            \n            x8 = xPlus;\n            y8 = yPlus;\n            tx8 = 1;\n            ty8 = 1;\n            \n            \n        }\n        \n        \n    }\n    else\n    {\n        \n        //高顶到边\n        xMinus = yMinus*_imageWidth/(_imageHeight+tempValue*kVertical_h)/screenRatio;\n        xPlus = xPlus*_imageWidth/(_imageHeight+tempValue*kVertical_h)/screenRatio;\n        //原来的比例\n        float originX = yMinus*imageRatio/screenRatio;\n        \n        x1 = xMinus;\n        y1 = yMinus;\n        tx1 = 0;\n        ty1 = 0;\n        \n        x2 = xPlus;\n        y2 = yMinus;\n        tx2 = 1;\n        ty2 = 0;\n        \n        //原来的高\n        CGFloat h = ABS(1-2*_valueRange.max + 1) *xMinus/originX;\n        \n        x3 = xMinus;\n        y3 = -(1.0-h);\n        tx3 = 0;\n        ty3 = (1-_valueRange.max);\n        \n        \n        x4 = xPlus;\n        y4 = -(1.0-h);\n        tx4 = 1;\n        ty4 = (1-_valueRange.max);\n        \n        \n        x5 = xMinus;\n        y5 = (1-2*_valueRange.min*xMinus/originX);\n        tx5 = 0;\n        ty5 = (1-_valueRange.min);\n        \n        \n        x6 = xPlus;\n        y6 = (1-2*_valueRange.min*xMinus/originX);\n        tx6 = 1;\n        ty6 = (1-_valueRange.min);\n        \n        x7 = xMinus;\n        y7 = yPlus;\n        tx7 = 0;\n        ty7 = 1;\n        \n        x8 = xPlus;\n        y8 = yPlus;\n        tx8 = 1;\n        ty8 = 1;\n        \n        \n    }\n    \n    NSArray *array = @[\n                       //（x1,y1）\n                       @(x1), @(y1), @(0),  @(tx1), @(ty1),\n                       //（x2,y2）\n                       @(x2), @(y2), @(0),  @(tx2), @(ty2),\n                       //（x3,y3）\n                       @(x3), @(y3), @(0),  @(tx3), @(ty3),\n                       //（x4,y4）\n                       @(x4), @(y4), @(0),  @(tx4), @(ty4),\n                       //（x5,y5）\n                       @(x5), @(y5), @(0),  @(tx5), @(ty5),\n                       //（x6,y6）\n                       @(x6), @(y6), @(0),  @(tx6), @(ty6),\n                       //（x7,y7）\n                       @(x7), @(y7), @(0),  @(tx7), @(ty7),\n                       //（x8,y8）\n                       @(x8), @(y8), @(0),  @(tx8), @(ty8),\n                       ];\n    \n    return array;\n}\n\n#pragma mark 配置水平方向上面的顶点数据\n- (NSArray *)horizontalConfigVertex\n{\n    float imageRatio = (float)self.imageWidth/self.imageHeight;\n    float screenRatio = (float)self.frame.size.width/self.frame.size.height;\n    float xfactor=1.0;\n    float yfactor=1.0;\n    \n    float tempValue = self.changValue;\n    //负值\n    float xMinus = -xfactor;\n    float yMinus = -yfactor;\n    \n    //正值\n    float xPlus = xfactor;\n    float yPlus = yfactor;\n    \n    float x1,x2,x3,x4,x5,x6,x7,x8 = 0.0;\n    float y1,y2,y3,y4,y5,y6,y7,y8 = 0.0;\n    float tx1,tx2,tx3,tx4,tx5,tx6,tx7,tx8 = 0.0;\n    float ty1,ty2,ty3,ty4,ty5,ty6,ty7,ty8 = 0.0;\n    \n    //压缩最大的值域区间的80%\n    float compressMaxValue = (_valueRange.max - _valueRange.min)*_imageWidth*0.8;\n    \n    if (imageRatio > screenRatio) {\n        \n        //宽顶到边\n        yMinus = xMinus*screenRatio/imageRatio;\n        yPlus = xPlus*screenRatio/imageRatio;\n        \n        //        NSLog(@\"宽顶到边\");\n        \n        //改变的比例\n        float neW_xMinus = 
yMinus*((self.imageWidth-compressMaxValue*tempValue)/self.imageHeight)/screenRatio;\n        //改变的值\n        float w = ABS(xMinus - neW_xMinus);\n        \n        x1 = xMinus+w;\n        y1 = yMinus;\n        tx1 = 0;\n        ty1 = 0;\n        \n        x2 = xMinus+w;\n        y2 = yPlus;\n        tx2 = 0;\n        ty2 = 1;\n        \n        x3 = (1-2*_valueRange.min)/xMinus+w;\n        y3 = yMinus;\n        tx3 = _valueRange.min;\n        ty3 = 0;\n        \n        x4 = (1-2*_valueRange.min)/xMinus+w;\n        y4 = yPlus;\n        tx4 = _valueRange.min;\n        ty4 = 1;\n        \n        x5 =  (1-2*_valueRange.max)/xMinus-w;\n        y5 = yMinus;\n        tx5 = _valueRange.max;\n        ty5 = 0;\n        \n        \n        x6 = (1-2*_valueRange.max)/xMinus-w;\n        y6 = yPlus;\n        tx6 = _valueRange.max;\n        ty6 = 1;\n        \n        x7 = xPlus-w;\n        y7 = yMinus;\n        tx7 = 1;\n        ty7 = 0;\n        \n        x8 = xPlus-w;\n        y8 = yPlus;\n        tx8 = 1;\n        ty8 = 1;\n    }\n    else\n    {\n        \n        \n        xMinus = yMinus*((_imageWidth+tempValue*compressMaxValue*-1)/_imageHeight)/screenRatio;\n        xPlus = yPlus*((_imageWidth+tempValue*compressMaxValue*-1)/_imageHeight)/screenRatio;\n        //        NSLog(@\"高顶到边\");\n        //原来的比例\n        float originX = yPlus*imageRatio/screenRatio;\n        float w = originX - xPlus;\n        //高大于宽\n        x1 = xMinus;\n        y1 = yMinus;\n        tx1 = 0;\n        ty1 = 0;\n        \n        x2 = xMinus;\n        y2 = yPlus;\n        tx2 = 0;\n        ty2 = 1;\n        \n        x3 = -(1-2*_valueRange.min)*originX+w;\n        y3 = yMinus;\n        tx3 = _valueRange.min;\n        ty3 = 0;\n        \n        x4 = -(1-2*_valueRange.min)*originX+w;\n        y4 = yPlus;\n        tx4 = _valueRange.min;\n        ty4 = 1;\n        \n        x5 = -(1-2*_valueRange.max)*originX-w;\n        y5 = yMinus;\n        tx5 = _valueRange.max;\n        ty5 = 0;\n        \n        x6 = -(1-2*_valueRange.max)*originX-w;\n        y6 = yPlus;\n        tx6 = _valueRange.max;\n        ty6 = 1;\n        \n        x7 = xPlus;\n        y7 = yMinus;\n        tx7 = 1;\n        ty7 = 0;\n        \n        x8 = xPlus;\n        y8 = yPlus;\n        tx8 = 1;\n        ty8 = 1;\n    }\n    \n    NSArray *array = @[\n                       //（x1,y1）\n                       @(x1), @(y1), @(0),  @(tx1), @(ty1),\n                       //（x2,y2）\n                       @(x2), @(y2), @(0),  @(tx2), @(ty2),\n                       //（x3,y3）\n                       @(x3), @(y3), @(0),  @(tx3), @(ty3),\n                       //（x4,y4）\n                       @(x4), @(y4), @(0),  @(tx4), @(ty4),\n                       //（x5,y5）\n                       @(x5), @(y5), @(0),  @(tx5), @(ty5),\n                       //（x6,y6）\n                       @(x6), @(y6), @(0),  @(tx6), @(ty6),\n                       //（x7,y7）\n                       @(x7), @(y7), @(0),  @(tx7), @(ty7),\n                       //（x8,y8）\n                       @(x8), @(y8), @(0),  @(tx8), @(ty8),\n                       ];\n    return array;\n    \n}\n\n#pragma mark set\n- (void)setValueRange:(DDGLNormValueRange)valueRange\n{\n    _valueRange = valueRange;\n}\n\n#pragma mark func\n\n- (void)changeValue:(float)value\n{\n    self.changValue = value;\n    [self.glkView display];\n}\n\n- (void)getOriginImageVertexConfig\n{\n    \n    switch (_type) {\n        case DDGLShapeViewType_Vertical:\n        {\n            \n            NSNumber *yValue = 
_vertexArray[1];\n            CGFloat y = ABS(yValue.floatValue);\n            \n            NSMutableArray *muarray = [NSMutableArray arrayWithArray:_vertexArray];\n            for (int i=0; i!=_vertexArray.count; i++)\n            {\n                //处理x坐标\n                if (i%5==0)\n                {\n                    if (i%10==0)\n                    {\n                        muarray[i] = @(-1.0);\n                    }\n                    else\n                    {\n                        muarray[i] = @(1.0);\n                    }\n                }\n\n                if (i%5==1) {\n                    NSNumber *yV = _vertexArray[i];\n                    muarray[i] = @(yV.floatValue/y);\n                }\n            }\n            \n            float value=kVertical_h*self.changValue;\n            if (self.delegate && [self.delegate respondsToSelector:@selector(strectchViewGetVertexArray:changeValue:)])\n            {\n                [self.delegate strectchViewGetVertexArray:muarray changeValue:value];\n            }\n            \n            \n        }\n            break;\n        case DDGLShapeViewType_Horizontal:\n        {\n            NSMutableArray *muarray = [NSMutableArray arrayWithArray:_vertexArray];\n            \n            NSNumber *xValue = _vertexArray[0];\n            CGFloat x = ABS(xValue.floatValue);\n            \n            NSNumber *yValue = _vertexArray[1];\n            CGFloat y = ABS(yValue.floatValue);\n            \n            for (int i=0; i!=_vertexArray.count; i++)\n            {\n\n                if (i%5==0) {\n                    NSNumber *xV = _vertexArray[i];\n                    muarray[i] = @(xV.floatValue/x);\n                }\n\n                if (i%5==1) {\n                    NSNumber *yV = _vertexArray[i];\n                    muarray[i] = @(yV.floatValue/y);\n                }\n\n            }\n\n            float compressMaxValue = (_valueRange.max - _valueRange.min)*_imageWidth*0.8;\n            float value=compressMaxValue*self.changValue;\n            if (self.delegate && [self.delegate respondsToSelector:@selector(strectchViewGetVertexArray:changeValue:)])\n            {\n                [self.delegate strectchViewGetVertexArray:muarray changeValue:value];\n            }\n        }\n            break;\n        default:\n            break;\n    }\n    \n}\n\n- (void)dealloc\n{\n    if (self.mContext) {\n        _mContext = nil;\n        _mEffect = nil;\n        [EAGLContext setCurrentContext:nil];\n    }\n    NSLog(@\"DDGLShapeView---dealloc\");\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DDShapeViewController/Rise/View/DDGLShapeView/DDGLShapingView.h",
    "content": "//\n//  DDGLShapingView.h\n//  OpenGLStretchDemo\n//\n//  Created by 刘海东 on 2018/6/12.\n//  Copyright © 2018年 刘海东. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import \"DDGLShapeView.h\"\n\n\n@protocol DDGLShapingViewDelegate <NSObject>\n@optional\n/** 选择区域拖拽中 */\n- (void)shapingViewSwiping;\n- (void)shapingViewGetVertexArray:(NSArray *)vertexArray textureCoordinateArray:(NSArray *)textureCoordinateArray changeValue:(float)changeValue type:(DDGLShapeViewType)type;\n- (void)shapingViewSwipEndMaxValue:(float)max minValue:(float)min;\n\n@end\n\n\n@interface DDGLShapingView : UIView\n\n- (instancetype)initWithFrame:(CGRect)frame type:(DDGLShapeViewType)type image:(UIImage *)image;\n@property (nonatomic, weak) id<DDGLShapingViewDelegate> delegate;\n\n- (DDGLNormValueRange)getRange;\n\n/** 改变值 */\n- (void)changeValue:(float)value;\n\n/** 改变选择的区域 */\n- (void)changeRange:(DDGLNormValueRange)range;\n\n/** 获取配置 */\n- (void)getOriginImageVertexConfig;\n\n/** 隐藏选择区域的UI */\n- (void)hideStrectchSelView;\n\n/** 显示选择区域的UI */\n- (void)showStrectchSelView;\n\n- (UIImage *)getProcessImage;\n\n\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DDShapeViewController/Rise/View/DDGLShapeView/DDGLShapingView.m",
    "content": "//\n//  DDGLShapingView.m\n//  OpenGLStretchDemo\n//\n//  Created by 刘海东 on 2018/6/12.\n//  Copyright © 2018年 刘海东. All rights reserved.\n//\n\n#import \"DDGLShapingView.h\"\n#import \"DDGLShapeSelView.h\"\n#import <GPURenderKit/GPURenderKit.h>\n\n\n\n@interface DDGLShapingView ()<DDGLShapeSelViewDelegate>\n\n/** 选择区域 */\n@property (nonatomic, strong) DDGLShapeSelView *strectchSelView;\n\n/** 图像宽高 */\n@property (nonatomic, assign) float imageWidth;\n@property (nonatomic, assign) float imageHeight;\n/** 类型 */\n@property (nonatomic, assign) DDGLShapeViewType  type;\n@property (nonatomic, assign) DDGLNormValueRange range;\n\n/** 滑动中 */\n@property (nonatomic, assign) BOOL swipingBool;\n\n@property (nonatomic, strong) GPUImageView     *glPreview;\n@property (nonatomic, strong) GLImageShapeFilter *glShapeFilter;\n@property (nonatomic, strong) GPUImagePicture *glImagePicture;\n@property (nonatomic, copy) UIImage *image;\n\n\n@end\n\n@implementation DDGLShapingView\n\n- (instancetype)initWithFrame:(CGRect)frame type:(DDGLShapeViewType)type image:(UIImage *)image\n{\n    self = [super initWithFrame:frame];\n    if (self) {\n        \n        _type = type;\n        self.userInteractionEnabled = YES;\n        //默认的值\n        DDGLNormValueRange range = {0,0};\n        range.max = 75/100.0;\n        range.min = 25/100.0;\n        _range = range;\n        \n        _imageWidth = image.size.width;\n        _imageHeight = image.size.height;\n        \n        _image = image;\n        \n        self.glImagePicture = [[GPUImagePicture alloc]initWithImage:image];\n        [self.glImagePicture addTarget:self.glShapeFilter];\n        [self.glShapeFilter addTarget:self.glPreview];\n        [self.glImagePicture processImage];\n        [self strectchSelView];\n        \n    \n    }\n    return self;\n}\n\n\n\n- (GLImageShapeFilter *)glShapeFilter\n{\n    \n    if (!_glShapeFilter) {\n        _glShapeFilter = [[GLImageShapeFilter alloc] init];\n        _glShapeFilter.screenRatio = self.frame.size.width/self.frame.size.height;\n        _glShapeFilter.imageWidth = _imageWidth;\n        _glShapeFilter.imageHeight = _imageHeight;\n        _glShapeFilter.minValue = _range.min;\n        _glShapeFilter.maxValue = _range.max;\n        _glShapeFilter.type = _type;\n        [_glShapeFilter changeValue:0.0];\n        [_glShapeFilter forceProcessingAtSize:CGSizeMake(self.frame.size.width*[UIScreen mainScreen].scale, self.frame.size.height*[UIScreen mainScreen].scale)];\n    }\n    return _glShapeFilter;\n}\n\n\n- (GPUImageView *)glPreview\n{\n    if (!_glPreview) {\n        _glPreview = [[GPUImageView alloc] initWithFrame:self.bounds];\n        [_glPreview setBackgroundColorRed:0.0 green:0.0 blue:0 alpha:1.0];\n        _glPreview.backgroundColor = [UIColor blackColor];\n        [self addSubview:_glPreview];\n    }\n    return _glPreview;\n}\n\n\n- (DDGLShapeSelView *)strectchSelView\n{\n    if (!_strectchSelView)\n    {\n        float imageRatio = (float)_imageWidth/_imageHeight;\n        float screenRatio = (float)self.frame.size.width/self.frame.size.height;\n        float originX;\n        float originY;\n        \n        CGRect rect;\n        \n        if (_type == DDGLShapeViewType_Vertical) {\n            if (imageRatio > screenRatio) {\n                originY = 1.0*screenRatio/imageRatio;\n            }else\n            {\n                originY = 1.0;\n            }\n            rect = CGRectMake(0, (1-originY)/2.0*self.frame.size.height, self.frame.size.width, self.frame.size.height*originY);\n      
  }\n        else\n        {\n            \n            if (imageRatio > screenRatio) {\n                originX = 1.0;\n                originY = 1.0;\n            }else\n            {\n                originX = 1.0*imageRatio/screenRatio;\n                originY = 1.0;\n            }\n            rect = CGRectMake((1-originX)/2.0*self.frame.size.width, (1-originY)/2.0*self.frame.size.height, self.frame.size.width*originX, self.frame.size.height*originY);\n        }\n        \n        CGSize superSize = self.glPreview.frame.size;\n        CGSize fitSize = [self fitSizeWithImage:self.image superSize:superSize];\n        CGRect subFrame = CGRectMake((superSize.width - fitSize.width) / 2.0, (superSize.height - fitSize.height) / 2.0, fitSize.width, fitSize.height);\n\n        \n        _strectchSelView = [[DDGLShapeSelView alloc]initWithFrame:rect type:_type subFrame:subFrame];\n        _strectchSelView.valueRange = _range;\n        _strectchSelView.delegate = self;\n        [self addSubview:_strectchSelView];\n        \n    }\n    return _strectchSelView;\n}\n\n- (CGSize)fitSizeWithImage:(UIImage *)image superSize:(CGSize)superSize\n{\n    float imageRatio = image.size.width / image.size.height;\n    float superRatio = superSize.width / superSize.height;\n    CGSize size = superSize;\n    \n    if (superRatio > imageRatio)\n    {\n        size.height = superSize.height;\n        size.width = superSize.height * imageRatio;\n    }\n    else\n    {\n        size.width = superSize.width;\n        size.height = superSize.width / imageRatio;\n    }\n    \n    return size;\n}\n\n\n\n- (void)changeValue:(float)value\n{\n    [self.glShapeFilter changeValue:value];\n    [self.glImagePicture processImage];\n    \n    \n}\n\n- (void)changeRange:(DDGLNormValueRange)range\n{\n    _range = range;\n    _strectchSelView.valueRange = _range;\n    self.glShapeFilter.minValue = _range.min;\n    self.glShapeFilter.maxValue = _range.max;\n\n}\n\n\n- (DDGLNormValueRange)getRange\n{\n    return _range;\n}\n\n\n#pragma mark DDGLShapeSelViewDelegate\n- (void)strectchSelViewSwiping\n{\n    //滚动中\n    if (self.delegate && [self.delegate respondsToSelector:@selector(shapingViewSwiping)]) {\n        if (!self.swipingBool) {\n            [self.glShapeFilter changeValue:0];\n            [self.glImagePicture processImage];\n            [self.delegate shapingViewSwiping];\n        }\n        self.swipingBool = YES;\n    }\n}\n\n#pragma mark 重写点击区域\n-(UIView *)hitTest:(CGPoint)point withEvent:(UIEvent *)event {\n    UIView *view = [super hitTest:point withEvent:event];\n    if (view == nil)\n    {\n        for (UIView*subView in self.subviews)\n        {\n            if ([subView isKindOfClass:[DDGLShapeSelView class]])\n            {\n                CGPoint lowLeftPoint = [subView convertPoint:point fromView:self];\n                view = [subView hitTest:lowLeftPoint withEvent:event];\n            }\n        }\n    }\n    return view;\n}\n\n- (void)strectchSelViewSwipEndMaxValue:(float)max minValue:(float)min\n{\n    _range.max = max;\n    _range.min = min;\n    self.swipingBool = NO;\n    self.glShapeFilter.minValue = _range.min;\n    self.glShapeFilter.maxValue = _range.max;\n    if (self.delegate && [self.delegate respondsToSelector:@selector(shapingViewSwipEndMaxValue:minValue:)]) {\n        [self.delegate shapingViewSwipEndMaxValue:max minValue:min];\n    }\n}\n\n\n- (void)getOriginImageVertexConfig\n{\n\n    __weak typeof(self) weakSelf = self;\n    \n    [self.glShapeFilter 
getVerticesAndTextureCoordinatesHandle:^(NSArray *squareVertexes, NSArray *textureCoordinates, float changeValue,NSInteger type){\n        \n        if (weakSelf.delegate && [weakSelf.delegate respondsToSelector:@selector(shapingViewGetVertexArray:textureCoordinateArray:changeValue:type:)])\n        {\n            [weakSelf.delegate shapingViewGetVertexArray:squareVertexes textureCoordinateArray:textureCoordinates changeValue:changeValue type:type];\n        }\n        \n    }];\n\n}\n\n/** 隐藏选择区域的UI */\n- (void)hideStrectchSelView\n{\n    self.strectchSelView.hidden = YES;\n}\n\n/** 显示选择区域的UI */\n- (void)showStrectchSelView\n{\n    self.strectchSelView.hidden = NO;\n}\n\n- (UIImage *)getProcessImage\n{\n    [self.glImagePicture processImage];\n    [self.glShapeFilter useNextFrameForImageCapture];\n    return [self.glShapeFilter imageFromCurrentFramebuffer];\n}\n\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DDShapeViewController/Rise/View/Slider/ATSliderView.h",
    "content": "//\n//  ATSliderView.h\n//  Artist\n//\n//  Created by 刘海东 on 2018/6/30.\n//  Copyright © 2018年 wecut. All rights reserved.\n//\n\n#import \"WeSliderView.h\"\n\n@interface ATSliderView : WeSliderView\n\n@property (nonatomic, copy) NSString *topLabValue;\n\n/** 默认显示 */\n@property (nonatomic, assign) BOOL hideTopLab;\n\n\n/** 自动显示 默认不显示 */\n@property (nonatomic, assign)IBInspectable BOOL autoTopLab;\n/** 滑块的宽度大小 */\n@property (nonatomic, assign)IBInspectable  float thumbSize;\n\n\n-(void)configBigFollowView;\n\n-(void)configSmallFollowView;\n\n/**滑动球的默认大小：26 */\n-(void)configDefaultThumbWidth;\n\n/**滑动球的默认大小：20 */\n-(void)configSmallThumbWidth;\n\n\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DDShapeViewController/Rise/View/Slider/ATSliderView.m",
    "content": "//\n//  ATSliderView.m\n//  Artist\n//\n//  Created by 刘海东 on 2018/6/30.\n//  Copyright © 2018年 wecut. All rights reserved.\n//\n\n#import \"ATSliderView.h\"\n\n@interface ATSliderView ()\n@property (nonatomic, strong) UILabel *topLab;\n@property (nonatomic, assign) float topLabW;\n\n@end\n\n@implementation ATSliderView\n\n- (void)awakeFromNib\n{\n    [super awakeFromNib];\n    [self configBigFollowView];\n}\n\n\n-(void)configSmallFollowView\n{\n    self.topLabW = 40;\n    [self configUI];\n    self.thumbWidth = 20;\n}\n\n\n\n- (void)setThumbSize:(float)thumbSize\n{\n    _thumbSize = thumbSize;\n    self.thumbWidth = _thumbSize;\n}\n\n-(void)configBigFollowView\n{\n    self.topLabW = 50;\n    self.thumbWidth = 26;\n    [self configUI];\n}\n\n-(void)configDefaultThumbWidth {\n    self.thumbWidth = 26;\n}\n\n-(void)configSmallThumbWidth {\n    self.thumbWidth = 20;\n}\n\n- (void)configUI\n{\n\n    self.topLab.frame = CGRectMake(0, 0, self.topLabW, self.topLabW);\n\n    self.followView = self.topLab;\n\n    self.followViewIntervalY = 18;\n    self.minmimValue = 0.0;\n    self.maxmimValue = 1.0;\n    self.thumbTintColor = [UIColor whiteColor];\n    self.trackTintColor = [UIColor colorWithRed:1 green:1 blue:1 alpha:0.3];\n    self.trackHeight = 2.0;\n    self.progressTintColor = [UIColor whiteColor];\n    WEAKSELF\n    self.touchBeginHandler = ^(float value)\n    {\n        weakSelf.progress = value;\n    };\n    if (_thumbSize) {\n        self.thumbWidth = _thumbSize;\n    }\n\n}\n\n\n- (UILabel *)topLab\n{\n    if (!_topLab) {\n        \n        _topLab = [[UILabel alloc]initWithFrame:CGRectMake(0, 0, self.topLabW, self.topLabW)];\n        _topLab.textAlignment = NSTextAlignmentCenter;\n        _topLab.font = [UIFont boldSystemFontOfSize:16.f];\n        _topLab.textColor = [UIColor colorWithRed:0/255.0 green:0/255.0 blue:0/255.0 alpha:1];\n        _topLab.layer.cornerRadius = self.topLabW/2.0;\n        _topLab.layer.masksToBounds = YES;\n        _topLab.backgroundColor = [UIColor colorWithRed:255/255.0 green:255/255.0 blue:255/255.0 alpha:0.95];\n    }\n    return _topLab;\n}\n\n- (void)setTopLabValue:(NSString *)topLabValue\n{\n    if (![self.topLab.text isEqualToString:topLabValue] && ([topLabValue isEqualToString:@\"-100\"] || [topLabValue isEqualToString:@\"0\"] || [topLabValue isEqualToString:@\"100\"])) {\n    }\n    self.topLab.text = topLabValue;\n}\n\n- (void)setHideTopLab:(BOOL)hideTopLab\n{\n    _hideTopLab = hideTopLab;\n    self.topLab.hidden = hideTopLab;\n}\n\n- (void)setProgress:(float)progress\n{\n    [super setProgress:progress];\n    if (_autoTopLab) {\n        self.topLabValue = [NSString stringWithFormat:@\"%.0f\",MIN(progress*100, 100)];\n    }\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DDShapeViewController/Rise/View/Slider/WeSliderView.h",
    "content": "//\n//  WeSliderView.h\n//  LWSliderViewDemo\n//\n//  Created by Leo on 2018/3/14.\n//  Copyright © 2018年 leo. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n\n@interface WeSliderView : UIControl\n\n/** 最小值 defult 0.0 */\n@property (nonatomic, assign) IBInspectable float     minmimValue;\n/** 最大值 defult 1.0 */\n@property (nonatomic, assign) IBInspectable float     maxmimValue;\n/** defult 0.0 */\n@property (nonatomic, assign) IBInspectable float     value;\n/** 滑块颜色 */\n@property (nonatomic, strong)  UIColor   *thumbTintColor;\n/** 滑块图片 */\n@property (nonatomic, strong)  UIImage   *thumbImage;\n/** 轨迹颜色 */\n@property (nonatomic, strong)  UIColor   *trackTintColor;\n/** 进度条颜色 */\n@property (nonatomic, strong)  UIColor   *progressTintColor;\n/** 轨迹高度 defult 4.0 */\n@property (nonatomic, assign) IBInspectable float     trackHeight;\n/** 隐藏滑块 */\n@property (nonatomic, assign) IBInspectable BOOL      hiddenThumb;\n/** 滑块上部跟随视图 */\n@property (nonatomic, strong) UIView    *followView;\n/** 滑块上部跟随视图 和滑块的间隙 */\n@property (nonatomic, assign) float followViewIntervalY;\n/** 滑块的宽度大小 */\n@property (nonatomic, assign)  float thumbWidth;\n\n@property (nonatomic, strong) UIView *trackView;\n\n/** 是否在value = 0.0处停顿下 defult YES 只在最小最大值异号时才起作用 */\n@property (nonatomic, assign IBInspectable) BOOL      needInterruptAtZero;\n@property (nonatomic, assign) float progress;\n\n/** 回调Block */\n@property (nonatomic, copy) void (^valueDidChangeHandler)(float value);\n@property (nonatomic, copy) void (^touchBeginHandler)(float value);\n@property (nonatomic, copy) void (^touchEndHandler)(float value);\n\n/** 设置轨道边框 */\n- (void)setTrackBorderWidth:(float)width color:(UIColor *)color;\n/** 设置滑块边框 */\n- (void)setThumbBorderWidth:(float)width color:(UIColor *)color;\n\n- (void)setValue:(float)value animation:(BOOL)animation;\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DDShapeViewController/Rise/View/Slider/WeSliderView.m",
    "content": "//\n//  WeSliderView.m\n//  LWSliderViewDemo\n//\n//  Created by Leo on 2018/3/14.\n//  Copyright © 2018年 leo. All rights reserved.\n//\n\n#import \"WeSliderView.h\"\n\n@interface WeSliderView () <UIGestureRecognizerDelegate>\n\n\n@property (nonatomic, strong) UIView *progressView;\n@property (nonatomic, strong) UIImageView *thumbView;\n@property (nonatomic, assign) float ratio;\n\n@end\n\n@implementation WeSliderView\n{\n    float currentRatio;\n    float threshold;\n}\n\n- (instancetype)initWithFrame:(CGRect)frame\n{\n    self = [super initWithFrame:frame];\n    if (self)\n    {\n        [self createSubviews];\n        [self setup];\n    }\n    return self;\n}\n\n- (instancetype)initWithCoder:(NSCoder *)aDecoder\n{\n    self = [super initWithCoder:aDecoder];\n    if (self)\n    {\n        [self createSubviews];\n        [self setup];\n    }\n    return self;\n}\n\n- (void)setup\n{\n    threshold   = 0.1;\n    _thumbWidth  = 20;\n    \n    _minmimValue = -1.0;\n    _maxmimValue = 1.0;\n    \n    _progress    = -1.0;\n    _trackHeight = 4.0;\n    self.ratio   = 0.0;\n    _followViewIntervalY = 0.0;\n    \n    _needInterruptAtZero = YES;\n    \n    self.thumbTintColor = [UIColor colorWithRed:255/255.0 green:108/255.0 blue:156/255.0 alpha:1.0];\n    self.trackTintColor = [UIColor lightGrayColor];\n    self.progressTintColor = [UIColor colorWithRed:255/255.0 green:108/255.0 blue:156/255.0 alpha:1.0];\n}\n\n- (void)createSubviews\n{\n    self.trackView = [[UIView alloc] init];\n    self.trackView.clipsToBounds = YES;\n    [self addSubview:self.trackView];\n    \n    self.progressView = [[UIView alloc] init];\n    [self.trackView addSubview:self.progressView];\n    \n    self.thumbView = [[UIImageView alloc] init];\n    self.thumbView.layer.cornerRadius = _thumbWidth / 2.0;\n    [self addSubview:self.thumbView];\n    \n    UIPanGestureRecognizer *panGesture = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(panGestureActionHandler:)];\n    panGesture.delegate = self;\n    [self addGestureRecognizer:panGesture];\n}\n\n- (CGPoint)centerPoint\n{\n    return CGPointMake(self.bounds.size.width / 2.0, self.bounds.size.height / 2.0);\n}\n\n- (void)layoutSubviews\n{\n    CGPoint center = [self centerPoint];\n    self.trackView.frame = CGRectMake(0, 0, self.bounds.size.width - _thumbWidth, _trackHeight);\n    self.trackView.center = center;\n    self.trackView.layer.cornerRadius = _trackHeight / 2.0;\n    \n    self.progressView.frame = self.trackView.bounds;\n    self.thumbView.frame = CGRectMake(0, 0, _thumbWidth, _thumbWidth);\n    self.thumbView.layer.cornerRadius = _thumbWidth / 2.0;\n    \n    [self updateProgressFrame];\n}\n\n- (void)updateProgressFrame\n{\n    float x = [self isOriginalPointCenter] ? 
self.trackView.bounds.size.width / 2.0 : 0.0;\n    float w = [self progressWidth];\n    self.progressView.frame = CGRectMake(x, 0, w * _progress, self.trackView.bounds.size.height);\n    self.thumbView.center = CGPointMake(self.trackView.frame.origin.x + x + w * _progress, [self centerPoint].y);\n    \n    if (self.followView)\n    {\n        self.followView.center = CGPointMake(self.thumbView.center.x, CGRectGetMinY(self.thumbView.frame) - self.followView.frame.size.height / 2.0 - _followViewIntervalY);\n    }\n}\n\n- (void)setThumbTintColor:(UIColor *)thumbTintColor\n{\n    _thumbTintColor = thumbTintColor;\n    self.thumbView.backgroundColor = thumbTintColor;\n}\n\n- (void)setTrackTintColor:(UIColor *)trackTintColor\n{\n    _trackTintColor = trackTintColor;\n    self.trackView.backgroundColor = trackTintColor;\n}\n\n- (void)setProgressTintColor:(UIColor *)progressTintColor\n{\n    _progressTintColor = progressTintColor;\n    self.progressView.backgroundColor = progressTintColor;\n}\n\n- (void)setThumbImage:(UIImage *)thumbImage\n{\n    _thumbImage = thumbImage;\n    [self.thumbView setImage:thumbImage];\n    \n    if (thumbImage)\n    {\n        self.thumbView.backgroundColor = [UIColor clearColor];\n        self.thumbView.layer.cornerRadius = 0.0;\n        _thumbWidth = thumbImage.size.width;\n        [self setNeedsLayout];\n    }\n}\n\n-(void)setThumbWidth:(float)thumbWidth {\n    _thumbWidth = thumbWidth;\n    [self setNeedsLayout];\n}\n\n- (void)setFollowView:(UIView *)followView\n{\n    if (_followView)\n    {\n        [_followView removeFromSuperview];\n    }\n    \n    _followView = followView;\n    followView.hidden = YES;\n    [self addSubview:_followView];\n}\n\n- (void)setTrackBorderWidth:(float)width color:(UIColor *)color\n{\n    self.trackView.layer.borderWidth = width;\n    self.trackView.layer.borderColor = color.CGColor;\n}\n\n- (void)setThumbBorderWidth:(float)width color:(UIColor *)color\n{\n    self.thumbView.layer.borderWidth = width;\n    self.thumbView.layer.borderColor = color.CGColor;\n}\n\n- (void)setTrackHeight:(float)trackHeight\n{\n    _trackHeight = trackHeight;\n    [self setNeedsLayout];\n}\n\n- (void)setHiddenThumb:(BOOL)hiddenThumb\n{\n    _hiddenThumb = hiddenThumb;\n    self.thumbView.hidden = hiddenThumb;\n}\n\n- (void)setMinmimValue:(float)minmimValue\n{\n    _minmimValue = minmimValue;\n    [self setValue:_value];\n}\n\n- (void)setFollowViewIntervalY:(float)followViewIntervalY\n{\n    _followViewIntervalY = followViewIntervalY;\n}\n\n- (void)setMaxmimValue:(float)maxmimValue\n{\n    _maxmimValue = maxmimValue;\n    [self setValue:_value];\n}\n\n- (void)setRatio:(float)ratio\n{\n    float minRatio = [self isOriginalPointCenter] ? -1.0 : threshold;\n    float currentThreshold = [self isOriginalPointCenter] && _needInterruptAtZero ? threshold : 0.0;\n    ratio = MAX(minRatio - threshold, MIN(1.0 + threshold, ratio));\n    _ratio = ratio;\n    \n    if (fabs(ratio) < currentThreshold)\n    {\n        self.progress = 0.0;\n    }\n    else\n    {\n        if (ratio > 0)\n        {\n            self.progress =  ratio - currentThreshold;\n        }\n        else\n        {\n            self.progress = ratio + currentThreshold;\n        }\n    }\n}\n\n- (void)setProgress:(float)progress\n{\n    float minProgress = [self isOriginalPointCenter] ? 
-1.0 : 0.0;\n    progress = MAX(minProgress, MIN(1.0, progress));\n    \n    if (_progress == progress)\n    {\n        return;\n    }\n    \n    _progress = progress;\n    [self updateProgressFrame];\n    \n    if (self.valueDidChangeHandler)\n    {\n        _value = progress >= 0 ? (_maxmimValue * progress) : (_minmimValue * -progress);\n        self.valueDidChangeHandler(_value);\n    }else{\n        _value = progress >= 0 ? (_maxmimValue * progress) : (_minmimValue * -progress);\n        [self sendActionsForControlEvents:UIControlEventValueChanged];\n    }\n}\n\n- (void)setValue:(float)value\n{\n    value = MAX(_minmimValue, MIN(_maxmimValue, value));\n    _value = value;\n    \n    if (_minmimValue != _maxmimValue)\n    {\n        if (_minmimValue >= 0)\n        {\n            _progress = (value - _minmimValue) / (_maxmimValue - _minmimValue);\n        }\n        else\n        {\n            _progress = value >= 0 ? value / _maxmimValue : -value / _minmimValue;\n        }\n        \n        float currentThreshold = [self isOriginalPointCenter] && _needInterruptAtZero ? threshold : 0.0;\n        \n        if (_progress >= 0)\n        {\n            _ratio = _progress + currentThreshold;\n        }\n        else\n        {\n            _ratio = _progress - currentThreshold;\n        }\n        \n        [self updateProgressFrame];\n    }\n}\n\n- (void)setValue:(float)value animation:(BOOL)animation\n{\n    if (animation)\n    {\n        [UIView animateWithDuration:0.35 animations:^{\n            self.value = value;\n        }];\n    }\n    else\n    {\n        self.value = value;\n    }\n}\n\n\n\n- (BOOL)isOriginalPointCenter\n{\n    return _minmimValue < 0 && _maxmimValue > 0;\n}\n\n- (float)progressWidth\n{\n    return [self isOriginalPointCenter] ? self.trackView.bounds.size.width / 2.0 : self.trackView.bounds.size.width;\n}\n\n- (void)panGestureActionHandler:(UIPanGestureRecognizer *)gesture\n{\n    if (gesture.state == UIGestureRecognizerStateBegan)\n    {\n        currentRatio  = self.ratio;\n    }\n    else if (gesture.state == UIGestureRecognizerStateChanged)\n    {\n        CGPoint translate = [gesture translationInView:gesture.view];\n        \n        if ([self isOriginalPointCenter])\n        {\n            self.ratio = currentRatio + translate.x / self.progressWidth * (1.0 + threshold);\n        }\n        else\n        {\n            self.ratio = currentRatio + translate.x / self.progressWidth;\n        }\n    }\n    else\n    {\n        [self doTouchEndAction];\n    }\n}\n\n- (void)doTouchBeginAction\n{\n    if (self.touchBeginHandler)\n    {\n        self.touchBeginHandler(_value);\n    }\n    \n    self.followView.hidden = NO;\n}\n\n- (void)doTouchEndAction\n{\n    [self sendActionsForControlEvents:UIControlEventTouchUpInside];\n\n    if (self.touchEndHandler)\n    {\n        self.touchEndHandler(_value);\n    }\n\n    self.followView.hidden = YES;\n}\n\n- (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event\n{\n    [super touchesBegan:touches withEvent:event];\n    [self doTouchBeginAction];\n}\n\n- (void)touchesEnded:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event\n{\n    [super touchesEnded:touches withEvent:event];\n    [self doTouchEndAction];\n}\n\n- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer shouldRecognizeSimultaneouslyWithGestureRecognizer:(UIGestureRecognizer *)otherGestureRecognizer\n{\n    return YES;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DDShapeViewController/View/UIView+Xib.h",
    "content": "//\n//  UIView+Xib.h\n//  MLProject\n//\n//  Created by 妙龙赖 on 15/11/22.\n//  Copyright © 2015年 妙龙赖. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n\n@interface UIView (Xib)\n@property (nonatomic, strong, readonly) id containerView;\n/**\n *  生成与自身类同名的xibView,且约束与自己相同大小\n 同时将自己设置为FileOwner\n\n */\n- (void)setupSelfNameXibOnSelf;\n- (void)setupSelfNameXibOnSelfWithSerialNumber:(NSInteger)number;\n- (instancetype)loadSelfXibWithFileOwner:(id)fileOwner;\n- (instancetype)loadSelfXibWithFileOwner:(id)fileOwner serialNumber:(NSInteger)number;\n\n- (void)setupXibWithName:(NSString *)name;\n- (instancetype)loadXibWithName:(NSString *)name;\n- (instancetype)loadXibWithName:(NSString *)name serialNumber:(NSInteger)number;\n\n- (instancetype)loadXibWithName:(NSString *)name  FileOwner:(id)fileOwner serialNumber:(NSInteger)number;\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DDShapeViewController/View/UIView+Xib.m",
    "content": "//\n//  UIView+Xib.m\n//  MLProject\n//\n//  Created by 妙龙赖 on 15/11/22.\n//  Copyright © 2015年 妙龙赖. All rights reserved.\n//\n\n#import \"UIView+Xib.h\"\n#import <objc/runtime.h>\n\n@implementation UIView (Xib)\n- (void)setupSelfNameXibOnSelf\n{\n   \n    [self setupSelfNameXibOnSelfWithSerialNumber:0];\n}\n\n- (void)setupSelfNameXibOnSelfWithSerialNumber:(NSInteger)number\n{\n    UIView *containerView = [self loadSelfXibWithFileOwner:self serialNumber:number];\n    [self addSubview:containerView];\n}\n- (instancetype)loadSelfXibWithFileOwner:(id)fileOwner\n{\n    \n   return [self loadSelfXibWithFileOwner:fileOwner serialNumber:0];\n}\n\n- (instancetype)loadSelfXibWithFileOwner:(id)fileOwner serialNumber:(NSInteger)number\n{\n    \n    UIView *containerView = [self loadXibWithName:NSStringFromClass([self class]) FileOwner:self serialNumber:0];\n    return containerView;\n}\n\n\n- (void)setupXibWithName:(NSString *)name\n{\n    UIView *contianerView = [self loadXibWithName:name];\n    [self addSubview:contianerView];\n  \n}\n- (instancetype)loadXibWithName:(NSString *)name\n{\n    return [self loadXibWithName:name serialNumber:0];\n}\n- (instancetype)loadXibWithName:(NSString *)name serialNumber:(NSInteger)number\n{\n    return [self loadXibWithName:name FileOwner:self serialNumber:number];\n}\n- (instancetype)loadXibWithName:(NSString *)name  FileOwner:(id)fileOwner serialNumber:(NSInteger)number\n{\n    UIView *containerView = [[NSBundle mainBundle] loadNibNamed:name owner:fileOwner options:nil][number];\n    containerView.frame = self.bounds;\n    containerView.autoresizingMask = UIViewAutoresizingFlexibleHeight|UIViewAutoresizingFlexibleWidth;\n    objc_setAssociatedObject(fileOwner, @selector(containerView), containerView, OBJC_ASSOCIATION_RETAIN_NONATOMIC);\n    return containerView;\n}\n\n\n#pragma mark - ========= Setter & Getter =========\n- (id)containerView\n{\n    return objc_getAssociatedObject(self, @selector(containerView));\n}\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DouYinEffect/DouYinEffectTabView.h",
    "content": "//\n//  DouYinEffectTabView.h\n//  WEOpenGLDemo\n//\n//  Created by 刘海东 on 2019/2/20.\n//  Copyright © 2019 Leo. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import \"GLDouYinEffectViewController.h\"\nNS_ASSUME_NONNULL_BEGIN\n\n\n\n@protocol DouYinEffectTabViewDelegate <NSObject>\n\n- (void)didSelectEffectType:(DouYinEffectType)type;\n\n@end\n\n\n\n\n@interface DouYinEffectTabView : UIView\n\n@property (nonatomic, weak) id<DouYinEffectTabViewDelegate>delegate;\n\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DouYinEffect/DouYinEffectTabView.m",
    "content": "//\n//  DouYinEffectTabView.m\n//  WEOpenGLDemo\n//\n//  Created by 刘海东 on 2019/2/20.\n//  Copyright © 2019 Leo. All rights reserved.\n//\n\n#import \"DouYinEffectTabView.h\"\n\n@interface DouYinEffectTabView ()<UITableViewDelegate,UITableViewDataSource>\n@property (nonatomic, strong) UITableView *tabView;\n@property (nonatomic, strong) NSMutableArray *tabViewData;\n\n@end\n\n\nstatic const NSString *kEffectName = @\"effectName\";\nstatic const NSString *kEffectType = @\"effectType\";\n\n\n@implementation DouYinEffectTabView\n\n\n- (instancetype)initWithFrame:(CGRect)frame\n{\n    self = [super initWithFrame:frame];\n    if (self) {\n        [self tabView];\n    }\n    return self;\n}\n\n- (UITableView *)tabView\n{\n    if (!_tabView) {\n        [self configTableViewData];\n        _tabView = [[UITableView alloc]initWithFrame:self.bounds style:UITableViewStylePlain];\n        _tabView.delegate = self;\n        _tabView.dataSource = self;\n        _tabView.backgroundColor = [UIColor clearColor];\n        [self addSubview:_tabView];\n    }\n    return _tabView;\n}\n\n- (void)configTableViewData\n{\n    \n    self.tabViewData = [NSMutableArray array];\n    NSDictionary *dic1 = [self createDicEffectName:@\"三屏带滤镜\" effectType:DouYinEffectType_GLImageThreePartition];\n    NSDictionary *dic2 = [self createDicEffectName:@\"四屏\" effectType:DouYinEffectType_GLImageFourPointsMirrorFilter];\n    NSDictionary *dic3 = [self createDicEffectName:@\"电流\" effectType:DouYinEffectType_GLImageGlitchEffectLineFilter];\n    NSDictionary *dic4 = [self createDicEffectName:@\"格子故障\" effectType:DouYinEffectType_GLImageGlitchEffectGridFilter];\n    NSDictionary *dic5 = [self createDicEffectName:@\"灵魂出窍\" effectType:DouYinEffectType_GLImageSoulOutFilter];\n    NSDictionary *dic6 = [self createDicEffectName:@\"放大缩小\" effectType:DouYinEffectType_GLImageZoomFilter];\n    NSDictionary *dic7 = [self createDicEffectName:@\"水面倒影\" effectType:DouYinEffectType_GLImageWaterReflectionFilter];\n    NSDictionary *dic8 = [self createDicEffectName:@\"模糊分屏\" effectType:DouYinEffectType_GLImageBlurSnapViewFilterGroup];\n    \n    [self.tabViewData addObject:dic1];\n    [self.tabViewData addObject:dic2];\n    [self.tabViewData addObject:dic3];\n    [self.tabViewData addObject:dic4];\n    [self.tabViewData addObject:dic5];\n    [self.tabViewData addObject:dic6];\n    [self.tabViewData addObject:dic7];\n    [self.tabViewData addObject:dic8];\n\n}\n\n- (NSDictionary *)createDicEffectName:(NSString *)effectName effectType:(DouYinEffectType)effectType\n{\n    return @{kEffectName:effectName,\n             kEffectType:@(effectType)\n             };\n}\n\n#pragma mark ------------------------------------------------------ ttabDelete ------------------------------------------------------\n- (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section\n{\n    return self.tabViewData.count;\n}\n\n- (NSInteger)numberOfSectionsInTableView:(UITableView *)tableView\n{\n    return 1;\n}\n\n- (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath\n{\n    static NSString *cellId = @\"cellID\";\n    \n    UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:cellId];\n    \n    if (!cell) {\n        cell = [[UITableViewCell alloc]initWithStyle:UITableViewCellStyleDefault reuseIdentifier:cellId];\n    }\n    \n    cell.backgroundColor = [UIColor clearColor];\n    [tableView deselectRowAtIndexPath:indexPath animated:NO];\n    cell.selectionStyle = 
UITableViewCellSelectionStyleNone;\n    \n    NSDictionary *dic = self.tabViewData[indexPath.row];\n    cell.textLabel.text = dic[kEffectName];\n    cell.textLabel.textAlignment = NSTextAlignmentCenter;\n    cell.textLabel.textColor = [UIColor blueColor];\n    \n    return cell;\n}\n\n- (void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath\n{\n    NSDictionary *dic = self.tabViewData[indexPath.row];\n    \n    DouYinEffectType type = [dic[kEffectType] integerValue];\n    if (self.delegate && [self.delegate respondsToSelector:@selector(didSelectEffectType:)]) {\n        [self.delegate didSelectEffectType:type];\n    }\n    \n    \n}\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DouYinEffect/GLDouYinEffectViewController.h",
    "content": "//\n//  GLDouYinEffectViewController.h\n//  WEOpenGLDemo\n//\n//  Created by 刘海东 on 2019/2/19.\n//  Copyright © 2019 Leo. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import \"BaseViewController.h\"\nNS_ASSUME_NONNULL_BEGIN\n\ntypedef NS_ENUM(NSInteger,DouYinEffectType)\n{\n    \n    /** 抖音三屏带滤镜 */\n    DouYinEffectType_GLImageThreePartition = 0,\n    /** 抖音四分镜 */\n    DouYinEffectType_GLImageFourPointsMirrorFilter,\n    /** 毛刺 */\n    DouYinEffectType_GLImageGlitchEffectLineFilter,\n    /** 格子故障 */\n    DouYinEffectType_GLImageGlitchEffectGridFilter,\n    /** 灵魂出窍 */\n    DouYinEffectType_GLImageSoulOutFilter,\n    /** 放大 */\n    DouYinEffectType_GLImageZoomFilter,\n    /** 水面倒影 */\n    DouYinEffectType_GLImageWaterReflectionFilter,\n    /** 模糊分屏 */\n    DouYinEffectType_GLImageBlurSnapViewFilterGroup,\n};\n\n\n\n\n\n@interface GLDouYinEffectViewController : BaseViewController\n\n\n\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/DouYinEffect/GLDouYinEffectViewController.m",
    "content": "//\n//  GLDouYinEffectViewController.m\n//  WEOpenGLDemo\n//\n//  Created by 刘海东 on 2019/2/19.\n//  Copyright © 2019 Leo. All rights reserved.\n//\n\n#import \"GLDouYinEffectViewController.h\"\n#import \"DouYinEffectTabView.h\"\n#import <GPURenderKit/GPURenderKit.h>\n\n@interface GLDouYinEffectViewController ()<DouYinEffectTabViewDelegate>\n@property (nonatomic, strong) GPUImageVideoCamera *videoCamera;\n@property (nonatomic, strong) GPUImageView *preview;\n@property (nonatomic, strong) GLImageThreePartitionGroupFilter *partitionFilter;\n@property (nonatomic, strong) GLImageFourPointsMirrorFilter *pointsMirrorFiter;\n@property (nonatomic, strong) GLImageGlitchEffectGridFilter *glitchEffectGridFilter;\n@property (nonatomic, strong) GLImageGlitchEffectLineFilter *glitchEffectLineFilter;\n@property (nonatomic, strong) GLImageSoulOutFilter *soulOutFilter;\n@property (nonatomic, strong) GLImageZoomFilter *zoomFilter;\n@property (nonatomic, strong) GLImageWaterReflectionFilter *waterReflectionFilter;\n@property (nonatomic, strong) GLImageBlurSnapViewFilterGroup *blurSnapViewFilter;\n\n@property (nonatomic, strong) DouYinEffectTabView *douYinEffectTabView;\n@property (nonatomic, strong) GPUImageOutput<GPUImageInput> *outPutFilter;\n\n@property (nonatomic, assign) DouYinEffectType selectEffectType;\n@property (nonatomic, strong) CADisplayLink *displayLink;\n\n\n@end\n\n\n@implementation GLDouYinEffectViewController\n\n- (void)viewDidLoad {\n    [super viewDidLoad];\n    \n    self.view.backgroundColor = [UIColor whiteColor];\n    self.preview = [[GPUImageView alloc] initWithFrame:self.view.bounds];\n    self.preview.layer.contentsScale = 2.0;\n    self.preview.backgroundColor = [UIColor blackColor];\n    [self.preview setBackgroundColorRed:0.0 green:0.0 blue:0.0 alpha:1.0];\n    [self.view addSubview:self.preview];\n    \n    self.outPutFilter = self.partitionFilter;\n    [self.outPutFilter addTarget:self.preview];\n    [self.videoCamera addTarget:self.partitionFilter];\n\n    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{\n        [self.videoCamera startCameraCapture];\n    });\n    \n    [self douYinEffectTabView];\n}\n\n- (void)viewWillDisappear:(BOOL)animated{\n    \n    [super viewWillDisappear:animated];\n    [self stopDisplayLink];\n}\n\n- (GPUImageVideoCamera *)videoCamera\n{\n    if (!_videoCamera)\n    {\n        _videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1280x720 cameraPosition:AVCaptureDevicePositionBack];\n        _videoCamera.runBenchmark = NO;\n        _videoCamera.horizontallyMirrorFrontFacingCamera = YES;\n        _videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;\n    }\n    \n    return _videoCamera;\n}\n\n- (GLImageThreePartitionGroupFilter *)partitionFilter\n{\n    if (!_partitionFilter) {\n        _partitionFilter = [[GLImageThreePartitionGroupFilter alloc]init];\n        [_partitionFilter setTopLutImg:[UIImage imageNamed:@\"xiatian\"]];\n        [_partitionFilter setMidLutImg:[UIImage imageNamed:@\"meishi\"]];\n        [_partitionFilter setBottomLutImg:[UIImage imageNamed:@\"heibai\"]];\n    }\n    return _partitionFilter;\n}\n\n- (GLImageFourPointsMirrorFilter *)pointsMirrorFiter\n{\n    if (!_pointsMirrorFiter) {\n        _pointsMirrorFiter = [[GLImageFourPointsMirrorFilter alloc]init];\n    }\n    return _pointsMirrorFiter;\n}\n\n- (GLImageGlitchEffectLineFilter *)glitchEffectLineFilter{\n    if (!_glitchEffectLineFilter) {\n       
 _glitchEffectLineFilter = [[GLImageGlitchEffectLineFilter alloc]init];\n    }\n    return _glitchEffectLineFilter;\n}\n\n- (GLImageGlitchEffectGridFilter *)glitchEffectGridFilter{\n    if (!_glitchEffectGridFilter) {\n        _glitchEffectGridFilter = [[GLImageGlitchEffectGridFilter alloc]init];\n        [_glitchEffectGridFilter setPlaidImage:[UIImage imageNamed:@\"glitchPicture000.png\"]];\n    }\n    return _glitchEffectGridFilter;\n}\n\n- (GLImageSoulOutFilter *)soulOutFilter{\n    if (!_soulOutFilter) {\n        _soulOutFilter = [[GLImageSoulOutFilter alloc]init];\n    }\n    return _soulOutFilter;\n}\n\n- (GLImageZoomFilter *)zoomFilter{\n    if (!_zoomFilter) {\n        \n        _zoomFilter = [[GLImageZoomFilter alloc]init];\n    }\n    return _zoomFilter;\n}\n\n- (GLImageWaterReflectionFilter *)waterReflectionFilter{\n  \n    if (!_waterReflectionFilter) {\n        \n        _waterReflectionFilter = [[GLImageWaterReflectionFilter alloc]init];\n    }\n    return _waterReflectionFilter;\n}\n\n- (GLImageBlurSnapViewFilterGroup *)blurSnapViewFilter{\n    \n    if (!_blurSnapViewFilter) {\n        _blurSnapViewFilter = [[GLImageBlurSnapViewFilterGroup alloc]init];\n    }\n    return _blurSnapViewFilter;\n}\n\n\n- (DouYinEffectTabView *)douYinEffectTabView\n{\n    \n    if (!_douYinEffectTabView)\n    {\n        _douYinEffectTabView = [[DouYinEffectTabView alloc]initWithFrame:CGRectMake(100, (kScreen_H - 200)/2.0, kScreen_W - 100, 200)];\n        _douYinEffectTabView.delegate = self;\n        [self.view addSubview:_douYinEffectTabView];\n    }\n    return _douYinEffectTabView;\n    \n}\n\n- (void)startDisplayLinkFrameInterval:(NSInteger)frameInterval{\n    self.displayLink = nil;\n    self.displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(handleDisplayLink:)];\n    [self.displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];\n    self.displayLink.frameInterval = frameInterval;\n}\n- (void)handleDisplayLink:(CADisplayLink*)displayLink\n{\n    switch (self.selectEffectType) {\n        case DouYinEffectType_GLImageGlitchEffectLineFilter:\n        {\n            self.glitchEffectLineFilter.intensity = arc4random()%100/100.0;\n        }\n            break;\n        case DouYinEffectType_GLImageGlitchEffectGridFilter:\n        {\n            self.glitchEffectGridFilter.intensity = arc4random()%100/100.0;\n            int index = arc4random()%6;\n            UIImage *image = [UIImage imageNamed:[NSString stringWithFormat:@\"glitchPicture00%d.png\",index]];\n            [self.glitchEffectGridFilter setPlaidImage:image];\n        }\n            break;\n            \n            \n        default:\n            break;\n            \n    }\n}\n\n- (void)stopDisplayLink{\n    \n    [self.displayLink invalidate];\n    self.displayLink.paused = YES;\n    _displayLink = nil;\n}\n\n- (void)dealloc\n{\n    [self.videoCamera stopCameraCapture];\n    _videoCamera = nil;\n}\n\n\n- (void)didSelectEffectType:(DouYinEffectType)type{\n    \n    self.selectEffectType = type;\n    [self stopDisplayLink];\n    [self.outPutFilter removeTarget:self.preview];\n    \n    switch (type) {\n        case DouYinEffectType_GLImageThreePartition:\n        {\n            self.outPutFilter = self.partitionFilter;\n        }\n            break;\n        case DouYinEffectType_GLImageFourPointsMirrorFilter:\n        {\n            self.outPutFilter = self.pointsMirrorFiter;\n        }\n            break;\n        case DouYinEffectType_GLImageGlitchEffectLineFilter:\n        {\n 
           self.outPutFilter = self.glitchEffectLineFilter;\n            [self startDisplayLinkFrameInterval:2];\n        }\n            break;\n        case DouYinEffectType_GLImageGlitchEffectGridFilter:\n        {\n            self.outPutFilter = self.glitchEffectGridFilter;\n            [self startDisplayLinkFrameInterval:30];\n        }\n            break;\n        case DouYinEffectType_GLImageSoulOutFilter:\n        {\n            self.outPutFilter = self.soulOutFilter;\n        }\n            break;\n        case DouYinEffectType_GLImageZoomFilter:\n        {\n            self.outPutFilter = self.zoomFilter;\n        }\n            break;\n        case DouYinEffectType_GLImageWaterReflectionFilter:\n        {\n            self.outPutFilter = self.waterReflectionFilter;\n        }\n            break;\n        case DouYinEffectType_GLImageBlurSnapViewFilterGroup:\n        {\n            \n            self.outPutFilter = self.blurSnapViewFilter;\n        }\n            break;\n\n            \n        default:\n            break;\n    }\n    \n    [self.outPutFilter addTarget:self.preview];\n    [self.videoCamera addTarget:self.outPutFilter];\n}\n\n\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Face/Face++/MGLicense/MGLicenseCommon.h",
    "content": "//\n//  MGLicenseCommon.h\n//  MGMobileSDKAuth\n//\n//  Created by 张英堂 on 2017/1/10.\n//  Copyright © 2017年 megvii. All rights reserved.\n//\n\n#ifndef MGLicenseCommon_h\n#define MGLicenseCommon_h\n\n\n\nstatic NSString *MGLicenseURL_CN = @\"https://api-cn.faceplusplus.com/sdk/v3/auth\";\nstatic NSString *MGLicenseURL_US = @\"https://api-us.faceplusplus.com/sdk/v3/auth\";\n\n\n\n\n#endif /* MGLicenseCommon_h */\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Face/Face++/MGLicense/MGLicenseManager.h",
    "content": "//\n//  MGLicenseManager.h\n//  MGBaseKit\n//\n//  Created by 张英堂 on 16/9/5.\n//  Copyright © 2016年 megvii. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import \"MGLicenseCommon.h\"\n\n@interface MGLicenseManager : NSObject\n\n\n/**\n 获取过期时间\n\n @param version SDK 版本号\n @return 过期日期\n */\n+ (NSDate *)getExpiretime:(NSString *)version;\n\n\n\n#pragma mark - 简单调用联网授权\n\n/**\n 联网获取授权信息\n \n @param UUID UUID\n @param version 通过SDK获取\n @param apiKey apiKey\n @param apiSecret apiSecret\n @param duration appKey有效期\n @param url\n @param complete 授权结束回调\n @return SessionTask\n */\n+ (NSURLSessionTask *)getLicenseWithUUID:(NSString *)UUID\n                                 version:(NSString *)version\n                                  apiKey:(NSString *)apiKey\n                               apiSecret:(NSString *)apiSecret\n                             apiDuration:(NSInteger)duration\n                               URLString:(NSString *)url\n                                  finish:(void(^)(bool License, NSError *error))complete;\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Face/Face++/MGLicenseManagerHelper/MGFaceLicenseHandle.h",
    "content": "//\n//  MGLicenseHandle.h\n//  MGSDKV2Test\n//\n//  Created by 张英堂 on 16/9/7.\n//  Copyright © 2016年 megvii. All rights reserved.\n//\n\n#import <Foundation/Foundation.h>\n#import <UIKit/UIKit.h>\n//#import <MGBaseKit/MGBaseKit.h>\n#import \"MGNetAccount.h\"\n#import \"MGLicenseManager.h\"\n\n\n\n@interface MGFaceLicenseHandle : NSObject\n\n\n/**\n *  获取当前SDK是否授权--- 子类需要重写该方法，通过该类获取的 是否授权无法全部包括使用的SDK\n *\n *  @return 是否授权\n */\n+ (BOOL)getLicense;\n\n+ (NSDate *)getLicenseDate;\n\n/**\n *  只有当授权时间少于 1天的时候，才会进行授权操作\n *\n *  @param finish \n */\n+ (void)licenseForNetwokrFinish:(void(^)(bool License, NSDate *sdkDate))finish;\n\n\n/**\n 获取 face SDK 是否需要联网授权\n\n @return 是否为联网授权版本\n */\n+ (BOOL)getNeedNetLicense;\n\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Face/Face++/MGLicenseManagerHelper/MGFaceLicenseHandle.m",
    "content": "//\n//  MGLicenseHandle.m\n//  MGSDKV2Test\n//\n//  Created by 张英堂 on 16/9/7.\n//  Copyright © 2016年 megvii. All rights reserved.\n//\n\n#import \"MGFaceLicenseHandle.h\"\n#import \"MGFacepp.h\"\n#import \"MGNetAccount.h\"\n\n\n@implementation MGFaceLicenseHandle\n\n\n+ (BOOL)getLicense{\n    NSDate *sdkDate = [self getLicenseDate];\n    return [self compareSDKDate:sdkDate];\n}\n\n\n+ (void)licenseForNetwokrFinish:(void(^)(bool License, NSDate *sdkDate))finish {\n    \n\n    \n    // 检查 apk\n    if ([MG_LICENSE_KEY isEqualToString:@\"\"] || [MG_LICENSE_SECRET isEqualToString:@\"\"]) {\n        UIAlertController *controller = [UIAlertController alertControllerWithTitle:@\"API Key 或 secret 不能为空\"\n                                                                            message:@\"请到官网申请 ‘https://www.faceplusplus.com.cn’\"\n                                                                     preferredStyle:UIAlertControllerStyleAlert];\n        UIAlertAction *action = [UIAlertAction actionWithTitle:@\"好\"\n                                                         style:UIAlertActionStyleCancel\n                                                       handler:nil];\n        [controller addAction:action];\n        UIViewController *rootViewController = [UIApplication sharedApplication].keyWindow.rootViewController;\n        UIViewController *currentVC = [MGFaceLicenseHandle getCurrentVCFrom:rootViewController];\n        [currentVC presentViewController:controller animated:YES completion:nil];\n        \n        if (finish) {\n            finish(NO, nil);\n        }\n        return;\n    }\n    \n    NSDate *licenSDKDate = [self getLicenseDate];\n\n    if ([self compareSDKDate:licenSDKDate] == NO) {\n        if (finish) {\n            finish(YES, [self getLicenseDate]);\n        }\n        return;\n    }\n    \n    NSString *version = [MGFacepp getSDKVersion];\n    NSString *uuid = [[[UIDevice currentDevice] identifierForVendor] UUIDString];\n    \n    [MGLicenseManager getLicenseWithUUID:uuid\n                                 version:version\n                                  apiKey:MG_LICENSE_KEY\n                               apiSecret:MG_LICENSE_SECRET\n                             apiDuration:1\n                               URLString:MGLicenseURL_CN\n                                  finish:^(bool License, NSError *error) {\n                                      if (error) {\n                                          NSLog(@\"Auth error = %@\", error);\n                                      }\n                                      \n                                      if (License) {\n                                          NSDate  *nowSDKDate = [self getLicenseDate];\n                                          \n                                          if (finish) {\n                                              finish(License, nowSDKDate);\n                                          }\n                                      }else{\n                                          if (finish) {\n                                              finish(License, licenSDKDate);\n                                          }\n                                      }\n                                  }];\n\n}\n\n+ (NSDate *)getLicenseDate {\n    NSString *modelPath = [[NSBundle mainBundle] pathForResource:KMGFACEMODELNAME ofType:@\"\"];\n    NSData *modelData = [NSData dataWithContentsOfFile:modelPath];\n    MGAlgorithmInfo *sdkInfo = [MGFacepp getSDKAlgorithmInfoWithModel:modelData];\n  
  if (sdkInfo.needNetLicense) {\n        NSString *version = [MGFacepp getSDKVersion];\n        NSDate *date = [MGLicenseManager getExpiretime:version];\n        NSLog(@\"过期时间 ： %@\",date);\n        return date;\n    } else {\n        NSLog(@\"SDK 为非联网授权版\");\n        return sdkInfo.expireDate;\n    }\n}\n\n+ (BOOL)compareSDKDate:(NSDate *)sdkDate{\n    \n    NSDate *nowDate = [NSDate date];\n    double result = [sdkDate timeIntervalSinceDate:nowDate];\n\n    \n    if (result >= 1*1*60*60.0) {\n        return NO;\n    }\n    return YES;\n}\n\n+ (BOOL)getNeedNetLicense{\n    \n    NSString *modelPath = [[NSBundle mainBundle] pathForResource:KMGFACEMODELNAME ofType:@\"\"];\n    NSData *modelData = [NSData dataWithContentsOfFile:modelPath];\n    \n    MGAlgorithmInfo *sdkInfo = [MGFacepp getSDKAlgorithmInfoWithModel:modelData];\n    NSLog(@\"\\n************\\nSDK 功能列表: %@\\n是否需要联网授权: %d\\n版本号:%@\\n过期时间:%@ \\n************\", sdkInfo.SDKAbility, sdkInfo.needNetLicense, sdkInfo.version, sdkInfo.expireDate);\n    \n    return sdkInfo.needNetLicense;\n}\n\n+ (UIViewController *)getCurrentVCFrom:(UIViewController *)rootVC\n{\n    UIViewController *currentVC;\n    \n    if ([rootVC presentedViewController]) {\n        // 视图是被presented出来的\n        rootVC = [rootVC presentedViewController];\n    }\n    \n    if ([rootVC isKindOfClass:[UITabBarController class]]) {\n        // 根视图为UITabBarController\n        currentVC = [self getCurrentVCFrom:[(UITabBarController *)rootVC selectedViewController]];\n    } else if ([rootVC isKindOfClass:[UINavigationController class]]){\n        // 根视图为UINavigationController\n        currentVC = [self getCurrentVCFrom:[(UINavigationController *)rootVC visibleViewController]];\n    } else {\n        // 根视图为非导航类\n        currentVC = rootVC;\n    }\n    \n    return currentVC;\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Face/Face++/MGLicenseManagerHelper/MGNetAccount.h",
    "content": "//\n//  MGNetAccount_example.h\n//  FaceppDemo\n//\n//  Created by 张英堂 on 2017/1/12.\n//  Copyright © 2017年 megvii. All rights reserved.\n//\n\n#ifndef MGNetAccount_example_h\n#define MGNetAccount_example_h\n\n\n\n// 访问 https://www.faceplusplus.com.cn， 登录后在控制台生成对应的 key 和 secret 填写到下面的字符串中\n\n#define MG_LICENSE_KEY      @\"8eDcVuRKZMh-Z6KCpl71PPYOd8RLJdCl\" // api_key\n#define MG_LICENSE_SECRET    @\"Nt_ttQSGaXq--K4UpKjdcG0uwqcv3Wg6\" // api_secret\n\n\n#endif /* MGNetAccount_example_h */\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Face/Face++/iOS_SDK/MGAlgorithmInfo.h",
    "content": "//\n//  MGAlgorithmInfo.h\n//  MGFacepp\n//\n//  Created by 张英堂 on 2017/1/10.\n//  Copyright © 2017年 megvii. All rights reserved.\n//\n\n#import <Foundation/Foundation.h>\n#import <UIKit/UIKit.h>\n\n#define MG_ABILITY_KEY_POSE3D               @\"pose3D\"\n#define MG_ABILITY_KEY_EYE_STATUS           @\"eyeStatus\"\n#define MG_ABILITY_KEY_MOUTH_SATUS          @\"mouthStatus\"\n#define MG_ABILITY_KEY_MINORITY             @\"minority\"\n#define MG_ABILITY_KEY_BLURNESS             @\"blurness\"\n#define MG_ABILITY_KEY_AGE_GENDER           @\"ageGender\"\n#define MG_ABILITY_KEY_EXTRACT_FEATURE      @\"extractFeature\"\n#define MG_ABILITY_KEY_TRACK_FAST           @\"trackFast\"\n#define MG_ABILITY_KEY_TRACK_ROBUST         @\"trackRobust\"\n#define MG_ABILITY_KEY_DETECT               @\"detect\"\n#define MG_ABILITY_KEY_DETECT_RECT          @\"detectRect\"\n#define MG_ABILITY_KEY_IDCARD_QUALITY       @\"IDCardQuality\"\n#define MG_ABILITY_KEY_TRACK                @\"track\"\n#define MG_ABILITY_KEY_TRACK_RECT           @\"track_rect\"\n\n\n@interface MGAlgorithmInfo : NSObject\n\n\n/**\n SDK 版本号\n */\n@property (nonatomic, copy, readonly) NSString *version;\n\n/**\n SDK 过期时间\n */\n@property (nonatomic, strong, readonly) NSDate *expireDate;\n\n\n/**\n 是否需要联网授权\n */\n@property (nonatomic, assign, readonly) BOOL needNetLicense;\n\n\n/**\n SDK 功能列表\n */\n@property (nonatomic, strong, readonly) NSArray *SDKAbility;\n\n\n/**\n SDK 限制的bundleId\n */\n@property (nonatomic, strong, readonly) NSString *bundleId;\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Face/Face++/iOS_SDK/MGDetectRectInfo.h",
    "content": "//\n//  MGDetectRect.h\n//  MGFacepp\n//\n//  Created by Megvii on 2017/10/18.\n//  Copyright © 2017年 megvii. All rights reserved.\n//\n\n#import <Foundation/Foundation.h>\n#import <UIKit/UIKit.h>\n\ntypedef NS_ENUM(NSInteger, MGOrientation) {\n    MGOrientationLeft,\n    MGOrientationUp,\n    MGOrientationRight,\n    MGOrientationDown,\n};\n\n@interface MGDetectRectInfo : NSObject\n\n@property (nonatomic, assign) float confidence;\n\n@property (nonatomic, assign) float angle;\n\n@property (nonatomic, assign) MGOrientation orient;\n\n@property (nonatomic, assign) CGRect rect;\n\n@end\n\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Face/Face++/iOS_SDK/MGFaceInfo.h",
    "content": "//\n//  MGFaceModel.h\n//  LandMask\n//\n//  Created by 张英堂 on 16/7/11.\n//  Copyright © 2016年 megvii. All rights reserved.\n//\n\n#import <Foundation/Foundation.h>\n#import <UIKit/UIKit.h>\n#import \"MGFaceppCommon.h\"\n\ntypedef NS_ENUM(NSInteger, MGGender) {\n    MGFemale = 0,\n    MGMale = 1,\n};\n\n@interface MGFaceInfo : NSObject\n\n/** tracking ID */\n@property (nonatomic, assign) NSInteger trackID;\n\n/** 在该张图片中人脸序号 */\n@property (nonatomic, assign) int index;\n\n/** 人脸的rect */\n@property (nonatomic, assign) CGRect rect;\n\n/** 人脸点坐标 （NSValue -> CGPoints）*/\n@property (nonatomic, strong) NSArray <NSValue *>*points;\n\n/** 该张人脸质量 */\n@property (nonatomic, assign) CGFloat confidence;\n\n\n#pragma mark 以下属性需要SDK版本支持 请使用 [MGAlgorithmInfo SDKAbility] 属性，获取SDK支持功能\n\n#pragma mark 需要主动调用 MGFacepp 相关方法获取\n//3D info\n@property (nonatomic, assign) float pitch;\n@property (nonatomic, assign) float yaw;\n@property (nonatomic, assign) float roll;\n\n/** 年龄 */\n@property (nonatomic, assign) float age;\n/** 性别 */\n@property (nonatomic, assign) MGGender gender;\n/** blurness */\n@property (nonatomic, assign) float blurness;\n/** minority */\n@property (nonatomic, assign) float minority;\n\n/** 眼状态 */\n@property (nonatomic, assign) MGEyeStatus leftEyesStatus;\n@property (nonatomic, assign) MGEyeStatus rightEyesStatus;\n\n/** 嘴状态 */\n@property (nonatomic, assign) MGMouthStatus mouseStatus;\n\n\n#pragma mark -\n\n/**\n 人脸的 feature\n */\n@property (nonatomic, strong, readonly) NSData *featureData;\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Face/Face++/iOS_SDK/MGFacepp.h",
    "content": "//\n//  MGFacepp.h\n//  LandMask\n//\n//  Created by 张英堂 on 16/9/5.\n//  Copyright © 2016年 megvii. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import <CoreMedia/CoreMedia.h>\n\n#import \"MGFaceppConfig.h\"\n#import \"MGImageData.h\"\n#import \"MGFaceppCommon.h\"\n#import \"MGAlgorithmInfo.h\"\n#import \"MGFaceInfo.h\"\n#import \"MGDetectRectInfo.h\"\n\n@interface MGFacepp : NSObject\n\n- (MGFaceppConfig *)getFaceppConfig;\n\n- (instancetype)init DEPRECATED_ATTRIBUTE;\n\n/**\n *  初始化方法 必须使用该方法初始化，否则会导致初始化失败。\n *\n *  @param modelData        model data\n *  @param config           设置的callback\n *  @return 实例化\n */\n- (instancetype)initWithModel:(NSData *)modelData\n                faceppSetting:(void(^)(MGFaceppConfig *config))config;\n\n/**\n 初始化方法\n\n @param modelData        model data\n @param maxFaceCount     一张图像中识别的最大人脸数，设置为1即为单脸跟踪\n @param config           设置的callback\n @return handle\n */\n- (instancetype)initWithModel:(NSData *)modelData maxFaceCount:(NSInteger)maxFaceCount faceppSetting:(void(^)(MGFaceppConfig *config))config;\n\n/**\n *  @param config        更新设置参数\n */\n- (BOOL)updateFaceppSetting:(void(^)(MGFaceppConfig *config))config;\n\n\n/**\n *  获取检测器目前状态\n */\n@property (nonatomic, assign, readonly) MGFaceppStatus status;\n\n\n#pragma mark - 检测人脸信息\n/**\n *  检测人脸信息\n *\n *  @param imagedata  检测的图片\n *  @return 检测结果（如果为 nil 时候，检测器异常，检测失败，请检测代码以及设置）\n */\n- (NSArray <MGFaceInfo *>*)detectWithImageData:(MGImageData *)imagedata;\n\n/**\n *  检测人脸框\n *\n *  @param imagedata  检测的图片\n *  @return 检测结果（如果为 nil 时候，检测器异常，检测失败，请检测代码以及设置）\n */\n- (NSInteger)getFaceNumberWithImageData:(MGImageData *)imagedata;\n\n\n/**\n *  人脸关键点平滑\n *\n *  @param faceInfo faceinfo model\n *  @param isSmooth 是否关键点平滑，防止人脸抖动\n *  @param nr       关键点个数\n */\n- (BOOL)GetGetLandmark:(MGFaceInfo *)faceInfo isSmooth:(BOOL)isSmooth pointsNumber:(int)nr;\n\n/**\n *  获取人脸框\n *  \n *\n *  @param index faceinfo model\n *  @param isSmooth 是否关键点平滑，防止人脸抖动\n */\n- (MGDetectRectInfo *)GetRectAtIndex:(int)index isSmooth:(BOOL)isSmooth;\n\n/**\n *  获取人脸 3D信息\n *  @param faceInfo faceInfo\n *  @return 是否获取成功\n */\n- (BOOL)GetAttribute3D:(MGFaceInfo *)faceInfo;\n\n/**\n *  获取 眼状态\n *  @param faceInfo faceInfo\n *  @return 是否获取成功\n */\n- (BOOL)GetAttributeEyeStatus:(MGFaceInfo *)faceInfo;\n\n/**\n *  获取 嘴状态\n *  @param faceInfo faceInfo\n *  @return 是否获取成功\n */\n- (BOOL)GetAttributeMouseStatus:(MGFaceInfo *)faceInfo;\n\n/**\n *  获取 年龄\n *  @param faceInfo faceInfo\n *  @return 是否获取成功\n */\n- (BOOL)GetAttributeAgeGenderStatus:(MGFaceInfo *)faceInfo;\n\n/**\n 获取 Blurness 状态\n\n @param faceInfo faceInfo\n @return 是否获取成功\n */\n- (BOOL)GetBlurnessStatus:(MGFaceInfo *)faceInfo;\n\n/**\n 获取 Minorit 状态\n\n @param faceInfo faceInfo\n @return 是否获取成功\n */\n- (BOOL)GetMinorityStatus:(MGFaceInfo *)faceInfo;\n\n/**\n 获取人脸的 Feature 数据\n\n @param faceInfo faceinfo\n @return 是否获取成功\n */\n- (BOOL)GetFeatureData:(MGFaceInfo *)faceInfo;\n\n/**\n 比较两个人脸相似度， 必须改 MGFaceInfo 获取过 Feature 数据才有效\n 如果成功返回人脸相似度， 如果比对失败，返回 -1.0\n \n @param faceInfo MGFaceInfo 1\n @param faceInf2 MGFaceInfo 2\n @return 比对相似度\n */\n- (float)faceCompareWithFaceInfo:(MGFaceInfo *)faceInfo faceInf2:(MGFaceInfo *)faceInf2;\n\n/**\n 比较两个人脸相似度\n 如果成功返回人脸相似度， 如果比对失败，返回 -1.0\n\n @param featureData featureData 1\n @param featureData2 featureData 2\n @return 相似度\n */\n- (float)faceCompareWithFeatureData:(NSData *)featureData featureData2:(NSData *)featureData2;\n\n\n/** 开启检测新的一帧，在每次调用 detectWithImageData: 之前调用。 */\n- (void)beginDetectionFrame;\n\n/** 
停止当前该帧的检测，在获取人脸详细信息后，需要主动调用该方法结束当前帧，以便进入下一帧 */\n- (void)endDetectionFrame;\n\n\n/**\n 释放算法资源\n 算法在计算时需要占用一些内存资源，必须在所有算法的句柄（handle）被释放后再调用\n \n @return 成功则返回 YES\n */\n- (BOOL)shutDown;\n\n#pragma mark - 类方法，获取 SDK 相关信息\n\n\n/**\n 获取版本号\n\n @return 版本号\n */\n+ (NSString *)getSDKVersion;\n\n\n/**\n 获取 SDK jenkins 号\n\n @return SDK jenkins 号\n */\n+ (NSString *)getJenkinsNumber;\n\n\n/**\n 清除track缓存\n\n @return 成功则返回 YES\n */\n- (BOOL)resetTrack;\n\n\n/**\n 获取 SDK 相关信息\n\n @param modelData model data\n @return sdk 相关信息\n */\n+ (MGAlgorithmInfo *)getSDKAlgorithmInfoWithModel:(NSData *)modelData;\n\n\n\n\n@end\n\n\n\n\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Face/Face++/iOS_SDK/MGFaceppCommon.h",
    "content": "//\n//  MGFaceppConfig.h\n//  MGLandMark\n//\n//  Created by 张英堂 on 16/9/5.\n//  Copyright © 2016年 megvii. All rights reserved.\n//\n\n#ifndef MGFaceppConfig_h\n#define MGFaceppConfig_h\n\n#import <UIKit/UIKit.h>\n\n#define KMGFACEMODELNAME @\"megviifacepp_0_5_2_model\"\n#define KMGFACEMODELTYPE @\"\"\n\n\n/** 设置检测视频帧的类型  */\ntypedef NS_ENUM(NSInteger, MGPixelFormatType) {\n    PixelFormatTypeGRAY = 0,\n    PixelFormatTypeBGR,\n    PixelFormatTypeNV21,\n    PixelFormatTypeRGBA,\n    PixelFormatTypeRGB\n};\n\n/** face SDK 功能类型  */\ntypedef NS_ENUM(NSInteger, MGFaceAbility) {\n    MGFaceAbilityTrack = 0,\n    MGFaceAbilityDetect = 1,\n    MGFaceAbilityPose3d = 2,\n    MGFaceAbilityEyeStatus = 3,\n    MGFaceAbilityMouseStatus = 4,\n    MGFaceAbilityMinority = 5,\n    MGFaceAbilityBlurness = 6,\n    MGFaceAbilityAgeGender = 7,\n    MGFaceAbilityExtractFeature = 8,\n};\n\n\n/** 人脸检测框 */\ntypedef struct {\n    int left;\n    int top;\n    int right;\n    int bottom;\n}MGDetectROI;\n\nCG_INLINE MGDetectROI MGDetectROIMake(int left, int top, int right,int bottom){\n    MGDetectROI d;\n    d.left = left;\n    d.top = top;\n    d.right = right;\n    d.bottom = bottom;\n    return d;\n}\n\ntypedef NS_ENUM(NSUInteger ,MGFaceppStatus) {\n    MGMarkPrepareWork = 1, //初始化已结束， 准备工作\n    MGMarkWorking,          //正在检测中\n    MGMarkWaiting,          //上一帧已经结束，等待下一帧输入\n    MGMarkStopped           //检测器停止检测，等待释放\n};\n\ntypedef NS_ENUM(NSUInteger ,MGFppDetectionMode) {\n    MGFppDetectionModeDetect = 0,\n    MGFppDetectionModeTracking = 1, // 此模式已经废弃，请使用 robust 模式\n    MGFppDetectionModeTrackingFast = 3,\n    MGFppDetectionModeTrackingRobust = 4,\n    MGFppDetectionModeDetectRect = 5,\n    MGFppDetectionModeTrackingRect = 6,\n};\n\n\ntypedef NS_ENUM(NSUInteger ,MGEyeStatus) {\n    MGEyeStatusNoGlassesOpen = 0,\n    MGEyeStatusNoGlassesClose = 1,\n    MGEyeStatusNormalGlassesOpen = 2,   //普通眼镜\n    MGEyeStatusNormalGlassesClose = 3,\n    MGEyeStatuoDarkGlasses = 4,         //太阳镜\n    MGEyeStatusOtherOcclusion = 5,\n    MGEyeStatusCount = 6\n};\n\ntypedef NS_ENUM(NSUInteger ,MGMouthStatus) {\n    MGMouthStatusOpen = 0,\n    MGMouthStatusClose = 1,\n    MGMouthStatusMaskOrRespopator = 2,   //\n    MGMouthStatusOtherOcclusion = 3,\n    MGMouthStatusCount = 4,         //\n};\n\n\n\n\n\n#endif /* MGFaceppConfig_h */\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Face/Face++/iOS_SDK/MGFaceppConfig.h",
    "content": "//\n//  MGFaceppConfig.h\n//  MGFacepp\n//\n//  Created by 张英堂 on 2016/12/27.\n//  Copyright © 2016年 megvii. All rights reserved.\n//\n\n#import <Foundation/Foundation.h>\n#import <UIKit/UIKit.h>\n\n#import \"MGFaceppCommon.h\"\n\n#pragma mark - 设置检测参数\n\n@interface MGFaceppConfig : NSObject\n\n/**  人脸大小 默认 100 */\n@property (nonatomic, assign) int minFaceSize;\n\n/**  重新全局检测间隔 40 */\n@property (nonatomic, assign) int interval;\n\n/** 旋转角度 defalut 0, [0,90,180,270,360] */\n@property (nonatomic, assign) int orientation;\n\n\n/**  设置Detection类型 默认:MGFppDetectionModeNormal */\n@property (nonatomic, assign) MGFppDetectionMode detectionMode;\n\n/**  设置检测区域（为视频流原图的区域），默认全图检测 */\n@property (nonatomic, assign) MGDetectROI detectROI;\n\n/** 设置视频流格式，默认 PixelFormatTypeRGBA */\n@property (nonatomic, assign) MGPixelFormatType pixelFormatType;\n\n\n@property (nonatomic, assign) float faceConfidenceFilter;\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Face/Face++/iOS_SDK/MGImageData.h",
    "content": "//\n//  MGImageData.h\n//  MGFacepp\n//\n//  Created by 张英堂 on 2016/12/27.\n//  Copyright © 2016年 megvii. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import <CoreMedia/CoreMedia.h>\n\n@interface MGImageData : NSObject\n\n/** 请不要使用该方法初始化 */\n- (instancetype)init DEPRECATED_ATTRIBUTE;\n\n/**\n 初始化方法，二选一\n\n @param sampleBuffer 视频流帧\n @return 实例化对象\n */\n- (instancetype)initWithSampleBuffer:(CMSampleBufferRef)sampleBuffer;\n\n/**\n 初始化方法，二选一\n\n\n @param image UIImage 对象\n @return 实例化对象\n */\n- (instancetype)initWithImage:(UIImage *)image;\n\n\n/**\n 图片宽度\n */\n@property (nonatomic, assign) CGFloat width;\n\n/**\n 图片高度\n */\n@property (nonatomic, assign) CGFloat height;\n\n\n/**\n 是否为图片\n */\n@property (nonatomic, assign, readonly) BOOL isUIImage;\n\n\n/**\n 获取内存地址\n\n @return 内存地址\n */\n- (const char*)getData;\n\n\n/**\n 释放内存\n */\n- (void)releaseImageData;\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Face/FaceSliderView.h",
    "content": "//\n//  FaceSliderView.h\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/4/16.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n\nNS_ASSUME_NONNULL_BEGIN\n\ntypedef void(^FaceSliderViewValueChangeBlock)(float value);\n\n@interface FaceSliderView : UIView\n\n@property (nonatomic, assign) float minimumValue;\n\n@property (nonatomic, assign) float maximumValue;\n\n@property (nonatomic, copy) FaceSliderViewValueChangeBlock valueChangeBlock;\n\n@property (nonatomic, strong) NSString *title;\n\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Face/FaceSliderView.m",
    "content": "//\n//  FaceSliderView.m\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/4/16.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import \"FaceSliderView.h\"\n\n@interface FaceSliderView ()\n\n@property (nonatomic, strong) UISlider *sliderView;\n@property (nonatomic, strong) UILabel *titleLab;\n\n\n@end\n\n\n@implementation FaceSliderView\n\n- (instancetype)initWithFrame:(CGRect)frame\n{\n    self = [super initWithFrame:frame];\n    if (self) {\n        \n        self.userInteractionEnabled = YES;\n        \n        self.titleLab = [[UILabel alloc]init];\n        self.titleLab.frame = CGRectMake(0, 0, self.frame.size.width, 20);\n        self.titleLab.font = [UIFont systemFontOfSize:15];\n        [self addSubview:self.titleLab];\n        \n        \n        self.sliderView = [[UISlider alloc]initWithFrame:CGRectMake(0, 20, self.frame.size.width, 30)];\n        [self.sliderView addTarget:self action:@selector(sliderValueChange:) forControlEvents:UIControlEventValueChanged];\n        [self addSubview:self.sliderView];\n        \n    }\n    return self;\n}\n\n- (void)setMinimumValue:(float)minimumValue{\n    self.sliderView.minimumValue = minimumValue;\n}\n\n- (void)setMaximumValue:(float)maximumValue{\n    self.sliderView.maximumValue = maximumValue;\n}\n\n- (void)sliderValueChange:(UISlider *)slider{\n    \n    if (self.valueChangeBlock) {\n        self.titleLab.text = [NSString stringWithFormat:@\"%@:%.2f\",_title,slider.value];\n        self.valueChangeBlock(slider.value);\n    }\n}\n\n- (void)setTitle:(NSString *)title{\n    _title = title;\n    self.titleLab.text = [NSString stringWithFormat:@\"%@:%.2f\",title,self.sliderView.value];\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Face/FaceViewController.h",
    "content": "//\n//  FaceViewController.h\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/4/15.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import \"BaseViewController.h\"\n\nNS_ASSUME_NONNULL_BEGIN\n\n@interface FaceViewController : BaseViewController\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Face/FaceViewController.mm",
    "content": "//\n//  FaceViewController.m\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/4/15.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import \"FaceViewController.h\"\n#import \"MGFaceLicenseHandle.h\"\n#import \"MGFacepp.h\"\n#import \"FaceSliderView.h\"\n\n@interface FaceViewController ()<GPUImageVideoCameraDelegate>\n@property (nonatomic, strong) MGFacepp *markManager;\n@property (nonatomic, strong) GPUImageVideoCamera *videoCamera;\n@property (nonatomic, strong) GPUImageView *preview;\n@property (nonatomic, strong) GPUImageBeautifyFilter *beautifyFilter;\n@property (nonatomic, strong) GLImageFaceChangeFilterGroup *faceChangeFilterGroup;\n@property (nonatomic, strong) UIButton *rotateBtn;\n@property (nonatomic, strong) UISwitch *switchView;\n\n@property (nonatomic, assign) AVCaptureDevicePosition devicePosition;\n\n@property (nonatomic, assign) BOOL faceServiceBool;\n@property (nonatomic, strong) FaceSliderView *thinFaceView;\n@property (nonatomic, strong) FaceSliderView *eyeFaceView;\n@property (nonatomic, strong) FaceSliderView *noseFaceView;\n@property (nonatomic, strong) FaceSliderView *beautifyView;\n\n@end\n\n@implementation FaceViewController\n\n- (void)viewWillDisappear:(BOOL)animated{\n    [super viewWillDisappear:animated];\n    [self removeAllObject];\n}\n\n\n\n- (void)viewDidLoad {\n    [super viewDidLoad];\n    \n    /**\n     GLImageFaceChangeFilter是人脸微整的效果。是把识别到的106个人脸关键点传到shader里面去。然后做像素的平移变化。\n     你们可以先查看项目里面有一张图片。\"人脸106个关键点.png\"。结合shader里面用到的点加深理解。\n     */\n    \n    \n    @weakify(self);\n    [self checkFaceServiceBlock:^(BOOL results) {\n        @strongify(self);\n        dispatch_async(dispatch_get_main_queue(), ^{\n            \n            self.faceServiceBool = results;\n            if (results) {\n                [self configFaceMarkManager];\n                [self setupFaceConfig];\n            }else{\n                NSLog(@\"授权未通过，未能开启人脸关键点识别服务。\");\n                [self setupNoFaceConfig];\n            }\n        });\n    }];\n}\n\n- (void)checkFaceServiceBlock:(void(^)(BOOL results))block{\n    \n    /** 进行联网授权版本判断，联网授权就需要进行网络授权 */\n    BOOL needLicense = [MGFaceLicenseHandle getNeedNetLicense];\n    if (needLicense) {\n        [MGFaceLicenseHandle licenseForNetwokrFinish:^(bool License, NSDate *sdkDate) {\n            if (!License) {\n                NSLog(@\"联网授权失败 ！！！\");\n                if (block) {\n                    block(NO);\n                }\n            } else {\n                NSLog(@\"联网授权成功\");\n                if (block) {\n                    block(YES);\n                }\n            }\n        }];\n        \n    } else {\n        NSLog(@\"SDK 为非联网授权版本！\");\n        if (block) {\n            block(NO);\n        }\n    }\n}\n\n- (void)setupFaceConfig{\n    \n    //添加瘦脸，大眼filter\n    self.faceChangeFilterGroup = [[GLImageFaceChangeFilterGroup alloc]init];\n    [self.faceChangeFilterGroup setCaptureDevicePosition:self.videoCamera.cameraPosition];\n    [self.faceChangeFilterGroup addTarget:self.preview];\n    [self.beautifyFilter addTarget:self.faceChangeFilterGroup];\n    [self.videoCamera addTarget:self.beautifyFilter];\n    CGFloat w = [UIScreen mainScreen].bounds.size.width - 40*2;\n    CGFloat h = 50;\n    \n    @weakify(self);\n    \n    //脸部控制\n    self.thinFaceView = [[FaceSliderView alloc]initWithFrame:CGRectMake(40, kScreen_H - 100, w, h)];\n    [self.view addSubview:self.thinFaceView];\n    self.thinFaceView.title = @\"瘦脸or胖脸\";\n    self.thinFaceView.minimumValue = -1.0;\n    
self.thinFaceView.maximumValue = 1.0;\n    self.thinFaceView.valueChangeBlock = ^(float value) {\n        @strongify(self);\n        self.faceChangeFilterGroup.thinFaceParam = value;\n    };\n    \n    //大眼控制\n    self.eyeFaceView = [[FaceSliderView alloc]initWithFrame:CGRectMake(40, kScreen_H - 100 - 50, w, h)];\n    [self.view addSubview:self.eyeFaceView];\n    self.eyeFaceView.title = @\"大眼or小眼\";\n    self.eyeFaceView.minimumValue = -1.0;\n    self.eyeFaceView.maximumValue = 1.0;\n    self.eyeFaceView.valueChangeBlock = ^(float value) {\n        @strongify(self);\n        self.faceChangeFilterGroup.eyeParam = value;\n    };\n    \n    //鼻子控制\n    self.noseFaceView = [[FaceSliderView alloc]initWithFrame:CGRectMake(40, kScreen_H - 100 - 100, w, h)];\n    [self.view addSubview:self.noseFaceView];\n    self.noseFaceView.title = @\"大鼻or小鼻\";\n    self.noseFaceView.minimumValue = -1.0;\n    self.noseFaceView.maximumValue = 1.0;\n    self.noseFaceView.valueChangeBlock = ^(float value) {\n        @strongify(self);\n        self.faceChangeFilterGroup.noseParam = value;\n    };\n\n\n    //美颜\n    self.beautifyView = [[FaceSliderView alloc]initWithFrame:CGRectMake(40, kScreen_H - 100 - 150, w, h)];\n    [self.view addSubview:self.beautifyView];\n    self.beautifyView.title = @\"美颜\";\n    self.beautifyView.minimumValue = 0.0;\n    self.beautifyView.maximumValue = 1.0;\n    self.beautifyView.valueChangeBlock = ^(float value) {\n        @strongify(self);\n        self.beautifyFilter.intensity = value;\n    };\n    \n    [self rotateBtn];\n    [self switchView];\n    \n}\n\n- (void)setupNoFaceConfig{\n    \n    \n    [self.beautifyFilter addTarget:self.preview];\n    [self.videoCamera addTarget:self.beautifyFilter];\n    \n    CGFloat w = [UIScreen mainScreen].bounds.size.width - 40*2;\n    CGFloat h = 50;\n    \n    @weakify(self);\n\n    //美颜\n    self.beautifyView = [[FaceSliderView alloc]initWithFrame:CGRectMake(40, kScreen_H - 100 - 70, w, h)];\n    [self.view addSubview:self.beautifyView];\n    self.beautifyView.title = @\"美颜\";\n    self.beautifyView.minimumValue = 0.0;\n    self.beautifyView.maximumValue = 1.0;\n    self.beautifyView.valueChangeBlock = ^(float value) {\n        @strongify(self);\n        self.beautifyFilter.intensity = value;\n    };\n    \n    [self rotateBtn];\n}\n\n\n\n- (void)configFaceMarkManager{\n    \n    NSString *modelPath = [[NSBundle mainBundle] pathForResource:KMGFACEMODELNAME ofType:@\"\"];\n    NSData *modelData = [NSData dataWithContentsOfFile:modelPath];\n    \n    int maxFaceCount = 0;\n    int faceSize = 100;\n    int internal = 40;\n\n    MGDetectROI detectROI = MGDetectROIMake(0, 0, 0, 0);\n\n    self.markManager = [[MGFacepp alloc] initWithModel:modelData\n                                               maxFaceCount:maxFaceCount\n                                              faceppSetting:^(MGFaceppConfig *config) {\n                                                  config.minFaceSize = faceSize;\n                                                  config.interval = internal;\n                                                  config.orientation = 90;\n                                                  config.detectionMode = MGFppDetectionModeTrackingFast;\n                                                  config.detectROI = detectROI;\n                                                  config.pixelFormatType = PixelFormatTypeNV21;\n                                              }];\n    \n}\n\n#pragma mark 
------------------------------------------------------ lazy ------------------------------------------------------\n- (GPUImageView *)preview{\n    if (!_preview) {\n        _preview = [[GPUImageView alloc] initWithFrame:self.view.bounds];\n        _preview.layer.contentsScale = 2.0;\n        _preview.backgroundColor = [[UIColor blackColor] colorWithAlphaComponent:0.8];\n        [_preview setBackgroundColorRed:0.2 green:0.2 blue:0.2 alpha:1.0];\n        [self.view addSubview:_preview];\n    }\n    return _preview;\n}\n\n- (GPUImageBeautifyFilter *)beautifyFilter{\n    if (!_beautifyFilter) {\n        _beautifyFilter = [[GPUImageBeautifyFilter alloc]init];\n        _beautifyFilter.intensity = 0.0;\n    }\n    return _beautifyFilter;\n}\n\n\n- (GPUImageVideoCamera *)videoCamera\n{\n    if (!_videoCamera)\n    {\n        _videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1280x720 cameraPosition:AVCaptureDevicePositionBack];\n        _videoCamera.runBenchmark = NO;\n        _videoCamera.horizontallyMirrorFrontFacingCamera = YES;\n        _videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;\n        _videoCamera.delegate = self;\n        self.devicePosition = _videoCamera.cameraPosition;\n        [_videoCamera startCameraCapture];\n    }\n    \n    return _videoCamera;\n}\n\n- (UIButton *)rotateBtn{\n    if (!_rotateBtn) {\n        _rotateBtn = [UIButton buttonWithType:UIButtonTypeCustom];\n        [self.view addSubview:_rotateBtn];\n        UIImage *image = [UIImage imageNamed:@\"rotate\"];\n        _rotateBtn.frame = CGRectMake(kScreen_W - 1.5 *image.size.width, 100, image.size.width,image.size.height );\n        [_rotateBtn setImage:image forState:UIControlStateNormal];\n        [_rotateBtn addTarget:self action:@selector(rotateBtnAction) forControlEvents:UIControlEventTouchUpInside];\n    }\n    return _rotateBtn;\n}\n\n- (UISwitch *)switchView{\n    if (!_switchView) {\n        \n        _switchView = [[UISwitch alloc] initWithFrame:CGRectMake(20, CGRectGetMinY(self.rotateBtn.frame), 51, 31)];\n        _switchView.on = YES;\n        [_switchView addTarget:self action:@selector(switchViewvalueChanged:) forControlEvents:(UIControlEventValueChanged)];\n        [self.view addSubview:_switchView];\n    }\n    return _switchView;\n}\n\n- (void)rotateBtnAction{\n    [_videoCamera rotateCamera];\n    self.devicePosition = _videoCamera.cameraPosition;\n    \n    if (self.faceServiceBool) {\n        [self.faceChangeFilterGroup setCaptureDevicePosition:self.videoCamera.cameraPosition];\n    }\n    \n}\n\n/** 是否显示人脸检测关键点 */\n- (void)switchViewvalueChanged:(UISwitch *)switchView{\n    \n    self.faceChangeFilterGroup.isShowFaceDetectPointBool = switchView.isOn;\n}\n\n\n#pragma mark ------------------------------------------------------ GPUImageVideoCameraDelegate ------------------------------------------------------\n- (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer{\n\n    if (self.faceServiceBool) {\n        if (self.markManager.status != MGMarkWorking) {\n            [self detectSampleBuffer:sampleBuffer];\n        }\n    }else{\n        NSLog(@\"未能开启人脸检测\");\n    }\n}\n\n\n- (void)detectSampleBuffer:(CMSampleBufferRef)sampleBuffer{\n    MGImageData *imageData = [[MGImageData alloc] initWithSampleBuffer:sampleBuffer];\n    [self.markManager beginDetectionFrame];\n\n    NSArray *tempArray = [self.markManager detectWithImageData:imageData];\n    NSUInteger faceCount = tempArray.count;\n    if (faceCount == 0) {\n        
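// No face detected in this frame: clear the landmark state handed to the filter group\n        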
self.faceChangeFilterGroup.isHaveFace = NO;\n        [self.faceChangeFilterGroup setFacePointsArray:@[]];\n    }else{\n        self.faceChangeFilterGroup.isHaveFace = YES;\n    }\n    NSLog(@\"face Count : %zd\",faceCount);\n    for (MGFaceInfo *faceInfo in tempArray) {\n        [self.markManager GetGetLandmark:faceInfo isSmooth:YES pointsNumber:106];\n//        NSLog(@\"%@\",faceInfo.points);\n        [self.faceChangeFilterGroup setFacePointsArray:faceInfo.points];\n    }\n    [self.markManager endDetectionFrame];\n}\n\n\n#pragma mark ------------------------------------------------------ 清空所有数据 ------------------------------------------------------\n- (void)removeAllObject{\n    [_videoCamera stopCameraCapture];\n    if (_faceChangeFilterGroup) {\n        [_faceChangeFilterGroup removeAllTargets];\n        _faceChangeFilterGroup = nil;\n    }\n    \n    if (_beautifyFilter) {\n        [_beautifyFilter removeAllTargets];\n        _beautifyFilter = nil;\n    }\n    \n    if (_preview) {\n        _preview = nil;\n    }\n    \n    if (_markManager) {\n        _markManager = nil;\n    }\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/GLImageFilterEnumType.h",
    "content": "//\n//  GLImageFilterEnumType.h\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/4/13.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#ifndef GLImageFilterEnumType_h\n#define GLImageFilterEnumType_h\n#import <UIKit/UIKit.h>\n\n\n\n\ntypedef NS_ENUM(NSInteger, GLIMAGE_FILTER_TYPE)\n{\n    GLIMAGE_LUT,\n    GL_IMAGE_ADDSTICKER,\n    GLIMAGE_NUMBEROFFILTER,\n};\n\nstatic inline NSString *GetFilterNameWithType(GLIMAGE_FILTER_TYPE type)\n{\n    NSString *text;\n    switch (type)\n    {\n        case GLIMAGE_LUT: text = @\"Lookup Table （lut图）\"; break;\n        case GL_IMAGE_ADDSTICKER: text = @\"贴纸,水印\"; break;\n        default: break;\n    }\n    \n    return text;\n}\n\n\n\ntypedef NS_ENUM(NSInteger, GL_INPUT_SOURCE_TYPE)\n{\n    GL_INPUT_SOURCE_CAMERA,\n    GL_INPUT_SOURCE_IMAGE,\n    GL_INPUT_SOURCE_MOVIE,\n};\n\n\n#endif /* GLImageFilterEnumType_h */\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/GLImageFilterList/GLImageFilterListViewController.h",
    "content": "//\n//  GLImageFilterListViewController.h\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/4/11.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import \"BaseViewController.h\"\n\nNS_ASSUME_NONNULL_BEGIN\n\n@interface GLImageFilterListViewController : BaseViewController\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/GLImageFilterList/GLImageFilterListViewController.m",
    "content": "//\n//  GLImageFilterListViewController.m\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/4/11.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import \"GLImageFilterListViewController.h\"\n#import <GPURenderKit/GPURenderKit.h>\n#import \"GLImageFilterShowViewController.h\"\n\n\n@interface GLImageFilterListViewController ()<UITableViewDelegate,UITableViewDataSource>\n\n@property (nonatomic, strong) GPUImageVideoCamera *videoCamera;\n@property (nonatomic, strong) GPUImageView *preview;\n@property (nonatomic, copy) NSString *oldCategorySession; //保存session，恢复时使用\n@property (nonatomic, strong) GPUImageMovieWriterFix *movieWriter;\n\n@property (nonatomic, strong) UITableView *tableView;\n@property (nonatomic, assign) GL_INPUT_SOURCE_TYPE inputSourceType;\n\n\n@end\n\n@implementation GLImageFilterListViewController\n\n- (void)viewDidLoad {\n    [super viewDidLoad];\n    self.inputSourceType = GL_INPUT_SOURCE_IMAGE;\n    [self createTitleView];\n    [self createTableView];\n}\n\n\n- (void)createTitleView\n{\n    UISegmentedControl *segmentedControl = [[UISegmentedControl alloc] initWithItems:@[@\"相机\", @\"照片\", @\"影片\"]];\n    segmentedControl.frame = CGRectMake(0, 0, 200, 30);\n    [segmentedControl addTarget:self action:@selector(segmentedAction:) forControlEvents:UIControlEventValueChanged];\n    self.navigationItem.titleView = segmentedControl;\n    [segmentedControl setSelectedSegmentIndex:self.inputSourceType];\n}\n\n- (void)segmentedAction:(UISegmentedControl *)sender\n{\n    self.inputSourceType = sender.selectedSegmentIndex;\n}\n\n- (void)createTableView\n{\n    self.tableView = [[UITableView alloc] initWithFrame:CGRectMake(0, 0, self.view.bounds.size.width, self.view.bounds.size.height)];\n    self.tableView.delegate = self;\n    self.tableView.dataSource = self;\n    [self.view addSubview:self.tableView];\n}\n\n#pragma mark - tableView dataSource\n\n- (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section\n{\n    return GLIMAGE_NUMBEROFFILTER;\n}\n\n- (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath\n{\n    static NSString *identifier = @\"CELL\";\n    UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:identifier];\n    \n    if (!cell)\n    {\n        cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault reuseIdentifier:identifier];\n        cell.accessoryType = UITableViewCellAccessoryDisclosureIndicator;\n    }\n    \n    NSInteger index = indexPath.row;\n    cell.textLabel.text = GetFilterNameWithType(index);\n    \n    return cell;\n}\n\n#pragma mark - tableView delegate\n\n- (void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath\n{\n    UITableViewCell *cell = [tableView cellForRowAtIndexPath:indexPath];\n    [tableView deselectRowAtIndexPath:indexPath animated:YES];\n    GLImageFilterShowViewController *filterViewController = [[GLImageFilterShowViewController alloc] initWithFilterType:indexPath.row];\n    filterViewController.title = cell.textLabel.text;\n    filterViewController.inputSourceType = self.inputSourceType;\n    [self.navigationController pushViewController:filterViewController animated:YES];\n}\n\n\n\n- (void)test{\n    [self.videoCamera addTarget:self.preview];\n    self.oldCategorySession = [AVAudioSession sharedInstance].category;\n    [GLImageFilterListViewController setCategory:AVAudioSessionCategoryPlayAndRecord withOptions:AVAudioSessionCategoryOptionMixWithOthers];\n    
[self.videoCamera startCameraCapture];\n\n}\n\n- (GPUImageView *)preview{\n    \n    if (!_preview) {\n        _preview = [[GPUImageView alloc]initWithFrame:self.view.bounds];\n        _preview.backgroundColor = [UIColor blackColor];\n        [_preview setBackgroundColorRed:0.0 green:0.0 blue:0.0 alpha:1.0];\n        [self.view addSubview:_preview];\n    }\n    return _preview;\n}\n\n\n- (GPUImageVideoCamera *)videoCamera{\n    \n    if (!_videoCamera) {\n        _videoCamera = [[GPUImageVideoCamera alloc]initWithSessionPreset:AVCaptureSessionPresetHigh cameraPosition:AVCaptureDevicePositionBack];\n        _videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;//设置照片的方向为设备的定向\n        _videoCamera.horizontallyMirrorFrontFacingCamera = YES;//设置前置为镜像\n        _videoCamera.horizontallyMirrorRearFacingCamera = NO;//设置后置为非镜像\n    }\n    return _videoCamera;\n}\n\n- (GPUImageMovieWriterFix *)movieWriter{\n    if (!_movieWriter) {\n        \n        NSString *pathToMovie = [NSHomeDirectory() stringByAppendingPathComponent:@\"Documents/videoFromSaveManager.mov\"];\n        unlink([pathToMovie UTF8String]);\n        NSURL *movieURL = [NSURL fileURLWithPath:pathToMovie];\n        _movieWriter = [[GPUImageMovieWriterFix alloc]initWithMovieURL:movieURL size:CGSizeMake(720, 1280)];\n    }\n    return _movieWriter;\n}\n\n\n- (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event{\n    \n    _movieWriter.encodingLiveVideo = YES;\n    _movieWriter.shouldPassthroughAudio = YES;\n    _movieWriter.assetWriter.movieFragmentInterval = kCMTimeInvalid;\n    _movieWriter.hasAudioTrack = YES;\n    \n    [self.videoCamera addTarget:self.movieWriter];\n\n    \n    bool addAudioInputBool =  [_videoCamera addAudioInputsAndOutputs];\n    if (addAudioInputBool) {\n        NSLog(@\"添加成功\");\n    }else{\n        NSLog(@\"添加失败\");\n    }\n    \n    self.videoCamera.audioEncodingTarget = self.movieWriter;\n    [self.movieWriter startRecording];\n    \n    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(5.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{\n\n        [self.movieWriter endProcessing];\n        \n        [self.movieWriter finishRecordingWithCompletionHandler:^{\n            NSLog(@\"录制完成\");\n            \n        }];\n        \n    });\n    \n    \n    \n    \n}\n\n\n+ (void)setCategory:(AVAudioSessionCategory)category withOptions:(AVAudioSessionCategoryOptions)options {\n    if ([[AVAudioSession sharedInstance].category isEqualToString:category]) {\n        return;\n    }\n    \n    NSError *error = nil;\n    BOOL isSuccess = [[AVAudioSession sharedInstance] setCategory:category withOptions:options error:&error];\n    if (error) {\n        NSLog(@\"set audiosession error ! category:%@ , options:%lu, error:%@\",category,(unsigned long)options,error.description);\n    }\n    \n    NSError *activeError = nil;\n    [[AVAudioSession sharedInstance] setActive:NO withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation error:&activeError];\n    if (activeError) {\n        NSLog(@\"setAudioSessionPlaybackAndMixWithOthers deactive audiosession failed !\");\n    }\n}\n\n\n- (void)dealloc{\n    NSLog(@\"---->\");\n    [GLImageFilterListViewController setCategory:self.oldCategorySession withOptions:AVAudioSessionCategoryOptionMixWithOthers];\n}\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/GLImageFilterList/GLImageFilterShowViewController.h",
    "content": "//\n//  GLImageFilterShowViewController.h\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/4/13.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import \"BaseViewController.h\"\n\nNS_ASSUME_NONNULL_BEGIN\n\n@interface GLImageFilterShowViewController : BaseViewController\n\n- (id)initWithFilterType:(GLIMAGE_FILTER_TYPE)filterType;\n\n@property (nonatomic, assign) GL_INPUT_SOURCE_TYPE inputSourceType;\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/GLImageFilterList/GLImageFilterShowViewController.m",
    "content": "//\n//  GLImageFilterShowViewController.m\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/4/13.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import \"GLImageFilterShowViewController.h\"\n#import \"GLImagePickerHelper.h\"\n#import \"GLSliderView.h\"\n#import \"GLFilterInfoView.h\"\n\n@interface GLImageFilterShowViewController ()\n\n@property (nonatomic, strong) GLSliderView      *silderView;\n@property (nonatomic, strong) GPUImageVideoCamera *videoCamera;\n@property (nonatomic, strong) GPUImagePicture   *picture;\n@property (nonatomic, strong) GPUImageMovie     *movie;\n@property (nonatomic, strong) GPUImageOutput    *inputSource;\n@property (nonatomic, strong) GPUImageView      *preview;\n@property (nonatomic, assign) GLIMAGE_FILTER_TYPE filterType;\n@property (nonatomic, strong) UIImage *sourceImage;\n@property (nonatomic, strong) GPUImageOutput <GPUImageInput> *filter;\n@property (nonatomic, strong) GLFilterInfoView *filterInfoView;\n@property (nonatomic,   copy) void (^silderValueDidChangeHandler)(float value);\n\n\n\n@end\n\n@implementation GLImageFilterShowViewController\n\n- (instancetype)initWithFilterType:(GLIMAGE_FILTER_TYPE)filterType\n{\n    self = [super init];\n    if (self) {\n        \n        self.filterType = filterType;\n    }\n    return self;\n}\n\n\n\n- (void)viewDidLoad {\n    [super viewDidLoad];\n    \n    [self createSubviews];\n    [self setupFilter];\n    \n}\n\n- (void)createSubviews\n{\n    CGRect frame = self.view.bounds;\n    self.preview = [[GPUImageView alloc] initWithFrame:frame];\n    self.preview.layer.contentsScale = 2.0;\n    self.preview.backgroundColor = [[UIColor blackColor] colorWithAlphaComponent:0.8];\n    [self.preview setBackgroundColorRed:0.2 green:0.2 blue:0.2 alpha:1.0];\n    [self.view addSubview:self.preview];\n    [self.view addSubview:self.silderView];\n    self.filterInfoView = [[GLFilterInfoView alloc] initWithFrame:CGRectMake(0, self.view.bounds.size.height - 100, self.view.bounds.size.width, 100)];\n    self.filterInfoView.userInteractionEnabled = NO;\n    self.filterInfoView.title = self.title;\n    self.filterInfoView.selected = NO;\n    self.filterInfoView.degree = 0.0;\n    self.filterInfoView.backgroundColor = [UIColor whiteColor];\n    [self.view addSubview:self.filterInfoView];\n    \n    if (self.inputSourceType == GL_INPUT_SOURCE_CAMERA)\n    {\n        self.navigationItem.rightBarButtonItem = [[UIBarButtonItem alloc] initWithImage:[UIImage imageNamed:@\"rotate\"] style:UIBarButtonItemStyleDone target:self action:@selector(rotateCamera)];\n    }\n    else if (self.inputSourceType == GL_INPUT_SOURCE_IMAGE)\n    {\n        UIBarButtonItem *albumButton = [[UIBarButtonItem alloc] initWithTitle:@\"相册\" style:UIBarButtonItemStyleDone target:self action:@selector(selectImageFromAlbum)];\n        self.navigationItem.rightBarButtonItems = @[albumButton];\n    }\n}\n\n- (void)setupFilter\n{\n    switch (self.filterType) {\n        case GLIMAGE_LUT:\n        {\n            GLImageLutFilter *lutFilter = [[GLImageLutFilter alloc]init];\n            [lutFilter setLutImage:[UIImage imageNamed:@\"exposure_n\"]];\n            self.filter = lutFilter;\n        }\n            break;\n        case GL_IMAGE_ADDSTICKER:\n        {\n            GLImageAddStickerFilter *stickerFilter = [[GLImageAddStickerFilter alloc]init];\n            UIImage *stickerImage = [UIImage imageNamed:@\"bunny\"];\n            [stickerFilter setStickerImage:stickerImage];\n            stickerFilter.center = CGPointMake(0.5, 0.5);\n       
     stickerFilter.theta = 0.0;\n            self.filter = stickerFilter;\n            self.silderView.minimumValue = 0.0;\n            self.silderView.maximumValue = 2.0;\n            \n            self.silderValueDidChangeHandler = ^(float value) {\n                stickerFilter.theta = value*M_PI;\n                stickerFilter.center = CGPointMake(0.5*value, 0.5*value);\n                NSLog(@\"调节中心点，大小\");\n            };\n        }\n            break;\n            \n            \n            \n        default:\n            break;\n    }\n    \n}\n\n- (void)rotateCamera\n{\n    [self.videoCamera rotateCamera];\n}\n\n- (void)selectImageFromAlbum\n{\n    __weak typeof(self) weakSelf = self;\n    [GLImagePickerHelper showInController:self completion:^(UIImage *image, UIImage *thumbImage) {\n        \n        weakSelf.sourceImage = image;\n        [weakSelf changeInputPicture:image];\n    }];\n}\n\n\n- (void)changeInputPicture:(UIImage *)image\n{\n    GPUImagePicture *newPicture = [[GPUImagePicture alloc] initWithImage:image];\n    \n    for (id<GPUImageInput> target in _picture.targets)\n    {\n        [newPicture addTarget:target];\n    }\n    \n    _picture = nil;\n    _picture = newPicture;\n    \n    [self startProcessFilter];\n}\n\n- (void)startProcessFilter\n{\n    switch (self.inputSourceType)\n    {\n        case GL_INPUT_SOURCE_CAMERA: break;\n        case GL_INPUT_SOURCE_IMAGE: [self.picture processImage]; break;\n        case GL_INPUT_SOURCE_MOVIE: break;\n        default: break;\n    }\n}\n\n- (void)setFilter:(GPUImageOutput <GPUImageInput> *)filter\n{\n    _filter = filter;\n    [self.inputSource addTarget:_filter];\n    [_filter addTarget:self.preview];\n    [self startProcessFilter];\n}\n\n#pragma mark - Input Source\n\n- (GPUImageVideoCamera *)videoCamera\n{\n    if (!_videoCamera)\n    {\n        _videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1280x720 cameraPosition:AVCaptureDevicePositionFront];\n        _videoCamera.runBenchmark = YES;\n        _videoCamera.horizontallyMirrorFrontFacingCamera = YES;\n        _videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;\n        [_videoCamera startCameraCapture];\n    }\n    \n    return _videoCamera;\n}\n\n- (GPUImagePicture *)picture\n{\n    if (!_picture)\n    {\n        if (self.sourceImage)\n        {\n            _picture = [[GPUImagePicture alloc] initWithImage:self.sourceImage];\n        }\n        else\n        {\n            _picture = [[GPUImagePicture alloc] initWithImage:[UIImage imageNamed:@\"flower\"]];\n        }\n    }\n    \n    return _picture;\n}\n\n- (GPUImageMovie *)movie\n{\n    if (!_movie)\n    {\n        _movie = [[GPUImageMovie alloc] initWithURL:[NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@\"测试视频\" ofType:@\"mp4\"]]];\n        _movie.shouldRepeat = YES;\n        _movie.playAtActualSpeed = YES;\n        [_movie startProcessing];\n    }\n    \n    return _movie;\n}\n\n- (GLSliderView *)silderView\n{\n    if (!_silderView)\n    {\n        _silderView = [[GLSliderView alloc] initWithFrame:CGRectMake(0, 64, self.view.bounds.size.width, self.view.bounds.size.height - 64)];\n        [_silderView addTarget:self action:@selector(sliderViewValueDidChangeAction:)];\n    }\n    \n    return _silderView;\n}\n\n- (GPUImageOutput *)inputSource\n{\n    switch (self.inputSourceType)\n    {\n        case GL_INPUT_SOURCE_CAMERA: return self.videoCamera;\n        case GL_INPUT_SOURCE_IMAGE: return self.picture;\n        case 
GL_INPUT_SOURCE_MOVIE: return self.movie;\n        default: return nil;\n    }\n}\n\n\n\n- (void)sliderViewValueDidChangeAction:(UISlider *)sender\n{\n    if (self.silderValueDidChangeHandler)\n    {\n        self.silderValueDidChangeHandler(sender.value);\n    }\n    \n    self.filterInfoView.degree = sender.value;\n    [self startProcessFilter];\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/GLImageMovie/DDAVAssetRenderManage.h",
    "content": "//\n//  DDAVAssetRenderManage.h\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/4/5.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import <Foundation/Foundation.h>\n#import <AVFoundation/AVFoundation.h>\n#import <GPURenderKit/GPURenderKit.h>\n\nNS_ASSUME_NONNULL_BEGIN\n\ntypedef void(^DDAVAssetRenderFinishHandle)(void);\n\n@protocol DDAVAssetRenderManageDelegate <NSObject>\n\n- (void)outputProcessMovieFrameSampleBuffer:(CMSampleBufferRef)sampleBufferRef;\n\n- (void)outputProcessAudioFrameSampleBuffer:(CMSampleBufferRef)sampleBufferRef;\n\n\n@end\n\n\n@interface DDAVAssetRenderManage : NSObject\n\n/** 是否保留视频原声 */\n@property (nonatomic, assign) BOOL isAddVideoVoiceBool;\n\n/** 视频声音大小 */\n@property (nonatomic, assign) float videoVolume;\n\n/** frameDuration */\n@property (nonatomic, assign) CMTime frameDuration;\n\n@property (nonatomic, weak) id<DDAVAssetRenderManageDelegate> delegate;\n\n- (instancetype)initWithVideoFileUrl:(NSURL *)videoFileUrl;\n\n\n/**\n 视频添加背景音乐\n\n @param musicFilePath 音乐文件路径\n @param startTime 开始时间\n @param musicVolume 音乐音量大小\n */\n- (void)mixMusicFilePath:(NSURL *)musicFilePath startTime:(CMTime)startTime musicVolume:(float)musicVolume;\n\n\n/**\n 开始渲染\n */\n- (void)startProcessing;\n\n\n/**\n 结束渲染的回调\n\n @param handle  结束渲染的回调\n */\n- (void)finishProcessingWithCompletionHandler:(DDAVAssetRenderFinishHandle)handle;\n\n\n/**\n 取消渲染的回调\n\n @param handler 取消渲染的回调\n */\n- (void)cancelProcessingHandler:(void (^)(void))handler;\n\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/GLImageMovie/DDAVAssetRenderManage.m",
    "content": "//\n//  DDAVAssetRenderManage.m\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/4/5.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import \"DDAVAssetRenderManage.h\"\n\n\n@interface DDAVAssetRenderManage ()\n\n/// 主轨道\n@property (nonatomic, strong) AVMutableComposition *mainComposition;\n/// 视频处理器\n@property (nonatomic, strong) AVMutableVideoComposition *videoComposition;\n/// 音乐处理器\n@property (nonatomic, strong) AVMutableAudioMix *audioMix;\n/// assetReader\n@property (nonatomic, strong) AVAssetReader *assetReader;\n/// video输出\n@property (nonatomic, strong) AVAssetReaderTrackOutput *videoTrackOutput;\n/// audio输出\n@property (nonatomic, strong) AVAssetReaderTrackOutput *audioTrackOutput;\n\n@property (nonatomic, strong) NSMutableArray *audioMixInputParameterss;\n\n@property (nonatomic, strong) NSURL *videoFileUrl;\n\n@property (nonatomic, strong) AVAsset *videoAsset;\n\n@property (nonatomic, assign) CGSize videoSize;\n\n@property (nonatomic, assign) CMTime duration;\n\n@property (nonatomic, copy) DDAVAssetRenderFinishHandle finishHandle;\n\n@property (nonatomic, assign) BOOL isCancelProcessingBool;\n\n\n@end\n\n\n@implementation DDAVAssetRenderManage\n\n\n\n- (instancetype)initWithVideoFileUrl:(NSURL *)videoFileUrl\n{\n    self = [super init];\n    if (self) {\n        \n        _videoFileUrl = videoFileUrl;\n        _isCancelProcessingBool = NO;\n        _videoVolume = 1.0;\n        [self videoAsset];\n    }\n    return self;\n}\n\n- (AVAsset *)videoAsset{\n    if (!_videoAsset) {\n        \n        _videoAsset = [AVAsset assetWithURL:self.videoFileUrl];\n        NSArray *videoTracks = [_videoAsset tracksWithMediaType:AVMediaTypeVideo];\n        \n        if (videoTracks.count==0) {\n            NSAssert(NO, @\"没有视频帧---\");\n        }\n        self.duration = _videoAsset.duration;\n        AVAssetTrack *videoTrack = [videoTracks objectAtIndex:0];\n        self.videoSize = videoTrack.naturalSize;\n    }\n    return _videoAsset;\n}\n\n\n\n- (AVMutableComposition *)mainComposition{\n    if (!_mainComposition) {\n        \n        _mainComposition = [[AVMutableComposition alloc]init];\n    }\n    return _mainComposition;\n}\n\n\n- (AVMutableVideoComposition *)videoComposition\n{\n    if (!_videoComposition) {\n        _videoComposition = [AVMutableVideoComposition videoComposition];\n        _videoComposition.renderSize = self.videoSize;\n        if (CMTIME_IS_INVALID(self.frameDuration)) {\n            /// 如果帧率不传则默认 30帧\n            self.frameDuration = CMTimeMake(1, 30);\n        }\n        _videoComposition.frameDuration = self.frameDuration;\n        _videoComposition.renderScale = 1.0;\n    }\n    return _videoComposition;\n}\n\n- (AVMutableAudioMix *)audioMix\n{\n    if (!_audioMix) {\n        _audioMix = [AVMutableAudioMix audioMix];\n    }\n    return _audioMix;\n}\n\n- (NSMutableArray *)audioMixInputParameterss\n{\n    if (!_audioMixInputParameterss) {\n        _audioMixInputParameterss = [NSMutableArray array];\n    }\n    return _audioMixInputParameterss;\n}\n\n\n- (void)addVideoSource{\n    \n    // 视频通道 枚举 kCMPersistentTrackID_Invalid = 0\n    AVMutableCompositionTrack *videoTrack = [self.mainComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];\n    // 视频采集通道\n    AVAssetTrack *videoAssetTrack = [[self.videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];\n    // 把采集轨道数据加入到可变轨道之中\n    // 视频时间范围\n    CMTimeRange videoTimeRange = videoAssetTrack.timeRange;\n    [videoTrack 
insertTimeRange:videoTimeRange ofTrack:videoAssetTrack atTime:kCMTimeZero error:nil];\n    // 获取视频的音频轨道\n    NSArray *assetAudioTracks = [self.videoAsset tracksWithMediaType:AVMediaTypeAudio];\n    \n    if (assetAudioTracks.count > 0 && self.isAddVideoVoiceBool) {\n        AVMutableCompositionTrack *audioCompositionTrack = [self.mainComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];\n        \n        AVAssetTrack *audioTrack = [assetAudioTracks firstObject];\n        [audioCompositionTrack insertTimeRange:videoTimeRange ofTrack:audioTrack atTime:kCMTimeZero error:nil];\n        \n        AVMutableAudioMixInputParameters *audioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:audioCompositionTrack];\n        [audioMixInputParameters setVolumeRampFromStartVolume:self.videoVolume toEndVolume:self.videoVolume timeRange:videoTimeRange];\n        [self.audioMixInputParameterss addObject:audioMixInputParameters];\n    }\n    \n    \n    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];\n    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];\n    instruction.layerInstructions = @[layerInstruction];\n    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration);\n    NSMutableArray *instrucionArray = [NSMutableArray array];\n    [instrucionArray addObject:instruction];\n    self.videoComposition.instructions = instrucionArray;\n}\n\n\n- (void)initializeAssetReader\n{\n    NSError *error = nil;\n    self.assetReader = [[AVAssetReader alloc] initWithAsset:self.mainComposition error:&error];\n    self.assetReader.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMake(self.mainComposition.duration.value, self.mainComposition.duration.timescale));\n    \n    self.audioMix.inputParameters = self.audioMixInputParameterss;\n    \n    NSDictionary *outputSettings = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)};\n    AVAssetReaderVideoCompositionOutput *readerVideoOutput = [AVAssetReaderVideoCompositionOutput assetReaderVideoCompositionOutputWithVideoTracks:[self.mainComposition tracksWithMediaType:AVMediaTypeVideo] videoSettings:outputSettings];\n    \n    \n    readerVideoOutput.videoComposition = self.videoComposition;\n    readerVideoOutput.alwaysCopiesSampleData = NO;\n    if ([self.assetReader canAddOutput:readerVideoOutput]) {\n        [self.assetReader addOutput:readerVideoOutput];\n    } else{\n        NSLog(@\"加入视频输入失败\");\n    }\n    \n    NSArray *audioTracks = [self.mainComposition tracksWithMediaType:AVMediaTypeAudio];\n    BOOL shouldRecordAudioTrack = ([audioTracks count] > 0);\n    AVAssetReaderAudioMixOutput *readerAudioOutput = nil;\n    \n    if (shouldRecordAudioTrack)\n    {\n        readerAudioOutput = [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:audioTracks audioSettings:nil];\n        readerAudioOutput.audioMix = self.audioMix;\n        readerAudioOutput.alwaysCopiesSampleData = NO;\n        if ([self.assetReader canAddOutput:readerAudioOutput]) {\n            [self.assetReader addOutput:readerAudioOutput];\n        } else{\n            NSLog(@\"加入音频失败\");\n        }\n    }\n    \n    self.videoTrackOutput = (AVAssetReaderTrackOutput *)readerVideoOutput;\n    self.audioTrackOutput = (AVAssetReaderTrackOutput 
*)readerAudioOutput;\n    \n}\n\n\n\n#pragma mark ------------------------------------------------------ publicFunc ------------------------------------------------------\n\n\n- (void)mixMusicFilePath:(NSURL *)musicFilePath startTime:(CMTime)startTime musicVolume:(float)musicVolume{\n    \n    \n    AVURLAsset *audioAsset = [AVURLAsset assetWithURL:musicFilePath];\n    NSArray *arrayAudioTrack = [audioAsset tracksWithMediaType:AVMediaTypeAudio];\n    \n    CMTimeRange cropTimeRange = CMTimeRangeMake(startTime, audioAsset.duration);\n    \n    float cropTimeSecond = CMTimeGetSeconds(cropTimeRange.duration);\n    float presentTimeSecond = CMTimeGetSeconds(self.duration);\n    if (cropTimeSecond < presentTimeSecond) {\n        if (arrayAudioTrack.count > 0) {\n            AVAssetTrack *audioTrak = [arrayAudioTrack firstObject];\n            AVMutableCompositionTrack *audioCompositionTrack = [self.mainComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];\n            \n            \n            int count = presentTimeSecond / cropTimeSecond;\n            for (int i = 0; i< count; i++) {\n                CMTimeRange timeRange = CMTimeRangeMake(CMTimeAdd(cropTimeRange.start, CMTimeMakeWithSeconds(CMTimeGetSeconds(cropTimeRange.duration) * i, 90000)) , cropTimeRange.duration);\n                [audioCompositionTrack insertTimeRange:cropTimeRange\n                                               ofTrack:audioTrak\n                                                atTime:CMTimeSubtract(timeRange.start, cropTimeRange.start)\n                                                 error:nil];\n            }\n            CMTimeRange lastTimeRange = CMTimeRangeMake(CMTimeMakeWithSeconds(cropTimeSecond * (count), 90000), CMTimeMakeWithSeconds(presentTimeSecond -  cropTimeSecond * (count), 90000));\n            [audioCompositionTrack insertTimeRange:CMTimeRangeMake(cropTimeRange.start, lastTimeRange.duration)\n                                           ofTrack:audioTrak\n                                            atTime:lastTimeRange.start\n                                             error:nil];\n            \n            \n            AVMutableAudioMixInputParameters *audioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:audioCompositionTrack];\n            [audioMixInputParameters setVolumeRampFromStartVolume:musicVolume toEndVolume:musicVolume timeRange:CMTimeRangeMake(kCMTimeZero, self.duration)];\n            [self.audioMixInputParameterss addObject:audioMixInputParameters];\n            \n        }\n    } else\n    {\n        if (arrayAudioTrack.count > 0) {\n            AVAssetTrack *audioTrak = [arrayAudioTrack firstObject];\n            AVMutableCompositionTrack *audioCompositionTrack = [self.mainComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];\n            [audioCompositionTrack insertTimeRange:cropTimeRange\n                                           ofTrack:audioTrak\n                                            atTime:kCMTimeZero\n                                             error:nil];\n            AVMutableAudioMixInputParameters *audioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:audioCompositionTrack];\n            [audioMixInputParameters setVolumeRampFromStartVolume:musicVolume toEndVolume:musicVolume timeRange:cropTimeRange];\n            [self.audioMixInputParameterss addObject:audioMixInputParameters];\n   
     }\n    }\n}\n\n\n\n- (void)startProcessing{\n    \n    [self addVideoSource];\n    [self initializeAssetReader];\n    BOOL value =  [self.assetReader startReading];\n    if (value) {\n        NSLog(@\"开始reading\");\n    }else{\n        NSLog(@\"有问题%@\",self.assetReader.error);\n        return;\n    }\n    runSynchronouslyOnVideoProcessingQueue(^{\n        [self processingBuffer];\n    });\n}\n\n\n- (void)processingBuffer{\n    \n    if (self.isCancelProcessingBool) {\n        \n        if (self.assetReader.status == AVAssetReaderStatusReading) {\n            [self.assetReader cancelReading];\n        }\n        return;\n    }\n    bool isCirculationBool = YES;\n    \n    if (self.assetReader.status == AVAssetReaderStatusReading) {\n        CMSampleBufferRef videoSampleBuffer = [self.videoTrackOutput copyNextSampleBuffer];\n        \n        if (videoSampleBuffer) {\n            if (self.delegate && [self.delegate respondsToSelector:@selector(outputProcessMovieFrameSampleBuffer:)]) {\n                [self.delegate outputProcessMovieFrameSampleBuffer:videoSampleBuffer];\n            }\n            CMSampleBufferInvalidate(videoSampleBuffer);\n            CFRelease(videoSampleBuffer);\n        }else{\n            \n            isCirculationBool = NO;\n            NSLog(@\"videoSampleBuffer is null\");\n            if (self.finishHandle) {\n                self.finishHandle();\n            }\n            [self.assetReader cancelReading];\n            return;\n        }\n    }else{\n        \n        NSLog(@\"assetReader有问题\");\n        return;\n    }\n    \n    \n    if (self.assetReader.status == AVAssetReaderStatusReading) {\n        \n        CMSampleBufferRef audioSampleBuffer = [self.audioTrackOutput copyNextSampleBuffer];\n        if (audioSampleBuffer) {\n            \n            if (self.delegate && [self.delegate respondsToSelector:@selector(outputProcessAudioFrameSampleBuffer:)]) {\n                [self.delegate outputProcessAudioFrameSampleBuffer:audioSampleBuffer];\n            }\n//            CMSampleBufferInvalidate(audioSampleBuffer);\n            CFRelease(audioSampleBuffer);\n        }\n    }else{\n        \n        NSLog(@\"assetReader有问题\");\n        return;\n    }\n    //循环读取下一帧视频\n    if (isCirculationBool) {\n        [NSThread sleepForTimeInterval:1/240.0];\n        [self processingBuffer];\n    }\n}\n\n- (void)finishProcessingWithCompletionHandler:(DDAVAssetRenderFinishHandle)handle{\n    \n    if (handle) {\n        self.finishHandle = handle;\n    }\n}\n\n- (void)cancelProcessingHandler:(void (^)(void))handler{\n    _isCancelProcessingBool = YES;\n    if (handler) {\n        handler();\n    }\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/GLImageMovie/GLImageMovieUseViewController.h",
    "content": "//\n//  GLImageMovieUseViewController.h\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/4/5.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import \"BaseViewController.h\"\n\nNS_ASSUME_NONNULL_BEGIN\n\n@interface GLImageMovieUseViewController : BaseViewController\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/GLImageMovie/GLImageMovieUseViewController.m",
    "content": "//\n//  GLImageMovieUseViewController.m\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/4/5.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import \"GLImageMovieUseViewController.h\"\n#import \"DDAVAssetRenderManage.h\"\n#import \"DDMediaEditorManage.h\"\n#import \"MBProgressHUD.h\"\n#import \"MovieViewController.h\"\n@interface GLImageMovieUseViewController ()<DDAVAssetRenderManageDelegate>\n\n@property (nonatomic, strong) DDAVAssetRenderManage *assetRenderManage;\n@property (nonatomic, strong) GLImageMovie *imageMovie;\n@property (nonatomic, strong) GPUImageMovieWriterFix *movieWriter;\n@property (nonatomic, strong) GPUImageView *preview;\n@property (nonatomic, strong) DDMediaEditorManage *mediaEditorManage;\n@property (nonatomic, strong) GLImageLutFilter *lutFilter;\n\n@property (nonatomic, strong) UIImage *lutImage;\n@property (nonatomic, strong) NSURL *musicFileUrl;\n\n@property (nonatomic, strong) UIButton *addMusicBtn;\n@property (nonatomic, strong) UIButton *changeLutBtn;\n@property (nonatomic, strong) UIButton *saveBtn;\n@property (nonatomic, strong) NSURL *outputFileUrl;\n\n\n@end\n\n@implementation GLImageMovieUseViewController\n\n- (void)viewWillAppear:(BOOL)animated{\n    [super viewWillAppear:animated];\n    \n    if (_mediaEditorManage) {\n        if (!_mediaEditorManage.isPlaying) {\n            @weakify(self);\n            [_mediaEditorManage videoSeekToTime:kCMTimeZero completionHandler:^(BOOL finished) {\n                @strongify(self);\n                [self.mediaEditorManage playVideo];\n            }];\n        }\n    }\n}\n\n\n- (void)viewDidLoad {\n    [super viewDidLoad];\n    \n    self.preview = [[GPUImageView alloc] initWithFrame:self.view.bounds];\n    self.preview.layer.contentsScale = 2.0;\n    self.preview.backgroundColor = [UIColor blackColor];\n    [self.preview setBackgroundColorRed:0.0 green:0.0 blue:0.0 alpha:1.0];\n    [self.view addSubview:self.preview];\n    \n    //添加滤镜filter\n    self.lutFilter = [[GLImageLutFilter alloc]init];\n    self.lutImage = [UIImage imageNamed:@\"heibai.png\"];\n    [self.lutFilter setLutImage:self.lutImage];\n    [self.lutFilter addTarget:self.preview];\n    \n    //\n    [self.mediaEditorManage.movie addTarget:self.lutFilter];\n    \n    @weakify(self);\n    [self.mediaEditorManage playVideo];\n    [self.mediaEditorManage setVideoPlayEndCallBack:^{\n        @strongify(self);\n        @weakify(self);\n        [self.mediaEditorManage videoSeekToTime:kCMTimeZero completionHandler:^(BOOL finished) {\n            @strongify(self);\n            [self.mediaEditorManage playVideo];\n        }];\n    }];\n    \n    //添加背景音乐\n    self.addMusicBtn = [self createBtnTit:@\"添加音乐\" btnFrame:CGRectMake(0, kScreen_H - 100, kScreen_W/4.0, 50) action:@selector(addMusicAction)];\n    //切换滤镜\n    self.changeLutBtn = [self createBtnTit:@\"切换滤镜\" btnFrame:CGRectMake((kScreen_W - kScreen_W/4.0)/2.0, kScreen_H - 100, kScreen_W/4.0, 50) action:@selector(changLutAction)];\n    //保存视频\n    self.saveBtn = [self createBtnTit:@\"保   存\" btnFrame:CGRectMake(kScreen_W - kScreen_W/4.0, kScreen_H - 100, kScreen_W/4.0, 50) action:@selector(saveAction)];\n}\n\n- (DDMediaEditorManage *)mediaEditorManage{\n    \n    if (!_mediaEditorManage) {\n        NSURL *mediaUrl = [[NSBundle mainBundle] URLForResource:@\"测试视频\" withExtension:@\"mp4\"];\n        _mediaEditorManage = [[DDMediaEditorManage alloc]initWithUrl:mediaUrl];\n    }\n    return _mediaEditorManage;\n}\n\n- (UIButton *)createBtnTit:(NSString *)btnTit 
btnFrame:(CGRect)btnFrame action:(SEL)action{\n    \n    UIButton *targetBtn = [UIButton buttonWithType:UIButtonTypeCustom];\n    [targetBtn setTitle:btnTit forState:UIControlStateNormal];\n    targetBtn.frame = btnFrame;\n    [targetBtn addTarget:self action:action forControlEvents:UIControlEventTouchUpInside];\n    [self.view addSubview:targetBtn];\n    return targetBtn;\n}\n\n\n- (void)addMusicAction{\n    \n        if (!self.musicFileUrl) {\n            NSURL *mediaUrl = [[NSBundle mainBundle] URLForResource:@\"6666\" withExtension:@\"mp3\"];\n            self.musicFileUrl = mediaUrl;\n            [self.addMusicBtn setTitle:@\"移除音乐\" forState:UIControlStateNormal];\n            [self.mediaEditorManage addAudioPath:self.musicFileUrl];\n            [self.mediaEditorManage adjustVolumeForMusic:0.9];\n            [self.mediaEditorManage adjustVolumeForVideo:0.1];\n            @weakify(self);\n            [self.mediaEditorManage videoSeekToTime:kCMTimeZero completionHandler:^(BOOL finished) {\n                @strongify(self);\n                [self.mediaEditorManage playVideo];\n            }];\n            \n        }else{\n            \n            [self.addMusicBtn setTitle:@\"添加音乐\" forState:UIControlStateNormal];\n            self.musicFileUrl = nil;\n            [self.mediaEditorManage adjustVolumeForVideo:1.0];\n            [self.mediaEditorManage removeMusic];\n        }\n}\n\n\n\n- (void)changLutAction{\n    \n    NSArray *lutArray = @[@\"gaoya\",@\"heibai\",@\"jingdu\",@\"meishi\",@\"xiatian\"];\n    NSInteger index = arc4random()%(lutArray.count);\n    self.lutImage = [UIImage imageNamed:[NSString stringWithFormat:@\"%@.png\",lutArray[index]]];\n    [self.lutFilter setLutImage:self.lutImage];\n}\n\n- (void)saveAction{\n    \n    [self.mediaEditorManage pauseVideo];\n    [MBProgressHUD showHUDAddedTo:self.preview animated:YES];\n    \n    NSURL *mediaUrl = [[NSBundle mainBundle] URLForResource:@\"测试视频\" withExtension:@\"mp4\"];\n    self.assetRenderManage = [[DDAVAssetRenderManage alloc]initWithVideoFileUrl:mediaUrl];\n    self.assetRenderManage.delegate = self;\n    \n    __weak typeof(self) weakSelf = self;\n    [self.assetRenderManage finishProcessingWithCompletionHandler:^{\n        NSLog(@\"渲染完成\");\n        [weakSelf finshRecord];\n    }];\n    \n    self.assetRenderManage.videoVolume = 1.0;\n    self.assetRenderManage.isAddVideoVoiceBool = YES;\n    \n    if (self.musicFileUrl) {\n        self.assetRenderManage.videoVolume = 0.3;\n        [self.assetRenderManage mixMusicFilePath:self.musicFileUrl startTime:kCMTimeZero musicVolume:0.7];\n    }\n    \n    self.imageMovie = [[GLImageMovie alloc]init];\n    self.imageMovie.runBenchmark = YES;\n    \n    NSString *pathToMovie = [NSHomeDirectory() stringByAppendingPathComponent:@\"Documents/videoFromSaveManager.mov\"];\n    unlink([pathToMovie UTF8String]);\n    NSURL *movieURL = [NSURL fileURLWithPath:pathToMovie];\n    self.outputFileUrl = movieURL;\n    self.movieWriter = [[GPUImageMovieWriterFix alloc] initWithMovieURL:movieURL size:CGSizeMake(720, 1280)];\n    \n    self.movieWriter.encodingLiveVideo = YES;\n    self.movieWriter.assetWriter.movieFragmentInterval = kCMTimeInvalid;\n    self.movieWriter.hasAudioTrack = YES;\n    \n    GLImageLutFilter *lutFilter = [[GLImageLutFilter alloc]init];\n    [lutFilter setLutImage:self.lutImage];\n    \n    [self.imageMovie addTarget:self.lutFilter];\n    [self.lutFilter addTarget:self.movieWriter];\n    \n    [self.movieWriter startRecording];\n    [self.assetRenderManage 
startProcessing];\n\n}\n\n- (void)finshRecord{\n    [self.imageMovie endProcessing];\n    [self.movieWriter endProcessing];\n    \n    [MBProgressHUD hideHUDForView:self.preview animated:YES];\n    @weakify(self);\n    [self.movieWriter finishRecordingWithCompletionHandler:^{\n        dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.1 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{\n            NSLog(@\"渲染完成----\");\n            @strongify(self);\n            MovieViewController *vc = [[MovieViewController alloc]initWithMediaFileUrl:self.outputFileUrl];\n            [self.navigationController pushViewController:vc animated:YES];\n        });\n    }];\n}\n\n- (void)outputProcessAudioFrameSampleBuffer:(CMSampleBufferRef)sampleBufferRef{\n    [self.movieWriter processAudioBuffer:sampleBufferRef];\n}\n\n- (void)outputProcessMovieFrameSampleBuffer:(CMSampleBufferRef)sampleBufferRef{\n    [self.imageMovie processMovieFrameSampleBuffer:sampleBufferRef];\n}\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/GLImageMovie/MovieViewController.h",
    "content": "//\n//  MovieViewController.h\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/4/15.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import \"BaseViewController.h\"\n\nNS_ASSUME_NONNULL_BEGIN\n\n@interface MovieViewController : BaseViewController\n- (instancetype)initWithMediaFileUrl:(NSURL *)mediaFileUrl;\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/GLImageMovie/MovieViewController.m",
    "content": "//\n//  MovieViewController.m\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/4/15.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import \"MovieViewController.h\"\n#import \"DDMediaEditorManage.h\"\n@interface MovieViewController ()\n@property (nonatomic, strong) GPUImageView *preview;\n@property (nonatomic, strong) DDMediaEditorManage *mediaEditorManage;\n@property (nonatomic, strong) NSURL *mediaFileUrl;\n@end\n\n@implementation MovieViewController\n\n- (instancetype)initWithMediaFileUrl:(NSURL *)mediaFileUrl\n{\n    self = [super init];\n    if (self) {\n        \n        self.mediaFileUrl = mediaFileUrl;\n    }\n    return self;\n}\n\n\n- (void)viewDidLoad {\n    [super viewDidLoad];\n    \n    self.preview = [[GPUImageView alloc] initWithFrame:self.view.bounds];\n    self.preview.layer.contentsScale = 2.0;\n    self.preview.backgroundColor = [UIColor blackColor];\n    [self.preview setBackgroundColorRed:0.0 green:0.0 blue:0.0 alpha:1.0];\n    [self.view addSubview:self.preview];\n    \n    [self.mediaEditorManage.movie addTarget:self.preview];\n    \n    @weakify(self);\n    [self.mediaEditorManage playVideo];\n    [self.mediaEditorManage setVideoPlayEndCallBack:^{\n        @strongify(self);\n        @weakify(self);\n        [self.mediaEditorManage videoSeekToTime:kCMTimeZero completionHandler:^(BOOL finished) {\n            @strongify(self);\n            [self.mediaEditorManage playVideo];\n        }];\n    }];\n    \n    \n}\n\n- (DDMediaEditorManage *)mediaEditorManage{\n    \n    if (!_mediaEditorManage) {\n        _mediaEditorManage = [[DDMediaEditorManage alloc]initWithUrl:self.mediaFileUrl];\n    }\n    return _mediaEditorManage;\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/GPURenderMacroHeader.h",
    "content": "//\n//  GPURenderMacroHeader.h\n//  DDOpenGLESExample\n//\n//  Created by 刘海东 on 2018/2/5.\n//  Copyright © 2018年 刘海东. All rights reserved.\n//\n\n#ifndef GPURenderMacroHeader_h\n#define GPURenderMacroHeader_h\n\n#import \"GLImageFilterEnumType.h\"\n\n#define WEAKSELF __weak __typeof(&*self)weakSelf = self;\n\n/** 屏幕size */\n#define kScreen_W [UIScreen mainScreen].bounds.size.width\n#define kScreen_H [UIScreen mainScreen].bounds.size.height\n/**为了兼容写的宽高**/\n#define SCREEN_W [UIScreen mainScreen].bounds.size.width\n#define SCREEN_H [UIScreen mainScreen].bounds.size.height\n\n\n\n#define SafeAreaStatusTopHeight (iPhoneX ? 32 : 0)\n#define  SafeTopMargin  (IS_IPHONE_X ? 44.f : 0.f)\n#define  SafeBottomMargin         (IS_IPHONE_X ? 34.f : 0.f)\n#define  SafeNavigationBarHeight  (IS_IPHONE_X ? 88.f : 64.f)\n#define IS_IPHONE_X (kScreen_W == 375.f && kScreen_H == 812.f ? YES : NO)\n\n// 是否为iPhoneX\n#define iPhoneX ([UIScreen mainScreen].bounds.size.height == 812)\n\n/** 颜色 */\n\n#define kBlackColor_Alpha_05 [UIColor colorWithRed:0 green:0 blue:0 alpha:0.5]\n#define RGB(r,g,b) [UIColor colorWithRed:r / 255.0 green:g / 255.0 blue:b / 255.0 alpha:1]\n#define RGBA(r,g,b,a) [UIColor colorWithRed:r / 255.0 green:g / 255.0 blue:b / 255.0 alpha:a]\n\n#define kStingValue(x) [NSString stringWithFormat:@\"%@\",x]\n\n\n//统一配置\n#define APPConfig(AppconfigKey) ([[NSBundle mainBundle] localizedStringForKey:AppconfigKey value:nil table:@\"APPConfig\"])\n\n#define kColorHexInt(value) [UIColor colorWithHex:value]\n\n#define kColorHexString(value) [UIColor colorWithHexString:value]\n\n\n/** 通用代理判断  前提是代理的属性名字是要  delegate*/\n#define kDelegateBoolForSelector(selectorName) (self.delegate && [self.delegate respondsToSelector:@selector(selectorName)])\n#define kWeakeSelf __weak typeof(self) weakSelf = self;\n#define kStrongSelf __strong typeof(weakSelf) strongSelf = weakSelf;\n\n\n/** 美化教程 */\n#define ATBeautyWebDetailPageURLType(type) [NSString stringWithFormat:@\"https://hdpage.wecut.com/h5_artist_manual/h5_artist_manual.html?lan=%@&href=%@\",[NSString getAtLanString],type]\n\n/** 函数执行时间 */\n#define kFuncStartTime CFAbsoluteTimeGetCurrent();\n#define kFuncEndTime(timeValue) ((CFAbsoluteTimeGetCurrent() - timeValue)*1000.0)\n\n\n\n#define kATRiseMenuView_h 168\n\n\n\n/** weak self and storng self**/\n#ifndef    weakify\n#if __has_feature(objc_arc)\n\n#define weakify( x ) \\\n_Pragma(\"clang diagnostic push\") \\\n_Pragma(\"clang diagnostic ignored \\\"-Wshadow\\\"\") \\\nautoreleasepool{} __weak __typeof__(x) __weak_##x##__ = x; \\\n_Pragma(\"clang diagnostic pop\")\n\n#else\n\n#define weakify( x ) \\\n_Pragma(\"clang diagnostic push\") \\\n_Pragma(\"clang diagnostic ignored \\\"-Wshadow\\\"\") \\\nautoreleasepool{} __block __typeof__(x) __block_##x##__ = x; \\\n_Pragma(\"clang diagnostic pop\")\n\n#endif\n#endif\n\n#ifndef    strongify\n#if __has_feature(objc_arc)\n\n#define strongify( x ) \\\n_Pragma(\"clang diagnostic push\") \\\n_Pragma(\"clang diagnostic ignored \\\"-Wshadow\\\"\") \\\ntry{} @finally{} __typeof__(x) x = __weak_##x##__; \\\n_Pragma(\"clang diagnostic pop\")\n\n#else\n\n#define strongify( x ) \\\n_Pragma(\"clang diagnostic push\") \\\n_Pragma(\"clang diagnostic ignored \\\"-Wshadow\\\"\") \\\ntry{} @finally{} __typeof__(x) x = __block_##x##__; \\\n_Pragma(\"clang diagnostic pop\")\n\n#endif\n#endif\n/* weak self and storng self **/\n\n\n\n\n#endif /* GPURenderMacroHeader_h */\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Info.plist",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n<plist version=\"1.0\">\n<dict>\n\t<key>CFBundleDevelopmentRegion</key>\n\t<string>$(DEVELOPMENT_LANGUAGE)</string>\n\t<key>CFBundleDisplayName</key>\n\t<string>GPURenderKit</string>\n\t<key>CFBundleExecutable</key>\n\t<string>$(EXECUTABLE_NAME)</string>\n\t<key>CFBundleIdentifier</key>\n\t<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>\n\t<key>CFBundleInfoDictionaryVersion</key>\n\t<string>6.0</string>\n\t<key>CFBundleName</key>\n\t<string>$(PRODUCT_NAME)</string>\n\t<key>CFBundlePackageType</key>\n\t<string>APPL</string>\n\t<key>CFBundleShortVersionString</key>\n\t<string>1.0</string>\n\t<key>CFBundleVersion</key>\n\t<string>1</string>\n\t<key>LSRequiresIPhoneOS</key>\n\t<true/>\n\t<key>NSCameraUsageDescription</key>\n\t<string>我们需要您的相机来拍照、录制视频等功能</string>\n\t<key>NSMicrophoneUsageDescription</key>\n\t<string>我们需要您的麦克风来录制视频</string>\n\t<key>NSPhotoLibraryAddUsageDescription</key>\n\t<string></string>\n\t<key>NSPhotoLibraryUsageDescription</key>\n\t<string>我们需要您的相册给好友分享照片、视频等功能</string>\n\t<key>UIBackgroundModes</key>\n\t<array>\n\t\t<string>audio</string>\n\t</array>\n\t<key>UIFileSharingEnabled</key>\n\t<true/>\n\t<key>UILaunchStoryboardName</key>\n\t<string>LaunchScreen</string>\n\t<key>UIRequiredDeviceCapabilities</key>\n\t<array>\n\t\t<string>armv7</string>\n\t</array>\n\t<key>UISupportedInterfaceOrientations</key>\n\t<array>\n\t\t<string>UIInterfaceOrientationPortrait</string>\n\t</array>\n\t<key>UISupportedInterfaceOrientations~ipad</key>\n\t<array>\n\t\t<string>UIInterfaceOrientationPortrait</string>\n\t\t<string>UIInterfaceOrientationPortraitUpsideDown</string>\n\t\t<string>UIInterfaceOrientationLandscapeLeft</string>\n\t\t<string>UIInterfaceOrientationLandscapeRight</string>\n\t</array>\n</dict>\n</plist>\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/MainViewController.h",
    "content": "//\n//  MainViewController.h\n//  DDFaceKitDemo\n//\n//  Created by 刘海东 on 2018/9/12.\n//  Copyright © 2018年 刘海东. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n@interface MainViewController : UIViewController\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/MainViewController.m",
    "content": "//\n//  MainViewController.m\n//  DDFaceKitDemo\n//\n//  Created by 刘海东 on 2018/9/12.\n//  Copyright © 2018年 刘海东. All rights reserved.\n//\n\n#import \"MainViewController.h\"\n#import \"BaseViewController.h\"\n#import <objc/runtime.h>\ntypedef NS_ENUM(NSInteger,ActionType)\n{\n    /** 抖音效果 */\n    ActionType_DouYinEffect,\n    /** GPUimage+背景音乐一步合成 */\n    ActionType_GLImageMovieUse,\n    /** GLImageFilter */\n    ActionType_FilterList,\n    /** Face */\n    ActionType_Face_Fragment,\n    /** videoEcode */\n    ActionType_VideoEcode,\n    /** 增高瘦身 */\n    ActionType_Shape,\n    /** 未实现 */\n    ActionType_Empty,\n\n\n};\n\n\n@interface MainViewController ()<UITableViewDelegate,UITableViewDataSource>\n\n@property (nonatomic, strong) UITableView *tab;\n@property (nonatomic, strong) NSArray *dataSource;\n@end\n\n@implementation MainViewController\n\n- (void)viewDidLoad {\n    [super viewDidLoad];\n    self.view.backgroundColor = [UIColor whiteColor];\n    [self tab];\n    \n}\n\n- (NSArray *)dataSource\n{\n    if (!_dataSource) {\n        NSDictionary *dic1 = [self actionDic:@\"抖音效果\" type:ActionType_DouYinEffect viewcontrollerName:@\"GLDouYinEffectViewController\"];\n        NSDictionary *dic2 = [self actionDic:@\"GPUiamge+混音保存\" type:ActionType_GLImageMovieUse viewcontrollerName:@\"GLImageMovieUseViewController\"];\n        NSDictionary *dic3 = [self actionDic:@\"FilterShow\" type:ActionType_FilterList viewcontrollerName:@\"GLImageFilterListViewController\"];\n        NSDictionary *dic4 = [self actionDic:@\"美颜,脸，鼻，眼调节（基于FragmentShader调节--已实现）\" type:ActionType_Face_Fragment viewcontrollerName:@\"FaceViewController\"];\n        NSDictionary *dic5 = [self actionDic:@\"美颜,瘦脸,大眼（基于VertexShader调节--未实现）\" type:ActionType_Empty viewcontrollerName:@\"\"];\n        NSDictionary *dic6 = [self actionDic:@\"增高，瘦身效果调节\" type:ActionType_Shape viewcontrollerName:@\"DDShapeViewController\"];\n        \n        _dataSource = @[dic1,dic2,dic3,dic4,dic5,dic6];\n        \n    }\n    return _dataSource;\n}\n\n- (NSDictionary *)actionDic:(NSString *)title type:(ActionType)type viewcontrollerName:(NSString *)vcName\n{\n    return @{@\"title\":title,\n             @\"type\":@(type),\n             @\"vcName\":vcName\n             };\n}\n\n- (UITableView *)tab\n{\n    if (!_tab) {\n        \n        _tab = [[UITableView alloc]initWithFrame:self.view.bounds style:UITableViewStylePlain];\n        _tab.delegate = self;\n        _tab.dataSource = self;\n        [self.view addSubview:_tab];\n        [_tab reloadData];\n    }\n    return _tab;\n}\n\n\n- (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section\n{\n    return self.dataSource.count;\n}\n\n- (NSInteger)numberOfSectionsInTableView:(UITableView *)tableView\n{\n    return 1;\n}\n\n- (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath\n{\n    \n    static NSString *cellid = @\"cellid\";\n    UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:cellid];\n    if (!cell) {\n        cell = [[UITableViewCell alloc]initWithStyle:UITableViewCellStyleDefault reuseIdentifier:cellid];\n        cell.textLabel.font = [UIFont systemFontOfSize:12];\n    }\n    \n    NSDictionary *dic = self.dataSource[indexPath.row];\n    cell.textLabel.text = dic[@\"title\"];\n    return cell;\n}\n\n- (void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath\n{\n    NSDictionary *dic = self.dataSource[indexPath.row];\n    \n    ActionType type = 
[dic[@\"type\"] integerValue];\n    if (type == ActionType_Empty) {\n        NSLog(@\"未实现---\");\n        return;\n    }\n    \n    BaseViewController *vc = (BaseViewController *)[self createClassName:dic[@\"vcName\"]];\n    vc.title = dic[@\"title\"];\n    [self.navigationController pushViewController:vc animated:YES];\n}\n\n- (UIViewController *)createClassName:(NSString *)classString\n{\n    const char *className = [classString cStringUsingEncoding:NSASCIIStringEncoding];\n    Class newClass = objc_getClass(className);\n    if (!newClass) {\n        Class superClass = [NSObject class];\n        newClass = objc_allocateClassPair(superClass, className, 0);\n        objc_registerClassPair(newClass);\n    }\n    id instance = [[newClass alloc] init];\n    return (UIViewController *)instance;\n}\n\n- (void)didReceiveMemoryWarning {\n    [super didReceiveMemoryWarning];\n    // Dispose of any resources that can be recreated.\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/ShapeChange/DDShapeViewController.h",
    "content": "//\n//  DDStrectchViewController.h\n//  WEOpenGLDemo\n//\n//  Created by 刘海东 on 2018/6/14.\n//  Copyright © 2018年 Leo. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import \"BaseViewController.h\"\n@interface DDShapeViewController : BaseViewController\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/ShapeChange/DDShapeViewController.m",
    "content": "//\n//  ViewController.m\n//  OpenGLStretchDemo\n//\n//  Created by 刘海东 on 2018/5/30.\n//  Copyright © 2018年 刘海东. All rights reserved.\n//\n\n#define kSW [UIScreen mainScreen].bounds.size.width\n#define kSH [UIScreen mainScreen].bounds.size.height\n#define kSelfSize self.frame.size\n\n\n#import \"DDShapeViewController.h\"\n#import \"DDGLShapeView.h\"\n#import \"DDGLShapeSelView.h\"\n#import \"DDGLShapeView.h\"\n#import <Photos/Photos.h>\n#import \"ATRiseViewController.h\"\n@interface DDShapeViewController ()<UIImagePickerControllerDelegate,UINavigationControllerDelegate>\n\n@property (nonatomic, strong) UIImage *image;\n@property (nonatomic, assign) DDGLShapeViewType type;\n\n@end\n\n@implementation DDShapeViewController\n\n- (void)viewDidLoad {\n    [super viewDidLoad];\n    \n    self.view.backgroundColor = [UIColor whiteColor];\n    \n    NSArray *array = [NSArray arrayWithObjects:@\"长图\",@\"宽图\",@\"方图\",nil];\n    //初始化UISegmentedControl\n    UISegmentedControl *segment = [[UISegmentedControl alloc]initWithItems:array];\n    //设置frame\n    segment.frame = CGRectMake(10, 100, self.view.frame.size.width-20, 30);\n    [segment addTarget:self action:@selector(change:) forControlEvents:UIControlEventValueChanged];\n    segment.selectedSegmentIndex = 0;\n    //添加到视图\n    [self.view addSubview:segment];\n    \n    \n    NSArray *array2 = [NSArray arrayWithObjects:@\"增高\",@\"瘦身\",nil];\n    //初始化UISegmentedControl\n    UISegmentedControl *segment2 = [[UISegmentedControl alloc]initWithItems:array2];\n    //设置frame\n    segment2.frame = CGRectMake(10, 200, self.view.frame.size.width-20, 30);\n    [segment2 addTarget:self action:@selector(changeFunc:) forControlEvents:UIControlEventValueChanged];\n    segment2.selectedSegmentIndex = 0;\n    //添加到视图\n    [self.view addSubview:segment2];\n    \n    \n    UIButton *btn = [UIButton buttonWithType:UIButtonTypeSystem];\n    [btn setTitle:@\"跳转\" forState:UIControlStateNormal];\n    btn.frame = CGRectMake(0, self.view.frame.size.height - 300, self.view.frame.size.width, 50);\n    [btn addTarget:self action:@selector(jumpAction:) forControlEvents:UIControlEventTouchUpInside];\n    [self.view addSubview:btn];\n    \n    \n    \n    \n    \n    self.image = [UIImage imageNamed:@\"长图.JPG\"];\n    self.type = 0;\n    \n    \n}\n\n- (void)change:(UISegmentedControl *)seg\n{\n    NSString* filePath = nil;\n    switch (seg.selectedSegmentIndex) {\n        case 0:\n        {\n            filePath = @\"长图.JPG\";\n        }\n            break;\n        case 1:\n        {\n            filePath = @\"宽图.JPG\";\n        }\n            break;\n        case 2:\n        {\n            filePath = @\"方图600*600.JPG\";\n        }\n            break;\n        case 3:\n        {\n            filePath = @\"4032*3024.JPG\";\n        }\n            break;\n            \n        default:\n            filePath = @\"长图.JPG\";\n            break;\n    }\n    self.image = [UIImage imageNamed:filePath];\n}\n\n- (void)changeFunc:(UISegmentedControl *)seg\n{\n    self.type = seg.selectedSegmentIndex;\n}\n\n- (void)jumpAction:(UIButton *)btn\n{\n    \n    ATRiseViewController *vc = [[ATRiseViewController alloc]init];\n    vc.type = self.type;\n    vc.previewImage = self.image;\n    [self presentViewController:vc animated:YES completion:nil];\n    \n}\n\n\n\n\n- (void)didReceiveMemoryWarning {\n    [super didReceiveMemoryWarning];\n}\n\n\n@end\n\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/ShapeChange/Rise/Controller/ATRiseViewController.h",
    "content": "//\n//  ATRiseViewController.h\n//  Artist\n//\n//  Created by huangjinwen on 2018/6/21.\n//  Copyright © 2018年 wecut. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import \"DDGLShapingView.h\"\n\n@interface ATRiseViewController : UIViewController\n\n@property (nonatomic, strong) UIImage *previewImage;\n@property (nonatomic, assign) DDGLShapeViewType type;\n\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/ShapeChange/Rise/Controller/ATRiseViewController.m",
    "content": "//\n//  ATRiseViewController.m\n//  Artist\n//\n//  Created by huangjinwen on 2018/6/21.\n//  Copyright © 2018年 wecut. All rights reserved.\n//\n\n#import \"ATRiseViewController.h\"\n#import \"ATRiseMenuView.h\"\n#import <GPURenderKit/GPURenderKit.h>\n\n@interface ATRiseViewController ()<DDGLShapingViewDelegate>\n@property (nonatomic, strong) DDGLShapingView *glShapingView;\n@property (nonatomic, assign) DDGLNormValueRange rangeValue;\n@property (nonatomic, strong) UISlider *slider;\n@end\n\n@implementation ATRiseViewController\n\n- (void)viewDidLoad {\n    [super viewDidLoad];\n    DDGLNormValueRange range = {0,0};\n    range.max = 0.5;\n    range.min = 0.2;\n    _rangeValue = range;\n    self.view.backgroundColor = [UIColor blackColor];\n    \n    \n    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{\n        [self configUI];\n    });\n}\n\n\n- (void)configUI\n{\n    UIButton *backBtn = [UIButton buttonWithType:UIButtonTypeCustom];\n    [backBtn setTitle:@\"返回\" forState:UIControlStateNormal];\n    backBtn.frame = CGRectMake(0, 50, 100, 50);\n    [backBtn addTarget:self action:@selector(backAction) forControlEvents:UIControlEventTouchUpInside];\n    [self.view addSubview:backBtn];\n    \n    \n    UISlider *slider = [[UISlider alloc]initWithFrame:CGRectMake(20, kScreen_H - 100, kScreen_W - 40, 50)];\n    [self.view addSubview:slider];\n    slider.minimumValue = 0.0;\n    slider.maximumValue = 1.0;\n    [slider addTarget:self action:@selector(valueChange:) forControlEvents:UIControlEventValueChanged];\n    self.slider = slider;\n    \n    \n    UIButton *saveBtn = [UIButton buttonWithType:UIButtonTypeCustom];\n    [saveBtn setTitle:@\"保存\" forState:UIControlStateNormal];\n    saveBtn.frame = CGRectMake(kScreen_W-100, 50, 100, 50);\n    [saveBtn addTarget:self action:@selector(saveAction) forControlEvents:UIControlEventTouchUpInside];\n    [self.view addSubview:saveBtn];\n\n}\n\n- (void)viewWillAppear:(BOOL)animated\n{\n    [super viewWillAppear:animated];\n}\n\n- (DDGLShapingView *)glShapingView\n{\n    if (!_glShapingView) {\n        float h = kScreen_H - kATRiseMenuView_h - SafeTopMargin - SafeBottomMargin - 50;\n        _glShapingView = [[DDGLShapingView alloc]initWithFrame:CGRectMake(0, SafeTopMargin + 50, kScreen_W, h) type:_type image:self.previewImage];\n        _glShapingView.delegate = self;\n        [self.view addSubview:_glShapingView];\n        [_glShapingView changeRange:self.rangeValue];\n    }\n    return _glShapingView;\n}\n\n- (void)shapingViewSwiping\n{\n    self.slider.value = 0.0;\n    [self.glShapingView changeValue:self.slider.value];\n    \n}\n- (void)shapingViewGetVertexArray:(NSArray *)vertexArray textureCoordinateArray:(NSArray *)textureCoordinateArray changeValue:(float)changeValue type:(DDGLShapeViewType)type\n{\n    \n}\n- (void)shapingViewSwipEndMaxValue:(float)max minValue:(float)min\n{\n    _rangeValue.max = max;\n    _rangeValue.min = min;\n    [_glShapingView changeRange:self.rangeValue];\n    \n}\n\n\n\n- (void)setType:(DDGLShapeViewType)type\n{\n    _type = type;\n}\n\n- (void)setPreviewImage:(UIImage *)previewImage\n{\n    _previewImage = previewImage;\n    [self glShapingView];\n}\n\n- (void)didReceiveMemoryWarning {\n    [super didReceiveMemoryWarning];\n}\n\n- (void)dealloc\n{\n    [[GPUImageContext sharedImageProcessingContext].framebufferCache purgeAllUnassignedFramebuffers];\n    NSLog(@\"ATRiseViewController-- 增高或瘦身 dealloc\");\n}\n\n- (void)backAction\n{\n    [self 
dismissViewControllerAnimated:YES completion:nil];\n}\n\n- (void)saveAction\n{\n    UIImage *ima = [self.glShapingView getProcessImage];\n    UIImageView *imageView = [[UIImageView alloc]initWithImage:ima];\n    float h = ((kScreen_W-100)/ima.size.width) *ima.size.height;\n    imageView.frame = CGRectMake(50, 200,  kScreen_W-100, h);\n    [self.view addSubview:imageView];\n    \n    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(5.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{\n        [imageView removeFromSuperview];\n    });\n    \n}\n\n\n- (void)valueChange:(UISlider *)slider\n{\n    [self.glShapingView changeValue:slider.value];\n}\n\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/ShapeChange/Rise/View/ATRiseMenuView.h",
    "content": "//\n//  ATRiseMenuView.h\n//  Artist\n//\n//  Created by 刘海东 on 2018/6/21.\n//  Copyright © 2018年 wecut. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n\ntypedef NS_ENUM(NSInteger,ATRiseMenuViewActionType) {\n    \n    /** 关闭 */\n    ATRiseMenuViewActionType_Close = 0,\n    /** course教程 */\n    ATRiseMenuViewActionType_Course,\n    /** 确认 */\n    ATRiseMenuViewActionType_Enter,\n    \n};\n\n@interface ATRiseMenuView : UIView\n\n\n@property (nonatomic, copy) void (^valueDidChangeHandler)(float value);\n@property (nonatomic, copy) void (^touchBeginHandler)(float value);\n@property (nonatomic, copy) void (^touchEndHandler)(float value);\n\n/** 点击事件 */\n@property (nonatomic, copy) void (^clickActionHandler)(ATRiseMenuViewActionType type);\n\n- (void)hideHelpButton:(BOOL)state;\n\n- (float)getValue;\n\n- (void)setValue:(float)value;\n\n- (void)setTitle:(NSString *)tit;\n\n- (void)minmimValue:(float)value;\n- (void)maxmimValue:(float)value;\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/ShapeChange/Rise/View/ATRiseMenuView.m",
    "content": "//\n//  ATRiseMenuView.m\n//  Artist\n//\n//  Created by 刘海东 on 2018/6/21.\n//  Copyright © 2018年 wecut. All rights reserved.\n//\n\n#import \"ATRiseMenuView.h\"\n#import \"ATSelectBarView.h\"\n#import \"ATSliderView.h\"\n#define kSliderView_h 26\n#define kSelectBarView_h 48\n\n@interface ATRiseMenuView ()\n@property (nonatomic, strong) ATSelectBarView *selectBarView;\n@property (nonatomic, strong) ATSliderView *sliderView;\n\n@end\n\n@implementation ATRiseMenuView\n\n- (instancetype)initWithFrame:(CGRect)frame\n{\n    self = [super initWithFrame:frame];\n    if (self) {\n        \n        self.backgroundColor = [UIColor blackColor];\n        [self selectBarView];\n        [self sliderView];\n    }\n    return self;\n}\n\n\n- (ATSelectBarView *)selectBarView\n{\n    if (!_selectBarView) {\n        _selectBarView = [[ATSelectBarView alloc]initWithFrame:CGRectMake(0, self.frame.size.height-kSelectBarView_h, kScreen_W, kSelectBarView_h)];\n        _selectBarView.hideHelpButton = NO;\n        kWeakeSelf;\n        _selectBarView.selectBlock = ^(NSInteger index) {\n            kStrongSelf;\n            if (strongSelf.clickActionHandler) {\n                strongSelf.clickActionHandler(index);\n            }\n        };\n        \n        \n        \n        [self addSubview:_selectBarView];\n    }\n    return _selectBarView;\n}\n\n- (ATSliderView *)sliderView\n{\n    if (!_sliderView) {\n        \n        _sliderView = [[ATSliderView alloc]initWithFrame:CGRectMake(20, 47, kScreen_W - 40, kSliderView_h)];\n        [_sliderView configBigFollowView];\n        [self addSubview:_sliderView];\n        kWeakeSelf;\n        \n        _sliderView.valueDidChangeHandler = ^(float value) {\n            kStrongSelf;\n            strongSelf.sliderView.topLabValue = [NSString stringWithFormat:@\"%.0f\",value*100];\n            if (strongSelf.valueDidChangeHandler) {\n                strongSelf.valueDidChangeHandler(value);\n            }\n        };\n        \n        _sliderView.touchBeginHandler = ^(float value) {\n            kStrongSelf;\n            strongSelf.sliderView.topLabValue = [NSString stringWithFormat:@\"%.0f\",value*100];\n            if (strongSelf.touchBeginHandler) {\n                strongSelf.touchBeginHandler(value);\n            }\n        };\n        \n        _sliderView.touchEndHandler = ^(float value) {\n            kStrongSelf;\n            strongSelf.sliderView.topLabValue = [NSString stringWithFormat:@\"%.0f\",value*100];\n            if (strongSelf.touchEndHandler) {\n                strongSelf.touchEndHandler(value);\n            }\n        };\n        \n    }\n    return _sliderView;\n}\n\n- (float)getValue\n{\n    return self.sliderView.value;\n}\n\n\n- (void)setValue:(float)value\n{\n    self.sliderView.value = value;\n    \n}\n\n- (void)setTitle:(NSString *)tit\n{\n    self.selectBarView.title = tit;\n    \n}\n\n- (void)minmimValue:(float)value\n{\n    self.sliderView.minmimValue = value;\n}\n- (void)maxmimValue:(float)value\n{\n    self.sliderView.maxmimValue = value;\n}\n\n- (void)hideHelpButton:(BOOL)state\n{\n    _selectBarView.hideHelpButton = state;\n}\n\n\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/ShapeChange/Rise/View/ATSelectBarView.h",
    "content": "//\n//  ATSelectBarView.h\n//  Artist\n//\n//  Created by huangjinwen on 2018/6/21.\n//  Copyright © 2018年 wecut. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n\n//传0是点了左边button  1是中间button   2是右边button\ntypedef void(^SelectBlock)(NSInteger index);\n\n@interface ATSelectBarView : UIView\n\n@property (nonatomic, copy) NSString *title;\n@property (nonatomic, copy) SelectBlock selectBlock;\n//是否显示中间帮助button\n@property (nonatomic, assign) BOOL hideHelpButton;\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/ShapeChange/Rise/View/ATSelectBarView.m",
    "content": "//\n//  ATSelectBarView.m\n//  Artist\n//\n//  Created by huangjinwen on 2018/6/21.\n//  Copyright © 2018年 wecut. All rights reserved.\n//\n\n#import \"ATSelectBarView.h\"\n#import \"UIView+Xib.h\"\n\n@interface ATSelectBarView ()\n@property (weak, nonatomic) IBOutlet UILabel *titleLabel;\n@property (weak, nonatomic) IBOutlet UIButton *cancelButton;\n@property (weak, nonatomic) IBOutlet UIButton *okButton;\n@property (weak, nonatomic) IBOutlet UIButton *helpButton;\n@end\n\n\n@implementation ATSelectBarView\n\n\n- (void)awakeFromNib\n{\n    [super awakeFromNib];\n    //加载同名xib并添加到self\n    [self setupSelfNameXibOnSelf];\n}\n\n- (instancetype)initWithFrame:(CGRect)frame\n{\n    self = [super initWithFrame:frame];\n    if (self) {\n        UIView *view = [[[NSBundle mainBundle] loadNibNamed:NSStringFromClass(self.class) owner:self options:nil] firstObject];\n        self.frame = frame;\n        view.frame = CGRectMake(0, 0, CGRectGetWidth(frame), CGRectGetHeight(frame));\n        [self addSubview:view];\n        \n    }\n    return self;\n}\n\n\n#pragma mark - setter\n\n-(void)setTitle:(NSString *)title {\n    _title = title;\n    self.titleLabel.text = title;\n}\n\n\n#pragma mark - button event\n\n- (IBAction)buttonAction:(UIButton *)sender {\n    if (self.selectBlock) {\n        if (self.cancelButton == sender) {\n            self.selectBlock(0);\n        } else if (self.okButton == sender) {\n            self.selectBlock(2);\n        } else if (self.helpButton == sender) {\n            self.selectBlock(1);\n        }\n    }\n}\n\n-(void)setHideHelpButton:(BOOL)hideHelpButton {\n    _hideHelpButton = hideHelpButton;\n    self.helpButton.hidden = hideHelpButton;\n}\n\n\n\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/ShapeChange/Rise/View/ATSelectBarView.xib",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<document type=\"com.apple.InterfaceBuilder3.CocoaTouch.XIB\" version=\"3.0\" toolsVersion=\"14109\" targetRuntime=\"iOS.CocoaTouch\" propertyAccessControl=\"none\" useAutolayout=\"YES\" useTraitCollections=\"YES\" useSafeAreas=\"YES\" colorMatched=\"YES\">\n    <device id=\"retina4_7\" orientation=\"portrait\">\n        <adaptation id=\"fullscreen\"/>\n    </device>\n    <dependencies>\n        <deployment identifier=\"iOS\"/>\n        <plugIn identifier=\"com.apple.InterfaceBuilder.IBCocoaTouchPlugin\" version=\"14088\"/>\n        <capability name=\"Safe area layout guides\" minToolsVersion=\"9.0\"/>\n        <capability name=\"documents saved in the Xcode 8 format\" minToolsVersion=\"8.0\"/>\n    </dependencies>\n    <objects>\n        <placeholder placeholderIdentifier=\"IBFilesOwner\" id=\"-1\" userLabel=\"File's Owner\" customClass=\"ATSelectBarView\">\n            <connections>\n                <outlet property=\"cancelButton\" destination=\"Yei-yK-eoT\" id=\"pBo-S1-dJO\"/>\n                <outlet property=\"helpButton\" destination=\"CId-mP-Phx\" id=\"8JY-4U-pg0\"/>\n                <outlet property=\"okButton\" destination=\"uy1-X2-9hX\" id=\"GLW-JN-vqJ\"/>\n                <outlet property=\"titleLabel\" destination=\"lwd-4c-6b4\" id=\"hQO-rd-lKY\"/>\n            </connections>\n        </placeholder>\n        <placeholder placeholderIdentifier=\"IBFirstResponder\" id=\"-2\" customClass=\"UIResponder\"/>\n        <view contentMode=\"scaleToFill\" id=\"iN0-l3-epB\">\n            <rect key=\"frame\" x=\"0.0\" y=\"0.0\" width=\"375\" height=\"667\"/>\n            <autoresizingMask key=\"autoresizingMask\" widthSizable=\"YES\" heightSizable=\"YES\"/>\n            <subviews>\n                <view contentMode=\"scaleToFill\" translatesAutoresizingMaskIntoConstraints=\"NO\" id=\"Ab5-eL-TDE\">\n                    <rect key=\"frame\" x=\"0.0\" y=\"309.5\" width=\"375\" height=\"48\"/>\n                    <subviews>\n                        <stackView opaque=\"NO\" contentMode=\"scaleToFill\" distribution=\"equalSpacing\" translatesAutoresizingMaskIntoConstraints=\"NO\" id=\"Xkl-Zl-X6s\">\n                            <rect key=\"frame\" x=\"14\" y=\"0.0\" width=\"347\" height=\"48\"/>\n                            <subviews>\n                                <button opaque=\"NO\" contentMode=\"scaleToFill\" contentHorizontalAlignment=\"center\" contentVerticalAlignment=\"center\" lineBreakMode=\"middleTruncation\" translatesAutoresizingMaskIntoConstraints=\"NO\" id=\"Yei-yK-eoT\">\n                                    <rect key=\"frame\" x=\"0.0\" y=\"0.0\" width=\"28\" height=\"48\"/>\n                                    <constraints>\n                                        <constraint firstAttribute=\"width\" constant=\"28\" id=\"c9y-ye-RxI\"/>\n                                        <constraint firstAttribute=\"height\" constant=\"48\" id=\"ucS-Xp-9WC\"/>\n                                    </constraints>\n                                    <state key=\"normal\" image=\"edit_cancel\"/>\n                                    <connections>\n                                        <action selector=\"buttonAction:\" destination=\"-1\" eventType=\"touchUpInside\" id=\"wcE-QZ-EX1\"/>\n                                    </connections>\n                                </button>\n                                <view contentMode=\"scaleToFill\" translatesAutoresizingMaskIntoConstraints=\"NO\" id=\"uN6-1r-2Qk\">\n                     
               <rect key=\"frame\" x=\"73.5\" y=\"0.0\" width=\"200\" height=\"48\"/>\n                                    <subviews>\n                                        <label opaque=\"NO\" userInteractionEnabled=\"NO\" contentMode=\"left\" horizontalHuggingPriority=\"251\" verticalHuggingPriority=\"251\" text=\"请输入title\" textAlignment=\"center\" lineBreakMode=\"tailTruncation\" baselineAdjustment=\"alignBaselines\" adjustsFontSizeToFit=\"NO\" translatesAutoresizingMaskIntoConstraints=\"NO\" id=\"lwd-4c-6b4\">\n                                            <rect key=\"frame\" x=\"63.5\" y=\"15\" width=\"73\" height=\"18\"/>\n                                            <fontDescription key=\"fontDescription\" type=\"system\" weight=\"medium\" pointSize=\"15\"/>\n                                            <color key=\"textColor\" red=\"1\" green=\"1\" blue=\"1\" alpha=\"1\" colorSpace=\"custom\" customColorSpace=\"sRGB\"/>\n                                            <nil key=\"highlightedColor\"/>\n                                            <userDefinedRuntimeAttributes>\n                                                <userDefinedRuntimeAttribute type=\"boolean\" keyPath=\"hiddenText\" value=\"YES\"/>\n                                            </userDefinedRuntimeAttributes>\n                                        </label>\n                                        <button hidden=\"YES\" opaque=\"NO\" contentMode=\"scaleToFill\" contentHorizontalAlignment=\"center\" contentVerticalAlignment=\"center\" lineBreakMode=\"middleTruncation\" translatesAutoresizingMaskIntoConstraints=\"NO\" id=\"CId-mP-Phx\">\n                                            <rect key=\"frame\" x=\"142.5\" y=\"9.5\" width=\"28\" height=\"28\"/>\n                                            <constraints>\n                                                <constraint firstAttribute=\"height\" constant=\"28\" id=\"CF2-zJ-JdQ\"/>\n                                                <constraint firstAttribute=\"width\" constant=\"28\" id=\"Ozw-MW-rdm\"/>\n                                            </constraints>\n                                            <state key=\"normal\" title=\"Button\" image=\"edit_beautify_manual\"/>\n                                            <connections>\n                                                <action selector=\"buttonAction:\" destination=\"-1\" eventType=\"touchUpInside\" id=\"M7c-nT-dDg\"/>\n                                            </connections>\n                                        </button>\n                                    </subviews>\n                                    <color key=\"backgroundColor\" white=\"0.0\" alpha=\"1\" colorSpace=\"custom\" customColorSpace=\"genericGamma22GrayColorSpace\"/>\n                                    <constraints>\n                                        <constraint firstItem=\"CId-mP-Phx\" firstAttribute=\"centerY\" secondItem=\"uN6-1r-2Qk\" secondAttribute=\"centerY\" id=\"2HS-Hz-yEY\"/>\n                                        <constraint firstItem=\"lwd-4c-6b4\" firstAttribute=\"centerY\" secondItem=\"uN6-1r-2Qk\" secondAttribute=\"centerY\" id=\"IYo-m1-Ue0\"/>\n                                        <constraint firstAttribute=\"width\" constant=\"200\" id=\"Ofg-JP-TcS\"/>\n                                        <constraint firstItem=\"lwd-4c-6b4\" firstAttribute=\"centerX\" secondItem=\"uN6-1r-2Qk\" secondAttribute=\"centerX\" id=\"Rul-JI-oIB\"/>\n                                        <constraint firstItem=\"CId-mP-Phx\" 
firstAttribute=\"leading\" secondItem=\"lwd-4c-6b4\" secondAttribute=\"trailing\" constant=\"6\" id=\"c5Y-r2-15Z\"/>\n                                    </constraints>\n                                </view>\n                                <button opaque=\"NO\" contentMode=\"scaleToFill\" contentHorizontalAlignment=\"center\" contentVerticalAlignment=\"center\" lineBreakMode=\"middleTruncation\" translatesAutoresizingMaskIntoConstraints=\"NO\" id=\"uy1-X2-9hX\">\n                                    <rect key=\"frame\" x=\"319\" y=\"0.0\" width=\"28\" height=\"48\"/>\n                                    <constraints>\n                                        <constraint firstAttribute=\"height\" constant=\"48\" id=\"EjT-Nc-caa\"/>\n                                        <constraint firstAttribute=\"width\" constant=\"28\" id=\"m6c-KY-A21\"/>\n                                    </constraints>\n                                    <state key=\"normal\" image=\"edit_achieve\"/>\n                                    <connections>\n                                        <action selector=\"buttonAction:\" destination=\"-1\" eventType=\"touchUpInside\" id=\"zTo-Qw-Dor\"/>\n                                    </connections>\n                                </button>\n                            </subviews>\n                            <constraints>\n                                <constraint firstItem=\"uN6-1r-2Qk\" firstAttribute=\"centerX\" secondItem=\"Xkl-Zl-X6s\" secondAttribute=\"centerX\" id=\"Esl-QP-lPk\"/>\n                                <constraint firstAttribute=\"height\" constant=\"48\" id=\"lhR-hA-1Zb\"/>\n                            </constraints>\n                        </stackView>\n                    </subviews>\n                    <color key=\"backgroundColor\" white=\"0.0\" alpha=\"1\" colorSpace=\"custom\" customColorSpace=\"genericGamma22GrayColorSpace\"/>\n                    <constraints>\n                        <constraint firstAttribute=\"height\" constant=\"48\" id=\"2aY-zH-zLb\"/>\n                        <constraint firstItem=\"Xkl-Zl-X6s\" firstAttribute=\"leading\" secondItem=\"Ab5-eL-TDE\" secondAttribute=\"leading\" constant=\"14\" id=\"AsY-J6-Q9m\"/>\n                        <constraint firstAttribute=\"bottom\" secondItem=\"Xkl-Zl-X6s\" secondAttribute=\"bottom\" id=\"GW9-MQ-ye2\"/>\n                        <constraint firstAttribute=\"trailing\" secondItem=\"Xkl-Zl-X6s\" secondAttribute=\"trailing\" constant=\"14\" id=\"ZFO-Wu-rcf\"/>\n                    </constraints>\n                </view>\n            </subviews>\n            <color key=\"backgroundColor\" red=\"1\" green=\"1\" blue=\"1\" alpha=\"1\" colorSpace=\"custom\" customColorSpace=\"sRGB\"/>\n            <constraints>\n                <constraint firstItem=\"Ab5-eL-TDE\" firstAttribute=\"centerY\" secondItem=\"iN0-l3-epB\" secondAttribute=\"centerY\" id=\"2e9-1u-C8H\"/>\n                <constraint firstItem=\"Ab5-eL-TDE\" firstAttribute=\"leading\" secondItem=\"vUN-kp-3ea\" secondAttribute=\"leading\" id=\"Y1c-0w-zOf\"/>\n                <constraint firstItem=\"vUN-kp-3ea\" firstAttribute=\"trailing\" secondItem=\"Ab5-eL-TDE\" secondAttribute=\"trailing\" id=\"xfT-va-Bat\"/>\n            </constraints>\n            <viewLayoutGuide key=\"safeArea\" id=\"vUN-kp-3ea\"/>\n        </view>\n    </objects>\n    <resources>\n        <image name=\"edit_achieve\" width=\"28\" height=\"28\"/>\n        <image name=\"edit_beautify_manual\" width=\"28\" height=\"28\"/>\n        <image 
name=\"edit_cancel\" width=\"28\" height=\"28\"/>\n    </resources>\n</document>\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/ShapeChange/Rise/View/DDGLShapeView/DDGLSelectView.h",
    "content": "//\n//  DDGLSelectView.h\n//  Artist\n//\n//  Created by 刘海东 on 2018/7/11.\n//  Copyright © 2018年 wecut. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import \"DDGLShapeView.h\"\n@interface DDGLSelectView : UIView\n\n\n@property (nonatomic, assign) DDGLShapeViewType type;\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/ShapeChange/Rise/View/DDGLShapeView/DDGLSelectView.m",
    "content": "//\n//  DDGLSelectView.m\n//  Artist\n//\n//  Created by 刘海东 on 2018/7/11.\n//  Copyright © 2018年 wecut. All rights reserved.\n//\n\n#import \"DDGLSelectView.h\"\n\n\n@interface DDGLSelectView ()\n\n@property (nonatomic, strong) UILabel *contentLab;\n\n@end\n\n\n@implementation DDGLSelectView\n\n- (void)layoutSubviews\n{\n    [super layoutSubviews];\n    \n    if (_type == DDGLShapeViewType_Vertical)\n    {\n        float w = self.frame.size.width;\n        float h = 30;\n        float y = (self.frame.size.height - h)/2.0;\n        if (self.frame.size.height<=h+10)\n        {\n            self.contentLab.hidden = YES;\n        }\n        else\n        {\n            self.contentLab.hidden = NO;\n        }\n        self.contentLab.frame = CGRectMake(0, y, w, h);\n    }\n    else\n    {\n        float w = 150;\n        float h = 60;\n        float y = (self.frame.size.height - h)/2.0;\n        float x = (self.frame.size.width - w)/2.0;\n        if (self.frame.size.width<=w+10)\n        {\n            self.contentLab.hidden = YES;\n        }\n        else\n        {\n            self.contentLab.hidden = NO;\n        }\n        self.contentLab.frame = CGRectMake(x, y, w, h);\n    }\n}\n\n\n- (instancetype)initWithFrame:(CGRect)frame\n{\n    self = [super initWithFrame:frame];\n    if (self) {\n        \n        \n        \n    }\n    return self;\n}\n\n\n- (UILabel *)contentLab\n{\n    if (!_contentLab)\n    {\n        _contentLab = [[UILabel alloc]init];\n        _contentLab.font = [UIFont fontWithName:@\"SFUIText-Semibold\" size:14];\n        _contentLab.textColor = [UIColor whiteColor];\n        _contentLab.textAlignment = NSTextAlignmentCenter;\n        [self addSubview:_contentLab];\n    }\n    return _contentLab;\n}\n\n\n- (void)setType:(DDGLShapeViewType)type\n{\n    _type = type;\n    if (_type == DDGLShapeViewType_Vertical)\n    {\n        self.contentLab.text = @\"Drag the line to select the area\";\n    }\n    else\n    {\n        self.contentLab.text = @\"Drag the line to select the area\";\n        self.contentLab.numberOfLines = 2;\n    }\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/ShapeChange/Rise/View/DDGLShapeView/DDGLShapeControlView.h",
    "content": "//\n//  DDGLShapeControlView.h\n//  OpenGLStretchDemo\n//\n//  Created by 刘海东 on 2018/6/6.\n//  Copyright © 2018年 刘海东. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import \"DDGLShapeView.h\"\n@interface DDGLShapeControlView : UIView\n\n- (instancetype)initWithFrame:(CGRect)frame type:(DDGLShapeViewType)type;\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/ShapeChange/Rise/View/DDGLShapeView/DDGLShapeControlView.m",
    "content": "//\n//  DDGLShapeControlView.m\n//  OpenGLStretchDemo\n//\n//  Created by 刘海东 on 2018/6/6.\n//  Copyright © 2018年 刘海东. All rights reserved.\n//\n\n#import \"DDGLShapeControlView.h\"\n\n@interface DDGLShapeControlView ()\n\n\n@property (nonatomic, strong) UIView *lineView;\n\n@property (nonatomic, strong) UIImageView *controlView1;\n\n@property (nonatomic, strong) UIImageView *controlView2;\n\n@property (nonatomic, assign) DDGLShapeViewType type;\n\n\n\n@end\n\n@implementation DDGLShapeControlView\n\n\n- (instancetype)initWithFrame:(CGRect)frame type:(DDGLShapeViewType)type\n{\n    self = [super initWithFrame:frame];\n    if (self) {\n        \n        _type = type;\n        \n        [self lineView];\n        [self controlView1];\n        [self controlView2];\n        \n        \n        if (_type == DDGLShapeViewType_Vertical)\n        {\n            self.controlView1.hidden = YES;\n        }\n        else\n        {\n            self.controlView2.hidden = YES;\n\n        }\n\n        \n    }\n    return self;\n}\n\n\n- (UIView *)lineView\n{\n    if (!_lineView)\n    {\n        CGRect rect;\n        if (_type == DDGLShapeViewType_Vertical) {\n            rect = CGRectMake(0, self.frame.size.height/2.0, self.frame.size.width, 1.0);\n        }else\n        {\n            rect = CGRectMake(self.frame.size.width/2.0, 0, 1.0, self.frame.size.height);\n        }\n        _lineView = [[UIView alloc]initWithFrame:rect];\n        _lineView.layer.shadowOffset = CGSizeMake(0, 0);\n        _lineView.layer.shadowOpacity = 0.13;\n        _lineView.layer.shadowRadius = 2;\n        _lineView.layer.shadowColor = [UIColor colorWithRed:0 green:0 blue:0 alpha:1].CGColor;\n        _lineView.backgroundColor = [UIColor whiteColor];\n        [self addSubview:_lineView];\n\n    }\n    return _lineView;\n}\n\n\n- (UIImageView *)controlView1\n{\n    if (!_controlView1) {\n        \n        CGRect rect;\n        UIImage *image;\n        if (_type == DDGLShapeViewType_Vertical)\n        {\n            image = [UIImage imageNamed:@\"edit_beautify_rise_move\"];\n            rect = CGRectMake(0, (self.frame.size.height - image.size.height)/2.0, image.size.width, image.size.height);\n        }\n        else\n        {\n            image = [UIImage imageNamed:@\"edit_beautify_slim_move\"];\n            rect = CGRectMake((self.frame.size.width - image.size.width)/2.0, 0, image.size.width,image.size.height);\n        }\n        _controlView1 = [[UIImageView alloc]initWithFrame:rect];\n        _controlView1.image = image;\n        [self addSubview:_controlView1];\n\n    }\n    return _controlView1;\n}\n\n\n- (UIImageView *)controlView2\n{\n    if (!_controlView2) {\n        CGRect rect;\n        UIImage *image;\n        if (_type == DDGLShapeViewType_Vertical)\n        {\n            image = [UIImage imageNamed:@\"edit_beautify_rise_move\"];\n            rect = CGRectMake(self.frame.size.width - image.size.width, (self.frame.size.height - image.size.height)/2.0, image.size.width,image.size.height);\n        }\n        else\n        {\n            image = [UIImage imageNamed:@\"edit_beautify_slim_move\"];\n            rect = CGRectMake((self.frame.size.width - image.size.width)/2.0, self.frame.size.height-image.size.height, image.size.width,image.size.height);\n        }\n        _controlView2 = [[UIImageView alloc]initWithFrame:rect];\n        _controlView2.image = image;\n        [self addSubview:_controlView2];\n    }\n    return _controlView2;\n}\n\n\n\n- (void)handlePan:(UILongPressGestureRecognizer 
*)sender\n{\n    \n    \n\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/ShapeChange/Rise/View/DDGLShapeView/DDGLShapeSelView.h",
    "content": "//\n//  DDGLShapeSelView.h\n//  OpenGLStretchDemo\n//\n//  Created by 刘海东 on 2018/6/6.\n//  Copyright © 2018年 刘海东. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import \"DDGLShapeView.h\"\n\n\n@protocol DDGLShapeSelViewDelegate <NSObject>\n\n/** 滑动选择区域中 */\n- (void)strectchSelViewSwiping;\n\n/** 滑动结束 */\n- (void)strectchSelViewSwipEndMaxValue:(float)max minValue:(float)min;\n\n@end\n\n@interface DDGLShapeSelView : UIView\n\n\n\n- (instancetype)initWithFrame:(CGRect)frame type:(DDGLShapeViewType)type subFrame:(CGRect)subFrame;\n@property (nonatomic, assign) DDGLNormValueRange valueRange;\n\n@property (nonatomic, weak) id<DDGLShapeSelViewDelegate> delegate;\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/ShapeChange/Rise/View/DDGLShapeView/DDGLShapeSelView.m",
    "content": "//\n//  DDGLShapeSelView.m\n//  OpenGLStretchDemo\n//\n//  Created by 刘海东 on 2018/6/6.\n//  Copyright © 2018年 刘海东. All rights reserved.\n//\n\n#define kControlViewW 50\n#define kSelfSize self.frame.size\n\n#import \"DDGLShapeSelView.h\"\n#import \"DDGLShapeControlView.h\"\n#import \"DDGLSelectView.h\"\n\n\n@interface DDGLShapeSelView ()\n@property (nonatomic, assign) DDGLShapeViewType type;\n@property (nonatomic, strong) DDGLShapeControlView *controlView1;\n@property (nonatomic, strong) DDGLShapeControlView *controlView2;\n@property (nonatomic, strong) DDGLSelectView *selectView;\n@property (nonatomic, assign) CGRect subFrame;\n@end\n\n\n@implementation DDGLShapeSelView\n\n- (instancetype)initWithFrame:(CGRect)frame type:(DDGLShapeViewType)type subFrame:(CGRect)subFrame\n{\n    self = [super initWithFrame:frame];\n    if (self) {\n        \n        self.userInteractionEnabled = YES;\n        _subFrame = subFrame;\n        \n        _type = type;\n\n    }\n    return self;\n}\n\n- (DDGLShapeControlView *)controlView1\n{\n    if (!_controlView1) {\n        \n        CGRect rect;\n        if (_type == DDGLShapeViewType_Vertical) {\n            rect = CGRectMake(kSelfSize.width-kSelfSize.width, -kControlViewW/2.0+kSelfSize.height*_valueRange.min, kSelfSize.width, kControlViewW);\n        }else\n        {\n            rect = CGRectMake(kSelfSize.width*_valueRange.min-kControlViewW/2.0, 0, kControlViewW, kSelfSize.height);\n        }\n        _controlView1 = [[DDGLShapeControlView alloc]initWithFrame:rect type:_type];\n        _controlView1.userInteractionEnabled = YES;\n        [self addSubview:_controlView1];\n        UILongPressGestureRecognizer *logPressGestureRecognizer = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(handlePan:)];\n        logPressGestureRecognizer.minimumPressDuration = 0.0;\n        [_controlView1 addGestureRecognizer:logPressGestureRecognizer];\n        \n    }\n    return _controlView1;\n}\n\n- (DDGLShapeControlView *)controlView2\n{\n    if (!_controlView2) {\n        \n        CGRect rect;\n        if (_type == DDGLShapeViewType_Vertical) {\n            rect = CGRectMake(kSelfSize.width-kSelfSize.width, kSelfSize.height*_valueRange.max-kControlViewW/2.0, kSelfSize.width, kControlViewW);\n        }else\n        {\n            rect = CGRectMake((kSelfSize.width - kControlViewW/2.0)*_valueRange.max, 0, kControlViewW, kSelfSize.height);\n        }\n        _controlView2 = [[DDGLShapeControlView alloc]initWithFrame:rect type:_type];\n        _controlView2.userInteractionEnabled = YES;\n        [self addSubview:_controlView2];\n        UILongPressGestureRecognizer *logPressGestureRecognizer = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(handlePan:)];\n        logPressGestureRecognizer.minimumPressDuration = 0.0;\n        [_controlView2 addGestureRecognizer:logPressGestureRecognizer];\n    }\n    return _controlView2;\n}\n\n- (DDGLSelectView *)selectView\n{\n    if (!_selectView) {\n        \n        _selectView = [[DDGLSelectView alloc]init];\n        _selectView.backgroundColor = [UIColor colorWithRed:1.0 green:0.0 blue:0.0 alpha:0.2];\n        _selectView.type = _type;\n        [self addSubview:_selectView];\n        [self sendSubviewToBack:_selectView];\n        if (_type == DDGLShapeViewType_Vertical)\n        {\n            _selectView.frame = CGRectMake(0, CGRectGetMidY(self.controlView1.frame), CGRectGetWidth(self.frame), CGRectGetMidY(self.controlView2.frame) - 
CGRectGetMidY(self.controlView1.frame));\n        }else\n        {\n            _selectView.frame = CGRectMake(CGRectGetMidX(self.controlView1.frame), 0, CGRectGetMidX(self.controlView2.frame)-CGRectGetMidX(self.controlView1.frame), CGRectGetHeight(self.frame));\n        }\n        _selectView.hidden = YES;\n        \n    }\n    return _selectView;\n}\n\n\n\n#pragma mark func\n\n- (void)handlePan:(UILongPressGestureRecognizer *)sender {\n    \n    \n    DDGLShapeControlView *tapSuperView = (DDGLShapeControlView *)sender.view;\n    \n    \n    switch (sender.state) {\n        case UIGestureRecognizerStateBegan:\n        {\n            self.selectView.hidden = NO;\n        }\n            break;\n        case UIGestureRecognizerStateChanged:\n        {\n            self.selectView.hidden = NO;\n            if (sender.numberOfTouches <= 0) {\n                return;\n            }\n            CGPoint tapPoint = [sender locationOfTouch:0 inView:self];\n            \n            \n            switch (_type) {\n                case DDGLShapeViewType_Vertical:\n                {\n                    tapPoint = CGPointMake(tapPoint.x, tapPoint.y-kControlViewW/2.0);\n                    \n                    [self verticalConfigPoint:tapPoint tapSuperView:tapSuperView];\n                }\n                    break;\n                case DDGLShapeViewType_Horizontal:\n                {\n                    tapPoint = CGPointMake(tapPoint.x-kControlViewW/2.0, tapPoint.y);\n                    [self horizontalConfigPoint:tapPoint tapSuperView:tapSuperView];\n                }\n                    break;\n                    \n                default:\n                    break;\n            }\n            \n            //滑动中\n            if (self.delegate && [self.delegate respondsToSelector:@selector(strectchSelViewSwiping)]) {\n                [self.delegate strectchSelViewSwiping];\n            }\n            \n            \n        }\n            break;\n        case UIGestureRecognizerStateCancelled:\n        {\n            self.selectView.hidden = YES;\n            [self calculateValue];\n        }\n            break;\n        case UIGestureRecognizerStateFailed:\n        {\n            self.selectView.hidden = YES;\n            [self calculateValue];\n        }\n            break;\n        case UIGestureRecognizerStateEnded:\n        {\n            self.selectView.hidden = YES;\n            [self calculateValue];\n        }\n            break;\n            \n        default:\n            break;\n    }\n    \n}\n\n/** 停止滑动时候 计算最大值 最小值 */\n- (void)calculateValue\n{\n    \n    float max = 1.0;\n    float min = 0.0;\n    \n    if (_type == DDGLShapeViewType_Vertical) {\n        float value1 = CGRectGetMidY(self.controlView1.frame)/self.frame.size.height;\n        float value2 = CGRectGetMidY(self.controlView2.frame)/self.frame.size.height;\n        if (value1>=value2) {\n            max = value1;\n            min = value2;\n        }else\n        {\n            max = value2;\n            min = value1;\n        }\n    }else\n    {\n        float value1 = CGRectGetMidX(self.controlView1.frame)/self.frame.size.width;\n        float value2 = CGRectGetMidX(self.controlView2.frame)/self.frame.size.width;\n        if (value1>=value2) {\n            max = value1;\n            min = value2;\n        }else\n        {\n            max = value2;\n            min = value1;\n        }\n\n    }\n    \n    \n    if ([self.delegate respondsToSelector:@selector(strectchSelViewSwipEndMaxValue:minValue:)]) {\n        
[self.delegate strectchSelViewSwipEndMaxValue:max minValue:min];\n    }\n    \n    \n}\n\n\n#pragma mark 竖\n- (void)verticalConfigPoint:(CGPoint)tapPoint tapSuperView:(DDGLShapeControlView *)tapSuperView\n{\n    \n    CGRect rect;\n    if (tapSuperView == self.controlView1) {\n        rect = self.controlView1.frame;\n    }else{\n        rect = self.controlView2.frame;\n    }\n    \n    if (tapPoint.y<-kControlViewW/2.0)\n    {\n        tapPoint.y = -kControlViewW/2.0;\n    }\n    else if (tapPoint.y>kSelfSize.height-kControlViewW/2.0)\n    {\n        tapPoint.y = kSelfSize.height-kControlViewW/2.0;\n    }\n    \n    rect.origin.y = tapPoint.y;\n    \n    if (tapSuperView == self.controlView1) {\n        self.controlView1.frame = rect;\n    }else{\n        self.controlView2.frame = rect;\n    }\n    \n    CGRect rect1 = self.controlView1.frame;\n    CGRect rect2 = self.controlView2.frame;\n    \n    CGRect selectRect = self.selectView.frame;\n    \n    if (rect1.origin.y<rect2.origin.y) {\n        selectRect.origin.y = CGRectGetMidY(self.controlView1.frame);\n        selectRect.size.height = CGRectGetMidY(self.controlView2.frame) - CGRectGetMidY(self.controlView1.frame);\n    }\n    else\n    {\n        selectRect.origin.y = CGRectGetMidY(self.controlView2.frame);\n        selectRect.size.height = CGRectGetMidY(self.controlView1.frame) - CGRectGetMidY(self.controlView2.frame);\n    }\n    self.selectView.frame = selectRect;\n    \n}\n#pragma mark 横\n- (void)horizontalConfigPoint:(CGPoint)tapPoint tapSuperView:(DDGLShapeControlView *)tapSuperView\n{\n    \n    CGRect rect;\n    if (tapSuperView == self.controlView1) {\n        rect = self.controlView1.frame;\n    }else{\n        rect = self.controlView2.frame;\n    }\n    \n    if (tapPoint.x<-kControlViewW/2.0)\n    {\n        tapPoint.x = -kControlViewW/2.0;\n    }\n    else if (tapPoint.x>kSelfSize.width-kControlViewW/2.0)\n    {\n        tapPoint.x = kSelfSize.width-kControlViewW/2.0;\n    }\n    \n    rect.origin.x = tapPoint.x;\n    \n    if (tapSuperView == self.controlView1) {\n        self.controlView1.frame = rect;\n    }else{\n        self.controlView2.frame = rect;\n    }\n    \n    CGRect rect1 = self.controlView1.frame;\n    CGRect rect2 = self.controlView2.frame;\n    \n    CGRect selectRect = self.selectView.frame;\n    \n    if (rect1.origin.x<rect2.origin.x) {\n        selectRect.origin.x = CGRectGetMidX(self.controlView1.frame);\n        selectRect.size.width = CGRectGetMidX(self.controlView2.frame) - CGRectGetMidX(self.controlView1.frame);\n    }\n    else\n    {\n        selectRect.origin.x = CGRectGetMidX(self.controlView2.frame);\n        selectRect.size.width = CGRectGetMidX(self.controlView1.frame) - CGRectGetMidX(self.controlView2.frame);\n    }\n    self.selectView.frame = selectRect;\n    \n}\n\n-(UIView *)hitTest:(CGPoint)point withEvent:(UIEvent *)event {\n    UIView *view = [super hitTest:point withEvent:event];\n    if (view == nil)\n    {\n        for (UIView*subView in self.subviews)\n        {\n            if ([subView isKindOfClass:[DDGLShapeControlView class]])\n            {\n                CGPoint lowLeftPoint = [subView convertPoint:point fromView:self];\n                if ([subView pointInside:lowLeftPoint withEvent:event])\n                {\n                    view = subView;\n                }\n            }\n        }\n    }\n    return view;\n}\n\n#pragma mark set\n- (void)setValueRange:(DDGLNormValueRange)valueRange\n{\n    _valueRange = valueRange;\n    [self controlView1];\n    [self 
controlView2];\n    [self selectView];\n    [self updateSubViewsFrame];\n}\n\n- (void)updateSubViewsFrame\n{\n    CGRect rect;\n    if (_type == DDGLShapeViewType_Vertical) {\n        rect = CGRectMake(kSelfSize.width-kSelfSize.width, -kControlViewW/2.0+kSelfSize.height*_valueRange.min, kSelfSize.width, kControlViewW);\n    }else\n    {\n        rect = CGRectMake(kSelfSize.width*_valueRange.min-kControlViewW/2.0, 0, kControlViewW, kSelfSize.height);\n    }\n    _controlView1.frame = rect;\n    \n    \n    CGRect rect2;\n    if (_type == DDGLShapeViewType_Vertical) {\n        rect2 = CGRectMake(kSelfSize.width-kSelfSize.width, kSelfSize.height*_valueRange.max-kControlViewW/2.0, kSelfSize.width, kControlViewW);\n    }else\n    {\n        rect2 = CGRectMake((kSelfSize.width - kControlViewW/2.0)*_valueRange.max, 0, kControlViewW, kSelfSize.height);\n    }\n    _controlView2.frame = rect2;\n    \n    if (_type == DDGLShapeViewType_Vertical)\n    {\n        self.selectView.frame = CGRectMake(CGRectGetMinX(self.subFrame), CGRectGetMidY(self.controlView1.frame), CGRectGetWidth(self.subFrame), CGRectGetMidY(self.controlView2.frame) - CGRectGetMidY(self.controlView1.frame));\n    }else\n    {\n        self.selectView.frame = CGRectMake(CGRectGetMidX(self.controlView1.frame), self.subFrame.origin.y, CGRectGetMidX(self.controlView2.frame)-CGRectGetMidX(self.controlView1.frame), CGRectGetHeight(self.subFrame));\n    }\n}\n\n\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/ShapeChange/Rise/View/DDGLShapeView/DDGLShapeView.h",
    "content": "//\n//  DDGLShapeView.h\n//  OpenGLStretchDemo\n//\n//  Created by 刘海东 on 2018/5/30.\n//  Copyright © 2018年 刘海东. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import <OpenGLES/ES2/gl.h>\n#import <OpenGLES/ES2/glext.h>\n#import <GLKit/GLKit.h>\n\n/** 归一化的 0.0-1.0*/\nstruct DDGLNormValueRange{\n    /** 最大值 */\n    float max;\n    /** 最小值 */\n    float min;\n};\ntypedef struct DDGLNormValueRange DDGLNormValueRange;\n\n\n/** 拉伸图的类型 */\ntypedef NS_ENUM(NSInteger,DDGLShapeViewType)\n{\n    /** 竖直方向 */\n    DDGLShapeViewType_Vertical = 0,\n    /** 水平方向 */\n    DDGLShapeViewType_Horizontal,\n};\n\n@protocol DDGLShapeViewDelegate <NSObject>\n\n- (void)strectchViewGetVertexArray:(NSArray *)vertexArray changeValue:(float)changeValue;\n\n@end\n\n@interface DDGLShapeView : UIView\n\n@property (nonatomic, weak) id<DDGLShapeViewDelegate> delegate;\n- (instancetype)initWithFrame:(CGRect)frame type:(DDGLShapeViewType)type image:(UIImage *)image;\n\n@property (nonatomic, assign) DDGLNormValueRange valueRange;\n- (void)getOriginImageVertexConfig;\n- (void)changeValue:(float)value;\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/ShapeChange/Rise/View/DDGLShapeView/DDGLShapeView.m",
    "content": "//\n//  DDGLShapeView.m\n//  OpenGLStretchDemo\n//\n//  Created by 刘海东 on 2018/5/30.\n//  Copyright © 2018年 刘海东. All rights reserved.\n//\n\n#define kVertical_h 200\n\n#import \"DDGLShapeView.h\"\n#import \"DDGLShapeSelView.h\"\n#import \"DDGLShapeControlView.h\"\n\n@interface DDGLShapeView ()<GLKViewDelegate>\n\n{\n    GLfloat squareVertexData[40];\n}\n\n@property (nonatomic, strong) GLKView *glkView;\n@property (nonatomic , strong) EAGLContext* mContext;\n@property (nonatomic , strong) GLKBaseEffect* mEffect;\n\n@property (nonatomic, assign) float changValue;\n\n/** 图像宽高 */\n@property (nonatomic, assign) float imageWidth;\n@property (nonatomic, assign) float imageHeight;\n/** 类型 */\n@property (nonatomic, assign) DDGLShapeViewType  type;\n\n@property (nonatomic, copy) NSArray *vertexArray;\n\n\n\n@end\n\n@implementation DDGLShapeView\n- (instancetype)initWithFrame:(CGRect)frame type:(DDGLShapeViewType)type image:(UIImage *)image\n{\n    self = [super initWithFrame:frame];\n    if (self) {\n        _type = type;\n        //新建OpenGLES 上下文\n        self.mContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];\n        self.glkView.drawableColorFormat = GLKViewDrawableColorFormatRGBA8888;  //颜色缓冲区格式\n        [EAGLContext setCurrentContext:self.mContext];\n        \n        self.imageWidth = image.size.width;\n        self.imageHeight = image.size.height;\n        \n        NSDictionary* options = [NSDictionary dictionaryWithObjectsAndKeys:@(1), GLKTextureLoaderOriginBottomLeft, nil];\n        GLKTextureInfo* textureInfo = [GLKTextureLoader textureWithCGImage:image.CGImage options:options error:nil];\n        //着色器\n        self.mEffect = [[GLKBaseEffect alloc] init];\n        self.mEffect.texture2d0.enabled = GL_TRUE;\n        self.mEffect.texture2d0.name = textureInfo.name;\n        \n    }\n    return self;\n}\n\n- (GLKView *)glkView\n{\n    \n    if(!_glkView)\n    {\n        _glkView = [[GLKView alloc]initWithFrame:self.bounds context:self.mContext];\n        _glkView.delegate = self;\n        [self addSubview:_glkView];\n        \n    }\n    \n    return _glkView;\n}\n\n\n- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect\n{\n    \n    glClearColor(1.f, 1.f, 1.0f, 1.0f);\n    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);\n    NSArray * array;\n    \n    switch (_type) {\n        case DDGLShapeViewType_Vertical:\n        {\n            array = [self verticalConfigVertex];\n            _vertexArray = array;\n        }\n            break;\n        case DDGLShapeViewType_Horizontal:\n        {\n            array = [self horizontalConfigVertex];\n            _vertexArray = array;\n        }\n            break;\n        default:\n            break;\n    }\n        \n    for (int i=0; i!=array.count; i++) {\n        NSNumber *value = array[i];\n        squareVertexData[i] = value.floatValue;\n    }\n    \n    GLuint buffer;\n    glGenBuffers(1, &buffer);\n    glBindBuffer(GL_ARRAY_BUFFER, buffer);\n    glBufferData(GL_ARRAY_BUFFER, sizeof(squareVertexData), squareVertexData, GL_STATIC_DRAW);\n    glEnableVertexAttribArray(GLKVertexAttribPosition);\n    glVertexAttribPointer(GLKVertexAttribPosition, 3, GL_FLOAT, GL_FALSE, sizeof(GLfloat) * 5, (GLfloat *)NULL + 0);\n    glEnableVertexAttribArray(GLKVertexAttribTexCoord0);\n    glVertexAttribPointer(GLKVertexAttribTexCoord0, 2, GL_FLOAT, GL_FALSE, sizeof(GLfloat) * 5, (GLfloat *)NULL + 3);\n    [self.mEffect prepareToDraw];\n    glDrawArrays(GL_TRIANGLE_STRIP, 0, 8);\n}\n\n#pragma mark 配置竖直方向上面的顶点数据\n- 
(NSArray *)verticalConfigVertex\n{\n    \n    float imageRatio = (float)self.imageWidth/self.imageHeight;\n    float screenRatio = (float)self.frame.size.width/self.frame.size.height;\n    float xfactor=1.0;\n    float yfactor=1.0;\n    \n    \n    float tempValue = self.changValue;\n    //负值\n    float xMinus = -xfactor;\n    float yMinus = -yfactor;\n    \n    //正值\n    float xPlus = xfactor;\n    float yPlus = yfactor;\n    \n    \n    float x1,x2,x3,x4,x5,x6,x7,x8 = 0.0;\n    float y1,y2,y3,y4,y5,y6,y7,y8 = 0.0;\n    float tx1,tx2,tx3,tx4,tx5,tx6,tx7,tx8 = 0.0;\n    float ty1,ty2,ty3,ty4,ty5,ty6,ty7,ty8 = 0.0;\n    \n    \n    if (imageRatio > screenRatio) {\n        \n        //宽顶到边\n        yMinus = xMinus*screenRatio/(_imageWidth/(_imageHeight+tempValue*kVertical_h));\n        yPlus = xPlus*screenRatio/(_imageWidth/(_imageHeight+tempValue*kVertical_h));\n        \n        //原来的比例\n        float originY = xPlus*screenRatio/imageRatio;\n        \n        //高大于宽\n        x1 = xMinus;\n        y1 = yMinus;\n        tx1 = 0;\n        ty1 = 0;\n        \n        x2 = xPlus;\n        y2 = yMinus;\n        tx2 = 1;\n        ty2 = 0;\n        \n        \n        /** 极限值的判断处理 */\n        if ((yMinus<-1.0000001 || yMinus>-0.0000001) || (yPlus>1.0000001 || yPlus<.0000001)) {\n            \n            //负值\n            yMinus = -yfactor;\n            //正值\n            yPlus = yfactor;\n            \n            xMinus = yMinus*_imageWidth/(_imageHeight+tempValue*kVertical_h)/screenRatio;\n            xPlus = xPlus*_imageWidth/(_imageHeight+tempValue*kVertical_h)/screenRatio;\n            \n            //原来的比例\n            float originX = yMinus*imageRatio/screenRatio;\n            \n            x1 = xMinus;\n            y1 = yMinus;\n            tx1 = 0;\n            ty1 = 0;\n            \n            x2 = xPlus;\n            y2 = yMinus;\n            tx2 = 1;\n            ty2 = 0;\n            \n            //原来的高\n            CGFloat h = ABS(1-2*_valueRange.max + 1) *xMinus/originX;\n            \n            x3 = xMinus;\n            y3 = -(1.0-h);\n            tx3 = 0;\n            ty3 = (1-_valueRange.max);\n            \n            x4 = xPlus;\n            y4 = -(1.0-h);\n            tx4 = 1;\n            ty4 = (1-_valueRange.max);\n            \n            \n            x5 = xMinus;\n            y5 = (1-2*_valueRange.min*xMinus/originX);\n            tx5 = 0;\n            ty5 = (1-_valueRange.min);\n            \n            \n            x6 = xPlus;\n            y6 = (1-2*_valueRange.min*xMinus/originX);\n            tx6 = 1;\n            ty6 = (1-_valueRange.min);\n            \n            x7 = xMinus;\n            y7 = yPlus;\n            tx7 = 0;\n            ty7 = 1;\n            \n            x8 = xPlus;\n            y8 = yPlus;\n            tx8 = 1;\n            ty8 = 1;\n            \n        }\n        else\n        {\n            //形变\n            CGFloat h = yPlus;\n            CGFloat value = (h - originY);\n            \n            x3 = xMinus;\n            y3 = (1-2*_valueRange.max)*originY-value;\n            tx3 = 0;\n            ty3 = (1-_valueRange.max);\n            \n            x4 = xPlus;\n            y4 = (1-2*_valueRange.max)*originY-value;\n            tx4 = 1;\n            ty4 = (1-_valueRange.max);\n            \n            x5 = xMinus;\n            y5 =  (1-2*_valueRange.min)*originY+value;\n            tx5 = 0;\n            ty5 = (1-_valueRange.min);\n            \n            x6 = xPlus;\n            y6 = (1-2*_valueRange.min)*originY+value;\n          
  tx6 = 1;\n            ty6 = (1-_valueRange.min);\n            \n            x7 = xMinus;\n            y7 = yPlus;\n            tx7 = 0;\n            ty7 = 1;\n            \n            x8 = xPlus;\n            y8 = yPlus;\n            tx8 = 1;\n            ty8 = 1;\n            \n            \n        }\n        \n        \n    }\n    else\n    {\n        \n        //高顶到边\n        xMinus = yMinus*_imageWidth/(_imageHeight+tempValue*kVertical_h)/screenRatio;\n        xPlus = xPlus*_imageWidth/(_imageHeight+tempValue*kVertical_h)/screenRatio;\n        //原来的比例\n        float originX = yMinus*imageRatio/screenRatio;\n        \n        x1 = xMinus;\n        y1 = yMinus;\n        tx1 = 0;\n        ty1 = 0;\n        \n        x2 = xPlus;\n        y2 = yMinus;\n        tx2 = 1;\n        ty2 = 0;\n        \n        //原来的高\n        CGFloat h = ABS(1-2*_valueRange.max + 1) *xMinus/originX;\n        \n        x3 = xMinus;\n        y3 = -(1.0-h);\n        tx3 = 0;\n        ty3 = (1-_valueRange.max);\n        \n        \n        x4 = xPlus;\n        y4 = -(1.0-h);\n        tx4 = 1;\n        ty4 = (1-_valueRange.max);\n        \n        \n        x5 = xMinus;\n        y5 = (1-2*_valueRange.min*xMinus/originX);\n        tx5 = 0;\n        ty5 = (1-_valueRange.min);\n        \n        \n        x6 = xPlus;\n        y6 = (1-2*_valueRange.min*xMinus/originX);\n        tx6 = 1;\n        ty6 = (1-_valueRange.min);\n        \n        x7 = xMinus;\n        y7 = yPlus;\n        tx7 = 0;\n        ty7 = 1;\n        \n        x8 = xPlus;\n        y8 = yPlus;\n        tx8 = 1;\n        ty8 = 1;\n        \n        \n    }\n    \n    NSArray *array = @[\n                       //（x1,y1）\n                       @(x1), @(y1), @(0),  @(tx1), @(ty1),\n                       //（x2,y2）\n                       @(x2), @(y2), @(0),  @(tx2), @(ty2),\n                       //（x3,y3）\n                       @(x3), @(y3), @(0),  @(tx3), @(ty3),\n                       //（x4,y4）\n                       @(x4), @(y4), @(0),  @(tx4), @(ty4),\n                       //（x5,y5）\n                       @(x5), @(y5), @(0),  @(tx5), @(ty5),\n                       //（x6,y6）\n                       @(x6), @(y6), @(0),  @(tx6), @(ty6),\n                       //（x7,y7）\n                       @(x7), @(y7), @(0),  @(tx7), @(ty7),\n                       //（x8,y8）\n                       @(x8), @(y8), @(0),  @(tx8), @(ty8),\n                       ];\n    \n    return array;\n}\n\n#pragma mark 配置水平方向上面的顶点数据\n- (NSArray *)horizontalConfigVertex\n{\n    float imageRatio = (float)self.imageWidth/self.imageHeight;\n    float screenRatio = (float)self.frame.size.width/self.frame.size.height;\n    float xfactor=1.0;\n    float yfactor=1.0;\n    \n    float tempValue = self.changValue;\n    //负值\n    float xMinus = -xfactor;\n    float yMinus = -yfactor;\n    \n    //正值\n    float xPlus = xfactor;\n    float yPlus = yfactor;\n    \n    float x1,x2,x3,x4,x5,x6,x7,x8 = 0.0;\n    float y1,y2,y3,y4,y5,y6,y7,y8 = 0.0;\n    float tx1,tx2,tx3,tx4,tx5,tx6,tx7,tx8 = 0.0;\n    float ty1,ty2,ty3,ty4,ty5,ty6,ty7,ty8 = 0.0;\n    \n    //压缩最大的值域区间的80%\n    float compressMaxValue = (_valueRange.max - _valueRange.min)*_imageWidth*0.8;\n    \n    if (imageRatio > screenRatio) {\n        \n        //宽顶到边\n        yMinus = xMinus*screenRatio/imageRatio;\n        yPlus = xPlus*screenRatio/imageRatio;\n        \n        //        NSLog(@\"宽顶到边\");\n        \n        //改变的比例\n        float neW_xMinus = 
yMinus*((self.imageWidth-compressMaxValue*tempValue)/self.imageHeight)/screenRatio;\n        //改变的值\n        float w = ABS(xMinus - neW_xMinus);\n        \n        x1 = xMinus+w;\n        y1 = yMinus;\n        tx1 = 0;\n        ty1 = 0;\n        \n        x2 = xMinus+w;\n        y2 = yPlus;\n        tx2 = 0;\n        ty2 = 1;\n        \n        x3 = (1-2*_valueRange.min)/xMinus+w;\n        y3 = yMinus;\n        tx3 = _valueRange.min;\n        ty3 = 0;\n        \n        x4 = (1-2*_valueRange.min)/xMinus+w;\n        y4 = yPlus;\n        tx4 = _valueRange.min;\n        ty4 = 1;\n        \n        x5 =  (1-2*_valueRange.max)/xMinus-w;\n        y5 = yMinus;\n        tx5 = _valueRange.max;\n        ty5 = 0;\n        \n        \n        x6 = (1-2*_valueRange.max)/xMinus-w;\n        y6 = yPlus;\n        tx6 = _valueRange.max;\n        ty6 = 1;\n        \n        x7 = xPlus-w;\n        y7 = yMinus;\n        tx7 = 1;\n        ty7 = 0;\n        \n        x8 = xPlus-w;\n        y8 = yPlus;\n        tx8 = 1;\n        ty8 = 1;\n    }\n    else\n    {\n        \n        \n        xMinus = yMinus*((_imageWidth+tempValue*compressMaxValue*-1)/_imageHeight)/screenRatio;\n        xPlus = yPlus*((_imageWidth+tempValue*compressMaxValue*-1)/_imageHeight)/screenRatio;\n        //        NSLog(@\"高顶到边\");\n        //原来的比例\n        float originX = yPlus*imageRatio/screenRatio;\n        float w = originX - xPlus;\n        //高大于宽\n        x1 = xMinus;\n        y1 = yMinus;\n        tx1 = 0;\n        ty1 = 0;\n        \n        x2 = xMinus;\n        y2 = yPlus;\n        tx2 = 0;\n        ty2 = 1;\n        \n        x3 = -(1-2*_valueRange.min)*originX+w;\n        y3 = yMinus;\n        tx3 = _valueRange.min;\n        ty3 = 0;\n        \n        x4 = -(1-2*_valueRange.min)*originX+w;\n        y4 = yPlus;\n        tx4 = _valueRange.min;\n        ty4 = 1;\n        \n        x5 = -(1-2*_valueRange.max)*originX-w;\n        y5 = yMinus;\n        tx5 = _valueRange.max;\n        ty5 = 0;\n        \n        x6 = -(1-2*_valueRange.max)*originX-w;\n        y6 = yPlus;\n        tx6 = _valueRange.max;\n        ty6 = 1;\n        \n        x7 = xPlus;\n        y7 = yMinus;\n        tx7 = 1;\n        ty7 = 0;\n        \n        x8 = xPlus;\n        y8 = yPlus;\n        tx8 = 1;\n        ty8 = 1;\n    }\n    \n    NSArray *array = @[\n                       //（x1,y1）\n                       @(x1), @(y1), @(0),  @(tx1), @(ty1),\n                       //（x2,y2）\n                       @(x2), @(y2), @(0),  @(tx2), @(ty2),\n                       //（x3,y3）\n                       @(x3), @(y3), @(0),  @(tx3), @(ty3),\n                       //（x4,y4）\n                       @(x4), @(y4), @(0),  @(tx4), @(ty4),\n                       //（x5,y5）\n                       @(x5), @(y5), @(0),  @(tx5), @(ty5),\n                       //（x6,y6）\n                       @(x6), @(y6), @(0),  @(tx6), @(ty6),\n                       //（x7,y7）\n                       @(x7), @(y7), @(0),  @(tx7), @(ty7),\n                       //（x8,y8）\n                       @(x8), @(y8), @(0),  @(tx8), @(ty8),\n                       ];\n    return array;\n    \n}\n\n#pragma mark set\n- (void)setValueRange:(DDGLNormValueRange)valueRange\n{\n    _valueRange = valueRange;\n}\n\n#pragma mark func\n\n- (void)changeValue:(float)value\n{\n    self.changValue = value;\n    [self.glkView display];\n}\n\n- (void)getOriginImageVertexConfig\n{\n    \n    switch (_type) {\n        case DDGLShapeViewType_Vertical:\n        {\n            \n            NSNumber *yValue = 
_vertexArray[1];\n            CGFloat y = ABS(yValue.floatValue);\n            \n            NSMutableArray *muarray = [NSMutableArray arrayWithArray:_vertexArray];\n            for (int i=0; i!=_vertexArray.count; i++)\n            {\n                //处理x坐标\n                if (i%5==0)\n                {\n                    if (i%10==0)\n                    {\n                        muarray[i] = @(-1.0);\n                    }\n                    else\n                    {\n                        muarray[i] = @(1.0);\n                    }\n                }\n\n                if (i%5==1) {\n                    NSNumber *yV = _vertexArray[i];\n                    muarray[i] = @(yV.floatValue/y);\n                }\n            }\n            \n            float value=kVertical_h*self.changValue;\n            if (self.delegate && [self.delegate respondsToSelector:@selector(strectchViewGetVertexArray:changeValue:)])\n            {\n                [self.delegate strectchViewGetVertexArray:muarray changeValue:value];\n            }\n            \n            \n        }\n            break;\n        case DDGLShapeViewType_Horizontal:\n        {\n            NSMutableArray *muarray = [NSMutableArray arrayWithArray:_vertexArray];\n            \n            NSNumber *xValue = _vertexArray[0];\n            CGFloat x = ABS(xValue.floatValue);\n            \n            NSNumber *yValue = _vertexArray[1];\n            CGFloat y = ABS(yValue.floatValue);\n            \n            for (int i=0; i!=_vertexArray.count; i++)\n            {\n\n                if (i%5==0) {\n                    NSNumber *xV = _vertexArray[i];\n                    muarray[i] = @(xV.floatValue/x);\n                }\n\n                if (i%5==1) {\n                    NSNumber *yV = _vertexArray[i];\n                    muarray[i] = @(yV.floatValue/y);\n                }\n\n            }\n\n            float compressMaxValue = (_valueRange.max - _valueRange.min)*_imageWidth*0.8;\n            float value=compressMaxValue*self.changValue;\n            if (self.delegate && [self.delegate respondsToSelector:@selector(strectchViewGetVertexArray:changeValue:)])\n            {\n                [self.delegate strectchViewGetVertexArray:muarray changeValue:value];\n            }\n        }\n            break;\n        default:\n            break;\n    }\n    \n}\n\n- (void)dealloc\n{\n    if (self.mContext) {\n        _mContext = nil;\n        _mEffect = nil;\n        [EAGLContext setCurrentContext:nil];\n    }\n    NSLog(@\"DDGLShapeView---dealloc\");\n}\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/ShapeChange/Rise/View/DDGLShapeView/DDGLShapingView.h",
    "content": "//\n//  DDGLShapingView.h\n//  OpenGLStretchDemo\n//\n//  Created by 刘海东 on 2018/6/12.\n//  Copyright © 2018年 刘海东. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import \"DDGLShapeView.h\"\n\n\n@protocol DDGLShapingViewDelegate <NSObject>\n@optional\n/** 选择区域拖拽中 */\n- (void)shapingViewSwiping;\n- (void)shapingViewGetVertexArray:(NSArray *)vertexArray textureCoordinateArray:(NSArray *)textureCoordinateArray changeValue:(float)changeValue type:(DDGLShapeViewType)type;\n- (void)shapingViewSwipEndMaxValue:(float)max minValue:(float)min;\n\n@end\n\n\n@interface DDGLShapingView : UIView\n\n- (instancetype)initWithFrame:(CGRect)frame type:(DDGLShapeViewType)type image:(UIImage *)image;\n@property (nonatomic, weak) id<DDGLShapingViewDelegate> delegate;\n\n- (DDGLNormValueRange)getRange;\n\n/** 改变值 */\n- (void)changeValue:(float)value;\n\n/** 改变选择的区域 */\n- (void)changeRange:(DDGLNormValueRange)range;\n\n/** 获取配置 */\n- (void)getOriginImageVertexConfig;\n\n/** 隐藏选择区域的UI */\n- (void)hideStrectchSelView;\n\n/** 显示选择区域的UI */\n- (void)showStrectchSelView;\n\n- (UIImage *)getProcessImage;\n\n\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/ShapeChange/Rise/View/DDGLShapeView/DDGLShapingView.m",
    "content": "//\n//  DDGLShapingView.m\n//  OpenGLStretchDemo\n//\n//  Created by 刘海东 on 2018/6/12.\n//  Copyright © 2018年 刘海东. All rights reserved.\n//\n\n#import \"DDGLShapingView.h\"\n#import \"DDGLShapeSelView.h\"\n#import <GPURenderKit/GPURenderKit.h>\n\n\n\n@interface DDGLShapingView ()<DDGLShapeSelViewDelegate>\n\n/** 选择区域 */\n@property (nonatomic, strong) DDGLShapeSelView *strectchSelView;\n\n/** 图像宽高 */\n@property (nonatomic, assign) float imageWidth;\n@property (nonatomic, assign) float imageHeight;\n/** 类型 */\n@property (nonatomic, assign) DDGLShapeViewType  type;\n@property (nonatomic, assign) DDGLNormValueRange range;\n\n/** 滑动中 */\n@property (nonatomic, assign) BOOL swipingBool;\n\n@property (nonatomic, strong) GPUImageView     *glPreview;\n@property (nonatomic, strong) GLImageShapeFilter *glShapeFilter;\n@property (nonatomic, strong) GPUImagePicture *glImagePicture;\n@property (nonatomic, copy) UIImage *image;\n\n\n@end\n\n@implementation DDGLShapingView\n\n- (instancetype)initWithFrame:(CGRect)frame type:(DDGLShapeViewType)type image:(UIImage *)image\n{\n    self = [super initWithFrame:frame];\n    if (self) {\n        \n        _type = type;\n        self.userInteractionEnabled = YES;\n        //默认的值\n        DDGLNormValueRange range = {0,0};\n        range.max = 75/100.0;\n        range.min = 25/100.0;\n        _range = range;\n        \n        _imageWidth = image.size.width;\n        _imageHeight = image.size.height;\n        \n        _image = image;\n        \n        self.glImagePicture = [[GPUImagePicture alloc]initWithImage:image];\n        [self.glImagePicture addTarget:self.glShapeFilter];\n        [self.glShapeFilter addTarget:self.glPreview];\n        [self.glImagePicture processImage];\n        [self strectchSelView];\n        \n    \n    }\n    return self;\n}\n\n\n\n- (GLImageShapeFilter *)glShapeFilter\n{\n    \n    if (!_glShapeFilter) {\n        _glShapeFilter = [[GLImageShapeFilter alloc] init];\n        _glShapeFilter.screenRatio = self.frame.size.width/self.frame.size.height;\n        _glShapeFilter.imageWidth = _imageWidth;\n        _glShapeFilter.imageHeight = _imageHeight;\n        _glShapeFilter.minValue = _range.min;\n        _glShapeFilter.maxValue = _range.max;\n        _glShapeFilter.type = _type;\n        [_glShapeFilter changeValue:0.0];\n        [_glShapeFilter forceProcessingAtSize:CGSizeMake(self.frame.size.width*[UIScreen mainScreen].scale, self.frame.size.height*[UIScreen mainScreen].scale)];\n    }\n    return _glShapeFilter;\n}\n\n\n- (GPUImageView *)glPreview\n{\n    if (!_glPreview) {\n        _glPreview = [[GPUImageView alloc] initWithFrame:self.bounds];\n        [_glPreview setBackgroundColorRed:0.0 green:0.0 blue:0 alpha:1.0];\n        _glPreview.backgroundColor = [UIColor blackColor];\n        [self addSubview:_glPreview];\n    }\n    return _glPreview;\n}\n\n\n- (DDGLShapeSelView *)strectchSelView\n{\n    if (!_strectchSelView)\n    {\n        float imageRatio = (float)_imageWidth/_imageHeight;\n        float screenRatio = (float)self.frame.size.width/self.frame.size.height;\n        float originX;\n        float originY;\n        \n        CGRect rect;\n        \n        if (_type == DDGLShapeViewType_Vertical) {\n            if (imageRatio > screenRatio) {\n                originY = 1.0*screenRatio/imageRatio;\n            }else\n            {\n                originY = 1.0;\n            }\n            rect = CGRectMake(0, (1-originY)/2.0*self.frame.size.height, self.frame.size.width, self.frame.size.height*originY);\n      
  }\n        else\n        {\n            \n            if (imageRatio > screenRatio) {\n                originX = 1.0;\n                originY = 1.0;\n            }else\n            {\n                originX = 1.0*imageRatio/screenRatio;\n                originY = 1.0;\n            }\n            rect = CGRectMake((1-originX)/2.0*self.frame.size.width, (1-originY)/2.0*self.frame.size.height, self.frame.size.width*originX, self.frame.size.height*originY);\n        }\n        \n        CGSize superSize = self.glPreview.frame.size;\n        CGSize fitSize = [self fitSizeWithImage:self.image superSize:superSize];\n        CGRect subFrame = CGRectMake((superSize.width - fitSize.width) / 2.0, (superSize.height - fitSize.height) / 2.0, fitSize.width, fitSize.height);\n\n        \n        _strectchSelView = [[DDGLShapeSelView alloc]initWithFrame:rect type:_type subFrame:subFrame];\n        _strectchSelView.valueRange = _range;\n        _strectchSelView.delegate = self;\n        [self addSubview:_strectchSelView];\n        \n    }\n    return _strectchSelView;\n}\n\n- (CGSize)fitSizeWithImage:(UIImage *)image superSize:(CGSize)superSize\n{\n    float imageRatio = image.size.width / image.size.height;\n    float superRatio = superSize.width / superSize.height;\n    CGSize size = superSize;\n    \n    if (superRatio > imageRatio)\n    {\n        size.height = superSize.height;\n        size.width = superSize.height * imageRatio;\n    }\n    else\n    {\n        size.width = superSize.width;\n        size.height = superSize.width / imageRatio;\n    }\n    \n    return size;\n}\n\n\n\n- (void)changeValue:(float)value\n{\n    [self.glShapeFilter changeValue:value];\n    [self.glImagePicture processImage];\n    \n    \n}\n\n- (void)changeRange:(DDGLNormValueRange)range\n{\n    _range = range;\n    _strectchSelView.valueRange = _range;\n    self.glShapeFilter.minValue = _range.min;\n    self.glShapeFilter.maxValue = _range.max;\n\n}\n\n\n- (DDGLNormValueRange)getRange\n{\n    return _range;\n}\n\n\n#pragma mark DDGLShapeSelViewDelegate\n- (void)strectchSelViewSwiping\n{\n    //滚动中\n    if (self.delegate && [self.delegate respondsToSelector:@selector(shapingViewSwiping)]) {\n        if (!self.swipingBool) {\n            [self.glShapeFilter changeValue:0];\n            [self.glImagePicture processImage];\n            [self.delegate shapingViewSwiping];\n        }\n        self.swipingBool = YES;\n    }\n}\n\n#pragma mark 重写点击区域\n-(UIView *)hitTest:(CGPoint)point withEvent:(UIEvent *)event {\n    UIView *view = [super hitTest:point withEvent:event];\n    if (view == nil)\n    {\n        for (UIView*subView in self.subviews)\n        {\n            if ([subView isKindOfClass:[DDGLShapeSelView class]])\n            {\n                CGPoint lowLeftPoint = [subView convertPoint:point fromView:self];\n                view = [subView hitTest:lowLeftPoint withEvent:event];\n            }\n        }\n    }\n    return view;\n}\n\n- (void)strectchSelViewSwipEndMaxValue:(float)max minValue:(float)min\n{\n    _range.max = max;\n    _range.min = min;\n    self.swipingBool = NO;\n    self.glShapeFilter.minValue = _range.min;\n    self.glShapeFilter.maxValue = _range.max;\n    if (self.delegate && [self.delegate respondsToSelector:@selector(shapingViewSwipEndMaxValue:minValue:)]) {\n        [self.delegate shapingViewSwipEndMaxValue:max minValue:min];\n    }\n}\n\n\n- (void)getOriginImageVertexConfig\n{\n\n    __weak typeof(self) weakSelf = self;\n    \n    [self.glShapeFilter 
getVerticesAndTextureCoordinatesHandle:^(NSArray *squareVertexes, NSArray *textureCoordinates, float changeValue,NSInteger type){\n        \n        if (weakSelf.delegate && [weakSelf.delegate respondsToSelector:@selector(shapingViewGetVertexArray:textureCoordinateArray:changeValue:type:)])\n        {\n            [weakSelf.delegate shapingViewGetVertexArray:squareVertexes textureCoordinateArray:textureCoordinates changeValue:changeValue type:type];\n        }\n        \n    }];\n\n}\n\n/** 隐藏选择区域的UI */\n- (void)hideStrectchSelView\n{\n    self.strectchSelView.hidden = YES;\n}\n\n/** 显示选择区域的UI */\n- (void)showStrectchSelView\n{\n    self.strectchSelView.hidden = NO;\n}\n\n- (UIImage *)getProcessImage\n{\n    [self.glImagePicture processImage];\n    [self.glShapeFilter useNextFrameForImageCapture];\n    return [self.glShapeFilter imageFromCurrentFramebuffer];\n}\n\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/ShapeChange/Rise/View/Slider/ATSliderView.h",
    "content": "//\n//  ATSliderView.h\n//  Artist\n//\n//  Created by 刘海东 on 2018/6/30.\n//  Copyright © 2018年 wecut. All rights reserved.\n//\n\n#import \"WeSliderView.h\"\n\n@interface ATSliderView : WeSliderView\n\n@property (nonatomic, copy) NSString *topLabValue;\n\n/** 默认显示 */\n@property (nonatomic, assign) BOOL hideTopLab;\n\n\n/** 自动显示 默认不显示 */\n@property (nonatomic, assign)IBInspectable BOOL autoTopLab;\n/** 滑块的宽度大小 */\n@property (nonatomic, assign)IBInspectable  float thumbSize;\n\n\n-(void)configBigFollowView;\n\n-(void)configSmallFollowView;\n\n/**滑动球的默认大小：26 */\n-(void)configDefaultThumbWidth;\n\n/**滑动球的默认大小：20 */\n-(void)configSmallThumbWidth;\n\n\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/ShapeChange/Rise/View/Slider/ATSliderView.m",
    "content": "//\n//  ATSliderView.m\n//  Artist\n//\n//  Created by 刘海东 on 2018/6/30.\n//  Copyright © 2018年 wecut. All rights reserved.\n//\n\n#import \"ATSliderView.h\"\n\n@interface ATSliderView ()\n@property (nonatomic, strong) UILabel *topLab;\n@property (nonatomic, assign) float topLabW;\n\n@end\n\n@implementation ATSliderView\n\n- (void)awakeFromNib\n{\n    [super awakeFromNib];\n    [self configBigFollowView];\n}\n\n\n-(void)configSmallFollowView\n{\n    self.topLabW = 40;\n    [self configUI];\n    self.thumbWidth = 20;\n}\n\n\n\n- (void)setThumbSize:(float)thumbSize\n{\n    _thumbSize = thumbSize;\n    self.thumbWidth = _thumbSize;\n}\n\n-(void)configBigFollowView\n{\n    self.topLabW = 50;\n    self.thumbWidth = 26;\n    [self configUI];\n}\n\n-(void)configDefaultThumbWidth {\n    self.thumbWidth = 26;\n}\n\n-(void)configSmallThumbWidth {\n    self.thumbWidth = 20;\n}\n\n- (void)configUI\n{\n\n    self.topLab.frame = CGRectMake(0, 0, self.topLabW, self.topLabW);\n\n    self.followView = self.topLab;\n\n    self.followViewIntervalY = 18;\n    self.minmimValue = 0.0;\n    self.maxmimValue = 1.0;\n    self.thumbTintColor = [UIColor whiteColor];\n    self.trackTintColor = [UIColor colorWithRed:1 green:1 blue:1 alpha:0.3];\n    self.trackHeight = 2.0;\n    self.progressTintColor = [UIColor whiteColor];\n    WEAKSELF\n    self.touchBeginHandler = ^(float value)\n    {\n        weakSelf.progress = value;\n    };\n    if (_thumbSize) {\n        self.thumbWidth = _thumbSize;\n    }\n\n}\n\n\n- (UILabel *)topLab\n{\n    if (!_topLab) {\n        \n        _topLab = [[UILabel alloc]initWithFrame:CGRectMake(0, 0, self.topLabW, self.topLabW)];\n        _topLab.textAlignment = NSTextAlignmentCenter;\n        _topLab.font = [UIFont boldSystemFontOfSize:16.f];\n        _topLab.textColor = [UIColor colorWithRed:0/255.0 green:0/255.0 blue:0/255.0 alpha:1];\n        _topLab.layer.cornerRadius = self.topLabW/2.0;\n        _topLab.layer.masksToBounds = YES;\n        _topLab.backgroundColor = [UIColor colorWithRed:255/255.0 green:255/255.0 blue:255/255.0 alpha:0.95];\n    }\n    return _topLab;\n}\n\n- (void)setTopLabValue:(NSString *)topLabValue\n{\n    if (![self.topLab.text isEqualToString:topLabValue] && ([topLabValue isEqualToString:@\"-100\"] || [topLabValue isEqualToString:@\"0\"] || [topLabValue isEqualToString:@\"100\"])) {\n    }\n    self.topLab.text = topLabValue;\n}\n\n- (void)setHideTopLab:(BOOL)hideTopLab\n{\n    _hideTopLab = hideTopLab;\n    self.topLab.hidden = hideTopLab;\n}\n\n- (void)setProgress:(float)progress\n{\n    [super setProgress:progress];\n    if (_autoTopLab) {\n        self.topLabValue = [NSString stringWithFormat:@\"%.0f\",MIN(progress*100, 100)];\n    }\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/ShapeChange/Rise/View/Slider/WeSliderView.h",
    "content": "//\n//  WeSliderView.h\n//  LWSliderViewDemo\n//\n//  Created by Leo on 2018/3/14.\n//  Copyright © 2018年 leo. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n\n@interface WeSliderView : UIControl\n\n/** 最小值 defult 0.0 */\n@property (nonatomic, assign) IBInspectable float     minmimValue;\n/** 最大值 defult 1.0 */\n@property (nonatomic, assign) IBInspectable float     maxmimValue;\n/** defult 0.0 */\n@property (nonatomic, assign) IBInspectable float     value;\n/** 滑块颜色 */\n@property (nonatomic, strong)  UIColor   *thumbTintColor;\n/** 滑块图片 */\n@property (nonatomic, strong)  UIImage   *thumbImage;\n/** 轨迹颜色 */\n@property (nonatomic, strong)  UIColor   *trackTintColor;\n/** 进度条颜色 */\n@property (nonatomic, strong)  UIColor   *progressTintColor;\n/** 轨迹高度 defult 4.0 */\n@property (nonatomic, assign) IBInspectable float     trackHeight;\n/** 隐藏滑块 */\n@property (nonatomic, assign) IBInspectable BOOL      hiddenThumb;\n/** 滑块上部跟随视图 */\n@property (nonatomic, strong) UIView    *followView;\n/** 滑块上部跟随视图 和滑块的间隙 */\n@property (nonatomic, assign) float followViewIntervalY;\n/** 滑块的宽度大小 */\n@property (nonatomic, assign)  float thumbWidth;\n\n@property (nonatomic, strong) UIView *trackView;\n\n/** 是否在value = 0.0处停顿下 defult YES 只在最小最大值异号时才起作用 */\n@property (nonatomic, assign IBInspectable) BOOL      needInterruptAtZero;\n@property (nonatomic, assign) float progress;\n\n/** 回调Block */\n@property (nonatomic, copy) void (^valueDidChangeHandler)(float value);\n@property (nonatomic, copy) void (^touchBeginHandler)(float value);\n@property (nonatomic, copy) void (^touchEndHandler)(float value);\n\n/** 设置轨道边框 */\n- (void)setTrackBorderWidth:(float)width color:(UIColor *)color;\n/** 设置滑块边框 */\n- (void)setThumbBorderWidth:(float)width color:(UIColor *)color;\n\n- (void)setValue:(float)value animation:(BOOL)animation;\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/ShapeChange/Rise/View/Slider/WeSliderView.m",
    "content": "//\n//  WeSliderView.m\n//  LWSliderViewDemo\n//\n//  Created by Leo on 2018/3/14.\n//  Copyright © 2018年 leo. All rights reserved.\n//\n\n#import \"WeSliderView.h\"\n\n@interface WeSliderView () <UIGestureRecognizerDelegate>\n\n\n@property (nonatomic, strong) UIView *progressView;\n@property (nonatomic, strong) UIImageView *thumbView;\n@property (nonatomic, assign) float ratio;\n\n@end\n\n@implementation WeSliderView\n{\n    float currentRatio;\n    float threshold;\n}\n\n- (instancetype)initWithFrame:(CGRect)frame\n{\n    self = [super initWithFrame:frame];\n    if (self)\n    {\n        [self createSubviews];\n        [self setup];\n    }\n    return self;\n}\n\n- (instancetype)initWithCoder:(NSCoder *)aDecoder\n{\n    self = [super initWithCoder:aDecoder];\n    if (self)\n    {\n        [self createSubviews];\n        [self setup];\n    }\n    return self;\n}\n\n- (void)setup\n{\n    threshold   = 0.1;\n    _thumbWidth  = 20;\n    \n    _minmimValue = -1.0;\n    _maxmimValue = 1.0;\n    \n    _progress    = -1.0;\n    _trackHeight = 4.0;\n    self.ratio   = 0.0;\n    _followViewIntervalY = 0.0;\n    \n    _needInterruptAtZero = YES;\n    \n    self.thumbTintColor = [UIColor colorWithRed:255/255.0 green:108/255.0 blue:156/255.0 alpha:1.0];\n    self.trackTintColor = [UIColor lightGrayColor];\n    self.progressTintColor = [UIColor colorWithRed:255/255.0 green:108/255.0 blue:156/255.0 alpha:1.0];\n}\n\n- (void)createSubviews\n{\n    self.trackView = [[UIView alloc] init];\n    self.trackView.clipsToBounds = YES;\n    [self addSubview:self.trackView];\n    \n    self.progressView = [[UIView alloc] init];\n    [self.trackView addSubview:self.progressView];\n    \n    self.thumbView = [[UIImageView alloc] init];\n    self.thumbView.layer.cornerRadius = _thumbWidth / 2.0;\n    [self addSubview:self.thumbView];\n    \n    UIPanGestureRecognizer *panGesture = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(panGestureActionHandler:)];\n    panGesture.delegate = self;\n    [self addGestureRecognizer:panGesture];\n}\n\n- (CGPoint)centerPoint\n{\n    return CGPointMake(self.bounds.size.width / 2.0, self.bounds.size.height / 2.0);\n}\n\n- (void)layoutSubviews\n{\n    CGPoint center = [self centerPoint];\n    self.trackView.frame = CGRectMake(0, 0, self.bounds.size.width - _thumbWidth, _trackHeight);\n    self.trackView.center = center;\n    self.trackView.layer.cornerRadius = _trackHeight / 2.0;\n    \n    self.progressView.frame = self.trackView.bounds;\n    self.thumbView.frame = CGRectMake(0, 0, _thumbWidth, _thumbWidth);\n    self.thumbView.layer.cornerRadius = _thumbWidth / 2.0;\n    \n    [self updateProgressFrame];\n}\n\n- (void)updateProgressFrame\n{\n    float x = [self isOriginalPointCenter] ? 
self.trackView.bounds.size.width / 2.0 : 0.0;\n    float w = [self progressWidth];\n    self.progressView.frame = CGRectMake(x, 0, w * _progress, self.trackView.bounds.size.height);\n    self.thumbView.center = CGPointMake(self.trackView.frame.origin.x + x + w * _progress, [self centerPoint].y);\n    \n    if (self.followView)\n    {\n        self.followView.center = CGPointMake(self.thumbView.center.x, CGRectGetMinY(self.thumbView.frame) - self.followView.frame.size.height / 2.0 - _followViewIntervalY);\n    }\n}\n\n- (void)setThumbTintColor:(UIColor *)thumbTintColor\n{\n    _thumbTintColor = thumbTintColor;\n    self.thumbView.backgroundColor = thumbTintColor;\n}\n\n- (void)setTrackTintColor:(UIColor *)trackTintColor\n{\n    _trackTintColor = trackTintColor;\n    self.trackView.backgroundColor = trackTintColor;\n}\n\n- (void)setProgressTintColor:(UIColor *)progressTintColor\n{\n    _progressTintColor = progressTintColor;\n    self.progressView.backgroundColor = progressTintColor;\n}\n\n- (void)setThumbImage:(UIImage *)thumbImage\n{\n    _thumbImage = thumbImage;\n    [self.thumbView setImage:thumbImage];\n    \n    if (thumbImage)\n    {\n        self.thumbView.backgroundColor = [UIColor clearColor];\n        self.thumbView.layer.cornerRadius = 0.0;\n        _thumbWidth = thumbImage.size.width;\n        [self setNeedsLayout];\n    }\n}\n\n-(void)setThumbWidth:(float)thumbWidth {\n    _thumbWidth = thumbWidth;\n    [self setNeedsLayout];\n}\n\n- (void)setFollowView:(UIView *)followView\n{\n    if (_followView)\n    {\n        [_followView removeFromSuperview];\n    }\n    \n    _followView = followView;\n    followView.hidden = YES;\n    [self addSubview:_followView];\n}\n\n- (void)setTrackBorderWidth:(float)width color:(UIColor *)color\n{\n    self.trackView.layer.borderWidth = width;\n    self.trackView.layer.borderColor = color.CGColor;\n}\n\n- (void)setThumbBorderWidth:(float)width color:(UIColor *)color\n{\n    self.thumbView.layer.borderWidth = width;\n    self.thumbView.layer.borderColor = color.CGColor;\n}\n\n- (void)setTrackHeight:(float)trackHeight\n{\n    _trackHeight = trackHeight;\n    [self setNeedsLayout];\n}\n\n- (void)setHiddenThumb:(BOOL)hiddenThumb\n{\n    _hiddenThumb = hiddenThumb;\n    self.thumbView.hidden = hiddenThumb;\n}\n\n- (void)setMinmimValue:(float)minmimValue\n{\n    _minmimValue = minmimValue;\n    [self setValue:_value];\n}\n\n- (void)setFollowViewIntervalY:(float)followViewIntervalY\n{\n    _followViewIntervalY = followViewIntervalY;\n}\n\n- (void)setMaxmimValue:(float)maxmimValue\n{\n    _maxmimValue = maxmimValue;\n    [self setValue:_value];\n}\n\n- (void)setRatio:(float)ratio\n{\n    float minRatio = [self isOriginalPointCenter] ? -1.0 : threshold;\n    float currentThreshold = [self isOriginalPointCenter] && _needInterruptAtZero ? threshold : 0.0;\n    ratio = MAX(minRatio - threshold, MIN(1.0 + threshold, ratio));\n    _ratio = ratio;\n    \n    if (fabs(ratio) < currentThreshold)\n    {\n        self.progress = 0.0;\n    }\n    else\n    {\n        if (ratio > 0)\n        {\n            self.progress =  ratio - currentThreshold;\n        }\n        else\n        {\n            self.progress = ratio + currentThreshold;\n        }\n    }\n}\n\n- (void)setProgress:(float)progress\n{\n    float minProgress = [self isOriginalPointCenter] ? 
-1.0 : 0.0;\n    progress = MAX(minProgress, MIN(1.0, progress));\n    \n    if (_progress == progress)\n    {\n        return;\n    }\n    \n    _progress = progress;\n    [self updateProgressFrame];\n    \n    if (self.valueDidChangeHandler)\n    {\n        _value = progress >= 0 ? (_maxmimValue * progress) : (_minmimValue * -progress);\n        self.valueDidChangeHandler(_value);\n    }else{\n        _value = progress >= 0 ? (_maxmimValue * progress) : (_minmimValue * -progress);\n        [self sendActionsForControlEvents:UIControlEventValueChanged];\n    }\n}\n\n- (void)setValue:(float)value\n{\n    value = MAX(_minmimValue, MIN(_maxmimValue, value));\n    _value = value;\n    \n    if (_minmimValue != _maxmimValue)\n    {\n        if (_minmimValue >= 0)\n        {\n            _progress = (value - _minmimValue) / (_maxmimValue - _minmimValue);\n        }\n        else\n        {\n            _progress = value >= 0 ? value / _maxmimValue : -value / _minmimValue;\n        }\n        \n        float currentThreshold = [self isOriginalPointCenter] && _needInterruptAtZero ? threshold : 0.0;\n        \n        if (_progress >= 0)\n        {\n            _ratio = _progress + currentThreshold;\n        }\n        else\n        {\n            _ratio = _progress - currentThreshold;\n        }\n        \n        [self updateProgressFrame];\n    }\n}\n\n- (void)setValue:(float)value animation:(BOOL)animation\n{\n    if (animation)\n    {\n        [UIView animateWithDuration:0.35 animations:^{\n            self.value = value;\n        }];\n    }\n    else\n    {\n        self.value = value;\n    }\n}\n\n\n\n- (BOOL)isOriginalPointCenter\n{\n    return _minmimValue < 0 && _maxmimValue > 0;\n}\n\n- (float)progressWidth\n{\n    return [self isOriginalPointCenter] ? self.trackView.bounds.size.width / 2.0 : self.trackView.bounds.size.width;\n}\n\n- (void)panGestureActionHandler:(UIPanGestureRecognizer *)gesture\n{\n    if (gesture.state == UIGestureRecognizerStateBegan)\n    {\n        currentRatio  = self.ratio;\n    }\n    else if (gesture.state == UIGestureRecognizerStateChanged)\n    {\n        CGPoint translate = [gesture translationInView:gesture.view];\n        \n        if ([self isOriginalPointCenter])\n        {\n            self.ratio = currentRatio + translate.x / self.progressWidth * (1.0 + threshold);\n        }\n        else\n        {\n            self.ratio = currentRatio + translate.x / self.progressWidth;\n        }\n    }\n    else\n    {\n        [self doTouchEndAction];\n    }\n}\n\n- (void)doTouchBeginAction\n{\n    if (self.touchBeginHandler)\n    {\n        self.touchBeginHandler(_value);\n    }\n    \n    self.followView.hidden = NO;\n}\n\n- (void)doTouchEndAction\n{\n    [self sendActionsForControlEvents:UIControlEventTouchUpInside];\n\n    if (self.touchEndHandler)\n    {\n        self.touchEndHandler(_value);\n    }\n\n    self.followView.hidden = YES;\n}\n\n- (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event\n{\n    [super touchesBegan:touches withEvent:event];\n    [self doTouchBeginAction];\n}\n\n- (void)touchesEnded:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event\n{\n    [super touchesEnded:touches withEvent:event];\n    [self doTouchEndAction];\n}\n\n- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer shouldRecognizeSimultaneouslyWithGestureRecognizer:(UIGestureRecognizer *)otherGestureRecognizer\n{\n    return YES;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/ShapeChange/View/UIView+Xib.h",
    "content": "//\n//  UIView+Xib.h\n//  MLProject\n//\n//  Created by 妙龙赖 on 15/11/22.\n//  Copyright © 2015年 妙龙赖. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n\n@interface UIView (Xib)\n@property (nonatomic, strong, readonly) id containerView;\n/**\n *  生成与自身类同名的xibView,且约束与自己相同大小\n 同时将自己设置为FileOwner\n\n */\n- (void)setupSelfNameXibOnSelf;\n- (void)setupSelfNameXibOnSelfWithSerialNumber:(NSInteger)number;\n- (instancetype)loadSelfXibWithFileOwner:(id)fileOwner;\n- (instancetype)loadSelfXibWithFileOwner:(id)fileOwner serialNumber:(NSInteger)number;\n\n- (void)setupXibWithName:(NSString *)name;\n- (instancetype)loadXibWithName:(NSString *)name;\n- (instancetype)loadXibWithName:(NSString *)name serialNumber:(NSInteger)number;\n\n- (instancetype)loadXibWithName:(NSString *)name  FileOwner:(id)fileOwner serialNumber:(NSInteger)number;\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/ShapeChange/View/UIView+Xib.m",
    "content": "//\n//  UIView+Xib.m\n//  MLProject\n//\n//  Created by 妙龙赖 on 15/11/22.\n//  Copyright © 2015年 妙龙赖. All rights reserved.\n//\n\n#import \"UIView+Xib.h\"\n#import <objc/runtime.h>\n\n@implementation UIView (Xib)\n- (void)setupSelfNameXibOnSelf\n{\n   \n    [self setupSelfNameXibOnSelfWithSerialNumber:0];\n}\n\n- (void)setupSelfNameXibOnSelfWithSerialNumber:(NSInteger)number\n{\n    UIView *containerView = [self loadSelfXibWithFileOwner:self serialNumber:number];\n    [self addSubview:containerView];\n}\n- (instancetype)loadSelfXibWithFileOwner:(id)fileOwner\n{\n    \n   return [self loadSelfXibWithFileOwner:fileOwner serialNumber:0];\n}\n\n- (instancetype)loadSelfXibWithFileOwner:(id)fileOwner serialNumber:(NSInteger)number\n{\n    \n    UIView *containerView = [self loadXibWithName:NSStringFromClass([self class]) FileOwner:self serialNumber:0];\n    return containerView;\n}\n\n\n- (void)setupXibWithName:(NSString *)name\n{\n    UIView *contianerView = [self loadXibWithName:name];\n    [self addSubview:contianerView];\n  \n}\n- (instancetype)loadXibWithName:(NSString *)name\n{\n    return [self loadXibWithName:name serialNumber:0];\n}\n- (instancetype)loadXibWithName:(NSString *)name serialNumber:(NSInteger)number\n{\n    return [self loadXibWithName:name FileOwner:self serialNumber:number];\n}\n- (instancetype)loadXibWithName:(NSString *)name  FileOwner:(id)fileOwner serialNumber:(NSInteger)number\n{\n    UIView *containerView = [[NSBundle mainBundle] loadNibNamed:name owner:fileOwner options:nil][number];\n    containerView.frame = self.bounds;\n    containerView.autoresizingMask = UIViewAutoresizingFlexibleHeight|UIViewAutoresizingFlexibleWidth;\n    objc_setAssociatedObject(fileOwner, @selector(containerView), containerView, OBJC_ASSOCIATION_RETAIN_NONATOMIC);\n    return containerView;\n}\n\n\n#pragma mark - ========= Setter & Getter =========\n- (id)containerView\n{\n    return objc_getAssociatedObject(self, @selector(containerView));\n}\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Support/GLFilterInfoView.h",
    "content": "//\n//  GLFilterInfoView.h\n//  GLImageDemo\n//\n//  Created by LEO on 2018/3/14.\n//  Copyright © 2018年 LEO. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n\n@interface GLFilterInfoView : UIView\n\n@property (nonatomic, assign) BOOL      selected;\n@property (nonatomic, assign) float     degree;\n@property (nonatomic, strong) NSString  *text;\n@property (nonatomic, assign) NSString  *title;\n@property (nonatomic, strong) UIImage   *backgroundImage;\n@property (nonatomic,   copy) void (^selectedBlock)(GLFilterInfoView *filterInfoView, BOOL selected);\n\n- (void)setSelected:(BOOL)selected;\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Support/GLFilterInfoView.m",
    "content": "//\n//  GLFilterInfoView.m\n//  GLImageDemo\n//\n//  Created by LEO on 2018/3/14.\n//  Copyright © 2018年 LEO. All rights reserved.\n//\n\n#import \"GLFilterInfoView.h\"\n\n@interface GLFilterInfoView ()\n\n@property (nonatomic, strong) UIImageView   *imageView;\n@property (nonatomic, strong) UIView        *backgroundView;\n@property (nonatomic, strong) UILabel       *titleLabel;\n@property (nonatomic, strong) UILabel       *degreeLabel;\n\n@end\n\n@implementation GLFilterInfoView\n\n- (instancetype)initWithFrame:(CGRect)frame\n{\n    self = [super initWithFrame:frame];\n    if (self)\n    {\n        [self createSubviews];\n    }\n    return self;\n}\n\n- (instancetype)init\n{\n    self = [super init];\n    if (self)\n    {\n        [self createSubviews];\n    }\n    return self;\n}\n\n- (void)createSubviews\n{\n    self.imageView = [[UIImageView alloc] init];\n    self.imageView.translatesAutoresizingMaskIntoConstraints = NO;\n    [self addSubview:self.imageView];\n    \n    self.backgroundView = [[UIView alloc] init];\n    self.backgroundView.translatesAutoresizingMaskIntoConstraints = NO;\n    [self addSubview:self.backgroundView];\n    \n    self.titleLabel = [[UILabel alloc] init];\n    self.titleLabel.font = [UIFont systemFontOfSize:12];\n    self.titleLabel.textAlignment = NSTextAlignmentCenter;\n    self.titleLabel.textColor = RGB(133, 136, 150);\n    self.titleLabel.backgroundColor = [[UIColor blackColor] colorWithAlphaComponent:0.8];\n    self.titleLabel.translatesAutoresizingMaskIntoConstraints = NO;\n    [self addSubview:self.titleLabel];\n    \n    self.degreeLabel = [[UILabel alloc] init];\n    self.degreeLabel.font = [UIFont systemFontOfSize:40];\n    self.degreeLabel.textAlignment = NSTextAlignmentCenter;\n    self.degreeLabel.textColor = [UIColor whiteColor];\n    self.degreeLabel.minimumScaleFactor = 0.8;\n    self.degreeLabel.adjustsFontSizeToFitWidth = YES;\n    self.degreeLabel.translatesAutoresizingMaskIntoConstraints = NO;\n    [self addSubview:self.degreeLabel];\n    \n    [self addConstraints:[NSLayoutConstraint constraintsWithVisualFormat:@\"H:|[_imageView]|\" options:0 metrics:0 views:NSDictionaryOfVariableBindings(_imageView)]];\n    [self addConstraints:[NSLayoutConstraint constraintsWithVisualFormat:@\"V:|[_imageView]|\" options:0 metrics:0 views:NSDictionaryOfVariableBindings(_imageView)]];\n    \n    [self addConstraints:[NSLayoutConstraint constraintsWithVisualFormat:@\"H:|[_backgroundView]|\" options:0 metrics:0 views:NSDictionaryOfVariableBindings(_backgroundView)]];\n    [self addConstraints:[NSLayoutConstraint constraintsWithVisualFormat:@\"V:|[_backgroundView]|\" options:0 metrics:0 views:NSDictionaryOfVariableBindings(_backgroundView)]];\n    \n    [self addConstraints:[NSLayoutConstraint constraintsWithVisualFormat:@\"H:|[_titleLabel]|\" options:0 metrics:0 views:NSDictionaryOfVariableBindings(_titleLabel)]];\n    [self addConstraints:[NSLayoutConstraint constraintsWithVisualFormat:@\"V:|[_titleLabel(24)]\" options:0 metrics:0 views:NSDictionaryOfVariableBindings(_titleLabel)]];\n    \n    [self addConstraints:[NSLayoutConstraint constraintsWithVisualFormat:@\"H:|[_degreeLabel]|\" options:0 metrics:0 views:NSDictionaryOfVariableBindings(_degreeLabel)]];\n    [self addConstraints:[NSLayoutConstraint constraintsWithVisualFormat:@\"V:[_titleLabel][_degreeLabel]|\" options:0 metrics:0 views:NSDictionaryOfVariableBindings(_titleLabel, _degreeLabel)]];\n    \n    UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] 
initWithTarget:self action:@selector(tapGestureActionHandler:)];\n    [self addGestureRecognizer:tapGesture];\n}\n\n- (void)tapGestureActionHandler:(UITapGestureRecognizer *)gesture\n{\n    self.selected = YES;\n    \n    if (self.selectedBlock)\n    {\n        self.selectedBlock(self, self.selected);\n    }\n}\n\n- (void)setTitle:(NSString *)title\n{\n    _title = title;\n    self.titleLabel.text = title;\n}\n\n- (void)setText:(NSString *)text\n{\n    _text = text;\n    self.degreeLabel.text = text;\n}\n\n- (void)setDegree:(float)degree\n{\n    _degree = degree;\n    self.degreeLabel.text = [NSString stringWithFormat:@\"%.2f\", degree];\n}\n\n- (void)setSelected:(BOOL)selected\n{\n    _selected = selected;\n    \n    if (selected)\n    {\n        self.backgroundView.backgroundColor = RGBA(48, 109, 215, 0.2);\n        self.titleLabel.textColor = [UIColor whiteColor];\n        self.titleLabel.backgroundColor = [UIColor clearColor];\n        self.degreeLabel.textColor = [UIColor whiteColor];\n    }\n    else\n    {\n        self.backgroundView.backgroundColor = [UIColor clearColor];\n        self.titleLabel.textColor = RGB(133, 136, 150);\n        self.titleLabel.backgroundColor = [[UIColor blackColor] colorWithAlphaComponent:0.8];\n        self.degreeLabel.textColor = RGB(133, 136, 150);\n    }\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Support/GLImagePickerHelper.h",
    "content": "//\n//  GLImagePickerHelper.h\n//  WeGPURender\n//\n//  Created by LHD on 2018/2/3.\n//  Copyright © 2018年 LHD. All rights reserved.\n//\n\n#import <Foundation/Foundation.h>\n#import <UIKit/UIKit.h>\n\n@interface GLImagePickerHelper : NSObject\n\n+ (void)showInController:(UIViewController *)controller completion:(void (^)(UIImage *image, UIImage *thumbImage))completion;\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Support/GLImagePickerHelper.m",
    "content": "//\n//  GLImagePickerHelper.m\n//  WeGPURender\n//\n//  Created by LHD on 2018/2/3.\n//  Copyright © 2018年 LHD. All rights reserved.\n//\n\n#import \"GLImagePickerHelper.h\"\n#import <Photos/Photos.h>\n#import \"UIImage+Rotate.h\"\n\n@interface GLImagePickerHelper () <UIImagePickerControllerDelegate,UINavigationControllerDelegate>\n\n@property (nonatomic, weak) UIViewController *controller;\n@property (nonatomic, copy) void (^completion)(UIImage *image, UIImage *thumbImage);\n\n@end\n\n@implementation GLImagePickerHelper\n\n+ (id)sharedHelper\n{\n    static dispatch_once_t onceToken;\n    static id instance = nil;\n    dispatch_once(&onceToken, ^{\n        instance = [[GLImagePickerHelper alloc] init];\n    });\n    \n    return instance;\n}\n\n+ (void)showInController:(UIViewController *)controller completion:(void (^)(UIImage *image, UIImage *thumbImage))completion\n{\n    [[[self class] sharedHelper] showInController:controller completion:completion];\n}\n\n- (void)showInController:(UIViewController *)controller completion:(void (^)(UIImage *image, UIImage *thumbImage))completion\n{\n    self.completion = completion;\n    \n    if (![UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypePhotoLibrary]) return;\n    UIImagePickerController *ipc = [[UIImagePickerController alloc] init];\n    // 3. 设置打开照片相册类型(显示所有相簿)\n    ipc.sourceType = UIImagePickerControllerSourceTypePhotoLibrary;\n    ipc.delegate = self;\n    [controller presentViewController:ipc animated:YES completion:nil];\n}\n\n#pragma mark -- <UIImagePickerControllerDelegate>--\n// 获取图片后的操作\n- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary<NSString *,id> *)info\n{\n    // 设置图片\n    UIImage *image= info[UIImagePickerControllerOriginalImage];\n    image = [image fixOrientation];\n    UIImage *thumbImage = image;\n    \n    if (image.size.width > 1080)\n    {\n        float radio = image.size.height / image.size.width;\n        thumbImage = [self imageByScalingAndCroppingForSize:CGSizeMake(1080, 1080 * radio) withSourceImage:image];\n    }\n    \n    if (self.completion)\n    {\n        self.completion(image, thumbImage);\n    }\n    \n    // 销毁控制器\n    [picker dismissViewControllerAnimated:YES completion:nil];\n}\n\n- (UIImage*)imageByScalingAndCroppingForSize:(CGSize)targetSize withSourceImage:(UIImage *)sourceImage\n{\n    UIImage *newImage = nil;\n    CGSize imageSize = sourceImage.size;\n    CGFloat width = imageSize.width;\n    CGFloat height = imageSize.height;\n    CGFloat targetWidth = targetSize.width;\n    CGFloat targetHeight = targetSize.height;\n    CGFloat scaleFactor = 0.0;\n    CGFloat scaledWidth = targetWidth;\n    CGFloat scaledHeight = targetHeight;\n    CGPoint thumbnailPoint = CGPointMake(0.0,0.0);\n    if (CGSizeEqualToSize(imageSize, targetSize) == NO)\n    {\n        CGFloat widthFactor = targetWidth / width;\n        CGFloat heightFactor = targetHeight / height;\n        if (widthFactor > heightFactor)\n            scaleFactor = widthFactor; // scale to fit height\n        else\n            scaleFactor = heightFactor; // scale to fit width\n        scaledWidth= width * scaleFactor;\n        scaledHeight = height * scaleFactor;\n        // center the image\n        if (widthFactor > heightFactor)\n        {\n            thumbnailPoint.y = (targetHeight - scaledHeight);\n        }\n        else if (widthFactor < heightFactor)\n        {\n            thumbnailPoint.x = (targetWidth - scaledWidth);\n        }\n  
  }\n    UIGraphicsBeginImageContext(targetSize); // this will crop\n    CGRect thumbnailRect = CGRectZero;\n    thumbnailRect.origin = thumbnailPoint;\n    thumbnailRect.size.width= scaledWidth;\n    thumbnailRect.size.height = scaledHeight;\n    \n    [sourceImage drawInRect:thumbnailRect];\n    newImage = UIGraphicsGetImageFromCurrentImageContext();\n    if(newImage == nil)\n        NSLog(@\"could not scale image\");\n    \n    //pop the context to get back to the default\n    UIGraphicsEndImageContext();\n    \n    return newImage;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Support/GLSliderView.h",
    "content": "//\n//  GLSliderView.h\n//  GLImageDemo\n//\n//  Created by LEO on 2018/3/12.\n//  Copyright © 2018年 LEO. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n\n@interface GLSliderView : UIView\n\n@property (nonatomic, assign) float value;\n@property (nonatomic, assign) float minimumValue;\n@property (nonatomic, assign) float maximumValue;\n@property (nonatomic,   copy) void (^sliderViewValueDidChangeHandler)(float value);\n\n- (void)addTarget:(id)target action:(SEL)action;\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Support/GLSliderView.m",
    "content": "//\n//  GLSliderView.m\n//  GLImageDemo\n//\n//  Created by LEO on 2018/3/12.\n//  Copyright © 2018年 LEO. All rights reserved.\n//\n\n#import \"GLSliderView.h\"\n\n@interface GLSliderView () <UIGestureRecognizerDelegate>\n\n@property (nonatomic, strong) UISlider  *sliderView;\n@property (nonatomic, strong) UIView    *trackView;\n@property (nonatomic, strong) UIView    *progressView;\n@property (nonatomic, assign) float     progress;\n@property (nonatomic,   weak) id        target;\n@property (nonatomic, assign) SEL       action;\n\n@end\n\n@implementation GLSliderView\n{\n    float currentProgress;\n}\n\n- (instancetype)initWithFrame:(CGRect)frame\n{\n    self = [super initWithFrame:frame];\n    if (self) {\n        [self createSubviews];\n        _progress = 0.1;\n        self.progress = 0.0;\n        _minimumValue = 0.0;\n        _maximumValue = 1.0;\n    }\n    return self;\n}\n\n- (void)createSubviews\n{\n    //self.backgroundColor = [UIColor whiteColor];\n    \n    self.trackView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, self.bounds.size.width, 5)];\n    self.trackView.backgroundColor = RGBA(0, 152, 255, 0.1);\n    [self addSubview:self.trackView];\n    \n    self.progressView = [[UIView alloc] initWithFrame:self.trackView.bounds];\n    self.progressView.backgroundColor = RGB(48, 109, 215);\n    [self addSubview:self.progressView];\n    \n    UIPanGestureRecognizer *panGesture = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(panGestureHanlder:)];\n    panGesture.delegate = self;\n    [self addGestureRecognizer:panGesture];\n}\n\n- (void)setProgress:(float)progress\n{\n    progress = MIN(MAX(progress, 0.0), 1.0);\n    \n    if (_progress != progress)\n    {\n        _progress = progress;\n        _value = self.minimumValue + (self.maximumValue - self.minimumValue) * _progress;\n        self.progressView.frame = CGRectMake(0, 0, self.trackView.bounds.size.width * progress, self.trackView.bounds.size.height);\n        [self performActionWithValue:_value];\n    }\n}\n\n- (void)setValue:(float)value\n{\n    _value = value;\n    _progress = (value - self.minimumValue) / (self.maximumValue - self.minimumValue);\n    self.progressView.frame = CGRectMake(0, 0, self.trackView.bounds.size.width * _progress, self.trackView.bounds.size.height);\n}\n\n- (void)setMinimumValue:(float)minimumValue\n{\n    _minimumValue = minimumValue;\n    [self setValue:self.value];\n}\n\n- (void)setMaximumValue:(float)maximumValue\n{\n    _maximumValue = maximumValue;\n    [self setValue:self.value];\n}\n\n- (void)performActionWithValue:(float)value\n{\n    if (self.sliderViewValueDidChangeHandler)\n    {\n        self.sliderViewValueDidChangeHandler(value);\n    }\n    \n    if (self.target && self.action)\n    {\n        [self.target performSelector:self.action withObject:self afterDelay:0.0];\n    }\n}\n\n- (void)addTarget:(id)target action:(SEL)action\n{\n    self.target = target;\n    self.action = action;\n}\n\n- (void)panGestureHanlder:(UIPanGestureRecognizer *)gesture\n{\n    CGPoint translate = [gesture translationInView:gesture.view];\n    \n    if (gesture.state == UIGestureRecognizerStateBegan)\n    {\n        currentProgress = _progress;\n    }\n    else if (gesture.state == UIGestureRecognizerStateChanged)\n    {\n        self.progress = currentProgress + translate.x / self.trackView.bounds.size.width;\n    }\n}\n\n- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer shouldRecognizeSimultaneouslyWithGestureRecognizer:(UIGestureRecognizer 
*)otherGestureRecognizer\n{\n    return YES;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Support/HUD/MBProgressHUD.h",
    "content": "//\n//  MBProgressHUD.h\n//  Version 1.1.0\n//  Created by Matej Bukovinski on 2.4.09.\n//\n\n// This code is distributed under the terms and conditions of the MIT license. \n\n// Copyright © 2009-2016 Matej Bukovinski\n//\n// Permission is hereby granted, free of charge, to any person obtaining a copy\n// of this software and associated documentation files (the \"Software\"), to deal\n// in the Software without restriction, including without limitation the rights\n// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n// copies of the Software, and to permit persons to whom the Software is\n// furnished to do so, subject to the following conditions:\n//\n// The above copyright notice and this permission notice shall be included in\n// all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n// THE SOFTWARE.\n\n#import <Foundation/Foundation.h>\n#import <UIKit/UIKit.h>\n#import <CoreGraphics/CoreGraphics.h>\n\n@class MBBackgroundView;\n@protocol MBProgressHUDDelegate;\n\n\nextern CGFloat const MBProgressMaxOffset;\n\ntypedef NS_ENUM(NSInteger, MBProgressHUDMode) {\n    /// UIActivityIndicatorView.\n    MBProgressHUDModeIndeterminate,\n    /// A round, pie-chart like, progress view.\n    MBProgressHUDModeDeterminate,\n    /// Horizontal progress bar.\n    MBProgressHUDModeDeterminateHorizontalBar,\n    /// Ring-shaped progress view.\n    MBProgressHUDModeAnnularDeterminate,\n    /// Shows a custom view.\n    MBProgressHUDModeCustomView,\n    /// Shows only labels.\n    MBProgressHUDModeText\n};\n\ntypedef NS_ENUM(NSInteger, MBProgressHUDAnimation) {\n    /// Opacity animation\n    MBProgressHUDAnimationFade,\n    /// Opacity + scale animation (zoom in when appearing zoom out when disappearing)\n    MBProgressHUDAnimationZoom,\n    /// Opacity + scale animation (zoom out style)\n    MBProgressHUDAnimationZoomOut,\n    /// Opacity + scale animation (zoom in style)\n    MBProgressHUDAnimationZoomIn\n};\n\ntypedef NS_ENUM(NSInteger, MBProgressHUDBackgroundStyle) {\n    /// Solid color background\n    MBProgressHUDBackgroundStyleSolidColor,\n    /// UIVisualEffectView or UIToolbar.layer background view\n    MBProgressHUDBackgroundStyleBlur\n};\n\ntypedef void (^MBProgressHUDCompletionBlock)(void);\n\n\nNS_ASSUME_NONNULL_BEGIN\n\n\n/** \n * Displays a simple HUD window containing a progress indicator and two optional labels for short messages.\n *\n * This is a simple drop-in class for displaying a progress HUD view similar to Apple's private UIProgressHUD class.\n * The MBProgressHUD window spans over the entire space given to it by the initWithFrame: constructor and catches all\n * user input on this region, thereby preventing the user operations on components below the view.\n *\n * @note To still allow touches to pass through the HUD, you can set hud.userInteractionEnabled = NO.\n * @attention MBProgressHUD is a UI class and should therefore only be accessed on the main thread.\n */\n@interface MBProgressHUD : UIView\n\n/**\n * Creates a new HUD, adds it to provided view and shows it. 
The counterpart to this method is hideHUDForView:animated:.\n *\n * @note This method sets removeFromSuperViewOnHide. The HUD will automatically be removed from the view hierarchy when hidden.\n *\n * @param view The view that the HUD will be added to\n * @param animated If set to YES the HUD will appear using the current animationType. If set to NO the HUD will not use\n * animations while appearing.\n * @return A reference to the created HUD.\n *\n * @see hideHUDForView:animated:\n * @see animationType\n */\n+ (instancetype)showHUDAddedTo:(UIView *)view animated:(BOOL)animated;\n\n/// @name Showing and hiding\n\n/**\n * Finds the top-most HUD subview that hasn't finished and hides it. The counterpart to this method is showHUDAddedTo:animated:.\n *\n * @note This method sets removeFromSuperViewOnHide. The HUD will automatically be removed from the view hierarchy when hidden.\n *\n * @param view The view that is going to be searched for a HUD subview.\n * @param animated If set to YES the HUD will disappear using the current animationType. If set to NO the HUD will not use\n * animations while disappearing.\n * @return YES if a HUD was found and removed, NO otherwise.\n *\n * @see showHUDAddedTo:animated:\n * @see animationType\n */\n+ (BOOL)hideHUDForView:(UIView *)view animated:(BOOL)animated;\n\n/**\n * Finds the top-most HUD subview that hasn't finished and returns it.\n *\n * @param view The view that is going to be searched.\n * @return A reference to the last HUD subview discovered.\n */\n+ (nullable MBProgressHUD *)HUDForView:(UIView *)view;\n\n/**\n * A convenience constructor that initializes the HUD with the view's bounds. Calls the designated constructor with\n * view.bounds as the parameter.\n *\n * @param view The view instance that will provide the bounds for the HUD. Should be the same instance as\n * the HUD's superview (i.e., the view that the HUD will be added to).\n */\n- (instancetype)initWithView:(UIView *)view;\n\n/** \n * Displays the HUD. \n *\n * @note You need to make sure that the main thread completes its run loop soon after this method call so that\n * the user interface can be updated. Call this method when your task is already set up to be executed in a new thread\n * (e.g., when using something like NSOperation or making an asynchronous call like NSURLRequest).\n *\n * @param animated If set to YES the HUD will appear using the current animationType. If set to NO the HUD will not use\n * animations while appearing.\n *\n * @see animationType\n */\n- (void)showAnimated:(BOOL)animated;\n\n/** \n * Hides the HUD. This still calls the hudWasHidden: delegate. This is the counterpart of the show: method. Use it to\n * hide the HUD when your task completes.\n *\n * @param animated If set to YES the HUD will disappear using the current animationType. If set to NO the HUD will not use\n * animations while disappearing.\n *\n * @see animationType\n */\n- (void)hideAnimated:(BOOL)animated;\n\n/** \n * Hides the HUD after a delay. This still calls the hudWasHidden: delegate. This is the counterpart of the show: method. Use it to\n * hide the HUD when your task completes.\n *\n * @param animated If set to YES the HUD will disappear using the current animationType. If set to NO the HUD will not use\n * animations while disappearing.\n * @param delay Delay in seconds until the HUD is hidden.\n *\n * @see animationType\n */\n- (void)hideAnimated:(BOOL)animated afterDelay:(NSTimeInterval)delay;\n\n/**\n * The HUD delegate object. 
Receives HUD state notifications.\n */\n@property (weak, nonatomic) id<MBProgressHUDDelegate> delegate;\n\n/**\n * Called after the HUD is hidden.\n */\n@property (copy, nullable) MBProgressHUDCompletionBlock completionBlock;\n\n/**\n * Grace period is the time (in seconds) that the invoked method may be run without\n * showing the HUD. If the task finishes before the grace time runs out, the HUD will\n * not be shown at all.\n * This may be used to prevent HUD display for very short tasks.\n * Defaults to 0 (no grace time).\n * @note The graceTime needs to be set before the HUD is shown. You thus can't use `showHUDAddedTo:animated:`,\n * but instead need to alloc / init the HUD, configure the grace time and then show it manually.\n */\n@property (assign, nonatomic) NSTimeInterval graceTime;\n\n/**\n * The minimum time (in seconds) that the HUD is shown.\n * This avoids the problem of the HUD being shown and then instantly hidden.\n * Defaults to 0 (no minimum show time).\n */\n@property (assign, nonatomic) NSTimeInterval minShowTime;\n\n/**\n * Removes the HUD from its parent view when hidden.\n * Defaults to NO.\n */\n@property (assign, nonatomic) BOOL removeFromSuperViewOnHide;\n\n/// @name Appearance\n\n/** \n * MBProgressHUD operation mode. The default is MBProgressHUDModeIndeterminate.\n */\n@property (assign, nonatomic) MBProgressHUDMode mode;\n\n/**\n * A color that gets forwarded to all labels and supported indicators. Also sets the tintColor\n * for custom views on iOS 7+. Set to nil to manage color individually.\n * Defaults to semi-translucent black on iOS 7 and later and white on earlier iOS versions.\n */\n@property (strong, nonatomic, nullable) UIColor *contentColor UI_APPEARANCE_SELECTOR;\n\n/**\n * The animation type that should be used when the HUD is shown and hidden.\n */\n@property (assign, nonatomic) MBProgressHUDAnimation animationType UI_APPEARANCE_SELECTOR;\n\n/**\n * The bezel offset relative to the center of the view. You can use MBProgressMaxOffset\n * and -MBProgressMaxOffset to move the HUD all the way to the screen edge in each direction.\n * E.g., CGPointMake(0.f, MBProgressMaxOffset) would position the HUD centered on the bottom edge.\n */\n@property (assign, nonatomic) CGPoint offset UI_APPEARANCE_SELECTOR;\n\n/**\n * The amount of space between the HUD edge and the HUD elements (labels, indicators or custom views).\n * This also represents the minimum bezel distance to the edge of the HUD view.\n * Defaults to 20.f\n */\n@property (assign, nonatomic) CGFloat margin UI_APPEARANCE_SELECTOR;\n\n/**\n * The minimum size of the HUD bezel. Defaults to CGSizeZero (no minimum size).\n */\n@property (assign, nonatomic) CGSize minSize UI_APPEARANCE_SELECTOR;\n\n/**\n * Force the HUD dimensions to be equal if possible.\n */\n@property (assign, nonatomic, getter = isSquare) BOOL square UI_APPEARANCE_SELECTOR;\n\n/**\n * When enabled, the bezel center gets slightly affected by the device accelerometer data.\n * Has no effect on iOS < 7.0. Defaults to YES.\n */\n@property (assign, nonatomic, getter=areDefaultMotionEffectsEnabled) BOOL defaultMotionEffectsEnabled UI_APPEARANCE_SELECTOR;\n\n/// @name Progress\n\n/**\n * The progress of the progress indicator, from 0.0 to 1.0. 
Defaults to 0.0.\n */\n@property (assign, nonatomic) float progress;\n\n/// @name ProgressObject\n\n/**\n * The NSProgress object feeding the progress information to the progress indicator.\n */\n@property (strong, nonatomic, nullable) NSProgress *progressObject;\n\n/// @name Views\n\n/**\n * The view containing the labels and indicator (or customView).\n */\n@property (strong, nonatomic, readonly) MBBackgroundView *bezelView;\n\n/**\n * View covering the entire HUD area, placed behind bezelView.\n */\n@property (strong, nonatomic, readonly) MBBackgroundView *backgroundView;\n\n/**\n * The UIView (e.g., a UIImageView) to be shown when the HUD is in MBProgressHUDModeCustomView.\n * The view should implement intrinsicContentSize for proper sizing. For best results use approximately 37 by 37 pixels.\n */\n@property (strong, nonatomic, nullable) UIView *customView;\n\n/**\n * A label that holds an optional short message to be displayed below the activity indicator. The HUD is automatically resized to fit\n * the entire text.\n */\n@property (strong, nonatomic, readonly) UILabel *label;\n\n/**\n * A label that holds an optional details message displayed below the labelText message. The details text can span multiple lines.\n */\n@property (strong, nonatomic, readonly) UILabel *detailsLabel;\n\n/**\n * A button that is placed below the labels. Visible only if a target / action is added. \n */\n@property (strong, nonatomic, readonly) UIButton *button;\n\n@end\n\n\n@protocol MBProgressHUDDelegate <NSObject>\n\n@optional\n\n/** \n * Called after the HUD was fully hidden from the screen. \n */\n- (void)hudWasHidden:(MBProgressHUD *)hud;\n\n@end\n\n\n/**\n * A progress view for showing definite progress by filling up a circle (pie chart).\n */\n@interface MBRoundProgressView : UIView \n\n/**\n * Progress (0.0 to 1.0)\n */\n@property (nonatomic, assign) float progress;\n\n/**\n * Indicator progress color.\n * Defaults to white [UIColor whiteColor].\n */\n@property (nonatomic, strong) UIColor *progressTintColor;\n\n/**\n * Indicator background (non-progress) color. \n * Only applicable on iOS versions older than iOS 7.\n * Defaults to translucent white (alpha 0.1).\n */\n@property (nonatomic, strong) UIColor *backgroundTintColor;\n\n/*\n * Display mode - NO = round or YES = annular. Defaults to round.\n */\n@property (nonatomic, assign, getter = isAnnular) BOOL annular;\n\n@end\n\n\n/**\n * A flat bar progress view. \n */\n@interface MBBarProgressView : UIView\n\n/**\n * Progress (0.0 to 1.0)\n */\n@property (nonatomic, assign) float progress;\n\n/**\n * Bar border line color.\n * Defaults to white [UIColor whiteColor].\n */\n@property (nonatomic, strong) UIColor *lineColor;\n\n/**\n * Bar background color.\n * Defaults to clear [UIColor clearColor];\n */\n@property (nonatomic, strong) UIColor *progressRemainingColor;\n\n/**\n * Bar progress color.\n * Defaults to white [UIColor whiteColor].\n */\n@property (nonatomic, strong) UIColor *progressColor;\n\n@end\n\n\n@interface MBBackgroundView : UIView\n\n/**\n * The background style. \n * Defaults to MBProgressHUDBackgroundStyleBlur.\n */\n@property (nonatomic) MBProgressHUDBackgroundStyle style;\n\n/**\n * The blur effect style, when using MBProgressHUDBackgroundStyleBlur.\n * Defaults to UIBlurEffectStyleLight.\n */\n@property (nonatomic) UIBlurEffectStyle blurEffectStyle;\n\n/**\n * The background color or the blur tint color.\n */\n@property (nonatomic, strong) UIColor *color;\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
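  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Support/HUD/MBProgressHUDUsageExample.m",
    "content": "//\n//  MBProgressHUDUsageExample.m\n//\n//  NOTE: Hypothetical usage sketch added for illustration only; this file is not\n//  part of the original MBProgressHUD distribution or the GPURenderKitDemo sources.\n//  It exercises the show/hide API documented in MBProgressHUD.h. The class name\n//  HUDUsageExample and the method runTaskWithHUDInViewController: are invented.\n//\n\n#import \"MBProgressHUD.h\"\n\n@interface HUDUsageExample : NSObject\n@end\n\n@implementation HUDUsageExample\n\n// Shows an indeterminate HUD over the controller's view while a background task\n// runs, then hides it again on the main thread (MBProgressHUD is main-thread only).\n+ (void)runTaskWithHUDInViewController:(UIViewController *)controller {\n    MBProgressHUD *hud = [MBProgressHUD showHUDAddedTo:controller.view animated:YES];\n    hud.mode = MBProgressHUDModeIndeterminate;\n    hud.label.text = @\"Loading\";\n\n    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{\n        // ... long-running work goes here ...\n        dispatch_async(dispatch_get_main_queue(), ^{\n            [hud hideAnimated:YES];\n        });\n    });\n}\n\n@end\n"
  },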
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Support/HUD/MBProgressHUD.m",
    "content": "//\n// MBProgressHUD.m\n// Version 1.1.0\n// Created by Matej Bukovinski on 2.4.09.\n//\n\n#import \"MBProgressHUD.h\"\n#import <tgmath.h>\n\n#define MBMainThreadAssert() NSAssert([NSThread isMainThread], @\"MBProgressHUD needs to be accessed on the main thread.\");\n\nCGFloat const MBProgressMaxOffset = 1000000.f;\n\nstatic const CGFloat MBDefaultPadding = 4.f;\nstatic const CGFloat MBDefaultLabelFontSize = 16.f;\nstatic const CGFloat MBDefaultDetailsLabelFontSize = 12.f;\n\n\n@interface MBProgressHUD ()\n\n@property (nonatomic, assign) BOOL useAnimation;\n@property (nonatomic, assign, getter=hasFinished) BOOL finished;\n@property (nonatomic, strong) UIView *indicator;\n@property (nonatomic, strong) NSDate *showStarted;\n@property (nonatomic, strong) NSArray *paddingConstraints;\n@property (nonatomic, strong) NSArray *bezelConstraints;\n@property (nonatomic, strong) UIView *topSpacer;\n@property (nonatomic, strong) UIView *bottomSpacer;\n@property (nonatomic, weak) NSTimer *graceTimer;\n@property (nonatomic, weak) NSTimer *minShowTimer;\n@property (nonatomic, weak) NSTimer *hideDelayTimer;\n@property (nonatomic, weak) CADisplayLink *progressObjectDisplayLink;\n\n@end\n\n\n@interface MBProgressHUDRoundedButton : UIButton\n@end\n\n\n@implementation MBProgressHUD\n\n#pragma mark - Class methods\n\n+ (instancetype)showHUDAddedTo:(UIView *)view animated:(BOOL)animated {\n    MBProgressHUD *hud = [[self alloc] initWithView:view];\n    hud.removeFromSuperViewOnHide = YES;\n    [view addSubview:hud];\n    [hud showAnimated:animated];\n    return hud;\n}\n\n+ (BOOL)hideHUDForView:(UIView *)view animated:(BOOL)animated {\n    MBProgressHUD *hud = [self HUDForView:view];\n    if (hud != nil) {\n        hud.removeFromSuperViewOnHide = YES;\n        [hud hideAnimated:animated];\n        return YES;\n    }\n    return NO;\n}\n\n+ (MBProgressHUD *)HUDForView:(UIView *)view {\n    NSEnumerator *subviewsEnum = [view.subviews reverseObjectEnumerator];\n    for (UIView *subview in subviewsEnum) {\n        if ([subview isKindOfClass:self]) {\n            MBProgressHUD *hud = (MBProgressHUD *)subview;\n            if (hud.hasFinished == NO) {\n                return hud;\n            }\n        }\n    }\n    return nil;\n}\n\n#pragma mark - Lifecycle\n\n- (void)commonInit {\n    // Set default values for properties\n    _animationType = MBProgressHUDAnimationFade;\n    _mode = MBProgressHUDModeIndeterminate;\n    _margin = 20.0f;\n    _defaultMotionEffectsEnabled = YES;\n    _contentColor = [UIColor colorWithWhite:0.f alpha:0.7f];\n\n    // Transparent background\n    self.opaque = NO;\n    self.backgroundColor = [UIColor clearColor];\n    // Make it invisible for now\n    self.alpha = 0.0f;\n    self.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;\n    self.layer.allowsGroupOpacity = NO;\n\n    [self setupViews];\n    [self updateIndicators];\n    [self registerForNotifications];\n}\n\n- (instancetype)initWithFrame:(CGRect)frame {\n    if ((self = [super initWithFrame:frame])) {\n        [self commonInit];\n    }\n    return self;\n}\n\n- (instancetype)initWithCoder:(NSCoder *)aDecoder {\n    if ((self = [super initWithCoder:aDecoder])) {\n        [self commonInit];\n    }\n    return self;\n}\n\n- (id)initWithView:(UIView *)view {\n    NSAssert(view, @\"View must not be nil.\");\n    return [self initWithFrame:view.bounds];\n}\n\n- (void)dealloc {\n    [self unregisterFromNotifications];\n}\n\n#pragma mark - Show & hide\n\n- 
(void)showAnimated:(BOOL)animated {\n    MBMainThreadAssert();\n    [self.minShowTimer invalidate];\n    self.useAnimation = animated;\n    self.finished = NO;\n    // If the grace time is set, postpone the HUD display\n    if (self.graceTime > 0.0) {\n        NSTimer *timer = [NSTimer timerWithTimeInterval:self.graceTime target:self selector:@selector(handleGraceTimer:) userInfo:nil repeats:NO];\n        [[NSRunLoop currentRunLoop] addTimer:timer forMode:NSRunLoopCommonModes];\n        self.graceTimer = timer;\n    } \n    // ... otherwise show the HUD immediately\n    else {\n        [self showUsingAnimation:self.useAnimation];\n    }\n}\n\n- (void)hideAnimated:(BOOL)animated {\n    MBMainThreadAssert();\n    [self.graceTimer invalidate];\n    self.useAnimation = animated;\n    self.finished = YES;\n    // If the minShow time is set, calculate how long the HUD was shown,\n    // and postpone the hiding operation if necessary\n    if (self.minShowTime > 0.0 && self.showStarted) {\n        NSTimeInterval interv = [[NSDate date] timeIntervalSinceDate:self.showStarted];\n        if (interv < self.minShowTime) {\n            NSTimer *timer = [NSTimer timerWithTimeInterval:(self.minShowTime - interv) target:self selector:@selector(handleMinShowTimer:) userInfo:nil repeats:NO];\n            [[NSRunLoop currentRunLoop] addTimer:timer forMode:NSRunLoopCommonModes];\n            self.minShowTimer = timer;\n            return;\n        } \n    }\n    // ... otherwise hide the HUD immediately\n    [self hideUsingAnimation:self.useAnimation];\n}\n\n- (void)hideAnimated:(BOOL)animated afterDelay:(NSTimeInterval)delay {\n    // Cancel any scheduled hideAnimated:afterDelay: calls\n    [self.hideDelayTimer invalidate];\n\n    NSTimer *timer = [NSTimer timerWithTimeInterval:delay target:self selector:@selector(handleHideTimer:) userInfo:@(animated) repeats:NO];\n    [[NSRunLoop currentRunLoop] addTimer:timer forMode:NSRunLoopCommonModes];\n    self.hideDelayTimer = timer;\n}\n\n#pragma mark - Timer callbacks\n\n- (void)handleGraceTimer:(NSTimer *)theTimer {\n    // Show the HUD only if the task is still running\n    if (!self.hasFinished) {\n        [self showUsingAnimation:self.useAnimation];\n    }\n}\n\n- (void)handleMinShowTimer:(NSTimer *)theTimer {\n    [self hideUsingAnimation:self.useAnimation];\n}\n\n- (void)handleHideTimer:(NSTimer *)timer {\n    [self hideAnimated:[timer.userInfo boolValue]];\n}\n\n#pragma mark - View Hierrarchy\n\n- (void)didMoveToSuperview {\n    [self updateForCurrentOrientationAnimated:NO];\n}\n\n#pragma mark - Internal show & hide operations\n\n- (void)showUsingAnimation:(BOOL)animated {\n    // Cancel any previous animations\n    [self.bezelView.layer removeAllAnimations];\n    [self.backgroundView.layer removeAllAnimations];\n\n    // Cancel any scheduled hideAnimated:afterDelay: calls\n    [self.hideDelayTimer invalidate];\n\n    self.showStarted = [NSDate date];\n    self.alpha = 1.f;\n\n    // Needed in case we hide and re-show with the same NSProgress object attached.\n    [self setNSProgressDisplayLinkEnabled:YES];\n\n    if (animated) {\n        [self animateIn:YES withType:self.animationType completion:NULL];\n    } else {\n        self.bezelView.alpha = 1.f;\n        self.backgroundView.alpha = 1.f;\n    }\n}\n\n- (void)hideUsingAnimation:(BOOL)animated {\n    // Cancel any scheduled hideAnimated:afterDelay: calls.\n    // This needs to happen here instead of in done,\n    // to avoid races if another hideAnimated:afterDelay:\n    // call comes in while the HUD 
is animating out.\n    [self.hideDelayTimer invalidate];\n\n    if (animated && self.showStarted) {\n        self.showStarted = nil;\n        [self animateIn:NO withType:self.animationType completion:^(BOOL finished) {\n            [self done];\n        }];\n    } else {\n        self.showStarted = nil;\n        self.bezelView.alpha = 0.f;\n        self.backgroundView.alpha = 1.f;\n        [self done];\n    }\n}\n\n- (void)animateIn:(BOOL)animatingIn withType:(MBProgressHUDAnimation)type completion:(void(^)(BOOL finished))completion {\n    // Automatically determine the correct zoom animation type\n    if (type == MBProgressHUDAnimationZoom) {\n        type = animatingIn ? MBProgressHUDAnimationZoomIn : MBProgressHUDAnimationZoomOut;\n    }\n\n    CGAffineTransform small = CGAffineTransformMakeScale(0.5f, 0.5f);\n    CGAffineTransform large = CGAffineTransformMakeScale(1.5f, 1.5f);\n\n    // Set starting state\n    UIView *bezelView = self.bezelView;\n    if (animatingIn && bezelView.alpha == 0.f && type == MBProgressHUDAnimationZoomIn) {\n        bezelView.transform = small;\n    } else if (animatingIn && bezelView.alpha == 0.f && type == MBProgressHUDAnimationZoomOut) {\n        bezelView.transform = large;\n    }\n\n    // Perform animations\n    dispatch_block_t animations = ^{\n        if (animatingIn) {\n            bezelView.transform = CGAffineTransformIdentity;\n        } else if (!animatingIn && type == MBProgressHUDAnimationZoomIn) {\n            bezelView.transform = large;\n        } else if (!animatingIn && type == MBProgressHUDAnimationZoomOut) {\n            bezelView.transform = small;\n        }\n        CGFloat alpha = animatingIn ? 1.f : 0.f;\n        bezelView.alpha = alpha;\n        self.backgroundView.alpha = alpha;\n    };\n    [UIView animateWithDuration:0.3 delay:0. 
usingSpringWithDamping:1.f initialSpringVelocity:0.f options:UIViewAnimationOptionBeginFromCurrentState animations:animations completion:completion];\n}\n\n- (void)done {\n    [self setNSProgressDisplayLinkEnabled:NO];\n\n    if (self.hasFinished) {\n        self.alpha = 0.0f;\n        if (self.removeFromSuperViewOnHide) {\n            [self removeFromSuperview];\n        }\n    }\n    MBProgressHUDCompletionBlock completionBlock = self.completionBlock;\n    if (completionBlock) {\n        completionBlock();\n    }\n    id<MBProgressHUDDelegate> delegate = self.delegate;\n    if ([delegate respondsToSelector:@selector(hudWasHidden:)]) {\n        [delegate performSelector:@selector(hudWasHidden:) withObject:self];\n    }\n}\n\n#pragma mark - UI\n\n- (void)setupViews {\n    UIColor *defaultColor = self.contentColor;\n\n    MBBackgroundView *backgroundView = [[MBBackgroundView alloc] initWithFrame:self.bounds];\n    backgroundView.style = MBProgressHUDBackgroundStyleSolidColor;\n    backgroundView.backgroundColor = [UIColor clearColor];\n    backgroundView.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;\n    backgroundView.alpha = 0.f;\n    [self addSubview:backgroundView];\n    _backgroundView = backgroundView;\n\n    MBBackgroundView *bezelView = [MBBackgroundView new];\n    bezelView.translatesAutoresizingMaskIntoConstraints = NO;\n    bezelView.layer.cornerRadius = 5.f;\n    bezelView.alpha = 0.f;\n    [self addSubview:bezelView];\n    _bezelView = bezelView;\n    [self updateBezelMotionEffects];\n\n    UILabel *label = [UILabel new];\n    label.adjustsFontSizeToFitWidth = NO;\n    label.textAlignment = NSTextAlignmentCenter;\n    label.textColor = defaultColor;\n    label.font = [UIFont boldSystemFontOfSize:MBDefaultLabelFontSize];\n    label.opaque = NO;\n    label.backgroundColor = [UIColor clearColor];\n    _label = label;\n\n    UILabel *detailsLabel = [UILabel new];\n    detailsLabel.adjustsFontSizeToFitWidth = NO;\n    detailsLabel.textAlignment = NSTextAlignmentCenter;\n    detailsLabel.textColor = defaultColor;\n    detailsLabel.numberOfLines = 0;\n    detailsLabel.font = [UIFont boldSystemFontOfSize:MBDefaultDetailsLabelFontSize];\n    detailsLabel.opaque = NO;\n    detailsLabel.backgroundColor = [UIColor clearColor];\n    _detailsLabel = detailsLabel;\n\n    UIButton *button = [MBProgressHUDRoundedButton buttonWithType:UIButtonTypeCustom];\n    button.titleLabel.textAlignment = NSTextAlignmentCenter;\n    button.titleLabel.font = [UIFont boldSystemFontOfSize:MBDefaultDetailsLabelFontSize];\n    [button setTitleColor:defaultColor forState:UIControlStateNormal];\n    _button = button;\n\n    for (UIView *view in @[label, detailsLabel, button]) {\n        view.translatesAutoresizingMaskIntoConstraints = NO;\n        [view setContentCompressionResistancePriority:998.f forAxis:UILayoutConstraintAxisHorizontal];\n        [view setContentCompressionResistancePriority:998.f forAxis:UILayoutConstraintAxisVertical];\n        [bezelView addSubview:view];\n    }\n\n    UIView *topSpacer = [UIView new];\n    topSpacer.translatesAutoresizingMaskIntoConstraints = NO;\n    topSpacer.hidden = YES;\n    [bezelView addSubview:topSpacer];\n    _topSpacer = topSpacer;\n\n    UIView *bottomSpacer = [UIView new];\n    bottomSpacer.translatesAutoresizingMaskIntoConstraints = NO;\n    bottomSpacer.hidden = YES;\n    [bezelView addSubview:bottomSpacer];\n    _bottomSpacer = bottomSpacer;\n}\n\n- (void)updateIndicators {\n    UIView *indicator = self.indicator;\n    
BOOL isActivityIndicator = [indicator isKindOfClass:[UIActivityIndicatorView class]];\n    BOOL isRoundIndicator = [indicator isKindOfClass:[MBRoundProgressView class]];\n\n    MBProgressHUDMode mode = self.mode;\n    if (mode == MBProgressHUDModeIndeterminate) {\n        if (!isActivityIndicator) {\n            // Update to indeterminate indicator\n            [indicator removeFromSuperview];\n            indicator = [[UIActivityIndicatorView alloc] initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleWhiteLarge];\n            [(UIActivityIndicatorView *)indicator startAnimating];\n            [self.bezelView addSubview:indicator];\n        }\n    }\n    else if (mode == MBProgressHUDModeDeterminateHorizontalBar) {\n        // Update to bar determinate indicator\n        [indicator removeFromSuperview];\n        indicator = [[MBBarProgressView alloc] init];\n        [self.bezelView addSubview:indicator];\n    }\n    else if (mode == MBProgressHUDModeDeterminate || mode == MBProgressHUDModeAnnularDeterminate) {\n        if (!isRoundIndicator) {\n            // Update to determinante indicator\n            [indicator removeFromSuperview];\n            indicator = [[MBRoundProgressView alloc] init];\n            [self.bezelView addSubview:indicator];\n        }\n        if (mode == MBProgressHUDModeAnnularDeterminate) {\n            [(MBRoundProgressView *)indicator setAnnular:YES];\n        }\n    } \n    else if (mode == MBProgressHUDModeCustomView && self.customView != indicator) {\n        // Update custom view indicator\n        [indicator removeFromSuperview];\n        indicator = self.customView;\n        [self.bezelView addSubview:indicator];\n    }\n    else if (mode == MBProgressHUDModeText) {\n        [indicator removeFromSuperview];\n        indicator = nil;\n    }\n    indicator.translatesAutoresizingMaskIntoConstraints = NO;\n    self.indicator = indicator;\n\n    if ([indicator respondsToSelector:@selector(setProgress:)]) {\n        [(id)indicator setValue:@(self.progress) forKey:@\"progress\"];\n    }\n\n    [indicator setContentCompressionResistancePriority:998.f forAxis:UILayoutConstraintAxisHorizontal];\n    [indicator setContentCompressionResistancePriority:998.f forAxis:UILayoutConstraintAxisVertical];\n\n    [self updateViewsForColor:self.contentColor];\n    [self setNeedsUpdateConstraints];\n}\n\n- (void)updateViewsForColor:(UIColor *)color {\n    if (!color) return;\n\n    self.label.textColor = color;\n    self.detailsLabel.textColor = color;\n    [self.button setTitleColor:color forState:UIControlStateNormal];\n\n    // UIAppearance settings are prioritized. 
If they are preset the set color is ignored.\n\n    UIView *indicator = self.indicator;\n    if ([indicator isKindOfClass:[UIActivityIndicatorView class]]) {\n        UIActivityIndicatorView *appearance = nil;\n#if __IPHONE_OS_VERSION_MIN_REQUIRED < 90000\n        appearance = [UIActivityIndicatorView appearanceWhenContainedIn:[MBProgressHUD class], nil];\n#else\n        // For iOS 9+\n        appearance = [UIActivityIndicatorView appearanceWhenContainedInInstancesOfClasses:@[[MBProgressHUD class]]];\n#endif\n        \n        if (appearance.color == nil) {\n            ((UIActivityIndicatorView *)indicator).color = color;\n        }\n    } else if ([indicator isKindOfClass:[MBRoundProgressView class]]) {\n        MBRoundProgressView *appearance = nil;\n#if __IPHONE_OS_VERSION_MIN_REQUIRED < 90000\n        appearance = [MBRoundProgressView appearanceWhenContainedIn:[MBProgressHUD class], nil];\n#else\n        appearance = [MBRoundProgressView appearanceWhenContainedInInstancesOfClasses:@[[MBProgressHUD class]]];\n#endif\n        if (appearance.progressTintColor == nil) {\n            ((MBRoundProgressView *)indicator).progressTintColor = color;\n        }\n        if (appearance.backgroundTintColor == nil) {\n            ((MBRoundProgressView *)indicator).backgroundTintColor = [color colorWithAlphaComponent:0.1];\n        }\n    } else if ([indicator isKindOfClass:[MBBarProgressView class]]) {\n        MBBarProgressView *appearance = nil;\n#if __IPHONE_OS_VERSION_MIN_REQUIRED < 90000\n        appearance = [MBBarProgressView appearanceWhenContainedIn:[MBProgressHUD class], nil];\n#else\n        appearance = [MBBarProgressView appearanceWhenContainedInInstancesOfClasses:@[[MBProgressHUD class]]];\n#endif\n        if (appearance.progressColor == nil) {\n            ((MBBarProgressView *)indicator).progressColor = color;\n        }\n        if (appearance.lineColor == nil) {\n            ((MBBarProgressView *)indicator).lineColor = color;\n        }\n    } else {\n#if __IPHONE_OS_VERSION_MAX_ALLOWED >= 70000 || TARGET_OS_TV\n        if ([indicator respondsToSelector:@selector(setTintColor:)]) {\n            [indicator setTintColor:color];\n        }\n#endif\n    }\n}\n\n- (void)updateBezelMotionEffects {\n#if __IPHONE_OS_VERSION_MAX_ALLOWED >= 70000 || TARGET_OS_TV\n    MBBackgroundView *bezelView = self.bezelView;\n    if (![bezelView respondsToSelector:@selector(addMotionEffect:)]) return;\n\n    if (self.defaultMotionEffectsEnabled) {\n        CGFloat effectOffset = 10.f;\n        UIInterpolatingMotionEffect *effectX = [[UIInterpolatingMotionEffect alloc] initWithKeyPath:@\"center.x\" type:UIInterpolatingMotionEffectTypeTiltAlongHorizontalAxis];\n        effectX.maximumRelativeValue = @(effectOffset);\n        effectX.minimumRelativeValue = @(-effectOffset);\n\n        UIInterpolatingMotionEffect *effectY = [[UIInterpolatingMotionEffect alloc] initWithKeyPath:@\"center.y\" type:UIInterpolatingMotionEffectTypeTiltAlongVerticalAxis];\n        effectY.maximumRelativeValue = @(effectOffset);\n        effectY.minimumRelativeValue = @(-effectOffset);\n\n        UIMotionEffectGroup *group = [[UIMotionEffectGroup alloc] init];\n        group.motionEffects = @[effectX, effectY];\n\n        [bezelView addMotionEffect:group];\n    } else {\n        NSArray *effects = [bezelView motionEffects];\n        for (UIMotionEffect *effect in effects) {\n            [bezelView removeMotionEffect:effect];\n        }\n    }\n#endif\n}\n\n#pragma mark - Layout\n\n- (void)updateConstraints {\n    UIView *bezel = 
self.bezelView;\n    UIView *topSpacer = self.topSpacer;\n    UIView *bottomSpacer = self.bottomSpacer;\n    CGFloat margin = self.margin;\n    NSMutableArray *bezelConstraints = [NSMutableArray array];\n    NSDictionary *metrics = @{@\"margin\": @(margin)};\n\n    NSMutableArray *subviews = [NSMutableArray arrayWithObjects:self.topSpacer, self.label, self.detailsLabel, self.button, self.bottomSpacer, nil];\n    if (self.indicator) [subviews insertObject:self.indicator atIndex:1];\n\n    // Remove existing constraints\n    [self removeConstraints:self.constraints];\n    [topSpacer removeConstraints:topSpacer.constraints];\n    [bottomSpacer removeConstraints:bottomSpacer.constraints];\n    if (self.bezelConstraints) {\n        [bezel removeConstraints:self.bezelConstraints];\n        self.bezelConstraints = nil;\n    }\n\n    // Center bezel in container (self), applying the offset if set\n    CGPoint offset = self.offset;\n    NSMutableArray *centeringConstraints = [NSMutableArray array];\n    [centeringConstraints addObject:[NSLayoutConstraint constraintWithItem:bezel attribute:NSLayoutAttributeCenterX relatedBy:NSLayoutRelationEqual toItem:self attribute:NSLayoutAttributeCenterX multiplier:1.f constant:offset.x]];\n    [centeringConstraints addObject:[NSLayoutConstraint constraintWithItem:bezel attribute:NSLayoutAttributeCenterY relatedBy:NSLayoutRelationEqual toItem:self attribute:NSLayoutAttributeCenterY multiplier:1.f constant:offset.y]];\n    [self applyPriority:998.f toConstraints:centeringConstraints];\n    [self addConstraints:centeringConstraints];\n\n    // Ensure minimum side margin is kept\n    NSMutableArray *sideConstraints = [NSMutableArray array];\n    [sideConstraints addObjectsFromArray:[NSLayoutConstraint constraintsWithVisualFormat:@\"|-(>=margin)-[bezel]-(>=margin)-|\" options:0 metrics:metrics views:NSDictionaryOfVariableBindings(bezel)]];\n    [sideConstraints addObjectsFromArray:[NSLayoutConstraint constraintsWithVisualFormat:@\"V:|-(>=margin)-[bezel]-(>=margin)-|\" options:0 metrics:metrics views:NSDictionaryOfVariableBindings(bezel)]];\n    [self applyPriority:999.f toConstraints:sideConstraints];\n    [self addConstraints:sideConstraints];\n\n    // Minimum bezel size, if set\n    CGSize minimumSize = self.minSize;\n    if (!CGSizeEqualToSize(minimumSize, CGSizeZero)) {\n        NSMutableArray *minSizeConstraints = [NSMutableArray array];\n        [minSizeConstraints addObject:[NSLayoutConstraint constraintWithItem:bezel attribute:NSLayoutAttributeWidth relatedBy:NSLayoutRelationGreaterThanOrEqual toItem:nil attribute:NSLayoutAttributeNotAnAttribute multiplier:1.f constant:minimumSize.width]];\n        [minSizeConstraints addObject:[NSLayoutConstraint constraintWithItem:bezel attribute:NSLayoutAttributeHeight relatedBy:NSLayoutRelationGreaterThanOrEqual toItem:nil attribute:NSLayoutAttributeNotAnAttribute multiplier:1.f constant:minimumSize.height]];\n        [self applyPriority:997.f toConstraints:minSizeConstraints];\n        [bezelConstraints addObjectsFromArray:minSizeConstraints];\n    }\n\n    // Square aspect ratio, if set\n    if (self.square) {\n        NSLayoutConstraint *square = [NSLayoutConstraint constraintWithItem:bezel attribute:NSLayoutAttributeHeight relatedBy:NSLayoutRelationEqual toItem:bezel attribute:NSLayoutAttributeWidth multiplier:1.f constant:0];\n        square.priority = 997.f;\n        [bezelConstraints addObject:square];\n    }\n\n    // Top and bottom spacing\n    [topSpacer addConstraint:[NSLayoutConstraint 
constraintWithItem:topSpacer attribute:NSLayoutAttributeHeight relatedBy:NSLayoutRelationGreaterThanOrEqual toItem:nil attribute:NSLayoutAttributeNotAnAttribute multiplier:1.f constant:margin]];\n    [bottomSpacer addConstraint:[NSLayoutConstraint constraintWithItem:bottomSpacer attribute:NSLayoutAttributeHeight relatedBy:NSLayoutRelationGreaterThanOrEqual toItem:nil attribute:NSLayoutAttributeNotAnAttribute multiplier:1.f constant:margin]];\n    // Top and bottom spaces should be equal\n    [bezelConstraints addObject:[NSLayoutConstraint constraintWithItem:topSpacer attribute:NSLayoutAttributeHeight relatedBy:NSLayoutRelationEqual toItem:bottomSpacer attribute:NSLayoutAttributeHeight multiplier:1.f constant:0.f]];\n\n    // Layout subviews in bezel\n    NSMutableArray *paddingConstraints = [NSMutableArray new];\n    [subviews enumerateObjectsUsingBlock:^(UIView *view, NSUInteger idx, BOOL *stop) {\n        // Center in bezel\n        [bezelConstraints addObject:[NSLayoutConstraint constraintWithItem:view attribute:NSLayoutAttributeCenterX relatedBy:NSLayoutRelationEqual toItem:bezel attribute:NSLayoutAttributeCenterX multiplier:1.f constant:0.f]];\n        // Ensure the minimum edge margin is kept\n        [bezelConstraints addObjectsFromArray:[NSLayoutConstraint constraintsWithVisualFormat:@\"|-(>=margin)-[view]-(>=margin)-|\" options:0 metrics:metrics views:NSDictionaryOfVariableBindings(view)]];\n        // Element spacing\n        if (idx == 0) {\n            // First, ensure spacing to bezel edge\n            [bezelConstraints addObject:[NSLayoutConstraint constraintWithItem:view attribute:NSLayoutAttributeTop relatedBy:NSLayoutRelationEqual toItem:bezel attribute:NSLayoutAttributeTop multiplier:1.f constant:0.f]];\n        } else if (idx == subviews.count - 1) {\n            // Last, ensure spacing to bezel edge\n            [bezelConstraints addObject:[NSLayoutConstraint constraintWithItem:view attribute:NSLayoutAttributeBottom relatedBy:NSLayoutRelationEqual toItem:bezel attribute:NSLayoutAttributeBottom multiplier:1.f constant:0.f]];\n        }\n        if (idx > 0) {\n            // Has previous\n            NSLayoutConstraint *padding = [NSLayoutConstraint constraintWithItem:view attribute:NSLayoutAttributeTop relatedBy:NSLayoutRelationEqual toItem:subviews[idx - 1] attribute:NSLayoutAttributeBottom multiplier:1.f constant:0.f];\n            [bezelConstraints addObject:padding];\n            [paddingConstraints addObject:padding];\n        }\n    }];\n\n    [bezel addConstraints:bezelConstraints];\n    self.bezelConstraints = bezelConstraints;\n    \n    self.paddingConstraints = [paddingConstraints copy];\n    [self updatePaddingConstraints];\n    \n    [super updateConstraints];\n}\n\n- (void)layoutSubviews {\n    // There is no need to update constraints if they are going to\n    // be recreated in [super layoutSubviews] due to needsUpdateConstraints being set.\n    // This also avoids an issue on iOS 8, where updatePaddingConstraints\n    // would trigger a zombie object access.\n    if (!self.needsUpdateConstraints) {\n        [self updatePaddingConstraints];\n    }\n    [super layoutSubviews];\n}\n\n- (void)updatePaddingConstraints {\n    // Set padding dynamically, depending on whether the view is visible or not\n    __block BOOL hasVisibleAncestors = NO;\n    [self.paddingConstraints enumerateObjectsUsingBlock:^(NSLayoutConstraint *padding, NSUInteger idx, BOOL *stop) {\n        UIView *firstView = (UIView *)padding.firstItem;\n        UIView *secondView = (UIView 
*)padding.secondItem;\n        BOOL firstVisible = !firstView.hidden && !CGSizeEqualToSize(firstView.intrinsicContentSize, CGSizeZero);\n        BOOL secondVisible = !secondView.hidden && !CGSizeEqualToSize(secondView.intrinsicContentSize, CGSizeZero);\n        // Set if both views are visible or if there's a visible view on top that doesn't have padding\n        // added relative to the current view yet\n        padding.constant = (firstVisible && (secondVisible || hasVisibleAncestors)) ? MBDefaultPadding : 0.f;\n        hasVisibleAncestors |= secondVisible;\n    }];\n}\n\n- (void)applyPriority:(UILayoutPriority)priority toConstraints:(NSArray *)constraints {\n    for (NSLayoutConstraint *constraint in constraints) {\n        constraint.priority = priority;\n    }\n}\n\n#pragma mark - Properties\n\n- (void)setMode:(MBProgressHUDMode)mode {\n    if (mode != _mode) {\n        _mode = mode;\n        [self updateIndicators];\n    }\n}\n\n- (void)setCustomView:(UIView *)customView {\n    if (customView != _customView) {\n        _customView = customView;\n        if (self.mode == MBProgressHUDModeCustomView) {\n            [self updateIndicators];\n        }\n    }\n}\n\n- (void)setOffset:(CGPoint)offset {\n    if (!CGPointEqualToPoint(offset, _offset)) {\n        _offset = offset;\n        [self setNeedsUpdateConstraints];\n    }\n}\n\n- (void)setMargin:(CGFloat)margin {\n    if (margin != _margin) {\n        _margin = margin;\n        [self setNeedsUpdateConstraints];\n    }\n}\n\n- (void)setMinSize:(CGSize)minSize {\n    if (!CGSizeEqualToSize(minSize, _minSize)) {\n        _minSize = minSize;\n        [self setNeedsUpdateConstraints];\n    }\n}\n\n- (void)setSquare:(BOOL)square {\n    if (square != _square) {\n        _square = square;\n        [self setNeedsUpdateConstraints];\n    }\n}\n\n- (void)setProgressObjectDisplayLink:(CADisplayLink *)progressObjectDisplayLink {\n    if (progressObjectDisplayLink != _progressObjectDisplayLink) {\n        [_progressObjectDisplayLink invalidate];\n        \n        _progressObjectDisplayLink = progressObjectDisplayLink;\n        \n        [_progressObjectDisplayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSDefaultRunLoopMode];\n    }\n}\n\n- (void)setProgressObject:(NSProgress *)progressObject {\n    if (progressObject != _progressObject) {\n        _progressObject = progressObject;\n        [self setNSProgressDisplayLinkEnabled:YES];\n    }\n}\n\n- (void)setProgress:(float)progress {\n    if (progress != _progress) {\n        _progress = progress;\n        UIView *indicator = self.indicator;\n        if ([indicator respondsToSelector:@selector(setProgress:)]) {\n            [(id)indicator setValue:@(self.progress) forKey:@\"progress\"];\n        }\n    }\n}\n\n- (void)setContentColor:(UIColor *)contentColor {\n    if (contentColor != _contentColor && ![contentColor isEqual:_contentColor]) {\n        _contentColor = contentColor;\n        [self updateViewsForColor:contentColor];\n    }\n}\n\n- (void)setDefaultMotionEffectsEnabled:(BOOL)defaultMotionEffectsEnabled {\n    if (defaultMotionEffectsEnabled != _defaultMotionEffectsEnabled) {\n        _defaultMotionEffectsEnabled = defaultMotionEffectsEnabled;\n        [self updateBezelMotionEffects];\n    }\n}\n\n#pragma mark - NSProgress\n\n- (void)setNSProgressDisplayLinkEnabled:(BOOL)enabled {\n    // We're using CADisplayLink, because NSProgress can change very quickly and observing it may starve the main thread,\n    // so we're refreshing the progress only every frame draw\n    if (enabled && 
self.progressObject) {\n        // Only create if not already active.\n        if (!self.progressObjectDisplayLink) {\n            self.progressObjectDisplayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(updateProgressFromProgressObject)];\n        }\n    } else {\n        self.progressObjectDisplayLink = nil;\n    }\n}\n\n- (void)updateProgressFromProgressObject {\n    self.progress = self.progressObject.fractionCompleted;\n}\n\n#pragma mark - Notifications\n\n- (void)registerForNotifications {\n#if !TARGET_OS_TV\n    NSNotificationCenter *nc = [NSNotificationCenter defaultCenter];\n\n    [nc addObserver:self selector:@selector(statusBarOrientationDidChange:)\n               name:UIApplicationDidChangeStatusBarOrientationNotification object:nil];\n#endif\n}\n\n- (void)unregisterFromNotifications {\n#if !TARGET_OS_TV\n    NSNotificationCenter *nc = [NSNotificationCenter defaultCenter];\n    [nc removeObserver:self name:UIApplicationDidChangeStatusBarOrientationNotification object:nil];\n#endif\n}\n\n#if !TARGET_OS_TV\n- (void)statusBarOrientationDidChange:(NSNotification *)notification {\n    UIView *superview = self.superview;\n    if (!superview) {\n        return;\n    } else {\n        [self updateForCurrentOrientationAnimated:YES];\n    }\n}\n#endif\n\n- (void)updateForCurrentOrientationAnimated:(BOOL)animated {\n    // Stay in sync with the superview in any case\n    if (self.superview) {\n        self.frame = self.superview.bounds;\n    }\n\n    // Not needed on iOS 8+, compile out when the deployment target allows,\n    // to avoid sharedApplication problems on extension targets\n#if __IPHONE_OS_VERSION_MIN_REQUIRED < 80000\n    // Only needed pre iOS 8 when added to a window\n    BOOL iOS8OrLater = kCFCoreFoundationVersionNumber >= kCFCoreFoundationVersionNumber_iOS_8_0;\n    if (iOS8OrLater || ![self.superview isKindOfClass:[UIWindow class]]) return;\n\n    // Make extension friendly. Will not get called on extensions (iOS 8+) due to the above check.\n    // This just ensures we don't get a warning about extension-unsafe API.\n    Class UIApplicationClass = NSClassFromString(@\"UIApplication\");\n    if (!UIApplicationClass || ![UIApplicationClass respondsToSelector:@selector(sharedApplication)]) return;\n\n    UIApplication *application = [UIApplication performSelector:@selector(sharedApplication)];\n    UIInterfaceOrientation orientation = application.statusBarOrientation;\n    CGFloat radians = 0;\n    \n    if (UIInterfaceOrientationIsLandscape(orientation)) {\n        radians = orientation == UIInterfaceOrientationLandscapeLeft ? -(CGFloat)M_PI_2 : (CGFloat)M_PI_2;\n        // Window coordinates differ!\n        self.bounds = CGRectMake(0, 0, self.bounds.size.height, self.bounds.size.width);\n    } else {\n        radians = orientation == UIInterfaceOrientationPortraitUpsideDown ? 
(CGFloat)M_PI : 0.f;\n    }\n\n    if (animated) {\n        [UIView animateWithDuration:0.3 animations:^{\n            self.transform = CGAffineTransformMakeRotation(radians);\n        }];\n    } else {\n        self.transform = CGAffineTransformMakeRotation(radians);\n    }\n#endif\n}\n\n@end\n\n\n@implementation MBRoundProgressView\n\n#pragma mark - Lifecycle\n\n- (id)init {\n    return [self initWithFrame:CGRectMake(0.f, 0.f, 37.f, 37.f)];\n}\n\n- (id)initWithFrame:(CGRect)frame {\n    self = [super initWithFrame:frame];\n    if (self) {\n        self.backgroundColor = [UIColor clearColor];\n        self.opaque = NO;\n        _progress = 0.f;\n        _annular = NO;\n        _progressTintColor = [[UIColor alloc] initWithWhite:1.f alpha:1.f];\n        _backgroundTintColor = [[UIColor alloc] initWithWhite:1.f alpha:.1f];\n    }\n    return self;\n}\n\n#pragma mark - Layout\n\n- (CGSize)intrinsicContentSize {\n    return CGSizeMake(37.f, 37.f);\n}\n\n#pragma mark - Properties\n\n- (void)setProgress:(float)progress {\n    if (progress != _progress) {\n        _progress = progress;\n        [self setNeedsDisplay];\n    }\n}\n\n- (void)setProgressTintColor:(UIColor *)progressTintColor {\n    NSAssert(progressTintColor, @\"The color should not be nil.\");\n    if (progressTintColor != _progressTintColor && ![progressTintColor isEqual:_progressTintColor]) {\n        _progressTintColor = progressTintColor;\n        [self setNeedsDisplay];\n    }\n}\n\n- (void)setBackgroundTintColor:(UIColor *)backgroundTintColor {\n    NSAssert(backgroundTintColor, @\"The color should not be nil.\");\n    if (backgroundTintColor != _backgroundTintColor && ![backgroundTintColor isEqual:_backgroundTintColor]) {\n        _backgroundTintColor = backgroundTintColor;\n        [self setNeedsDisplay];\n    }\n}\n\n#pragma mark - Drawing\n\n- (void)drawRect:(CGRect)rect {\n    CGContextRef context = UIGraphicsGetCurrentContext();\n\n    if (_annular) {\n        // Draw background\n        CGFloat lineWidth = 2.f;\n        UIBezierPath *processBackgroundPath = [UIBezierPath bezierPath];\n        processBackgroundPath.lineWidth = lineWidth;\n        processBackgroundPath.lineCapStyle = kCGLineCapButt;\n        CGPoint center = CGPointMake(CGRectGetMidX(self.bounds), CGRectGetMidY(self.bounds));\n        CGFloat radius = (self.bounds.size.width - lineWidth)/2;\n        CGFloat startAngle = - ((float)M_PI / 2); // 90 degrees\n        CGFloat endAngle = (2 * (float)M_PI) + startAngle;\n        [processBackgroundPath addArcWithCenter:center radius:radius startAngle:startAngle endAngle:endAngle clockwise:YES];\n        [_backgroundTintColor set];\n        [processBackgroundPath stroke];\n        // Draw progress\n        UIBezierPath *processPath = [UIBezierPath bezierPath];\n        processPath.lineCapStyle = kCGLineCapSquare;\n        processPath.lineWidth = lineWidth;\n        endAngle = (self.progress * 2 * (float)M_PI) + startAngle;\n        [processPath addArcWithCenter:center radius:radius startAngle:startAngle endAngle:endAngle clockwise:YES];\n        [_progressTintColor set];\n        [processPath stroke];\n    } else {\n        // Draw background\n        CGFloat lineWidth = 2.f;\n        CGRect allRect = self.bounds;\n        CGRect circleRect = CGRectInset(allRect, lineWidth/2.f, lineWidth/2.f);\n        CGPoint center = CGPointMake(CGRectGetMidX(self.bounds), CGRectGetMidY(self.bounds));\n        [_progressTintColor setStroke];\n        [_backgroundTintColor setFill];\n        CGContextSetLineWidth(context, 
lineWidth);\n        CGContextStrokeEllipseInRect(context, circleRect);\n        // 90 degrees\n        CGFloat startAngle = - ((float)M_PI / 2.f);\n        // Draw progress\n        UIBezierPath *processPath = [UIBezierPath bezierPath];\n        processPath.lineCapStyle = kCGLineCapButt;\n        processPath.lineWidth = lineWidth * 2.f;\n        CGFloat radius = (CGRectGetWidth(self.bounds) / 2.f) - (processPath.lineWidth / 2.f);\n        CGFloat endAngle = (self.progress * 2.f * (float)M_PI) + startAngle;\n        [processPath addArcWithCenter:center radius:radius startAngle:startAngle endAngle:endAngle clockwise:YES];\n        // Ensure that we don't get color overlapping when _progressTintColor alpha < 1.f.\n        CGContextSetBlendMode(context, kCGBlendModeCopy);\n        [_progressTintColor set];\n        [processPath stroke];\n    }\n}\n\n@end\n\n\n@implementation MBBarProgressView\n\n#pragma mark - Lifecycle\n\n- (id)init {\n    return [self initWithFrame:CGRectMake(.0f, .0f, 120.0f, 20.0f)];\n}\n\n- (id)initWithFrame:(CGRect)frame {\n    self = [super initWithFrame:frame];\n    if (self) {\n        _progress = 0.f;\n        _lineColor = [UIColor whiteColor];\n        _progressColor = [UIColor whiteColor];\n        _progressRemainingColor = [UIColor clearColor];\n        self.backgroundColor = [UIColor clearColor];\n        self.opaque = NO;\n    }\n    return self;\n}\n\n#pragma mark - Layout\n\n- (CGSize)intrinsicContentSize {\n    return CGSizeMake(120.f, 10.f);\n}\n\n#pragma mark - Properties\n\n- (void)setProgress:(float)progress {\n    if (progress != _progress) {\n        _progress = progress;\n        [self setNeedsDisplay];\n    }\n}\n\n- (void)setProgressColor:(UIColor *)progressColor {\n    NSAssert(progressColor, @\"The color should not be nil.\");\n    if (progressColor != _progressColor && ![progressColor isEqual:_progressColor]) {\n        _progressColor = progressColor;\n        [self setNeedsDisplay];\n    }\n}\n\n- (void)setProgressRemainingColor:(UIColor *)progressRemainingColor {\n    NSAssert(progressRemainingColor, @\"The color should not be nil.\");\n    if (progressRemainingColor != _progressRemainingColor && ![progressRemainingColor isEqual:_progressRemainingColor]) {\n        _progressRemainingColor = progressRemainingColor;\n        [self setNeedsDisplay];\n    }\n}\n\n#pragma mark - Drawing\n\n- (void)drawRect:(CGRect)rect {\n    CGContextRef context = UIGraphicsGetCurrentContext();\n    \n    CGContextSetLineWidth(context, 2);\n    CGContextSetStrokeColorWithColor(context,[_lineColor CGColor]);\n    CGContextSetFillColorWithColor(context, [_progressRemainingColor CGColor]);\n    \n    // Draw background and Border\n    CGFloat radius = (rect.size.height / 2) - 2;\n    CGContextMoveToPoint(context, 2, rect.size.height/2);\n    CGContextAddArcToPoint(context, 2, 2, radius + 2, 2, radius);\n    CGContextAddArcToPoint(context, rect.size.width - 2, 2, rect.size.width - 2, rect.size.height / 2, radius);\n    CGContextAddArcToPoint(context, rect.size.width - 2, rect.size.height - 2, rect.size.width - radius - 2, rect.size.height - 2, radius);\n    CGContextAddArcToPoint(context, 2, rect.size.height - 2, 2, rect.size.height/2, radius);\n    CGContextDrawPath(context, kCGPathFillStroke);\n    \n    CGContextSetFillColorWithColor(context, [_progressColor CGColor]);\n    radius = radius - 2;\n    CGFloat amount = self.progress * rect.size.width;\n    \n    // Progress in the middle area\n    if (amount >= radius + 4 && amount <= (rect.size.width - radius - 4)) 
{\n        CGContextMoveToPoint(context, 4, rect.size.height/2);\n        CGContextAddArcToPoint(context, 4, 4, radius + 4, 4, radius);\n        CGContextAddLineToPoint(context, amount, 4);\n        CGContextAddLineToPoint(context, amount, radius + 4);\n        \n        CGContextMoveToPoint(context, 4, rect.size.height/2);\n        CGContextAddArcToPoint(context, 4, rect.size.height - 4, radius + 4, rect.size.height - 4, radius);\n        CGContextAddLineToPoint(context, amount, rect.size.height - 4);\n        CGContextAddLineToPoint(context, amount, radius + 4);\n        \n        CGContextFillPath(context);\n    }\n    \n    // Progress in the right arc\n    else if (amount > radius + 4) {\n        CGFloat x = amount - (rect.size.width - radius - 4);\n\n        CGContextMoveToPoint(context, 4, rect.size.height/2);\n        CGContextAddArcToPoint(context, 4, 4, radius + 4, 4, radius);\n        CGContextAddLineToPoint(context, rect.size.width - radius - 4, 4);\n        CGFloat angle = -acos(x/radius);\n        if (isnan(angle)) angle = 0;\n        CGContextAddArc(context, rect.size.width - radius - 4, rect.size.height/2, radius, M_PI, angle, 0);\n        CGContextAddLineToPoint(context, amount, rect.size.height/2);\n\n        CGContextMoveToPoint(context, 4, rect.size.height/2);\n        CGContextAddArcToPoint(context, 4, rect.size.height - 4, radius + 4, rect.size.height - 4, radius);\n        CGContextAddLineToPoint(context, rect.size.width - radius - 4, rect.size.height - 4);\n        angle = acos(x/radius);\n        if (isnan(angle)) angle = 0;\n        CGContextAddArc(context, rect.size.width - radius - 4, rect.size.height/2, radius, -M_PI, angle, 1);\n        CGContextAddLineToPoint(context, amount, rect.size.height/2);\n        \n        CGContextFillPath(context);\n    }\n    \n    // Progress is in the left arc\n    else if (amount < radius + 4 && amount > 0) {\n        CGContextMoveToPoint(context, 4, rect.size.height/2);\n        CGContextAddArcToPoint(context, 4, 4, radius + 4, 4, radius);\n        CGContextAddLineToPoint(context, radius + 4, rect.size.height/2);\n\n        CGContextMoveToPoint(context, 4, rect.size.height/2);\n        CGContextAddArcToPoint(context, 4, rect.size.height - 4, radius + 4, rect.size.height - 4, radius);\n        CGContextAddLineToPoint(context, radius + 4, rect.size.height/2);\n        \n        CGContextFillPath(context);\n    }\n}\n\n@end\n\n\n@interface MBBackgroundView ()\n\n@property UIVisualEffectView *effectView;\n\n@end\n\n\n@implementation MBBackgroundView\n\n#pragma mark - Lifecycle\n\n- (instancetype)initWithFrame:(CGRect)frame {\n    if ((self = [super initWithFrame:frame])) {\n        _style = MBProgressHUDBackgroundStyleBlur;\n        _blurEffectStyle = UIBlurEffectStyleLight;\n        _color = [UIColor colorWithWhite:0.8f alpha:0.6f];\n\n        self.clipsToBounds = YES;\n\n        [self updateForBackgroundStyle];\n    }\n    return self;\n}\n\n#pragma mark - Layout\n\n- (CGSize)intrinsicContentSize {\n    // Smallest size possible. 
Content pushes against this.\n    return CGSizeZero;\n}\n\n#pragma mark - Appearance\n\n- (void)setStyle:(MBProgressHUDBackgroundStyle)style {\n    if (_style != style) {\n        _style = style;\n        [self updateForBackgroundStyle];\n    }\n}\n\n- (void)setColor:(UIColor *)color {\n    NSAssert(color, @\"The color should not be nil.\");\n    if (color != _color && ![color isEqual:_color]) {\n        _color = color;\n        [self updateViewsForColor:color];\n    }\n}\n\n#if __IPHONE_OS_VERSION_MAX_ALLOWED >= 80000 || TARGET_OS_TV\n\n- (void)setBlurEffectStyle:(UIBlurEffectStyle)blurEffectStyle {\n    if (_blurEffectStyle == blurEffectStyle) {\n        return;\n    }\n\n    _blurEffectStyle = blurEffectStyle;\n\n    [self updateForBackgroundStyle];\n}\n\n#endif\n\n///////////////////////////////////////////////////////////////////////////////////////////\n#pragma mark - Views\n\n- (void)updateForBackgroundStyle {\n    [self.effectView removeFromSuperview];\n    self.effectView = nil;\n\n    MBProgressHUDBackgroundStyle style = self.style;\n    if (style == MBProgressHUDBackgroundStyleBlur) {\n        UIBlurEffect *effect =  [UIBlurEffect effectWithStyle:self.blurEffectStyle];\n        UIVisualEffectView *effectView = [[UIVisualEffectView alloc] initWithEffect:effect];\n        [self insertSubview:effectView atIndex:0];\n        effectView.frame = self.bounds;\n        effectView.autoresizingMask = UIViewAutoresizingFlexibleHeight | UIViewAutoresizingFlexibleWidth;\n        self.backgroundColor = self.color;\n        self.layer.allowsGroupOpacity = NO;\n        self.effectView = effectView;\n    } else {\n        self.backgroundColor = self.color;\n    }\n}\n\n- (void)updateViewsForColor:(UIColor *)color {\n    if (self.style == MBProgressHUDBackgroundStyleBlur) {\n        self.backgroundColor = self.color;\n    } else {\n        self.backgroundColor = self.color;\n    }\n}\n\n@end\n\n\n@implementation MBProgressHUDRoundedButton\n\n#pragma mark - Lifecycle\n\n- (instancetype)initWithFrame:(CGRect)frame {\n    self = [super initWithFrame:frame];\n    if (self) {\n        CALayer *layer = self.layer;\n        layer.borderWidth = 1.f;\n    }\n    return self;\n}\n\n#pragma mark - Layout\n\n- (void)layoutSubviews {\n    [super layoutSubviews];\n    // Fully rounded corners\n    CGFloat height = CGRectGetHeight(self.bounds);\n    self.layer.cornerRadius = ceil(height / 2.f);\n}\n\n- (CGSize)intrinsicContentSize {\n    // Only show if we have associated control events\n    if (self.allControlEvents == 0) return CGSizeZero;\n    CGSize size = [super intrinsicContentSize];\n    // Add some side padding\n    size.width += 20.f;\n    return size;\n}\n\n#pragma mark - Color\n\n- (void)setTitleColor:(UIColor *)color forState:(UIControlState)state {\n    [super setTitleColor:color forState:state];\n    // Update related colors\n    [self setHighlighted:self.highlighted];\n    self.layer.borderColor = color.CGColor;\n}\n\n- (void)setHighlighted:(BOOL)highlighted {\n    [super setHighlighted:highlighted];\n    UIColor *baseColor = [self titleColorForState:UIControlStateSelected];\n    self.backgroundColor = highlighted ? [baseColor colorWithAlphaComponent:0.1f] : [UIColor clearColor];\n}\n\n@end\n"
  },
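  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Support/HUD/MBProgressHUDProgressExample.m",
    "content": "//\n//  MBProgressHUDProgressExample.m\n//\n//  NOTE: Hypothetical sketch added for illustration only; not part of the original\n//  sources. It shows the determinate mode together with the progressObject property\n//  implemented above (the HUD samples NSProgress.fractionCompleted once per frame\n//  via a CADisplayLink). The class and method names below are invented.\n//\n\n#import \"MBProgressHUD.h\"\n\n@interface HUDProgressExample : NSObject\n@end\n\n@implementation HUDProgressExample\n\n// Drives a ring-shaped HUD from an NSProgress object.\n+ (void)runDeterminateTaskWithHUDInView:(UIView *)view {\n    MBProgressHUD *hud = [MBProgressHUD showHUDAddedTo:view animated:YES];\n    hud.mode = MBProgressHUDModeAnnularDeterminate;\n\n    NSProgress *progress = [[NSProgress alloc] initWithParent:nil userInfo:nil];\n    progress.totalUnitCount = 100;\n    hud.progressObject = progress; // the HUD refreshes itself from this object\n\n    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{\n        for (int64_t unit = 1; unit <= 100; unit++) {\n            [NSThread sleepForTimeInterval:0.02]; // stand-in for real work\n            progress.completedUnitCount = unit;\n        }\n        dispatch_async(dispatch_get_main_queue(), ^{\n            [hud hideAnimated:YES];\n        });\n    });\n}\n\n@end\n"
  },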
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Support/UIColor+Utils.h",
    "content": "//\n//  UIColor+Utils.h\n//  iOSCodeProject\n//\n//  Created by Fox on 14-7-19.\n//  Copyright (c) 2014年 翔傲信息科技（上海）有限公司. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n\n/**\n *  颜色扩展类别\n */\n@interface UIColor (Utils)\n\n/**\n *  通过十六进制获取颜色\n *\n *  @param hexColor 十六进制\n *\n *  @return 颜色\n */\n+ (UIColor *)colorForHex:(NSString *)hexColor;\n\n+ (UIColor *)colorForHex:(NSString *)hexColor alpha:(float)alpha;\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Support/UIColor+Utils.m",
    "content": "//\n//  UIColor+Utils.m\n//  iOSCodeProject\n//\n//  Created by Fox on 14-7-19.\n//  Copyright (c) 2014年 翔傲信息科技（上海）有限公司. All rights reserved.\n//\n\n#import \"UIColor+Utils.h\"\n\n@implementation UIColor (Utils)\n\n+ (UIColor *)colorForHex:(NSString *)hexColor\n{\n    return [self colorForHex:hexColor alpha:1.0];\n}\n+ (UIColor *)colorForHex:(NSString *)hexColor alpha:(float)alpha\n{\n    NSRange range;\n    range.location = 0;\n    range.length = 2;\n    NSString *rString = [hexColor substringWithRange:range];\n    range.location = 2;\n    NSString *gString = [hexColor substringWithRange:range];\n    range.location = 4;\n    NSString *bString = [hexColor substringWithRange:range];\n\n    unsigned int r, g, b;\n    [[NSScanner scannerWithString:rString] scanHexInt:&r];\n    [[NSScanner scannerWithString:gString] scanHexInt:&g];\n    [[NSScanner scannerWithString:bString] scanHexInt:&b];\n\n    return [UIColor colorWithRed:((float)r / 255.0f)\n                           green:((float)g / 255.0f)\n                            blue:((float)b / 255.0f)\n                           alpha:alpha];\n}\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Support/UIImage+Rotate.h",
    "content": "//\n//  UIImage+Rotate.h\n//  UIImage+Categories\n//\n//  Created by lisong on 16/9/4.\n//  Copyright © 2016年 lisong. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n\n@interface UIImage (Rotate)\n\n/** 纠正图片的方向 */\n- (UIImage *)fixOrientation;\n\n/** 按给定的方向旋转图片 */\n- (UIImage*)rotate:(UIImageOrientation)orient;\n\n/** 垂直翻转 */\n- (UIImage *)flipVertical;\n\n/** 水平翻转 */\n- (UIImage *)flipHorizontal;\n\n/** 将图片旋转degrees角度 */\n- (UIImage *)imageRotatedByDegrees:(CGFloat)degrees;\n\n/** 将图片旋转radians弧度 */\n- (UIImage *)imageRotatedByRadians:(CGFloat)radians;\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Support/UIImage+Rotate.m",
    "content": "//\n//  UIImage+Rotate.m\n//  UIImage+Categories\n//\n//  Created by lisong on 16/9/4.\n//  Copyright © 2016年 lisong. All rights reserved.\n//\n\n#import \"UIImage+Rotate.h\"\n\n//由角度转换弧度\n#define kDegreesToRadian(x)      (M_PI * (x) / 180.0)\n//由弧度转换角度\n#define kRadianToDegrees(radian) (radian * 180.0) / (M_PI)\n\n@implementation UIImage (Rotate)\n\n/** 纠正图片的方向 */\n- (UIImage *)fixOrientation\n{\n    if (self.imageOrientation == UIImageOrientationUp) return self;\n    \n    // We need to calculate the proper transformation to make the image upright.\n    // We do it in 2 steps: Rotate if Left/Right/Down, and then flip if Mirrored.\n    CGAffineTransform transform = CGAffineTransformIdentity;\n    \n    switch (self.imageOrientation)\n    {\n        case UIImageOrientationDown:\n        case UIImageOrientationDownMirrored:\n            transform = CGAffineTransformTranslate(transform, self.size.width, self.size.height);\n            transform = CGAffineTransformRotate(transform, M_PI);\n            break;\n            \n        case UIImageOrientationLeft:\n        case UIImageOrientationLeftMirrored:\n            transform = CGAffineTransformTranslate(transform, self.size.width, 0);\n            transform = CGAffineTransformRotate(transform, M_PI_2);\n            break;\n            \n        case UIImageOrientationRight:\n        case UIImageOrientationRightMirrored:\n            transform = CGAffineTransformTranslate(transform, 0, self.size.height);\n            transform = CGAffineTransformRotate(transform, -M_PI_2);\n            break;\n        case UIImageOrientationUp:\n        case UIImageOrientationUpMirrored:\n            break;\n    }\n    \n    switch (self.imageOrientation)\n    {\n        case UIImageOrientationUpMirrored:\n        case UIImageOrientationDownMirrored:\n            transform = CGAffineTransformTranslate(transform, self.size.width, 0);\n            transform = CGAffineTransformScale(transform, -1, 1);\n            break;\n            \n        case UIImageOrientationLeftMirrored:\n        case UIImageOrientationRightMirrored:\n            transform = CGAffineTransformTranslate(transform, self.size.height, 0);\n            transform = CGAffineTransformScale(transform, -1, 1);\n            break;\n        case UIImageOrientationUp:\n        case UIImageOrientationDown:\n        case UIImageOrientationLeft:\n        case UIImageOrientationRight:\n            break;\n    }\n    \n    // Now we draw the underlying CGImage into a new context, applying the transform\n    // calculated above.\n    CGContextRef ctx = CGBitmapContextCreate(NULL, self.size.width, self.size.height,\n                                             CGImageGetBitsPerComponent(self.CGImage), 0,\n                                             CGImageGetColorSpace(self.CGImage),\n                                             CGImageGetBitmapInfo(self.CGImage));\n    CGContextConcatCTM(ctx, transform);\n    \n    switch (self.imageOrientation)\n    {\n        case UIImageOrientationLeft:\n        case UIImageOrientationLeftMirrored:\n        case UIImageOrientationRight:\n        case UIImageOrientationRightMirrored:\n            CGContextDrawImage(ctx, CGRectMake(0,0,self.size.height,self.size.width), self.CGImage);\n            break;\n            \n        default:\n            CGContextDrawImage(ctx, CGRectMake(0,0,self.size.width,self.size.height), self.CGImage);\n            break;\n    }\n    \n    CGImageRef cgimg = CGBitmapContextCreateImage(ctx);\n    UIImage *img = [UIImage 
imageWithCGImage:cgimg];\n    CGContextRelease(ctx);\n    CGImageRelease(cgimg);\n    \n    return img;\n}\n\n/** 按给定的方向旋转图片 */\n- (UIImage*)rotate:(UIImageOrientation)orient\n{\n    CGRect bnds = CGRectZero;\n    UIImage* copy = nil;\n    CGContextRef ctxt = nil;\n    CGImageRef imag = self.CGImage;\n    CGRect rect = CGRectZero;\n    CGAffineTransform tran = CGAffineTransformIdentity;\n    \n    rect.size.width = CGImageGetWidth(imag);\n    rect.size.height = CGImageGetHeight(imag);\n    \n    bnds = rect;\n    \n    switch (orient)\n    {\n        case UIImageOrientationUp:\n            return self;\n            \n        case UIImageOrientationUpMirrored:\n            tran = CGAffineTransformMakeTranslation(rect.size.width, 0.0);\n            tran = CGAffineTransformScale(tran, -1.0, 1.0);\n            break;\n            \n        case UIImageOrientationDown:\n            tran = CGAffineTransformMakeTranslation(rect.size.width,\n                                                    rect.size.height);\n            tran = CGAffineTransformRotate(tran, M_PI);\n            break;\n            \n        case UIImageOrientationDownMirrored:\n            tran = CGAffineTransformMakeTranslation(0.0, rect.size.height);\n            tran = CGAffineTransformScale(tran, 1.0, -1.0);\n            break;\n            \n        case UIImageOrientationLeft:\n            bnds = swapWidthAndHeight(bnds);\n            tran = CGAffineTransformMakeTranslation(0.0, rect.size.width);\n            tran = CGAffineTransformRotate(tran, 3.0 * M_PI / 2.0);\n            break;\n            \n        case UIImageOrientationLeftMirrored:\n            bnds = swapWidthAndHeight(bnds);\n            tran = CGAffineTransformMakeTranslation(rect.size.height,\n                                                    rect.size.width);\n            tran = CGAffineTransformScale(tran, -1.0, 1.0);\n            tran = CGAffineTransformRotate(tran, 3.0 * M_PI / 2.0);\n            break;\n            \n        case UIImageOrientationRight:\n            bnds = swapWidthAndHeight(bnds);\n            tran = CGAffineTransformMakeTranslation(rect.size.height, 0.0);\n            tran = CGAffineTransformRotate(tran, M_PI / 2.0);\n            break;\n            \n        case UIImageOrientationRightMirrored:\n            bnds = swapWidthAndHeight(bnds);\n            tran = CGAffineTransformMakeScale(-1.0, 1.0);\n            tran = CGAffineTransformRotate(tran, M_PI / 2.0);\n            break;\n            \n        default:\n            return self;\n    }\n    \n    UIGraphicsBeginImageContext(bnds.size);\n    ctxt = UIGraphicsGetCurrentContext();\n    \n    switch (orient)\n    {\n        case UIImageOrientationLeft:\n        case UIImageOrientationLeftMirrored:\n        case UIImageOrientationRight:\n        case UIImageOrientationRightMirrored:\n            CGContextScaleCTM(ctxt, -1.0, 1.0);\n            CGContextTranslateCTM(ctxt, -rect.size.height, 0.0);\n            break;\n            \n        default:\n            CGContextScaleCTM(ctxt, 1.0, -1.0);\n            CGContextTranslateCTM(ctxt, 0.0, -rect.size.height);\n            break;\n    }\n    \n    CGContextConcatCTM(ctxt, tran);\n    CGContextDrawImage(UIGraphicsGetCurrentContext(), rect, imag);\n    \n    copy = UIGraphicsGetImageFromCurrentImageContext();\n    UIGraphicsEndImageContext();\n    \n    return copy;\n}\n\n/** 垂直翻转 */\n- (UIImage *)flipVertical\n{\n    return [self rotate:UIImageOrientationDownMirrored];\n}\n\n/** 水平翻转 */\n- (UIImage *)flipHorizontal\n{\n    return 
[self rotate:UIImageOrientationUpMirrored];\n}\n\n/** 将图片旋转弧度radians */\n- (UIImage *)imageRotatedByRadians:(CGFloat)radians\n{\n    // calculate the size of the rotated view's containing box for our drawing space\n    UIView *rotatedViewBox = [[UIView alloc] initWithFrame:CGRectMake(0,0,self.size.width, self.size.height)];\n    CGAffineTransform t = CGAffineTransformMakeRotation(radians);\n    rotatedViewBox.transform = t;\n    CGSize rotatedSize = rotatedViewBox.frame.size;\n    \n    // Create the bitmap context\n    UIGraphicsBeginImageContext(rotatedSize);\n    CGContextRef bitmap = UIGraphicsGetCurrentContext();\n    \n    // Move the origin to the middle of the image so we will rotate and scale around the center.\n    CGContextTranslateCTM(bitmap, rotatedSize.width/2, rotatedSize.height/2);\n    \n    //   // Rotate the image context\n    CGContextRotateCTM(bitmap, radians);\n    \n    // Now, draw the rotated/scaled image into the context\n    CGContextScaleCTM(bitmap, 1.0, -1.0);\n    CGContextDrawImage(bitmap, CGRectMake(-self.size.width / 2, -self.size.height / 2, self.size.width, self.size.height), [self CGImage]);\n    \n    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();\n    UIGraphicsEndImageContext();\n    \n    return newImage;\n}\n\n/** 将图片旋转角度degrees */\n- (UIImage *)imageRotatedByDegrees:(CGFloat)degrees\n{\n    return [self imageRotatedByRadians:kDegreesToRadian(degrees)];\n}\n\n/** 交换宽和高 */\nstatic CGRect swapWidthAndHeight(CGRect rect)\n{\n    CGFloat swap = rect.size.width;\n    \n    rect.size.width = rect.size.height;\n    rect.size.height = swap;\n    \n    return rect;\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Support/UIImage+Utils.h",
    "content": "//\n//  UIImage+Utils.h\n//  iOSCodeProject\n//\n//  Created by Fox on 14-7-18.\n//  Copyright (c) 2014年 翔傲信息科技（上海）有限公司. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n\n/**\n *  UIImage扩展类别\n */\n@interface UIImage (Utils)\n\n/**\n *  截图部分图片\n *\n *  @param rect 截取区域\n *\n *  @return 结果图片\n */\n- (UIImage *)subImageAtRect:(CGRect)rect;\n\n/**\n *  沿着一定弧度旋转\n *\n *  @param radians 旋转的弧度\n *\n *  @return 结果图片\n */\n- (UIImage *)imageRotatedByRadians:(CGFloat)radians;\n\n/**\n *  沿着一定的角度旋转\n *\n *  @param degrees 旋转的角度\n *\n *  @return 结果图片\n */\n- (UIImage *)imageRotatedByDegrees:(CGFloat)degrees;\n\n/**\n *  等比例压缩图片\n *\n *  @param size 压缩到的大小\n *\n *  @return 结果图片\n */\n- (UIImage *)imageScaledToSize:(CGSize)size;\n\n\n/**\n *  合并图片\n *\n *  @param image2 <#image2 description#>\n *\n *  @return 合并后的image\n */\n- (UIImage *)addImagetoImage:(UIImage *)image2;\n\n\n/**\n *  图片对调\n *\n *  @param imageSource <#imageSource description#>\n *\n *  @return <#return value description#>\n */\n- (UIImage *)imageMirror;\n\n\n- (UIImage *)addImagetoImage:(UIImage *)image2 image1Frame:(CGRect)image1Frame image2Frame:(CGRect)image2Frame;\n\n//view转image\n- (UIImage *)getImageFromView:(UIView *)theView;\n\n\n/**\n *  拍照后图片  调整位置\n *\n\n *  @return <#return value description#>\n */\n- (UIImage *)fixTakePictureOrientation;\n\n/**\n *  从视频中获取一张缩略图，\n *\n    videoURL 本地路径；\n    second 截图的时间 秒\n \n *  @return 一张截图\n \n 示例：\n \n NSString *videoURL =[[NSBundle mainBundle] pathForResource:@\"test\" ofType:@\"mp4\"];\n UIImage *thumImg = [UIImage assetGetVideoThumImage:videoURL thumSecond:11];\n self.showImageView.image = thumImg;\n\n */\n\n+ (UIImage *)assetGetVideoThumImage:(NSString *)videoURL thumSecond:(CGFloat)second;\n\n/**\n *  UIColor 转 UIImage\n */\n\n+ (UIImage *)createImageWithColor:(UIColor *)color;\n\n/**\n *  调整图片尺寸和大小\n *\n *  @param sourceImage  原始图片\n *  @param maxImageSize 新图片最大尺寸\n *  @param maxSize      新图片最大存储大小\n *\n *  @return 新图片imageData\n */\n+ (NSData *)reSizeImageData:(UIImage *)sourceImage maxImageSize:(CGFloat)maxImageSize maxSizeWithKB:(CGFloat)maxSize;\n\n- (UIImage *)imageWithImageSimple:(UIImage *)image scaledToSize:(CGSize)newSize;\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Support/UIImage+Utils.m",
    "content": "//\n//  UIImage+Utils.m\n//  iOSCodeProject\n//\n//  Created by Fox on 14-7-18.\n//  Copyright (c) 2014年 翔傲信息科技（上海）有限公司. All rights reserved.\n//\n\n#import \"UIImage+Utils.h\"\n#import <AVFoundation/AVFoundation.h>\n\nCGFloat TTIDegreesToRadians(CGFloat degrees) { return degrees * M_PI / 180; };\nCGFloat TTIRadiansToDegrees(CGFloat radians) { return radians * 180 / M_PI; };\n@implementation UIImage (Utils)\n\n- (UIImage *)subImageAtRect:(CGRect)rect\n{\n    CGImageRef subImageRef = CGImageCreateWithImageInRect(self.CGImage, rect);\n    CGRect smallBounds = CGRectMake(0, 0, CGImageGetWidth(subImageRef),\n                                    CGImageGetHeight(subImageRef));\n\n    UIGraphicsBeginImageContext(smallBounds.size);\n    CGContextRef context = UIGraphicsGetCurrentContext();\n    CGContextDrawImage(context, smallBounds, subImageRef);\n\n    UIImage *smallImage = [UIImage imageWithCGImage:subImageRef scale:1.0f\n                                        orientation:self.imageOrientation];\n\n    UIGraphicsEndImageContext();\n    CFRelease(subImageRef);\n    return smallImage;\n}\n\n- (UIImage *)imageRotatedByRadians:(CGFloat)radians\n{\n    //沿着一定弧度旋转\n    return [self imageRotatedByDegrees:TTIRadiansToDegrees(radians)];\n}\n\n- (UIImage *)imageRotatedByDegrees:(CGFloat)degrees\n{\n    UIView *rotatedViewBox = [[UIView alloc] initWithFrame:CGRectMake(0, 0, self.size.width, self.size.height)];\n    CGAffineTransform t = CGAffineTransformMakeRotation(TTIDegreesToRadians(degrees));\n    rotatedViewBox.transform = t;\n    CGSize rotatedSize = rotatedViewBox.frame.size;\n\n    // Create the bitmap context\n    UIGraphicsBeginImageContext(rotatedSize);\n    CGContextRef bitmap = UIGraphicsGetCurrentContext();\n\n    // Move the origin to the middle of the image so we will rotate and scale around the center.\n    CGContextTranslateCTM(bitmap, rotatedSize.width / 2, rotatedSize.height / 2);\n\n    //   // Rotate the image context\n    CGContextRotateCTM(bitmap, TTIDegreesToRadians(degrees));\n\n    // Now, draw the rotated/scaled image into the context\n    CGContextScaleCTM(bitmap, 1.0, -1.0);\n    CGContextDrawImage(bitmap, CGRectMake(-self.size.width / 2, -self.size.height / 2, self.size.width, self.size.height), [self CGImage]);\n\n    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();\n    UIGraphicsEndImageContext();\n    return newImage;\n}\n\n- (UIImage *)imageScaledToSize:(CGSize)size\n{\n    CGFloat width = CGImageGetWidth(self.CGImage);\n    CGFloat height = CGImageGetHeight(self.CGImage);\n\n    float verticalRadio = size.height * 1.0 / height;\n    float horizontalRadio = size.width * 1.0 / width;\n\n    float radio = 1;\n    if (verticalRadio > 1 && horizontalRadio > 1) {\n        radio = verticalRadio > horizontalRadio ? horizontalRadio : verticalRadio;\n    } else {\n        radio = verticalRadio < horizontalRadio ? 
verticalRadio : horizontalRadio;\n    }\n\n    width = width * radio;\n    height = height * radio;\n\n    //\tint xPos = (size.width - width)/2;\n    //\tint yPos = (size.height-height)/2;\n\n    // 创建一个bitmap的context\n    // 并把它设置成为当前正在使用的context\n    UIGraphicsBeginImageContext(size);\n\n    // 绘制改变大小的图片\n    [self drawInRect:CGRectMake(0, 0, width, height)];\n\n    // 从当前context中创建一个改变大小后的图片\n    UIImage *scaledImage = UIGraphicsGetImageFromCurrentImageContext();\n\n    // 使当前的context出堆栈\n    UIGraphicsEndImageContext();\n\n    // 返回新的改变大小后的图片\n    return scaledImage;\n}\n\n\n- (UIImage *)addImagetoImage:(UIImage *)image2\n{\n    UIGraphicsBeginImageContext(self.size);\n\n    // Draw image1\n    [self drawInRect:CGRectMake(0, 0, self.size.width, self.size.height)];\n\n    // Draw image2\n    [image2 drawInRect:CGRectMake(0, 0, image2.size.width, image2.size.height)];\n\n    UIImage *resultingImage = UIGraphicsGetImageFromCurrentImageContext();\n\n    UIGraphicsEndImageContext();\n\n    return resultingImage;\n}\n\n- (UIImage *)addImagetoImage:(UIImage *)image2 image1Frame:(CGRect)image1Frame image2Frame:(CGRect)image2Frame\n{\n    UIGraphicsBeginImageContext(self.size);\n\n    // Draw image1\n    [self drawInRect:image1Frame];\n\n    [image2 drawInRect:image2Frame];\n\n    UIImage *resultingImage = UIGraphicsGetImageFromCurrentImageContext();\n\n    UIGraphicsEndImageContext();\n\n    return resultingImage;\n}\n\n\n- (UIImage *)imageMirror\n{\n    CGRect rect = CGRectMake(0, 0, self.size.width, self.size.height); //创建矩形框\n    UIGraphicsBeginImageContext(rect.size);                            //根据size大小创建一个基于位图的图形上下文\n    CGContextRef currentContext = UIGraphicsGetCurrentContext();       //获取当前quartz 2d绘图环境\n    CGContextClipToRect(currentContext, rect);                         //设置当前绘图环境到矩形框\n\n    //顺时针旋转\n    CGContextRotateCTM(currentContext, M_PI);\n    CGContextTranslateCTM(currentContext, -rect.size.width, -rect.size.height);\n\n    CGContextDrawImage(currentContext, rect, self.CGImage); //绘图\n\n    //[image drawInRect:rect];\n\n    UIImage *cropped = UIGraphicsGetImageFromCurrentImageContext(); //获得图片\n    UIGraphicsEndImageContext();                                    //从当前堆栈中删除quartz 2d绘图环境\n\n    return cropped;\n}\n\n\n//将view转为image\n- (UIImage *)getImageFromView:(UIView *)theView\n\n{\n    //第一个参数表示区域大小。第二个参数表示是否是非透明的。如果需要显示半透明效果，需要传NO，否则传YES。第三个参数就是屏幕密度了\n    UIGraphicsBeginImageContextWithOptions(theView.bounds.size, NO, 0); // theView.layer.contentsScale\n    // UIGraphicsBeginImageContext(theView.bounds.size);\n    //  [theView drawAtPoint:CGPointZero];\n\n    //设置图片背景色\n    CGContextSetRGBStrokeColor(UIGraphicsGetCurrentContext(), 0, 0, 0, 0);\n\n    [theView.layer renderInContext:UIGraphicsGetCurrentContext()];\n    // CGContextRestoreGState(UIGraphicsGetCurrentContext());\n\n    // 从当前context中创建一个改变大小后的图片\n    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();\n\n    UIGraphicsEndImageContext();\n\n    return image;\n}\n\n\n//把通过相机获取到的图片（如果该图片大于2M，会自动旋转90度；否则不旋转），直接进行操作, 比如裁剪, 缩放, 则会把原图片向右旋转90度。\n//用相机拍摄出来的照片含有EXIF信息，UIImage的imageOrientation属性指的就是EXIF中的orientation信息。\n//如果我们忽略orientation信息，而直接对照片进行像素处理或者drawInRect等操作，得到的结果是翻转或者旋转90之后的样子。这是因为我们执行像素处理或者drawInRect等操作之后，imageOrientaion信息被删除了，imageOrientaion被重设为0，造成照片内容和imageOrientaion不匹配。\n//所以，在对照片进行处理之前，先将照片旋转到正确的方向，并且返回的imageOrientaion为0。\n- (UIImage *)fixTakePictureOrientation //:(UIImage *)aImage\n{\n    // No-op if the orientation is already correct\n    if (self.imageOrientation == 
UIImageOrientationUp)\n        return self;\n\n    // We need to calculate the proper transformation to make the image upright.\n    // We do it in 2 steps: Rotate if Left/Right/Down, and then flip if Mirrored.\n    CGAffineTransform transform = CGAffineTransformIdentity;\n\n    switch (self.imageOrientation) {\n        case UIImageOrientationDown:\n        case UIImageOrientationDownMirrored:\n            transform = CGAffineTransformTranslate(transform, self.size.width, self.size.height);\n            transform = CGAffineTransformRotate(transform, M_PI);\n            break;\n\n        case UIImageOrientationLeft:\n        case UIImageOrientationLeftMirrored:\n            transform = CGAffineTransformTranslate(transform, self.size.width, 0);\n            transform = CGAffineTransformRotate(transform, M_PI_2);\n            break;\n\n        case UIImageOrientationRight:\n        case UIImageOrientationRightMirrored:\n            transform = CGAffineTransformTranslate(transform, 0, self.size.height);\n            transform = CGAffineTransformRotate(transform, -M_PI_2);\n            break;\n        default:\n            break;\n    }\n\n    switch (self.imageOrientation) {\n        case UIImageOrientationUpMirrored:\n        case UIImageOrientationDownMirrored:\n            transform = CGAffineTransformTranslate(transform, self.size.width, 0);\n            transform = CGAffineTransformScale(transform, -1, 1);\n            break;\n\n        case UIImageOrientationLeftMirrored:\n        case UIImageOrientationRightMirrored:\n            transform = CGAffineTransformTranslate(transform, self.size.height, 0);\n            transform = CGAffineTransformScale(transform, -1, 1);\n            break;\n        default:\n            break;\n    }\n\n    // Now we draw the underlying CGImage into a new context, applying the transform\n    // calculated above.\n    CGContextRef ctx = CGBitmapContextCreate(NULL, self.size.width, self.size.height,\n                                             CGImageGetBitsPerComponent(self.CGImage), 0,\n                                             CGImageGetColorSpace(self.CGImage),\n                                             CGImageGetBitmapInfo(self.CGImage));\n    CGContextConcatCTM(ctx, transform);\n    switch (self.imageOrientation) {\n        case UIImageOrientationLeft:\n        case UIImageOrientationLeftMirrored:\n        case UIImageOrientationRight:\n        case UIImageOrientationRightMirrored:\n            // Grr...\n            CGContextDrawImage(ctx, CGRectMake(0, 0, self.size.height, self.size.width), self.CGImage);\n            break;\n\n        default:\n            CGContextDrawImage(ctx, CGRectMake(0, 0, self.size.width, self.size.height), self.CGImage);\n            break;\n    }\n\n    // And now we just create a new UIImage from the drawing context\n    CGImageRef cgimg = CGBitmapContextCreateImage(ctx);\n    UIImage *img = [UIImage imageWithCGImage:cgimg];\n    CGContextRelease(ctx);\n    CGImageRelease(cgimg);\n    return img;\n}\n\n\n+ (UIImage *)assetGetVideoThumImage:(NSString *)videoURL thumSecond:(CGFloat)second\n{\n    //  NSString *videoURL =[[NSBundle mainBundle] pathForResource:@\"test\" ofType:@\"mp4\"];\n    AVURLAsset *urlSet = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:videoURL] options:nil];\n    AVAssetImageGenerator *imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:urlSet];\n\n    NSError *error = nil;\n    //    CMTimeMake(a,b)    a当前第几帧, b每秒钟多少帧.当前播放时间a/b\n    //    CMTimeMakeWithSeconds(a,b)    
a当前时间,b每秒钟多少帧.\n    CMTime time = CMTimeMakeWithSeconds(second, 15);\n    //  CMTime time = CMTimeMake(second,60);\n    CMTime actucalTime; //缩略图实际生成的时间\n    CGImageRef cgImage = [imageGenerator copyCGImageAtTime:time actualTime:&actucalTime error:&error];\n    if (error) {\n        NSLog(@\"截取视频图片失败:%@\", error.localizedDescription);\n    }\n    CMTimeShow(actucalTime);\n    UIImage *image = [UIImage imageWithCGImage:cgImage];\n\n    //保存到相册\n    //  UIImageWriteToSavedPhotosAlbum(image,nil, nil,nil);\n\n    CGImageRelease(cgImage);\n\n    NSLog(@\"视频截取成功\");\n\n    return image;\n}\n\n+ (UIImage *)createImageWithColor:(UIColor *)color\n{\n    CGRect rect = CGRectMake(0, 0, 1, 1);\n    UIGraphicsBeginImageContext(rect.size);\n    CGContextRef context = UIGraphicsGetCurrentContext();\n    CGContextSetFillColorWithColor(context, [color CGColor]);\n    CGContextFillRect(context, rect);\n    UIImage *theImage = UIGraphicsGetImageFromCurrentImageContext();\n    UIGraphicsEndImageContext();\n    return theImage;\n}\n\n+ (UIImage *)imageWithImageSimple:(UIImage *)image scaledToSize:(CGSize)newSize\n{\n    UIGraphicsBeginImageContext(newSize);\n    [image drawInRect:CGRectMake(0, 0, newSize.width, newSize.height)];\n    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();\n\n    UIGraphicsEndImageContext();\n    return newImage;\n}\n\n+ (NSData *)reSizeImageData:(UIImage *)sourceImage maxImageSize:(CGFloat)maxImageSize maxSizeWithKB:(CGFloat)maxSize\n{\n    if (maxSize <= 0.0) maxSize = 1024.0;\n    if (maxImageSize <= 0.0) maxImageSize = 5120.0;\n\n    //先调整分辨率\n    CGSize newSize = CGSizeMake(sourceImage.size.width, sourceImage.size.height);\n\n    CGFloat tempHeight = newSize.height / maxImageSize;\n    CGFloat tempWidth = newSize.width / maxImageSize;\n\n    if (tempWidth > 1.0 && tempWidth > tempHeight) {\n        newSize = CGSizeMake(sourceImage.size.width / tempWidth, sourceImage.size.height / tempWidth);\n    } else if (tempHeight > 1.0 && tempWidth < tempHeight) {\n        newSize = CGSizeMake(sourceImage.size.width / tempHeight, sourceImage.size.height / tempHeight);\n    }\n\n    UIGraphicsBeginImageContext(newSize);\n    [sourceImage drawInRect:CGRectMake(0, 0, newSize.width, newSize.height)];\n    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();\n    UIGraphicsEndImageContext();\n\n    //调整大小\n    NSData *imageData = UIImageJPEGRepresentation(newImage, 1.0);\n    CGFloat sizeOriginKB = imageData.length / 1024.0;\n\n    CGFloat resizeRate = 0.9;\n    while (sizeOriginKB > maxSize && resizeRate > 0.1) {\n        imageData = UIImageJPEGRepresentation(newImage, resizeRate);\n        sizeOriginKB = imageData.length / 1024.0;\n        resizeRate -= 0.35;\n    }\n\n    return imageData;\n}\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Tool/DDMediaEditorManage.h",
    "content": "//\n//  DDMediaEditorManage.h\n//  WEVideoEffect\n//\n//  Created by 刘海东 on 2018/11/5.\n//  Copyright © 2018 刘海东. All rights reserved.\n//\n\n/** 视频编辑管理 */\n\n#import <Foundation/Foundation.h>\n#import <GPURenderKit/GPURenderKit.h>\n\nNS_ASSUME_NONNULL_BEGIN\n\n@protocol DDMediaEditorManageDelegate <NSObject>\n@optional\n/** 播放暂停 */\n- (void)videoEditoPause;\n@end\n\n@interface DDMediaEditorManage : NSObject\n\n@property (nonatomic, weak) id <DDMediaEditorManageDelegate> delegate;\n@property (nonatomic, strong) NSURL *videoUrl;\n@property (nonatomic, strong) GPUImageMovie *movie;\n@property (nonatomic, strong) AVPlayer *videoPlayer;\n@property (nonatomic, strong) AVPlayerItem *videoPlayerItem;\n@property (nonatomic, strong) AVPlayer *audioPlayer;\n@property (nonatomic, strong) AVPlayerItem *audioPlayerItem;\n@property (nonatomic, strong) GPUImageView *glMediaView;\n@property (nonatomic, copy) void (^audioCMTimeCallBack)(CMTime currentTime,CMTime durationTime);\n/** 视频播放进度回调 */\n@property (nonatomic, copy) void (^videoCMTimeCallBack)(CMTime currentTime,CMTime durationTime);\n/** 视频播放结束 */\n@property (nonatomic, copy) void (^videoPlayEndCallBack)(void);\n/** 背景音乐播放结束 */\n@property (nonatomic, copy) void (^audioPlayEndCallBack)(void);\n/** 视频配置完成可以播放的回调 */\n@property (nonatomic, copy) void (^videoAVPlayerItemStatusReadyToPlayCallBack)(void);\n\n- (instancetype)initWithUrl:(NSURL *)url;\n/** 视频是否可播放 */\n- (BOOL)getVideoStatusReadyToPlay;\n/** 播放 */\n- (void)playVideo;\n/**重新播放**/\n- (void)replayVideo;\n/** 暂停 */\n- (void)pauseVideo;\n/** 跳到指定时间 */\n- (void)videoSeekToTime:(CMTime)time completionHandler:(void(^)(BOOL finished))completionHandler;\n/** 添加音乐 */\n- (void)addAudioPath:(NSURL *)audioPath;\n/** 移除音乐 */\n- (void)removeMusic;\n/** 播放音乐 */\n- (void)playMusic;\n/** 暂停音乐 */\n- (void)pauseMusic;\n/** 音乐跳到指定时间 */\n- (void)audioSeekToTime:(CMTime)time;\n/** 视频声音大小 */\n- (void)adjustVolumeForVideo:(float)videoVolume;\n/** 音乐声音大小 */\n- (void)adjustVolumeForMusic:(float)musicVolume;\n/** 获取视频时长 */\n- (float)getVideoDuration;\n/** 获取音乐时长 */\n- (float)getAudioDuration;\n/** 获取视频时长 */\n- (CMTime)getVideoDurationTime;\n/** 获取音乐时长 */\n- (CMTime)getAudioDurationTime;\n/** 是否播放中 */\n- (BOOL)isPlaying;\n/** 释放时候需要调用 */\n- (void)removeAllObject;\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/Tool/DDMediaEditorManage.m",
    "content": "//\n//  DDMediaEditorManage.m\n//  WEVideoEffect\n//\n//  Created by 刘海东 on 2018/11/5.\n//  Copyright © 2018 刘海东. All rights reserved.\n//\n\n#import \"DDMediaEditorManage.h\"\n\n\n@interface DDMediaEditorManage ()\n\n@property (nonatomic, strong) UIImage *lutImage;\n@property (nonatomic, assign) BOOL playIng;\n@property (nonatomic, assign) BOOL videoStatusReadyToPlayBool;\n@property (nonatomic, assign) BOOL isVideoPlayEndCallBackNowBool;\n@end\n\n@implementation DDMediaEditorManage\n\n- (instancetype)initWithUrl:(NSURL *)url\n{\n    self = [super init];\n    if (self) {\n        _videoUrl = url;\n        _playIng = NO;\n        _videoStatusReadyToPlayBool = NO;\n        _isVideoPlayEndCallBackNowBool = NO;\n    }\n    return self;\n}\n\n#pragma mark lazy\nstatic NSString * const VideoPlayerItemStatusContext = @\"VideoPlayerItemStatusContext\";\n- (AVPlayerItem *)playerItem\n{\n    if (!_videoPlayerItem)\n    {\n        _videoPlayerItem = [[AVPlayerItem alloc]initWithURL:self.videoUrl];\n        [_videoPlayerItem addObserver:self forKeyPath:@\"status\" options:0 context:(__bridge void * _Nullable)VideoPlayerItemStatusContext];\n        \n    }\n    return _videoPlayerItem;\n}\n\n- (AVPlayer *)videoPlayer\n{\n    if (!_videoPlayer)\n    {\n        _videoPlayer =[[AVPlayer alloc]initWithPlayerItem:self.playerItem];\n        \n        @weakify(self);\n        [_videoPlayer addPeriodicTimeObserverForInterval:CMTimeMake(1, self.videoPlayerItem.asset.duration.timescale) queue:nil usingBlock:^(CMTime time) {\n            @strongify(self);\n            \n            if (self.videoCMTimeCallBack) {\n                self.videoCMTimeCallBack(self.videoPlayerItem.currentTime, self.videoPlayerItem.asset.duration);\n            }\n            \n            if (CMTimeGetSeconds(time) >= CMTimeGetSeconds(self.videoPlayerItem.asset.duration))\n            {\n                if (self.videoPlayEndCallBack && self.isVideoPlayEndCallBackNowBool == NO)\n                {\n                    self.playIng = NO;\n                    self.videoPlayEndCallBack();\n                    self.isVideoPlayEndCallBackNowBool = YES;\n                    @weakify(self);\n                    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.5 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{\n                        @strongify(self);\n                        self.isVideoPlayEndCallBackNowBool = NO;\n                    });\n                }\n            }else\n            {\n                //                NSLog(@\"播放中\");\n            }\n        }];\n        \n    }\n    return _videoPlayer;\n}\n\n\n- (GPUImageMovie *)movie\n{\n    if (!_movie)\n    {\n        _movie =[[GPUImageMovie alloc]initWithPlayerItem:self.playerItem];\n        _movie.runBenchmark =NO;\n        _movie.playAtActualSpeed =YES;\n    }\n    return _movie;\n}\n\n\n\n- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary<NSKeyValueChangeKey,id> *)change context:(void *)context\n{\n    if (context == (__bridge void * _Nullable)(VideoPlayerItemStatusContext))\n    {\n        if ([keyPath isEqualToString:@\"status\"]) {\n            AVPlayerItem * item = (AVPlayerItem *)object;\n            if (item.status == AVPlayerItemStatusReadyToPlay)\n            { //准备好播放\n                NSLog(@\"AVPlayerItemStatusReadyToPlay--->\");\n                if (self.videoAVPlayerItemStatusReadyToPlayCallBack &&self.videoStatusReadyToPlayBool == NO)\n                {\n                    
self.videoAVPlayerItemStatusReadyToPlayCallBack();\n                }\n                self.videoStatusReadyToPlayBool = YES;\n            }else if (item.status == AVPlayerItemStatusFailed){ //失败\n                NSLog(@\"AVPlayerItemStatusFailed--->\");\n            }\n        }\n    }\n}\n\n#pragma mark Public methods\n/** 视频是否可播放 */\n- (BOOL)getVideoStatusReadyToPlay\n{\n    return self.videoStatusReadyToPlayBool;\n}\n\n/** 播放 */\n- (void)playVideo\n{\n    NSLog(@\"播放\");\n    if (self.playIng)\n    {\n        return;\n    }\n    \n    [self.videoPlayer play];\n    [self.movie startProcessing];\n    self.playIng = YES;\n    if (_audioPlayer && _audioPlayerItem)\n    {\n        [_audioPlayer play];\n    }\n}\n\n/**重新播放**/\n- (void)replayVideo\n{\n    @weakify(self);\n    [self videoSeekToTime:kCMTimeZero completionHandler:^(BOOL finished) {\n        @strongify(self);\n        [self playVideo];\n    }];\n}\n\n/** 暂停 */\n- (void)pauseVideo\n{\n    NSLog(@\"暂停\");\n    \n    [self.videoPlayer pause];\n    [self.movie endProcessing];\n    self.playIng = NO;\n    if (_audioPlayer && _audioPlayerItem)\n    {\n        [_audioPlayer pause];\n    }\n}\n\n/** 跳到指定时间 */\n- (void)videoSeekToTime:(CMTime)time completionHandler:(void(^)(BOOL finished))completionHandler\n{\n    NSLog(@\"显示视频预览图\");\n    CMTime seekTimeInProgress = time;\n    if (self.videoPlayerItem.status == AVPlayerItemStatusReadyToPlay)\n    {\n        NSLog(@\"AVPlayerItemStatusReadyToPlay\");\n        [self.videoPlayer pause];\n        self.playIng = NO;\n        [self.videoPlayer seekToTime:seekTimeInProgress\n                     toleranceBefore:kCMTimeZero\n                      toleranceAfter:kCMTimeZero\n                   completionHandler:^(BOOL finished) {\n                       \n                       if (completionHandler) {\n                           completionHandler(finished);\n                       }\n                   }];\n        [self.movie endProcessing];\n    }else if (self.videoPlayerItem.status == AVPlayerItemStatusUnknown)\n    {\n        \n        NSLog(@\"AVPlayerItemStatusUnknown\");\n    }else\n    {\n        NSLog(@\"AVPlayerItemStatusFailed\");\n    }\n}\n\n/** 添加音乐 */\n- (void)addAudioPath:(NSURL *)audioPath\n{\n    \n    _audioPlayer = nil;\n    _audioPlayerItem = nil;\n    self.audioPlayer = [[AVPlayer alloc ]init];\n    self.audioPlayerItem =[AVPlayerItem playerItemWithURL:audioPath];\n    [self.audioPlayer replaceCurrentItemWithPlayerItem:self.audioPlayerItem];\n    [self.audioPlayer play];\n    @weakify(self);\n    [self.audioPlayer addPeriodicTimeObserverForInterval:CMTimeMake(1, self.audioPlayerItem.asset.duration.timescale) queue:nil usingBlock:^(CMTime time) {\n        @strongify(self);\n        if (time.value == self.audioPlayerItem.asset.duration.value)\n        {\n            if (self.audioPlayEndCallBack)\n            {\n                self.audioPlayEndCallBack();\n            }\n        }else\n        {\n        }\n        \n        if (self.audioCMTimeCallBack && self.audioPlayerItem)\n        {\n            NSLog(@\"<<%f\",CMTimeGetSeconds(self.audioPlayerItem.currentTime));\n            self.audioCMTimeCallBack(self.audioPlayerItem.currentTime, self.audioPlayerItem.duration);\n        }\n    }];\n    \n}\n/** 移除音乐 */\n- (void)removeMusic\n{\n    [self pauseMusic];\n    _audioPlayer = nil;\n    _audioPlayerItem = nil;\n}\n\n/** 播放音乐 */\n- (void)playMusic\n{\n    if (_audioPlayer && _audioPlayerItem)\n    {\n        [_audioPlayer play];\n    }\n}\n/** 暂停音乐 */\n- 
(void)pauseMusic\n{\n    if (_audioPlayer && _audioPlayerItem)\n    {\n        [_audioPlayer pause];\n    }\n}\n\n/** 音乐跳到指定时间 */\n- (void)audioSeekToTime:(CMTime)time\n{\n    if (_audioPlayer && _audioPlayerItem)\n    {\n        [self.audioPlayerItem seekToTime:time];\n    }\n}\n\n/** 视频声音大小 */\n- (void)adjustVolumeForVideo:(float)videoVolume\n{\n    if (videoVolume>=0&&videoVolume<=1.0)\n    {\n        _videoPlayer.volume = videoVolume;\n    }\n}\n/** 音乐声音大小 */\n- (void)adjustVolumeForMusic:(float)musicVolume\n{\n    if (musicVolume>=0&&musicVolume<=1.0)\n    {\n        _audioPlayer.volume = musicVolume;\n    }\n}\n\n/** 获取视频时长 */\n- (float)getVideoDuration\n{\n    return (float)CMTimeGetSeconds(self.videoPlayerItem.asset.duration);\n}\n\n/** 获取音乐时长 */\n- (float)getAudioDuration\n{\n    return (float)CMTimeGetSeconds(self.audioPlayerItem.asset.duration);\n}\n\n/** 获取视频时长 */\n- (CMTime)getVideoDurationTime\n{\n    return self.videoPlayerItem.asset.duration;\n}\n\n/** 获取音乐时长 */\n- (CMTime)getAudioDurationTime\n{\n    return self.audioPlayerItem.asset.duration;\n}\n/** 是否播放中 */\n- (BOOL)isPlaying{\n    return _playIng;\n}\n\n\n#pragma mark privatelyFunc\n/** 释放时候需要调用 */\n- (void)removeAllObject\n{\n    [_movie cancelProcessing];\n    [_movie endProcessing];\n    [_movie removeAllTargets];\n    _delegate = nil;\n    _movie = nil;\n    _lutImage = nil;\n    [_videoPlayer pause];\n    [_videoPlayerItem removeObserver:self forKeyPath:@\"status\"];\n    [_videoPlayerItem cancelPendingSeeks];\n    [_videoPlayerItem.asset cancelLoading];\n    [_videoPlayer.currentItem cancelPendingSeeks];\n    [_videoPlayer.currentItem.asset cancelLoading];\n    _videoPlayer = nil;\n    _videoPlayerItem = nil;\n    \n    [_audioPlayer pause];\n    [_audioPlayerItem cancelPendingSeeks];\n    [_audioPlayerItem.asset cancelLoading];\n    [_audioPlayer.currentItem cancelPendingSeeks];\n    [_audioPlayer.currentItem.asset cancelLoading];\n    _audioPlayer = nil;\n    _audioPlayerItem = nil;\n    _videoAVPlayerItemStatusReadyToPlayCallBack = nil;\n    _videoPlayEndCallBack = nil;\n    \n    [[GPUImageContext sharedImageProcessingContext].framebufferCache purgeAllUnassignedFramebuffers];\n    NSLog(@\"DDMediaEditorManage------->removeAllObject\");\n    \n}\n\n- (void)dealloc\n{\n    [self removeAllObject];\n    NSLog(@\"DDMediaEditorManage --->delloc 释放了\");\n}\n\n- (BOOL)willDealloc\n{\n    __weak id weakSelf = self;\n    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(3*NSEC_PER_SEC)), dispatch_get_main_queue(), ^{\n        [weakSelf assertNotDealloc];\n    });\n    return YES;\n}\n\n- (void)assertNotDealloc\n{\n    NSAssert(NO, @\"\");\n}\n\n\n@end\n\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/VideoCode/DDVideoEcodeManage.h",
    "content": "//\n//  DDVideoEcodeManage.h\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/4/27.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import <Foundation/Foundation.h>\n\nNS_ASSUME_NONNULL_BEGIN\n\n@interface DDVideoEcodeManage : NSObject\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/VideoCode/DDVideoEcodeManage.m",
    "content": "//\n//  DDVideoEcodeManage.m\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/4/27.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import \"DDVideoEcodeManage.h\"\n#import <VideoToolbox/VideoToolbox.h>\n\n@interface DDVideoEcodeManage ()\n\n@end\n\n@implementation DDVideoEcodeManage\n\n- (instancetype)init\n{\n    self = [super init];\n    if (self) {\n        \n    }\n    return self;\n}\n\n\n//- (void)setup\n\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/VideoCode/VideoEcodeViewController.h",
    "content": "//\n//  VideoEcodeViewController.h\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/4/27.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import \"BaseViewController.h\"\n\nNS_ASSUME_NONNULL_BEGIN\n\n@interface VideoEcodeViewController : BaseViewController\n\n@end\n\nNS_ASSUME_NONNULL_END\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/VideoCode/VideoEcodeViewController.m",
    "content": "//\n//  VideoEcodeViewController.m\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/4/27.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import \"VideoEcodeViewController.h\"\n#import \"DDVideoEcodeManage.h\"\n\n@interface VideoEcodeViewController ()<GPUImageVideoCameraDelegate>\n@property (nonatomic, strong) GPUImageVideoCamera *videoCamera;\n@property (nonatomic, strong) GPUImageView *preview;\n\n@end\n\n@implementation VideoEcodeViewController\n\n- (void)viewDidLoad {\n    [super viewDidLoad];\n    \n    [self.videoCamera addTarget:self.preview];\n    \n    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.2 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{\n        [self.videoCamera startCameraCapture];\n    });\n    \n    \n}\n\n#pragma mark ------------------------------------------------------ lazy ------------------------------------------------------\n- (GPUImageView *)preview{\n    if (!_preview) {\n        _preview = [[GPUImageView alloc] initWithFrame:self.view.bounds];\n        _preview.layer.contentsScale = 2.0;\n        _preview.backgroundColor = [[UIColor blackColor] colorWithAlphaComponent:0.8];\n        [_preview setBackgroundColorRed:0.2 green:0.2 blue:0.2 alpha:1.0];\n        [self.view addSubview:_preview];\n    }\n    return _preview;\n}\n- (GPUImageVideoCamera *)videoCamera\n{\n    if (!_videoCamera)\n    {\n        _videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1280x720 cameraPosition:AVCaptureDevicePositionBack];\n        _videoCamera.runBenchmark = NO;\n        _videoCamera.horizontallyMirrorFrontFacingCamera = YES;\n        _videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;\n        _videoCamera.delegate = self;\n        [_videoCamera startCameraCapture];\n    }\n    return _videoCamera;\n}\n\n\n\n\n\n\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo/main.m",
    "content": "//\n//  main.m\n//  GPURenderKitDemo\n//\n//  Created by 刘海东 on 2019/2/22.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import <UIKit/UIKit.h>\n#import \"AppDelegate.h\"\n\nint main(int argc, char * argv[]) {\n    @autoreleasepool {\n        return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));\n    }\n}\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo.xcodeproj/project.pbxproj",
    "content": "// !$*UTF8*$!\n{\n\tarchiveVersion = 1;\n\tclasses = {\n\t};\n\tobjectVersion = 50;\n\tobjects = {\n\n/* Begin PBXBuildFile section */\n\t\tB41317382277258A00B4866F /* 纹理.jpg in Resources */ = {isa = PBXBuildFile; fileRef = B413170A2277258A00B4866F /* 纹理.jpg */; };\n\t\tB41317392277258A00B4866F /* 长图.JPG in Resources */ = {isa = PBXBuildFile; fileRef = B413170B2277258A00B4866F /* 长图.JPG */; };\n\t\tB413173A2277258A00B4866F /* 宽图.JPG in Resources */ = {isa = PBXBuildFile; fileRef = B413170C2277258A00B4866F /* 宽图.JPG */; };\n\t\tB413173B2277258A00B4866F /* 方图600*600.JPG in Resources */ = {isa = PBXBuildFile; fileRef = B413170D2277258A00B4866F /* 方图600*600.JPG */; };\n\t\tB413173C2277258A00B4866F /* 原图.jpg in Resources */ = {isa = PBXBuildFile; fileRef = B413170E2277258A00B4866F /* 原图.jpg */; };\n\t\tB413173D2277258A00B4866F /* UIView+Xib.m in Sources */ = {isa = PBXBuildFile; fileRef = B41317102277258A00B4866F /* UIView+Xib.m */; };\n\t\tB413173E2277258A00B4866F /* DDShapeViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = B41317122277258A00B4866F /* DDShapeViewController.m */; };\n\t\tB41317412277258A00B4866F /* ATRiseViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = B413171B2277258A00B4866F /* ATRiseViewController.m */; };\n\t\tB41317422277258A00B4866F /* ATSelectBarView.xib in Resources */ = {isa = PBXBuildFile; fileRef = B413171E2277258A00B4866F /* ATSelectBarView.xib */; };\n\t\tB41317432277258A00B4866F /* WeSliderView.m in Sources */ = {isa = PBXBuildFile; fileRef = B41317232277258A00B4866F /* WeSliderView.m */; };\n\t\tB41317442277258A00B4866F /* ATSliderView.m in Sources */ = {isa = PBXBuildFile; fileRef = B41317242277258A00B4866F /* ATSliderView.m */; };\n\t\tB41317452277258A00B4866F /* DDGLShapeControlView.m in Sources */ = {isa = PBXBuildFile; fileRef = B41317272277258A00B4866F /* DDGLShapeControlView.m */; };\n\t\tB41317462277258A00B4866F /* DDGLSelectView.m in Sources */ = {isa = PBXBuildFile; fileRef = B413172B2277258A00B4866F /* DDGLSelectView.m */; };\n\t\tB41317472277258A00B4866F /* DDGLShapingView.m in Sources */ = {isa = PBXBuildFile; fileRef = B413172C2277258A00B4866F /* DDGLShapingView.m */; };\n\t\tB41317482277258A00B4866F /* DDGLShapeSelView.m in Sources */ = {isa = PBXBuildFile; fileRef = B413172E2277258A00B4866F /* DDGLShapeSelView.m */; };\n\t\tB41317492277258A00B4866F /* DDGLShapeView.m in Sources */ = {isa = PBXBuildFile; fileRef = B413172F2277258A00B4866F /* DDGLShapeView.m */; };\n\t\tB413174A2277258A00B4866F /* ATRiseMenuView.m in Sources */ = {isa = PBXBuildFile; fileRef = B41317302277258A00B4866F /* ATRiseMenuView.m */; };\n\t\tB413174B2277258A00B4866F /* ATSelectBarView.m in Sources */ = {isa = PBXBuildFile; fileRef = B41317312277258A00B4866F /* ATSelectBarView.m */; };\n\t\tB42A993E226D6E860067297F /* 人脸106个关键点.png in Resources */ = {isa = PBXBuildFile; fileRef = B42A993B226D6E860067297F /* 人脸106个关键点.png */; };\n\t\tB42C4F2D225DB9B400CF9B9B /* 6666.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = B42C4F2A225DB9B400CF9B9B /* 6666.mp3 */; };\n\t\tB42C4F4A225F3ACF00CF9B9B /* GLImageFilterListViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = B42C4F49225F3ACF00CF9B9B /* GLImageFilterListViewController.m */; };\n\t\tB42EC0512252039600226FA0 /* GLDouYinEffectViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = B42EC0502252039600226FA0 /* GLDouYinEffectViewController.m */; };\n\t\tB42EC0542252044900226FA0 /* DouYinEffectTabView.m in Sources */ = {isa = PBXBuildFile; fileRef = 
B42EC0532252044900226FA0 /* DouYinEffectTabView.m */; };\n\t\tB4620D682220413400EE2876 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = B4620D672220413400EE2876 /* AppDelegate.m */; };\n\t\tB4620D702220413500EE2876 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = B4620D6F2220413500EE2876 /* Assets.xcassets */; };\n\t\tB4620D732220413500EE2876 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = B4620D712220413500EE2876 /* LaunchScreen.storyboard */; };\n\t\tB4620D762220413500EE2876 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = B4620D752220413500EE2876 /* main.m */; };\n\t\tB4620D802220413500EE2876 /* GPURenderKitDemoTests.m in Sources */ = {isa = PBXBuildFile; fileRef = B4620D7F2220413500EE2876 /* GPURenderKitDemoTests.m */; };\n\t\tB4620D8B2220413500EE2876 /* GPURenderKitDemoUITests.m in Sources */ = {isa = PBXBuildFile; fileRef = B4620D8A2220413500EE2876 /* GPURenderKitDemoUITests.m */; };\n\t\tB47A5ED4222127EE006E07A4 /* BaseViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = B47A5ED3222127EE006E07A4 /* BaseViewController.m */; };\n\t\tB47A5ED7222127FF006E07A4 /* MainViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = B47A5ED6222127FF006E07A4 /* MainViewController.m */; };\n\t\tB49EF60C22622D6D00C220B0 /* GLImageFilterShowViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = B49EF60B22622D6D00C220B0 /* GLImageFilterShowViewController.m */; };\n\t\tB49EF62B22634BDD00C220B0 /* UIImage+Rotate.m in Sources */ = {isa = PBXBuildFile; fileRef = B49EF62322634BDD00C220B0 /* UIImage+Rotate.m */; };\n\t\tB49EF62C22634BDD00C220B0 /* GLImagePickerHelper.m in Sources */ = {isa = PBXBuildFile; fileRef = B49EF62422634BDD00C220B0 /* GLImagePickerHelper.m */; };\n\t\tB49EF62D22634BDD00C220B0 /* GLSliderView.m in Sources */ = {isa = PBXBuildFile; fileRef = B49EF62522634BDD00C220B0 /* GLSliderView.m */; };\n\t\tB49EF62E22634BDD00C220B0 /* GLFilterInfoView.m in Sources */ = {isa = PBXBuildFile; fileRef = B49EF62922634BDD00C220B0 /* GLFilterInfoView.m */; };\n\t\tB49EF63F2263541400C220B0 /* glitchPicture100.png in Resources */ = {isa = PBXBuildFile; fileRef = B49EF6382263541400C220B0 /* glitchPicture100.png */; };\n\t\tB49EF6402263541400C220B0 /* glitchPicture003.png in Resources */ = {isa = PBXBuildFile; fileRef = B49EF6392263541400C220B0 /* glitchPicture003.png */; };\n\t\tB49EF6412263541400C220B0 /* glitchPicture002.png in Resources */ = {isa = PBXBuildFile; fileRef = B49EF63A2263541400C220B0 /* glitchPicture002.png */; };\n\t\tB49EF6422263541400C220B0 /* glitchPicture000.png in Resources */ = {isa = PBXBuildFile; fileRef = B49EF63B2263541400C220B0 /* glitchPicture000.png */; };\n\t\tB49EF6432263541400C220B0 /* glitchPicture001.png in Resources */ = {isa = PBXBuildFile; fileRef = B49EF63C2263541400C220B0 /* glitchPicture001.png */; };\n\t\tB49EF6442263541400C220B0 /* glitchPicture005.png in Resources */ = {isa = PBXBuildFile; fileRef = B49EF63D2263541400C220B0 /* glitchPicture005.png */; };\n\t\tB49EF6452263541400C220B0 /* glitchPicture004.png in Resources */ = {isa = PBXBuildFile; fileRef = B49EF63E2263541400C220B0 /* glitchPicture004.png */; };\n\t\tB49EF64C226372C200C220B0 /* DDMediaEditorManage.m in Sources */ = {isa = PBXBuildFile; fileRef = B49EF64A226372C200C220B0 /* DDMediaEditorManage.m */; };\n\t\tB49EF65D226393D200C220B0 /* MBProgressHUD.m in Sources */ = {isa = PBXBuildFile; fileRef = B49EF658226393D200C220B0 /* MBProgressHUD.m */; };\n\t\tB49EF66B226399BD00C220B0 /* MovieViewController.m in 
Sources */ = {isa = PBXBuildFile; fileRef = B49EF66A226399BD00C220B0 /* MovieViewController.m */; };\n\t\tB4A9FE782265BF6F00BC6E51 /* FaceSliderView.m in Sources */ = {isa = PBXBuildFile; fileRef = B4A9FE772265BF6F00BC6E51 /* FaceSliderView.m */; };\n\t\tB4A9FE842265C5BA00BC6E51 /* GPURenderKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = B4A9FE812265C5B000BC6E51 /* GPURenderKit.framework */; };\n\t\tB4C64C3322748239001BB716 /* VideoEcodeViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = B4C64C3222748239001BB716 /* VideoEcodeViewController.m */; };\n\t\tB4C64C3622748A5A001BB716 /* DDVideoEcodeManage.m in Sources */ = {isa = PBXBuildFile; fileRef = B4C64C3522748A5A001BB716 /* DDVideoEcodeManage.m */; };\n\t\tB4DAAC4D2257257F0094A3B2 /* GLImageMovieUseViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = B4DAAC4C2257257F0094A3B2 /* GLImageMovieUseViewController.m */; };\n\t\tB4DAAC50225726000094A3B2 /* DDAVAssetRenderManage.m in Sources */ = {isa = PBXBuildFile; fileRef = B4DAAC4F225726000094A3B2 /* DDAVAssetRenderManage.m */; };\n\t\tB4DAAC5222573EB90094A3B2 /* 测试视频.mp4 in Resources */ = {isa = PBXBuildFile; fileRef = B4DAAC5122573EB80094A3B2 /* 测试视频.mp4 */; };\n\t\tB4DC43BC2264CE9A009C08A0 /* FaceViewController.mm in Sources */ = {isa = PBXBuildFile; fileRef = B4DC43BB2264CE9A009C08A0 /* FaceViewController.mm */; };\n\t\tB4DC43E22264D372009C08A0 /* MGFaceLicenseHandle.m in Sources */ = {isa = PBXBuildFile; fileRef = B4DC43D32264D372009C08A0 /* MGFaceLicenseHandle.m */; };\n\t\tB4DC43E32264D372009C08A0 /* libMGFacepp-0.5.2.a in Frameworks */ = {isa = PBXBuildFile; fileRef = B4DC43D52264D372009C08A0 /* libMGFacepp-0.5.2.a */; };\n\t\tB4DC43E42264D372009C08A0 /* megviifacepp_0_5_2_model in Resources */ = {isa = PBXBuildFile; fileRef = B4DC43DD2264D372009C08A0 /* megviifacepp_0_5_2_model */; };\n\t\tB4DC43E52264D372009C08A0 /* libMGLicMgrSDK-0.3.1.a in Frameworks */ = {isa = PBXBuildFile; fileRef = B4DC43DF2264D372009C08A0 /* libMGLicMgrSDK-0.3.1.a */; };\n\t\tB4DEF80022239EAF00809B5E /* libz.1.1.3.tbd in Frameworks */ = {isa = PBXBuildFile; fileRef = B4DEF7FF22239EAF00809B5E /* libz.1.1.3.tbd */; };\n/* End PBXBuildFile section */\n\n/* Begin PBXContainerItemProxy section */\n\t\tB4620D7C2220413500EE2876 /* PBXContainerItemProxy */ = {\n\t\t\tisa = PBXContainerItemProxy;\n\t\t\tcontainerPortal = B4620D5B2220413400EE2876 /* Project object */;\n\t\t\tproxyType = 1;\n\t\t\tremoteGlobalIDString = B4620D622220413400EE2876;\n\t\t\tremoteInfo = GPURenderKitDemo;\n\t\t};\n\t\tB4620D872220413500EE2876 /* PBXContainerItemProxy */ = {\n\t\t\tisa = PBXContainerItemProxy;\n\t\t\tcontainerPortal = B4620D5B2220413400EE2876 /* Project object */;\n\t\t\tproxyType = 1;\n\t\t\tremoteGlobalIDString = B4620D622220413400EE2876;\n\t\t\tremoteInfo = GPURenderKitDemo;\n\t\t};\n\t\tB4A9FE802265C5B000BC6E51 /* PBXContainerItemProxy */ = {\n\t\t\tisa = PBXContainerItemProxy;\n\t\t\tcontainerPortal = B4A9FE7B2265C5B000BC6E51 /* GPURenderKit.xcodeproj */;\n\t\t\tproxyType = 2;\n\t\t\tremoteGlobalIDString = B4620D4022203FBE00EE2876;\n\t\t\tremoteInfo = GPURenderKit;\n\t\t};\n\t\tB4A9FE822265C5B000BC6E51 /* PBXContainerItemProxy */ = {\n\t\t\tisa = PBXContainerItemProxy;\n\t\t\tcontainerPortal = B4A9FE7B2265C5B000BC6E51 /* GPURenderKit.xcodeproj */;\n\t\t\tproxyType = 2;\n\t\t\tremoteGlobalIDString = B4620D4922203FBE00EE2876;\n\t\t\tremoteInfo = GPURenderKitTests;\n\t\t};\n/* End PBXContainerItemProxy section */\n\n/* Begin PBXFileReference section */\n\t\tB41317022277258A00B4866F /* 
DDShapeViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = DDShapeViewController.h; sourceTree = \"<group>\"; };\n\t\tB413170A2277258A00B4866F /* 纹理.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = \"纹理.jpg\"; sourceTree = \"<group>\"; };\n\t\tB413170B2277258A00B4866F /* 长图.JPG */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = \"长图.JPG\"; sourceTree = \"<group>\"; };\n\t\tB413170C2277258A00B4866F /* 宽图.JPG */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = \"宽图.JPG\"; sourceTree = \"<group>\"; };\n\t\tB413170D2277258A00B4866F /* 方图600*600.JPG */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = \"方图600*600.JPG\"; sourceTree = \"<group>\"; };\n\t\tB413170E2277258A00B4866F /* 原图.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = \"原图.jpg\"; sourceTree = \"<group>\"; };\n\t\tB41317102277258A00B4866F /* UIView+Xib.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = \"UIView+Xib.m\"; sourceTree = \"<group>\"; };\n\t\tB41317112277258A00B4866F /* UIView+Xib.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = \"UIView+Xib.h\"; sourceTree = \"<group>\"; };\n\t\tB41317122277258A00B4866F /* DDShapeViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = DDShapeViewController.m; sourceTree = \"<group>\"; };\n\t\tB413171A2277258A00B4866F /* ATRiseViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ATRiseViewController.h; sourceTree = \"<group>\"; };\n\t\tB413171B2277258A00B4866F /* ATRiseViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ATRiseViewController.m; sourceTree = \"<group>\"; };\n\t\tB413171D2277258A00B4866F /* ATRiseMenuView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ATRiseMenuView.h; sourceTree = \"<group>\"; };\n\t\tB413171E2277258A00B4866F /* ATSelectBarView.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = ATSelectBarView.xib; sourceTree = \"<group>\"; };\n\t\tB413171F2277258A00B4866F /* ATSelectBarView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ATSelectBarView.h; sourceTree = \"<group>\"; };\n\t\tB41317212277258A00B4866F /* WeSliderView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = WeSliderView.h; sourceTree = \"<group>\"; };\n\t\tB41317222277258A00B4866F /* ATSliderView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ATSliderView.h; sourceTree = \"<group>\"; };\n\t\tB41317232277258A00B4866F /* WeSliderView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = WeSliderView.m; sourceTree = \"<group>\"; };\n\t\tB41317242277258A00B4866F /* ATSliderView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ATSliderView.m; sourceTree = \"<group>\"; };\n\t\tB41317262277258A00B4866F /* DDGLSelectView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = DDGLSelectView.h; sourceTree = \"<group>\"; };\n\t\tB41317272277258A00B4866F /* DDGLShapeControlView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; 
path = DDGLShapeControlView.m; sourceTree = \"<group>\"; };\n\t\tB41317282277258A00B4866F /* DDGLShapingView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = DDGLShapingView.h; sourceTree = \"<group>\"; };\n\t\tB41317292277258A00B4866F /* DDGLShapeSelView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = DDGLShapeSelView.h; sourceTree = \"<group>\"; };\n\t\tB413172A2277258A00B4866F /* DDGLShapeView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = DDGLShapeView.h; sourceTree = \"<group>\"; };\n\t\tB413172B2277258A00B4866F /* DDGLSelectView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = DDGLSelectView.m; sourceTree = \"<group>\"; };\n\t\tB413172C2277258A00B4866F /* DDGLShapingView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = DDGLShapingView.m; sourceTree = \"<group>\"; };\n\t\tB413172D2277258A00B4866F /* DDGLShapeControlView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = DDGLShapeControlView.h; sourceTree = \"<group>\"; };\n\t\tB413172E2277258A00B4866F /* DDGLShapeSelView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = DDGLShapeSelView.m; sourceTree = \"<group>\"; };\n\t\tB413172F2277258A00B4866F /* DDGLShapeView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = DDGLShapeView.m; sourceTree = \"<group>\"; };\n\t\tB41317302277258A00B4866F /* ATRiseMenuView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ATRiseMenuView.m; sourceTree = \"<group>\"; };\n\t\tB41317312277258A00B4866F /* ATSelectBarView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ATSelectBarView.m; sourceTree = \"<group>\"; };\n\t\tB42A993B226D6E860067297F /* 人脸106个关键点.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = \"人脸106个关键点.png\"; sourceTree = \"<group>\"; };\n\t\tB42C4F2A225DB9B400CF9B9B /* 6666.mp3 */ = {isa = PBXFileReference; lastKnownFileType = audio.mp3; path = 6666.mp3; sourceTree = \"<group>\"; };\n\t\tB42C4F48225F3ACF00CF9B9B /* GLImageFilterListViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GLImageFilterListViewController.h; sourceTree = \"<group>\"; };\n\t\tB42C4F49225F3ACF00CF9B9B /* GLImageFilterListViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = GLImageFilterListViewController.m; sourceTree = \"<group>\"; };\n\t\tB42EC04F2252039600226FA0 /* GLDouYinEffectViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GLDouYinEffectViewController.h; sourceTree = \"<group>\"; };\n\t\tB42EC0502252039600226FA0 /* GLDouYinEffectViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = GLDouYinEffectViewController.m; sourceTree = \"<group>\"; };\n\t\tB42EC0522252044900226FA0 /* DouYinEffectTabView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = DouYinEffectTabView.h; sourceTree = \"<group>\"; };\n\t\tB42EC0532252044900226FA0 /* DouYinEffectTabView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = DouYinEffectTabView.m; sourceTree = \"<group>\"; };\n\t\tB4620D632220413400EE2876 /* GPURenderKitDemo.app 
*/ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = GPURenderKitDemo.app; sourceTree = BUILT_PRODUCTS_DIR; };\n\t\tB4620D662220413400EE2876 /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = \"<group>\"; };\n\t\tB4620D672220413400EE2876 /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = \"<group>\"; };\n\t\tB4620D6F2220413500EE2876 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = \"<group>\"; };\n\t\tB4620D722220413500EE2876 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = \"<group>\"; };\n\t\tB4620D742220413500EE2876 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = \"<group>\"; };\n\t\tB4620D752220413500EE2876 /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = \"<group>\"; };\n\t\tB4620D7B2220413500EE2876 /* GPURenderKitDemoTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = GPURenderKitDemoTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };\n\t\tB4620D7F2220413500EE2876 /* GPURenderKitDemoTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = GPURenderKitDemoTests.m; sourceTree = \"<group>\"; };\n\t\tB4620D812220413500EE2876 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = \"<group>\"; };\n\t\tB4620D862220413500EE2876 /* GPURenderKitDemoUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = GPURenderKitDemoUITests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };\n\t\tB4620D8A2220413500EE2876 /* GPURenderKitDemoUITests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = GPURenderKitDemoUITests.m; sourceTree = \"<group>\"; };\n\t\tB4620D8C2220413500EE2876 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = \"<group>\"; };\n\t\tB47A5ED2222127EE006E07A4 /* BaseViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = BaseViewController.h; sourceTree = \"<group>\"; };\n\t\tB47A5ED3222127EE006E07A4 /* BaseViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = BaseViewController.m; sourceTree = \"<group>\"; };\n\t\tB47A5ED5222127FF006E07A4 /* MainViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MainViewController.h; sourceTree = \"<group>\"; };\n\t\tB47A5ED6222127FF006E07A4 /* MainViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MainViewController.m; sourceTree = \"<group>\"; };\n\t\tB47A5EE0222129E9006E07A4 /* GPURenderMacroHeader.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPURenderMacroHeader.h; sourceTree = \"<group>\"; };\n\t\tB49EF60A22622D6D00C220B0 /* GLImageFilterShowViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GLImageFilterShowViewController.h; sourceTree = \"<group>\"; };\n\t\tB49EF60B22622D6D00C220B0 /* GLImageFilterShowViewController.m */ = {isa = PBXFileReference; lastKnownFileType = 
sourcecode.c.objc; path = GLImageFilterShowViewController.m; sourceTree = \"<group>\"; };\n\t\tB49EF61022622E8300C220B0 /* GLImageFilterEnumType.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GLImageFilterEnumType.h; sourceTree = \"<group>\"; };\n\t\tB49EF62322634BDD00C220B0 /* UIImage+Rotate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = \"UIImage+Rotate.m\"; sourceTree = \"<group>\"; };\n\t\tB49EF62422634BDD00C220B0 /* GLImagePickerHelper.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GLImagePickerHelper.m; sourceTree = \"<group>\"; };\n\t\tB49EF62522634BDD00C220B0 /* GLSliderView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GLSliderView.m; sourceTree = \"<group>\"; };\n\t\tB49EF62622634BDD00C220B0 /* GLImagePickerHelper.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GLImagePickerHelper.h; sourceTree = \"<group>\"; };\n\t\tB49EF62722634BDD00C220B0 /* GLFilterInfoView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GLFilterInfoView.h; sourceTree = \"<group>\"; };\n\t\tB49EF62822634BDD00C220B0 /* UIImage+Rotate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = \"UIImage+Rotate.h\"; sourceTree = \"<group>\"; };\n\t\tB49EF62922634BDD00C220B0 /* GLFilterInfoView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GLFilterInfoView.m; sourceTree = \"<group>\"; };\n\t\tB49EF62A22634BDD00C220B0 /* GLSliderView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GLSliderView.h; sourceTree = \"<group>\"; };\n\t\tB49EF6382263541400C220B0 /* glitchPicture100.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = glitchPicture100.png; sourceTree = \"<group>\"; };\n\t\tB49EF6392263541400C220B0 /* glitchPicture003.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = glitchPicture003.png; sourceTree = \"<group>\"; };\n\t\tB49EF63A2263541400C220B0 /* glitchPicture002.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = glitchPicture002.png; sourceTree = \"<group>\"; };\n\t\tB49EF63B2263541400C220B0 /* glitchPicture000.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = glitchPicture000.png; sourceTree = \"<group>\"; };\n\t\tB49EF63C2263541400C220B0 /* glitchPicture001.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = glitchPicture001.png; sourceTree = \"<group>\"; };\n\t\tB49EF63D2263541400C220B0 /* glitchPicture005.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = glitchPicture005.png; sourceTree = \"<group>\"; };\n\t\tB49EF63E2263541400C220B0 /* glitchPicture004.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = glitchPicture004.png; sourceTree = \"<group>\"; };\n\t\tB49EF64A226372C200C220B0 /* DDMediaEditorManage.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = DDMediaEditorManage.m; sourceTree = \"<group>\"; };\n\t\tB49EF64B226372C200C220B0 /* DDMediaEditorManage.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = DDMediaEditorManage.h; sourceTree = \"<group>\"; };\n\t\tB49EF658226393D200C220B0 /* MBProgressHUD.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = 
sourcecode.c.objc; path = MBProgressHUD.m; sourceTree = \"<group>\"; };\n\t\tB49EF65B226393D200C220B0 /* MBProgressHUD.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MBProgressHUD.h; sourceTree = \"<group>\"; };\n\t\tB49EF669226399BD00C220B0 /* MovieViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MovieViewController.h; sourceTree = \"<group>\"; };\n\t\tB49EF66A226399BD00C220B0 /* MovieViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = MovieViewController.m; sourceTree = \"<group>\"; };\n\t\tB4A9FE762265BF6F00BC6E51 /* FaceSliderView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FaceSliderView.h; sourceTree = \"<group>\"; };\n\t\tB4A9FE772265BF6F00BC6E51 /* FaceSliderView.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FaceSliderView.m; sourceTree = \"<group>\"; };\n\t\tB4A9FE7B2265C5B000BC6E51 /* GPURenderKit.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = \"wrapper.pb-project\"; name = GPURenderKit.xcodeproj; path = ../../GPURenderKit/GPURenderKit.xcodeproj; sourceTree = \"<group>\"; };\n\t\tB4C64C3122748239001BB716 /* VideoEcodeViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VideoEcodeViewController.h; sourceTree = \"<group>\"; };\n\t\tB4C64C3222748239001BB716 /* VideoEcodeViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = VideoEcodeViewController.m; sourceTree = \"<group>\"; };\n\t\tB4C64C3422748A5A001BB716 /* DDVideoEcodeManage.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DDVideoEcodeManage.h; sourceTree = \"<group>\"; };\n\t\tB4C64C3522748A5A001BB716 /* DDVideoEcodeManage.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = DDVideoEcodeManage.m; sourceTree = \"<group>\"; };\n\t\tB4DAAC4B2257257F0094A3B2 /* GLImageMovieUseViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GLImageMovieUseViewController.h; sourceTree = \"<group>\"; };\n\t\tB4DAAC4C2257257F0094A3B2 /* GLImageMovieUseViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = GLImageMovieUseViewController.m; sourceTree = \"<group>\"; };\n\t\tB4DAAC4E225726000094A3B2 /* DDAVAssetRenderManage.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = DDAVAssetRenderManage.h; sourceTree = \"<group>\"; };\n\t\tB4DAAC4F225726000094A3B2 /* DDAVAssetRenderManage.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = DDAVAssetRenderManage.m; sourceTree = \"<group>\"; };\n\t\tB4DAAC5122573EB80094A3B2 /* 测试视频.mp4 */ = {isa = PBXFileReference; lastKnownFileType = file; path = \"测试视频.mp4\"; sourceTree = \"<group>\"; };\n\t\tB4DC43BA2264CE9A009C08A0 /* FaceViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FaceViewController.h; sourceTree = \"<group>\"; };\n\t\tB4DC43BB2264CE9A009C08A0 /* FaceViewController.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FaceViewController.mm; sourceTree = \"<group>\"; };\n\t\tB4DC43D12264D372009C08A0 /* MGNetAccount.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MGNetAccount.h; sourceTree = \"<group>\"; };\n\t\tB4DC43D22264D372009C08A0 /* MGFaceLicenseHandle.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = 
sourcecode.c.h; path = MGFaceLicenseHandle.h; sourceTree = \"<group>\"; };\n\t\tB4DC43D32264D372009C08A0 /* MGFaceLicenseHandle.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = MGFaceLicenseHandle.m; sourceTree = \"<group>\"; };\n\t\tB4DC43D52264D372009C08A0 /* libMGFacepp-0.5.2.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; path = \"libMGFacepp-0.5.2.a\"; sourceTree = \"<group>\"; };\n\t\tB4DC43D62264D372009C08A0 /* MGFaceppConfig.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MGFaceppConfig.h; sourceTree = \"<group>\"; };\n\t\tB4DC43D72264D372009C08A0 /* MGAlgorithmInfo.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MGAlgorithmInfo.h; sourceTree = \"<group>\"; };\n\t\tB4DC43D82264D372009C08A0 /* MGFacepp.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MGFacepp.h; sourceTree = \"<group>\"; };\n\t\tB4DC43D92264D372009C08A0 /* MGDetectRectInfo.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MGDetectRectInfo.h; sourceTree = \"<group>\"; };\n\t\tB4DC43DA2264D372009C08A0 /* MGFaceppCommon.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MGFaceppCommon.h; sourceTree = \"<group>\"; };\n\t\tB4DC43DB2264D372009C08A0 /* MGImageData.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MGImageData.h; sourceTree = \"<group>\"; };\n\t\tB4DC43DC2264D372009C08A0 /* MGFaceInfo.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MGFaceInfo.h; sourceTree = \"<group>\"; };\n\t\tB4DC43DD2264D372009C08A0 /* megviifacepp_0_5_2_model */ = {isa = PBXFileReference; lastKnownFileType = file; path = megviifacepp_0_5_2_model; sourceTree = \"<group>\"; };\n\t\tB4DC43DF2264D372009C08A0 /* libMGLicMgrSDK-0.3.1.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; path = \"libMGLicMgrSDK-0.3.1.a\"; sourceTree = \"<group>\"; };\n\t\tB4DC43E02264D372009C08A0 /* MGLicenseManager.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MGLicenseManager.h; sourceTree = \"<group>\"; };\n\t\tB4DC43E12264D372009C08A0 /* MGLicenseCommon.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MGLicenseCommon.h; sourceTree = \"<group>\"; };\n\t\tB4DEF7FF22239EAF00809B5E /* libz.1.1.3.tbd */ = {isa = PBXFileReference; lastKnownFileType = \"sourcecode.text-based-dylib-definition\"; name = libz.1.1.3.tbd; path = usr/lib/libz.1.1.3.tbd; sourceTree = SDKROOT; };\n/* End PBXFileReference section */\n\n/* Begin PBXFrameworksBuildPhase section */\n\t\tB4620D602220413400EE2876 /* Frameworks */ = {\n\t\t\tisa = PBXFrameworksBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = (\n\t\t\t\tB4A9FE842265C5BA00BC6E51 /* GPURenderKit.framework in Frameworks */,\n\t\t\t\tB4DC43E52264D372009C08A0 /* libMGLicMgrSDK-0.3.1.a in Frameworks */,\n\t\t\t\tB4DEF80022239EAF00809B5E /* libz.1.1.3.tbd in Frameworks */,\n\t\t\t\tB4DC43E32264D372009C08A0 /* libMGFacepp-0.5.2.a in Frameworks */,\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n\t\tB4620D782220413500EE2876 /* Frameworks */ = {\n\t\t\tisa = PBXFrameworksBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = (\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n\t\tB4620D832220413500EE2876 /* Frameworks 
*/ = {\n\t\t\tisa = PBXFrameworksBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = (\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n/* End PBXFrameworksBuildPhase section */\n\n/* Begin PBXGroup section */\n\t\tB41317012277258A00B4866F /* ShapeChange */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB41317022277258A00B4866F /* DDShapeViewController.h */,\n\t\t\t\tB41317122277258A00B4866F /* DDShapeViewController.m */,\n\t\t\t\tB41317092277258A00B4866F /* Pic */,\n\t\t\t\tB413170F2277258A00B4866F /* View */,\n\t\t\t\tB41317182277258A00B4866F /* Rise */,\n\t\t\t);\n\t\t\tpath = ShapeChange;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB41317092277258A00B4866F /* Pic */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB413170A2277258A00B4866F /* 纹理.jpg */,\n\t\t\t\tB413170B2277258A00B4866F /* 长图.JPG */,\n\t\t\t\tB413170C2277258A00B4866F /* 宽图.JPG */,\n\t\t\t\tB413170D2277258A00B4866F /* 方图600*600.JPG */,\n\t\t\t\tB413170E2277258A00B4866F /* 原图.jpg */,\n\t\t\t);\n\t\t\tpath = Pic;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB413170F2277258A00B4866F /* View */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB41317102277258A00B4866F /* UIView+Xib.m */,\n\t\t\t\tB41317112277258A00B4866F /* UIView+Xib.h */,\n\t\t\t);\n\t\t\tpath = View;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB41317182277258A00B4866F /* Rise */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB41317192277258A00B4866F /* Controller */,\n\t\t\t\tB413171C2277258A00B4866F /* View */,\n\t\t\t);\n\t\t\tpath = Rise;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB41317192277258A00B4866F /* Controller */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB413171A2277258A00B4866F /* ATRiseViewController.h */,\n\t\t\t\tB413171B2277258A00B4866F /* ATRiseViewController.m */,\n\t\t\t);\n\t\t\tpath = Controller;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB413171C2277258A00B4866F /* View */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB413171D2277258A00B4866F /* ATRiseMenuView.h */,\n\t\t\t\tB413171E2277258A00B4866F /* ATSelectBarView.xib */,\n\t\t\t\tB413171F2277258A00B4866F /* ATSelectBarView.h */,\n\t\t\t\tB41317202277258A00B4866F /* Slider */,\n\t\t\t\tB41317252277258A00B4866F /* DDGLShapeView */,\n\t\t\t\tB41317302277258A00B4866F /* ATRiseMenuView.m */,\n\t\t\t\tB41317312277258A00B4866F /* ATSelectBarView.m */,\n\t\t\t);\n\t\t\tpath = View;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB41317202277258A00B4866F /* Slider */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB41317212277258A00B4866F /* WeSliderView.h */,\n\t\t\t\tB41317222277258A00B4866F /* ATSliderView.h */,\n\t\t\t\tB41317232277258A00B4866F /* WeSliderView.m */,\n\t\t\t\tB41317242277258A00B4866F /* ATSliderView.m */,\n\t\t\t);\n\t\t\tpath = Slider;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB41317252277258A00B4866F /* DDGLShapeView */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB41317262277258A00B4866F /* DDGLSelectView.h */,\n\t\t\t\tB41317272277258A00B4866F /* DDGLShapeControlView.m */,\n\t\t\t\tB41317282277258A00B4866F /* DDGLShapingView.h */,\n\t\t\t\tB41317292277258A00B4866F /* DDGLShapeSelView.h */,\n\t\t\t\tB413172A2277258A00B4866F /* DDGLShapeView.h */,\n\t\t\t\tB413172B2277258A00B4866F /* DDGLSelectView.m */,\n\t\t\t\tB413172C2277258A00B4866F /* DDGLShapingView.m */,\n\t\t\t\tB413172D2277258A00B4866F /* DDGLShapeControlView.h */,\n\t\t\t\tB413172E2277258A00B4866F /* DDGLShapeSelView.m */,\n\t\t\t\tB413172F2277258A00B4866F /* DDGLShapeView.m 
*/,\n\t\t\t);\n\t\t\tpath = DDGLShapeView;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB42C4F47225F3A7000CF9B9B /* GLImageFilterList */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB42C4F48225F3ACF00CF9B9B /* GLImageFilterListViewController.h */,\n\t\t\t\tB42C4F49225F3ACF00CF9B9B /* GLImageFilterListViewController.m */,\n\t\t\t\tB49EF60A22622D6D00C220B0 /* GLImageFilterShowViewController.h */,\n\t\t\t\tB49EF60B22622D6D00C220B0 /* GLImageFilterShowViewController.m */,\n\t\t\t);\n\t\t\tpath = GLImageFilterList;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB42EC04E2252033700226FA0 /* DouYinEffect */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB49EF6372263541400C220B0 /* GlitchPicture */,\n\t\t\t\tB42EC04F2252039600226FA0 /* GLDouYinEffectViewController.h */,\n\t\t\t\tB42EC0502252039600226FA0 /* GLDouYinEffectViewController.m */,\n\t\t\t\tB42EC0522252044900226FA0 /* DouYinEffectTabView.h */,\n\t\t\t\tB42EC0532252044900226FA0 /* DouYinEffectTabView.m */,\n\t\t\t);\n\t\t\tpath = DouYinEffect;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB4620D5A2220413400EE2876 = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB4620D652220413400EE2876 /* GPURenderKitDemo */,\n\t\t\t\tB4620D7E2220413500EE2876 /* GPURenderKitDemoTests */,\n\t\t\t\tB4620D892220413500EE2876 /* GPURenderKitDemoUITests */,\n\t\t\t\tB4620D642220413400EE2876 /* Products */,\n\t\t\t\tB46218A322204BD400EE2876 /* Frameworks */,\n\t\t\t);\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB4620D642220413400EE2876 /* Products */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB4620D632220413400EE2876 /* GPURenderKitDemo.app */,\n\t\t\t\tB4620D7B2220413500EE2876 /* GPURenderKitDemoTests.xctest */,\n\t\t\t\tB4620D862220413500EE2876 /* GPURenderKitDemoUITests.xctest */,\n\t\t\t);\n\t\t\tname = Products;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB4620D652220413400EE2876 /* GPURenderKitDemo */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB41317012277258A00B4866F /* ShapeChange */,\n\t\t\t\tB4C64C2E227481CA001BB716 /* VideoCode */,\n\t\t\t\tB4DC43AB2264CE48009C08A0 /* Face */,\n\t\t\t\tB49EF6462263723500C220B0 /* Tool */,\n\t\t\t\tB49EF61422634BBE00C220B0 /* Support */,\n\t\t\t\tB42C4F47225F3A7000CF9B9B /* GLImageFilterList */,\n\t\t\t\tB4DAAC3F2257243D0094A3B2 /* GLImageMovie */,\n\t\t\t\tB42EC04E2252033700226FA0 /* DouYinEffect */,\n\t\t\t\tB4620D662220413400EE2876 /* AppDelegate.h */,\n\t\t\t\tB4620D672220413400EE2876 /* AppDelegate.m */,\n\t\t\t\tB47A5EE0222129E9006E07A4 /* GPURenderMacroHeader.h */,\n\t\t\t\tB49EF61022622E8300C220B0 /* GLImageFilterEnumType.h */,\n\t\t\t\tB47A5ED5222127FF006E07A4 /* MainViewController.h */,\n\t\t\t\tB47A5ED6222127FF006E07A4 /* MainViewController.m */,\n\t\t\t\tB47A5ED2222127EE006E07A4 /* BaseViewController.h */,\n\t\t\t\tB47A5ED3222127EE006E07A4 /* BaseViewController.m */,\n\t\t\t\tB4620D6F2220413500EE2876 /* Assets.xcassets */,\n\t\t\t\tB4620D712220413500EE2876 /* LaunchScreen.storyboard */,\n\t\t\t\tB4620D742220413500EE2876 /* Info.plist */,\n\t\t\t\tB4620D752220413500EE2876 /* main.m */,\n\t\t\t\tB4A9FE7B2265C5B000BC6E51 /* GPURenderKit.xcodeproj */,\n\t\t\t);\n\t\t\tpath = GPURenderKitDemo;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB4620D7E2220413500EE2876 /* GPURenderKitDemoTests */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB4620D7F2220413500EE2876 /* GPURenderKitDemoTests.m */,\n\t\t\t\tB4620D812220413500EE2876 /* Info.plist */,\n\t\t\t);\n\t\t\tpath = GPURenderKitDemoTests;\n\t\t\tsourceTree = 
\"<group>\";\n\t\t};\n\t\tB4620D892220413500EE2876 /* GPURenderKitDemoUITests */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB4620D8A2220413500EE2876 /* GPURenderKitDemoUITests.m */,\n\t\t\t\tB4620D8C2220413500EE2876 /* Info.plist */,\n\t\t\t);\n\t\t\tpath = GPURenderKitDemoUITests;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB46218A322204BD400EE2876 /* Frameworks */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB4DEF7FF22239EAF00809B5E /* libz.1.1.3.tbd */,\n\t\t\t);\n\t\t\tname = Frameworks;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB49EF61422634BBE00C220B0 /* Support */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB49EF64E2263939C00C220B0 /* HUD */,\n\t\t\t\tB49EF62722634BDD00C220B0 /* GLFilterInfoView.h */,\n\t\t\t\tB49EF62922634BDD00C220B0 /* GLFilterInfoView.m */,\n\t\t\t\tB49EF62622634BDD00C220B0 /* GLImagePickerHelper.h */,\n\t\t\t\tB49EF62422634BDD00C220B0 /* GLImagePickerHelper.m */,\n\t\t\t\tB49EF62A22634BDD00C220B0 /* GLSliderView.h */,\n\t\t\t\tB49EF62522634BDD00C220B0 /* GLSliderView.m */,\n\t\t\t\tB49EF62822634BDD00C220B0 /* UIImage+Rotate.h */,\n\t\t\t\tB49EF62322634BDD00C220B0 /* UIImage+Rotate.m */,\n\t\t\t);\n\t\t\tpath = Support;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB49EF6372263541400C220B0 /* GlitchPicture */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB49EF6382263541400C220B0 /* glitchPicture100.png */,\n\t\t\t\tB49EF6392263541400C220B0 /* glitchPicture003.png */,\n\t\t\t\tB49EF63A2263541400C220B0 /* glitchPicture002.png */,\n\t\t\t\tB49EF63B2263541400C220B0 /* glitchPicture000.png */,\n\t\t\t\tB49EF63C2263541400C220B0 /* glitchPicture001.png */,\n\t\t\t\tB49EF63D2263541400C220B0 /* glitchPicture005.png */,\n\t\t\t\tB49EF63E2263541400C220B0 /* glitchPicture004.png */,\n\t\t\t);\n\t\t\tpath = GlitchPicture;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB49EF6462263723500C220B0 /* Tool */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB49EF64B226372C200C220B0 /* DDMediaEditorManage.h */,\n\t\t\t\tB49EF64A226372C200C220B0 /* DDMediaEditorManage.m */,\n\t\t\t);\n\t\t\tpath = Tool;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB49EF64E2263939C00C220B0 /* HUD */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB49EF65B226393D200C220B0 /* MBProgressHUD.h */,\n\t\t\t\tB49EF658226393D200C220B0 /* MBProgressHUD.m */,\n\t\t\t);\n\t\t\tpath = HUD;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB4A9FE7C2265C5B000BC6E51 /* Products */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB4A9FE812265C5B000BC6E51 /* GPURenderKit.framework */,\n\t\t\t\tB4A9FE832265C5B000BC6E51 /* GPURenderKitTests.xctest */,\n\t\t\t);\n\t\t\tname = Products;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB4C64C2E227481CA001BB716 /* VideoCode */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB4C64C3122748239001BB716 /* VideoEcodeViewController.h */,\n\t\t\t\tB4C64C3222748239001BB716 /* VideoEcodeViewController.m */,\n\t\t\t\tB4C64C3422748A5A001BB716 /* DDVideoEcodeManage.h */,\n\t\t\t\tB4C64C3522748A5A001BB716 /* DDVideoEcodeManage.m */,\n\t\t\t);\n\t\t\tpath = VideoCode;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB4DAAC3F2257243D0094A3B2 /* GLImageMovie */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB4DAAC4B2257257F0094A3B2 /* GLImageMovieUseViewController.h */,\n\t\t\t\tB4DAAC4C2257257F0094A3B2 /* GLImageMovieUseViewController.m */,\n\t\t\t\tB4DAAC4E225726000094A3B2 /* DDAVAssetRenderManage.h */,\n\t\t\t\tB4DAAC4F225726000094A3B2 /* DDAVAssetRenderManage.m */,\n\t\t\t\tB42C4F2A225DB9B400CF9B9B /* 
6666.mp3 */,\n\t\t\t\tB4DAAC5122573EB80094A3B2 /* 测试视频.mp4 */,\n\t\t\t\tB49EF669226399BD00C220B0 /* MovieViewController.h */,\n\t\t\t\tB49EF66A226399BD00C220B0 /* MovieViewController.m */,\n\t\t\t);\n\t\t\tpath = GLImageMovie;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB4DC43AB2264CE48009C08A0 /* Face */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB4DC43CF2264D372009C08A0 /* Face++ */,\n\t\t\t\tB4DC43BA2264CE9A009C08A0 /* FaceViewController.h */,\n\t\t\t\tB4DC43BB2264CE9A009C08A0 /* FaceViewController.mm */,\n\t\t\t\tB4A9FE762265BF6F00BC6E51 /* FaceSliderView.h */,\n\t\t\t\tB4A9FE772265BF6F00BC6E51 /* FaceSliderView.m */,\n\t\t\t);\n\t\t\tpath = Face;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB4DC43CF2264D372009C08A0 /* Face++ */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB42A993B226D6E860067297F /* 人脸106个关键点.png */,\n\t\t\t\tB4DC43D02264D372009C08A0 /* MGLicenseManagerHelper */,\n\t\t\t\tB4DC43D42264D372009C08A0 /* iOS_SDK */,\n\t\t\t\tB4DC43DE2264D372009C08A0 /* MGLicense */,\n\t\t\t);\n\t\t\tpath = \"Face++\";\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB4DC43D02264D372009C08A0 /* MGLicenseManagerHelper */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB4DC43D12264D372009C08A0 /* MGNetAccount.h */,\n\t\t\t\tB4DC43D22264D372009C08A0 /* MGFaceLicenseHandle.h */,\n\t\t\t\tB4DC43D32264D372009C08A0 /* MGFaceLicenseHandle.m */,\n\t\t\t);\n\t\t\tpath = MGLicenseManagerHelper;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB4DC43D42264D372009C08A0 /* iOS_SDK */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB4DC43D52264D372009C08A0 /* libMGFacepp-0.5.2.a */,\n\t\t\t\tB4DC43D62264D372009C08A0 /* MGFaceppConfig.h */,\n\t\t\t\tB4DC43D72264D372009C08A0 /* MGAlgorithmInfo.h */,\n\t\t\t\tB4DC43D82264D372009C08A0 /* MGFacepp.h */,\n\t\t\t\tB4DC43D92264D372009C08A0 /* MGDetectRectInfo.h */,\n\t\t\t\tB4DC43DA2264D372009C08A0 /* MGFaceppCommon.h */,\n\t\t\t\tB4DC43DB2264D372009C08A0 /* MGImageData.h */,\n\t\t\t\tB4DC43DC2264D372009C08A0 /* MGFaceInfo.h */,\n\t\t\t\tB4DC43DD2264D372009C08A0 /* megviifacepp_0_5_2_model */,\n\t\t\t);\n\t\t\tpath = iOS_SDK;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tB4DC43DE2264D372009C08A0 /* MGLicense */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tB4DC43DF2264D372009C08A0 /* libMGLicMgrSDK-0.3.1.a */,\n\t\t\t\tB4DC43E02264D372009C08A0 /* MGLicenseManager.h */,\n\t\t\t\tB4DC43E12264D372009C08A0 /* MGLicenseCommon.h */,\n\t\t\t);\n\t\t\tpath = MGLicense;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n/* End PBXGroup section */\n\n/* Begin PBXNativeTarget section */\n\t\tB4620D622220413400EE2876 /* GPURenderKitDemo */ = {\n\t\t\tisa = PBXNativeTarget;\n\t\t\tbuildConfigurationList = B4620D8F2220413500EE2876 /* Build configuration list for PBXNativeTarget \"GPURenderKitDemo\" */;\n\t\t\tbuildPhases = (\n\t\t\t\tB4620D5F2220413400EE2876 /* Sources */,\n\t\t\t\tB4620D602220413400EE2876 /* Frameworks */,\n\t\t\t\tB4620D612220413400EE2876 /* Resources */,\n\t\t\t);\n\t\t\tbuildRules = (\n\t\t\t);\n\t\t\tdependencies = (\n\t\t\t);\n\t\t\tname = GPURenderKitDemo;\n\t\t\tproductName = GPURenderKitDemo;\n\t\t\tproductReference = B4620D632220413400EE2876 /* GPURenderKitDemo.app */;\n\t\t\tproductType = \"com.apple.product-type.application\";\n\t\t};\n\t\tB4620D7A2220413500EE2876 /* GPURenderKitDemoTests */ = {\n\t\t\tisa = PBXNativeTarget;\n\t\t\tbuildConfigurationList = B4620D922220413500EE2876 /* Build configuration list for PBXNativeTarget \"GPURenderKitDemoTests\" */;\n\t\t\tbuildPhases = 
(\n\t\t\t\tB4620D772220413500EE2876 /* Sources */,\n\t\t\t\tB4620D782220413500EE2876 /* Frameworks */,\n\t\t\t\tB4620D792220413500EE2876 /* Resources */,\n\t\t\t);\n\t\t\tbuildRules = (\n\t\t\t);\n\t\t\tdependencies = (\n\t\t\t\tB4620D7D2220413500EE2876 /* PBXTargetDependency */,\n\t\t\t);\n\t\t\tname = GPURenderKitDemoTests;\n\t\t\tproductName = GPURenderKitDemoTests;\n\t\t\tproductReference = B4620D7B2220413500EE2876 /* GPURenderKitDemoTests.xctest */;\n\t\t\tproductType = \"com.apple.product-type.bundle.unit-test\";\n\t\t};\n\t\tB4620D852220413500EE2876 /* GPURenderKitDemoUITests */ = {\n\t\t\tisa = PBXNativeTarget;\n\t\t\tbuildConfigurationList = B4620D952220413500EE2876 /* Build configuration list for PBXNativeTarget \"GPURenderKitDemoUITests\" */;\n\t\t\tbuildPhases = (\n\t\t\t\tB4620D822220413500EE2876 /* Sources */,\n\t\t\t\tB4620D832220413500EE2876 /* Frameworks */,\n\t\t\t\tB4620D842220413500EE2876 /* Resources */,\n\t\t\t);\n\t\t\tbuildRules = (\n\t\t\t);\n\t\t\tdependencies = (\n\t\t\t\tB4620D882220413500EE2876 /* PBXTargetDependency */,\n\t\t\t);\n\t\t\tname = GPURenderKitDemoUITests;\n\t\t\tproductName = GPURenderKitDemoUITests;\n\t\t\tproductReference = B4620D862220413500EE2876 /* GPURenderKitDemoUITests.xctest */;\n\t\t\tproductType = \"com.apple.product-type.bundle.ui-testing\";\n\t\t};\n/* End PBXNativeTarget section */\n\n/* Begin PBXProject section */\n\t\tB4620D5B2220413400EE2876 /* Project object */ = {\n\t\t\tisa = PBXProject;\n\t\t\tattributes = {\n\t\t\t\tLastUpgradeCheck = 1010;\n\t\t\t\tORGANIZATIONNAME = \"刘海东\";\n\t\t\t\tTargetAttributes = {\n\t\t\t\t\tB4620D622220413400EE2876 = {\n\t\t\t\t\t\tCreatedOnToolsVersion = 10.1;\n\t\t\t\t\t\tSystemCapabilities = {\n\t\t\t\t\t\t\tcom.apple.BackgroundModes = {\n\t\t\t\t\t\t\t\tenabled = 1;\n\t\t\t\t\t\t\t};\n\t\t\t\t\t\t};\n\t\t\t\t\t};\n\t\t\t\t\tB4620D7A2220413500EE2876 = {\n\t\t\t\t\t\tCreatedOnToolsVersion = 10.1;\n\t\t\t\t\t\tTestTargetID = B4620D622220413400EE2876;\n\t\t\t\t\t};\n\t\t\t\t\tB4620D852220413500EE2876 = {\n\t\t\t\t\t\tCreatedOnToolsVersion = 10.1;\n\t\t\t\t\t\tTestTargetID = B4620D622220413400EE2876;\n\t\t\t\t\t};\n\t\t\t\t};\n\t\t\t};\n\t\t\tbuildConfigurationList = B4620D5E2220413400EE2876 /* Build configuration list for PBXProject \"GPURenderKitDemo\" */;\n\t\t\tcompatibilityVersion = \"Xcode 9.3\";\n\t\t\tdevelopmentRegion = en;\n\t\t\thasScannedForEncodings = 0;\n\t\t\tknownRegions = (\n\t\t\t\ten,\n\t\t\t\tBase,\n\t\t\t);\n\t\t\tmainGroup = B4620D5A2220413400EE2876;\n\t\t\tproductRefGroup = B4620D642220413400EE2876 /* Products */;\n\t\t\tprojectDirPath = \"\";\n\t\t\tprojectReferences = (\n\t\t\t\t{\n\t\t\t\t\tProductGroup = B4A9FE7C2265C5B000BC6E51 /* Products */;\n\t\t\t\t\tProjectRef = B4A9FE7B2265C5B000BC6E51 /* GPURenderKit.xcodeproj */;\n\t\t\t\t},\n\t\t\t);\n\t\t\tprojectRoot = \"\";\n\t\t\ttargets = (\n\t\t\t\tB4620D622220413400EE2876 /* GPURenderKitDemo */,\n\t\t\t\tB4620D7A2220413500EE2876 /* GPURenderKitDemoTests */,\n\t\t\t\tB4620D852220413500EE2876 /* GPURenderKitDemoUITests */,\n\t\t\t);\n\t\t};\n/* End PBXProject section */\n\n/* Begin PBXReferenceProxy section */\n\t\tB4A9FE812265C5B000BC6E51 /* GPURenderKit.framework */ = {\n\t\t\tisa = PBXReferenceProxy;\n\t\t\tfileType = wrapper.framework;\n\t\t\tpath = GPURenderKit.framework;\n\t\t\tremoteRef = B4A9FE802265C5B000BC6E51 /* PBXContainerItemProxy */;\n\t\t\tsourceTree = BUILT_PRODUCTS_DIR;\n\t\t};\n\t\tB4A9FE832265C5B000BC6E51 /* GPURenderKitTests.xctest */ = {\n\t\t\tisa = PBXReferenceProxy;\n\t\t\tfileType = 
wrapper.cfbundle;\n\t\t\tpath = GPURenderKitTests.xctest;\n\t\t\tremoteRef = B4A9FE822265C5B000BC6E51 /* PBXContainerItemProxy */;\n\t\t\tsourceTree = BUILT_PRODUCTS_DIR;\n\t\t};\n/* End PBXReferenceProxy section */\n\n/* Begin PBXResourcesBuildPhase section */\n\t\tB4620D612220413400EE2876 /* Resources */ = {\n\t\t\tisa = PBXResourcesBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = (\n\t\t\t\tB4620D732220413500EE2876 /* LaunchScreen.storyboard in Resources */,\n\t\t\t\tB49EF6432263541400C220B0 /* glitchPicture001.png in Resources */,\n\t\t\t\tB413173A2277258A00B4866F /* 宽图.JPG in Resources */,\n\t\t\t\tB49EF6402263541400C220B0 /* glitchPicture003.png in Resources */,\n\t\t\t\tB42A993E226D6E860067297F /* 人脸106个关键点.png in Resources */,\n\t\t\t\tB41317422277258A00B4866F /* ATSelectBarView.xib in Resources */,\n\t\t\t\tB41317382277258A00B4866F /* 纹理.jpg in Resources */,\n\t\t\t\tB413173B2277258A00B4866F /* 方图600*600.JPG in Resources */,\n\t\t\t\tB49EF6442263541400C220B0 /* glitchPicture005.png in Resources */,\n\t\t\t\tB4620D702220413500EE2876 /* Assets.xcassets in Resources */,\n\t\t\t\tB49EF6412263541400C220B0 /* glitchPicture002.png in Resources */,\n\t\t\t\tB413173C2277258A00B4866F /* 原图.jpg in Resources */,\n\t\t\t\tB49EF6452263541400C220B0 /* glitchPicture004.png in Resources */,\n\t\t\t\tB41317392277258A00B4866F /* 长图.JPG in Resources */,\n\t\t\t\tB42C4F2D225DB9B400CF9B9B /* 6666.mp3 in Resources */,\n\t\t\t\tB4DC43E42264D372009C08A0 /* megviifacepp_0_5_2_model in Resources */,\n\t\t\t\tB49EF63F2263541400C220B0 /* glitchPicture100.png in Resources */,\n\t\t\t\tB49EF6422263541400C220B0 /* glitchPicture000.png in Resources */,\n\t\t\t\tB4DAAC5222573EB90094A3B2 /* 测试视频.mp4 in Resources */,\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n\t\tB4620D792220413500EE2876 /* Resources */ = {\n\t\t\tisa = PBXResourcesBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = (\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n\t\tB4620D842220413500EE2876 /* Resources */ = {\n\t\t\tisa = PBXResourcesBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = (\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n/* End PBXResourcesBuildPhase section */\n\n/* Begin PBXSourcesBuildPhase section */\n\t\tB4620D5F2220413400EE2876 /* Sources */ = {\n\t\t\tisa = PBXSourcesBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = (\n\t\t\t\tB41317412277258A00B4866F /* ATRiseViewController.m in Sources */,\n\t\t\t\tB41317462277258A00B4866F /* DDGLSelectView.m in Sources */,\n\t\t\t\tB49EF60C22622D6D00C220B0 /* GLImageFilterShowViewController.m in Sources */,\n\t\t\t\tB49EF62D22634BDD00C220B0 /* GLSliderView.m in Sources */,\n\t\t\t\tB4620D762220413500EE2876 /* main.m in Sources */,\n\t\t\t\tB413174B2277258A00B4866F /* ATSelectBarView.m in Sources */,\n\t\t\t\tB41317492277258A00B4866F /* DDGLShapeView.m in Sources */,\n\t\t\t\tB4DAAC50225726000094A3B2 /* DDAVAssetRenderManage.m in Sources */,\n\t\t\t\tB49EF66B226399BD00C220B0 /* MovieViewController.m in Sources */,\n\t\t\t\tB41317432277258A00B4866F /* WeSliderView.m in Sources */,\n\t\t\t\tB47A5ED4222127EE006E07A4 /* BaseViewController.m in Sources */,\n\t\t\t\tB49EF62C22634BDD00C220B0 /* GLImagePickerHelper.m in Sources */,\n\t\t\t\tB4620D682220413400EE2876 /* AppDelegate.m in Sources */,\n\t\t\t\tB42C4F4A225F3ACF00CF9B9B /* GLImageFilterListViewController.m in Sources */,\n\t\t\t\tB41317442277258A00B4866F /* ATSliderView.m in Sources */,\n\t\t\t\tB4C64C3322748239001BB716 /* 
VideoEcodeViewController.m in Sources */,\n\t\t\t\tB41317472277258A00B4866F /* DDGLShapingView.m in Sources */,\n\t\t\t\tB42EC0542252044900226FA0 /* DouYinEffectTabView.m in Sources */,\n\t\t\t\tB4DAAC4D2257257F0094A3B2 /* GLImageMovieUseViewController.m in Sources */,\n\t\t\t\tB4DC43E22264D372009C08A0 /* MGFaceLicenseHandle.m in Sources */,\n\t\t\t\tB41317452277258A00B4866F /* DDGLShapeControlView.m in Sources */,\n\t\t\t\tB413173D2277258A00B4866F /* UIView+Xib.m in Sources */,\n\t\t\t\tB413174A2277258A00B4866F /* ATRiseMenuView.m in Sources */,\n\t\t\t\tB47A5ED7222127FF006E07A4 /* MainViewController.m in Sources */,\n\t\t\t\tB49EF62E22634BDD00C220B0 /* GLFilterInfoView.m in Sources */,\n\t\t\t\tB49EF62B22634BDD00C220B0 /* UIImage+Rotate.m in Sources */,\n\t\t\t\tB49EF64C226372C200C220B0 /* DDMediaEditorManage.m in Sources */,\n\t\t\t\tB4DC43BC2264CE9A009C08A0 /* FaceViewController.mm in Sources */,\n\t\t\t\tB4A9FE782265BF6F00BC6E51 /* FaceSliderView.m in Sources */,\n\t\t\t\tB41317482277258A00B4866F /* DDGLShapeSelView.m in Sources */,\n\t\t\t\tB4C64C3622748A5A001BB716 /* DDVideoEcodeManage.m in Sources */,\n\t\t\t\tB42EC0512252039600226FA0 /* GLDouYinEffectViewController.m in Sources */,\n\t\t\t\tB413173E2277258A00B4866F /* DDShapeViewController.m in Sources */,\n\t\t\t\tB49EF65D226393D200C220B0 /* MBProgressHUD.m in Sources */,\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n\t\tB4620D772220413500EE2876 /* Sources */ = {\n\t\t\tisa = PBXSourcesBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = (\n\t\t\t\tB4620D802220413500EE2876 /* GPURenderKitDemoTests.m in Sources */,\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n\t\tB4620D822220413500EE2876 /* Sources */ = {\n\t\t\tisa = PBXSourcesBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = (\n\t\t\t\tB4620D8B2220413500EE2876 /* GPURenderKitDemoUITests.m in Sources */,\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n/* End PBXSourcesBuildPhase section */\n\n/* Begin PBXTargetDependency section */\n\t\tB4620D7D2220413500EE2876 /* PBXTargetDependency */ = {\n\t\t\tisa = PBXTargetDependency;\n\t\t\ttarget = B4620D622220413400EE2876 /* GPURenderKitDemo */;\n\t\t\ttargetProxy = B4620D7C2220413500EE2876 /* PBXContainerItemProxy */;\n\t\t};\n\t\tB4620D882220413500EE2876 /* PBXTargetDependency */ = {\n\t\t\tisa = PBXTargetDependency;\n\t\t\ttarget = B4620D622220413400EE2876 /* GPURenderKitDemo */;\n\t\t\ttargetProxy = B4620D872220413500EE2876 /* PBXContainerItemProxy */;\n\t\t};\n/* End PBXTargetDependency section */\n\n/* Begin PBXVariantGroup section */\n\t\tB4620D712220413500EE2876 /* LaunchScreen.storyboard */ = {\n\t\t\tisa = PBXVariantGroup;\n\t\t\tchildren = (\n\t\t\t\tB4620D722220413500EE2876 /* Base */,\n\t\t\t);\n\t\t\tname = LaunchScreen.storyboard;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n/* End PBXVariantGroup section */\n\n/* Begin XCBuildConfiguration section */\n\t\tB4620D8D2220413500EE2876 /* Debug */ = {\n\t\t\tisa = XCBuildConfiguration;\n\t\t\tbuildSettings = {\n\t\t\t\tALWAYS_SEARCH_USER_PATHS = NO;\n\t\t\t\tCLANG_ANALYZER_NONNULL = YES;\n\t\t\t\tCLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;\n\t\t\t\tCLANG_CXX_LANGUAGE_STANDARD = \"gnu++14\";\n\t\t\t\tCLANG_CXX_LIBRARY = \"libc++\";\n\t\t\t\tCLANG_ENABLE_MODULES = YES;\n\t\t\t\tCLANG_ENABLE_OBJC_ARC = YES;\n\t\t\t\tCLANG_ENABLE_OBJC_WEAK = YES;\n\t\t\t\tCLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;\n\t\t\t\tCLANG_WARN_BOOL_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_COMMA = 
YES;\n\t\t\t\tCLANG_WARN_CONSTANT_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;\n\t\t\t\tCLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;\n\t\t\t\tCLANG_WARN_DOCUMENTATION_COMMENTS = YES;\n\t\t\t\tCLANG_WARN_EMPTY_BODY = YES;\n\t\t\t\tCLANG_WARN_ENUM_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_INFINITE_RECURSION = YES;\n\t\t\t\tCLANG_WARN_INT_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;\n\t\t\t\tCLANG_WARN_OBJC_LITERAL_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;\n\t\t\t\tCLANG_WARN_RANGE_LOOP_ANALYSIS = YES;\n\t\t\t\tCLANG_WARN_STRICT_PROTOTYPES = YES;\n\t\t\t\tCLANG_WARN_SUSPICIOUS_MOVE = YES;\n\t\t\t\tCLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;\n\t\t\t\tCLANG_WARN_UNREACHABLE_CODE = YES;\n\t\t\t\tCLANG_WARN__DUPLICATE_METHOD_MATCH = YES;\n\t\t\t\tCODE_SIGN_IDENTITY = \"iPhone Developer\";\n\t\t\t\tCOPY_PHASE_STRIP = NO;\n\t\t\t\tDEBUG_INFORMATION_FORMAT = dwarf;\n\t\t\t\tENABLE_STRICT_OBJC_MSGSEND = YES;\n\t\t\t\tENABLE_TESTABILITY = YES;\n\t\t\t\tGCC_C_LANGUAGE_STANDARD = gnu11;\n\t\t\t\tGCC_DYNAMIC_NO_PIC = NO;\n\t\t\t\tGCC_NO_COMMON_BLOCKS = YES;\n\t\t\t\tGCC_OPTIMIZATION_LEVEL = 0;\n\t\t\t\tGCC_PREPROCESSOR_DEFINITIONS = (\n\t\t\t\t\t\"DEBUG=1\",\n\t\t\t\t\t\"$(inherited)\",\n\t\t\t\t);\n\t\t\t\tGCC_WARN_64_TO_32_BIT_CONVERSION = YES;\n\t\t\t\tGCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;\n\t\t\t\tGCC_WARN_UNDECLARED_SELECTOR = YES;\n\t\t\t\tGCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;\n\t\t\t\tGCC_WARN_UNUSED_FUNCTION = YES;\n\t\t\t\tGCC_WARN_UNUSED_VARIABLE = YES;\n\t\t\t\tIPHONEOS_DEPLOYMENT_TARGET = 12.1;\n\t\t\t\tMTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;\n\t\t\t\tMTL_FAST_MATH = YES;\n\t\t\t\tONLY_ACTIVE_ARCH = YES;\n\t\t\t\tSDKROOT = iphoneos;\n\t\t\t};\n\t\t\tname = Debug;\n\t\t};\n\t\tB4620D8E2220413500EE2876 /* Release */ = {\n\t\t\tisa = XCBuildConfiguration;\n\t\t\tbuildSettings = {\n\t\t\t\tALWAYS_SEARCH_USER_PATHS = NO;\n\t\t\t\tCLANG_ANALYZER_NONNULL = YES;\n\t\t\t\tCLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;\n\t\t\t\tCLANG_CXX_LANGUAGE_STANDARD = \"gnu++14\";\n\t\t\t\tCLANG_CXX_LIBRARY = \"libc++\";\n\t\t\t\tCLANG_ENABLE_MODULES = YES;\n\t\t\t\tCLANG_ENABLE_OBJC_ARC = YES;\n\t\t\t\tCLANG_ENABLE_OBJC_WEAK = YES;\n\t\t\t\tCLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;\n\t\t\t\tCLANG_WARN_BOOL_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_COMMA = YES;\n\t\t\t\tCLANG_WARN_CONSTANT_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;\n\t\t\t\tCLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;\n\t\t\t\tCLANG_WARN_DOCUMENTATION_COMMENTS = YES;\n\t\t\t\tCLANG_WARN_EMPTY_BODY = YES;\n\t\t\t\tCLANG_WARN_ENUM_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_INFINITE_RECURSION = YES;\n\t\t\t\tCLANG_WARN_INT_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;\n\t\t\t\tCLANG_WARN_OBJC_LITERAL_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;\n\t\t\t\tCLANG_WARN_RANGE_LOOP_ANALYSIS = YES;\n\t\t\t\tCLANG_WARN_STRICT_PROTOTYPES = YES;\n\t\t\t\tCLANG_WARN_SUSPICIOUS_MOVE = YES;\n\t\t\t\tCLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;\n\t\t\t\tCLANG_WARN_UNREACHABLE_CODE = YES;\n\t\t\t\tCLANG_WARN__DUPLICATE_METHOD_MATCH = YES;\n\t\t\t\tCODE_SIGN_IDENTITY = \"iPhone Developer\";\n\t\t\t\tCOPY_PHASE_STRIP = NO;\n\t\t\t\tDEBUG_INFORMATION_FORMAT = \"dwarf-with-dsym\";\n\t\t\t\tENABLE_NS_ASSERTIONS = NO;\n\t\t\t\tENABLE_STRICT_OBJC_MSGSEND = 
YES;\n\t\t\t\tGCC_C_LANGUAGE_STANDARD = gnu11;\n\t\t\t\tGCC_NO_COMMON_BLOCKS = YES;\n\t\t\t\tGCC_WARN_64_TO_32_BIT_CONVERSION = YES;\n\t\t\t\tGCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;\n\t\t\t\tGCC_WARN_UNDECLARED_SELECTOR = YES;\n\t\t\t\tGCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;\n\t\t\t\tGCC_WARN_UNUSED_FUNCTION = YES;\n\t\t\t\tGCC_WARN_UNUSED_VARIABLE = YES;\n\t\t\t\tIPHONEOS_DEPLOYMENT_TARGET = 12.1;\n\t\t\t\tMTL_ENABLE_DEBUG_INFO = NO;\n\t\t\t\tMTL_FAST_MATH = YES;\n\t\t\t\tSDKROOT = iphoneos;\n\t\t\t\tVALIDATE_PRODUCT = YES;\n\t\t\t};\n\t\t\tname = Release;\n\t\t};\n\t\tB4620D902220413500EE2876 /* Debug */ = {\n\t\t\tisa = XCBuildConfiguration;\n\t\t\tbuildSettings = {\n\t\t\t\tASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;\n\t\t\t\tCODE_SIGN_STYLE = Automatic;\n\t\t\t\tDEVELOPMENT_TEAM = A9426LDZX7;\n\t\t\t\tENABLE_BITCODE = NO;\n\t\t\t\tFRAMEWORK_SEARCH_PATHS = (\n\t\t\t\t\t\"$(inherited)\",\n\t\t\t\t\t\"$(PROJECT_DIR)/GPURenderKitDemo\",\n\t\t\t\t);\n\t\t\t\tGCC_PRECOMPILE_PREFIX_HEADER = YES;\n\t\t\t\tGCC_PREFIX_HEADER = GPURenderKitDemo/GPURenderMacroHeader.h;\n\t\t\t\tINFOPLIST_FILE = GPURenderKitDemo/Info.plist;\n\t\t\t\tIPHONEOS_DEPLOYMENT_TARGET = 9.0;\n\t\t\t\tLD_RUNPATH_SEARCH_PATHS = (\n\t\t\t\t\t\"$(inherited)\",\n\t\t\t\t\t\"@executable_path/Frameworks\",\n\t\t\t\t);\n\t\t\t\tLIBRARY_SEARCH_PATHS = (\n\t\t\t\t\t\"$(inherited)\",\n\t\t\t\t\t\"$(PROJECT_DIR)/GPURenderKitDemo/Face/Sensetime\",\n\t\t\t\t\t\"$(PROJECT_DIR)/GPURenderKitDemo/Face/iOS_SDK\",\n\t\t\t\t\t\"$(PROJECT_DIR)/GPURenderKitDemo/Face/Face++/iOS_SDK\",\n\t\t\t\t\t\"$(PROJECT_DIR)/GPURenderKitDemo/Face/Face++/Tool\",\n\t\t\t\t\t\"$(PROJECT_DIR)/GPURenderKitDemo/Face/Face++/MGLicense\",\n\t\t\t\t);\n\t\t\t\tOTHER_LDFLAGS = \"-lstdc++\";\n\t\t\t\tPRODUCT_BUNDLE_IDENTIFIER = simon.GPURenderKitDemo;\n\t\t\t\tPRODUCT_NAME = \"$(TARGET_NAME)\";\n\t\t\t\tTARGETED_DEVICE_FAMILY = \"1,2\";\n\t\t\t};\n\t\t\tname = Debug;\n\t\t};\n\t\tB4620D912220413500EE2876 /* Release */ = {\n\t\t\tisa = XCBuildConfiguration;\n\t\t\tbuildSettings = {\n\t\t\t\tASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;\n\t\t\t\tCODE_SIGN_STYLE = Automatic;\n\t\t\t\tDEVELOPMENT_TEAM = A9426LDZX7;\n\t\t\t\tENABLE_BITCODE = NO;\n\t\t\t\tFRAMEWORK_SEARCH_PATHS = (\n\t\t\t\t\t\"$(inherited)\",\n\t\t\t\t\t\"$(PROJECT_DIR)/GPURenderKitDemo\",\n\t\t\t\t);\n\t\t\t\tGCC_OPTIMIZATION_LEVEL = 0;\n\t\t\t\tGCC_PRECOMPILE_PREFIX_HEADER = YES;\n\t\t\t\tGCC_PREFIX_HEADER = GPURenderKitDemo/GPURenderMacroHeader.h;\n\t\t\t\tINFOPLIST_FILE = GPURenderKitDemo/Info.plist;\n\t\t\t\tIPHONEOS_DEPLOYMENT_TARGET = 9.0;\n\t\t\t\tLD_RUNPATH_SEARCH_PATHS = (\n\t\t\t\t\t\"$(inherited)\",\n\t\t\t\t\t\"@executable_path/Frameworks\",\n\t\t\t\t);\n\t\t\t\tLIBRARY_SEARCH_PATHS = (\n\t\t\t\t\t\"$(inherited)\",\n\t\t\t\t\t\"$(PROJECT_DIR)/GPURenderKitDemo/Face/Sensetime\",\n\t\t\t\t\t\"$(PROJECT_DIR)/GPURenderKitDemo/Face/iOS_SDK\",\n\t\t\t\t\t\"$(PROJECT_DIR)/GPURenderKitDemo/Face/Face++/iOS_SDK\",\n\t\t\t\t\t\"$(PROJECT_DIR)/GPURenderKitDemo/Face/Face++/Tool\",\n\t\t\t\t\t\"$(PROJECT_DIR)/GPURenderKitDemo/Face/Face++/MGLicense\",\n\t\t\t\t);\n\t\t\t\tOTHER_LDFLAGS = \"-lstdc++\";\n\t\t\t\tPRODUCT_BUNDLE_IDENTIFIER = simon.GPURenderKitDemo;\n\t\t\t\tPRODUCT_NAME = \"$(TARGET_NAME)\";\n\t\t\t\tTARGETED_DEVICE_FAMILY = \"1,2\";\n\t\t\t};\n\t\t\tname = Release;\n\t\t};\n\t\tB4620D932220413500EE2876 /* Debug */ = {\n\t\t\tisa = XCBuildConfiguration;\n\t\t\tbuildSettings = {\n\t\t\t\tBUNDLE_LOADER = \"$(TEST_HOST)\";\n\t\t\t\tCODE_SIGN_STYLE = Automatic;\n\t\t\t\tDEVELOPMENT_TEAM = 
A9426LDZX7;\n\t\t\t\tINFOPLIST_FILE = GPURenderKitDemoTests/Info.plist;\n\t\t\t\tLD_RUNPATH_SEARCH_PATHS = (\n\t\t\t\t\t\"$(inherited)\",\n\t\t\t\t\t\"@executable_path/Frameworks\",\n\t\t\t\t\t\"@loader_path/Frameworks\",\n\t\t\t\t);\n\t\t\t\tPRODUCT_BUNDLE_IDENTIFIER = simon.GPURenderKitDemoTests;\n\t\t\t\tPRODUCT_NAME = \"$(TARGET_NAME)\";\n\t\t\t\tTARGETED_DEVICE_FAMILY = \"1,2\";\n\t\t\t\tTEST_HOST = \"$(BUILT_PRODUCTS_DIR)/GPURenderKitDemo.app/GPURenderKitDemo\";\n\t\t\t};\n\t\t\tname = Debug;\n\t\t};\n\t\tB4620D942220413500EE2876 /* Release */ = {\n\t\t\tisa = XCBuildConfiguration;\n\t\t\tbuildSettings = {\n\t\t\t\tBUNDLE_LOADER = \"$(TEST_HOST)\";\n\t\t\t\tCODE_SIGN_STYLE = Automatic;\n\t\t\t\tDEVELOPMENT_TEAM = A9426LDZX7;\n\t\t\t\tINFOPLIST_FILE = GPURenderKitDemoTests/Info.plist;\n\t\t\t\tLD_RUNPATH_SEARCH_PATHS = (\n\t\t\t\t\t\"$(inherited)\",\n\t\t\t\t\t\"@executable_path/Frameworks\",\n\t\t\t\t\t\"@loader_path/Frameworks\",\n\t\t\t\t);\n\t\t\t\tPRODUCT_BUNDLE_IDENTIFIER = simon.GPURenderKitDemoTests;\n\t\t\t\tPRODUCT_NAME = \"$(TARGET_NAME)\";\n\t\t\t\tTARGETED_DEVICE_FAMILY = \"1,2\";\n\t\t\t\tTEST_HOST = \"$(BUILT_PRODUCTS_DIR)/GPURenderKitDemo.app/GPURenderKitDemo\";\n\t\t\t};\n\t\t\tname = Release;\n\t\t};\n\t\tB4620D962220413500EE2876 /* Debug */ = {\n\t\t\tisa = XCBuildConfiguration;\n\t\t\tbuildSettings = {\n\t\t\t\tCODE_SIGN_STYLE = Automatic;\n\t\t\t\tDEVELOPMENT_TEAM = A9426LDZX7;\n\t\t\t\tINFOPLIST_FILE = GPURenderKitDemoUITests/Info.plist;\n\t\t\t\tLD_RUNPATH_SEARCH_PATHS = (\n\t\t\t\t\t\"$(inherited)\",\n\t\t\t\t\t\"@executable_path/Frameworks\",\n\t\t\t\t\t\"@loader_path/Frameworks\",\n\t\t\t\t);\n\t\t\t\tPRODUCT_BUNDLE_IDENTIFIER = simon.GPURenderKitDemoUITests;\n\t\t\t\tPRODUCT_NAME = \"$(TARGET_NAME)\";\n\t\t\t\tTARGETED_DEVICE_FAMILY = \"1,2\";\n\t\t\t\tTEST_TARGET_NAME = GPURenderKitDemo;\n\t\t\t};\n\t\t\tname = Debug;\n\t\t};\n\t\tB4620D972220413500EE2876 /* Release */ = {\n\t\t\tisa = XCBuildConfiguration;\n\t\t\tbuildSettings = {\n\t\t\t\tCODE_SIGN_STYLE = Automatic;\n\t\t\t\tDEVELOPMENT_TEAM = A9426LDZX7;\n\t\t\t\tINFOPLIST_FILE = GPURenderKitDemoUITests/Info.plist;\n\t\t\t\tLD_RUNPATH_SEARCH_PATHS = (\n\t\t\t\t\t\"$(inherited)\",\n\t\t\t\t\t\"@executable_path/Frameworks\",\n\t\t\t\t\t\"@loader_path/Frameworks\",\n\t\t\t\t);\n\t\t\t\tPRODUCT_BUNDLE_IDENTIFIER = simon.GPURenderKitDemoUITests;\n\t\t\t\tPRODUCT_NAME = \"$(TARGET_NAME)\";\n\t\t\t\tTARGETED_DEVICE_FAMILY = \"1,2\";\n\t\t\t\tTEST_TARGET_NAME = GPURenderKitDemo;\n\t\t\t};\n\t\t\tname = Release;\n\t\t};\n/* End XCBuildConfiguration section */\n\n/* Begin XCConfigurationList section */\n\t\tB4620D5E2220413400EE2876 /* Build configuration list for PBXProject \"GPURenderKitDemo\" */ = {\n\t\t\tisa = XCConfigurationList;\n\t\t\tbuildConfigurations = (\n\t\t\t\tB4620D8D2220413500EE2876 /* Debug */,\n\t\t\t\tB4620D8E2220413500EE2876 /* Release */,\n\t\t\t);\n\t\t\tdefaultConfigurationIsVisible = 0;\n\t\t\tdefaultConfigurationName = Release;\n\t\t};\n\t\tB4620D8F2220413500EE2876 /* Build configuration list for PBXNativeTarget \"GPURenderKitDemo\" */ = {\n\t\t\tisa = XCConfigurationList;\n\t\t\tbuildConfigurations = (\n\t\t\t\tB4620D902220413500EE2876 /* Debug */,\n\t\t\t\tB4620D912220413500EE2876 /* Release */,\n\t\t\t);\n\t\t\tdefaultConfigurationIsVisible = 0;\n\t\t\tdefaultConfigurationName = Release;\n\t\t};\n\t\tB4620D922220413500EE2876 /* Build configuration list for PBXNativeTarget \"GPURenderKitDemoTests\" */ = {\n\t\t\tisa = XCConfigurationList;\n\t\t\tbuildConfigurations = 
(\n\t\t\t\tB4620D932220413500EE2876 /* Debug */,\n\t\t\t\tB4620D942220413500EE2876 /* Release */,\n\t\t\t);\n\t\t\tdefaultConfigurationIsVisible = 0;\n\t\t\tdefaultConfigurationName = Release;\n\t\t};\n\t\tB4620D952220413500EE2876 /* Build configuration list for PBXNativeTarget \"GPURenderKitDemoUITests\" */ = {\n\t\t\tisa = XCConfigurationList;\n\t\t\tbuildConfigurations = (\n\t\t\t\tB4620D962220413500EE2876 /* Debug */,\n\t\t\t\tB4620D972220413500EE2876 /* Release */,\n\t\t\t);\n\t\t\tdefaultConfigurationIsVisible = 0;\n\t\t\tdefaultConfigurationName = Release;\n\t\t};\n/* End XCConfigurationList section */\n\t};\n\trootObject = B4620D5B2220413400EE2876 /* Project object */;\n}\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo.xcodeproj/project.xcworkspace/contents.xcworkspacedata",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<Workspace\n   version = \"1.0\">\n   <FileRef\n      location = \"self:GPURenderKitDemo.xcodeproj\">\n   </FileRef>\n</Workspace>\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n<plist version=\"1.0\">\n<dict>\n\t<key>BuildSystemType</key>\n\t<string>Original</string>\n</dict>\n</plist>\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo.xcodeproj/project.xcworkspace/xcuserdata/liuhaidong.xcuserdatad/WorkspaceSettings.xcsettings",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n<plist version=\"1.0\">\n<dict>\n\t<key>BuildLocationStyle</key>\n\t<string>UseAppPreferences</string>\n\t<key>CustomBuildLocationType</key>\n\t<string>RelativeToDerivedData</string>\n\t<key>DerivedDataLocationStyle</key>\n\t<string>Default</string>\n\t<key>EnabledFullIndexStoreVisibility</key>\n\t<false/>\n\t<key>IssueFilterStyle</key>\n\t<string>ShowActiveSchemeOnly</string>\n\t<key>LiveSourceIssuesEnabled</key>\n\t<true/>\n</dict>\n</plist>\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemo.xcodeproj/xcshareddata/xcschemes/GPURenderKitDemo.xcscheme",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<Scheme\n   LastUpgradeVersion = \"1010\"\n   version = \"1.3\">\n   <BuildAction\n      parallelizeBuildables = \"YES\"\n      buildImplicitDependencies = \"YES\">\n      <BuildActionEntries>\n         <BuildActionEntry\n            buildForTesting = \"YES\"\n            buildForRunning = \"YES\"\n            buildForProfiling = \"YES\"\n            buildForArchiving = \"YES\"\n            buildForAnalyzing = \"YES\">\n            <BuildableReference\n               BuildableIdentifier = \"primary\"\n               BlueprintIdentifier = \"B4620D622220413400EE2876\"\n               BuildableName = \"GPURenderKitDemo.app\"\n               BlueprintName = \"GPURenderKitDemo\"\n               ReferencedContainer = \"container:GPURenderKitDemo.xcodeproj\">\n            </BuildableReference>\n         </BuildActionEntry>\n      </BuildActionEntries>\n   </BuildAction>\n   <TestAction\n      buildConfiguration = \"Debug\"\n      selectedDebuggerIdentifier = \"Xcode.DebuggerFoundation.Debugger.LLDB\"\n      selectedLauncherIdentifier = \"Xcode.DebuggerFoundation.Launcher.LLDB\"\n      shouldUseLaunchSchemeArgsEnv = \"YES\">\n      <Testables>\n         <TestableReference\n            skipped = \"NO\">\n            <BuildableReference\n               BuildableIdentifier = \"primary\"\n               BlueprintIdentifier = \"B4620D7A2220413500EE2876\"\n               BuildableName = \"GPURenderKitDemoTests.xctest\"\n               BlueprintName = \"GPURenderKitDemoTests\"\n               ReferencedContainer = \"container:GPURenderKitDemo.xcodeproj\">\n            </BuildableReference>\n         </TestableReference>\n         <TestableReference\n            skipped = \"NO\">\n            <BuildableReference\n               BuildableIdentifier = \"primary\"\n               BlueprintIdentifier = \"B4620D852220413500EE2876\"\n               BuildableName = \"GPURenderKitDemoUITests.xctest\"\n               BlueprintName = \"GPURenderKitDemoUITests\"\n               ReferencedContainer = \"container:GPURenderKitDemo.xcodeproj\">\n            </BuildableReference>\n         </TestableReference>\n      </Testables>\n      <MacroExpansion>\n         <BuildableReference\n            BuildableIdentifier = \"primary\"\n            BlueprintIdentifier = \"B4620D622220413400EE2876\"\n            BuildableName = \"GPURenderKitDemo.app\"\n            BlueprintName = \"GPURenderKitDemo\"\n            ReferencedContainer = \"container:GPURenderKitDemo.xcodeproj\">\n         </BuildableReference>\n      </MacroExpansion>\n      <AdditionalOptions>\n      </AdditionalOptions>\n   </TestAction>\n   <LaunchAction\n      buildConfiguration = \"Debug\"\n      selectedDebuggerIdentifier = \"Xcode.DebuggerFoundation.Debugger.LLDB\"\n      selectedLauncherIdentifier = \"Xcode.DebuggerFoundation.Launcher.LLDB\"\n      launchStyle = \"0\"\n      useCustomWorkingDirectory = \"NO\"\n      ignoresPersistentStateOnLaunch = \"NO\"\n      debugDocumentVersioning = \"YES\"\n      debugServiceExtension = \"internal\"\n      allowLocationSimulation = \"YES\">\n      <BuildableProductRunnable\n         runnableDebuggingMode = \"0\">\n         <BuildableReference\n            BuildableIdentifier = \"primary\"\n            BlueprintIdentifier = \"B4620D622220413400EE2876\"\n            BuildableName = \"GPURenderKitDemo.app\"\n            BlueprintName = \"GPURenderKitDemo\"\n            ReferencedContainer = \"container:GPURenderKitDemo.xcodeproj\">\n         
</BuildableReference>\n      </BuildableProductRunnable>\n      <AdditionalOptions>\n         <AdditionalOption\n            key = \"NSZombieEnabled\"\n            value = \"YES\"\n            isEnabled = \"YES\">\n         </AdditionalOption>\n      </AdditionalOptions>\n   </LaunchAction>\n   <ProfileAction\n      buildConfiguration = \"Release\"\n      shouldUseLaunchSchemeArgsEnv = \"YES\"\n      savedToolIdentifier = \"\"\n      useCustomWorkingDirectory = \"NO\"\n      debugDocumentVersioning = \"YES\">\n      <BuildableProductRunnable\n         runnableDebuggingMode = \"0\">\n         <BuildableReference\n            BuildableIdentifier = \"primary\"\n            BlueprintIdentifier = \"B4620D622220413400EE2876\"\n            BuildableName = \"GPURenderKitDemo.app\"\n            BlueprintName = \"GPURenderKitDemo\"\n            ReferencedContainer = \"container:GPURenderKitDemo.xcodeproj\">\n         </BuildableReference>\n      </BuildableProductRunnable>\n   </ProfileAction>\n   <AnalyzeAction\n      buildConfiguration = \"Debug\">\n   </AnalyzeAction>\n   <ArchiveAction\n      buildConfiguration = \"Release\"\n      revealArchiveInOrganizer = \"YES\">\n   </ArchiveAction>\n</Scheme>\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemoTests/GPURenderKitDemoTests.m",
    "content": "//\n//  GPURenderKitDemoTests.m\n//  GPURenderKitDemoTests\n//\n//  Created by 刘海东 on 2019/2/22.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import <XCTest/XCTest.h>\n\n@interface GPURenderKitDemoTests : XCTestCase\n\n@end\n\n@implementation GPURenderKitDemoTests\n\n- (void)setUp {\n    // Put setup code here. This method is called before the invocation of each test method in the class.\n}\n\n- (void)tearDown {\n    // Put teardown code here. This method is called after the invocation of each test method in the class.\n}\n\n- (void)testExample {\n    // This is an example of a functional test case.\n    // Use XCTAssert and related functions to verify your tests produce the correct results.\n}\n\n- (void)testPerformanceExample {\n    // This is an example of a performance test case.\n    [self measureBlock:^{\n        // Put the code you want to measure the time of here.\n    }];\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemoTests/Info.plist",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n<plist version=\"1.0\">\n<dict>\n\t<key>CFBundleDevelopmentRegion</key>\n\t<string>$(DEVELOPMENT_LANGUAGE)</string>\n\t<key>CFBundleExecutable</key>\n\t<string>$(EXECUTABLE_NAME)</string>\n\t<key>CFBundleIdentifier</key>\n\t<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>\n\t<key>CFBundleInfoDictionaryVersion</key>\n\t<string>6.0</string>\n\t<key>CFBundleName</key>\n\t<string>$(PRODUCT_NAME)</string>\n\t<key>CFBundlePackageType</key>\n\t<string>BNDL</string>\n\t<key>CFBundleShortVersionString</key>\n\t<string>1.0</string>\n\t<key>CFBundleVersion</key>\n\t<string>1</string>\n</dict>\n</plist>\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemoUITests/GPURenderKitDemoUITests.m",
    "content": "//\n//  GPURenderKitDemoUITests.m\n//  GPURenderKitDemoUITests\n//\n//  Created by 刘海东 on 2019/2/22.\n//  Copyright © 2019 刘海东. All rights reserved.\n//\n\n#import <XCTest/XCTest.h>\n\n@interface GPURenderKitDemoUITests : XCTestCase\n\n@end\n\n@implementation GPURenderKitDemoUITests\n\n- (void)setUp {\n    // Put setup code here. This method is called before the invocation of each test method in the class.\n\n    // In UI tests it is usually best to stop immediately when a failure occurs.\n    self.continueAfterFailure = NO;\n\n    // UI tests must launch the application that they test. Doing this in setup will make sure it happens for each test method.\n    [[[XCUIApplication alloc] init] launch];\n\n    // In UI tests it’s important to set the initial state - such as interface orientation - required for your tests before they run. The setUp method is a good place to do this.\n}\n\n- (void)tearDown {\n    // Put teardown code here. This method is called after the invocation of each test method in the class.\n}\n\n- (void)testExample {\n    // Use recording to get started writing UI tests.\n    // Use XCTAssert and related functions to verify your tests produce the correct results.\n}\n\n@end\n"
  },
  {
    "path": "GPURenderKitDemo/GPURenderKitDemoUITests/Info.plist",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n<plist version=\"1.0\">\n<dict>\n\t<key>CFBundleDevelopmentRegion</key>\n\t<string>$(DEVELOPMENT_LANGUAGE)</string>\n\t<key>CFBundleExecutable</key>\n\t<string>$(EXECUTABLE_NAME)</string>\n\t<key>CFBundleIdentifier</key>\n\t<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>\n\t<key>CFBundleInfoDictionaryVersion</key>\n\t<string>6.0</string>\n\t<key>CFBundleName</key>\n\t<string>$(PRODUCT_NAME)</string>\n\t<key>CFBundlePackageType</key>\n\t<string>BNDL</string>\n\t<key>CFBundleShortVersionString</key>\n\t<string>1.0</string>\n\t<key>CFBundleVersion</key>\n\t<string>1</string>\n</dict>\n</plist>\n"
  },
  {
    "path": "README.md",
    "content": "# GPURenderKitDemo\n基于GPUImage做效果渲染学习.\n\n具体实现方案后期会补充完善。\n\n![人脸调节](https://github.com/Dongdong1991/GPURenderKitDemo/blob/master/GifResources/人脸调节.gif)\n\n![放大缩小](https://github.com/Dongdong1991/GPURenderKitDemo/blob/master/GifResources/放大缩小.gif)\n\n![放大缩小](https://github.com/Dongdong1991/GPURenderKitDemo/blob/master/GifResources/模糊分屏.gif)\n\n![灵魂出窍](https://github.com/Dongdong1991/GPURenderKitDemo/blob/master/GifResources/灵魂出窍.gif)\n\n\n###GLDouYinEffectViewController\n这里主要做一些抖音效果的仿写。目前已经实现的Filter。\n\n1.三屏带滤镜效果。\n\n2.四屏。\n\n3.电流效果。\n\n4.格子故障。\n\n5.灵魂出窍。[实现思路](https://www.jianshu.com/p/12ec246485a1)\n\n6.放大缩小。[实现思路](https://www.jianshu.com/p/ad6375fa046b)\n\n7.水面倒影（水的波纹实现的粗糙，数学比较渣，还在想办法优化）。\n\n8.模糊分屏。[实现思路](https://www.jianshu.com/p/34c941349b99)\n\n\n###FaceViewController\n美颜，脸部调节实现。目前已经实现\n\n1.美颜。[琨君大佬的简书---美颜实现](https://www.jianshu.com/p/945fc806a9b4)\n\n2.大小眼调节。\n\n3.胖瘦脸调节。\n\n4.大小鼻调节。\n\n5.基于顶点坐标去实现增高瘦身功能。具体看demo。\n\n**目前里面是在FragmentShader做像素的的调整来实现功能的，后面会放出基于VertexShader做功能的实现（这里可以大家可以想想两种实现方案有什么好处~~）**\n\n由于这里面需要用到人脸106个关键点。\n\n所以大家最好去face++申请一个,免费试用~~每天有5次的免费试用，不然你们都用我的，那我要经常更换。🤣\n\n这里就麻烦各位啦。\n\n更换face++的api\\_key和api\\_secret很简单。去face++注册一个，选择免费的就可以搞定了~~[face++注册](https://www.faceplusplus.com.cn)。注册好之后更换MGNetAccount.h 里面的api\\_key和api\\_secret。这样就可以搞定了。\n\n###GLImageMovieUseViewController\n这里有视频添加滤镜再混音的操作。一次性生成文件，加速视频的合成时间。\n\n\n\n"
  }
]