Showing preview only (2,230K chars total). Download the full file or copy to clipboard to get everything.
Repository: Dongdong1991/GPURenderKitDemo
Branch: master
Commit: 5b4b270583c6
Files: 566
Total size: 42.5 MB
Directory structure:
gitextract_wqoehnzj/
├── GPURenderKit/
│ ├── GPURenderKit/
│ │ ├── GPUImage/
│ │ │ ├── BaseClass/
│ │ │ │ ├── GLProgram.h
│ │ │ │ ├── GLProgram.m
│ │ │ │ ├── GPUImageBuffer.h
│ │ │ │ ├── GPUImageBuffer.m
│ │ │ │ ├── GPUImageFilter.h
│ │ │ │ ├── GPUImageFilter.m
│ │ │ │ ├── GPUImageFilterGroup.h
│ │ │ │ ├── GPUImageFilterGroup.m
│ │ │ │ ├── GPUImageFilterPipeline.h
│ │ │ │ ├── GPUImageFilterPipeline.m
│ │ │ │ ├── GPUImageFourInputFilter.h
│ │ │ │ ├── GPUImageFourInputFilter.m
│ │ │ │ ├── GPUImageFramebuffer.h
│ │ │ │ ├── GPUImageFramebuffer.m
│ │ │ │ ├── GPUImageFramebufferCache.h
│ │ │ │ ├── GPUImageFramebufferCache.m
│ │ │ │ ├── GPUImageMovie.h
│ │ │ │ ├── GPUImageMovie.m
│ │ │ │ ├── GPUImageMovieComposition.h
│ │ │ │ ├── GPUImageMovieComposition.m
│ │ │ │ ├── GPUImageOutput.h
│ │ │ │ ├── GPUImageOutput.m
│ │ │ │ ├── GPUImageTextureInput.h
│ │ │ │ ├── GPUImageTextureInput.m
│ │ │ │ ├── GPUImageTextureOutput.h
│ │ │ │ ├── GPUImageTextureOutput.m
│ │ │ │ ├── GPUImageThreeInputFilter.h
│ │ │ │ ├── GPUImageThreeInputFilter.m
│ │ │ │ ├── GPUImageTwoInputFilter.h
│ │ │ │ ├── GPUImageTwoInputFilter.m
│ │ │ │ ├── GPUImageTwoPassFilter.h
│ │ │ │ ├── GPUImageTwoPassFilter.m
│ │ │ │ ├── GPUImageTwoPassTextureSamplingFilter.h
│ │ │ │ ├── GPUImageTwoPassTextureSamplingFilter.m
│ │ │ │ ├── GPUImageVideoCamera.h
│ │ │ │ └── GPUImageVideoCamera.m
│ │ │ ├── Filters/
│ │ │ │ ├── GPUImage3x3ConvolutionFilter.h
│ │ │ │ ├── GPUImage3x3ConvolutionFilter.m
│ │ │ │ ├── GPUImage3x3TextureSamplingFilter.h
│ │ │ │ ├── GPUImage3x3TextureSamplingFilter.m
│ │ │ │ ├── GPUImageAdaptiveThresholdFilter.h
│ │ │ │ ├── GPUImageAdaptiveThresholdFilter.m
│ │ │ │ ├── GPUImageAddBlendFilter.h
│ │ │ │ ├── GPUImageAddBlendFilter.m
│ │ │ │ ├── GPUImageAlphaBlendFilter.h
│ │ │ │ ├── GPUImageAlphaBlendFilter.m
│ │ │ │ ├── GPUImageAmatorkaFilter.h
│ │ │ │ ├── GPUImageAmatorkaFilter.m
│ │ │ │ ├── GPUImageAverageColor.h
│ │ │ │ ├── GPUImageAverageColor.m
│ │ │ │ ├── GPUImageAverageLuminanceThresholdFilter.h
│ │ │ │ ├── GPUImageAverageLuminanceThresholdFilter.m
│ │ │ │ ├── GPUImageBilateralFilter.h
│ │ │ │ ├── GPUImageBilateralFilter.m
│ │ │ │ ├── GPUImageBoxBlurFilter.h
│ │ │ │ ├── GPUImageBoxBlurFilter.m
│ │ │ │ ├── GPUImageBrightnessFilter.h
│ │ │ │ ├── GPUImageBrightnessFilter.m
│ │ │ │ ├── GPUImageBulgeDistortionFilter.h
│ │ │ │ ├── GPUImageBulgeDistortionFilter.m
│ │ │ │ ├── GPUImageCGAColorspaceFilter.h
│ │ │ │ ├── GPUImageCGAColorspaceFilter.m
│ │ │ │ ├── GPUImageCannyEdgeDetectionFilter.h
│ │ │ │ ├── GPUImageCannyEdgeDetectionFilter.m
│ │ │ │ ├── GPUImageChromaKeyBlendFilter.h
│ │ │ │ ├── GPUImageChromaKeyBlendFilter.m
│ │ │ │ ├── GPUImageChromaKeyFilter.h
│ │ │ │ ├── GPUImageChromaKeyFilter.m
│ │ │ │ ├── GPUImageClosingFilter.h
│ │ │ │ ├── GPUImageClosingFilter.m
│ │ │ │ ├── GPUImageColorBlendFilter.h
│ │ │ │ ├── GPUImageColorBlendFilter.m
│ │ │ │ ├── GPUImageColorBurnBlendFilter.h
│ │ │ │ ├── GPUImageColorBurnBlendFilter.m
│ │ │ │ ├── GPUImageColorConversion.h
│ │ │ │ ├── GPUImageColorConversion.m
│ │ │ │ ├── GPUImageColorDodgeBlendFilter.h
│ │ │ │ ├── GPUImageColorDodgeBlendFilter.m
│ │ │ │ ├── GPUImageColorInvertFilter.h
│ │ │ │ ├── GPUImageColorInvertFilter.m
│ │ │ │ ├── GPUImageColorLocalBinaryPatternFilter.h
│ │ │ │ ├── GPUImageColorLocalBinaryPatternFilter.m
│ │ │ │ ├── GPUImageColorMatrixFilter.h
│ │ │ │ ├── GPUImageColorMatrixFilter.m
│ │ │ │ ├── GPUImageColorPackingFilter.h
│ │ │ │ ├── GPUImageColorPackingFilter.m
│ │ │ │ ├── GPUImageColourFASTFeatureDetector.h
│ │ │ │ ├── GPUImageColourFASTFeatureDetector.m
│ │ │ │ ├── GPUImageColourFASTSamplingOperation.h
│ │ │ │ ├── GPUImageColourFASTSamplingOperation.m
│ │ │ │ ├── GPUImageContrastFilter.h
│ │ │ │ ├── GPUImageContrastFilter.m
│ │ │ │ ├── GPUImageCropFilter.h
│ │ │ │ ├── GPUImageCropFilter.m
│ │ │ │ ├── GPUImageCrosshairGenerator.h
│ │ │ │ ├── GPUImageCrosshairGenerator.m
│ │ │ │ ├── GPUImageCrosshatchFilter.h
│ │ │ │ ├── GPUImageCrosshatchFilter.m
│ │ │ │ ├── GPUImageDarkenBlendFilter.h
│ │ │ │ ├── GPUImageDarkenBlendFilter.m
│ │ │ │ ├── GPUImageDifferenceBlendFilter.h
│ │ │ │ ├── GPUImageDifferenceBlendFilter.m
│ │ │ │ ├── GPUImageDilationFilter.h
│ │ │ │ ├── GPUImageDilationFilter.m
│ │ │ │ ├── GPUImageDirectionalNonMaximumSuppressionFilter.h
│ │ │ │ ├── GPUImageDirectionalNonMaximumSuppressionFilter.m
│ │ │ │ ├── GPUImageDirectionalSobelEdgeDetectionFilter.h
│ │ │ │ ├── GPUImageDirectionalSobelEdgeDetectionFilter.m
│ │ │ │ ├── GPUImageDissolveBlendFilter.h
│ │ │ │ ├── GPUImageDissolveBlendFilter.m
│ │ │ │ ├── GPUImageDivideBlendFilter.h
│ │ │ │ ├── GPUImageDivideBlendFilter.m
│ │ │ │ ├── GPUImageEmbossFilter.h
│ │ │ │ ├── GPUImageEmbossFilter.m
│ │ │ │ ├── GPUImageErosionFilter.h
│ │ │ │ ├── GPUImageErosionFilter.m
│ │ │ │ ├── GPUImageExclusionBlendFilter.h
│ │ │ │ ├── GPUImageExclusionBlendFilter.m
│ │ │ │ ├── GPUImageExposureFilter.h
│ │ │ │ ├── GPUImageExposureFilter.m
│ │ │ │ ├── GPUImageFASTCornerDetectionFilter.h
│ │ │ │ ├── GPUImageFASTCornerDetectionFilter.m
│ │ │ │ ├── GPUImageFalseColorFilter.h
│ │ │ │ ├── GPUImageFalseColorFilter.m
│ │ │ │ ├── GPUImageGammaFilter.h
│ │ │ │ ├── GPUImageGammaFilter.m
│ │ │ │ ├── GPUImageGaussianBlurFilter.h
│ │ │ │ ├── GPUImageGaussianBlurFilter.m
│ │ │ │ ├── GPUImageGaussianBlurPositionFilter.h
│ │ │ │ ├── GPUImageGaussianBlurPositionFilter.m
│ │ │ │ ├── GPUImageGaussianSelectiveBlurFilter.h
│ │ │ │ ├── GPUImageGaussianSelectiveBlurFilter.m
│ │ │ │ ├── GPUImageGlassSphereFilter.h
│ │ │ │ ├── GPUImageGlassSphereFilter.m
│ │ │ │ ├── GPUImageGrayscaleFilter.h
│ │ │ │ ├── GPUImageGrayscaleFilter.m
│ │ │ │ ├── GPUImageHSBFilter.h
│ │ │ │ ├── GPUImageHSBFilter.m
│ │ │ │ ├── GPUImageHalftoneFilter.h
│ │ │ │ ├── GPUImageHalftoneFilter.m
│ │ │ │ ├── GPUImageHardLightBlendFilter.h
│ │ │ │ ├── GPUImageHardLightBlendFilter.m
│ │ │ │ ├── GPUImageHarrisCornerDetectionFilter.h
│ │ │ │ ├── GPUImageHarrisCornerDetectionFilter.m
│ │ │ │ ├── GPUImageHazeFilter.h
│ │ │ │ ├── GPUImageHazeFilter.m
│ │ │ │ ├── GPUImageHighPassFilter.h
│ │ │ │ ├── GPUImageHighPassFilter.m
│ │ │ │ ├── GPUImageHighlightShadowFilter.h
│ │ │ │ ├── GPUImageHighlightShadowFilter.m
│ │ │ │ ├── GPUImageHighlightShadowTintFilter.h
│ │ │ │ ├── GPUImageHighlightShadowTintFilter.m
│ │ │ │ ├── GPUImageHistogramEqualizationFilter.h
│ │ │ │ ├── GPUImageHistogramEqualizationFilter.m
│ │ │ │ ├── GPUImageHistogramFilter.h
│ │ │ │ ├── GPUImageHistogramFilter.m
│ │ │ │ ├── GPUImageHistogramGenerator.h
│ │ │ │ ├── GPUImageHistogramGenerator.m
│ │ │ │ ├── GPUImageHoughTransformLineDetector.h
│ │ │ │ ├── GPUImageHoughTransformLineDetector.m
│ │ │ │ ├── GPUImageHueBlendFilter.h
│ │ │ │ ├── GPUImageHueBlendFilter.m
│ │ │ │ ├── GPUImageHueFilter.h
│ │ │ │ ├── GPUImageHueFilter.m
│ │ │ │ ├── GPUImageJFAVoronoiFilter.h
│ │ │ │ ├── GPUImageJFAVoronoiFilter.m
│ │ │ │ ├── GPUImageKuwaharaFilter.h
│ │ │ │ ├── GPUImageKuwaharaFilter.m
│ │ │ │ ├── GPUImageKuwaharaRadius3Filter.h
│ │ │ │ ├── GPUImageKuwaharaRadius3Filter.m
│ │ │ │ ├── GPUImageLanczosResamplingFilter.h
│ │ │ │ ├── GPUImageLanczosResamplingFilter.m
│ │ │ │ ├── GPUImageLaplacianFilter.h
│ │ │ │ ├── GPUImageLaplacianFilter.m
│ │ │ │ ├── GPUImageLevelsFilter.h
│ │ │ │ ├── GPUImageLevelsFilter.m
│ │ │ │ ├── GPUImageLightenBlendFilter.h
│ │ │ │ ├── GPUImageLightenBlendFilter.m
│ │ │ │ ├── GPUImageLineGenerator.h
│ │ │ │ ├── GPUImageLineGenerator.m
│ │ │ │ ├── GPUImageLinearBurnBlendFilter.h
│ │ │ │ ├── GPUImageLinearBurnBlendFilter.m
│ │ │ │ ├── GPUImageLocalBinaryPatternFilter.h
│ │ │ │ ├── GPUImageLocalBinaryPatternFilter.m
│ │ │ │ ├── GPUImageLookupFilter.h
│ │ │ │ ├── GPUImageLookupFilter.m
│ │ │ │ ├── GPUImageLowPassFilter.h
│ │ │ │ ├── GPUImageLowPassFilter.m
│ │ │ │ ├── GPUImageLuminanceRangeFilter.h
│ │ │ │ ├── GPUImageLuminanceRangeFilter.m
│ │ │ │ ├── GPUImageLuminanceThresholdFilter.h
│ │ │ │ ├── GPUImageLuminanceThresholdFilter.m
│ │ │ │ ├── GPUImageLuminosity.h
│ │ │ │ ├── GPUImageLuminosity.m
│ │ │ │ ├── GPUImageLuminosityBlendFilter.h
│ │ │ │ ├── GPUImageLuminosityBlendFilter.m
│ │ │ │ ├── GPUImageMaskFilter.h
│ │ │ │ ├── GPUImageMaskFilter.m
│ │ │ │ ├── GPUImageMedianFilter.h
│ │ │ │ ├── GPUImageMedianFilter.m
│ │ │ │ ├── GPUImageMissEtikateFilter.h
│ │ │ │ ├── GPUImageMissEtikateFilter.m
│ │ │ │ ├── GPUImageMonochromeFilter.h
│ │ │ │ ├── GPUImageMonochromeFilter.m
│ │ │ │ ├── GPUImageMosaicFilter.h
│ │ │ │ ├── GPUImageMosaicFilter.m
│ │ │ │ ├── GPUImageMotionBlurFilter.h
│ │ │ │ ├── GPUImageMotionBlurFilter.m
│ │ │ │ ├── GPUImageMotionDetector.h
│ │ │ │ ├── GPUImageMotionDetector.m
│ │ │ │ ├── GPUImageMultiplyBlendFilter.h
│ │ │ │ ├── GPUImageMultiplyBlendFilter.m
│ │ │ │ ├── GPUImageNobleCornerDetectionFilter.h
│ │ │ │ ├── GPUImageNobleCornerDetectionFilter.m
│ │ │ │ ├── GPUImageNonMaximumSuppressionFilter.h
│ │ │ │ ├── GPUImageNonMaximumSuppressionFilter.m
│ │ │ │ ├── GPUImageNormalBlendFilter.h
│ │ │ │ ├── GPUImageNormalBlendFilter.m
│ │ │ │ ├── GPUImageOpacityFilter.h
│ │ │ │ ├── GPUImageOpacityFilter.m
│ │ │ │ ├── GPUImageOpeningFilter.h
│ │ │ │ ├── GPUImageOpeningFilter.m
│ │ │ │ ├── GPUImageOverlayBlendFilter.h
│ │ │ │ ├── GPUImageOverlayBlendFilter.m
│ │ │ │ ├── GPUImageParallelCoordinateLineTransformFilter.h
│ │ │ │ ├── GPUImageParallelCoordinateLineTransformFilter.m
│ │ │ │ ├── GPUImagePerlinNoiseFilter.h
│ │ │ │ ├── GPUImagePerlinNoiseFilter.m
│ │ │ │ ├── GPUImagePinchDistortionFilter.h
│ │ │ │ ├── GPUImagePinchDistortionFilter.m
│ │ │ │ ├── GPUImagePixellateFilter.h
│ │ │ │ ├── GPUImagePixellateFilter.m
│ │ │ │ ├── GPUImagePixellatePositionFilter.h
│ │ │ │ ├── GPUImagePixellatePositionFilter.m
│ │ │ │ ├── GPUImagePoissonBlendFilter.h
│ │ │ │ ├── GPUImagePoissonBlendFilter.m
│ │ │ │ ├── GPUImagePolarPixellateFilter.h
│ │ │ │ ├── GPUImagePolarPixellateFilter.m
│ │ │ │ ├── GPUImagePolkaDotFilter.h
│ │ │ │ ├── GPUImagePolkaDotFilter.m
│ │ │ │ ├── GPUImagePosterizeFilter.h
│ │ │ │ ├── GPUImagePosterizeFilter.m
│ │ │ │ ├── GPUImagePrewittEdgeDetectionFilter.h
│ │ │ │ ├── GPUImagePrewittEdgeDetectionFilter.m
│ │ │ │ ├── GPUImageRGBClosingFilter.h
│ │ │ │ ├── GPUImageRGBClosingFilter.m
│ │ │ │ ├── GPUImageRGBDilationFilter.h
│ │ │ │ ├── GPUImageRGBDilationFilter.m
│ │ │ │ ├── GPUImageRGBErosionFilter.h
│ │ │ │ ├── GPUImageRGBErosionFilter.m
│ │ │ │ ├── GPUImageRGBFilter.h
│ │ │ │ ├── GPUImageRGBFilter.m
│ │ │ │ ├── GPUImageRGBOpeningFilter.h
│ │ │ │ ├── GPUImageRGBOpeningFilter.m
│ │ │ │ ├── GPUImageRawDataInput.h
│ │ │ │ ├── GPUImageRawDataInput.m
│ │ │ │ ├── GPUImageRawDataOutput.h
│ │ │ │ ├── GPUImageRawDataOutput.m
│ │ │ │ ├── GPUImageSaturationBlendFilter.h
│ │ │ │ ├── GPUImageSaturationBlendFilter.m
│ │ │ │ ├── GPUImageSaturationFilter.h
│ │ │ │ ├── GPUImageSaturationFilter.m
│ │ │ │ ├── GPUImageScreenBlendFilter.h
│ │ │ │ ├── GPUImageScreenBlendFilter.m
│ │ │ │ ├── GPUImageSepiaFilter.h
│ │ │ │ ├── GPUImageSepiaFilter.m
│ │ │ │ ├── GPUImageSharpenFilter.h
│ │ │ │ ├── GPUImageSharpenFilter.m
│ │ │ │ ├── GPUImageShiTomasiFeatureDetectionFilter.h
│ │ │ │ ├── GPUImageShiTomasiFeatureDetectionFilter.m
│ │ │ │ ├── GPUImageSingleComponentGaussianBlurFilter.h
│ │ │ │ ├── GPUImageSingleComponentGaussianBlurFilter.m
│ │ │ │ ├── GPUImageSketchFilter.h
│ │ │ │ ├── GPUImageSketchFilter.m
│ │ │ │ ├── GPUImageSkinToneFilter.h
│ │ │ │ ├── GPUImageSkinToneFilter.m
│ │ │ │ ├── GPUImageSmoothToonFilter.h
│ │ │ │ ├── GPUImageSmoothToonFilter.m
│ │ │ │ ├── GPUImageSobelEdgeDetectionFilter.h
│ │ │ │ ├── GPUImageSobelEdgeDetectionFilter.m
│ │ │ │ ├── GPUImageSoftEleganceFilter.h
│ │ │ │ ├── GPUImageSoftEleganceFilter.m
│ │ │ │ ├── GPUImageSoftLightBlendFilter.h
│ │ │ │ ├── GPUImageSoftLightBlendFilter.m
│ │ │ │ ├── GPUImageSolarizeFilter.h
│ │ │ │ ├── GPUImageSolarizeFilter.m
│ │ │ │ ├── GPUImageSolidColorGenerator.h
│ │ │ │ ├── GPUImageSolidColorGenerator.m
│ │ │ │ ├── GPUImageSourceOverBlendFilter.h
│ │ │ │ ├── GPUImageSourceOverBlendFilter.m
│ │ │ │ ├── GPUImageSphereRefractionFilter.h
│ │ │ │ ├── GPUImageSphereRefractionFilter.m
│ │ │ │ ├── GPUImageStillCamera.h
│ │ │ │ ├── GPUImageStillCamera.m
│ │ │ │ ├── GPUImageStretchDistortionFilter.h
│ │ │ │ ├── GPUImageStretchDistortionFilter.m
│ │ │ │ ├── GPUImageSubtractBlendFilter.h
│ │ │ │ ├── GPUImageSubtractBlendFilter.m
│ │ │ │ ├── GPUImageSwirlFilter.h
│ │ │ │ ├── GPUImageSwirlFilter.m
│ │ │ │ ├── GPUImageThresholdEdgeDetectionFilter.h
│ │ │ │ ├── GPUImageThresholdEdgeDetectionFilter.m
│ │ │ │ ├── GPUImageThresholdSketchFilter.h
│ │ │ │ ├── GPUImageThresholdSketchFilter.m
│ │ │ │ ├── GPUImageThresholdedNonMaximumSuppressionFilter.h
│ │ │ │ ├── GPUImageThresholdedNonMaximumSuppressionFilter.m
│ │ │ │ ├── GPUImageTiltShiftFilter.h
│ │ │ │ ├── GPUImageTiltShiftFilter.m
│ │ │ │ ├── GPUImageToneCurveFilter.h
│ │ │ │ ├── GPUImageToneCurveFilter.m
│ │ │ │ ├── GPUImageToonFilter.h
│ │ │ │ ├── GPUImageToonFilter.m
│ │ │ │ ├── GPUImageTransformFilter.h
│ │ │ │ ├── GPUImageTransformFilter.m
│ │ │ │ ├── GPUImageTwoInputCrossTextureSamplingFilter.h
│ │ │ │ ├── GPUImageTwoInputCrossTextureSamplingFilter.m
│ │ │ │ ├── GPUImageUIElement.h
│ │ │ │ ├── GPUImageUIElement.m
│ │ │ │ ├── GPUImageUnsharpMaskFilter.h
│ │ │ │ ├── GPUImageUnsharpMaskFilter.m
│ │ │ │ ├── GPUImageVibranceFilter.h
│ │ │ │ ├── GPUImageVibranceFilter.m
│ │ │ │ ├── GPUImageVignetteFilter.h
│ │ │ │ ├── GPUImageVignetteFilter.m
│ │ │ │ ├── GPUImageVoronoiConsumerFilter.h
│ │ │ │ ├── GPUImageVoronoiConsumerFilter.m
│ │ │ │ ├── GPUImageWeakPixelInclusionFilter.h
│ │ │ │ ├── GPUImageWeakPixelInclusionFilter.m
│ │ │ │ ├── GPUImageWhiteBalanceFilter.h
│ │ │ │ ├── GPUImageWhiteBalanceFilter.m
│ │ │ │ ├── GPUImageXYDerivativeFilter.h
│ │ │ │ ├── GPUImageXYDerivativeFilter.m
│ │ │ │ ├── GPUImageZoomBlurFilter.h
│ │ │ │ ├── GPUImageZoomBlurFilter.m
│ │ │ │ ├── GPUImageiOSBlurFilter.h
│ │ │ │ └── GPUImageiOSBlurFilter.m
│ │ │ ├── GLFilters/
│ │ │ │ ├── ColorProcessing/
│ │ │ │ │ ├── GLImageAddStickerFilter.h
│ │ │ │ │ ├── GLImageAddStickerFilter.m
│ │ │ │ │ ├── GLImageAddStickerWithEffectFilter.h
│ │ │ │ │ ├── GLImageAddStickerWithEffectFilter.m
│ │ │ │ │ ├── GLImageBlendFilter.h
│ │ │ │ │ ├── GLImageBlendFilter.m
│ │ │ │ │ ├── GLImageCircleFilter.h
│ │ │ │ │ ├── GLImageCircleFilter.m
│ │ │ │ │ ├── GLImageGassianBlurMixFilter.h
│ │ │ │ │ ├── GLImageGassianBlurMixFilter.m
│ │ │ │ │ ├── GLImageLutFilter.h
│ │ │ │ │ ├── GLImageLutFilter.m
│ │ │ │ │ ├── GLImageMixBlendFilter.h
│ │ │ │ │ ├── GLImageMixBlendFilter.m
│ │ │ │ │ ├── GLImageShapeFilter.h
│ │ │ │ │ ├── GLImageShapeFilter.m
│ │ │ │ │ ├── GLImageShapeHighDefinitionFilter.h
│ │ │ │ │ ├── GLImageShapeHighDefinitionFilter.m
│ │ │ │ │ ├── GLImageStickerFilter.h
│ │ │ │ │ ├── GLImageStickerFilter.m
│ │ │ │ │ ├── GLImageTwoLutFilter.h
│ │ │ │ │ └── GLImageTwoLutFilter.m
│ │ │ │ ├── DouYinEffect/
│ │ │ │ │ ├── GLImageBlurSnapViewFilter.h
│ │ │ │ │ ├── GLImageBlurSnapViewFilter.m
│ │ │ │ │ ├── GLImageBlurSnapViewFilterGroup.h
│ │ │ │ │ ├── GLImageBlurSnapViewFilterGroup.m
│ │ │ │ │ ├── GLImageFourPointsMirrorFilter.h
│ │ │ │ │ ├── GLImageFourPointsMirrorFilter.m
│ │ │ │ │ ├── GLImageGlitchEffectGridFilter.h
│ │ │ │ │ ├── GLImageGlitchEffectGridFilter.m
│ │ │ │ │ ├── GLImageGlitchEffectLineFilter.h
│ │ │ │ │ ├── GLImageGlitchEffectLineFilter.m
│ │ │ │ │ ├── GLImageSoulOutFilter.h
│ │ │ │ │ ├── GLImageSoulOutFilter.m
│ │ │ │ │ ├── GLImageThreePartitionFilter.h
│ │ │ │ │ ├── GLImageThreePartitionFilter.m
│ │ │ │ │ ├── GLImageThreePartitionGroupFilter.h
│ │ │ │ │ ├── GLImageThreePartitionGroupFilter.m
│ │ │ │ │ ├── GLImageWaterReflectionFilter.h
│ │ │ │ │ ├── GLImageWaterReflectionFilter.m
│ │ │ │ │ ├── GLImageZoomFilter.h
│ │ │ │ │ └── GLImageZoomFilter.m
│ │ │ │ ├── FaceFilters/
│ │ │ │ │ ├── GLImageFaceChangeFilter.h
│ │ │ │ │ ├── GLImageFaceChangeFilter.m
│ │ │ │ │ ├── GLImageFaceChangeFilterGroup.h
│ │ │ │ │ ├── GLImageFaceChangeFilterGroup.m
│ │ │ │ │ ├── GLImageFaceDetectPointFilter.h
│ │ │ │ │ ├── GLImageFaceDetectPointFilter.m
│ │ │ │ │ ├── GPUImageBeautifyFilter.h
│ │ │ │ │ └── GPUImageBeautifyFilter.m
│ │ │ │ └── GLImageMovie/
│ │ │ │ ├── GLImageMovie.h
│ │ │ │ ├── GLImageMovie.m
│ │ │ │ ├── GPUImageMovieWriterFix.h
│ │ │ │ └── GPUImageMovieWriterFix.m
│ │ │ ├── GPUImage.h
│ │ │ ├── Mac/
│ │ │ │ ├── GPUImage.h
│ │ │ │ ├── GPUImageAVCamera.h
│ │ │ │ ├── GPUImageAVCamera.m
│ │ │ │ ├── GPUImageContext.h
│ │ │ │ ├── GPUImageContext.m
│ │ │ │ ├── GPUImageMac-Info.plist
│ │ │ │ ├── GPUImageMac-Prefix.pch
│ │ │ │ ├── GPUImageMovieWriter.h
│ │ │ │ ├── GPUImageMovieWriter.m
│ │ │ │ ├── GPUImagePicture.h
│ │ │ │ ├── GPUImagePicture.m
│ │ │ │ ├── GPUImageView.h
│ │ │ │ ├── GPUImageView.m
│ │ │ │ └── en.lproj/
│ │ │ │ └── InfoPlist.strings
│ │ │ └── iOS/
│ │ │ ├── Framework/
│ │ │ │ ├── GPUImageFramework.h
│ │ │ │ ├── Info.plist
│ │ │ │ └── module.modulemap
│ │ │ ├── GPUImage-Prefix.pch
│ │ │ ├── GPUImageContext.h
│ │ │ ├── GPUImageContext.m
│ │ │ ├── GPUImageMovieWriter.h
│ │ │ ├── GPUImageMovieWriter.m
│ │ │ ├── GPUImagePicture+TextureSubimage.h
│ │ │ ├── GPUImagePicture+TextureSubimage.m
│ │ │ ├── GPUImagePicture.h
│ │ │ ├── GPUImagePicture.m
│ │ │ ├── GPUImageView.h
│ │ │ └── GPUImageView.m
│ │ ├── GPURenderKit.h
│ │ └── Info.plist
│ ├── GPURenderKit.xcodeproj/
│ │ ├── project.pbxproj
│ │ ├── project.xcworkspace/
│ │ │ ├── contents.xcworkspacedata
│ │ │ ├── xcshareddata/
│ │ │ │ └── WorkspaceSettings.xcsettings
│ │ │ └── xcuserdata/
│ │ │ └── liuhaidong.xcuserdatad/
│ │ │ └── WorkspaceSettings.xcsettings
│ │ └── xcshareddata/
│ │ └── xcschemes/
│ │ └── GPURenderKit.xcscheme
│ └── GPURenderKitTests/
│ ├── GPURenderKitTests.m
│ └── Info.plist
├── GPURenderKitDemo/
│ ├── GPURenderKitDemo/
│ │ ├── AppDelegate.h
│ │ ├── AppDelegate.m
│ │ ├── Assets.xcassets/
│ │ │ ├── AppIcon.appiconset/
│ │ │ │ └── Contents.json
│ │ │ ├── Contents.json
│ │ │ ├── EditorLut/
│ │ │ │ ├── Contents.json
│ │ │ │ ├── exposure_n.imageset/
│ │ │ │ │ └── Contents.json
│ │ │ │ ├── gaoya.imageset/
│ │ │ │ │ └── Contents.json
│ │ │ │ ├── heibai.imageset/
│ │ │ │ │ └── Contents.json
│ │ │ │ ├── jingdu.imageset/
│ │ │ │ │ └── Contents.json
│ │ │ │ ├── meishi.imageset/
│ │ │ │ │ └── Contents.json
│ │ │ │ └── xiatian.imageset/
│ │ │ │ └── Contents.json
│ │ │ ├── bunny.imageset/
│ │ │ │ └── Contents.json
│ │ │ ├── edit_beautify_rise_move.imageset/
│ │ │ │ └── Contents.json
│ │ │ ├── edit_beautify_slim_move.imageset/
│ │ │ │ └── Contents.json
│ │ │ ├── flower.imageset/
│ │ │ │ └── Contents.json
│ │ │ └── rotate.imageset/
│ │ │ └── Contents.json
│ │ ├── Base.lproj/
│ │ │ └── LaunchScreen.storyboard
│ │ ├── BaseViewController.h
│ │ ├── BaseViewController.m
│ │ ├── DDShapeViewController/
│ │ │ ├── DDShapeViewController.h
│ │ │ ├── DDShapeViewController.m
│ │ │ ├── Rise/
│ │ │ │ ├── Controller/
│ │ │ │ │ ├── ATRiseViewController.h
│ │ │ │ │ └── ATRiseViewController.m
│ │ │ │ └── View/
│ │ │ │ ├── ATRiseMenuView.h
│ │ │ │ ├── ATRiseMenuView.m
│ │ │ │ ├── ATSelectBarView.h
│ │ │ │ ├── ATSelectBarView.m
│ │ │ │ ├── ATSelectBarView.xib
│ │ │ │ ├── DDGLShapeView/
│ │ │ │ │ ├── DDGLSelectView.h
│ │ │ │ │ ├── DDGLSelectView.m
│ │ │ │ │ ├── DDGLShapeControlView.h
│ │ │ │ │ ├── DDGLShapeControlView.m
│ │ │ │ │ ├── DDGLShapeSelView.h
│ │ │ │ │ ├── DDGLShapeSelView.m
│ │ │ │ │ ├── DDGLShapeView.h
│ │ │ │ │ ├── DDGLShapeView.m
│ │ │ │ │ ├── DDGLShapingView.h
│ │ │ │ │ └── DDGLShapingView.m
│ │ │ │ └── Slider/
│ │ │ │ ├── ATSliderView.h
│ │ │ │ ├── ATSliderView.m
│ │ │ │ ├── WeSliderView.h
│ │ │ │ └── WeSliderView.m
│ │ │ └── View/
│ │ │ ├── UIView+Xib.h
│ │ │ └── UIView+Xib.m
│ │ ├── DouYinEffect/
│ │ │ ├── DouYinEffectTabView.h
│ │ │ ├── DouYinEffectTabView.m
│ │ │ ├── GLDouYinEffectViewController.h
│ │ │ └── GLDouYinEffectViewController.m
│ │ ├── Face/
│ │ │ ├── Face++/
│ │ │ │ ├── MGLicense/
│ │ │ │ │ ├── MGLicenseCommon.h
│ │ │ │ │ ├── MGLicenseManager.h
│ │ │ │ │ └── libMGLicMgrSDK-0.3.1.a
│ │ │ │ ├── MGLicenseManagerHelper/
│ │ │ │ │ ├── MGFaceLicenseHandle.h
│ │ │ │ │ ├── MGFaceLicenseHandle.m
│ │ │ │ │ └── MGNetAccount.h
│ │ │ │ └── iOS_SDK/
│ │ │ │ ├── MGAlgorithmInfo.h
│ │ │ │ ├── MGDetectRectInfo.h
│ │ │ │ ├── MGFaceInfo.h
│ │ │ │ ├── MGFacepp.h
│ │ │ │ ├── MGFaceppCommon.h
│ │ │ │ ├── MGFaceppConfig.h
│ │ │ │ ├── MGImageData.h
│ │ │ │ ├── libMGFacepp-0.5.2.a
│ │ │ │ └── megviifacepp_0_5_2_model
│ │ │ ├── FaceSliderView.h
│ │ │ ├── FaceSliderView.m
│ │ │ ├── FaceViewController.h
│ │ │ └── FaceViewController.mm
│ │ ├── GLImageFilterEnumType.h
│ │ ├── GLImageFilterList/
│ │ │ ├── GLImageFilterListViewController.h
│ │ │ ├── GLImageFilterListViewController.m
│ │ │ ├── GLImageFilterShowViewController.h
│ │ │ └── GLImageFilterShowViewController.m
│ │ ├── GLImageMovie/
│ │ │ ├── DDAVAssetRenderManage.h
│ │ │ ├── DDAVAssetRenderManage.m
│ │ │ ├── GLImageMovieUseViewController.h
│ │ │ ├── GLImageMovieUseViewController.m
│ │ │ ├── MovieViewController.h
│ │ │ └── MovieViewController.m
│ │ ├── GPURenderMacroHeader.h
│ │ ├── Info.plist
│ │ ├── MainViewController.h
│ │ ├── MainViewController.m
│ │ ├── ShapeChange/
│ │ │ ├── DDShapeViewController.h
│ │ │ ├── DDShapeViewController.m
│ │ │ ├── Rise/
│ │ │ │ ├── Controller/
│ │ │ │ │ ├── ATRiseViewController.h
│ │ │ │ │ └── ATRiseViewController.m
│ │ │ │ └── View/
│ │ │ │ ├── ATRiseMenuView.h
│ │ │ │ ├── ATRiseMenuView.m
│ │ │ │ ├── ATSelectBarView.h
│ │ │ │ ├── ATSelectBarView.m
│ │ │ │ ├── ATSelectBarView.xib
│ │ │ │ ├── DDGLShapeView/
│ │ │ │ │ ├── DDGLSelectView.h
│ │ │ │ │ ├── DDGLSelectView.m
│ │ │ │ │ ├── DDGLShapeControlView.h
│ │ │ │ │ ├── DDGLShapeControlView.m
│ │ │ │ │ ├── DDGLShapeSelView.h
│ │ │ │ │ ├── DDGLShapeSelView.m
│ │ │ │ │ ├── DDGLShapeView.h
│ │ │ │ │ ├── DDGLShapeView.m
│ │ │ │ │ ├── DDGLShapingView.h
│ │ │ │ │ └── DDGLShapingView.m
│ │ │ │ └── Slider/
│ │ │ │ ├── ATSliderView.h
│ │ │ │ ├── ATSliderView.m
│ │ │ │ ├── WeSliderView.h
│ │ │ │ └── WeSliderView.m
│ │ │ └── View/
│ │ │ ├── UIView+Xib.h
│ │ │ └── UIView+Xib.m
│ │ ├── Support/
│ │ │ ├── GLFilterInfoView.h
│ │ │ ├── GLFilterInfoView.m
│ │ │ ├── GLImagePickerHelper.h
│ │ │ ├── GLImagePickerHelper.m
│ │ │ ├── GLSliderView.h
│ │ │ ├── GLSliderView.m
│ │ │ ├── HUD/
│ │ │ │ ├── MBProgressHUD.h
│ │ │ │ └── MBProgressHUD.m
│ │ │ ├── UIColor+Utils.h
│ │ │ ├── UIColor+Utils.m
│ │ │ ├── UIImage+Rotate.h
│ │ │ ├── UIImage+Rotate.m
│ │ │ ├── UIImage+Utils.h
│ │ │ └── UIImage+Utils.m
│ │ ├── Tool/
│ │ │ ├── DDMediaEditorManage.h
│ │ │ └── DDMediaEditorManage.m
│ │ ├── VideoCode/
│ │ │ ├── DDVideoEcodeManage.h
│ │ │ ├── DDVideoEcodeManage.m
│ │ │ ├── VideoEcodeViewController.h
│ │ │ └── VideoEcodeViewController.m
│ │ └── main.m
│ ├── GPURenderKitDemo.xcodeproj/
│ │ ├── project.pbxproj
│ │ ├── project.xcworkspace/
│ │ │ ├── contents.xcworkspacedata
│ │ │ ├── xcshareddata/
│ │ │ │ └── WorkspaceSettings.xcsettings
│ │ │ └── xcuserdata/
│ │ │ └── liuhaidong.xcuserdatad/
│ │ │ └── WorkspaceSettings.xcsettings
│ │ └── xcshareddata/
│ │ └── xcschemes/
│ │ └── GPURenderKitDemo.xcscheme
│ ├── GPURenderKitDemoTests/
│ │ ├── GPURenderKitDemoTests.m
│ │ └── Info.plist
│ └── GPURenderKitDemoUITests/
│ ├── GPURenderKitDemoUITests.m
│ └── Info.plist
└── README.md
================================================
FILE CONTENTS
================================================
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GLProgram.h
================================================
// This is Jeff LaMarche's GLProgram OpenGL shader wrapper class from his OpenGL ES 2.0 book.
// A description of this can be found at his page on the topic:
// http://iphonedevelopment.blogspot.com/2010/11/opengl-es-20-for-ios-chapter-4.html
// I've extended this to be able to take programs as NSStrings in addition to files, for baked-in shaders
#import <Foundation/Foundation.h>
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
#import <OpenGLES/ES2/gl.h>
#import <OpenGLES/ES2/glext.h>
#else
#import <OpenGL/OpenGL.h>
#import <OpenGL/gl.h>
#endif
// Wrapper around an OpenGL (ES) 2.0 shader program object. Typical usage:
// initialize with shader sources, call -addAttribute: for each vertex
// attribute, then -link, then -use; look up locations afterwards with
// -attributeIndex: / -uniformIndex:.
@interface GLProgram : NSObject
{
NSMutableArray *attributes;   // attribute names; array index doubles as the bound GL attribute index
NSMutableArray *uniforms;     // allocated in init but never populated in this implementation
GLuint program,
vertShader,
fragShader;
}
// YES once -link has succeeded.
@property(readwrite, nonatomic) BOOL initialized;
// Info logs captured after a failed per-stage compile, or (programLog) after -validate.
@property(readwrite, copy, nonatomic) NSString *vertexShaderLog;
@property(readwrite, copy, nonatomic) NSString *fragmentShaderLog;
@property(readwrite, copy, nonatomic) NSString *programLog;
// Designated initializer: compiles both shaders from GLSL source strings.
- (id)initWithVertexShaderString:(NSString *)vShaderString
fragmentShaderString:(NSString *)fShaderString;
// Convenience: loads the fragment shader from a bundled ".fsh" resource.
- (id)initWithVertexShaderString:(NSString *)vShaderString
fragmentShaderFilename:(NSString *)fShaderFilename;
// Convenience: loads both shaders from bundled ".vsh"/".fsh" resources.
- (id)initWithVertexShaderFilename:(NSString *)vShaderFilename
fragmentShaderFilename:(NSString *)fShaderFilename;
// Must be called before -link; binds attributeName to the next free index.
- (void)addAttribute:(NSString *)attributeName;
// Returns the index previously assigned by -addAttribute:.
- (GLuint)attributeIndex:(NSString *)attributeName;
// Returns glGetUniformLocation for the linked program (see .m for caveats).
- (GLuint)uniformIndex:(NSString *)uniformName;
// Links the program; returns NO on failure.
- (BOOL)link;
// glUseProgram wrapper.
- (void)use;
// Runs glValidateProgram and stores the info log in programLog.
- (void)validate;
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GLProgram.m
================================================
// This is Jeff LaMarche's GLProgram OpenGL shader wrapper class from his OpenGL ES 2.0 book.
// A description of this can be found at his page on the topic:
// http://iphonedevelopment.blogspot.com/2010/11/opengl-es-20-for-ios-chapter-4.html
#import "GLProgram.h"
// START:typedefs
#pragma mark Function Pointer Definitions
// Function-pointer aliases matching the glGetProgramiv/glGetShaderiv and
// glGetProgramInfoLog/glGetShaderInfoLog signatures.
// NOTE(review): neither typedef is referenced anywhere in this file —
// presumably carried over from the original book sample; confirm before removing.
typedef void (*GLInfoFunction)(GLuint program, GLenum pname, GLint* params);
typedef void (*GLLogFunction) (GLuint program, GLsizei bufsize, GLsizei* length, GLchar* infolog);
// END:typedefs
#pragma mark -
#pragma mark Private Extension Method Declaration
// START:extension
// Private helper: compiles a single shader stage (vertex or fragment).
@interface GLProgram()
- (BOOL)compileShader:(GLuint *)shader
type:(GLenum)type
string:(NSString *)shaderString;
@end
// END:extension
#pragma mark -
@implementation GLProgram
// START:init
@synthesize initialized = _initialized;

/// Designated initializer. Creates a program object and compiles/attaches the
/// vertex and fragment shaders from the given GLSL source strings. The program
/// is NOT linked yet: callers bind attributes with -addAttribute: and then call
/// -link. Compile failures are logged here; the per-stage info logs are stored
/// in the vertexShaderLog / fragmentShaderLog properties by the compile helper.
- (id)initWithVertexShaderString:(NSString *)vShaderString
            fragmentShaderString:(NSString *)fShaderString;
{
    if ((self = [super init]))
    {
        _initialized = NO;

        attributes = [[NSMutableArray alloc] init];
        uniforms = [[NSMutableArray alloc] init];
        program = glCreateProgram();

        if (![self compileShader:&vertShader
                            type:GL_VERTEX_SHADER
                          string:vShaderString])
        {
            NSLog(@"Failed to compile vertex shader");
        }

        // Create and compile fragment shader
        if (![self compileShader:&fragShader
                            type:GL_FRAGMENT_SHADER
                          string:fShaderString])
        {
            NSLog(@"Failed to compile fragment shader");
        }

        // On compile failure the corresponding shader handle may be 0; GL will
        // record an error for the attach, matching the original behavior.
        glAttachShader(program, vertShader);
        glAttachShader(program, fragShader);
    }

    return self;
}

/// Convenience initializer: loads the fragment shader source from a bundled
/// ".fsh" resource file, then delegates to the designated initializer.
- (id)initWithVertexShaderString:(NSString *)vShaderString
          fragmentShaderFilename:(NSString *)fShaderFilename;
{
    NSString *fragShaderPathname = [[NSBundle mainBundle] pathForResource:fShaderFilename ofType:@"fsh"];
    NSString *fragmentShaderString = [NSString stringWithContentsOfFile:fragShaderPathname encoding:NSUTF8StringEncoding error:nil];

    if ((self = [self initWithVertexShaderString:vShaderString fragmentShaderString:fragmentShaderString]))
    {
    }

    return self;
}

/// Convenience initializer: loads both shader sources from bundled
/// ".vsh"/".fsh" resource files, then delegates to the designated initializer.
- (id)initWithVertexShaderFilename:(NSString *)vShaderFilename
            fragmentShaderFilename:(NSString *)fShaderFilename;
{
    NSString *vertShaderPathname = [[NSBundle mainBundle] pathForResource:vShaderFilename ofType:@"vsh"];
    NSString *vertexShaderString = [NSString stringWithContentsOfFile:vertShaderPathname encoding:NSUTF8StringEncoding error:nil];

    NSString *fragShaderPathname = [[NSBundle mainBundle] pathForResource:fShaderFilename ofType:@"fsh"];
    NSString *fragmentShaderString = [NSString stringWithContentsOfFile:fragShaderPathname encoding:NSUTF8StringEncoding error:nil];

    if ((self = [self initWithVertexShaderString:vertexShaderString fragmentShaderString:fragmentShaderString]))
    {
    }

    return self;
}
// END:init

// START:compile
/// Compiles a single shader stage from source. On failure, the GL info log is
/// captured into vertexShaderLog or fragmentShaderLog, chosen by which ivar the
/// out-pointer refers to. Returns YES iff compilation succeeded.
- (BOOL)compileShader:(GLuint *)shader
                 type:(GLenum)type
               string:(NSString *)shaderString
{
    GLint status;
    const GLchar *source;

    source = (GLchar *)[shaderString UTF8String];
    if (!source)
    {
        // Fix: the original message always said "vertex shader", even when the
        // missing source was for the fragment stage.
        NSLog(@"Failed to load %s shader", (type == GL_VERTEX_SHADER) ? "vertex" : "fragment");
        return NO;
    }

    *shader = glCreateShader(type);
    glShaderSource(*shader, 1, &source, NULL);
    glCompileShader(*shader);

    glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);
    if (status != GL_TRUE)
    {
        GLint logLength;
        glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength);
        if (logLength > 0)
        {
            // GL_INFO_LOG_LENGTH includes the terminating NUL, so this buffer
            // is exactly large enough for the full log.
            GLchar *log = (GLchar *)malloc(logLength);
            glGetShaderInfoLog(*shader, logLength, &logLength, log);
            if (shader == &vertShader)
            {
                self.vertexShaderLog = [NSString stringWithFormat:@"%s", log];
            }
            else
            {
                self.fragmentShaderLog = [NSString stringWithFormat:@"%s", log];
            }
            free(log);
        }
    }

    return status == GL_TRUE;
}
// END:compile

#pragma mark -
// START:addattribute
/// Binds attributeName to the next free attribute slot (its position in the
/// attributes array). Must be called before -link; duplicate names are ignored.
- (void)addAttribute:(NSString *)attributeName
{
    if (![attributes containsObject:attributeName])
    {
        [attributes addObject:attributeName];
        glBindAttribLocation(program,
                             (GLuint)[attributes indexOfObject:attributeName],
                             [attributeName UTF8String]);
    }
}
// END:addattribute

// START:indexmethods
/// Returns the attribute index previously assigned by -addAttribute:.
/// If the name was never added, NSNotFound is truncated into the GLuint.
- (GLuint)attributeIndex:(NSString *)attributeName
{
    return (GLuint)[attributes indexOfObject:attributeName];
}

/// Looks up a uniform location in the program. NOTE: glGetUniformLocation
/// returns a GLint of -1 for unknown/inactive names; the GLuint return type
/// (kept for API compatibility) wraps that sentinel to UINT_MAX.
- (GLuint)uniformIndex:(NSString *)uniformName
{
    return glGetUniformLocation(program, [uniformName UTF8String]);
}
// END:indexmethods

#pragma mark -
// START:link
/// Links the program. On success, deletes the now-redundant shader objects,
/// marks the receiver initialized, and returns YES; on failure returns NO
/// (use -validate / programLog for diagnostics).
- (BOOL)link
{
    GLint status;

    glLinkProgram(program);
    glGetProgramiv(program, GL_LINK_STATUS, &status);
    if (status == GL_FALSE)
        return NO;

    if (vertShader)
    {
        glDeleteShader(vertShader);
        vertShader = 0;
    }
    if (fragShader)
    {
        glDeleteShader(fragShader);
        fragShader = 0;
    }

    self.initialized = YES;
    return YES;
}
// END:link

// START:use
/// Makes this program the currently active GL program.
- (void)use
{
    glUseProgram(program);
}
// END:use

#pragma mark -
/// Validates the program against the current GL state and captures the
/// resulting info log into the programLog property.
- (void)validate;
{
    GLint logLength;

    glValidateProgram(program);
    glGetProgramiv(program, GL_INFO_LOG_LENGTH, &logLength);
    if (logLength > 0)
    {
        GLchar *log = (GLchar *)malloc(logLength);
        glGetProgramInfoLog(program, logLength, &logLength, log);
        self.programLog = [NSString stringWithFormat:@"%s", log];
        free(log);
    }
}

#pragma mark -
// START:dealloc
/// Deletes any GL objects still owned. Shaders are nonzero here only when
/// -link was never called (or failed), since -link deletes them on success.
- (void)dealloc
{
    if (vertShader)
        glDeleteShader(vertShader);

    if (fragShader)
        glDeleteShader(fragShader);

    if (program)
        glDeleteProgram(program);
}
// END:dealloc
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageBuffer.h
================================================
#import "GPUImageFilter.h"
// A pass-through filter that delays frames: it retains up to `bufferSize`
// input framebuffers in FIFO order and outputs the oldest one (see the .m).
@interface GPUImageBuffer : GPUImageFilter
{
NSMutableArray *bufferedFramebuffers;  // FIFO of locked (retained) input framebuffers
}
// Number of frames of delay; values below 1 are rejected by the setter.
@property(readwrite, nonatomic) NSUInteger bufferSize;
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageBuffer.m
================================================
#import "GPUImageBuffer.h"
@interface GPUImageBuffer()
@end

// Frame-delay filter: buffers incoming framebuffers and forwards the oldest
// one downstream, producing a `bufferSize`-frame delayed stream.
@implementation GPUImageBuffer
@synthesize bufferSize = _bufferSize;
#pragma mark -
#pragma mark Initialization and teardown
// Initializes as a simple pass-through shader with a one-frame buffer.
- (id)init;
{
if (!(self = [self initWithFragmentShaderFromString:kGPUImagePassthroughFragmentShaderString]))
{
return nil;
}
bufferedFramebuffers = [[NSMutableArray alloc] init];
// [bufferedTextures addObject:[NSNumber numberWithInt:outputTexture]];
_bufferSize = 1;
return self;
}
// Unlocks (releases this filter's hold on) every framebuffer still queued.
- (void)dealloc
{
for (GPUImageFramebuffer *currentFramebuffer in bufferedFramebuffers)
{
[currentFramebuffer unlock];
}
}
#pragma mark -
#pragma mark GPUImageInput
// Queues the incoming framebuffer and emits the oldest buffered one. While
// the queue is still shorter than bufferSize, the current input is passed
// straight through, locked once more to cover the queued reference.
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
if ([bufferedFramebuffers count] >= _bufferSize)
{
// Buffer full: output the oldest frame and drop it from the queue.
// NOTE(review): the queued lock taken when this frame entered the buffer
// appears to be balanced downstream via informTargetsAboutNewFrameAtTime: —
// confirm against GPUImageOutput's framebuffer ownership rules.
outputFramebuffer = [bufferedFramebuffers objectAtIndex:0];
[bufferedFramebuffers removeObjectAtIndex:0];
}
else
{
// Nothing yet in the buffer, so don't process further until the buffer is full
outputFramebuffer = firstInputFramebuffer;
[firstInputFramebuffer lock];
}
[bufferedFramebuffers addObject:firstInputFramebuffer];
// Need to pass along rotation information, as we're just holding on to buffered framebuffers and not rotating them ourselves
for (id<GPUImageInput> currentTarget in targets)
{
if (currentTarget != self.targetToIgnoreForUpdates)
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger textureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
[currentTarget setInputRotation:inputRotation atIndex:textureIndex];
}
}
// Let the downstream video elements see the previous frame from the buffer before rendering a new one into place
[self informTargetsAboutNewFrameAtTime:frameTime];
// [self renderToTextureWithVertices:imageVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];
}
// Intentionally empty: this filter re-emits buffered framebuffers instead of
// rendering into a new texture.
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
// No need to render to another texture anymore, since we'll be hanging on to the textures in our buffer
}
#pragma mark -
#pragma mark Accessors
// Changes the frame delay. Shrinking immediately unlocks and discards the
// newest queued frames; growing performs no per-frame work (see TODO) and
// simply takes effect as the queue refills to the new size.
- (void)setBufferSize:(NSUInteger)newValue;
{
// Reject no-op changes and invalid sizes (a delay of zero frames is not supported).
if ( (newValue == _bufferSize) || (newValue < 1) )
{
return;
}
if (newValue > _bufferSize)
{
NSUInteger texturesToAdd = newValue - _bufferSize;
for (NSUInteger currentTextureIndex = 0; currentTextureIndex < texturesToAdd; currentTextureIndex++)
{
// TODO: Deal with the growth of the size of the buffer by rotating framebuffers, no textures
}
}
else
{
NSUInteger texturesToRemove = _bufferSize - newValue;
for (NSUInteger currentTextureIndex = 0; currentTextureIndex < texturesToRemove; currentTextureIndex++)
{
GPUImageFramebuffer *lastFramebuffer = [bufferedFramebuffers lastObject];
[bufferedFramebuffers removeObjectAtIndex:([bufferedFramebuffers count] - 1)];
[lastFramebuffer unlock];
lastFramebuffer = nil;
}
}
_bufferSize = newValue;
}
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFilter.h
================================================
#import "GPUImageOutput.h"
// SHADER_STRING(text) stringizes its argument at compile time, letting GLSL be
// written inline as an NSString literal.
#define STRINGIZE(x) #x
#define STRINGIZE2(x) STRINGIZE(x)
#define SHADER_STRING(text) @ STRINGIZE2(text)

// Trick for emitting a literal '#' inside a SHADER_STRING body, since the ObjC
// preprocessor would otherwise treat it as a directive.
#define GPUImageHashIdentifier #
#define GPUImageWrappedLabel(x) x
#define GPUImageEscapedHashIdentifier(a) GPUImageWrappedLabel(GPUImageHashIdentifier)a

// Default vertex / passthrough fragment shader sources (defined in GPUImageFilter.m).
extern NSString *const kGPUImageVertexShaderString;
extern NSString *const kGPUImagePassthroughFragmentShaderString;
/// Four-component float vector, used to pass vec4 uniform values.
struct GPUVector4 {
    GLfloat one;
    GLfloat two;
    GLfloat three;
    GLfloat four;
};
typedef struct GPUVector4 GPUVector4;

/// Three-component float vector, used to pass vec3 uniform values.
struct GPUVector3 {
    GLfloat one;
    GLfloat two;
    GLfloat three;
};
typedef struct GPUVector3 GPUVector3;

/// 4x4 float matrix, stored as four row vectors, for mat4 uniform values.
struct GPUMatrix4x4 {
    GPUVector4 one;
    GPUVector4 two;
    GPUVector4 three;
    GPUVector4 four;
};
typedef struct GPUMatrix4x4 GPUMatrix4x4;

/// 3x3 float matrix, stored as three row vectors, for mat3 uniform values.
struct GPUMatrix3x3 {
    GPUVector3 one;
    GPUVector3 two;
    GPUVector3 three;
};
typedef struct GPUMatrix3x3 GPUMatrix3x3;
/** GPUImage's base filter class
Filters and other subsequent elements in the chain conform to the GPUImageInput protocol, which lets them take in the supplied or processed texture from the previous link in the chain and do something with it. Objects one step further down the chain are considered targets, and processing can be branched by adding multiple targets to a single output or filter.
*/
@interface GPUImageFilter : GPUImageOutput <GPUImageInput>
{
    GPUImageFramebuffer *firstInputFramebuffer;        // Most recent input frame (locked while in use)

    GLProgram *filterProgram;                          // Compiled shader program for this filter
    GLint filterPositionAttribute, filterTextureCoordinateAttribute;
    GLint filterInputTextureUniform;
    GLfloat backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha;

    BOOL isEndProcessing;                              // Guards against repeated endProcessing propagation

    CGSize currentFilterSize;
    GPUImageRotationMode inputRotation;

    BOOL currentlyReceivingMonochromeInput;

    NSMutableDictionary *uniformStateRestorationBlocks; // uniform index -> block that re-applies its value
    dispatch_semaphore_t imageCaptureSemaphore;         // Gates still-image capture of the next frame
}

@property(readonly) CVPixelBufferRef renderTarget;
@property(readwrite, nonatomic) BOOL preventRendering;
@property(readwrite, nonatomic) BOOL currentlyReceivingMonochromeInput;

/// @name Initialization and teardown

/**
 Initialize with vertex and fragment shaders

 You make take advantage of the SHADER_STRING macro to write your shaders in-line.
 @param vertexShaderString Source code of the vertex shader to use
 @param fragmentShaderString Source code of the fragment shader to use
 */
- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;

/**
 Initialize with a fragment shader

 You may take advantage of the SHADER_STRING macro to write your shader in-line.
 @param fragmentShaderString Source code of fragment shader to use
 */
- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;

/**
 Initialize with a fragment shader
 @param fragmentShaderFilename Filename of fragment shader to load
 */
- (id)initWithFragmentShaderFromFile:(NSString *)fragmentShaderFilename;

/// Registers vertex attributes before program link; override to add custom attributes.
- (void)initializeAttributes;

/// Hook for size-dependent setup; called whenever the FBO size changes.
- (void)setupFilterForSize:(CGSize)filterFrameSize;

/// Returns sizeToRotate with width/height swapped when the input rotation is 90°.
- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex;

/// Maps a normalized point through the given rotation mode.
- (CGPoint)rotatedPoint:(CGPoint)pointToRotate forRotation:(GPUImageRotationMode)rotation;

/// @name Managing the display FBOs

/** Size of the frame buffer object
 */
- (CGSize)sizeOfFBO;

/// @name Rendering

/// Texture-coordinate quad (4 x vec2) for the given rotation mode.
+ (const GLfloat *)textureCoordinatesForRotation:(GPUImageRotationMode)rotationMode;

/// Draws the input texture into this filter's output framebuffer.
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;

/// Hands the rendered frame to all targets and triggers their processing.
- (void)informTargetsAboutNewFrameAtTime:(CMTime)frameTime;

/// Size of the frames this filter outputs to targets.
- (CGSize)outputFrameSize;

/// @name Input parameters

/// Clear color used before each render pass.
- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;

// Name-based uniform setters (resolve the uniform index on filterProgram).
- (void)setInteger:(GLint)newInteger forUniformName:(NSString *)uniformName;
- (void)setFloat:(GLfloat)newFloat forUniformName:(NSString *)uniformName;
- (void)setSize:(CGSize)newSize forUniformName:(NSString *)uniformName;
- (void)setPoint:(CGPoint)newPoint forUniformName:(NSString *)uniformName;
- (void)setFloatVec3:(GPUVector3)newVec3 forUniformName:(NSString *)uniformName;
- (void)setFloatVec4:(GPUVector4)newVec4 forUniform:(NSString *)uniformName;
- (void)setFloatArray:(GLfloat *)array length:(GLsizei)count forUniform:(NSString*)uniformName;

// Index-based uniform setters (run asynchronously on the video processing queue).
- (void)setMatrix3f:(GPUMatrix3x3)matrix forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setMatrix4f:(GPUMatrix4x4)matrix forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setFloat:(GLfloat)floatValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setPoint:(CGPoint)pointValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setSize:(CGSize)sizeValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setVec3:(GPUVector3)vectorValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setVec4:(GPUVector4)vectorValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setFloatArray:(GLfloat *)arrayValue length:(GLsizei)arrayLength forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setInteger:(GLint)intValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setFloatVec2Array:(GLfloat *)array length:(GLsizei)length forUniform:(GLint)uniform program:(GLProgram *)program;

/// Stores a uniform-assignment block for later restoration and runs it now.
- (void)setAndExecuteUniformStateCallbackAtIndex:(GLint)uniform forProgram:(GLProgram *)shaderProgram toBlock:(dispatch_block_t)uniformStateBlock;

/// Replays all stored uniform-assignment blocks for the given program.
- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex;

@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFilter.m
================================================
#import "GPUImageFilter.h"
#import "GPUImagePicture.h"
#import <AVFoundation/AVFoundation.h>
// Hardcode the vertex shader for standard filters, but this can be overridden
// Hardcode the vertex shader for standard filters, but this can be overridden
// (simple passthrough: forwards position and texture coordinate unchanged).
NSString *const kGPUImageVertexShaderString = SHADER_STRING
(
 attribute vec4 position;
 attribute vec4 inputTextureCoordinate;

 varying vec2 textureCoordinate;

 void main()
 {
     gl_Position = position;
     textureCoordinate = inputTextureCoordinate.xy;
 }
 );

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// OpenGL ES requires a precision qualifier on the varying; desktop GL (below) does not.
NSString *const kGPUImagePassthroughFragmentShaderString = SHADER_STRING
(
 varying highp vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;

 void main()
 {
     gl_FragColor = texture2D(inputImageTexture, textureCoordinate);
 }
 );
#else
NSString *const kGPUImagePassthroughFragmentShaderString = SHADER_STRING
(
 varying vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;

 void main()
 {
     gl_FragColor = texture2D(inputImageTexture, textureCoordinate);
 }
 );
#endif
@implementation GPUImageFilter
@synthesize preventRendering = _preventRendering;
@synthesize currentlyReceivingMonochromeInput;
#pragma mark -
#pragma mark Initialization and teardown
// Designated initializer: compiles/links the shader pair on the shared video
// processing context and caches the standard attribute/uniform locations.
- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    uniformStateRestorationBlocks = [NSMutableDictionary dictionaryWithCapacity:10];
    _preventRendering = NO;
    currentlyReceivingMonochromeInput = NO;
    inputRotation = kGPUImageNoRotation;
    backgroundColorRed = 0.0;
    backgroundColorGreen = 0.0;
    backgroundColorBlue = 0.0;
    backgroundColorAlpha = 0.0;
    // Create the capture semaphore at 0 and immediately signal it, so the
    // first -useNextFrameForImageCapture can take it without blocking.
    imageCaptureSemaphore = dispatch_semaphore_create(0);
    dispatch_semaphore_signal(imageCaptureSemaphore);

    // All GL work must happen on the shared video processing queue/context.
    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];

        // The context caches programs by shader source, so an identical filter
        // may receive an already-linked program here.
        filterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:vertexShaderString fragmentShaderString:fragmentShaderString];

        if (!filterProgram.initialized)
        {
            // Attributes must be bound before linking.
            [self initializeAttributes];

            if (![filterProgram link])
            {
                NSString *progLog = [filterProgram programLog];
                NSLog(@"Program link log: %@", progLog);
                NSString *fragLog = [filterProgram fragmentShaderLog];
                NSLog(@"Fragment shader compile log: %@", fragLog);
                NSString *vertLog = [filterProgram vertexShaderLog];
                NSLog(@"Vertex shader compile log: %@", vertLog);
                filterProgram = nil;
                NSAssert(NO, @"Filter shader link failed");
            }
        }

        filterPositionAttribute = [filterProgram attributeIndex:@"position"];
        filterTextureCoordinateAttribute = [filterProgram attributeIndex:@"inputTextureCoordinate"];
        filterInputTextureUniform = [filterProgram uniformIndex:@"inputImageTexture"]; // This does assume a name of "inputImageTexture" for the fragment shader

        [GPUImageContext setActiveShaderProgram:filterProgram];

        glEnableVertexAttribArray(filterPositionAttribute);
        glEnableVertexAttribArray(filterTextureCoordinateAttribute);
    });

    return self;
}
// Convenience initializer: pairs the supplied fragment shader with the
// standard passthrough vertex shader.
- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
{
    return [self initWithVertexShaderFromString:kGPUImageVertexShaderString
                       fragmentShaderFromString:fragmentShaderString];
}
// Convenience initializer: loads a fragment shader from the main bundle.
// @param fragmentShaderFilename Resource name (without the ".fsh" extension).
// @return An initialized filter, or nil if the shader file cannot be read.
- (id)initWithFragmentShaderFromFile:(NSString *)fragmentShaderFilename;
{
    NSString *fragmentShaderPathname = [[NSBundle mainBundle] pathForResource:fragmentShaderFilename ofType:@"fsh"];
    NSError *readError = nil;
    NSString *fragmentShaderString = [NSString stringWithContentsOfFile:fragmentShaderPathname encoding:NSUTF8StringEncoding error:&readError];

    if (fragmentShaderString == nil)
    {
        // Previously the read error was discarded (error:nil) and a nil shader
        // string was passed downstream, causing an opaque failure during shader
        // compilation. Fail fast with a diagnostic instead.
        NSLog(@"GPUImageFilter: could not load fragment shader '%@.fsh': %@", fragmentShaderFilename, readError);
        return nil;
    }

    if (!(self = [self initWithFragmentShaderFromString:fragmentShaderString]))
    {
        return nil;
    }

    return self;
}
// Default initializer: a plain passthrough filter.
- (id)init;
{
    return [self initWithFragmentShaderFromString:kGPUImagePassthroughFragmentShaderString];
}
// Binds the standard vertex attributes before the program is linked.
- (void)initializeAttributes;
{
    [filterProgram addAttribute:@"position"];
    [filterProgram addAttribute:@"inputTextureCoordinate"];

    // Override this, calling back to this super method, in order to add new attributes to your vertex shader
}

// Hook for size-dependent configuration; the base implementation does nothing.
- (void)setupFilterForSize:(CGSize)filterFrameSize;
{
    // This is where you can override to provide some custom setup, if your filter has a size-dependent element
}
// Under manual dispatch-object memory management (pre-ARC GCD objects), the
// capture semaphore must be released explicitly; with OS_OBJECT_USE_OBJC it is
// ARC-managed and nothing is needed here.
- (void)dealloc
{
#if !OS_OBJECT_USE_OBJC
    if (imageCaptureSemaphore != NULL)
    {
        dispatch_release(imageCaptureSemaphore);
    }
#endif
}
#pragma mark -
#pragma mark Still image processing
// Arms still-image capture: the next rendered frame's framebuffer will be
// retained, and the semaphore is drained so -newCGImageFromCurrentlyProcessedOutput
// blocks until that frame has rendered.
- (void)useNextFrameForImageCapture;
{
    usingNextFrameForImageCapture = YES;

    // Set the semaphore high, if it isn't already
    if (dispatch_semaphore_wait(imageCaptureSemaphore, DISPATCH_TIME_NOW) != 0)
    {
        return;
    }
}

// Waits (up to 3 s) for the armed frame to render, then converts the retained
// framebuffer to a CGImage. Returns NULL on timeout. Caller owns the CGImageRef.
- (CGImageRef)newCGImageFromCurrentlyProcessedOutput
{
    // Give it three seconds to process, then abort if they forgot to set up the image capture properly
    double timeoutForImageCapture = 3.0;
    dispatch_time_t convertedTimeout = dispatch_time(DISPATCH_TIME_NOW, timeoutForImageCapture * NSEC_PER_SEC);

    if (dispatch_semaphore_wait(imageCaptureSemaphore, convertedTimeout) != 0)
    {
        return NULL;
    }

    GPUImageFramebuffer* framebuffer = [self framebufferForOutput];

    usingNextFrameForImageCapture = NO;
    // Re-signal so a subsequent capture can arm again.
    dispatch_semaphore_signal(imageCaptureSemaphore);

    CGImageRef image = [framebuffer newCGImageFromFramebufferContents];
    return image;
}
#pragma mark -
#pragma mark Managing the display FBOs
// Returns the framebuffer size to render into: the capped maximum output size
// when one is set and smaller than the input, otherwise the input texture size.
- (CGSize)sizeOfFBO;
{
    CGSize cappedSize = [self maximumOutputSize];
    BOOL useInputSize = CGSizeEqualToSize(cappedSize, CGSizeZero) || (inputTextureSize.width < cappedSize.width);

    return useInputSize ? inputTextureSize : cappedSize;
}
#pragma mark -
#pragma mark Rendering
// Returns a static quad of texture coordinates (4 x vec2, triangle-strip
// order: bottom-left, bottom-right, top-left, top-right vertices) that samples
// the input with the requested rotation/flip applied.
+ (const GLfloat *)textureCoordinatesForRotation:(GPUImageRotationMode)rotationMode;
{
    static const GLfloat noRotationTextureCoordinates[] = {
        0.0f, 0.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 1.0f,
    };

    static const GLfloat rotateLeftTextureCoordinates[] = {
        1.0f, 0.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        0.0f, 1.0f,
    };

    static const GLfloat rotateRightTextureCoordinates[] = {
        0.0f, 1.0f,
        0.0f, 0.0f,
        1.0f, 1.0f,
        1.0f, 0.0f,
    };

    static const GLfloat verticalFlipTextureCoordinates[] = {
        0.0f, 1.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        1.0f, 0.0f,
    };

    static const GLfloat horizontalFlipTextureCoordinates[] = {
        1.0f, 0.0f,
        0.0f, 0.0f,
        1.0f, 1.0f,
        0.0f, 1.0f,
    };

    static const GLfloat rotateRightVerticalFlipTextureCoordinates[] = {
        0.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 0.0f,
        1.0f, 1.0f,
    };

    static const GLfloat rotateRightHorizontalFlipTextureCoordinates[] = {
        1.0f, 1.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        0.0f, 0.0f,
    };

    static const GLfloat rotate180TextureCoordinates[] = {
        1.0f, 1.0f,
        0.0f, 1.0f,
        1.0f, 0.0f,
        0.0f, 0.0f,
    };

    // The returned pointers reference function-static storage, so they stay
    // valid for the lifetime of the process.
    switch(rotationMode)
    {
        case kGPUImageNoRotation: return noRotationTextureCoordinates;
        case kGPUImageRotateLeft: return rotateLeftTextureCoordinates;
        case kGPUImageRotateRight: return rotateRightTextureCoordinates;
        case kGPUImageFlipVertical: return verticalFlipTextureCoordinates;
        case kGPUImageFlipHorizonal: return horizontalFlipTextureCoordinates;
        case kGPUImageRotateRightFlipVertical: return rotateRightVerticalFlipTextureCoordinates;
        case kGPUImageRotateRightFlipHorizontal: return rotateRightHorizontalFlipTextureCoordinates;
        case kGPUImageRotate180: return rotate180TextureCoordinates;
    }
}
// Core render pass: draws the input texture into a cache-fetched output
// framebuffer using this filter's shader program. Must run on the video
// processing queue with the image processing context current.
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
    if (self.preventRendering)
    {
        // Still release our hold on the input even when skipping the draw.
        [firstInputFramebuffer unlock];
        return;
    }

    [GPUImageContext setActiveShaderProgram:filterProgram];

    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];
    if (usingNextFrameForImageCapture)
    {
        // Extra lock keeps the framebuffer alive until the capture path reads it.
        [outputFramebuffer lock];
    }

    [self setUniformsForProgramAtIndex:0];

    glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
    glClear(GL_COLOR_BUFFER_BIT);

    // Texture unit 2 is conventionally used for the first input in this codebase.
    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);

    glUniform1i(filterInputTextureUniform, 2);

    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
    glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    [firstInputFramebuffer unlock];

    if (usingNextFrameForImageCapture)
    {
        // Wake any thread blocked in -newCGImageFromCurrentlyProcessedOutput.
        dispatch_semaphore_signal(imageCaptureSemaphore);
    }
}
// Distributes the freshly rendered frame: first hands the framebuffer and size
// to every target (so each takes its own lock), then releases our hold, then
// triggers target processing. The ordering is deliberate — see comments.
- (void)informTargetsAboutNewFrameAtTime:(CMTime)frameTime;
{
    if (self.frameProcessingCompletionBlock != NULL)
    {
        self.frameProcessingCompletionBlock(self, frameTime);
    }

    // Get all targets the framebuffer so they can grab a lock on it
    for (id<GPUImageInput> currentTarget in targets)
    {
        if (currentTarget != self.targetToIgnoreForUpdates)
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger textureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];

            [self setInputFramebufferForTarget:currentTarget atIndex:textureIndex];
            [currentTarget setInputSize:[self outputFrameSize] atIndex:textureIndex];
        }
    }

    // Release our hold so it can return to the cache immediately upon processing
    [[self framebufferForOutput] unlock];

    if (usingNextFrameForImageCapture)
    {
        // Keep the output framebuffer referenced so the capture path can read it.
//        usingNextFrameForImageCapture = NO;
    }
    else
    {
        [self removeOutputFramebuffer];
    }

    // Trigger processing last, so that our unlock comes first in serial execution, avoiding the need for a callback
    for (id<GPUImageInput> currentTarget in targets)
    {
        if (currentTarget != self.targetToIgnoreForUpdates)
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger textureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            [currentTarget newFrameReadyAtTime:frameTime atIndex:textureIndex];
        }
    }
}
// Filters output at their input size by default; subclasses that rescale override this.
- (CGSize)outputFrameSize;
{
    return inputTextureSize;
}
#pragma mark -
#pragma mark Input parameters
// Sets the clear color applied before each render pass.
- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;
{
    backgroundColorRed = redComponent;
    backgroundColorGreen = greenComponent;
    backgroundColorBlue = blueComponent;
    backgroundColorAlpha = alphaComponent;
}

// The following by-name setters resolve the uniform's index on filterProgram
// and forward to the index-based setters below.

- (void)setInteger:(GLint)newInteger forUniformName:(NSString *)uniformName;
{
    GLint uniformIndex = [filterProgram uniformIndex:uniformName];
    [self setInteger:newInteger forUniform:uniformIndex program:filterProgram];
}

- (void)setFloat:(GLfloat)newFloat forUniformName:(NSString *)uniformName;
{
    GLint uniformIndex = [filterProgram uniformIndex:uniformName];
    [self setFloat:newFloat forUniform:uniformIndex program:filterProgram];
}

- (void)setSize:(CGSize)newSize forUniformName:(NSString *)uniformName;
{
    GLint uniformIndex = [filterProgram uniformIndex:uniformName];
    [self setSize:newSize forUniform:uniformIndex program:filterProgram];
}

- (void)setPoint:(CGPoint)newPoint forUniformName:(NSString *)uniformName;
{
    GLint uniformIndex = [filterProgram uniformIndex:uniformName];
    [self setPoint:newPoint forUniform:uniformIndex program:filterProgram];
}

- (void)setFloatVec3:(GPUVector3)newVec3 forUniformName:(NSString *)uniformName;
{
    GLint uniformIndex = [filterProgram uniformIndex:uniformName];
    [self setVec3:newVec3 forUniform:uniformIndex program:filterProgram];
}

- (void)setFloatVec4:(GPUVector4)newVec4 forUniform:(NSString *)uniformName;
{
    GLint uniformIndex = [filterProgram uniformIndex:uniformName];
    [self setVec4:newVec4 forUniform:uniformIndex program:filterProgram];
}

- (void)setFloatArray:(GLfloat *)array length:(GLsizei)count forUniform:(NSString*)uniformName
{
    GLint uniformIndex = [filterProgram uniformIndex:uniformName];

    [self setFloatArray:array length:count forUniform:uniformIndex program:filterProgram];
}
// The index-based setters below all follow the same pattern: hop onto the
// video processing queue, activate the program, then register-and-run a block
// that issues the glUniform* call. Registering the block lets
// -setUniformsForProgramAtIndex: re-apply the value on later render passes.
// Struct/scalar arguments are captured by value, so they are safe to use
// after the caller returns; pointer arguments are copied into NSData first.

- (void)setMatrix3f:(GPUMatrix3x3)matrix forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
{
    runAsynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext setActiveShaderProgram:shaderProgram];

        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
            glUniformMatrix3fv(uniform, 1, GL_FALSE, (GLfloat *)&matrix);
        }];
    });
}

- (void)setMatrix4f:(GPUMatrix4x4)matrix forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
{
    runAsynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext setActiveShaderProgram:shaderProgram];

        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
            glUniformMatrix4fv(uniform, 1, GL_FALSE, (GLfloat *)&matrix);
        }];
    });
}

- (void)setFloat:(GLfloat)floatValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
{
    runAsynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext setActiveShaderProgram:shaderProgram];

        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
            glUniform1f(uniform, floatValue);
        }];
    });
}

- (void)setPoint:(CGPoint)pointValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
{
    runAsynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext setActiveShaderProgram:shaderProgram];

        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
            // CGPoint uses CGFloat (possibly double); repack as GLfloat for GL.
            GLfloat positionArray[2];
            positionArray[0] = pointValue.x;
            positionArray[1] = pointValue.y;

            glUniform2fv(uniform, 1, positionArray);
        }];
    });
}

- (void)setSize:(CGSize)sizeValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
{
    runAsynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext setActiveShaderProgram:shaderProgram];

        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
            GLfloat sizeArray[2];
            sizeArray[0] = sizeValue.width;
            sizeArray[1] = sizeValue.height;

            glUniform2fv(uniform, 1, sizeArray);
        }];
    });
}

- (void)setVec3:(GPUVector3)vectorValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
{
    runAsynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext setActiveShaderProgram:shaderProgram];

        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
            glUniform3fv(uniform, 1, (GLfloat *)&vectorValue);
        }];
    });
}

- (void)setVec4:(GPUVector4)vectorValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
{
    runAsynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext setActiveShaderProgram:shaderProgram];

        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
            glUniform4fv(uniform, 1, (GLfloat *)&vectorValue);
        }];
    });
}

- (void)setFloatArray:(GLfloat *)arrayValue length:(GLsizei)arrayLength forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
{
    // Make a copy of the data, so it doesn't get overwritten before async call executes
    NSData* arrayData = [NSData dataWithBytes:arrayValue length:arrayLength * sizeof(arrayValue[0])];

    runAsynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext setActiveShaderProgram:shaderProgram];

        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
            glUniform1fv(uniform, arrayLength, [arrayData bytes]);
        }];
    });
}

- (void)setInteger:(GLint)intValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
{
    runAsynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext setActiveShaderProgram:shaderProgram];

        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
            glUniform1i(uniform, intValue);
        }];
    });
}

- (void)setFloatVec2Array:(GLfloat *)array length:(GLsizei)length forUniform:(GLint)uniform program:(GLProgram *)program
{
    // Copy the caller's buffer: `length` here is the vec2 count passed to glUniform2fv.
    NSData *arrayData = [NSData dataWithBytes:array length:length * sizeof(array[0])];

    runAsynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext setActiveShaderProgram:program];

        [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:program toBlock:^{
            glUniform2fv(uniform, length, [arrayData bytes]);
        }];
    });
}
// Remembers the uniform-assignment block (keyed by uniform index, so the
// value can be re-applied on later passes) and executes it immediately.
- (void)setAndExecuteUniformStateCallbackAtIndex:(GLint)uniform forProgram:(GLProgram *)shaderProgram toBlock:(dispatch_block_t)uniformStateBlock;
{
    uniformStateRestorationBlocks[@(uniform)] = [uniformStateBlock copy];
    uniformStateBlock();
}
// Re-applies every remembered uniform value for this filter's program.
- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex;
{
    for (dispatch_block_t restorationBlock in [uniformStateRestorationBlocks allValues])
    {
        restorationBlock();
    }
}
#pragma mark -
#pragma mark GPUImageInput
// GPUImageInput entry point: renders the incoming frame over a full-screen
// quad, then notifies downstream targets.
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
    // Full-screen quad in normalized device coordinates (triangle strip).
    static const GLfloat imageVertices[] = {
        -1.0f, -1.0f,
        1.0f, -1.0f,
        -1.0f,  1.0f,
        1.0f,  1.0f,
    };

    [self renderToTextureWithVertices:imageVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];

    [self informTargetsAboutNewFrameAtTime:frameTime];
}

// Single-input filters always accept input at index 0.
- (NSInteger)nextAvailableTextureIndex;
{
    return 0;
}

// Takes a lock on the incoming framebuffer; released at the end of the render pass.
- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
{
    firstInputFramebuffer = newInputFramebuffer;
    [firstInputFramebuffer lock];
}
// Returns the size a texture occupies after the current input rotation is
// applied: width and height swap for 90-degree rotations, otherwise unchanged.
- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex;
{
    if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
    {
        return CGSizeMake(sizeToRotate.height, sizeToRotate.width);
    }

    return sizeToRotate;
}
// Maps a point in normalized [0,1] texture space through the given rotation/
// flip mode, mirroring what the texture-coordinate tables do per vertex.
- (CGPoint)rotatedPoint:(CGPoint)pointToRotate forRotation:(GPUImageRotationMode)rotation;
{
    CGPoint rotatedPoint;
    switch(rotation)
    {
        case kGPUImageNoRotation: return pointToRotate; break;
        case kGPUImageFlipHorizonal:
        {
            rotatedPoint.x = 1.0 - pointToRotate.x;
            rotatedPoint.y = pointToRotate.y;
        }; break;
        case kGPUImageFlipVertical:
        {
            rotatedPoint.x = pointToRotate.x;
            rotatedPoint.y = 1.0 - pointToRotate.y;
        }; break;
        case kGPUImageRotateLeft:
        {
            rotatedPoint.x = 1.0 - pointToRotate.y;
            rotatedPoint.y = pointToRotate.x;
        }; break;
        case kGPUImageRotateRight:
        {
            rotatedPoint.x = pointToRotate.y;
            rotatedPoint.y = 1.0 - pointToRotate.x;
        }; break;
        case kGPUImageRotateRightFlipVertical:
        {
            rotatedPoint.x = pointToRotate.y;
            rotatedPoint.y = pointToRotate.x;
        }; break;
        case kGPUImageRotateRightFlipHorizontal:
        {
            rotatedPoint.x = 1.0 - pointToRotate.y;
            rotatedPoint.y = 1.0 - pointToRotate.x;
        }; break;
        case kGPUImageRotate180:
        {
            rotatedPoint.x = 1.0 - pointToRotate.x;
            rotatedPoint.y = 1.0 - pointToRotate.y;
        }; break;
    }

    return rotatedPoint;
}
// Records the incoming frame size (honoring any forced/overridden size) and
// gives the filter a chance to reconfigure for the new FBO size.
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
    if (self.preventRendering)
    {
        return;
    }

    if (overrideInputSize)
    {
        if (CGSizeEqualToSize(forcedMaximumSize, CGSizeZero))
        {
            // Exact size was forced via -forceProcessingAtSize:; keep it as-is.
        }
        else
        {
            // Aspect-ratio-respecting cap: fit the incoming size inside the
            // forced maximum rectangle.
            CGRect insetRect = AVMakeRectWithAspectRatioInsideRect(newSize, CGRectMake(0.0, 0.0, forcedMaximumSize.width, forcedMaximumSize.height));
            inputTextureSize = insetRect.size;
        }
    }
    else
    {
        CGSize rotatedSize = [self rotatedSize:newSize forIndex:textureIndex];

        if (CGSizeEqualToSize(rotatedSize, CGSizeZero))
        {
            inputTextureSize = rotatedSize;
        }
        else if (!CGSizeEqualToSize(inputTextureSize, rotatedSize))
        {
            inputTextureSize = rotatedSize;
        }
    }

    [self setupFilterForSize:[self sizeOfFBO]];
}
// Stores the rotation to apply when sampling the input texture.
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    inputRotation = newInputRotation;
}

// Forces processing at an exact size; CGSizeZero disables the override.
- (void)forceProcessingAtSize:(CGSize)frameSize;
{
    if (CGSizeEqualToSize(frameSize, CGSizeZero))
    {
        overrideInputSize = NO;
    }
    else
    {
        overrideInputSize = YES;
        inputTextureSize = frameSize;
        forcedMaximumSize = CGSizeZero;
    }
}

// Caps processing at a maximum size while keeping the input's aspect ratio;
// CGSizeZero disables the override and clears the cached sizes.
- (void)forceProcessingAtSizeRespectingAspectRatio:(CGSize)frameSize;
{
    if (CGSizeEqualToSize(frameSize, CGSizeZero))
    {
        overrideInputSize = NO;
        inputTextureSize = CGSizeZero;
        forcedMaximumSize = CGSizeZero;
    }
    else
    {
        overrideInputSize = YES;
        forcedMaximumSize = frameSize;
    }
}
// Upper bound on this filter's output size; CGSizeZero means "no limit".
- (CGSize)maximumOutputSize;
{
    // I'm temporarily disabling adjustments for smaller output sizes until I figure out how to make this work better
    return CGSizeZero;

    /*
    if (CGSizeEqualToSize(cachedMaximumOutputSize, CGSizeZero))
    {
        for (id<GPUImageInput> currentTarget in targets)
        {
            if ([currentTarget maximumOutputSize].width > cachedMaximumOutputSize.width)
            {
                cachedMaximumOutputSize = [currentTarget maximumOutputSize];
            }
        }
    }

    return cachedMaximumOutputSize;
     */
}
// Propagates end-of-stream to every downstream target, exactly once.
- (void)endProcessing
{
    if (isEndProcessing)
    {
        return;
    }

    isEndProcessing = YES;

    for (id<GPUImageInput> downstreamTarget in targets)
    {
        [downstreamTarget endProcessing];
    }
}
// Base filters work on full-color input; monochrome-optimized subclasses override.
- (BOOL)wantsMonochromeInput;
{
    return NO;
}
#pragma mark -
#pragma mark Accessors
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFilterGroup.h
================================================
#import "GPUImageOutput.h"
#import "GPUImageFilter.h"
/** A composite that packages a chain of filters as a single input/output node.
 Input is fanned out to initialFilters; output is taken from terminalFilter.
 */
@interface GPUImageFilterGroup : GPUImageOutput <GPUImageInput>
{
    NSMutableArray *filters;   // All member filters, in the order they were added
    BOOL isEndProcessing;      // Guards against repeated endProcessing propagation
}

/// The last filter in the chain; target management is delegated to it.
@property(readwrite, nonatomic, strong) GPUImageOutput<GPUImageInput> *terminalFilter;
/// Entry-point filters that receive the group's input directly.
@property(readwrite, nonatomic, strong) NSArray *initialFilters;
/// Entry-point filter excluded from newFrameReady propagation (e.g. for feedback loops).
@property(readwrite, nonatomic, strong) GPUImageOutput<GPUImageInput> *inputFilterToIgnoreForUpdates;

// Filter management
- (void)addFilter:(GPUImageOutput<GPUImageInput> *)newFilter;
- (GPUImageOutput<GPUImageInput> *)filterAtIndex:(NSUInteger)filterIndex;
- (NSUInteger)filterCount;

@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFilterGroup.m
================================================
#import "GPUImageFilterGroup.h"
#import "GPUImagePicture.h"
@implementation GPUImageFilterGroup
@synthesize terminalFilter = _terminalFilter;
@synthesize initialFilters = _initialFilters;
@synthesize inputFilterToIgnoreForUpdates = _inputFilterToIgnoreForUpdates;
// Creates an empty group; filters are added via -addFilter:.
- (id)init;
{
    self = [super init];
    if (self)
    {
        filters = [NSMutableArray array];
    }
    return self;
}
#pragma mark -
#pragma mark Filter management
// Appends a filter to the group's list (does not wire up any targets).
- (void)addFilter:(GPUImageOutput<GPUImageInput> *)newFilter;
{
    [filters addObject:newFilter];
}

// Returns the filter stored at the given position.
- (GPUImageOutput<GPUImageInput> *)filterAtIndex:(NSUInteger)filterIndex;
{
    return filters[filterIndex];
}

// Number of filters currently in the group.
- (NSUInteger)filterCount;
{
    return filters.count;
}
#pragma mark -
#pragma mark Still image processing
// Still-image capture is delegated to the last filter in the chain.
- (void)useNextFrameForImageCapture;
{
    [self.terminalFilter useNextFrameForImageCapture];
}

// Caller owns the returned CGImageRef (Create rule).
- (CGImageRef)newCGImageFromCurrentlyProcessedOutput;
{
    return [self.terminalFilter newCGImageFromCurrentlyProcessedOutput];
}
#pragma mark -
#pragma mark GPUImageOutput overrides
// All target management is forwarded to the terminal filter, so downstream
// consumers attach to the end of the internal chain.

- (void)setTargetToIgnoreForUpdates:(id<GPUImageInput>)targetToIgnoreForUpdates;
{
    [_terminalFilter setTargetToIgnoreForUpdates:targetToIgnoreForUpdates];
}

- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;
{
    [_terminalFilter addTarget:newTarget atTextureLocation:textureLocation];
}

- (void)removeTarget:(id<GPUImageInput>)targetToRemove;
{
    [_terminalFilter removeTarget:targetToRemove];
}

- (void)removeAllTargets;
{
    [_terminalFilter removeAllTargets];
}

- (NSArray *)targets;
{
    return [_terminalFilter targets];
}

- (void)setFrameProcessingCompletionBlock:(void (^)(GPUImageOutput *, CMTime))frameProcessingCompletionBlock;
{
    [_terminalFilter setFrameProcessingCompletionBlock:frameProcessingCompletionBlock];
}

- (void (^)(GPUImageOutput *, CMTime))frameProcessingCompletionBlock;
{
    return [_terminalFilter frameProcessingCompletionBlock];
}
#pragma mark -
#pragma mark GPUImageInput protocol
// Input-side protocol methods fan the group's input out to the entry-point
// filters (initialFilters).

- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
    for (GPUImageOutput<GPUImageInput> *currentFilter in _initialFilters)
    {
        if (currentFilter != self.inputFilterToIgnoreForUpdates)
        {
            [currentFilter newFrameReadyAtTime:frameTime atIndex:textureIndex];
        }
    }
}

- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
{
    for (GPUImageOutput<GPUImageInput> *currentFilter in _initialFilters)
    {
        [currentFilter setInputFramebuffer:newInputFramebuffer atIndex:textureIndex];
    }
}

- (NSInteger)nextAvailableTextureIndex;
{
//    if ([_initialFilters count] > 0)
//    {
//        return [[_initialFilters objectAtIndex:0] nextAvailableTextureIndex];
//    }

    return 0;
}
// Forwards the input size to every entry-point filter.
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
    for (GPUImageOutput<GPUImageInput> *entryFilter in _initialFilters)
    {
        [entryFilter setInputSize:newSize atIndex:textureIndex];
    }
}

// Forwards the input rotation to every entry-point filter.
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    for (GPUImageOutput<GPUImageInput> *entryFilter in _initialFilters)
    {
        [entryFilter setInputRotation:newInputRotation atIndex:textureIndex];
    }
}
// Size overrides apply to EVERY member filter (not just entry points), since
// each stage renders into its own framebuffer.
- (void)forceProcessingAtSize:(CGSize)frameSize;
{
    for (GPUImageOutput<GPUImageInput> *currentFilter in filters)
    {
        [currentFilter forceProcessingAtSize:frameSize];
    }
}

- (void)forceProcessingAtSizeRespectingAspectRatio:(CGSize)frameSize;
{
    for (GPUImageOutput<GPUImageInput> *currentFilter in filters)
    {
        [currentFilter forceProcessingAtSizeRespectingAspectRatio:frameSize];
    }
}
// Upper bound on the group's output size; CGSizeZero means "no limit".
- (CGSize)maximumOutputSize;
{
    // I'm temporarily disabling adjustments for smaller output sizes until I figure out how to make this work better
    return CGSizeZero;

    /*
    if (CGSizeEqualToSize(cachedMaximumOutputSize, CGSizeZero))
    {
        for (id<GPUImageInput> currentTarget in _initialFilters)
        {
            if ([currentTarget maximumOutputSize].width > cachedMaximumOutputSize.width)
            {
                cachedMaximumOutputSize = [currentTarget maximumOutputSize];
            }
        }
    }

    return cachedMaximumOutputSize;
     */
}
// Signals end-of-stream to the entry filters exactly once; the isEndProcessing
// flag makes repeated calls no-ops.
- (void)endProcessing;
{
    if (isEndProcessing)
    {
        return;
    }
    isEndProcessing = YES;
    for (id<GPUImageInput> entryFilter in _initialFilters)
    {
        [entryFilter endProcessing];
    }
}
// Returns YES only when every entry filter wants monochrome input
// (vacuously YES for an empty group, matching the previous behavior).
- (BOOL)wantsMonochromeInput;
{
    for (GPUImageOutput<GPUImageInput> *currentFilter in _initialFilters)
    {
        // Short-circuit on the first filter that declines; the old code kept
        // AND-ing through the whole array after the answer was already NO.
        if (![currentFilter wantsMonochromeInput])
        {
            return NO;
        }
    }
    return YES;
}
// Relays the monochrome-input flag to every entry filter.
- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;
{
    [_initialFilters enumerateObjectsUsingBlock:^(GPUImageOutput<GPUImageInput> *entryFilter, NSUInteger idx, BOOL *stop) {
        [entryFilter setCurrentlyReceivingMonochromeInput:newValue];
    }];
}
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFilterPipeline.h
================================================
#import <Foundation/Foundation.h>
#import "GPUImageOutput.h"
// Maintains an ordered chain of filters wired between one input source and one
// output target; any mutation of the list rebuilds the target graph.
@interface GPUImageFilterPipeline : NSObject
{
    NSString *stringValue; // scratch slot used while parsing NSString(...) attribute values from a configuration
}
@property (strong) NSMutableArray *filters; // ordered GPUImageOutput<GPUImageInput> stages
@property (strong) GPUImageOutput *input;   // source feeding the first filter
@property (strong) id <GPUImageInput> output; // sink attached after the last filter (may be nil)
// Initializers: explicit filter list, in-memory configuration dictionary, or a plist URL.
- (id) initWithOrderedFilters:(NSArray*) filters input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
- (id) initWithConfiguration:(NSDictionary*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
- (id) initWithConfigurationFile:(NSURL*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
// Chain mutation — each call rewires the whole pipeline.
- (void) addFilter:(GPUImageOutput<GPUImageInput> *)filter;
- (void) addFilter:(GPUImageOutput<GPUImageInput> *)filter atIndex:(NSUInteger)insertIndex;
- (void) replaceFilterAtIndex:(NSUInteger)index withFilter:(GPUImageOutput<GPUImageInput> *)filter;
- (void) replaceAllFilters:(NSArray *) newFilters;
- (void) removeFilter:(GPUImageOutput<GPUImageInput> *)filter;
- (void) removeFilterAtIndex:(NSUInteger)index;
- (void) removeAllFilters;
// Capture helpers reading from the last filter's framebuffer.
- (UIImage *) currentFilteredFrame;
- (UIImage *) currentFilteredFrameWithOrientation:(UIImageOrientation)imageOrientation;
- (CGImageRef) newCGImageFromCurrentFilteredFrame; // caller owns the returned CGImage
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFilterPipeline.m
================================================
#import "GPUImageFilterPipeline.h"
// Private API: configuration parsing and target-graph rebuilding.
@interface GPUImageFilterPipeline ()
- (BOOL)_parseConfiguration:(NSDictionary *)configuration; // returns NO on malformed input
- (void)_refreshFilters; // rewires input -> filters... -> output
@end
@implementation GPUImageFilterPipeline
@synthesize filters = _filters, input = _input, output = _output;
#pragma mark Config file init
// Initializes the pipeline from a configuration dictionary. A parse failure is
// treated as fatal (abort), matching the original behavior.
- (id)initWithConfiguration:(NSDictionary *)configuration input:(GPUImageOutput *)input output:(id <GPUImageInput>)output {
    self = [super init];
    if (self == nil) {
        return nil;
    }
    self.input = input;
    self.output = output;
    BOOL parsedOK = [self _parseConfiguration:configuration];
    if (!parsedOK) {
        NSLog(@"Sorry, a parsing error occurred.");
        abort();
    }
    [self _refreshFilters];
    return self;
}
// Loads a plist from the given URL and defers to the dictionary initializer.
- (id)initWithConfigurationFile:(NSURL *)configuration input:(GPUImageOutput *)input output:(id <GPUImageInput>)output {
    NSDictionary *loadedConfiguration = [NSDictionary dictionaryWithContentsOfURL:configuration];
    return [self initWithConfiguration:loadedConfiguration input:input output:output];
}
// Builds self.filters from a configuration dictionary of the shape:
//   { Filters: [ { FilterName: <class name>,
//                  Attributes: { <selector>: "float(x)" | "CGPoint(x, y)" | "NSString(s)"
//                                            | [ array of such strings ] } } ] }
// Returns NO when the Filters key is missing or an attribute uses an unknown
// type modifier. Filters are collected first and assigned in one shot.
- (BOOL)_parseConfiguration:(NSDictionary *)configuration {
    NSArray *filters = [configuration objectForKey:@"Filters"];
    if (!filters) {
        return NO;
    }
    NSError *regexError = nil;
    // Capture group 1 = type modifier, groups 2/3 = argument text inside the parentheses.
    NSRegularExpression *parsingRegex = [NSRegularExpression regularExpressionWithPattern:@"(float|CGPoint|NSString)\\((.*?)(?:,\\s*(.*?))*\\)"
                                                                                  options:0
                                                                                    error:&regexError]; // FIX: restored "&regexError" — the source contained the mis-encoded token "®exError" (an HTML-entity mangling of "&reg"), which does not compile
    // It's faster to put them into an array and then pass it to the filters property than it is to call [self addFilter:] every time
    NSMutableArray *orderedFilters = [NSMutableArray arrayWithCapacity:[filters count]];
    for (NSDictionary *filter in filters) {
        NSString *filterName = [filter objectForKey:@"FilterName"];
        Class theClass = NSClassFromString(filterName);
        GPUImageOutput<GPUImageInput> *genericFilter = [[theClass alloc] init];
        // Set up the properties via NSInvocation on the attribute's selector
        NSDictionary *filterAttributes;
        if ((filterAttributes = [filter objectForKey:@"Attributes"])) {
            for (NSString *propertyKey in filterAttributes) {
                // Set up the selector named by the attribute key
                SEL theSelector = NSSelectorFromString(propertyKey);
                NSInvocation *inv = [NSInvocation invocationWithMethodSignature:[theClass instanceMethodSignatureForSelector:theSelector]];
                [inv setSelector:theSelector];
                [inv setTarget:genericFilter];
                // Only selectors taking a parameter (trailing ':') get an argument parsed
                if ([propertyKey hasSuffix:@":"]) {
                    stringValue = nil;
                    // Array-valued attribute: parse each element, collect into parsedArray,
                    // and pass the whole array as the single argument.
                    NSMutableArray *parsedArray;
                    if ([[filterAttributes objectForKey:propertyKey] isKindOfClass:[NSArray class]]) {
                        NSArray *array = [filterAttributes objectForKey:propertyKey];
                        parsedArray = [NSMutableArray arrayWithCapacity:[array count]];
                        for (NSString *string in array) {
                            NSTextCheckingResult *parse = [parsingRegex firstMatchInString:string
                                                                                   options:0
                                                                                     range:NSMakeRange(0, [string length])];
                            NSString *modifier = [string substringWithRange:[parse rangeAtIndex:1]];
                            if ([modifier isEqualToString:@"float"]) {
                                // Float modifier, one argument
                                CGFloat value = [[string substringWithRange:[parse rangeAtIndex:2]] floatValue];
                                [parsedArray addObject:[NSNumber numberWithFloat:value]];
                                // NOTE(review): this per-element scalar write is overwritten by the
                                // array write below; kept for fidelity with the original flow.
                                [inv setArgument:&value atIndex:2];
                            } else if ([modifier isEqualToString:@"CGPoint"]) {
                                // CGPoint modifier, two float arguments
                                CGFloat x = [[string substringWithRange:[parse rangeAtIndex:2]] floatValue];
                                CGFloat y = [[string substringWithRange:[parse rangeAtIndex:3]] floatValue];
                                CGPoint value = CGPointMake(x, y);
                                [parsedArray addObject:[NSValue valueWithCGPoint:value]];
                            } else if ([modifier isEqualToString:@"NSString"]) {
                                // NSString modifier, one string argument; the ivar keeps it alive
                                // for the duration of the invocation.
                                stringValue = [[string substringWithRange:[parse rangeAtIndex:2]] copy];
                                [inv setArgument:&stringValue atIndex:2];
                            } else {
                                return NO;
                            }
                        }
                        [inv setArgument:&parsedArray atIndex:2];
                    } else {
                        // Scalar attribute: parse the single value and pass it directly.
                        NSString *string = [filterAttributes objectForKey:propertyKey];
                        NSTextCheckingResult *parse = [parsingRegex firstMatchInString:string
                                                                               options:0
                                                                                 range:NSMakeRange(0, [string length])];
                        NSString *modifier = [string substringWithRange:[parse rangeAtIndex:1]];
                        if ([modifier isEqualToString:@"float"]) {
                            // Float modifier, one argument
                            CGFloat value = [[string substringWithRange:[parse rangeAtIndex:2]] floatValue];
                            [inv setArgument:&value atIndex:2];
                        } else if ([modifier isEqualToString:@"CGPoint"]) {
                            // CGPoint modifier, two float arguments
                            CGFloat x = [[string substringWithRange:[parse rangeAtIndex:2]] floatValue];
                            CGFloat y = [[string substringWithRange:[parse rangeAtIndex:3]] floatValue];
                            CGPoint value = CGPointMake(x, y);
                            [inv setArgument:&value atIndex:2];
                        } else if ([modifier isEqualToString:@"NSString"]) {
                            // NSString modifier, one string argument
                            stringValue = [[string substringWithRange:[parse rangeAtIndex:2]] copy];
                            [inv setArgument:&stringValue atIndex:2];
                        } else {
                            return NO;
                        }
                    }
                }
                [inv invoke];
            }
        }
        [orderedFilters addObject:genericFilter];
    }
    self.filters = orderedFilters;
    return YES;
}
#pragma mark Regular init
// Initializes the pipeline from an explicit, already-ordered filter list.
- (id)initWithOrderedFilters:(NSArray *)filters input:(GPUImageOutput *)input output:(id <GPUImageInput>)output {
    self = [super init];
    if (self == nil) {
        return nil;
    }
    self.input = input;
    self.output = output;
    self.filters = [NSMutableArray arrayWithArray:filters];
    [self _refreshFilters];
    return self;
}
// Inserts a filter at the given position and rebuilds the chain.
- (void)addFilter:(GPUImageOutput<GPUImageInput> *)filter atIndex:(NSUInteger)insertIndex {
    NSMutableArray *chain = self.filters;
    [chain insertObject:filter atIndex:insertIndex];
    [self _refreshFilters];
}
// Appends a filter to the end of the chain and rebuilds it.
- (void)addFilter:(GPUImageOutput<GPUImageInput> *)filter {
    NSMutableArray *chain = self.filters;
    [chain addObject:filter];
    [self _refreshFilters];
}
// Swaps out the filter at the given index and rebuilds the chain.
- (void)replaceFilterAtIndex:(NSUInteger)index withFilter:(GPUImageOutput<GPUImageInput> *)filter {
    NSMutableArray *chain = self.filters;
    [chain replaceObjectAtIndex:index withObject:filter];
    [self _refreshFilters];
}
// Removes the given filter (all occurrences, per NSMutableArray semantics)
// and rebuilds the chain.
- (void) removeFilter:(GPUImageOutput<GPUImageInput> *)filter;
{
    NSMutableArray *chain = self.filters;
    [chain removeObject:filter];
    [self _refreshFilters];
}
// Removes the filter at the given index and rebuilds the chain.
- (void)removeFilterAtIndex:(NSUInteger)index {
    NSMutableArray *chain = self.filters;
    [chain removeObjectAtIndex:index];
    [self _refreshFilters];
}
// Empties the chain, leaving input wired straight to output (if any).
- (void)removeAllFilters {
    NSMutableArray *chain = self.filters;
    [chain removeAllObjects];
    [self _refreshFilters];
}
// Replaces the whole chain with a mutable copy of the supplied list and rebuilds.
- (void)replaceAllFilters:(NSArray *)newFilters {
    NSMutableArray *replacementChain = [NSMutableArray arrayWithArray:newFilters];
    self.filters = replacementChain;
    [self _refreshFilters];
}
// Rewires the whole graph: input -> filters in order -> output (if set).
// Each stage's previous targets are dropped before the next link is added.
- (void)_refreshFilters {
    id previousStage = self.input;
    for (GPUImageOutput<GPUImageInput> *stage in self.filters) {
        [previousStage removeAllTargets];
        [previousStage addTarget:stage];
        previousStage = stage;
    }
    [previousStage removeAllTargets];
    if (self.output != nil) {
        [previousStage addTarget:self.output];
    }
}
// Captures a UIImage from the last filter's current framebuffer.
- (UIImage *)currentFilteredFrame {
    GPUImageOutput<GPUImageInput> *terminalFilter = [_filters lastObject];
    return [terminalFilter imageFromCurrentFramebuffer];
}
// Captures a UIImage with an explicit orientation from the last filter.
- (UIImage *)currentFilteredFrameWithOrientation:(UIImageOrientation)imageOrientation {
    GPUImageOutput<GPUImageInput> *terminalFilter = [_filters lastObject];
    return [terminalFilter imageFromCurrentFramebufferWithOrientation:imageOrientation];
}
// Captures a CGImage from the last filter; caller owns the returned image
// (the "new" prefix signals +1 ownership).
- (CGImageRef)newCGImageFromCurrentFilteredFrame {
    GPUImageOutput<GPUImageInput> *terminalFilter = [_filters lastObject];
    return [terminalFilter newCGImageFromCurrentlyProcessedOutput];
}
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFourInputFilter.h
================================================
#import "GPUImageThreeInputFilter.h"
// Vertex shader forwarding the quad position plus four texture-coordinate sets.
extern NSString *const kGPUImageFourInputTextureVertexShaderString;
// Filter combining four input textures; extends the three-input filter with a
// fourth source bound to "inputImageTexture4" in the fragment shader.
@interface GPUImageFourInputFilter : GPUImageThreeInputFilter
{
    GPUImageFramebuffer *fourthInputFramebuffer; // framebuffer feeding the fourth texture slot
    GLint filterFourthTextureCoordinateAttribute; // attribute location of inputTextureCoordinate4
    GLint filterInputTextureUniform4; // uniform location of inputImageTexture4
    GPUImageRotationMode inputRotation4; // rotation applied to the fourth input's coordinates
    GLuint filterSourceTexture4;
    CMTime fourthFrameTime; // timestamp of the most recent fourth-input frame
    BOOL hasSetThirdTexture, hasReceivedFourthFrame, fourthFrameWasVideo;
    BOOL fourthFrameCheckDisabled; // when YES, rendering does not wait for a fourth frame
}
// Lets rendering proceed without a fourth frame (e.g. when input 4 is a still image).
- (void)disableFourthFrameCheck;
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFourInputFilter.m
================================================
#import "GPUImageFourInputFilter.h"
// Pass-through vertex shader: emits the quad position unchanged and hands the
// four per-input texture coordinate pairs to the fragment shader. (No comments
// inside the SHADER_STRING body — the stringifying macro would corrupt them.)
NSString *const kGPUImageFourInputTextureVertexShaderString = SHADER_STRING
(
 attribute vec4 position;
 attribute vec4 inputTextureCoordinate;
 attribute vec4 inputTextureCoordinate2;
 attribute vec4 inputTextureCoordinate3;
 attribute vec4 inputTextureCoordinate4;

 varying vec2 textureCoordinate;
 varying vec2 textureCoordinate2;
 varying vec2 textureCoordinate3;
 varying vec2 textureCoordinate4;

 void main()
 {
     gl_Position = position;
     textureCoordinate = inputTextureCoordinate.xy;
     textureCoordinate2 = inputTextureCoordinate2.xy;
     textureCoordinate3 = inputTextureCoordinate3.xy;
     textureCoordinate4 = inputTextureCoordinate4.xy;
 }
);
@implementation GPUImageFourInputFilter
#pragma mark -
#pragma mark Initialization and teardown
// Convenience initializer: pairs the standard four-input vertex shader with the
// supplied fragment shader.
- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
{
    return [self initWithVertexShaderFromString:kGPUImageFourInputTextureVertexShaderString fragmentShaderFromString:fragmentShaderString];
}
// Designated initializer: resets fourth-input bookkeeping, then caches the
// fourth texture's attribute/uniform locations on the GL processing queue.
- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;
{
    if (!(self = [super initWithVertexShaderFromString:vertexShaderString fragmentShaderFromString:fragmentShaderString]))
    {
        return nil;
    }
    inputRotation4 = kGPUImageNoRotation;
    hasSetThirdTexture = NO;
    hasReceivedFourthFrame = NO;
    fourthFrameWasVideo = NO;
    fourthFrameCheckDisabled = NO;
    fourthFrameTime = kCMTimeInvalid;
    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];
        filterFourthTextureCoordinateAttribute = [filterProgram attributeIndex:@"inputTextureCoordinate4"];
        filterInputTextureUniform4 = [filterProgram uniformIndex:@"inputImageTexture4"]; // This does assume a name of "inputImageTexture4" for the fourth input texture in the fragment shader
        glEnableVertexAttribArray(filterFourthTextureCoordinateAttribute);
    });
    return self;
}
// Registers the fourth texture-coordinate attribute before the program links.
- (void)initializeAttributes;
{
    [super initializeAttributes];
    [filterProgram addAttribute:@"inputTextureCoordinate4"];
}
// After this call, rendering no longer waits for a frame on input 4
// (useful when the fourth input is a static image).
- (void)disableFourthFrameCheck;
{
    fourthFrameCheckDisabled = YES;
}
#pragma mark -
#pragma mark Rendering
// Draws the filtered quad: binds all four input textures to units 2-5, uploads
// per-input texture coordinates (each rotated independently), and renders into
// a freshly fetched framebuffer. GL call order matters here — do not reorder.
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
    if (self.preventRendering)
    {
        // Still must release the input framebuffers we locked on receipt.
        [firstInputFramebuffer unlock];
        [secondInputFramebuffer unlock];
        [thirdInputFramebuffer unlock];
        [fourthInputFramebuffer unlock];
        return;
    }

    [GPUImageContext setActiveShaderProgram:filterProgram];
    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];
    if (usingNextFrameForImageCapture)
    {
        // Extra lock keeps the output alive until the captured image is read back.
        [outputFramebuffer lock];
    }

    [self setUniformsForProgramAtIndex:0];

    glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
    glClear(GL_COLOR_BUFFER_BIT);

    // Inputs 1-4 on texture units 2-5 (units 0/1 are reserved elsewhere in GPUImage).
    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);
    glUniform1i(filterInputTextureUniform, 2);

    glActiveTexture(GL_TEXTURE3);
    glBindTexture(GL_TEXTURE_2D, [secondInputFramebuffer texture]);
    glUniform1i(filterInputTextureUniform2, 3);

    glActiveTexture(GL_TEXTURE4);
    glBindTexture(GL_TEXTURE_2D, [thirdInputFramebuffer texture]);
    glUniform1i(filterInputTextureUniform3, 4);

    glActiveTexture(GL_TEXTURE5);
    glBindTexture(GL_TEXTURE_2D, [fourthInputFramebuffer texture]);
    glUniform1i(filterInputTextureUniform4, 5);

    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
    glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
    glVertexAttribPointer(filterSecondTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation2]);
    glVertexAttribPointer(filterThirdTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation3]);
    glVertexAttribPointer(filterFourthTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation4]);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    // Inputs are consumed; balance the locks taken in -setInputFramebuffer:atIndex:.
    [firstInputFramebuffer unlock];
    [secondInputFramebuffer unlock];
    [thirdInputFramebuffer unlock];
    [fourthInputFramebuffer unlock];
    if (usingNextFrameForImageCapture)
    {
        // Wake any thread waiting in the image-capture path.
        dispatch_semaphore_signal(imageCaptureSemaphore);
    }
}
#pragma mark -
#pragma mark GPUImageInput
// Reports the next free input slot based on which textures have been claimed.
// NOTE(review): there is no hasSetFourthTexture ivar, so the maximum returned
// is 3 — once the third slot is taken, every further target lands on index 3.
- (NSInteger)nextAvailableTextureIndex;
{
    if (hasSetThirdTexture)
    {
        return 3;
    }
    else if (hasSetSecondTexture)
    {
        return 2;
    }
    else if (hasSetFirstTexture)
    {
        return 1;
    }
    else
    {
        return 0;
    }
}
// Stores and locks the incoming framebuffer in the slot for the given index;
// any index above 2 is treated as the fourth input.
- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
{
    switch (textureIndex)
    {
        case 0:
            firstInputFramebuffer = newInputFramebuffer;
            hasSetFirstTexture = YES;
            [firstInputFramebuffer lock];
            break;
        case 1:
            secondInputFramebuffer = newInputFramebuffer;
            hasSetSecondTexture = YES;
            [secondInputFramebuffer lock];
            break;
        case 2:
            thirdInputFramebuffer = newInputFramebuffer;
            hasSetThirdTexture = YES;
            [thirdInputFramebuffer lock];
            break;
        default:
            fourthInputFramebuffer = newInputFramebuffer;
            [fourthInputFramebuffer lock];
            break;
    }
}
// Tracks input sizes; a zero size clears the corresponding "texture set" flag.
// Only index 0 forwards to super (which drives the output size).
// NOTE(review): indices >= 3 are silently ignored here — there is no
// hasSetFourthTexture flag to clear; confirm against upstream before relying on it.
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
    if (textureIndex == 0)
    {
        [super setInputSize:newSize atIndex:textureIndex];

        if (CGSizeEqualToSize(newSize, CGSizeZero))
        {
            hasSetFirstTexture = NO;
        }
    }
    else if (textureIndex == 1)
    {
        if (CGSizeEqualToSize(newSize, CGSizeZero))
        {
            hasSetSecondTexture = NO;
        }
    }
    else if (textureIndex == 2)
    {
        if (CGSizeEqualToSize(newSize, CGSizeZero))
        {
            hasSetThirdTexture = NO;
        }
    }
}
// Records the rotation mode for the addressed input; indices above 2 map to
// the fourth input.
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    switch (textureIndex)
    {
        case 0:
            inputRotation = newInputRotation;
            break;
        case 1:
            inputRotation2 = newInputRotation;
            break;
        case 2:
            inputRotation3 = newInputRotation;
            break;
        default:
            inputRotation4 = newInputRotation;
            break;
    }
}
// Returns the given size with width/height swapped when the addressed input's
// rotation mode transposes the image; otherwise returns it unchanged.
- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex;
{
    GPUImageRotationMode rotationForIndex;
    switch (textureIndex)
    {
        case 0:
            rotationForIndex = inputRotation;
            break;
        case 1:
            rotationForIndex = inputRotation2;
            break;
        case 2:
            rotationForIndex = inputRotation3;
            break;
        default:
            rotationForIndex = inputRotation4;
            break;
    }

    if (!GPUImageRotationSwapsWidthAndHeight(rotationForIndex))
    {
        return sizeToRotate;
    }
    return CGSizeMake(sizeToRotate.height, sizeToRotate.width);
}
// Collects frames from the four inputs and renders once all four (or their
// disabled-check substitutes) have arrived. When a video input pairs with a
// still image (indefinite timestamp), the video frame alone triggers a render.
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
    // You can set up infinite update loops, so this helps to short circuit them.
    // FIX: previously only the first three flags were checked, so when the fourth
    // frame arrived last it hit this early return and the filter never rendered.
    if (hasReceivedFirstFrame && hasReceivedSecondFrame && hasReceivedThirdFrame && hasReceivedFourthFrame)
    {
        return;
    }

    BOOL updatedMovieFrameOppositeStillImage = NO;

    if (textureIndex == 0)
    {
        hasReceivedFirstFrame = YES;
        firstFrameTime = frameTime;
        if (secondFrameCheckDisabled)
        {
            hasReceivedSecondFrame = YES;
        }
        if (thirdFrameCheckDisabled)
        {
            hasReceivedThirdFrame = YES;
        }
        if (fourthFrameCheckDisabled)
        {
            // FIX: this branch previously set hasReceivedThirdFrame by mistake.
            hasReceivedFourthFrame = YES;
        }

        if (!CMTIME_IS_INDEFINITE(frameTime))
        {
            if (CMTIME_IS_INDEFINITE(secondFrameTime))
            {
                updatedMovieFrameOppositeStillImage = YES;
            }
        }
    }
    else if (textureIndex == 1)
    {
        hasReceivedSecondFrame = YES;
        secondFrameTime = frameTime;
        if (firstFrameCheckDisabled)
        {
            hasReceivedFirstFrame = YES;
        }
        if (thirdFrameCheckDisabled)
        {
            hasReceivedThirdFrame = YES;
        }
        if (fourthFrameCheckDisabled)
        {
            hasReceivedFourthFrame = YES;
        }

        if (!CMTIME_IS_INDEFINITE(frameTime))
        {
            if (CMTIME_IS_INDEFINITE(firstFrameTime))
            {
                updatedMovieFrameOppositeStillImage = YES;
            }
        }
    }
    else if (textureIndex == 2)
    {
        hasReceivedThirdFrame = YES;
        thirdFrameTime = frameTime;
        if (firstFrameCheckDisabled)
        {
            hasReceivedFirstFrame = YES;
        }
        if (secondFrameCheckDisabled)
        {
            hasReceivedSecondFrame = YES;
        }
        if (fourthFrameCheckDisabled)
        {
            hasReceivedFourthFrame = YES;
        }

        if (!CMTIME_IS_INDEFINITE(frameTime))
        {
            if (CMTIME_IS_INDEFINITE(firstFrameTime))
            {
                updatedMovieFrameOppositeStillImage = YES;
            }
        }
    }
    else
    {
        hasReceivedFourthFrame = YES;
        fourthFrameTime = frameTime;
        if (firstFrameCheckDisabled)
        {
            hasReceivedFirstFrame = YES;
        }
        if (secondFrameCheckDisabled)
        {
            hasReceivedSecondFrame = YES;
        }
        if (thirdFrameCheckDisabled)
        {
            hasReceivedThirdFrame = YES;
        }

        if (!CMTIME_IS_INDEFINITE(frameTime))
        {
            if (CMTIME_IS_INDEFINITE(firstFrameTime))
            {
                updatedMovieFrameOppositeStillImage = YES;
            }
        }
    }

    // || (hasReceivedFirstFrame && secondFrameCheckDisabled) || (hasReceivedSecondFrame && firstFrameCheckDisabled)
    if ((hasReceivedFirstFrame && hasReceivedSecondFrame && hasReceivedThirdFrame && hasReceivedFourthFrame) || updatedMovieFrameOppositeStillImage)
    {
        static const GLfloat imageVertices[] = {
            -1.0f, -1.0f,
             1.0f, -1.0f,
            -1.0f,  1.0f,
             1.0f,  1.0f,
        };

        [self renderToTextureWithVertices:imageVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];
        [self informTargetsAboutNewFrameAtTime:frameTime];

        // Reset for the next frame set.
        hasReceivedFirstFrame = NO;
        hasReceivedSecondFrame = NO;
        hasReceivedThirdFrame = NO;
        hasReceivedFourthFrame = NO;
    }
}
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFramebuffer.h
================================================
#import <Foundation/Foundation.h>
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
#import <OpenGLES/EAGL.h>
#import <OpenGLES/ES2/gl.h>
#import <OpenGLES/ES2/glext.h>
#else
#import <OpenGL/OpenGL.h>
#import <OpenGL/gl.h>
#endif
#import <QuartzCore/QuartzCore.h>
#import <CoreMedia/CoreMedia.h>
// Texture sampling/format parameters used when creating a framebuffer's backing texture.
typedef struct GPUTextureOptions {
    GLenum minFilter;      // e.g. GL_LINEAR
    GLenum magFilter;      // e.g. GL_LINEAR
    GLenum wrapS;          // e.g. GL_CLAMP_TO_EDGE
    GLenum wrapT;          // e.g. GL_CLAMP_TO_EDGE
    GLenum internalFormat; // e.g. GL_RGBA
    GLenum format;         // e.g. GL_BGRA
    GLenum type;           // e.g. GL_UNSIGNED_BYTE
} GPUTextureOptions;

// A pooled OpenGL framebuffer + texture pair, reference-counted so the
// framebuffer cache can recycle it once all consumers unlock it. On iOS it may
// be backed by a CVPixelBuffer for fast texture upload/readback.
@interface GPUImageFramebuffer : NSObject

@property(readonly) CGSize size;
@property(readonly) GPUTextureOptions textureOptions;
@property(readonly) GLuint texture;
@property(readonly) BOOL missingFramebuffer; // YES when only a texture (no FBO) was created

// Initialization and teardown
- (id)initWithSize:(CGSize)framebufferSize;
- (id)initWithSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)fboTextureOptions onlyTexture:(BOOL)onlyGenerateTexture;
- (id)initWithSize:(CGSize)framebufferSize overriddenTexture:(GLuint)inputTexture; // wraps an externally owned texture

// Usage
- (void)activateFramebuffer;

// Reference counting — when the count drops below 1 the framebuffer returns to the cache
- (void)lock;
- (void)unlock;
- (void)clearAllLocks;
- (void)disableReferenceCounting;
- (void)enableReferenceCounting;

// Image capture (caller owns the returned CGImage)
- (CGImageRef)newCGImageFromFramebufferContents;
- (void)restoreRenderTarget;

// Raw data bytes — bracket byteBuffer access with lockForReading/unlockAfterReading
- (void)lockForReading;
- (void)unlockAfterReading;
- (NSUInteger)bytesPerRow;
- (GLubyte *)byteBuffer;
- (CVPixelBufferRef)pixelBuffer;

@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFramebuffer.m
================================================
#import "GPUImageFramebuffer.h"
#import "GPUImageOutput.h"
@interface GPUImageFramebuffer()
{
    GLuint framebuffer; // GL framebuffer object name; 0 when only a texture exists
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
    CVPixelBufferRef renderTarget;     // pixel buffer backing the texture on the fast-upload path
    CVOpenGLESTextureRef renderTexture; // texture-cache texture wrapping renderTarget
    NSUInteger readLockCount;          // nesting depth of lockForReading calls
#else
#endif
    NSUInteger framebufferReferenceCount; // consumers currently holding this framebuffer
    BOOL referenceCountingDisabled;       // YES for externally managed framebuffers
}

- (void)generateFramebuffer;
- (void)generateTexture;
- (void)destroyFramebuffer;

@end
void dataProviderReleaseCallback (void *info, const void *data, size_t size);
void dataProviderUnlockCallback (void *info, const void *data, size_t size);
@implementation GPUImageFramebuffer
@synthesize size = _size;
@synthesize textureOptions = _textureOptions;
@synthesize texture = _texture;
@synthesize missingFramebuffer = _missingFramebuffer;
#pragma mark -
#pragma mark Initialization and teardown
// Designated initializer. When onlyGenerateTexture is YES, only a texture is
// created (framebuffer stays 0 and missingFramebuffer is YES); otherwise a
// full FBO (possibly pixel-buffer-backed) is generated.
- (id)initWithSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)fboTextureOptions onlyTexture:(BOOL)onlyGenerateTexture;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    _textureOptions = fboTextureOptions;
    _size = framebufferSize;
    framebufferReferenceCount = 0;
    referenceCountingDisabled = NO;
    _missingFramebuffer = onlyGenerateTexture;

    if (_missingFramebuffer)
    {
        // Texture creation must happen on the GL processing queue with the shared context current.
        runSynchronouslyOnVideoProcessingQueue(^{
            [GPUImageContext useImageProcessingContext];
            [self generateTexture];
            framebuffer = 0;
        });
    }
    else
    {
        [self generateFramebuffer];
    }
    return self;
}
// Wraps an externally owned texture. Reference counting is disabled because
// this object does not own the texture's lifetime.
- (id)initWithSize:(CGSize)framebufferSize overriddenTexture:(GLuint)inputTexture;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    // Standard BGRA options, identical to the plain-size initializer.
    const GPUTextureOptions defaultTextureOptions = {
        .minFilter = GL_LINEAR,
        .magFilter = GL_LINEAR,
        .wrapS = GL_CLAMP_TO_EDGE,
        .wrapT = GL_CLAMP_TO_EDGE,
        .internalFormat = GL_RGBA,
        .format = GL_BGRA,
        .type = GL_UNSIGNED_BYTE,
    };

    _textureOptions = defaultTextureOptions;
    _size = framebufferSize;
    framebufferReferenceCount = 0;
    referenceCountingDisabled = YES;
    _texture = inputTexture;

    return self;
}
// Convenience initializer: standard BGRA texture options with a full framebuffer.
- (id)initWithSize:(CGSize)framebufferSize;
{
    const GPUTextureOptions defaultTextureOptions = {
        .minFilter = GL_LINEAR,
        .magFilter = GL_LINEAR,
        .wrapS = GL_CLAMP_TO_EDGE,
        .wrapT = GL_CLAMP_TO_EDGE,
        .internalFormat = GL_RGBA,
        .format = GL_BGRA,
        .type = GL_UNSIGNED_BYTE,
    };

    return [self initWithSize:framebufferSize textureOptions:defaultTextureOptions onlyTexture:NO];
}
// Releases the GL/CoreVideo resources on the processing queue.
- (void)dealloc
{
    [self destroyFramebuffer];
}
#pragma mark -
#pragma mark Internal
// Creates the backing texture and applies the stored sampling/wrap options.
// Caller is responsible for having the GL context current (see initializers).
- (void)generateTexture;
{
    glActiveTexture(GL_TEXTURE1);
    glGenTextures(1, &_texture);
    glBindTexture(GL_TEXTURE_2D, _texture);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, _textureOptions.minFilter);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, _textureOptions.magFilter);
    // This is necessary for non-power-of-two textures
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, _textureOptions.wrapS);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, _textureOptions.wrapT);

    // TODO: Handle mipmaps
}
// Creates the FBO. On devices with texture-cache support the color attachment
// is a CVPixelBuffer-backed texture (zero-copy readback); otherwise a plain
// GL texture is allocated and attached.
- (void)generateFramebuffer;
{
    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];

        glGenFramebuffers(1, &framebuffer);
        glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);

        // By default, all framebuffers on iOS 5.0+ devices are backed by texture caches, using one shared cache
        if ([GPUImageContext supportsFastTextureUpload])
        {
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
            CVOpenGLESTextureCacheRef coreVideoTextureCache = [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache];
            // Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/

            CFDictionaryRef empty; // empty value for attr value.
            CFMutableDictionaryRef attrs;
            // An empty IOSurface-properties dictionary opts the pixel buffer into IOSurface backing,
            // which the texture cache requires.
            empty = CFDictionaryCreate(kCFAllocatorDefault, NULL, NULL, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks); // our empty IOSurface properties dictionary
            attrs = CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
            CFDictionarySetValue(attrs, kCVPixelBufferIOSurfacePropertiesKey, empty);

            CVReturn err = CVPixelBufferCreate(kCFAllocatorDefault, (int)_size.width, (int)_size.height, kCVPixelFormatType_32BGRA, attrs, &renderTarget);
            if (err)
            {
                NSLog(@"FBO size: %f, %f", _size.width, _size.height);
                NSAssert(NO, @"Error at CVPixelBufferCreate %d", err);
            }

            // Wrap the pixel buffer in a GL texture via the shared texture cache.
            err = CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault, coreVideoTextureCache, renderTarget,
                                                                NULL, // texture attributes
                                                                GL_TEXTURE_2D,
                                                                _textureOptions.internalFormat, // opengl format
                                                                (int)_size.width,
                                                                (int)_size.height,
                                                                _textureOptions.format, // native iOS format
                                                                _textureOptions.type,
                                                                0,
                                                                &renderTexture);
            if (err)
            {
                NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
            }

            CFRelease(attrs);
            CFRelease(empty);

            glBindTexture(CVOpenGLESTextureGetTarget(renderTexture), CVOpenGLESTextureGetName(renderTexture));
            _texture = CVOpenGLESTextureGetName(renderTexture);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, _textureOptions.wrapS);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, _textureOptions.wrapT);

            glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(renderTexture), 0);
#endif
        }
        else
        {
            // Plain-GL fallback: allocate texture storage and attach it to the FBO.
            [self generateTexture];

            glBindTexture(GL_TEXTURE_2D, _texture);

            glTexImage2D(GL_TEXTURE_2D, 0, _textureOptions.internalFormat, (int)_size.width, (int)_size.height, 0, _textureOptions.format, _textureOptions.type, 0);
            glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, _texture, 0);
        }

#ifndef NS_BLOCK_ASSERTIONS
        GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
        NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status);
#endif

        glBindTexture(GL_TEXTURE_2D, 0);
    });
}
// Tears down the FBO and its backing storage on the processing queue.
// The fast-upload path releases the CoreVideo objects; the plain path deletes
// the GL texture directly.
- (void)destroyFramebuffer;
{
    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];

        if (framebuffer)
        {
            glDeleteFramebuffers(1, &framebuffer);
            framebuffer = 0;
        }

        if ([GPUImageContext supportsFastTextureUpload] && (!_missingFramebuffer))
        {
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
            if (renderTarget)
            {
                CFRelease(renderTarget);
                renderTarget = NULL;
            }

            if (renderTexture)
            {
                CFRelease(renderTexture);
                renderTexture = NULL;
            }
#endif
        }
        else
        {
            // NOTE(review): on the fast-upload path with _missingFramebuffer == NO the
            // texture name comes from the texture cache and is released above; this
            // branch handles textures created by -generateTexture.
            glDeleteTextures(1, &_texture);
        }
    });
}
#pragma mark -
#pragma mark Usage
// Binds this FBO and sizes the viewport to its backing store.
- (void)activateFramebuffer;
{
    glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
    glViewport(0, 0, (GLint)_size.width, (GLint)_size.height);
}
#pragma mark -
#pragma mark Reference counting
// Takes a reference on this framebuffer; no-op when counting is disabled
// (externally managed textures).
- (void)lock;
{
    if (referenceCountingDisabled)
    {
        return;
    }
    framebufferReferenceCount += 1;
}
// Drops a reference; when the count reaches zero the framebuffer is returned to
// the shared cache for reuse.
// NOTE(review): this fork deviates from upstream GPUImage — an over-release is
// logged and ignored instead of asserting (the Chinese log message translates to
// "the source code was modified here"). The NSAssert below is therefore unreachable.
- (void)unlock;
{
    if (referenceCountingDisabled)
    {
        return;
    }
    if (framebufferReferenceCount == 0) {
        NSLog(@"这里修改了源码----");
        return;
    }
    NSAssert(framebufferReferenceCount > 0, @"Tried to overrelease a framebuffer, did you forget to call -useNextFrameForImageCapture before using -imageFromCurrentFramebuffer?");
    framebufferReferenceCount--;
    if (framebufferReferenceCount < 1)
    {
        [[GPUImageContext sharedFramebufferCache] returnFramebufferToCache:self];
    }
}
// Forcibly zeroes the reference count (used by the cache when reclaiming).
- (void)clearAllLocks;
{
    framebufferReferenceCount = 0;
}
// Turns lock/unlock into no-ops so the cache never reclaims this framebuffer.
- (void)disableReferenceCounting;
{
    referenceCountingDisabled = YES;
}
// Re-enables lock/unlock bookkeeping after a disableReferenceCounting call.
- (void)enableReferenceCounting;
{
    referenceCountingDisabled = NO;
}
#pragma mark -
#pragma mark Image capture
// CGDataProvider release callback for the malloc'd readback path: frees the
// pixel bytes once the CGImage no longer needs them.
void dataProviderReleaseCallback (void *info, const void *data, size_t size)
{
    free((void *)data);
}
// CGDataProvider release callback for the texture-cache path: takes back the
// framebuffer reference transferred into `info` (__bridge_transfer balances the
// __bridge_retained in -newCGImageFromFramebufferContents), unlocks the pixel
// buffer, and removes the framebuffer from the active capture list.
void dataProviderUnlockCallback (void *info, const void *data, size_t size)
{
    GPUImageFramebuffer *framebuffer = (__bridge_transfer GPUImageFramebuffer*)info;

    [framebuffer restoreRenderTarget];
    [framebuffer unlock];
    [[GPUImageContext sharedFramebufferCache] removeFramebufferFromActiveImageCaptureList:framebuffer];
}
// Snapshots the framebuffer into a CGImage the caller owns. On the texture-cache
// path the CGImage aliases the live pixel buffer (retained until the data
// provider's unlock callback fires); on the fallback path pixels are copied out
// with glReadPixels into a malloc'd buffer.
- (CGImageRef)newCGImageFromFramebufferContents;
{
    // a CGImage can only be created from a 'normal' color texture
    NSAssert(self.textureOptions.internalFormat == GL_RGBA, @"For conversion to a CGImage the output texture format for this filter must be GL_RGBA.");
    NSAssert(self.textureOptions.type == GL_UNSIGNED_BYTE, @"For conversion to a CGImage the type of the output texture of this filter must be GL_UNSIGNED_BYTE.");

    __block CGImageRef cgImageFromBytes;

    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];

        NSUInteger totalBytesForImage = (int)_size.width * (int)_size.height * 4;
        // It appears that the width of a texture must be padded out to be a multiple of 8 (32 bytes) if reading from it using a texture cache

        GLubyte *rawImagePixels;

        CGDataProviderRef dataProvider = NULL;
        if ([GPUImageContext supportsFastTextureUpload])
        {
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
            // Rows may be padded; derive the effective width from bytes-per-row.
            NSUInteger paddedWidthOfImage = CVPixelBufferGetBytesPerRow(renderTarget) / 4.0;
            NSUInteger paddedBytesForImage = paddedWidthOfImage * (int)_size.height * 4;

            glFinish();
            CFRetain(renderTarget); // I need to retain the pixel buffer here and release in the data source callback to prevent its bytes from being prematurely deallocated during a photo write operation
            [self lockForReading];
            rawImagePixels = (GLubyte *)CVPixelBufferGetBaseAddress(renderTarget);
            // Ownership of self transfers into the provider's info pointer; the
            // unlock callback bridges it back.
            dataProvider = CGDataProviderCreateWithData((__bridge_retained void*)self, rawImagePixels, paddedBytesForImage, dataProviderUnlockCallback);
            [[GPUImageContext sharedFramebufferCache] addFramebufferToActiveImageCaptureList:self]; // In case the framebuffer is swapped out on the filter, need to have a strong reference to it somewhere for it to hang on while the image is in existence
#else
#endif
        }
        else
        {
            // Slow path: copy the pixels out of the FBO.
            [self activateFramebuffer];
            rawImagePixels = (GLubyte *)malloc(totalBytesForImage);
            glReadPixels(0, 0, (int)_size.width, (int)_size.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels);
            dataProvider = CGDataProviderCreateWithData(NULL, rawImagePixels, totalBytesForImage, dataProviderReleaseCallback);
            [self unlock]; // Don't need to keep this around anymore
        }

        CGColorSpaceRef defaultRGBColorSpace = CGColorSpaceCreateDeviceRGB();

        if ([GPUImageContext supportsFastTextureUpload])
        {
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
            cgImageFromBytes = CGImageCreate((int)_size.width, (int)_size.height, 8, 32, CVPixelBufferGetBytesPerRow(renderTarget), defaultRGBColorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst, dataProvider, NULL, NO, kCGRenderingIntentDefault);
#else
#endif
        }
        else
        {
            cgImageFromBytes = CGImageCreate((int)_size.width, (int)_size.height, 8, 32, 4 * (int)_size.width, defaultRGBColorSpace, kCGBitmapByteOrderDefault | kCGImageAlphaLast, dataProvider, NULL, NO, kCGRenderingIntentDefault);
        }

        // Capture image with current device orientation
        CGDataProviderRelease(dataProvider);
        CGColorSpaceRelease(defaultRGBColorSpace);
    });

    return cgImageFromBytes;
}
// Releases the read lock and the extra retain taken on the render target's
// pixel buffer while a CGImage created from its bytes was alive.
- (void)restoreRenderTarget;
{
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
[self unlockAfterReading];
// Balances the CFRetain(renderTarget) performed before the buffer's bytes
// were handed to CGDataProviderCreateWithData (see -newCGImageFromFramebufferContents).
CFRelease(renderTarget);
#else
#endif
}
#pragma mark -
#pragma mark Raw data bytes
// Locks the render target's base address for CPU reads. Lock requests are
// reference-counted so nested readers take the CoreVideo lock only once;
// balance every call with -unlockAfterReading.
- (void)lockForReading
{
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
if ([GPUImageContext supportsFastTextureUpload])
{
if (readLockCount == 0)
{
// First reader acquires the actual CoreVideo base-address lock.
CVPixelBufferLockBaseAddress(renderTarget, 0);
}
readLockCount++;
}
#endif
}
// Balances a prior -lockForReading; the CoreVideo base-address lock is
// released only when the last outstanding reader unlocks.
- (void)unlockAfterReading
{
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
if ([GPUImageContext supportsFastTextureUpload])
{
NSAssert(readLockCount > 0, @"Unbalanced call to -[GPUImageFramebuffer unlockAfterReading]");
readLockCount--;
if (readLockCount == 0)
{
CVPixelBufferUnlockBaseAddress(renderTarget, 0);
}
}
#endif
}
// Number of bytes in one row of this framebuffer's backing store.
// On the fast-texture-upload path the CVPixelBuffer may pad its rows, so the
// buffer itself is authoritative; in every other case rows are tightly packed
// RGBA (4 bytes per pixel).
- (NSUInteger)bytesPerRow;
{
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
    if ([GPUImageContext supportsFastTextureUpload])
    {
        return CVPixelBufferGetBytesPerRow(renderTarget);
    }
#endif
    // TODO: do more with this on the non-texture-cache side
    return _size.width * 4;
}
// Returns a pointer to the framebuffer's pixel bytes (fast-upload path only).
// NOTE(review): the base address is returned after the read lock has already
// been dropped; callers appear to rely on the IOSurface backing keeping the
// pointer valid — confirm before using the result for long-lived reads.
- (GLubyte *)byteBuffer;
{
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
[self lockForReading];
GLubyte * bufferBytes = CVPixelBufferGetBaseAddress(renderTarget);
[self unlockAfterReading];
return bufferBytes;
#else
return NULL; // TODO: do more with this on the non-texture-cache side
#endif
}
// Exposes the CVPixelBuffer backing this framebuffer on platforms with
// texture-cache support; NULL elsewhere. Ownership is not transferred.
- (CVPixelBufferRef )pixelBuffer;
{
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
return renderTarget;
#else
return NULL; // TODO: do more with this on the non-texture-cache side
#endif
}
// Accessor for the OpenGL ES texture name backing this framebuffer.
- (GLuint)texture;
{
    return _texture;
}
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFramebufferCache.h
================================================
#import <Foundation/Foundation.h>
#import <QuartzCore/QuartzCore.h>
#import "GPUImageFramebuffer.h"
/// Pool of reusable GPUImageFramebuffer instances, keyed by size and texture
/// options, so filters can recycle GPU memory instead of reallocating per frame.
@interface GPUImageFramebufferCache : NSObject

// Framebuffer management
/// Returns a framebuffer matching the given size/options, reusing a cached one
/// when available. The returned framebuffer is locked once; balance with -unlock.
- (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)textureOptions onlyTexture:(BOOL)onlyTexture;
/// Convenience fetch using the default texture options.
- (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize onlyTexture:(BOOL)onlyTexture;
/// Clears the framebuffer's locks and places it back in the pool for reuse.
- (void)returnFramebufferToCache:(GPUImageFramebuffer *)framebuffer;
/// Drops every cached (currently unassigned) framebuffer, e.g. on memory warning.
- (void)purgeAllUnassignedFramebuffers;
/// Strongly retains a framebuffer whose bytes back a live image capture.
- (void)addFramebufferToActiveImageCaptureList:(GPUImageFramebuffer *)framebuffer;
/// Releases a framebuffer previously added to the active image capture list.
- (void)removeFramebufferFromActiveImageCaptureList:(GPUImageFramebuffer *)framebuffer;
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFramebufferCache.m
================================================
#import "GPUImageFramebufferCache.h"
#import "GPUImageContext.h"
#import "GPUImageOutput.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#else
#endif
@interface GPUImageFramebufferCache()
{
// NSCache *framebufferCache;
NSMutableDictionary *framebufferCache;
NSMutableDictionary *framebufferTypeCounts;
NSMutableArray *activeImageCaptureList; // Where framebuffers that may be lost by a filter, but which are still needed for a UIImage, etc., are stored
id memoryWarningObserver;
dispatch_queue_t framebufferCacheQueue;
}
- (NSString *)hashForSize:(CGSize)size textureOptions:(GPUTextureOptions)textureOptions onlyTexture:(BOOL)onlyTexture;
@end
@implementation GPUImageFramebufferCache
#pragma mark -
#pragma mark Initialization and teardown
/// Designated initializer: sets up the cache dictionaries, the serial cache
/// queue, and (on iOS) a memory-warning observer that purges unassigned
/// framebuffers.
- (id)init;
{
    if (!(self = [super init]))
    {
        return nil;
    }

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
    // Capture self weakly, not __unsafe_unretained: NSNotificationCenter
    // retains this block for the lifetime of the observation, so an
    // __unsafe_unretained capture would dangle if the cache were deallocated
    // before the observer is removed.
    __weak __typeof__ (self) weakSelf = self;
    memoryWarningObserver = [[NSNotificationCenter defaultCenter] addObserverForName:UIApplicationDidReceiveMemoryWarningNotification object:nil queue:nil usingBlock:^(NSNotification *note) {
        __typeof__ (self) strongSelf = weakSelf;
        if (strongSelf) {
            [strongSelf purgeAllUnassignedFramebuffers];
        }
    }];
#else
#endif

    // framebufferCache = [[NSCache alloc] init];
    framebufferCache = [[NSMutableDictionary alloc] init];
    framebufferTypeCounts = [[NSMutableDictionary alloc] init];
    activeImageCaptureList = [[NSMutableArray alloc] init];
    framebufferCacheQueue = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.framebufferCacheQueue", GPUImageDefaultQueueAttribute());

    return self;
}
/// Tears down the memory-warning observation. Block-based observers must be
/// removed via the token returned by -addObserverForName:object:queue:usingBlock:;
/// -removeObserver:self does not match the opaque observer object and would
/// leak the observation (and its block).
- (void)dealloc;
{
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
    if (memoryWarningObserver != nil)
    {
        [[NSNotificationCenter defaultCenter] removeObserver:memoryWarningObserver];
        memoryWarningObserver = nil;
    }
#else
#endif
}
#pragma mark -
#pragma mark Framebuffer management
// Builds the cache lookup key for a framebuffer configuration: dimensions plus
// every texture option, with a "-NOFB" suffix distinguishing texture-only
// entries (no attached framebuffer object) from full framebuffers.
- (NSString *)hashForSize:(CGSize)size textureOptions:(GPUTextureOptions)textureOptions onlyTexture:(BOOL)onlyTexture;
{
    NSString *baseHash = [NSString stringWithFormat:@"%.1fx%.1f-%d:%d:%d:%d:%d:%d:%d", size.width, size.height, textureOptions.minFilter, textureOptions.magFilter, textureOptions.wrapS, textureOptions.wrapT, textureOptions.internalFormat, textureOptions.format, textureOptions.type];
    return onlyTexture ? [baseHash stringByAppendingString:@"-NOFB"] : baseHash;
}
// Fetches a framebuffer matching the given configuration, reusing a cached one
// when possible. All cache bookkeeping happens synchronously on the shared
// video processing queue. The returned framebuffer is locked once for the caller.
- (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)textureOptions onlyTexture:(BOOL)onlyTexture;
{
__block GPUImageFramebuffer *framebufferFromCache = nil;
// dispatch_sync(framebufferCacheQueue, ^{
runSynchronouslyOnVideoProcessingQueue(^{
NSString *lookupHash = [self hashForSize:framebufferSize textureOptions:textureOptions onlyTexture:onlyTexture];
NSNumber *numberOfMatchingTexturesInCache = [framebufferTypeCounts objectForKey:lookupHash];
NSInteger numberOfMatchingTextures = [numberOfMatchingTexturesInCache integerValue];
if ([numberOfMatchingTexturesInCache integerValue] < 1)
{
// Nothing in the cache, create a new framebuffer to use
framebufferFromCache = [[GPUImageFramebuffer alloc] initWithSize:framebufferSize textureOptions:textureOptions onlyTexture:onlyTexture];
}
else
{
// Something found, pull the old framebuffer and decrement the count
// Scan downward from the highest per-type index until a live entry is found;
// entries can disappear from the cache behind our back.
NSInteger currentTextureID = (numberOfMatchingTextures - 1);
while ((framebufferFromCache == nil) && (currentTextureID >= 0))
{
NSString *textureHash = [NSString stringWithFormat:@"%@-%ld", lookupHash, (long)currentTextureID];
framebufferFromCache = [framebufferCache objectForKey:textureHash];
// Test the values in the cache first, to see if they got invalidated behind our back
if (framebufferFromCache != nil)
{
// Withdraw this from the cache while it's in use
[framebufferCache removeObjectForKey:textureHash];
}
currentTextureID--;
}
// The loop decremented once past the slot it consumed (or past -1); the
// post-increment leaves the count of entries still resident for this type.
currentTextureID++;
[framebufferTypeCounts setObject:[NSNumber numberWithInteger:currentTextureID] forKey:lookupHash];
if (framebufferFromCache == nil)
{
framebufferFromCache = [[GPUImageFramebuffer alloc] initWithSize:framebufferSize textureOptions:textureOptions onlyTexture:onlyTexture];
}
}
});
[framebufferFromCache lock];
return framebufferFromCache;
}
// Convenience fetch using GPUImage's default texture configuration:
// bilinear filtering, edge clamping, and BGRA byte ordering.
- (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize onlyTexture:(BOOL)onlyTexture;
{
    GPUTextureOptions defaultTextureOptions = {
        .minFilter = GL_LINEAR,
        .magFilter = GL_LINEAR,
        .wrapS = GL_CLAMP_TO_EDGE,
        .wrapT = GL_CLAMP_TO_EDGE,
        .internalFormat = GL_RGBA,
        .format = GL_BGRA,
        .type = GL_UNSIGNED_BYTE,
    };

    return [self fetchFramebufferForSize:framebufferSize textureOptions:defaultTextureOptions onlyTexture:onlyTexture];
}
// Returns a framebuffer to the pool. Its locks are cleared immediately; the
// cache insertion itself happens asynchronously on the video processing queue,
// appending the framebuffer at the next free per-type index.
- (void)returnFramebufferToCache:(GPUImageFramebuffer *)framebuffer;
{
[framebuffer clearAllLocks];
// dispatch_async(framebufferCacheQueue, ^{
runAsynchronouslyOnVideoProcessingQueue(^{
CGSize framebufferSize = framebuffer.size;
GPUTextureOptions framebufferTextureOptions = framebuffer.textureOptions;
NSString *lookupHash = [self hashForSize:framebufferSize textureOptions:framebufferTextureOptions onlyTexture:framebuffer.missingFramebuffer];
NSNumber *numberOfMatchingTexturesInCache = [framebufferTypeCounts objectForKey:lookupHash];
NSInteger numberOfMatchingTextures = [numberOfMatchingTexturesInCache integerValue];
NSString *textureHash = [NSString stringWithFormat:@"%@-%ld", lookupHash, (long)numberOfMatchingTextures];
// [framebufferCache setObject:framebuffer forKey:textureHash cost:round(framebufferSize.width * framebufferSize.height * 4.0)];
[framebufferCache setObject:framebuffer forKey:textureHash];
[framebufferTypeCounts setObject:[NSNumber numberWithInteger:(numberOfMatchingTextures + 1)] forKey:lookupHash];
});
}
// Drops every pooled framebuffer and resets the per-type counts, then flushes
// the CoreVideo texture cache. Called on memory warnings; asynchronous on the
// video processing queue.
- (void)purgeAllUnassignedFramebuffers;
{
runAsynchronouslyOnVideoProcessingQueue(^{
// dispatch_async(framebufferCacheQueue, ^{
[framebufferCache removeAllObjects];
[framebufferTypeCounts removeAllObjects];
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
CVOpenGLESTextureCacheFlush([[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], 0);
#else
#endif
});
}
// Keeps a strong reference to a framebuffer whose bytes back a live CGImage,
// so the buffer isn't recycled while the image exists. Serialized on the
// video processing queue alongside all other cache mutation.
- (void)addFramebufferToActiveImageCaptureList:(GPUImageFramebuffer *)framebuffer;
{
runAsynchronouslyOnVideoProcessingQueue(^{
// dispatch_async(framebufferCacheQueue, ^{
[activeImageCaptureList addObject:framebuffer];
});
}
// Releases the strong reference taken by -addFramebufferToActiveImageCaptureList:,
// allowing the framebuffer to be deallocated or recycled.
- (void)removeFramebufferFromActiveImageCaptureList:(GPUImageFramebuffer *)framebuffer;
{
runAsynchronouslyOnVideoProcessingQueue(^{
// dispatch_async(framebufferCacheQueue, ^{
[activeImageCaptureList removeObject:framebuffer];
});
}
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageMovie.h
================================================
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import "GPUImageContext.h"
#import "GPUImageOutput.h"
/** Protocol for receiving playback callbacks from GPUImageMovie.
*/
@protocol GPUImageMovieDelegate <NSObject>
@optional
/// Called when the movie has finished playing (all frames read and processed).
- (void)didCompletePlayingMovie;
/// Called just before a frame with the given presentation time is processed.
- (void)willDisplayAtTime:(CMTime)time;
@end
/** Source object for filtering movies. Reads frames from an AVAsset, NSURL,
 or AVPlayerItem and feeds them into a GPUImage filter chain.
*/
@interface GPUImageMovie : GPUImageOutput
@property (readwrite, retain) AVAsset *asset;
@property (readwrite, retain) AVPlayerItem *playerItem;
@property(readwrite, retain) NSURL *url;
/** This enables the benchmarking mode, which logs out instantaneous and average frame times to the console
*/
@property(readwrite, nonatomic) BOOL runBenchmark;
/** This determines whether to play back a movie as fast as the frames can be processed, or if the original speed of the movie should be respected. Defaults to NO.
*/
@property(readwrite, nonatomic) BOOL playAtActualSpeed;
/** This determines whether the video should repeat (loop) at the end and restart from the beginning. Defaults to NO.
*/
@property(readwrite, nonatomic) BOOL shouldRepeat;
/** This specifies the progress of the process on a scale from 0 to 1.0. A value of 0 means the process has not yet begun, A value of 1.0 means the conversion is complete.
 This property is not key-value observable.
*/
@property(readonly, nonatomic) float progress;
/** Delegate notified when the movie completes playing and before frames display.
 NOTE(review): declared assign rather than weak — delegate owners must clear this before deallocating to avoid a dangling pointer; consider weak.
*/
@property (readwrite, nonatomic, assign) id <GPUImageMovieDelegate>delegate;
@property (readonly, nonatomic) AVAssetReader *assetReader;
@property (readonly, nonatomic) BOOL audioEncodingIsFinished;
@property (readonly, nonatomic) BOOL videoEncodingIsFinished;
@property (readwrite, nonatomic) BOOL isPaused;
/// @name Initialization and teardown
- (id)initWithAsset:(AVAsset *)asset;
- (id)initWithPlayerItem:(AVPlayerItem *)playerItem;
- (id)initWithURL:(NSURL *)url;
- (void)yuvConversionSetup;
/// @name Movie processing
- (void)enableSynchronizedEncodingUsingMovieWriter:(GPUImageMovieWriter *)movieWriter;
- (BOOL)readNextVideoFrameFromOutput:(AVAssetReaderOutput *)readerVideoTrackOutput;
- (BOOL)readNextAudioSampleFromOutput:(AVAssetReaderOutput *)readerAudioTrackOutput;
- (void)startProcessing;
- (void)endProcessing;
- (void)cancelProcessing;
- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageMovie.m
================================================
#import "GPUImageMovie.h"
#import "GPUImageMovieWriter.h"
#import "GPUImageFilter.h"
#import "GPUImageVideoCamera.h"
// Private state: asset-reader/player-item plumbing, frame timing for
// actual-speed playback, and the YUV->RGB conversion GL program.
@interface GPUImageMovie () <AVPlayerItemOutputPullDelegate>
{
BOOL audioEncodingIsFinished, videoEncodingIsFinished;
GPUImageMovieWriter *synchronizedMovieWriter;
AVAssetReader *reader;
AVPlayerItemVideoOutput *playerItemOutput;
CADisplayLink *displayLink;
CMTime previousFrameTime, processingFrameTime;
CFAbsoluteTime previousActualFrameTime;
BOOL keepLooping;
GLuint luminanceTexture, chrominanceTexture;
GLProgram *yuvConversionProgram;
GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute;
GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform;
GLint yuvConversionMatrixUniform;
const GLfloat *_preferredConversion;
BOOL isFullYUVRange;
int imageBufferWidth, imageBufferHeight;
// Accumulated sleep-overshoot compensation; fixes a delay bug when playing
// movies at actual speed (usleep tends to oversleep).
CGFloat delayoOffsetTime;
}
- (void)processAsset;
@end
@implementation GPUImageMovie
@synthesize url = _url;
@synthesize asset = _asset;
@synthesize runBenchmark = _runBenchmark;
@synthesize playAtActualSpeed = _playAtActualSpeed;
@synthesize delegate = _delegate;
@synthesize shouldRepeat = _shouldRepeat;
#pragma mark -
#pragma mark Initialization and teardown
/// Initializes a movie source that reads from a file URL.
- (id)initWithURL:(NSURL *)url;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }

    // Prepare the YUV->RGB conversion program up front.
    [self yuvConversionSetup];

    self.url = url;
    self.asset = nil;

    return self;
}
/// Initializes a movie source that reads from an already-loaded AVAsset.
- (id)initWithAsset:(AVAsset *)asset;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }

    // Prepare the YUV->RGB conversion program up front.
    [self yuvConversionSetup];

    self.url = nil;
    self.asset = asset;

    return self;
}
/// Initializes a movie source that pulls frames from an AVPlayerItem
/// (display-link driven playback).
- (id)initWithPlayerItem:(AVPlayerItem *)playerItem;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }

    // Prepare the YUV->RGB conversion program up front.
    [self yuvConversionSetup];

    self.url = nil;
    self.asset = nil;
    self.playerItem = playerItem;

    return self;
}
// Compiles/links the YUV->RGB conversion shader program (full-range, BT.709
// default) and caches its attribute/uniform locations. Runs synchronously on
// the video processing queue; no-op when fast texture upload is unsupported.
- (void)yuvConversionSetup;
{
if ([GPUImageContext supportsFastTextureUpload])
{
runSynchronouslyOnVideoProcessingQueue(^{
[GPUImageContext useImageProcessingContext];
_preferredConversion = kColorConversion709;
isFullYUVRange = YES;
yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVFullRangeConversionForLAFragmentShaderString];
if (!yuvConversionProgram.initialized)
{
// Attributes must be bound before linking.
[yuvConversionProgram addAttribute:@"position"];
[yuvConversionProgram addAttribute:@"inputTextureCoordinate"];
if (![yuvConversionProgram link])
{
NSString *progLog = [yuvConversionProgram programLog];
NSLog(@"Program link log: %@", progLog);
NSString *fragLog = [yuvConversionProgram fragmentShaderLog];
NSLog(@"Fragment shader compile log: %@", fragLog);
NSString *vertLog = [yuvConversionProgram vertexShaderLog];
NSLog(@"Vertex shader compile log: %@", vertLog);
yuvConversionProgram = nil;
NSAssert(NO, @"Filter shader link failed");
}
}
yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@"position"];
yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@"inputTextureCoordinate"];
yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@"luminanceTexture"];
yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@"chrominanceTexture"];
yuvConversionMatrixUniform = [yuvConversionProgram uniformIndex:@"colorConversionMatrix"];
[GPUImageContext setActiveShaderProgram:yuvConversionProgram];
glEnableVertexAttribArray(yuvConversionPositionAttribute);
glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute);
});
}
}
// Intentionally empty: display-link invalidation was moved into
// -endProcessing so it happens on the correct run loop at a deterministic time.
- (void)dealloc
{
// Moved into endProcessing
//if (self.playerItem && (displayLink != nil))
//{
// [displayLink invalidate]; // remove from all run loops
// displayLink = nil;
//}
}
#pragma mark -
#pragma mark Movie processing
/// Ties frame reading to a movie writer: the writer pulls video/audio samples
/// on demand instead of this source free-running, so offline transcodes stay
/// in sync (encodingLiveVideo is disabled accordingly).
- (void)enableSynchronizedEncodingUsingMovieWriter:(GPUImageMovieWriter *)movieWriter;
{
    movieWriter.encodingLiveVideo = NO;
    synchronizedMovieWriter = movieWriter;
}
// Kicks off playback. Dispatches on the configured source: an AVPlayerItem is
// driven by a display link; an AVAsset is read directly; a URL is first loaded
// asynchronously into an AVURLAsset, then read.
- (void)startProcessing
{
if( self.playerItem ) {
[self processPlayerItem];
return;
}
// No URL means an AVAsset was supplied directly.
if(self.url == nil)
{
[self processAsset];
return;
}
if (_shouldRepeat) keepLooping = YES;
previousFrameTime = kCMTimeZero;
previousActualFrameTime = CFAbsoluteTimeGetCurrent();
NSDictionary *inputOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
AVURLAsset *inputAsset = [[AVURLAsset alloc] initWithURL:self.url options:inputOptions];
// __block keeps self alive until the async load completes, then breaks the
// reference by nilling it inside the block.
GPUImageMovie __block *blockSelf = self;
[inputAsset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler: ^{
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
NSError *error = nil;
AVKeyValueStatus tracksStatus = [inputAsset statusOfValueForKey:@"tracks" error:&error];
if (tracksStatus != AVKeyValueStatusLoaded)
{
return;
}
blockSelf.asset = inputAsset;
[blockSelf processAsset];
blockSelf = nil;
});
}];
}
/// Builds an AVAssetReader for self.asset with a video track output (YUV 420
/// full-range when fast texture upload is available, BGRA otherwise) and,
/// when an audioEncodingTarget is set, an audio track output.
/// Returns nil if the reader cannot be created or the asset has no video
/// track; previously a video-less asset crashed with an NSRangeException
/// from -objectAtIndex:0 on an empty array.
- (AVAssetReader*)createAssetReader
{
    NSError *error = nil;
    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:self.asset error:&error];
    if (assetReader == nil)
    {
        // Surface the creation failure instead of silently discarding the error.
        NSLog(@"Error creating asset reader: %@", error);
        return nil;
    }

    NSMutableDictionary *outputSettings = [NSMutableDictionary dictionary];
    if ([GPUImageContext supportsFastTextureUpload]) {
        [outputSettings setObject:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        isFullYUVRange = YES;
    }
    else {
        [outputSettings setObject:@(kCVPixelFormatType_32BGRA) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        isFullYUVRange = NO;
    }

    // Guard against assets with no video track.
    AVAssetTrack *videoTrack = [[self.asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (videoTrack == nil)
    {
        NSLog(@"Error reading from file at URL: %@ (no video track)", self.url);
        return nil;
    }

    // Maybe set alwaysCopiesSampleData to NO on iOS 5.0 for faster video decoding
    AVAssetReaderTrackOutput *readerVideoTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:outputSettings];
    readerVideoTrackOutput.alwaysCopiesSampleData = NO;
    [assetReader addOutput:readerVideoTrackOutput];

    NSArray *audioTracks = [self.asset tracksWithMediaType:AVMediaTypeAudio];
    BOOL shouldRecordAudioTrack = (([audioTracks count] > 0) && (self.audioEncodingTarget != nil) );
    AVAssetReaderTrackOutput *readerAudioTrackOutput = nil;

    if (shouldRecordAudioTrack)
    {
        [self.audioEncodingTarget setShouldInvalidateAudioSampleWhenDone:YES];

        // This might need to be extended to handle movies with more than one audio track
        AVAssetTrack* audioTrack = [audioTracks objectAtIndex:0];
        readerAudioTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
        readerAudioTrackOutput.alwaysCopiesSampleData = NO;
        [assetReader addOutput:readerAudioTrackOutput];
    }

    return assetReader;
}
// Creates the asset reader, then pumps frames: either via the synchronized
// movie writer's pull callbacks, or in a tight read loop until the reader
// stops. On completion, either loops (restarting from the main queue) or ends
// processing.
- (void)processAsset
{
reader = [self createAssetReader];
AVAssetReaderOutput *readerVideoTrackOutput = nil;
AVAssetReaderOutput *readerAudioTrackOutput = nil;
audioEncodingIsFinished = YES;
for( AVAssetReaderOutput *output in reader.outputs ) {
if( [output.mediaType isEqualToString:AVMediaTypeAudio] ) {
audioEncodingIsFinished = NO;
readerAudioTrackOutput = output;
}
else if( [output.mediaType isEqualToString:AVMediaTypeVideo] ) {
readerVideoTrackOutput = output;
}
}
if ([reader startReading] == NO)
{
NSLog(@"Error reading from file at URL: %@", self.url);
return;
}
// __unsafe_unretained avoids a retain cycle through the writer callbacks;
// self outlives them for the duration of processing.
__unsafe_unretained GPUImageMovie *weakSelf = self;
if (synchronizedMovieWriter != nil)
{
// Writer-driven mode: the movie writer pulls samples when its inputs are ready.
[synchronizedMovieWriter setVideoInputReadyCallback:^{
return [weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];
}];
[synchronizedMovieWriter setAudioInputReadyCallback:^{
return [weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
}];
[synchronizedMovieWriter enableSynchronizationCallbacks];
}
else
{
// Free-running mode: read as fast as frames can be processed (or paced by
// playAtActualSpeed inside readNextVideoFrameFromOutput:).
while (reader.status == AVAssetReaderStatusReading && (!_shouldRepeat || keepLooping))
{
[weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];
if ( (readerAudioTrackOutput) && (!audioEncodingIsFinished) )
{
[weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
}
}
if (reader.status == AVAssetReaderStatusCompleted) {
[reader cancelReading];
if (keepLooping) {
reader = nil;
dispatch_async(dispatch_get_main_queue(), ^{
[self startProcessing];
});
} else {
[weakSelf endProcessing];
}
}
}
}
// Sets up player-item playback: a display link (initially paused) drives frame
// pulls from an AVPlayerItemVideoOutput attached to the player item. The
// output notifies us (on the shared context queue) when media data arrives.
- (void)processPlayerItem
{
runSynchronouslyOnVideoProcessingQueue(^{
displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkCallback:)];
[displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSRunLoopCommonModes];
// Paused until -outputMediaDataWillChange: signals frames are available.
[displayLink setPaused:YES];
dispatch_queue_t videoProcessingQueue = [GPUImageContext sharedContextQueue];
NSMutableDictionary *pixBuffAttributes = [NSMutableDictionary dictionary];
if ([GPUImageContext supportsFastTextureUpload]) {
[pixBuffAttributes setObject:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
}
else {
[pixBuffAttributes setObject:@(kCVPixelFormatType_32BGRA) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
}
playerItemOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:pixBuffAttributes];
[playerItemOutput setDelegate:self queue:videoProcessingQueue];
[_playerItem addOutput:playerItemOutput];
[playerItemOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0.1];
});
}
// AVPlayerItemOutputPullDelegate: new media data is about to arrive, so
// resume the display link that pulls frames each vsync.
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender
{
// Restart display link.
[displayLink setPaused:NO];
}
// Display-link tick for player-item playback: computes the next vsync time,
// and if the player-item output has a pixel buffer for that time, copies it
// and processes it synchronously on the video queue.
- (void)displayLinkCallback:(CADisplayLink *)sender
{
/*
The callback gets called once every Vsync.
Using the display link's timestamp and duration we can compute the next time the screen will be refreshed, and copy the pixel buffer for that time
This pixel buffer can then be processed and later rendered on screen.
*/
// Calculate the nextVsync time which is when the screen will be refreshed next.
CFTimeInterval nextVSync = ([sender timestamp] + [sender duration]);
CMTime outputItemTime = [playerItemOutput itemTimeForHostTime:nextVSync];
if ([playerItemOutput hasNewPixelBufferForItemTime:outputItemTime]) {
__unsafe_unretained GPUImageMovie *weakSelf = self;
// copyPixelBufferForItemTime: follows the copy rule; released after processing.
CVPixelBufferRef pixelBuffer = [playerItemOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL];
if( pixelBuffer )
runSynchronouslyOnVideoProcessingQueue(^{
[weakSelf processMovieFrame:pixelBuffer withSampleTime:outputItemTime];
CFRelease(pixelBuffer);
});
}
}
// Fixes a movie-playback delay bug; this method diverges from upstream GPUImage.
// Reads and processes one video sample from the reader output, pacing with
// usleep when playAtActualSpeed is on (compensating for usleep overshoot via
// delayoOffsetTime). Returns YES if a frame was consumed.
- (BOOL)readNextVideoFrameFromOutput:(AVAssetReaderOutput *)readerVideoTrackOutput;
{
// Busy-wait (100ms polls) while paused.
while (self.isPaused) {
usleep(100000);
}
if (reader.status == AVAssetReaderStatusReading && ! videoEncodingIsFinished)
{
CMSampleBufferRef sampleBufferRef = [readerVideoTrackOutput copyNextSampleBuffer];
if (sampleBufferRef)
{
//NSLog(@"read a video frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferRef))));
if (_playAtActualSpeed)
{
// Do this outside of the video processing queue to not slow that down while waiting
CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferRef);
CMTime differenceFromLastFrame = CMTimeSubtract(currentSampleTime, previousFrameTime);
if ( differenceFromLastFrame.value > 0 ) {
CFAbsoluteTime currentActualTime = CFAbsoluteTimeGetCurrent();
CGFloat frameTimeDifference = CMTimeGetSeconds(differenceFromLastFrame);
CGFloat actualTimeDifference = currentActualTime - previousActualFrameTime;
if (frameTimeDifference > actualTimeDifference ){
// Sleep only for the shortfall, minus the measured overshoot of the
// previous sleep; then record this sleep's own overshoot for next time.
CGFloat difTime = (frameTimeDifference - actualTimeDifference) - delayoOffsetTime;
if(difTime > 0){
double time = 1000000.0 * difTime;
usleep(time);
}
delayoOffsetTime = CFAbsoluteTimeGetCurrent() - currentActualTime - difTime;
if (delayoOffsetTime < 0) {
delayoOffsetTime = 0;
}
}
previousFrameTime = currentSampleTime;
previousActualFrameTime = CFAbsoluteTimeGetCurrent();
}
}
__unsafe_unretained GPUImageMovie *weakSelf = self;
runSynchronouslyOnVideoProcessingQueue(^{
[weakSelf processMovieFrame:sampleBufferRef];
CMSampleBufferInvalidate(sampleBufferRef);
CFRelease(sampleBufferRef);
});
return YES;
}
else
{
// No more samples: mark video done; end only when audio is done too.
if (!keepLooping) {
videoEncodingIsFinished = YES;
if( videoEncodingIsFinished && audioEncodingIsFinished )
[self endProcessing];
}
}
}
else if (synchronizedMovieWriter != nil)
{
if (reader.status == AVAssetReaderStatusCompleted)
{
[self endProcessing];
}
}
return NO;
}
// Reads one audio sample from the reader output and forwards it to the
// audio encoding target. Returns YES if a sample was consumed.
- (BOOL)readNextAudioSampleFromOutput:(AVAssetReaderOutput *)readerAudioTrackOutput;
{
if (reader.status == AVAssetReaderStatusReading && ! audioEncodingIsFinished)
{
CMSampleBufferRef audioSampleBufferRef = [readerAudioTrackOutput copyNextSampleBuffer];
if (audioSampleBufferRef)
{
//NSLog(@"read an audio frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, CMSampleBufferGetOutputPresentationTimeStamp(audioSampleBufferRef))));
[self.audioEncodingTarget processAudioBuffer:audioSampleBufferRef];
CFRelease(audioSampleBufferRef);
return YES;
}
else
{
// No more samples: mark audio done; end only when video is done too.
if (!keepLooping) {
audioEncodingIsFinished = YES;
if( videoEncodingIsFinished && audioEncodingIsFinished )
[self endProcessing];
}
}
}
else if (synchronizedMovieWriter != nil)
{
if (reader.status == AVAssetReaderStatusCompleted || reader.status == AVAssetReaderStatusFailed ||
reader.status == AVAssetReaderStatusCancelled)
{
[self endProcessing];
}
}
return NO;
}
// CMSampleBuffer entry point: unwraps the pixel buffer and its output
// presentation timestamp, records the time for progress reporting, then
// forwards to the pixel-buffer processing path.
- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;
{
    CVImageBufferRef frameBuffer = CMSampleBufferGetImageBuffer(movieSampleBuffer);
    CMTime frameTime = CMSampleBufferGetOutputPresentationTimeStamp(movieSampleBuffer);

    processingFrameTime = frameTime;
    [self processMovieFrame:frameBuffer withSampleTime:frameTime];
}
/// Fraction of the movie processed so far, in [0, 1]: 0 before reading has
/// produced a frame, 1 once the reader has completed.
- (float)progress
{
    if ( AVAssetReaderStatusReading == reader.status )
    {
        // Guard zero timescales (before the first frame, or indefinite-duration
        // assets) and zero/negative durations, which previously produced
        // NaN/inf from the divisions below.
        if ((processingFrameTime.timescale == 0) || (self.asset.duration.timescale == 0))
        {
            return 0.f;
        }
        float current = processingFrameTime.value * 1.0f / processingFrameTime.timescale;
        float duration = self.asset.duration.value * 1.0f / self.asset.duration.timescale;
        if (duration <= 0.f)
        {
            return 0.f;
        }
        return current / duration;
    }
    else if ( AVAssetReaderStatusCompleted == reader.status )
    {
        return 1.f;
    }
    else
    {
        return 0.f;
    }
}
// Core per-frame path: selects the YUV->RGB color conversion matrix from the
// buffer's attachments, then uploads the frame to GL. On the fast path, planar
// YUV buffers are mapped as luminance + chrominance textures via the CoreVideo
// texture cache and converted to RGB; otherwise the BGRA bytes are uploaded
// with glTexImage2D. Finally all targets are notified of the new frame.
- (void)processMovieFrame:(CVPixelBufferRef)movieFrame withSampleTime:(CMTime)currentSampleTime
{
if (self.delegate && [self.delegate respondsToSelector:@selector(willDisplayAtTime:)])
{
[self.delegate willDisplayAtTime:currentSampleTime];
}
int bufferHeight = (int) CVPixelBufferGetHeight(movieFrame);
int bufferWidth = (int) CVPixelBufferGetWidth(movieFrame);
// Pick the conversion matrix: BT.601 (full or video range) when the buffer is
// tagged ITU-R 601 or untagged; BT.709 for any other tag.
CFTypeRef colorAttachments = CVBufferGetAttachment(movieFrame, kCVImageBufferYCbCrMatrixKey, NULL);
if (colorAttachments != NULL)
{
if(CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo)
{
if (isFullYUVRange)
{
_preferredConversion = kColorConversion601FullRange;
}
else
{
_preferredConversion = kColorConversion601;
}
}
else
{
_preferredConversion = kColorConversion709;
}
}
else
{
if (isFullYUVRange)
{
_preferredConversion = kColorConversion601FullRange;
}
else
{
_preferredConversion = kColorConversion601;
}
}
CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
// Fix issue 1580
[GPUImageContext useImageProcessingContext];
if ([GPUImageContext supportsFastTextureUpload])
{
CVOpenGLESTextureRef luminanceTextureRef = NULL;
CVOpenGLESTextureRef chrominanceTextureRef = NULL;
// if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
if (CVPixelBufferGetPlaneCount(movieFrame) > 0) // Check for YUV planar inputs to do RGB conversion
{
// NOTE(review): && means the cached dimensions update only when BOTH differ;
// a frame changing in one dimension only would keep stale values — confirm
// whether || was intended (this matches upstream GPUImage as-is).
if ( (imageBufferWidth != bufferWidth) && (imageBufferHeight != bufferHeight) )
{
imageBufferWidth = bufferWidth;
imageBufferHeight = bufferHeight;
}
CVReturn err;
// Y-plane
glActiveTexture(GL_TEXTURE4);
if ([GPUImageContext deviceSupportsRedTextures])
{
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
}
else
{
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
}
if (err)
{
NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
}
luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef);
glBindTexture(GL_TEXTURE_2D, luminanceTexture);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// UV-plane (interleaved CbCr at half resolution, plane index 1)
glActiveTexture(GL_TEXTURE5);
if ([GPUImageContext deviceSupportsRedTextures])
{
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
}
else
{
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
}
if (err)
{
NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
}
chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef);
glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// if (!allTargetsWantMonochromeData)
// {
[self convertYUVToRGBOutput];
// }
// Hand the converted framebuffer to every target, then drop our hold on it
// before notifying them a frame is ready.
for (id<GPUImageInput> currentTarget in targets)
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
[currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
[currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
}
[outputFramebuffer unlock];
for (id<GPUImageInput> currentTarget in targets)
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
[currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
}
// NOTE(review): this unlock has no matching CVPixelBufferLockBaseAddress in
// this method — verify lock/unlock balance for this path.
CVPixelBufferUnlockBaseAddress(movieFrame, 0);
CFRelease(luminanceTextureRef);
CFRelease(chrominanceTextureRef);
}
else
{
// TODO: Mesh this with the new framebuffer cache
// CVPixelBufferLockBaseAddress(movieFrame, 0);
//
// CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, movieFrame, NULL, GL_TEXTURE_2D, GL_RGBA, bufferWidth, bufferHeight, GL_BGRA, GL_UNSIGNED_BYTE, 0, &texture);
//
// if (!texture || err) {
// NSLog(@"Movie CVOpenGLESTextureCacheCreateTextureFromImage failed (error: %d)", err);
// NSAssert(NO, @"Camera failure");
// return;
// }
//
// outputTexture = CVOpenGLESTextureGetName(texture);
// // glBindTexture(CVOpenGLESTextureGetTarget(texture), outputTexture);
// glBindTexture(GL_TEXTURE_2D, outputTexture);
// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
//
// for (id<GPUImageInput> currentTarget in targets)
// {
// NSInteger indexOfObject = [targets indexOfObject:currentTarget];
// NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
//
// [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
// [currentTarget setInputTexture:outputTexture atIndex:targetTextureIndex];
//
// [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
// }
//
// CVPixelBufferUnlockBaseAddress(movieFrame, 0);
// CVOpenGLESTextureCacheFlush(coreVideoTextureCache, 0);
// CFRelease(texture);
//
// outputTexture = 0;
}
}
else
{
// Upload to texture
CVPixelBufferLockBaseAddress(movieFrame, 0);
outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bufferWidth, bufferHeight) textureOptions:self.outputTextureOptions onlyTexture:YES];
glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
// Using BGRA extension to pull in video frame data directly
glTexImage2D(GL_TEXTURE_2D,
0,
self.outputTextureOptions.internalFormat,
bufferWidth,
bufferHeight,
0,
self.outputTextureOptions.format,
self.outputTextureOptions.type,
CVPixelBufferGetBaseAddress(movieFrame));
for (id<GPUImageInput> currentTarget in targets)
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
[currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
[currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
}
[outputFramebuffer unlock];
for (id<GPUImageInput> currentTarget in targets)
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
[currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
}
CVPixelBufferUnlockBaseAddress(movieFrame, 0);
}
if (_runBenchmark)
{
CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
NSLog(@"Current frame time : %f ms", 1000.0 * currentFrameTime);
}
}
// Shuts the movie pipeline down: stops the read loop, notifies downstream
// targets, detaches any synchronized movie writer, tears down the display
// link, and fires the completion delegate callback exactly once.
- (void)endProcessing;
{
    // Stop the frame-reading loop and pause display-link-driven playback.
    keepLooping = NO;
    [displayLink setPaused:YES];

    // Propagate end-of-stream to every downstream target.
    [targets enumerateObjectsUsingBlock:^(id<GPUImageInput> target, NSUInteger idx, BOOL *stop) {
        [target endProcessing];
    }];

    // Detach the writer callbacks so the writer stops pulling frames from us.
    if (synchronizedMovieWriter != nil)
    {
        [synchronizedMovieWriter setVideoInputReadyCallback:^{ return NO; }];
        [synchronizedMovieWriter setAudioInputReadyCallback:^{ return NO; }];
    }

    // The display link only exists for AVPlayerItem-driven playback; remove it
    // from every run loop it was scheduled on.
    if (self.playerItem && (displayLink != nil))
    {
        [displayLink invalidate];
        displayLink = nil;
    }

    // Tell the delegate playback finished, then break the reference so the
    // callback cannot fire twice.
    if ([self.delegate respondsToSelector:@selector(didCompletePlayingMovie)])
    {
        [self.delegate didCompletePlayingMovie];
    }
    self.delegate = nil;
}
// Aborts any in-flight asset reading, then performs the normal teardown.
// Messaging a nil `reader` is a no-op, so no explicit guard is needed.
- (void)cancelProcessing
{
    [reader cancelReading];
    [self endProcessing];
}
// Draws the separate luminance and chrominance textures through the YUV->RGB
// conversion shader into `outputFramebuffer`, sized to the source movie frame.
// Must run on the video-processing queue with a current GL context.
- (void)convertYUVToRGBOutput;
{
[GPUImageContext setActiveShaderProgram:yuvConversionProgram];
// NOTE(review): the fetched framebuffer's cache lock is expected to be
// balanced by downstream consumers - confirm against the callers.
outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(imageBufferWidth, imageBufferHeight) onlyTexture:NO];
[outputFramebuffer activateFramebuffer];
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// Full-screen quad in normalized device coordinates.
static const GLfloat squareVertices[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
// Identity texture mapping: the whole source maps onto the whole quad.
static const GLfloat textureCoordinates[] = {
0.0f, 0.0f,
1.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
};
// Bind Y on unit 4 and CbCr on unit 5 (units 0-3 are reserved for filter inputs).
glActiveTexture(GL_TEXTURE4);
glBindTexture(GL_TEXTURE_2D, luminanceTexture);
glUniform1i(yuvConversionLuminanceTextureUniform, 4);
glActiveTexture(GL_TEXTURE5);
glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
glUniform1i(yuvConversionChrominanceTextureUniform, 5);
// Color-space matrix (601/709, full/video range) chosen per-frame elsewhere.
glUniformMatrix3fv(yuvConversionMatrixUniform, 1, GL_FALSE, _preferredConversion);
glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
glVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
// Read-only access to the underlying AVAssetReader (nil until reading starts).
- (AVAssetReader*)assetReader {
return reader;
}
// YES once the audio track has been fully read (or reading was abandoned).
- (BOOL)audioEncodingIsFinished {
return audioEncodingIsFinished;
}
// YES once the video track has been fully read (or reading was abandoned).
- (BOOL)videoEncodingIsFinished {
return videoEncodingIsFinished;
}
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageMovieComposition.h
================================================
//
// GPUImageMovieComposition.h
// Givit
//
// Created by Sean Meiners on 2013/01/25.
//
//
#import "GPUImageMovie.h"
// Movie source backed by an AVComposition (plus optional video composition and
// audio mix) instead of a flat asset, for playing edited/combined timelines.
@interface GPUImageMovieComposition : GPUImageMovie
// NOTE(review): "compositon" is misspelled (should be "composition") but it is
// public API - renaming would break existing callers, so the spelling stands.
@property (readwrite, retain) AVComposition *compositon;
@property (readwrite, retain) AVVideoComposition *videoComposition;
@property (readwrite, retain) AVAudioMix *audioMix;
// Designated initializer; any of the three arguments may be nil except the
// composition itself, which the asset reader is created from.
- (id)initWithComposition:(AVComposition*)compositon
andVideoComposition:(AVVideoComposition*)videoComposition
andAudioMix:(AVAudioMix*)audioMix;
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageMovieComposition.m
================================================
//
// GPUImageMovieComposition.m
// Givit
//
// Created by Sean Meiners on 2013/01/25.
//
//
#import "GPUImageMovieComposition.h"
#import "GPUImageMovieWriter.h"
@implementation GPUImageMovieComposition
@synthesize compositon = _compositon;
@synthesize videoComposition = _videoComposition;
@synthesize audioMix = _audioMix;
// Designated initializer: stores the composition trio and prepares the
// YUV-conversion shader inherited from GPUImageMovie before any frame is read.
- (id)initWithComposition:(AVComposition*)compositon
andVideoComposition:(AVVideoComposition*)videoComposition
andAudioMix:(AVAudioMix*)audioMix {
if (!(self = [super init]))
{
return nil;
}
[self yuvConversionSetup];
self.compositon = compositon;
self.videoComposition = videoComposition;
self.audioMix = audioMix;
return self;
}
// Builds the AVAssetReader for this composition: a biplanar-YUV
// video-composition output, plus an audio-mix output when an audio encoding
// target is attached.
- (AVAssetReader*)createAssetReader
{
//NSLog(@"creating reader from composition: %@, video: %@, audio: %@ with duration: %@", _compositon, _videoComposition, _audioMix, CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, _compositon.duration)));
NSError *error = nil;
// NOTE(review): `error` is never inspected; if reader creation fails,
// assetReader is nil and the addOutput: calls below are silent no-ops -
// confirm that callers surface the failure.
AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:self.compositon error:&error];
// Request biplanar full-range 4:2:0 YUV so the GPU conversion path is used.
NSDictionary *outputSettings = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)};
AVAssetReaderVideoCompositionOutput *readerVideoOutput = [AVAssetReaderVideoCompositionOutput assetReaderVideoCompositionOutputWithVideoTracks:[_compositon tracksWithMediaType:AVMediaTypeVideo]
videoSettings:outputSettings];
#if ! TARGET_IPHONE_SIMULATOR
// On device, force a 1.0 render scale on mutable compositions; the simulator
// build skips this.
if( [_videoComposition isKindOfClass:[AVMutableVideoComposition class]] )
[(AVMutableVideoComposition*)_videoComposition setRenderScale:1.0];
#endif
readerVideoOutput.videoComposition = self.videoComposition;
// Samples are consumed once, so avoid the defensive buffer copy.
readerVideoOutput.alwaysCopiesSampleData = NO;
[assetReader addOutput:readerVideoOutput];
NSArray *audioTracks = [_compositon tracksWithMediaType:AVMediaTypeAudio];
// Audio is only read when there is somewhere to encode it to.
BOOL shouldRecordAudioTrack = (([audioTracks count] > 0) && (self.audioEncodingTarget != nil) );
AVAssetReaderAudioMixOutput *readerAudioOutput = nil;
if (shouldRecordAudioTrack)
{
[self.audioEncodingTarget setShouldInvalidateAudioSampleWhenDone:YES];
readerAudioOutput = [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:audioTracks audioSettings:nil];
readerAudioOutput.audioMix = self.audioMix;
readerAudioOutput.alwaysCopiesSampleData = NO;
[assetReader addOutput:readerAudioOutput];
}
return assetReader;
}
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageOutput.h
================================================
#import "GPUImageContext.h"
#import "GPUImageFramebuffer.h"
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#else
// For now, just redefine this on the Mac
typedef NS_ENUM(NSInteger, UIImageOrientation) {
UIImageOrientationUp, // default orientation
UIImageOrientationDown, // 180 deg rotation
UIImageOrientationLeft, // 90 deg CCW
UIImageOrientationRight, // 90 deg CW
UIImageOrientationUpMirrored, // as above but image mirrored along other axis. horizontal flip
UIImageOrientationDownMirrored, // horizontal flip
UIImageOrientationLeftMirrored, // vertical flip
UIImageOrientationRightMirrored, // vertical flip
};
#endif
// Queue attribute for GPUImage's serial dispatch queues (QoS-tagged on iOS 9+).
dispatch_queue_attr_t GPUImageDefaultQueueAttribute(void);
// Runs the block on the main queue, inline if already on the main thread.
void runOnMainQueueWithoutDeadlocking(void (^block)(void));
// Run the block on the shared video-processing queue, synchronously or
// asynchronously; both run inline when already on that queue to avoid deadlock.
void runSynchronouslyOnVideoProcessingQueue(void (^block)(void));
void runAsynchronouslyOnVideoProcessingQueue(void (^block)(void));
// Same pair, but targeting an explicit context's queue.
void runSynchronouslyOnContextQueue(GPUImageContext *context, void (^block)(void));
void runAsynchronouslyOnContextQueue(GPUImageContext *context, void (^block)(void));
// Logs this process's resident memory size, labeled with `tag`, for debugging.
void reportAvailableMemoryForGPUImage(NSString *tag);
@class GPUImageMovieWriter;
/** GPUImage's base source object
Images or frames of video are uploaded from source objects, which are subclasses of GPUImageOutput. These include:
- GPUImageVideoCamera (for live video from an iOS camera)
- GPUImageStillCamera (for taking photos with the camera)
- GPUImagePicture (for still images)
- GPUImageMovie (for movies)
Source objects upload still image frames to OpenGL ES as textures, then hand those textures off to the next objects in the processing chain.
*/
@interface GPUImageOutput : NSObject
{
GPUImageFramebuffer *outputFramebuffer;
// Parallel arrays: targets[i] receives frames at texture slot targetTextureIndices[i].
NSMutableArray *targets, *targetTextureIndices;
CGSize inputTextureSize, cachedMaximumOutputSize, forcedMaximumSize;
BOOL overrideInputSize;
BOOL allTargetsWantMonochromeData;
BOOL usingNextFrameForImageCapture;
}
@property(readwrite, nonatomic) BOOL shouldSmoothlyScaleOutput;
@property(readwrite, nonatomic) BOOL shouldIgnoreUpdatesToThisTarget;
@property(readwrite, nonatomic, retain) GPUImageMovieWriter *audioEncodingTarget;
@property(readwrite, nonatomic, unsafe_unretained) id<GPUImageInput> targetToIgnoreForUpdates;
@property(nonatomic, copy) void(^frameProcessingCompletionBlock)(GPUImageOutput*, CMTime);
@property(nonatomic) BOOL enabled;
@property(readwrite, nonatomic) GPUTextureOptions outputTextureOptions;
/// @name Managing targets
- (void)setInputFramebufferForTarget:(id<GPUImageInput>)target atIndex:(NSInteger)inputTextureIndex;
- (GPUImageFramebuffer *)framebufferForOutput;
- (void)removeOutputFramebuffer;
- (void)notifyTargetsAboutNewOutputTexture;
/** Returns an array of the current targets.
*/
- (NSArray*)targets;
/** Adds a target to receive notifications when new frames are available.
The target will be asked for its next available texture.
See [GPUImageInput newFrameReadyAtTime:]
@param newTarget Target to be added
*/
- (void)addTarget:(id<GPUImageInput>)newTarget;
/** Adds a target to receive notifications when new frames are available.
See [GPUImageInput newFrameReadyAtTime:]
@param newTarget Target to be added
*/
- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;
/** Removes a target. The target will no longer receive notifications when new frames are available.
@param targetToRemove Target to be removed
*/
- (void)removeTarget:(id<GPUImageInput>)targetToRemove;
/** Removes all targets.
*/
- (void)removeAllTargets;
/// @name Manage the output texture
- (void)forceProcessingAtSize:(CGSize)frameSize;
- (void)forceProcessingAtSizeRespectingAspectRatio:(CGSize)frameSize;
/// @name Still image processing
- (void)useNextFrameForImageCapture;
// "new" prefix: the caller owns the returned CGImageRef and must CGImageRelease it.
- (CGImageRef)newCGImageFromCurrentlyProcessedOutput;
- (CGImageRef)newCGImageByFilteringCGImage:(CGImageRef)imageToFilter;
// Platform-specific image output methods
// If you're trying to use these methods, remember that you need to set -useNextFrameForImageCapture before running -processImage or running video and calling any of these methods, or you will get a nil image
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- (UIImage *)imageFromCurrentFramebuffer;
- (UIImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation;
- (UIImage *)imageByFilteringImage:(UIImage *)imageToFilter;
- (CGImageRef)newCGImageByFilteringImage:(UIImage *)imageToFilter;
#else
- (NSImage *)imageFromCurrentFramebuffer;
- (NSImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation;
- (NSImage *)imageByFilteringImage:(NSImage *)imageToFilter;
- (CGImageRef)newCGImageByFilteringImage:(NSImage *)imageToFilter;
#endif
- (BOOL)providesMonochromeOutput;
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageOutput.m
================================================
#import "GPUImageOutput.h"
#import "GPUImageMovieWriter.h"
#import "GPUImagePicture.h"
#import <mach/mach.h>
// Returns the attribute used when creating GPUImage's serial dispatch queues:
// a QOS_CLASS_DEFAULT serial attribute on iOS 9+, nil (plain serial) otherwise.
dispatch_queue_attr_t GPUImageDefaultQueueAttribute(void)
{
#if TARGET_OS_IPHONE
    NSString *systemVersion = [[UIDevice currentDevice] systemVersion];
    BOOL supportsQoSAttributes = ([systemVersion compare:@"9.0" options:NSNumericSearch] != NSOrderedAscending);
    if (supportsQoSAttributes)
    {
        return dispatch_queue_attr_make_with_qos_class(DISPATCH_QUEUE_SERIAL, QOS_CLASS_DEFAULT, 0);
    }
#endif
    return nil;
}
// Executes `block` on the main queue. When the caller is already on the main
// thread the block runs inline, avoiding the dispatch_sync-to-current-queue
// deadlock.
void runOnMainQueueWithoutDeadlocking(void (^block)(void))
{
    if (![NSThread isMainThread])
    {
        dispatch_sync(dispatch_get_main_queue(), block);
        return;
    }
    block();
}
// Runs `block` synchronously on the shared video-processing queue. If the
// caller is already on that queue the block runs inline, because dispatch_sync
// onto the current queue would deadlock.
void runSynchronouslyOnVideoProcessingQueue(void (^block)(void))
{
dispatch_queue_t videoProcessingQueue = [GPUImageContext sharedContextQueue];
#if !OS_OBJECT_USE_OBJC
// Pre-ObjC-object dispatch builds: compare queues directly via the
// (deprecated) dispatch_get_current_queue.
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
if (dispatch_get_current_queue() == videoProcessingQueue)
#pragma clang diagnostic pop
#else
// Modern builds: detect the queue via its queue-specific key.
if (dispatch_get_specific([GPUImageContext contextKey]))
#endif
{
block();
}else
{
dispatch_sync(videoProcessingQueue, block);
}
}
// Runs `block` on the shared video-processing queue without waiting. If the
// caller is already on that queue the block runs inline (preserving ordering
// relative to the caller's own work on that queue).
void runAsynchronouslyOnVideoProcessingQueue(void (^block)(void))
{
dispatch_queue_t videoProcessingQueue = [GPUImageContext sharedContextQueue];
#if !OS_OBJECT_USE_OBJC
// Pre-ObjC-object dispatch builds: compare queues directly via the
// (deprecated) dispatch_get_current_queue.
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
if (dispatch_get_current_queue() == videoProcessingQueue)
#pragma clang diagnostic pop
#else
// Modern builds: detect the queue via its queue-specific key.
if (dispatch_get_specific([GPUImageContext contextKey]))
#endif
{
block();
}else
{
dispatch_async(videoProcessingQueue, block);
}
}
// Like runSynchronouslyOnVideoProcessingQueue, but targets the queue of the
// given context instead of the shared one.
// NOTE(review): the OS_OBJECT_USE_OBJC branch checks the global contextKey,
// not a key specific to `context` - if multiple contexts share that key this
// may run inline on the wrong context's queue; confirm contextKey semantics.
void runSynchronouslyOnContextQueue(GPUImageContext *context, void (^block)(void))
{
dispatch_queue_t videoProcessingQueue = [context contextQueue];
#if !OS_OBJECT_USE_OBJC
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
if (dispatch_get_current_queue() == videoProcessingQueue)
#pragma clang diagnostic pop
#else
if (dispatch_get_specific([GPUImageContext contextKey]))
#endif
{
block();
}else
{
dispatch_sync(videoProcessingQueue, block);
}
}
// Asynchronous counterpart of runSynchronouslyOnContextQueue: dispatches to
// the given context's queue, running inline if already on a GPUImage context
// queue (same contextKey caveat as the synchronous variant).
void runAsynchronouslyOnContextQueue(GPUImageContext *context, void (^block)(void))
{
dispatch_queue_t videoProcessingQueue = [context contextQueue];
#if !OS_OBJECT_USE_OBJC
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
if (dispatch_get_current_queue() == videoProcessingQueue)
#pragma clang diagnostic pop
#else
if (dispatch_get_specific([GPUImageContext contextKey]))
#endif
{
block();
}else
{
dispatch_async(videoProcessingQueue, block);
}
}
// Logs this process's resident memory size (in bytes), labeled with `tag`,
// for memory-usage debugging. Passing nil uses the label "Default".
void reportAvailableMemoryForGPUImage(NSString *tag)
{
    if (!tag)
        tag = @"Default";
    struct task_basic_info info;
    // Use the canonical count constant (in natural_t units) rather than
    // sizeof(info), which is a byte count.
    mach_msg_type_number_t size = TASK_BASIC_INFO_COUNT;
    kern_return_t kerr = task_info(mach_task_self(),
                                   TASK_BASIC_INFO,
                                   (task_info_t)&info,
                                   &size);
    if( kerr == KERN_SUCCESS ) {
        // resident_size is wider than 32 bits on modern targets; the previous
        // %u / (unsigned int) cast truncated values above 4 GB.
        NSLog(@"%@ - Memory used: %lu", tag, (unsigned long)info.resident_size); //in bytes
    } else {
        NSLog(@"%@ - Error: %s", tag, mach_error_string(kerr));
    }
}
@implementation GPUImageOutput
@synthesize shouldSmoothlyScaleOutput = _shouldSmoothlyScaleOutput;
@synthesize shouldIgnoreUpdatesToThisTarget = _shouldIgnoreUpdatesToThisTarget;
@synthesize audioEncodingTarget = _audioEncodingTarget;
@synthesize targetToIgnoreForUpdates = _targetToIgnoreForUpdates;
@synthesize frameProcessingCompletionBlock = _frameProcessingCompletionBlock;
@synthesize enabled = _enabled;
@synthesize outputTextureOptions = _outputTextureOptions;
#pragma mark -
#pragma mark Initialization and teardown
// Designated initializer: starts enabled with empty target lists and default
// clamped, linearly-filtered BGRA texture options.
- (id)init;
{
if (!(self = [super init]))
{
return nil;
}
targets = [[NSMutableArray alloc] init];
targetTextureIndices = [[NSMutableArray alloc] init];
_enabled = YES;
allTargetsWantMonochromeData = YES;
usingNextFrameForImageCapture = NO;
// set default texture options
_outputTextureOptions.minFilter = GL_LINEAR;
_outputTextureOptions.magFilter = GL_LINEAR;
_outputTextureOptions.wrapS = GL_CLAMP_TO_EDGE;
_outputTextureOptions.wrapT = GL_CLAMP_TO_EDGE;
_outputTextureOptions.internalFormat = GL_RGBA;
_outputTextureOptions.format = GL_BGRA;
_outputTextureOptions.type = GL_UNSIGNED_BYTE;
return self;
}
// Detaches all downstream targets; removeAllTargets hops onto the
// video-processing queue synchronously.
- (void)dealloc
{
[self removeAllTargets];
}
#pragma mark -
#pragma mark Managing targets
// Hands the current output framebuffer to one target at the given input slot.
- (void)setInputFramebufferForTarget:(id<GPUImageInput>)target atIndex:(NSInteger)inputTextureIndex;
{
[target setInputFramebuffer:[self framebufferForOutput] atIndex:inputTextureIndex];
}
// Subclasses may override to supply a different framebuffer.
- (GPUImageFramebuffer *)framebufferForOutput;
{
return outputFramebuffer;
}
// Drops our reference; the framebuffer cache reclaims it when unlocked.
- (void)removeOutputFramebuffer;
{
outputFramebuffer = nil;
}
// Re-sends the current output framebuffer to every registered target.
- (void)notifyTargetsAboutNewOutputTexture;
{
for (id<GPUImageInput> currentTarget in targets)
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger textureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
[self setInputFramebufferForTarget:currentTarget atIndex:textureIndex];
}
}
// Returns a defensive immutable snapshot of the target list.
- (NSArray*)targets;
{
return [NSArray arrayWithArray:targets];
}
// Adds a target at whatever input slot it reports as next available.
- (void)addTarget:(id<GPUImageInput>)newTarget;
{
NSInteger nextAvailableTextureIndex = [newTarget nextAvailableTextureIndex];
[self addTarget:newTarget atTextureLocation:nextAvailableTextureIndex];
if ([newTarget shouldIgnoreUpdatesToThisTarget])
{
_targetToIgnoreForUpdates = newTarget;
}
}
// Adds a target at an explicit input slot; duplicates are silently ignored.
- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;
{
if([targets containsObject:newTarget])
{
return;
}
cachedMaximumOutputSize = CGSizeZero;
// The target/index arrays are only mutated on the video-processing queue.
runSynchronouslyOnVideoProcessingQueue(^{
[self setInputFramebufferForTarget:newTarget atIndex:textureLocation];
[targets addObject:newTarget];
[targetTextureIndices addObject:[NSNumber numberWithInteger:textureLocation]];
allTargetsWantMonochromeData = allTargetsWantMonochromeData && [newTarget wantsMonochromeInput];
});
}
// Removes a target, resetting its input slot and notifying it of end-of-stream.
- (void)removeTarget:(id<GPUImageInput>)targetToRemove;
{
if(![targets containsObject:targetToRemove])
{
return;
}
if (_targetToIgnoreForUpdates == targetToRemove)
{
_targetToIgnoreForUpdates = nil;
}
cachedMaximumOutputSize = CGSizeZero;
// NOTE(review): the index is computed before hopping onto the processing
// queue; if `targets` mutates concurrently the captured index could go
// stale - confirm the single-queue mutation assumption holds for all callers.
NSInteger indexOfObject = [targets indexOfObject:targetToRemove];
NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
runSynchronouslyOnVideoProcessingQueue(^{
[targetToRemove setInputSize:CGSizeZero atIndex:textureIndexOfTarget];
[targetToRemove setInputRotation:kGPUImageNoRotation atIndex:textureIndexOfTarget];
[targetTextureIndices removeObjectAtIndex:indexOfObject];
[targets removeObject:targetToRemove];
[targetToRemove endProcessing];
});
}
// Resets every target's input slot, then clears both parallel arrays.
- (void)removeAllTargets;
{
cachedMaximumOutputSize = CGSizeZero;
runSynchronouslyOnVideoProcessingQueue(^{
for (id<GPUImageInput> targetToRemove in targets)
{
NSInteger indexOfObject = [targets indexOfObject:targetToRemove];
NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
[targetToRemove setInputSize:CGSizeZero atIndex:textureIndexOfTarget];
[targetToRemove setInputRotation:kGPUImageNoRotation atIndex:textureIndexOfTarget];
}
[targets removeAllObjects];
[targetTextureIndices removeAllObjects];
allTargetsWantMonochromeData = YES;
});
}
#pragma mark -
#pragma mark Manage the output texture
// No-op in the base class; subclasses that render override these.
- (void)forceProcessingAtSize:(CGSize)frameSize;
{
}
- (void)forceProcessingAtSizeRespectingAspectRatio:(CGSize)frameSize;
{
}
#pragma mark -
#pragma mark Still image processing
// No-op in the base class; rendering subclasses arm capture of the next frame.
- (void)useNextFrameForImageCapture;
{
}
// Base class produces no image; subclasses return a +1 CGImageRef.
- (CGImageRef)newCGImageFromCurrentlyProcessedOutput;
{
return nil;
}
// Runs `imageToFilter` through this output (which must also conform to
// GPUImageInput, i.e. be a filter) and returns a +1 CGImageRef the caller
// must release.
- (CGImageRef)newCGImageByFilteringCGImage:(CGImageRef)imageToFilter;
{
GPUImagePicture *stillImageSource = [[GPUImagePicture alloc] initWithCGImage:imageToFilter];
[self useNextFrameForImageCapture];
[stillImageSource addTarget:(id<GPUImageInput>)self];
[stillImageSource processImage];
CGImageRef processedImage = [self newCGImageFromCurrentlyProcessedOutput];
[stillImageSource removeTarget:(id<GPUImageInput>)self];
return processedImage;
}
- (BOOL)providesMonochromeOutput;
{
return NO;
}
#pragma mark -
#pragma mark Platform-specific image output methods
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Maps the current device orientation to an image orientation and captures.
// NOTE(review): unknown/face-up/face-down orientations fall back to
// UIImageOrientationUp.
- (UIImage *)imageFromCurrentFramebuffer;
{
UIDeviceOrientation deviceOrientation = [[UIDevice currentDevice] orientation];
UIImageOrientation imageOrientation = UIImageOrientationLeft;
switch (deviceOrientation)
{
case UIDeviceOrientationPortrait:
imageOrientation = UIImageOrientationUp;
break;
case UIDeviceOrientationPortraitUpsideDown:
imageOrientation = UIImageOrientationDown;
break;
case UIDeviceOrientationLandscapeLeft:
imageOrientation = UIImageOrientationLeft;
break;
case UIDeviceOrientationLandscapeRight:
imageOrientation = UIImageOrientationRight;
break;
default:
imageOrientation = UIImageOrientationUp;
break;
}
return [self imageFromCurrentFramebufferWithOrientation:imageOrientation];
}
// Wraps the captured CGImage in a UIImage; releases the +1 CGImageRef.
- (UIImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation;
{
CGImageRef cgImageFromBytes = [self newCGImageFromCurrentlyProcessedOutput];
UIImage *finalImage = [UIImage imageWithCGImage:cgImageFromBytes scale:1.0 orientation:imageOrientation];
CGImageRelease(cgImageFromBytes);
return finalImage;
}
// Filters a UIImage, preserving its scale and orientation metadata.
- (UIImage *)imageByFilteringImage:(UIImage *)imageToFilter;
{
CGImageRef image = [self newCGImageByFilteringCGImage:[imageToFilter CGImage]];
UIImage *processedImage = [UIImage imageWithCGImage:image scale:[imageToFilter scale] orientation:[imageToFilter imageOrientation]];
CGImageRelease(image);
return processedImage;
}
// Returns a +1 CGImageRef; the caller must release it.
- (CGImageRef)newCGImageByFilteringImage:(UIImage *)imageToFilter
{
return [self newCGImageByFilteringCGImage:[imageToFilter CGImage]];
}
#else
// Mac variants of the capture helpers, built on NSImage.
- (NSImage *)imageFromCurrentFramebuffer;
{
return [self imageFromCurrentFramebufferWithOrientation:UIImageOrientationLeft];
}
- (NSImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation;
{
CGImageRef cgImageFromBytes = [self newCGImageFromCurrentlyProcessedOutput];
NSImage *finalImage = [[NSImage alloc] initWithCGImage:cgImageFromBytes size:NSZeroSize];
CGImageRelease(cgImageFromBytes);
return finalImage;
}
- (NSImage *)imageByFilteringImage:(NSImage *)imageToFilter;
{
CGImageRef image = [self newCGImageByFilteringCGImage:[imageToFilter CGImageForProposedRect:NULL context:[NSGraphicsContext currentContext] hints:nil]];
NSImage *processedImage = [[NSImage alloc] initWithCGImage:image size:NSZeroSize];
CGImageRelease(image);
return processedImage;
}
- (CGImageRef)newCGImageByFilteringImage:(NSImage *)imageToFilter
{
return [self newCGImageByFilteringCGImage:[imageToFilter CGImageForProposedRect:NULL context:[NSGraphicsContext currentContext] hints:nil]];
}
#endif
#pragma mark -
#pragma mark Accessors
// Attaching an audio encoding target forces its audio track on so the writer
// expects audio samples.
- (void)setAudioEncodingTarget:(GPUImageMovieWriter *)newValue;
{
_audioEncodingTarget = newValue;
if( ! _audioEncodingTarget.hasAudioTrack )
{
_audioEncodingTarget.hasAudioTrack = YES;
}
}
// Stores new texture options; only the wrap parameters are re-applied to an
// existing texture here (filter/format changes take effect on the next
// framebuffer fetch).
-(void)setOutputTextureOptions:(GPUTextureOptions)outputTextureOptions
{
_outputTextureOptions = outputTextureOptions;
if( outputFramebuffer.texture )
{
glBindTexture(GL_TEXTURE_2D, outputFramebuffer.texture);
//_outputTextureOptions.format
//_outputTextureOptions.internalFormat
//_outputTextureOptions.magFilter
//_outputTextureOptions.minFilter
//_outputTextureOptions.type
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, _outputTextureOptions.wrapS);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, _outputTextureOptions.wrapT);
glBindTexture(GL_TEXTURE_2D, 0);
}
}
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTextureInput.h
================================================
#import "GPUImageOutput.h"
// Source object that wraps an OpenGL texture created outside of GPUImage so it
// can feed a filter chain. The caller retains ownership of the texture.
@interface GPUImageTextureInput : GPUImageOutput
{
// Pixel dimensions of the wrapped texture, captured at init time.
CGSize textureSize;
}
// Initialization and teardown
// `newTextureSize` must match the texture's actual dimensions.
- (id)initWithTexture:(GLuint)newInputTexture size:(CGSize)newTextureSize;
// Image rendering
// Pushes the wrapped texture to all targets, stamped with `frameTime`.
- (void)processTextureWithFrameTime:(CMTime)frameTime;
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTextureInput.m
================================================
#import "GPUImageTextureInput.h"
@implementation GPUImageTextureInput

#pragma mark -
#pragma mark Initialization and teardown

// Wraps an externally created OpenGL texture as a GPUImage source.
// `newTextureSize` must describe the texture's actual pixel dimensions; the
// caller keeps ownership of the texture itself.
- (id)initWithTexture:(GLuint)newInputTexture size:(CGSize)newTextureSize;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    textureSize = newTextureSize;

    // Previously this dispatched synchronously to the video-processing queue
    // twice in a row; a single block avoids the extra queue round-trip while
    // keeping the same ordering (context activation before framebuffer creation).
    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];
        outputFramebuffer = [[GPUImageFramebuffer alloc] initWithSize:newTextureSize overriddenTexture:newInputTexture];
    });

    return self;
}

#pragma mark -
#pragma mark Image rendering

// Hands the wrapped texture's framebuffer to every target and signals that a
// new frame (stamped with `frameTime`) is ready. Runs asynchronously on the
// video-processing queue.
- (void)processTextureWithFrameTime:(CMTime)frameTime;
{
    runAsynchronouslyOnVideoProcessingQueue(^{
        for (id<GPUImageInput> currentTarget in targets)
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            [currentTarget setInputSize:textureSize atIndex:targetTextureIndex];
            [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
            [currentTarget newFrameReadyAtTime:frameTime atIndex:targetTextureIndex];
        }
    });
}

@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTextureOutput.h
================================================
#import <Foundation/Foundation.h>
#import "GPUImageContext.h"
@protocol GPUImageTextureOutputDelegate;
// Terminal node that exposes the incoming framebuffer's texture ID so client
// code can render it with its own OpenGL code. Call -doneWithTexture when
// finished with a frame to release the framebuffer back to the cache.
@interface GPUImageTextureOutput : NSObject <GPUImageInput>
{
// The most recent upstream framebuffer; held locked until -doneWithTexture.
GPUImageFramebuffer *firstInputFramebuffer;
}
@property(readwrite, unsafe_unretained, nonatomic) id<GPUImageTextureOutputDelegate> delegate;
@property(readonly) GLuint texture;
@property(nonatomic) BOOL enabled;
// Unlocks the current input framebuffer; the texture ID becomes invalid after this.
- (void)doneWithTexture;
@end
// Notifies the client each time a new frame's texture is available.
@protocol GPUImageTextureOutputDelegate
- (void)newFrameReadyFromTextureOutput:(GPUImageTextureOutput *)callbackTextureOutput;
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTextureOutput.m
================================================
#import "GPUImageTextureOutput.h"
@implementation GPUImageTextureOutput
@synthesize delegate = _delegate;
@synthesize texture = _texture;
@synthesize enabled;
#pragma mark -
#pragma mark Initialization and teardown
- (id)init;
{
if (!(self = [super init]))
{
return nil;
}
self.enabled = YES;
return self;
}
// Releases the lock taken in -setInputFramebuffer:atIndex:, returning the
// framebuffer to the cache. The exposed texture ID is invalid after this.
- (void)doneWithTexture;
{
[firstInputFramebuffer unlock];
}
#pragma mark -
#pragma mark GPUImageInput protocol
// Forwards the new-frame notification to the delegate (no-op if nil).
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
[_delegate newFrameReadyFromTextureOutput:self];
}
// Single-input node: always slot 0.
- (NSInteger)nextAvailableTextureIndex;
{
return 0;
}
// TODO: Deal with the fact that the texture changes regularly as a result of the caching
// Locks the incoming framebuffer (balanced by -doneWithTexture) and caches
// its texture ID for the client.
- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
{
firstInputFramebuffer = newInputFramebuffer;
[firstInputFramebuffer lock];
_texture = [firstInputFramebuffer texture];
}
// Rotation and size are irrelevant to a raw texture hand-off; ignored.
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
}
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
}
// No size constraint is imposed on upstream output.
- (CGSize)maximumOutputSize;
{
return CGSizeZero;
}
- (void)endProcessing
{
}
- (BOOL)shouldIgnoreUpdatesToThisTarget;
{
return NO;
}
- (BOOL)wantsMonochromeInput;
{
return NO;
}
- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;
{
}
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageThreeInputFilter.h
================================================
#import "GPUImageTwoInputFilter.h"
extern NSString *const kGPUImageThreeInputTextureVertexShaderString;
// Filter that blends three input textures. Extends the two-input filter with a
// third framebuffer/rotation/coordinate-attribute set; the fragment shader is
// expected to declare `inputImageTexture3`.
@interface GPUImageThreeInputFilter : GPUImageTwoInputFilter
{
GPUImageFramebuffer *thirdInputFramebuffer;
GLint filterThirdTextureCoordinateAttribute;
GLint filterInputTextureUniform3;
GPUImageRotationMode inputRotation3;
GLuint filterSourceTexture3;
CMTime thirdFrameTime;
// Frame bookkeeping used to synchronize the three inputs before rendering.
BOOL hasSetSecondTexture, hasReceivedThirdFrame, thirdFrameWasVideo;
BOOL thirdFrameCheckDisabled;
}
// Render without waiting for a frame on the third input (e.g. for static inputs).
- (void)disableThirdFrameCheck;
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageThreeInputFilter.m
================================================
#import "GPUImageThreeInputFilter.h"
// Pass-through vertex shader for three-input filters: forwards the vertex
// position and the three per-input texture coordinates to the fragment stage.
// (Do not add comments inside SHADER_STRING - the macro stringizes its body.)
NSString *const kGPUImageThreeInputTextureVertexShaderString = SHADER_STRING
(
attribute vec4 position;
attribute vec4 inputTextureCoordinate;
attribute vec4 inputTextureCoordinate2;
attribute vec4 inputTextureCoordinate3;
varying vec2 textureCoordinate;
varying vec2 textureCoordinate2;
varying vec2 textureCoordinate3;
void main()
{
gl_Position = position;
textureCoordinate = inputTextureCoordinate.xy;
textureCoordinate2 = inputTextureCoordinate2.xy;
textureCoordinate3 = inputTextureCoordinate3.xy;
}
);
@implementation GPUImageThreeInputFilter
#pragma mark -
#pragma mark Initialization and teardown
// Convenience initializer: pairs the supplied fragment shader with the
// standard three-input pass-through vertex shader.
- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
{
    return [self initWithVertexShaderFromString:kGPUImageThreeInputTextureVertexShaderString fragmentShaderFromString:fragmentShaderString];
}
// Designated initializer: after the two-input superclass setup, resets the
// third input's state and wires up the third texture-coordinate attribute and
// sampler uniform on the video-processing queue.
- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;
{
if (!(self = [super initWithVertexShaderFromString:vertexShaderString fragmentShaderFromString:fragmentShaderString]))
{
return nil;
}
inputRotation3 = kGPUImageNoRotation;
hasSetSecondTexture = NO;
hasReceivedThirdFrame = NO;
thirdFrameWasVideo = NO;
thirdFrameCheckDisabled = NO;
thirdFrameTime = kCMTimeInvalid;
// GL attribute/uniform lookups must happen with the processing context current.
runSynchronouslyOnVideoProcessingQueue(^{
[GPUImageContext useImageProcessingContext];
filterThirdTextureCoordinateAttribute = [filterProgram attributeIndex:@"inputTextureCoordinate3"];
filterInputTextureUniform3 = [filterProgram uniformIndex:@"inputImageTexture3"]; // This does assume a name of "inputImageTexture3" for the third input texture in the fragment shader
glEnableVertexAttribArray(filterThirdTextureCoordinateAttribute);
});
return self;
}
// Registers the third texture-coordinate attribute before the program links.
- (void)initializeAttributes;
{
[super initializeAttributes];
[filterProgram addAttribute:@"inputTextureCoordinate3"];
}
// Allows rendering without waiting for a frame on the third input (useful when
// the third input is a static image rather than video).
- (void)disableThirdFrameCheck;
{
thirdFrameCheckDisabled = YES;
}
#pragma mark -
#pragma mark Rendering
// Draws all three inputs through the filter program into a freshly fetched
// output framebuffer. Every input framebuffer locked on arrival is unlocked
// here, on both the render and the preventRendering early-out paths.
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
if (self.preventRendering)
{
// Still balance the input locks even when skipping the draw.
[firstInputFramebuffer unlock];
[secondInputFramebuffer unlock];
[thirdInputFramebuffer unlock];
return;
}
[GPUImageContext setActiveShaderProgram:filterProgram];
outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
[outputFramebuffer activateFramebuffer];
if (usingNextFrameForImageCapture)
{
// Extra lock keeps the framebuffer alive until the still capture reads it.
[outputFramebuffer lock];
}
[self setUniformsForProgramAtIndex:0];
glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
glClear(GL_COLOR_BUFFER_BIT);
// Inputs are bound to texture units 2-4 (0 and 1 are reserved elsewhere).
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);
glUniform1i(filterInputTextureUniform, 2);
glActiveTexture(GL_TEXTURE3);
glBindTexture(GL_TEXTURE_2D, [secondInputFramebuffer texture]);
glUniform1i(filterInputTextureUniform2, 3);
glActiveTexture(GL_TEXTURE4);
glBindTexture(GL_TEXTURE_2D, [thirdInputFramebuffer texture]);
glUniform1i(filterInputTextureUniform3, 4);
// Inputs 2 and 3 use per-rotation texture coordinates; input 1 uses the
// coordinates supplied by the caller.
glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
glVertexAttribPointer(filterSecondTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation2]);
glVertexAttribPointer(filterThirdTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation3]);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
[firstInputFramebuffer unlock];
[secondInputFramebuffer unlock];
[thirdInputFramebuffer unlock];
if (usingNextFrameForImageCapture)
{
// Wake the thread waiting in the still-image capture path.
dispatch_semaphore_signal(imageCaptureSemaphore);
}
}
#pragma mark -
#pragma mark GPUImageInput
// Reports which input slot the next upstream source should attach to:
// slots are handed out in order 0, 1, 2 as textures are supplied.
- (NSInteger)nextAvailableTextureIndex;
{
    if (hasSetSecondTexture)
    {
        return 2;
    }
    return hasSetFirstTexture ? 1 : 0;
}
// Stores and locks the framebuffer arriving for the given input slot.
// Slots 0 and 1 also record that their texture has been provided, which
// drives -nextAvailableTextureIndex.
- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
{
    switch (textureIndex)
    {
        case 0:
            firstInputFramebuffer = newInputFramebuffer;
            hasSetFirstTexture = YES;
            [firstInputFramebuffer lock];
            break;
        case 1:
            secondInputFramebuffer = newInputFramebuffer;
            hasSetSecondTexture = YES;
            [secondInputFramebuffer lock];
            break;
        default:
            thirdInputFramebuffer = newInputFramebuffer;
            [thirdInputFramebuffer lock];
            break;
    }
}
// Propagates the first input's size to the superclass (it determines the
// output size); a zero size on slot 0 or 1 clears that slot's "texture set"
// flag so the slot can be reassigned.
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
    BOOL sizeIsZero = CGSizeEqualToSize(newSize, CGSizeZero);
    if (textureIndex == 0)
    {
        [super setInputSize:newSize atIndex:textureIndex];
        if (sizeIsZero)
        {
            hasSetFirstTexture = NO;
        }
    }
    else if ((textureIndex == 1) && sizeIsZero)
    {
        hasSetSecondTexture = NO;
    }
}
// Records the rotation mode to apply to each of the three inputs.
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    switch (textureIndex)
    {
        case 0:
            inputRotation = newInputRotation;
            break;
        case 1:
            inputRotation2 = newInputRotation;
            break;
        default:
            inputRotation3 = newInputRotation;
            break;
    }
}
// Returns the size after applying the rotation configured for the given
// input index, swapping width and height for 90-degree rotations.
- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex;
{
    GPUImageRotationMode appliedRotation =
        (textureIndex == 0) ? inputRotation :
        (textureIndex == 1) ? inputRotation2 : inputRotation3;
    if (!GPUImageRotationSwapsWidthAndHeight(appliedRotation))
    {
        return sizeToRotate;
    }
    return CGSizeMake(sizeToRotate.height, sizeToRotate.width);
}
// Called by each upstream source when a new frame is ready on its input.
// A render is triggered only once all three inputs have reported (or have
// their checks disabled), or when a movie frame arrives opposite a still
// image (indefinite timestamp), so video keeps updating over a static input.
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
    // You can set up infinite update loops, so this helps to short circuit them
    if (hasReceivedFirstFrame && hasReceivedSecondFrame && hasReceivedThirdFrame)
    {
        return;
    }
    BOOL updatedMovieFrameOppositeStillImage = NO;
    if (textureIndex == 0)
    {
        hasReceivedFirstFrame = YES;
        firstFrameTime = frameTime;
        // Disabled checks count the corresponding input as already received.
        if (secondFrameCheckDisabled)
        {
            hasReceivedSecondFrame = YES;
        }
        if (thirdFrameCheckDisabled)
        {
            hasReceivedThirdFrame = YES;
        }
        if (!CMTIME_IS_INDEFINITE(frameTime))
        {
            // Note: the missing outer parentheses compile because the
            // CMTIME_IS_INDEFINITE macro expands to a parenthesized expression.
            if CMTIME_IS_INDEFINITE(secondFrameTime)
            {
                updatedMovieFrameOppositeStillImage = YES;
            }
        }
    }
    else if (textureIndex == 1)
    {
        hasReceivedSecondFrame = YES;
        secondFrameTime = frameTime;
        if (firstFrameCheckDisabled)
        {
            hasReceivedFirstFrame = YES;
        }
        if (thirdFrameCheckDisabled)
        {
            hasReceivedThirdFrame = YES;
        }
        if (!CMTIME_IS_INDEFINITE(frameTime))
        {
            if CMTIME_IS_INDEFINITE(firstFrameTime)
            {
                updatedMovieFrameOppositeStillImage = YES;
            }
        }
    }
    else
    {
        hasReceivedThirdFrame = YES;
        thirdFrameTime = frameTime;
        if (firstFrameCheckDisabled)
        {
            hasReceivedFirstFrame = YES;
        }
        if (secondFrameCheckDisabled)
        {
            hasReceivedSecondFrame = YES;
        }
        if (!CMTIME_IS_INDEFINITE(frameTime))
        {
            // NOTE(review): this branch compares against firstFrameTime, not
            // secondFrameTime — matches upstream GPUImage, but confirm intent.
            if CMTIME_IS_INDEFINITE(firstFrameTime)
            {
                updatedMovieFrameOppositeStillImage = YES;
            }
        }
    }
    // || (hasReceivedFirstFrame && secondFrameCheckDisabled) || (hasReceivedSecondFrame && firstFrameCheckDisabled)
    if ((hasReceivedFirstFrame && hasReceivedSecondFrame && hasReceivedThirdFrame) || updatedMovieFrameOppositeStillImage)
    {
        // Full-screen quad; texture coordinates handle the per-input rotation.
        static const GLfloat imageVertices[] = {
            -1.0f, -1.0f,
            1.0f, -1.0f,
            -1.0f,  1.0f,
            1.0f,  1.0f,
        };
        [self renderToTextureWithVertices:imageVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];
        [self informTargetsAboutNewFrameAtTime:frameTime];
        // Reset for the next round of frames.
        hasReceivedFirstFrame = NO;
        hasReceivedSecondFrame = NO;
        hasReceivedThirdFrame = NO;
    }
}
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTwoInputFilter.h
================================================
#import "GPUImageFilter.h"
extern NSString *const kGPUImageTwoInputTextureVertexShaderString;
// A filter that combines two input textures. The first source to attach is
// given input index 0, the second index 1 (see -nextAvailableTextureIndex).
@interface GPUImageTwoInputFilter : GPUImageFilter
{
    GPUImageFramebuffer *secondInputFramebuffer; // locked until the pass renders
    GLint filterSecondTextureCoordinateAttribute;
    GLint filterInputTextureUniform2;
    GPUImageRotationMode inputRotation2;
    CMTime firstFrameTime, secondFrameTime; // last timestamp received per input
    BOOL hasSetFirstTexture, hasReceivedFirstFrame, hasReceivedSecondFrame, firstFrameWasVideo, secondFrameWasVideo;
    BOOL firstFrameCheckDisabled, secondFrameCheckDisabled;
}
// Treat the corresponding input as always ready, so rendering does not wait
// for a new frame on that index (useful when one input is a still image).
- (void)disableFirstFrameCheck;
- (void)disableSecondFrameCheck;
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTwoInputFilter.m
================================================
#import "GPUImageTwoInputFilter.h"
// Pass-through vertex shader for two-input filters: forwards the position and
// both sets of texture coordinates unchanged to the fragment stage.
// (Comments cannot be placed inside SHADER_STRING — the preprocessor would
// drop them during stringization.)
NSString *const kGPUImageTwoInputTextureVertexShaderString = SHADER_STRING
(
 attribute vec4 position;
 attribute vec4 inputTextureCoordinate;
 attribute vec4 inputTextureCoordinate2;
 varying vec2 textureCoordinate;
 varying vec2 textureCoordinate2;
 void main()
 {
     gl_Position = position;
     textureCoordinate = inputTextureCoordinate.xy;
     textureCoordinate2 = inputTextureCoordinate2.xy;
 }
);
@implementation GPUImageTwoInputFilter
#pragma mark -
#pragma mark Initialization and teardown
// Convenience initializer: pairs the caller's fragment shader with the stock
// two-input vertex shader.
- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
{
    self = [self initWithVertexShaderFromString:kGPUImageTwoInputTextureVertexShaderString
                       fragmentShaderFromString:fragmentShaderString];
    return self;
}
// Designated initializer: builds the shader program via the superclass, then
// resolves the second input's attribute/uniform locations synchronously on
// the video processing queue.
- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;
{
    if (!(self = [super initWithVertexShaderFromString:vertexShaderString fragmentShaderFromString:fragmentShaderString]))
    {
        return nil;
    }
    inputRotation2 = kGPUImageNoRotation;
    // These BOOLs restate alloc's zero-fill; kept for explicitness.
    hasSetFirstTexture = NO;
    hasReceivedFirstFrame = NO;
    hasReceivedSecondFrame = NO;
    firstFrameWasVideo = NO;
    secondFrameWasVideo = NO;
    firstFrameCheckDisabled = NO;
    secondFrameCheckDisabled = NO;
    // Invalid timestamps mark "no frame received yet" on either input.
    firstFrameTime = kCMTimeInvalid;
    secondFrameTime = kCMTimeInvalid;
    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];
        filterSecondTextureCoordinateAttribute = [filterProgram attributeIndex:@"inputTextureCoordinate2"];
        filterInputTextureUniform2 = [filterProgram uniformIndex:@"inputImageTexture2"]; // This does assume a name of "inputImageTexture2" for second input texture in the fragment shader
        glEnableVertexAttribArray(filterSecondTextureCoordinateAttribute);
    });
    return self;
}
// Registers the extra vertex attribute (in addition to the superclass's)
// before the shader program links.
- (void)initializeAttributes;
{
    [super initializeAttributes];
    [filterProgram addAttribute:@"inputTextureCoordinate2"];
}
// Marks the first input as always satisfied: a frame on the second input
// alone will then trigger rendering (see -newFrameReadyAtTime:atIndex:).
- (void)disableFirstFrameCheck;
{
    firstFrameCheckDisabled = YES;
}
// Marks the second input as always satisfied: a frame on the first input
// alone will then trigger rendering (see -newFrameReadyAtTime:atIndex:).
- (void)disableSecondFrameCheck;
{
    secondFrameCheckDisabled = YES;
}
#pragma mark -
#pragma mark Rendering
// Draws both locked input framebuffers through filterProgram into a freshly
// fetched output framebuffer. Inputs 1/2 are bound to texture units 2/3; the
// second coordinate attribute uses the second input's rotation mode.
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
    if (self.preventRendering)
    {
        // Rendering is suppressed, but the input locks must still be balanced.
        [firstInputFramebuffer unlock];
        [secondInputFramebuffer unlock];
        return;
    }
    [GPUImageContext setActiveShaderProgram:filterProgram];
    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];
    if (usingNextFrameForImageCapture)
    {
        // Extra lock keeps the output alive until the captured image is read back.
        [outputFramebuffer lock];
    }
    [self setUniformsForProgramAtIndex:0];
    glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
    glClear(GL_COLOR_BUFFER_BIT);
    // Bind the two inputs to texture units 2 and 3.
    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);
    glUniform1i(filterInputTextureUniform, 2);
    glActiveTexture(GL_TEXTURE3);
    glBindTexture(GL_TEXTURE_2D, [secondInputFramebuffer texture]);
    glUniform1i(filterInputTextureUniform2, 3);
    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
    glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
    glVertexAttribPointer(filterSecondTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation2]);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    [firstInputFramebuffer unlock];
    [secondInputFramebuffer unlock];
    if (usingNextFrameForImageCapture)
    {
        // Wake any thread blocked in an image-capture request.
        dispatch_semaphore_signal(imageCaptureSemaphore);
    }
}
#pragma mark -
#pragma mark GPUImageInput
// Reports which input slot the next upstream source should attach to:
// slot 0 until the first texture has been supplied, then slot 1.
- (NSInteger)nextAvailableTextureIndex;
{
    return hasSetFirstTexture ? 1 : 0;
}
// Stores and locks the framebuffer arriving for the given input slot.
// Slot 0 also records that its texture has been provided, which drives
// -nextAvailableTextureIndex.
- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
{
    if (textureIndex != 0)
    {
        secondInputFramebuffer = newInputFramebuffer;
        [secondInputFramebuffer lock];
        return;
    }
    firstInputFramebuffer = newInputFramebuffer;
    hasSetFirstTexture = YES;
    [firstInputFramebuffer lock];
}
// Only the first input's size is forwarded to the superclass (it determines
// the output size); a zero size clears the "first texture set" flag.
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
    if (textureIndex != 0)
    {
        return;
    }
    [super setInputSize:newSize atIndex:textureIndex];
    if (CGSizeEqualToSize(newSize, CGSizeZero))
    {
        hasSetFirstTexture = NO;
    }
}
// Records the rotation mode to apply to each of the two inputs.
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    if (textureIndex != 0)
    {
        inputRotation2 = newInputRotation;
        return;
    }
    inputRotation = newInputRotation;
}
// Returns the size after applying the rotation configured for the given
// input index, swapping width and height for 90-degree rotations.
- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex;
{
    GPUImageRotationMode appliedRotation = (textureIndex == 0) ? inputRotation : inputRotation2;
    if (!GPUImageRotationSwapsWidthAndHeight(appliedRotation))
    {
        return sizeToRotate;
    }
    return CGSizeMake(sizeToRotate.height, sizeToRotate.width);
}
// Called by each upstream source when a new frame is ready on its input.
// A render is triggered only once both inputs have reported (or a check is
// disabled), or when a movie frame arrives opposite a still image
// (indefinite timestamp), so video keeps updating over a static input.
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
    // You can set up infinite update loops, so this helps to short circuit them
    if (hasReceivedFirstFrame && hasReceivedSecondFrame)
    {
        return;
    }
    BOOL updatedMovieFrameOppositeStillImage = NO;
    if (textureIndex == 0)
    {
        hasReceivedFirstFrame = YES;
        firstFrameTime = frameTime;
        // A disabled check counts the other input as already received.
        if (secondFrameCheckDisabled)
        {
            hasReceivedSecondFrame = YES;
        }
        if (!CMTIME_IS_INDEFINITE(frameTime))
        {
            // Note: the missing outer parentheses compile because the
            // CMTIME_IS_INDEFINITE macro expands to a parenthesized expression.
            if CMTIME_IS_INDEFINITE(secondFrameTime)
            {
                updatedMovieFrameOppositeStillImage = YES;
            }
        }
    }
    else
    {
        hasReceivedSecondFrame = YES;
        secondFrameTime = frameTime;
        if (firstFrameCheckDisabled)
        {
            hasReceivedFirstFrame = YES;
        }
        if (!CMTIME_IS_INDEFINITE(frameTime))
        {
            if CMTIME_IS_INDEFINITE(firstFrameTime)
            {
                updatedMovieFrameOppositeStillImage = YES;
            }
        }
    }
    // || (hasReceivedFirstFrame && secondFrameCheckDisabled) || (hasReceivedSecondFrame && firstFrameCheckDisabled)
    if ((hasReceivedFirstFrame && hasReceivedSecondFrame) || updatedMovieFrameOppositeStillImage)
    {
        CMTime passOnFrameTime = (!CMTIME_IS_INDEFINITE(firstFrameTime)) ? firstFrameTime : secondFrameTime;
        [super newFrameReadyAtTime:passOnFrameTime atIndex:0]; // Bugfix when trying to record: always use time from first input (unless indefinite, in which case use the second input)
        // Reset for the next pair of frames.
        hasReceivedFirstFrame = NO;
        hasReceivedSecondFrame = NO;
    }
}
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTwoPassFilter.h
================================================
#import "GPUImageFilter.h"
// A filter that renders in two passes: the inherited filterProgram performs
// the first pass, and secondFilterProgram consumes its result for the second.
@interface GPUImageTwoPassFilter : GPUImageFilter
{
    GPUImageFramebuffer *secondOutputFramebuffer; // final result, returned by -framebufferForOutput
    GLProgram *secondFilterProgram;
    GLint secondFilterPositionAttribute, secondFilterTextureCoordinateAttribute;
    GLint secondFilterInputTextureUniform, secondFilterInputTextureUniform2;
    NSMutableDictionary *secondProgramUniformStateRestorationBlocks; // uniform-restore blocks keyed by uniform location, for the second program
}
// Initialization and teardown
- (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;
- (id)initWithFirstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;
// Registers attribute names on the second-stage program before it links.
- (void)initializeSecondaryAttributes;
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTwoPassFilter.m
================================================
#import "GPUImageTwoPassFilter.h"
@implementation GPUImageTwoPassFilter
#pragma mark -
#pragma mark Initialization and teardown
// Designated initializer: the superclass builds the first-stage program from
// the first pair of shaders; this method then compiles/links the second-stage
// program and caches its attribute/uniform locations, all synchronously on
// the video processing queue.
- (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;
{
    if (!(self = [super initWithVertexShaderFromString:firstStageVertexShaderString fragmentShaderFromString:firstStageFragmentShaderString]))
    {
        return nil;
    }
    secondProgramUniformStateRestorationBlocks = [NSMutableDictionary dictionaryWithCapacity:10];
    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];
        // The context may return a cached, already-linked program for this shader pair.
        secondFilterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:secondStageVertexShaderString fragmentShaderString:secondStageFragmentShaderString];
        if (!secondFilterProgram.initialized)
        {
            // Attributes must be registered before linking.
            [self initializeSecondaryAttributes];
            if (![secondFilterProgram link])
            {
                // Dump all three logs to aid shader debugging, then assert.
                NSString *progLog = [secondFilterProgram programLog];
                NSLog(@"Program link log: %@", progLog);
                NSString *fragLog = [secondFilterProgram fragmentShaderLog];
                NSLog(@"Fragment shader compile log: %@", fragLog);
                NSString *vertLog = [secondFilterProgram vertexShaderLog];
                NSLog(@"Vertex shader compile log: %@", vertLog);
                secondFilterProgram = nil;
                NSAssert(NO, @"Filter shader link failed");
            }
        }
        secondFilterPositionAttribute = [secondFilterProgram attributeIndex:@"position"];
        secondFilterTextureCoordinateAttribute = [secondFilterProgram attributeIndex:@"inputTextureCoordinate"];
        secondFilterInputTextureUniform = [secondFilterProgram uniformIndex:@"inputImageTexture"]; // This does assume a name of "inputImageTexture" for the fragment shader
        secondFilterInputTextureUniform2 = [secondFilterProgram uniformIndex:@"inputImageTexture2"]; // This does assume a name of "inputImageTexture2" for second input texture in the fragment shader
        [GPUImageContext setActiveShaderProgram:secondFilterProgram];
        glEnableVertexAttribArray(secondFilterPositionAttribute);
        glEnableVertexAttribArray(secondFilterTextureCoordinateAttribute);
    });
    return self;
}
// Convenience initializer: uses the stock vertex shader for both stages.
- (id)initWithFirstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;
{
    self = [self initWithFirstStageVertexShaderFromString:kGPUImageVertexShaderString
                       firstStageFragmentShaderFromString:firstStageFragmentShaderString
                        secondStageVertexShaderFromString:kGPUImageVertexShaderString
                      secondStageFragmentShaderFromString:secondStageFragmentShaderString];
    return self;
}
// Registers the attribute names used by the second-stage program; called
// before that program links.
- (void)initializeSecondaryAttributes;
{
    [secondFilterProgram addAttribute:@"position"];
    [secondFilterProgram addAttribute:@"inputTextureCoordinate"];
}
#pragma mark -
#pragma mark Managing targets
// The two-pass result lives in the second-stage framebuffer, not the
// superclass's outputFramebuffer.
- (GPUImageFramebuffer *)framebufferForOutput;
{
    return secondOutputFramebuffer;
}
// Drops the reference to the second-stage framebuffer so it can return to
// the shared framebuffer cache.
- (void)removeOutputFramebuffer;
{
    secondOutputFramebuffer = nil;
}
#pragma mark -
#pragma mark Rendering
// Runs both passes back to back: pass 1 renders the input through
// filterProgram into outputFramebuffer; pass 2 renders that intermediate
// result through secondFilterProgram into secondOutputFramebuffer, which is
// what -framebufferForOutput exposes downstream.
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
    if (self.preventRendering)
    {
        // Still balance the lock taken when the input framebuffer was set.
        [firstInputFramebuffer unlock];
        return;
    }
    // --- First pass ---
    [GPUImageContext setActiveShaderProgram:filterProgram];
    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];
    [self setUniformsForProgramAtIndex:0];
    glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
    glClear(GL_COLOR_BUFFER_BIT);
    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);
    glUniform1i(filterInputTextureUniform, 2);
    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
    glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    [firstInputFramebuffer unlock];
    firstInputFramebuffer = nil;
    // This assumes that any two-pass filter that says it desires monochrome input is using the first pass for a luminance conversion, which can be dropped
    //    if (!currentlyReceivingMonochromeInput)
    //    {
    // Run the first stage of the two-pass filter
    //        [super renderToTextureWithVertices:vertices textureCoordinates:textureCoordinates];
    //    }
    // Run the second stage of the two-pass filter
    secondOutputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
    [secondOutputFramebuffer activateFramebuffer];
    [GPUImageContext setActiveShaderProgram:secondFilterProgram];
    if (usingNextFrameForImageCapture)
    {
        // Extra lock keeps the final output alive until the image is read back.
        [secondOutputFramebuffer lock];
    }
    [self setUniformsForProgramAtIndex:1];
    // The intermediate result feeds the second pass on texture unit 3,
    // with no additional rotation applied.
    glActiveTexture(GL_TEXTURE3);
    glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
    glVertexAttribPointer(secondFilterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:kGPUImageNoRotation]);
    // TODO: Re-enable this monochrome optimization
    //    if (!currentlyReceivingMonochromeInput)
    //    {
    //        glActiveTexture(GL_TEXTURE3);
    //        glBindTexture(GL_TEXTURE_2D, outputTexture);
    //        glVertexAttribPointer(secondFilterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:kGPUImageNoRotation]);
    //    }
    //    else
    //    {
    //        glActiveTexture(GL_TEXTURE3);
    //        glBindTexture(GL_TEXTURE_2D, sourceTexture);
    //        glVertexAttribPointer(secondFilterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
    //    }
    glUniform1i(secondFilterInputTextureUniform, 3);
    glVertexAttribPointer(secondFilterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    // The intermediate framebuffer is no longer needed.
    [outputFramebuffer unlock];
    outputFramebuffer = nil;
    if (usingNextFrameForImageCapture)
    {
        dispatch_semaphore_signal(imageCaptureSemaphore);
    }
}
// Remembers a block that restores a uniform's value (keyed by its location,
// stored against whichever program it belongs to), then runs it immediately.
- (void)setAndExecuteUniformStateCallbackAtIndex:(GLint)uniform forProgram:(GLProgram *)shaderProgram toBlock:(dispatch_block_t)uniformStateBlock;
{
    // TODO: Deal with the fact that two-pass filters may have the same shader program identifier
    NSMutableDictionary *restorationBlocks = (shaderProgram == filterProgram)
        ? uniformStateRestorationBlocks
        : secondProgramUniformStateRestorationBlocks;
    restorationBlocks[@(uniform)] = [uniformStateBlock copy];
    uniformStateBlock();
}
// Replays every stored uniform-restoration block for the given pass
// (0 = first-stage program, anything else = second-stage program).
- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex;
{
    NSMutableDictionary *blocksToRun = (programIndex == 0)
        ? uniformStateRestorationBlocks
        : secondProgramUniformStateRestorationBlocks;
    [blocksToRun enumerateKeysAndObjectsUsingBlock:^(id key, id obj, BOOL *stop){
        ((dispatch_block_t)obj)();
    }];
}
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTwoPassTextureSamplingFilter.h
================================================
#import "GPUImageTwoPassFilter.h"
// A two-pass filter whose passes sample neighboring texels: the first pass
// steps vertically, the second horizontally (as used by separable kernels).
@interface GPUImageTwoPassTextureSamplingFilter : GPUImageTwoPassFilter
{
    GLint verticalPassTexelWidthOffsetUniform, verticalPassTexelHeightOffsetUniform, horizontalPassTexelWidthOffsetUniform, horizontalPassTexelHeightOffsetUniform;
    GLfloat verticalPassTexelWidthOffset, verticalPassTexelHeightOffset, horizontalPassTexelWidthOffset, horizontalPassTexelHeightOffset;
    CGFloat _verticalTexelSpacing, _horizontalTexelSpacing;
}
// This sets the spacing between texels (in pixels) used when sampling in the first (vertical) and second (horizontal) passes. By default, this is 1.0.
@property(readwrite, nonatomic) CGFloat verticalTexelSpacing, horizontalTexelSpacing;
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTwoPassTextureSamplingFilter.m
================================================
#import "GPUImageTwoPassTextureSamplingFilter.h"
@implementation GPUImageTwoPassTextureSamplingFilter
@synthesize verticalTexelSpacing = _verticalTexelSpacing;
@synthesize horizontalTexelSpacing = _horizontalTexelSpacing;
#pragma mark -
#pragma mark Initialization and teardown
// Designated initializer: the superclass sets up both shader programs; this
// method caches the texel-offset uniform locations for each pass, then seeds
// both spacings at 1.0 (the setters recompute the offsets).
- (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString
{
    if (!(self = [super initWithFirstStageVertexShaderFromString:firstStageVertexShaderString firstStageFragmentShaderFromString:firstStageFragmentShaderString secondStageVertexShaderFromString:secondStageVertexShaderString secondStageFragmentShaderFromString:secondStageFragmentShaderString]))
    {
        return nil;
    }
    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];
        // Both shaders use the same uniform names; the pass determines which program is queried.
        verticalPassTexelWidthOffsetUniform = [filterProgram uniformIndex:@"texelWidthOffset"];
        verticalPassTexelHeightOffsetUniform = [filterProgram uniformIndex:@"texelHeightOffset"];
        horizontalPassTexelWidthOffsetUniform = [secondFilterProgram uniformIndex:@"texelWidthOffset"];
        horizontalPassTexelHeightOffsetUniform = [secondFilterProgram uniformIndex:@"texelHeightOffset"];
    });
    self.verticalTexelSpacing = 1.0;
    self.horizontalTexelSpacing = 1.0;
    return self;
}
// Uploads the texel offsets for the requested pass after the superclass has
// replayed its stored uniform state (0 = vertical pass, else horizontal).
- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex;
{
    [super setUniformsForProgramAtIndex:programIndex];
    BOOL isVerticalPass = (programIndex == 0);
    GLint widthUniform = isVerticalPass ? verticalPassTexelWidthOffsetUniform : horizontalPassTexelWidthOffsetUniform;
    GLint heightUniform = isVerticalPass ? verticalPassTexelHeightOffsetUniform : horizontalPassTexelHeightOffsetUniform;
    glUniform1f(widthUniform, isVerticalPass ? verticalPassTexelWidthOffset : horizontalPassTexelWidthOffset);
    glUniform1f(heightUniform, isVerticalPass ? verticalPassTexelHeightOffset : horizontalPassTexelHeightOffset);
}
// Recomputes the per-pass texel step sizes for the given framebuffer size.
- (void)setupFilterForSize:(CGSize)filterFrameSize;
{
    runSynchronouslyOnVideoProcessingQueue(^{
        // The first pass through the framebuffer may rotate the inbound image, so need to account for that by changing up the kernel ordering for that pass
        BOOL firstPassIsRotated = GPUImageRotationSwapsWidthAndHeight(inputRotation);
        GLfloat verticalStep = _verticalTexelSpacing / filterFrameSize.height;
        verticalPassTexelWidthOffset = firstPassIsRotated ? verticalStep : 0.0;
        verticalPassTexelHeightOffset = firstPassIsRotated ? 0.0 : verticalStep;
        horizontalPassTexelWidthOffset = _horizontalTexelSpacing / filterFrameSize.width;
        horizontalPassTexelHeightOffset = 0.0;
    });
}
#pragma mark -
#pragma mark Accessors
// Updates the vertical sampling spacing and recomputes the texel offsets for
// the current framebuffer size.
- (void)setVerticalTexelSpacing:(CGFloat)newValue;
{
    _verticalTexelSpacing = newValue;
    [self setupFilterForSize:[self sizeOfFBO]];
}
// Updates the horizontal sampling spacing and recomputes the texel offsets
// for the current framebuffer size.
- (void)setHorizontalTexelSpacing:(CGFloat)newValue;
{
    _horizontalTexelSpacing = newValue;
    [self setupFilterForSize:[self sizeOfFBO]];
}
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageVideoCamera.h
================================================
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>
#import "GPUImageContext.h"
#import "GPUImageOutput.h"
#import "GPUImageColorConversion.h"
// Optionally override the YUV-to-RGB conversion matrices used when decoding camera frames.
void setColorConversion601( GLfloat conversionMatrix[9] );
void setColorConversion601FullRange( GLfloat conversionMatrix[9] );
void setColorConversion709( GLfloat conversionMatrix[9] );
// Delegate protocol for receiving raw sample buffers (e.g. for face detection).
@protocol GPUImageVideoCameraDelegate <NSObject>
@optional
// Called with each captured video sample buffer before GPU processing.
- (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer;
@end
/**
 A GPUImageOutput that provides frames from either camera
*/
@interface GPUImageVideoCamera : GPUImageOutput <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate>
{
    NSUInteger numberOfFramesCaptured;    // benchmarking counters
    CGFloat totalFrameTimeDuringCapture;
    AVCaptureSession *_captureSession;
    AVCaptureDevice *_inputCamera;
    AVCaptureDevice *_microphone;
    AVCaptureDeviceInput *videoInput;
    AVCaptureVideoDataOutput *videoOutput;
    BOOL capturePaused;
    GPUImageRotationMode outputRotation, internalRotation;
    dispatch_semaphore_t frameRenderingSemaphore; // throttles frame processing to one at a time
    BOOL captureAsYUV;
    GLuint luminanceTexture, chrominanceTexture;  // planes of the YUV camera frame
    __unsafe_unretained id<GPUImageVideoCameraDelegate> _delegate;
}
/// Whether or not the underlying AVCaptureSession is running
@property(readonly, nonatomic) BOOL isRunning;
/// The AVCaptureSession used to capture from the camera
@property(readonly, retain, nonatomic) AVCaptureSession *captureSession;
/// This enables the capture session preset to be changed on the fly
@property (readwrite, nonatomic, copy) NSString *captureSessionPreset;
/// This sets the frame rate of the camera (iOS 5 and above only)
/**
 Setting this to 0 or below will set the frame rate back to the default setting for a particular preset.
 */
@property (readwrite) int32_t frameRate;
/// Easy way to tell which cameras are present on device
@property (readonly, getter = isFrontFacingCameraPresent) BOOL frontFacingCameraPresent;
@property (readonly, getter = isBackFacingCameraPresent) BOOL backFacingCameraPresent;
/// This enables the benchmarking mode, which logs out instantaneous and average frame times to the console
@property(readwrite, nonatomic) BOOL runBenchmark;
/// Use this property to manage camera settings. Focus point, exposure point, etc.
@property(readonly) AVCaptureDevice *inputCamera;
/// This determines the rotation applied to the output image, based on the source material
@property(readwrite, nonatomic) UIInterfaceOrientation outputImageOrientation;
/// These properties determine whether or not the two camera orientations should be mirrored. By default, both are NO.
@property(readwrite, nonatomic) BOOL horizontallyMirrorFrontFacingCamera, horizontallyMirrorRearFacingCamera;
@property(nonatomic, assign) id<GPUImageVideoCameraDelegate> delegate;
/// @name Initialization and teardown
/** Begin a capture session
 See AVCaptureSession for acceptable values
 @param sessionPreset Session preset to use
 @param cameraPosition Camera to capture from
 */
- (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition;
/** Add audio capture to the session. Adding inputs and outputs freezes the capture session momentarily, so you
 can use this method to add the audio inputs and outputs early, if you're going to set the audioEncodingTarget
 later. Returns YES if the audio inputs and outputs were added, or NO if they had already been added.
 */
- (BOOL)addAudioInputsAndOutputs;
/** Remove the audio capture inputs and outputs from this session. Returns YES if the audio inputs and outputs
 were removed, or NO if they hadn't already been added.
 */
- (BOOL)removeAudioInputsAndOutputs;
/** Tear down the capture session
 */
- (void)removeInputsAndOutputs;
/// @name Manage the camera video stream
/** Start camera capturing
 */
- (void)startCameraCapture;
/** Stop camera capturing
 */
- (void)stopCameraCapture;
/** Pause camera capturing
 */
- (void)pauseCameraCapture;
/** Resume camera capturing
 */
- (void)resumeCameraCapture;
/** Process a video sample
 @param sampleBuffer Buffer to process
 */
- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;
/** Process an audio sample
 @param sampleBuffer Buffer to process
 */
- (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;
/** Get the position (front, rear) of the source camera
 */
- (AVCaptureDevicePosition)cameraPosition;
/** Get the AVCaptureConnection of the source camera
 */
- (AVCaptureConnection *)videoCaptureConnection;
/** This flips between the front and rear cameras
 */
- (void)rotateCamera;
/// @name Benchmarking
/** When benchmarking is enabled, this will keep a running average of the time from uploading, processing, and final recording or display
 */
- (CGFloat)averageFrameDurationDuringCapture;
- (void)resetBenchmarkAverage;
+ (BOOL)isBackFacingCameraPresent;
+ (BOOL)isFrontFacingCameraPresent;
@end
================================================
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageVideoCamera.m
================================================
#import "GPUImageVideoCamera.h"
#import "GPUImageMovieWriter.h"
#import "GPUImageFilter.h"
// Overrides the BT.601 video-range YUV->RGB conversion matrix.
// NOTE(review): the pointer is stored, not copied — the caller's 9-element
// array must outlive all subsequent conversions.
void setColorConversion601( GLfloat conversionMatrix[9] )
{
    kColorConversion601 = conversionMatrix;
}
// Overrides the BT.601 full-range YUV->RGB conversion matrix.
// NOTE(review): the pointer is stored, not copied — the caller's 9-element
// array must outlive all subsequent conversions.
void setColorConversion601FullRange( GLfloat conversionMatrix[9] )
{
    kColorConversion601FullRange = conversionMatrix;
}
// Overrides the BT.709 YUV->RGB conversion matrix.
// NOTE(review): the pointer is stored, not copied — the caller's 9-element
// array must outlive all subsequent conversions.
void setColorConversion709( GLfloat conversionMatrix[9] )
{
    kColorConversion709 = conversionMatrix;
}
#pragma mark -
#pragma mark Private methods and instance variables
// Private state for audio capture and on-GPU YUV->RGB conversion.
@interface GPUImageVideoCamera ()
{
    AVCaptureDeviceInput *audioInput;
    AVCaptureAudioDataOutput *audioOutput;
    NSDate *startingCaptureTime;
    dispatch_queue_t cameraProcessingQueue, audioProcessingQueue;
    GLProgram *yuvConversionProgram; // converts biplanar YUV camera frames to RGB
    GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute;
    GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform;
    GLint yuvConversionMatrixUniform;
    const GLfloat *_preferredConversion; // points at one of the kColorConversion* matrices
    BOOL isFullYUVRange;
    int imageBufferWidth, imageBufferHeight;
    BOOL addedAudioInputsDueToEncodingTarget;
}
- (void)updateOrientationSendToTargets;
- (void)convertYUVToRGBOutput;
@end
@implementation GPUImageVideoCamera
@synthesize captureSessionPreset = _captureSessionPreset;
@synthesize captureSession = _captureSession;
@synthesize inputCamera = _inputCamera;
@synthesize runBenchmark = _runBenchmark;
@synthesize outputImageOrientation = _outputImageOrientation;
@synthesize delegate = _delegate;
@synthesize horizontallyMirrorFrontFacingCamera = _horizontallyMirrorFrontFacingCamera, horizontallyMirrorRearFacingCamera = _horizontallyMirrorRearFacingCamera;
@synthesize frameRate = _frameRate;
#pragma mark -
#pragma mark Initialization and teardown
// Default initializer: captures at the 640x480 preset from the rear camera.
// (Return type tightened from `id` to `instancetype` per Cocoa convention;
// this is source- and binary-compatible for all callers.)
- (instancetype)init;
{
    if (!(self = [self initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack]))
    {
        return nil;
    }
    return self;
}
/// Designated initializer.
///
/// Builds the entire capture pipeline: locates the capture device at the
/// requested position, creates an AVCaptureSession, attaches a video data
/// output configured for YUV (preferred) or BGRA pixel buffers, and — when
/// capturing as YUV — compiles the YUV->RGB conversion shader program on the
/// shared image-processing GL context.
///
/// @param sessionPreset  An AVCaptureSessionPreset* constant controlling capture resolution.
/// @param cameraPosition AVCaptureDevicePositionBack or ...Front.
/// @return Initialized camera, or nil if no device matches `cameraPosition`
///         or the video output cannot be added to the session.
- (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition;
{
if (!(self = [super init]))
{
return nil;
}
// Sample-buffer callbacks are delivered on global concurrent queues:
// video at high priority, audio at low priority.
cameraProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH,0);
audioProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW,0);
// Semaphore with count 1: allows at most one frame to be in-flight through
// the render pipeline; later frames are dropped while it is held.
frameRenderingSemaphore = dispatch_semaphore_create(1);
_frameRate = 0; // This will not set frame rate unless this value gets set to 1 or above
_runBenchmark = NO;
capturePaused = NO;
outputRotation = kGPUImageNoRotation;
internalRotation = kGPUImageNoRotation;
captureAsYUV = YES;
// Default color conversion matrix; may be replaced per-frame from the
// pixel buffer's attachments elsewhere in this class.
_preferredConversion = kColorConversion709;
// Grab the back-facing or front-facing camera
_inputCamera = nil;
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices)
{
if ([device position] == cameraPosition)
{
_inputCamera = device;
}
}
// No device at the requested position (e.g. Simulator): fail initialization.
if (!_inputCamera) {
return nil;
}
// Create the capture session
_captureSession = [[AVCaptureSession alloc] init];
// All input/output/preset changes below are batched until commitConfiguration.
[_captureSession beginConfiguration];
// Add the video input
// NOTE(review): `error` is not inspected and a failed canAddInput: is
// silently ignored — capture would then produce no frames. Confirm intended.
NSError *error = nil;
videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:_inputCamera error:&error];
if ([_captureSession canAddInput:videoInput])
{
[_captureSession addInput:videoInput];
}
// Add the video frame output
videoOutput = [[AVCaptureVideoDataOutput alloc] init];
[videoOutput setAlwaysDiscardsLateVideoFrames:NO];
// if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])
{
// Prefer full-range biplanar YUV when the hardware offers it; fall back
// to video-range. The choice selects the matching fragment shader below.
BOOL supportsFullYUVRange = NO;
NSArray *supportedPixelFormats = videoOutput.availableVideoCVPixelFormatTypes;
for (NSNumber *currentPixelFormat in supportedPixelFormats)
{
if ([currentPixelFormat intValue] == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
{
supportsFullYUVRange = YES;
}
}
if (supportsFullYUVRange)
{
[videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
isFullYUVRange = YES;
}
else
{
[videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
isFullYUVRange = NO;
}
}
else
{
// No fast texture upload (or YUV disabled): capture BGRA and skip the
// YUV conversion shader entirely.
[videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
}
// GL program creation must happen on the shared video-processing queue/context.
runSynchronouslyOnVideoProcessingQueue(^{
if (captureAsYUV)
{
[GPUImageContext useImageProcessingContext];
// if ([GPUImageContext deviceSupportsRedTextures])
// {
// yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForRGFragmentShaderString];
// }
// else
// {
// Shader must match the negotiated pixel format's range.
if (isFullYUVRange)
{
yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVFullRangeConversionForLAFragmentShaderString];
}
else
{
yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForLAFragmentShaderString];
}
// }
// Program may be cached and already linked; only link the first time.
if (!yuvConversionProgram.initialized)
{
[yuvConversionProgram addAttribute:@"position"];
[yuvConversionProgram addAttribute:@"inputTextureCoordinate"];
if (![yuvConversionProgram link])
{
NSString *progLog = [yuvConversionProgram programLog];
NSLog(@"Program link log: %@", progLog);
NSString *fragLog = [yuvConversionProgram fragmentShaderLog];
NSLog(@"Fragment shader compile log: %@", fragLog);
NSString *vertLog = [yuvConversionProgram vertexShaderLog];
NSLog(@"Vertex shader compile log: %@", vertLog);
yuvConversionProgram = nil;
NSAssert(NO, @"Filter shader link failed");
}
}
// Cache attribute/uniform locations for the per-frame conversion pass.
yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@"position"];
yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@"inputTextureCoordinate"];
yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@"luminanceTexture"];
yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@"chrominanceTexture"];
yuvConversionMatrixUniform = [yuvConversionProgram uniformIndex:@"colorConversionMatrix"];
[GPUImageContext setActiveShaderProgram:yuvConversionProgram];
glEnableVertexAttribArray(yuvConversionPositionAttribute);
glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute);
}
});
[videoOutput setSampleBufferDelegate:self queue:cameraProcessingQueue];
if ([_captureSession canAddOutput:videoOutput])
{
[_captureSession addOutput:videoOutput];
}
else
{
// NOTE(review): returns nil without calling commitConfiguration, leaving
// the session mid-configuration — confirm this is acceptable on failure.
NSLog(@"Couldn't add video output");
return nil;
}
_captureSessionPreset = sessionPreset;
[_captureSession setSessionPreset:_captureSessionPreset];
// This will let you get 60 FPS video from the 720p preset on an iPhone 4S, but only that device and that preset
// AVCaptureConnection *conn = [videoOutput connectionWithMediaType:AVMediaTypeVideo];
//
// if (conn.supportsVideoMinFrameDuration)
// conn.videoMinFrameDuration = CMTimeMake(1,60);
// if (conn.supportsVideoMaxFrameDuration)
// conn.videoMaxFrameDuration = CMTimeMake(1,60);
[_captureSession commitConfiguration];
return self;
}
/// GPUImageOutput override: returns the framebuffer this camera most recently
/// rendered into, so downstream targets can read from it.
- (GPUImageFramebuffer *)framebufferForOutput;
{
GPUImageFramebuffer *currentFramebuffer = outputFramebuffer;
return currentFramebuffer;
}
- (void)dealloc
{
[self stopCameraCapture];
[videoOutput setSampleBufferDelegate:nil queue:dispatch_get_main_queue()];
[audioOutput setSampleBufferDelegate:nil queue:dispatch_get_main_queue()];
[self removeInputsAndOutputs];
// ARC forbids explicit message send of 'release'; since iOS 6 even for dispatch_release() calls: stripping it out in that case is required.
#if !OS_OBJECT_USE_OBJC
if (frameRenderingSemaphore != NULL)
{
gitextract_wqoehnzj/ ├── GPURenderKit/ │ ├── GPURenderKit/ │ │ ├── GPUImage/ │ │ │ ├── BaseClass/ │ │ │ │ ├── GLProgram.h │ │ │ │ ├── GLProgram.m │ │ │ │ ├── GPUImageBuffer.h │ │ │ │ ├── GPUImageBuffer.m │ │ │ │ ├── GPUImageFilter.h │ │ │ │ ├── GPUImageFilter.m │ │ │ │ ├── GPUImageFilterGroup.h │ │ │ │ ├── GPUImageFilterGroup.m │ │ │ │ ├── GPUImageFilterPipeline.h │ │ │ │ ├── GPUImageFilterPipeline.m │ │ │ │ ├── GPUImageFourInputFilter.h │ │ │ │ ├── GPUImageFourInputFilter.m │ │ │ │ ├── GPUImageFramebuffer.h │ │ │ │ ├── GPUImageFramebuffer.m │ │ │ │ ├── GPUImageFramebufferCache.h │ │ │ │ ├── GPUImageFramebufferCache.m │ │ │ │ ├── GPUImageMovie.h │ │ │ │ ├── GPUImageMovie.m │ │ │ │ ├── GPUImageMovieComposition.h │ │ │ │ ├── GPUImageMovieComposition.m │ │ │ │ ├── GPUImageOutput.h │ │ │ │ ├── GPUImageOutput.m │ │ │ │ ├── GPUImageTextureInput.h │ │ │ │ ├── GPUImageTextureInput.m │ │ │ │ ├── GPUImageTextureOutput.h │ │ │ │ ├── GPUImageTextureOutput.m │ │ │ │ ├── GPUImageThreeInputFilter.h │ │ │ │ ├── GPUImageThreeInputFilter.m │ │ │ │ ├── GPUImageTwoInputFilter.h │ │ │ │ ├── GPUImageTwoInputFilter.m │ │ │ │ ├── GPUImageTwoPassFilter.h │ │ │ │ ├── GPUImageTwoPassFilter.m │ │ │ │ ├── GPUImageTwoPassTextureSamplingFilter.h │ │ │ │ ├── GPUImageTwoPassTextureSamplingFilter.m │ │ │ │ ├── GPUImageVideoCamera.h │ │ │ │ └── GPUImageVideoCamera.m │ │ │ ├── Filters/ │ │ │ │ ├── GPUImage3x3ConvolutionFilter.h │ │ │ │ ├── GPUImage3x3ConvolutionFilter.m │ │ │ │ ├── GPUImage3x3TextureSamplingFilter.h │ │ │ │ ├── GPUImage3x3TextureSamplingFilter.m │ │ │ │ ├── GPUImageAdaptiveThresholdFilter.h │ │ │ │ ├── GPUImageAdaptiveThresholdFilter.m │ │ │ │ ├── GPUImageAddBlendFilter.h │ │ │ │ ├── GPUImageAddBlendFilter.m │ │ │ │ ├── GPUImageAlphaBlendFilter.h │ │ │ │ ├── GPUImageAlphaBlendFilter.m │ │ │ │ ├── GPUImageAmatorkaFilter.h │ │ │ │ ├── GPUImageAmatorkaFilter.m │ │ │ │ ├── GPUImageAverageColor.h │ │ │ │ ├── GPUImageAverageColor.m │ │ │ │ ├── GPUImageAverageLuminanceThresholdFilter.h │ │ 
│ │ ├── GPUImageAverageLuminanceThresholdFilter.m │ │ │ │ ├── GPUImageBilateralFilter.h │ │ │ │ ├── GPUImageBilateralFilter.m │ │ │ │ ├── GPUImageBoxBlurFilter.h │ │ │ │ ├── GPUImageBoxBlurFilter.m │ │ │ │ ├── GPUImageBrightnessFilter.h │ │ │ │ ├── GPUImageBrightnessFilter.m │ │ │ │ ├── GPUImageBulgeDistortionFilter.h │ │ │ │ ├── GPUImageBulgeDistortionFilter.m │ │ │ │ ├── GPUImageCGAColorspaceFilter.h │ │ │ │ ├── GPUImageCGAColorspaceFilter.m │ │ │ │ ├── GPUImageCannyEdgeDetectionFilter.h │ │ │ │ ├── GPUImageCannyEdgeDetectionFilter.m │ │ │ │ ├── GPUImageChromaKeyBlendFilter.h │ │ │ │ ├── GPUImageChromaKeyBlendFilter.m │ │ │ │ ├── GPUImageChromaKeyFilter.h │ │ │ │ ├── GPUImageChromaKeyFilter.m │ │ │ │ ├── GPUImageClosingFilter.h │ │ │ │ ├── GPUImageClosingFilter.m │ │ │ │ ├── GPUImageColorBlendFilter.h │ │ │ │ ├── GPUImageColorBlendFilter.m │ │ │ │ ├── GPUImageColorBurnBlendFilter.h │ │ │ │ ├── GPUImageColorBurnBlendFilter.m │ │ │ │ ├── GPUImageColorConversion.h │ │ │ │ ├── GPUImageColorConversion.m │ │ │ │ ├── GPUImageColorDodgeBlendFilter.h │ │ │ │ ├── GPUImageColorDodgeBlendFilter.m │ │ │ │ ├── GPUImageColorInvertFilter.h │ │ │ │ ├── GPUImageColorInvertFilter.m │ │ │ │ ├── GPUImageColorLocalBinaryPatternFilter.h │ │ │ │ ├── GPUImageColorLocalBinaryPatternFilter.m │ │ │ │ ├── GPUImageColorMatrixFilter.h │ │ │ │ ├── GPUImageColorMatrixFilter.m │ │ │ │ ├── GPUImageColorPackingFilter.h │ │ │ │ ├── GPUImageColorPackingFilter.m │ │ │ │ ├── GPUImageColourFASTFeatureDetector.h │ │ │ │ ├── GPUImageColourFASTFeatureDetector.m │ │ │ │ ├── GPUImageColourFASTSamplingOperation.h │ │ │ │ ├── GPUImageColourFASTSamplingOperation.m │ │ │ │ ├── GPUImageContrastFilter.h │ │ │ │ ├── GPUImageContrastFilter.m │ │ │ │ ├── GPUImageCropFilter.h │ │ │ │ ├── GPUImageCropFilter.m │ │ │ │ ├── GPUImageCrosshairGenerator.h │ │ │ │ ├── GPUImageCrosshairGenerator.m │ │ │ │ ├── GPUImageCrosshatchFilter.h │ │ │ │ ├── GPUImageCrosshatchFilter.m │ │ │ │ ├── GPUImageDarkenBlendFilter.h │ │ │ │ ├── 
GPUImageDarkenBlendFilter.m │ │ │ │ ├── GPUImageDifferenceBlendFilter.h │ │ │ │ ├── GPUImageDifferenceBlendFilter.m │ │ │ │ ├── GPUImageDilationFilter.h │ │ │ │ ├── GPUImageDilationFilter.m │ │ │ │ ├── GPUImageDirectionalNonMaximumSuppressionFilter.h │ │ │ │ ├── GPUImageDirectionalNonMaximumSuppressionFilter.m │ │ │ │ ├── GPUImageDirectionalSobelEdgeDetectionFilter.h │ │ │ │ ├── GPUImageDirectionalSobelEdgeDetectionFilter.m │ │ │ │ ├── GPUImageDissolveBlendFilter.h │ │ │ │ ├── GPUImageDissolveBlendFilter.m │ │ │ │ ├── GPUImageDivideBlendFilter.h │ │ │ │ ├── GPUImageDivideBlendFilter.m │ │ │ │ ├── GPUImageEmbossFilter.h │ │ │ │ ├── GPUImageEmbossFilter.m │ │ │ │ ├── GPUImageErosionFilter.h │ │ │ │ ├── GPUImageErosionFilter.m │ │ │ │ ├── GPUImageExclusionBlendFilter.h │ │ │ │ ├── GPUImageExclusionBlendFilter.m │ │ │ │ ├── GPUImageExposureFilter.h │ │ │ │ ├── GPUImageExposureFilter.m │ │ │ │ ├── GPUImageFASTCornerDetectionFilter.h │ │ │ │ ├── GPUImageFASTCornerDetectionFilter.m │ │ │ │ ├── GPUImageFalseColorFilter.h │ │ │ │ ├── GPUImageFalseColorFilter.m │ │ │ │ ├── GPUImageGammaFilter.h │ │ │ │ ├── GPUImageGammaFilter.m │ │ │ │ ├── GPUImageGaussianBlurFilter.h │ │ │ │ ├── GPUImageGaussianBlurFilter.m │ │ │ │ ├── GPUImageGaussianBlurPositionFilter.h │ │ │ │ ├── GPUImageGaussianBlurPositionFilter.m │ │ │ │ ├── GPUImageGaussianSelectiveBlurFilter.h │ │ │ │ ├── GPUImageGaussianSelectiveBlurFilter.m │ │ │ │ ├── GPUImageGlassSphereFilter.h │ │ │ │ ├── GPUImageGlassSphereFilter.m │ │ │ │ ├── GPUImageGrayscaleFilter.h │ │ │ │ ├── GPUImageGrayscaleFilter.m │ │ │ │ ├── GPUImageHSBFilter.h │ │ │ │ ├── GPUImageHSBFilter.m │ │ │ │ ├── GPUImageHalftoneFilter.h │ │ │ │ ├── GPUImageHalftoneFilter.m │ │ │ │ ├── GPUImageHardLightBlendFilter.h │ │ │ │ ├── GPUImageHardLightBlendFilter.m │ │ │ │ ├── GPUImageHarrisCornerDetectionFilter.h │ │ │ │ ├── GPUImageHarrisCornerDetectionFilter.m │ │ │ │ ├── GPUImageHazeFilter.h │ │ │ │ ├── GPUImageHazeFilter.m │ │ │ │ ├── GPUImageHighPassFilter.h 
│ │ │ │ ├── GPUImageHighPassFilter.m │ │ │ │ ├── GPUImageHighlightShadowFilter.h │ │ │ │ ├── GPUImageHighlightShadowFilter.m │ │ │ │ ├── GPUImageHighlightShadowTintFilter.h │ │ │ │ ├── GPUImageHighlightShadowTintFilter.m │ │ │ │ ├── GPUImageHistogramEqualizationFilter.h │ │ │ │ ├── GPUImageHistogramEqualizationFilter.m │ │ │ │ ├── GPUImageHistogramFilter.h │ │ │ │ ├── GPUImageHistogramFilter.m │ │ │ │ ├── GPUImageHistogramGenerator.h │ │ │ │ ├── GPUImageHistogramGenerator.m │ │ │ │ ├── GPUImageHoughTransformLineDetector.h │ │ │ │ ├── GPUImageHoughTransformLineDetector.m │ │ │ │ ├── GPUImageHueBlendFilter.h │ │ │ │ ├── GPUImageHueBlendFilter.m │ │ │ │ ├── GPUImageHueFilter.h │ │ │ │ ├── GPUImageHueFilter.m │ │ │ │ ├── GPUImageJFAVoronoiFilter.h │ │ │ │ ├── GPUImageJFAVoronoiFilter.m │ │ │ │ ├── GPUImageKuwaharaFilter.h │ │ │ │ ├── GPUImageKuwaharaFilter.m │ │ │ │ ├── GPUImageKuwaharaRadius3Filter.h │ │ │ │ ├── GPUImageKuwaharaRadius3Filter.m │ │ │ │ ├── GPUImageLanczosResamplingFilter.h │ │ │ │ ├── GPUImageLanczosResamplingFilter.m │ │ │ │ ├── GPUImageLaplacianFilter.h │ │ │ │ ├── GPUImageLaplacianFilter.m │ │ │ │ ├── GPUImageLevelsFilter.h │ │ │ │ ├── GPUImageLevelsFilter.m │ │ │ │ ├── GPUImageLightenBlendFilter.h │ │ │ │ ├── GPUImageLightenBlendFilter.m │ │ │ │ ├── GPUImageLineGenerator.h │ │ │ │ ├── GPUImageLineGenerator.m │ │ │ │ ├── GPUImageLinearBurnBlendFilter.h │ │ │ │ ├── GPUImageLinearBurnBlendFilter.m │ │ │ │ ├── GPUImageLocalBinaryPatternFilter.h │ │ │ │ ├── GPUImageLocalBinaryPatternFilter.m │ │ │ │ ├── GPUImageLookupFilter.h │ │ │ │ ├── GPUImageLookupFilter.m │ │ │ │ ├── GPUImageLowPassFilter.h │ │ │ │ ├── GPUImageLowPassFilter.m │ │ │ │ ├── GPUImageLuminanceRangeFilter.h │ │ │ │ ├── GPUImageLuminanceRangeFilter.m │ │ │ │ ├── GPUImageLuminanceThresholdFilter.h │ │ │ │ ├── GPUImageLuminanceThresholdFilter.m │ │ │ │ ├── GPUImageLuminosity.h │ │ │ │ ├── GPUImageLuminosity.m │ │ │ │ ├── GPUImageLuminosityBlendFilter.h │ │ │ │ ├── 
GPUImageLuminosityBlendFilter.m │ │ │ │ ├── GPUImageMaskFilter.h │ │ │ │ ├── GPUImageMaskFilter.m │ │ │ │ ├── GPUImageMedianFilter.h │ │ │ │ ├── GPUImageMedianFilter.m │ │ │ │ ├── GPUImageMissEtikateFilter.h │ │ │ │ ├── GPUImageMissEtikateFilter.m │ │ │ │ ├── GPUImageMonochromeFilter.h │ │ │ │ ├── GPUImageMonochromeFilter.m │ │ │ │ ├── GPUImageMosaicFilter.h │ │ │ │ ├── GPUImageMosaicFilter.m │ │ │ │ ├── GPUImageMotionBlurFilter.h │ │ │ │ ├── GPUImageMotionBlurFilter.m │ │ │ │ ├── GPUImageMotionDetector.h │ │ │ │ ├── GPUImageMotionDetector.m │ │ │ │ ├── GPUImageMultiplyBlendFilter.h │ │ │ │ ├── GPUImageMultiplyBlendFilter.m │ │ │ │ ├── GPUImageNobleCornerDetectionFilter.h │ │ │ │ ├── GPUImageNobleCornerDetectionFilter.m │ │ │ │ ├── GPUImageNonMaximumSuppressionFilter.h │ │ │ │ ├── GPUImageNonMaximumSuppressionFilter.m │ │ │ │ ├── GPUImageNormalBlendFilter.h │ │ │ │ ├── GPUImageNormalBlendFilter.m │ │ │ │ ├── GPUImageOpacityFilter.h │ │ │ │ ├── GPUImageOpacityFilter.m │ │ │ │ ├── GPUImageOpeningFilter.h │ │ │ │ ├── GPUImageOpeningFilter.m │ │ │ │ ├── GPUImageOverlayBlendFilter.h │ │ │ │ ├── GPUImageOverlayBlendFilter.m │ │ │ │ ├── GPUImageParallelCoordinateLineTransformFilter.h │ │ │ │ ├── GPUImageParallelCoordinateLineTransformFilter.m │ │ │ │ ├── GPUImagePerlinNoiseFilter.h │ │ │ │ ├── GPUImagePerlinNoiseFilter.m │ │ │ │ ├── GPUImagePinchDistortionFilter.h │ │ │ │ ├── GPUImagePinchDistortionFilter.m │ │ │ │ ├── GPUImagePixellateFilter.h │ │ │ │ ├── GPUImagePixellateFilter.m │ │ │ │ ├── GPUImagePixellatePositionFilter.h │ │ │ │ ├── GPUImagePixellatePositionFilter.m │ │ │ │ ├── GPUImagePoissonBlendFilter.h │ │ │ │ ├── GPUImagePoissonBlendFilter.m │ │ │ │ ├── GPUImagePolarPixellateFilter.h │ │ │ │ ├── GPUImagePolarPixellateFilter.m │ │ │ │ ├── GPUImagePolkaDotFilter.h │ │ │ │ ├── GPUImagePolkaDotFilter.m │ │ │ │ ├── GPUImagePosterizeFilter.h │ │ │ │ ├── GPUImagePosterizeFilter.m │ │ │ │ ├── GPUImagePrewittEdgeDetectionFilter.h │ │ │ │ ├── 
GPUImagePrewittEdgeDetectionFilter.m │ │ │ │ ├── GPUImageRGBClosingFilter.h │ │ │ │ ├── GPUImageRGBClosingFilter.m │ │ │ │ ├── GPUImageRGBDilationFilter.h │ │ │ │ ├── GPUImageRGBDilationFilter.m │ │ │ │ ├── GPUImageRGBErosionFilter.h │ │ │ │ ├── GPUImageRGBErosionFilter.m │ │ │ │ ├── GPUImageRGBFilter.h │ │ │ │ ├── GPUImageRGBFilter.m │ │ │ │ ├── GPUImageRGBOpeningFilter.h │ │ │ │ ├── GPUImageRGBOpeningFilter.m │ │ │ │ ├── GPUImageRawDataInput.h │ │ │ │ ├── GPUImageRawDataInput.m │ │ │ │ ├── GPUImageRawDataOutput.h │ │ │ │ ├── GPUImageRawDataOutput.m │ │ │ │ ├── GPUImageSaturationBlendFilter.h │ │ │ │ ├── GPUImageSaturationBlendFilter.m │ │ │ │ ├── GPUImageSaturationFilter.h │ │ │ │ ├── GPUImageSaturationFilter.m │ │ │ │ ├── GPUImageScreenBlendFilter.h │ │ │ │ ├── GPUImageScreenBlendFilter.m │ │ │ │ ├── GPUImageSepiaFilter.h │ │ │ │ ├── GPUImageSepiaFilter.m │ │ │ │ ├── GPUImageSharpenFilter.h │ │ │ │ ├── GPUImageSharpenFilter.m │ │ │ │ ├── GPUImageShiTomasiFeatureDetectionFilter.h │ │ │ │ ├── GPUImageShiTomasiFeatureDetectionFilter.m │ │ │ │ ├── GPUImageSingleComponentGaussianBlurFilter.h │ │ │ │ ├── GPUImageSingleComponentGaussianBlurFilter.m │ │ │ │ ├── GPUImageSketchFilter.h │ │ │ │ ├── GPUImageSketchFilter.m │ │ │ │ ├── GPUImageSkinToneFilter.h │ │ │ │ ├── GPUImageSkinToneFilter.m │ │ │ │ ├── GPUImageSmoothToonFilter.h │ │ │ │ ├── GPUImageSmoothToonFilter.m │ │ │ │ ├── GPUImageSobelEdgeDetectionFilter.h │ │ │ │ ├── GPUImageSobelEdgeDetectionFilter.m │ │ │ │ ├── GPUImageSoftEleganceFilter.h │ │ │ │ ├── GPUImageSoftEleganceFilter.m │ │ │ │ ├── GPUImageSoftLightBlendFilter.h │ │ │ │ ├── GPUImageSoftLightBlendFilter.m │ │ │ │ ├── GPUImageSolarizeFilter.h │ │ │ │ ├── GPUImageSolarizeFilter.m │ │ │ │ ├── GPUImageSolidColorGenerator.h │ │ │ │ ├── GPUImageSolidColorGenerator.m │ │ │ │ ├── GPUImageSourceOverBlendFilter.h │ │ │ │ ├── GPUImageSourceOverBlendFilter.m │ │ │ │ ├── GPUImageSphereRefractionFilter.h │ │ │ │ ├── GPUImageSphereRefractionFilter.m │ │ │ │ ├── 
GPUImageStillCamera.h │ │ │ │ ├── GPUImageStillCamera.m │ │ │ │ ├── GPUImageStretchDistortionFilter.h │ │ │ │ ├── GPUImageStretchDistortionFilter.m │ │ │ │ ├── GPUImageSubtractBlendFilter.h │ │ │ │ ├── GPUImageSubtractBlendFilter.m │ │ │ │ ├── GPUImageSwirlFilter.h │ │ │ │ ├── GPUImageSwirlFilter.m │ │ │ │ ├── GPUImageThresholdEdgeDetectionFilter.h │ │ │ │ ├── GPUImageThresholdEdgeDetectionFilter.m │ │ │ │ ├── GPUImageThresholdSketchFilter.h │ │ │ │ ├── GPUImageThresholdSketchFilter.m │ │ │ │ ├── GPUImageThresholdedNonMaximumSuppressionFilter.h │ │ │ │ ├── GPUImageThresholdedNonMaximumSuppressionFilter.m │ │ │ │ ├── GPUImageTiltShiftFilter.h │ │ │ │ ├── GPUImageTiltShiftFilter.m │ │ │ │ ├── GPUImageToneCurveFilter.h │ │ │ │ ├── GPUImageToneCurveFilter.m │ │ │ │ ├── GPUImageToonFilter.h │ │ │ │ ├── GPUImageToonFilter.m │ │ │ │ ├── GPUImageTransformFilter.h │ │ │ │ ├── GPUImageTransformFilter.m │ │ │ │ ├── GPUImageTwoInputCrossTextureSamplingFilter.h │ │ │ │ ├── GPUImageTwoInputCrossTextureSamplingFilter.m │ │ │ │ ├── GPUImageUIElement.h │ │ │ │ ├── GPUImageUIElement.m │ │ │ │ ├── GPUImageUnsharpMaskFilter.h │ │ │ │ ├── GPUImageUnsharpMaskFilter.m │ │ │ │ ├── GPUImageVibranceFilter.h │ │ │ │ ├── GPUImageVibranceFilter.m │ │ │ │ ├── GPUImageVignetteFilter.h │ │ │ │ ├── GPUImageVignetteFilter.m │ │ │ │ ├── GPUImageVoronoiConsumerFilter.h │ │ │ │ ├── GPUImageVoronoiConsumerFilter.m │ │ │ │ ├── GPUImageWeakPixelInclusionFilter.h │ │ │ │ ├── GPUImageWeakPixelInclusionFilter.m │ │ │ │ ├── GPUImageWhiteBalanceFilter.h │ │ │ │ ├── GPUImageWhiteBalanceFilter.m │ │ │ │ ├── GPUImageXYDerivativeFilter.h │ │ │ │ ├── GPUImageXYDerivativeFilter.m │ │ │ │ ├── GPUImageZoomBlurFilter.h │ │ │ │ ├── GPUImageZoomBlurFilter.m │ │ │ │ ├── GPUImageiOSBlurFilter.h │ │ │ │ └── GPUImageiOSBlurFilter.m │ │ │ ├── GLFilters/ │ │ │ │ ├── ColorProcessing/ │ │ │ │ │ ├── GLImageAddStickerFilter.h │ │ │ │ │ ├── GLImageAddStickerFilter.m │ │ │ │ │ ├── GLImageAddStickerWithEffectFilter.h │ │ │ │ │ ├── 
GLImageAddStickerWithEffectFilter.m │ │ │ │ │ ├── GLImageBlendFilter.h │ │ │ │ │ ├── GLImageBlendFilter.m │ │ │ │ │ ├── GLImageCircleFilter.h │ │ │ │ │ ├── GLImageCircleFilter.m │ │ │ │ │ ├── GLImageGassianBlurMixFilter.h │ │ │ │ │ ├── GLImageGassianBlurMixFilter.m │ │ │ │ │ ├── GLImageLutFilter.h │ │ │ │ │ ├── GLImageLutFilter.m │ │ │ │ │ ├── GLImageMixBlendFilter.h │ │ │ │ │ ├── GLImageMixBlendFilter.m │ │ │ │ │ ├── GLImageShapeFilter.h │ │ │ │ │ ├── GLImageShapeFilter.m │ │ │ │ │ ├── GLImageShapeHighDefinitionFilter.h │ │ │ │ │ ├── GLImageShapeHighDefinitionFilter.m │ │ │ │ │ ├── GLImageStickerFilter.h │ │ │ │ │ ├── GLImageStickerFilter.m │ │ │ │ │ ├── GLImageTwoLutFilter.h │ │ │ │ │ └── GLImageTwoLutFilter.m │ │ │ │ ├── DouYinEffect/ │ │ │ │ │ ├── GLImageBlurSnapViewFilter.h │ │ │ │ │ ├── GLImageBlurSnapViewFilter.m │ │ │ │ │ ├── GLImageBlurSnapViewFilterGroup.h │ │ │ │ │ ├── GLImageBlurSnapViewFilterGroup.m │ │ │ │ │ ├── GLImageFourPointsMirrorFilter.h │ │ │ │ │ ├── GLImageFourPointsMirrorFilter.m │ │ │ │ │ ├── GLImageGlitchEffectGridFilter.h │ │ │ │ │ ├── GLImageGlitchEffectGridFilter.m │ │ │ │ │ ├── GLImageGlitchEffectLineFilter.h │ │ │ │ │ ├── GLImageGlitchEffectLineFilter.m │ │ │ │ │ ├── GLImageSoulOutFilter.h │ │ │ │ │ ├── GLImageSoulOutFilter.m │ │ │ │ │ ├── GLImageThreePartitionFilter.h │ │ │ │ │ ├── GLImageThreePartitionFilter.m │ │ │ │ │ ├── GLImageThreePartitionGroupFilter.h │ │ │ │ │ ├── GLImageThreePartitionGroupFilter.m │ │ │ │ │ ├── GLImageWaterReflectionFilter.h │ │ │ │ │ ├── GLImageWaterReflectionFilter.m │ │ │ │ │ ├── GLImageZoomFilter.h │ │ │ │ │ └── GLImageZoomFilter.m │ │ │ │ ├── FaceFilters/ │ │ │ │ │ ├── GLImageFaceChangeFilter.h │ │ │ │ │ ├── GLImageFaceChangeFilter.m │ │ │ │ │ ├── GLImageFaceChangeFilterGroup.h │ │ │ │ │ ├── GLImageFaceChangeFilterGroup.m │ │ │ │ │ ├── GLImageFaceDetectPointFilter.h │ │ │ │ │ ├── GLImageFaceDetectPointFilter.m │ │ │ │ │ ├── GPUImageBeautifyFilter.h │ │ │ │ │ └── GPUImageBeautifyFilter.m │ │ │ │ └── 
GLImageMovie/ │ │ │ │ ├── GLImageMovie.h │ │ │ │ ├── GLImageMovie.m │ │ │ │ ├── GPUImageMovieWriterFix.h │ │ │ │ └── GPUImageMovieWriterFix.m │ │ │ ├── GPUImage.h │ │ │ ├── Mac/ │ │ │ │ ├── GPUImage.h │ │ │ │ ├── GPUImageAVCamera.h │ │ │ │ ├── GPUImageAVCamera.m │ │ │ │ ├── GPUImageContext.h │ │ │ │ ├── GPUImageContext.m │ │ │ │ ├── GPUImageMac-Info.plist │ │ │ │ ├── GPUImageMac-Prefix.pch │ │ │ │ ├── GPUImageMovieWriter.h │ │ │ │ ├── GPUImageMovieWriter.m │ │ │ │ ├── GPUImagePicture.h │ │ │ │ ├── GPUImagePicture.m │ │ │ │ ├── GPUImageView.h │ │ │ │ ├── GPUImageView.m │ │ │ │ └── en.lproj/ │ │ │ │ └── InfoPlist.strings │ │ │ └── iOS/ │ │ │ ├── Framework/ │ │ │ │ ├── GPUImageFramework.h │ │ │ │ ├── Info.plist │ │ │ │ └── module.modulemap │ │ │ ├── GPUImage-Prefix.pch │ │ │ ├── GPUImageContext.h │ │ │ ├── GPUImageContext.m │ │ │ ├── GPUImageMovieWriter.h │ │ │ ├── GPUImageMovieWriter.m │ │ │ ├── GPUImagePicture+TextureSubimage.h │ │ │ ├── GPUImagePicture+TextureSubimage.m │ │ │ ├── GPUImagePicture.h │ │ │ ├── GPUImagePicture.m │ │ │ ├── GPUImageView.h │ │ │ └── GPUImageView.m │ │ ├── GPURenderKit.h │ │ └── Info.plist │ ├── GPURenderKit.xcodeproj/ │ │ ├── project.pbxproj │ │ ├── project.xcworkspace/ │ │ │ ├── contents.xcworkspacedata │ │ │ ├── xcshareddata/ │ │ │ │ └── WorkspaceSettings.xcsettings │ │ │ └── xcuserdata/ │ │ │ └── liuhaidong.xcuserdatad/ │ │ │ └── WorkspaceSettings.xcsettings │ │ └── xcshareddata/ │ │ └── xcschemes/ │ │ └── GPURenderKit.xcscheme │ └── GPURenderKitTests/ │ ├── GPURenderKitTests.m │ └── Info.plist ├── GPURenderKitDemo/ │ ├── GPURenderKitDemo/ │ │ ├── AppDelegate.h │ │ ├── AppDelegate.m │ │ ├── Assets.xcassets/ │ │ │ ├── AppIcon.appiconset/ │ │ │ │ └── Contents.json │ │ │ ├── Contents.json │ │ │ ├── EditorLut/ │ │ │ │ ├── Contents.json │ │ │ │ ├── exposure_n.imageset/ │ │ │ │ │ └── Contents.json │ │ │ │ ├── gaoya.imageset/ │ │ │ │ │ └── Contents.json │ │ │ │ ├── heibai.imageset/ │ │ │ │ │ └── Contents.json │ │ │ │ ├── jingdu.imageset/ │ │ 
│ │ │ └── Contents.json │ │ │ │ ├── meishi.imageset/ │ │ │ │ │ └── Contents.json │ │ │ │ └── xiatian.imageset/ │ │ │ │ └── Contents.json │ │ │ ├── bunny.imageset/ │ │ │ │ └── Contents.json │ │ │ ├── edit_beautify_rise_move.imageset/ │ │ │ │ └── Contents.json │ │ │ ├── edit_beautify_slim_move.imageset/ │ │ │ │ └── Contents.json │ │ │ ├── flower.imageset/ │ │ │ │ └── Contents.json │ │ │ └── rotate.imageset/ │ │ │ └── Contents.json │ │ ├── Base.lproj/ │ │ │ └── LaunchScreen.storyboard │ │ ├── BaseViewController.h │ │ ├── BaseViewController.m │ │ ├── DDShapeViewController/ │ │ │ ├── DDShapeViewController.h │ │ │ ├── DDShapeViewController.m │ │ │ ├── Rise/ │ │ │ │ ├── Controller/ │ │ │ │ │ ├── ATRiseViewController.h │ │ │ │ │ └── ATRiseViewController.m │ │ │ │ └── View/ │ │ │ │ ├── ATRiseMenuView.h │ │ │ │ ├── ATRiseMenuView.m │ │ │ │ ├── ATSelectBarView.h │ │ │ │ ├── ATSelectBarView.m │ │ │ │ ├── ATSelectBarView.xib │ │ │ │ ├── DDGLShapeView/ │ │ │ │ │ ├── DDGLSelectView.h │ │ │ │ │ ├── DDGLSelectView.m │ │ │ │ │ ├── DDGLShapeControlView.h │ │ │ │ │ ├── DDGLShapeControlView.m │ │ │ │ │ ├── DDGLShapeSelView.h │ │ │ │ │ ├── DDGLShapeSelView.m │ │ │ │ │ ├── DDGLShapeView.h │ │ │ │ │ ├── DDGLShapeView.m │ │ │ │ │ ├── DDGLShapingView.h │ │ │ │ │ └── DDGLShapingView.m │ │ │ │ └── Slider/ │ │ │ │ ├── ATSliderView.h │ │ │ │ ├── ATSliderView.m │ │ │ │ ├── WeSliderView.h │ │ │ │ └── WeSliderView.m │ │ │ └── View/ │ │ │ ├── UIView+Xib.h │ │ │ └── UIView+Xib.m │ │ ├── DouYinEffect/ │ │ │ ├── DouYinEffectTabView.h │ │ │ ├── DouYinEffectTabView.m │ │ │ ├── GLDouYinEffectViewController.h │ │ │ └── GLDouYinEffectViewController.m │ │ ├── Face/ │ │ │ ├── Face++/ │ │ │ │ ├── MGLicense/ │ │ │ │ │ ├── MGLicenseCommon.h │ │ │ │ │ ├── MGLicenseManager.h │ │ │ │ │ └── libMGLicMgrSDK-0.3.1.a │ │ │ │ ├── MGLicenseManagerHelper/ │ │ │ │ │ ├── MGFaceLicenseHandle.h │ │ │ │ │ ├── MGFaceLicenseHandle.m │ │ │ │ │ └── MGNetAccount.h │ │ │ │ └── iOS_SDK/ │ │ │ │ ├── MGAlgorithmInfo.h │ │ │ │ ├── 
MGDetectRectInfo.h │ │ │ │ ├── MGFaceInfo.h │ │ │ │ ├── MGFacepp.h │ │ │ │ ├── MGFaceppCommon.h │ │ │ │ ├── MGFaceppConfig.h │ │ │ │ ├── MGImageData.h │ │ │ │ ├── libMGFacepp-0.5.2.a │ │ │ │ └── megviifacepp_0_5_2_model │ │ │ ├── FaceSliderView.h │ │ │ ├── FaceSliderView.m │ │ │ ├── FaceViewController.h │ │ │ └── FaceViewController.mm │ │ ├── GLImageFilterEnumType.h │ │ ├── GLImageFilterList/ │ │ │ ├── GLImageFilterListViewController.h │ │ │ ├── GLImageFilterListViewController.m │ │ │ ├── GLImageFilterShowViewController.h │ │ │ └── GLImageFilterShowViewController.m │ │ ├── GLImageMovie/ │ │ │ ├── DDAVAssetRenderManage.h │ │ │ ├── DDAVAssetRenderManage.m │ │ │ ├── GLImageMovieUseViewController.h │ │ │ ├── GLImageMovieUseViewController.m │ │ │ ├── MovieViewController.h │ │ │ └── MovieViewController.m │ │ ├── GPURenderMacroHeader.h │ │ ├── Info.plist │ │ ├── MainViewController.h │ │ ├── MainViewController.m │ │ ├── ShapeChange/ │ │ │ ├── DDShapeViewController.h │ │ │ ├── DDShapeViewController.m │ │ │ ├── Rise/ │ │ │ │ ├── Controller/ │ │ │ │ │ ├── ATRiseViewController.h │ │ │ │ │ └── ATRiseViewController.m │ │ │ │ └── View/ │ │ │ │ ├── ATRiseMenuView.h │ │ │ │ ├── ATRiseMenuView.m │ │ │ │ ├── ATSelectBarView.h │ │ │ │ ├── ATSelectBarView.m │ │ │ │ ├── ATSelectBarView.xib │ │ │ │ ├── DDGLShapeView/ │ │ │ │ │ ├── DDGLSelectView.h │ │ │ │ │ ├── DDGLSelectView.m │ │ │ │ │ ├── DDGLShapeControlView.h │ │ │ │ │ ├── DDGLShapeControlView.m │ │ │ │ │ ├── DDGLShapeSelView.h │ │ │ │ │ ├── DDGLShapeSelView.m │ │ │ │ │ ├── DDGLShapeView.h │ │ │ │ │ ├── DDGLShapeView.m │ │ │ │ │ ├── DDGLShapingView.h │ │ │ │ │ └── DDGLShapingView.m │ │ │ │ └── Slider/ │ │ │ │ ├── ATSliderView.h │ │ │ │ ├── ATSliderView.m │ │ │ │ ├── WeSliderView.h │ │ │ │ └── WeSliderView.m │ │ │ └── View/ │ │ │ ├── UIView+Xib.h │ │ │ └── UIView+Xib.m │ │ ├── Support/ │ │ │ ├── GLFilterInfoView.h │ │ │ ├── GLFilterInfoView.m │ │ │ ├── GLImagePickerHelper.h │ │ │ ├── GLImagePickerHelper.m │ │ │ ├── GLSliderView.h │ │ 
│ ├── GLSliderView.m │ │ │ ├── HUD/ │ │ │ │ ├── MBProgressHUD.h │ │ │ │ └── MBProgressHUD.m │ │ │ ├── UIColor+Utils.h │ │ │ ├── UIColor+Utils.m │ │ │ ├── UIImage+Rotate.h │ │ │ ├── UIImage+Rotate.m │ │ │ ├── UIImage+Utils.h │ │ │ └── UIImage+Utils.m │ │ ├── Tool/ │ │ │ ├── DDMediaEditorManage.h │ │ │ └── DDMediaEditorManage.m │ │ ├── VideoCode/ │ │ │ ├── DDVideoEcodeManage.h │ │ │ ├── DDVideoEcodeManage.m │ │ │ ├── VideoEcodeViewController.h │ │ │ └── VideoEcodeViewController.m │ │ └── main.m │ ├── GPURenderKitDemo.xcodeproj/ │ │ ├── project.pbxproj │ │ ├── project.xcworkspace/ │ │ │ ├── contents.xcworkspacedata │ │ │ ├── xcshareddata/ │ │ │ │ └── WorkspaceSettings.xcsettings │ │ │ └── xcuserdata/ │ │ │ └── liuhaidong.xcuserdatad/ │ │ │ └── WorkspaceSettings.xcsettings │ │ └── xcshareddata/ │ │ └── xcschemes/ │ │ └── GPURenderKitDemo.xcscheme │ ├── GPURenderKitDemoTests/ │ │ ├── GPURenderKitDemoTests.m │ │ └── Info.plist │ └── GPURenderKitDemoUITests/ │ ├── GPURenderKitDemoUITests.m │ └── Info.plist └── README.md
SYMBOL INDEX (165 symbols across 139 files)
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GLProgram.h
function interface (line 16) | interface GLProgram : NSObject
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageBuffer.h
function interface (line 3) | interface GPUImageBuffer : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFilter.h
type GPUVector4 (line 14) | struct GPUVector4 {
type GPUVector4 (line 20) | typedef struct GPUVector4 GPUVector4;
type GPUVector3 (line 22) | struct GPUVector3 {
type GPUVector3 (line 27) | typedef struct GPUVector3 GPUVector3;
type GPUMatrix4x4 (line 29) | struct GPUMatrix4x4 {
type GPUMatrix4x4 (line 35) | typedef struct GPUMatrix4x4 GPUMatrix4x4;
type GPUMatrix3x3 (line 37) | struct GPUMatrix3x3 {
type GPUMatrix3x3 (line 42) | typedef struct GPUMatrix3x3 GPUMatrix3x3;
function interface (line 48) | interface GPUImageFilter : GPUImageOutput <GPUImageInput>
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFilterGroup.h
function interface (line 4) | interface GPUImageFilterGroup : GPUImageOutput <GPUImageInput>
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFilterPipeline.h
function interface (line 4) | interface GPUImageFilterPipeline : NSObject
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFourInputFilter.h
function interface (line 5) | interface GPUImageFourInputFilter : GPUImageThreeInputFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFramebuffer.h
type GPUTextureOptions (line 16) | typedef struct GPUTextureOptions {
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageOutput.h
type UIImageOrientationUp (line 8) | typedef NS_ENUM(NSInteger, UIImageOrientation) {
function interface (line 41) | interface GPUImageOutput : NSObject
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTextureInput.h
function interface (line 3) | interface GPUImageTextureInput : GPUImageOutput
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTextureOutput.h
function interface (line 6) | interface GPUImageTextureOutput : NSObject <GPUImageInput>
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageThreeInputFilter.h
function interface (line 5) | interface GPUImageThreeInputFilter : GPUImageTwoInputFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTwoInputFilter.h
function interface (line 5) | interface GPUImageTwoInputFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTwoPassFilter.h
function interface (line 3) | interface GPUImageTwoPassFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTwoPassTextureSamplingFilter.h
function interface (line 3) | interface GPUImageTwoPassTextureSamplingFilter : GPUImageTwoPassFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImage3x3ConvolutionFilter.h
function interface (line 5) | interface GPUImage3x3ConvolutionFilter : GPUImage3x3TextureSamplingFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImage3x3TextureSamplingFilter.h
function interface (line 5) | interface GPUImage3x3TextureSamplingFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAlphaBlendFilter.h
function interface (line 3) | interface GPUImageAlphaBlendFilter : GPUImageTwoInputFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAmatorkaFilter.h
function interface (line 12) | interface GPUImageAmatorkaFilter : GPUImageFilterGroup
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAverageColor.h
function interface (line 5) | interface GPUImageAverageColor : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageBilateralFilter.h
function interface (line 3) | interface GPUImageBilateralFilter : GPUImageGaussianBlurFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageBrightnessFilter.h
function interface (line 3) | interface GPUImageBrightnessFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageBulgeDistortionFilter.h
function interface (line 4) | interface GPUImageBulgeDistortionFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageCannyEdgeDetectionFilter.h
function interface (line 22) | interface GPUImageCannyEdgeDetectionFilter : GPUImageFilterGroup
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageChromaKeyBlendFilter.h
function interface (line 5) | interface GPUImageChromaKeyBlendFilter : GPUImageTwoInputFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageChromaKeyFilter.h
function interface (line 3) | interface GPUImageChromaKeyFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageClosingFilter.h
function interface (line 9) | interface GPUImageClosingFilter : GPUImageFilterGroup
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorBurnBlendFilter.h
function interface (line 5) | interface GPUImageColorBurnBlendFilter : GPUImageTwoInputFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorDodgeBlendFilter.h
function interface (line 5) | interface GPUImageColorDodgeBlendFilter : GPUImageTwoInputFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorInvertFilter.h
function interface (line 3) | interface GPUImageColorInvertFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorMatrixFilter.h
function interface (line 5) | interface GPUImageColorMatrixFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorPackingFilter.h
function interface (line 3) | interface GPUImageColorPackingFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColourFASTFeatureDetector.h
function interface (line 13) | interface GPUImageColourFASTFeatureDetector : GPUImageFilterGroup
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColourFASTSamplingOperation.h
function interface (line 10) | interface GPUImageColourFASTSamplingOperation : GPUImageTwoInputFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageContrastFilter.h
function interface (line 5) | interface GPUImageContrastFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageCropFilter.h
function interface (line 3) | interface GPUImageCropFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageCrosshairGenerator.h
function interface (line 3) | interface GPUImageCrosshairGenerator : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageCrosshatchFilter.h
function interface (line 3) | interface GPUImageCrosshatchFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDarkenBlendFilter.h
function interface (line 3) | interface GPUImageDarkenBlendFilter : GPUImageTwoInputFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDifferenceBlendFilter.h
function interface (line 3) | interface GPUImageDifferenceBlendFilter : GPUImageTwoInputFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDirectionalNonMaximumSuppressionFilter.h
function interface (line 3) | interface GPUImageDirectionalNonMaximumSuppressionFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDissolveBlendFilter.h
function interface (line 3) | interface GPUImageDissolveBlendFilter : GPUImageTwoInputFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageExclusionBlendFilter.h
function interface (line 3) | interface GPUImageExclusionBlendFilter : GPUImageTwoInputFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageExposureFilter.h
function interface (line 3) | interface GPUImageExposureFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageFASTCornerDetectionFilter.h
type GPUImageFASTDetectorType (line 17) | typedef enum { kGPUImageFAST12Contiguous, kGPUImageFAST12ContiguousNonMa...
function interface (line 19) | interface GPUImageFASTCornerDetectionFilter : GPUImageFilterGroup
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageFalseColorFilter.h
function interface (line 3) | interface GPUImageFalseColorFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGammaFilter.h
function interface (line 3) | interface GPUImageGammaFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGaussianBlurFilter.h
function interface (line 7) | interface GPUImageGaussianBlurFilter : GPUImageTwoPassTextureSamplingFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGaussianBlurPositionFilter.h
function interface (line 5) | interface GPUImageGaussianBlurPositionFilter : GPUImageTwoPassTextureSam...
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGaussianSelectiveBlurFilter.h
function interface (line 7) | interface GPUImageGaussianSelectiveBlurFilter : GPUImageFilterGroup
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHardLightBlendFilter.h
function interface (line 3) | interface GPUImageHardLightBlendFilter : GPUImageTwoInputFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHarrisCornerDetectionFilter.h
function interface (line 23) | interface GPUImageHarrisCornerDetectionFilter : GPUImageFilterGroup
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHazeFilter.h
function interface (line 15) | interface GPUImageHazeFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHighPassFilter.h
function interface (line 5) | interface GPUImageHighPassFilter : GPUImageFilterGroup
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHighlightShadowFilter.h
function interface (line 3) | interface GPUImageHighlightShadowFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHighlightShadowTintFilter.h
function interface (line 11) | interface GPUImageHighlightShadowTintFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHistogramEqualizationFilter.h
function interface (line 15) | interface GPUImageHistogramEqualizationFilter : GPUImageFilterGroup
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHistogramFilter.h
type GPUImageHistogramType (line 3) | typedef enum { kGPUImageHistogramRed, kGPUImageHistogramGreen, kGPUImage...
function interface (line 5) | interface GPUImageHistogramFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHistogramGenerator.h
function interface (line 3) | interface GPUImageHistogramGenerator : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHoughTransformLineDetector.h
function interface (line 25) | interface GPUImageHoughTransformLineDetector : GPUImageFilterGroup
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHueFilter.h
function interface (line 4) | interface GPUImageHueFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageJFAVoronoiFilter.h
function interface (line 3) | interface GPUImageJFAVoronoiFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageKuwaharaFilter.h
function interface (line 5) | interface GPUImageKuwaharaFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLevelsFilter.h
function interface (line 14) | interface GPUImageLevelsFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLightenBlendFilter.h
function interface (line 4) | interface GPUImageLightenBlendFilter : GPUImageTwoInputFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLineGenerator.h
function interface (line 3) | interface GPUImageLineGenerator : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLookupFilter.h
function interface (line 3) | interface GPUImageLookupFilter : GPUImageTwoInputFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLowPassFilter.h
function interface (line 5) | interface GPUImageLowPassFilter : GPUImageFilterGroup
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLuminanceRangeFilter.h
function interface (line 3) | interface GPUImageLuminanceRangeFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLuminanceThresholdFilter.h
function interface (line 5) | interface GPUImageLuminanceThresholdFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLuminosity.h
function interface (line 3) | interface GPUImageLuminosity : GPUImageAverageColor
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageMissEtikateFilter.h
function interface (line 12) | interface GPUImageMissEtikateFilter : GPUImageFilterGroup
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageMonochromeFilter.h
function interface (line 3) | interface GPUImageMonochromeFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageMosaicFilter.h
function interface (line 7) | interface GPUImageMosaicFilter : GPUImageTwoInputFilter {
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageMotionDetector.h
function interface (line 5) | interface GPUImageMotionDetector : GPUImageFilterGroup
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageMultiplyBlendFilter.h
function interface (line 3) | interface GPUImageMultiplyBlendFilter : GPUImageTwoInputFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageOpacityFilter.h
function interface (line 3) | interface GPUImageOpacityFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageOpeningFilter.h
function interface (line 9) | interface GPUImageOpeningFilter : GPUImageFilterGroup
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageParallelCoordinateLineTransformFilter.h
function interface (line 9) | interface GPUImageParallelCoordinateLineTransformFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImagePerlinNoiseFilter.h
function interface (line 3) | interface GPUImagePerlinNoiseFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImagePinchDistortionFilter.h
function interface (line 5) | interface GPUImagePinchDistortionFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImagePixellateFilter.h
function interface (line 3) | interface GPUImagePixellateFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImagePixellatePositionFilter.h
function interface (line 3) | interface GPUImagePixellatePositionFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImagePoissonBlendFilter.h
function interface (line 4) | interface GPUImagePoissonBlendFilter : GPUImageTwoInputCrossTextureSampl...
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImagePolarPixellateFilter.h
function interface (line 3) | interface GPUImagePolarPixellateFilter : GPUImageFilter {
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImagePolkaDotFilter.h
function interface (line 3) | interface GPUImagePolkaDotFilter : GPUImagePixellateFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImagePosterizeFilter.h
function interface (line 5) | interface GPUImagePosterizeFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageRGBClosingFilter.h
function interface (line 9) | interface GPUImageRGBClosingFilter : GPUImageFilterGroup
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageRGBFilter.h
function interface (line 3) | interface GPUImageRGBFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageRGBOpeningFilter.h
function interface (line 9) | interface GPUImageRGBOpeningFilter : GPUImageFilterGroup
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageRawDataInput.h
type GPUPixelFormat (line 8) | typedef enum {
type GPUPixelType (line 15) | typedef enum {
function interface (line 20) | interface GPUImageRawDataInput : GPUImageOutput
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageRawDataOutput.h
type GPUByteColorVector (line 4) | struct GPUByteColorVector {
type GPUByteColorVector (line 10) | typedef struct GPUByteColorVector GPUByteColorVector;
function interface (line 15) | interface GPUImageRawDataOutput : NSObject <GPUImageInput> {
function interface (line 21) | interface GPUImageRawDataOutput : NSObject <GPUImageInput> {
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSaturationFilter.h
function interface (line 5) | interface GPUImageSaturationFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageScreenBlendFilter.h
function interface (line 3) | interface GPUImageScreenBlendFilter : GPUImageTwoInputFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSharpenFilter.h
function interface (line 3) | interface GPUImageSharpenFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSketchFilter.h
function interface (line 7) | interface GPUImageSketchFilter : GPUImageSobelEdgeDetectionFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSkinToneFilter.h
type GPUImageSkinToneUpperColorGreen (line 11) | typedef NS_ENUM(NSUInteger, GPUImageSkinToneUpperColor) {
function interface (line 18) | interface GPUImageSkinToneFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSmoothToonFilter.h
function interface (line 8) | interface GPUImageSmoothToonFilter : GPUImageFilterGroup
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSobelEdgeDetectionFilter.h
function interface (line 3) | interface GPUImageSobelEdgeDetectionFilter : GPUImageTwoPassFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSoftEleganceFilter.h
function interface (line 13) | interface GPUImageSoftEleganceFilter : GPUImageFilterGroup
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSoftLightBlendFilter.h
function interface (line 3) | interface GPUImageSoftLightBlendFilter : GPUImageTwoInputFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSolarizeFilter.h
function interface (line 5) | interface GPUImageSolarizeFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSolidColorGenerator.h
function interface (line 7) | interface GPUImageSolidColorGenerator : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSphereRefractionFilter.h
function interface (line 3) | interface GPUImageSphereRefractionFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageStretchDistortionFilter.h
function interface (line 5) | interface GPUImageStretchDistortionFilter : GPUImageFilter {
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageSwirlFilter.h
function interface (line 5) | interface GPUImageSwirlFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageThresholdEdgeDetectionFilter.h
function interface (line 3) | interface GPUImageThresholdEdgeDetectionFilter : GPUImageSobelEdgeDetect...
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageThresholdedNonMaximumSuppressionFilter.h
function interface (line 3) | interface GPUImageThresholdedNonMaximumSuppressionFilter : GPUImage3x3Te...
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageTiltShiftFilter.h
function interface (line 6) | interface GPUImageTiltShiftFilter : GPUImageFilterGroup
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageToonFilter.h
function interface (line 6) | interface GPUImageToonFilter : GPUImage3x3TextureSamplingFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageTransformFilter.h
function interface (line 3) | interface GPUImageTransformFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageTwoInputCrossTextureSamplingFilter.h
function interface (line 3) | interface GPUImageTwoInputCrossTextureSamplingFilter : GPUImageTwoInputF...
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageUnsharpMaskFilter.h
function interface (line 5) | interface GPUImageUnsharpMaskFilter : GPUImageFilterGroup
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageVibranceFilter.h
function interface (line 11) | interface GPUImageVibranceFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageVignetteFilter.h
function interface (line 5) | interface GPUImageVignetteFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageVoronoiConsumerFilter.h
function interface (line 3) | interface GPUImageVoronoiConsumerFilter : GPUImageTwoInputFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageWhiteBalanceFilter.h
function interface (line 7) | interface GPUImageWhiteBalanceFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageiOSBlurFilter.h
function interface (line 7) | interface GPUImageiOSBlurFilter : GPUImageFilterGroup
FILE: GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageAddStickerFilter.h
function interface (line 13) | interface GLImageAddStickerFilter : GPUImageFilterGroup
FILE: GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageAddStickerWithEffectFilter.h
function interface (line 15) | interface GLImageAddStickerWithEffectFilter : GPUImageFilterGroup
FILE: GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageBlendFilter.h
function interface (line 11) | interface GLImageBlendFilter : GPUImageTwoInputFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageGassianBlurMixFilter.h
function interface (line 13) | interface GLImageGassianBlurMixFilter : GPUImageFilterGroup
FILE: GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageLutFilter.h
function interface (line 13) | interface GLImageLutFilter : GPUImageFilterGroup
FILE: GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageStickerFilter.h
type GL_IMAGE_BLEND_MODE_NORMAL (line 11) | typedef NS_ENUM(NSInteger, GL_IMAGE_BLEND_MODE)
FILE: GPURenderKit/GPURenderKit/GPUImage/GLFilters/ColorProcessing/GLImageTwoLutFilter.h
function interface (line 12) | interface GLImageTwoLutFilter : GPUImageFilterGroup
FILE: GPURenderKit/GPURenderKit/GPUImage/GLFilters/DouYinEffect/GLImageGlitchEffectGridFilter.h
function interface (line 12) | interface GLImageGlitchEffectGridFilter : GPUImageTwoInputFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/GLFilters/DouYinEffect/GLImageGlitchEffectLineFilter.h
function interface (line 11) | interface GLImageGlitchEffectLineFilter : GPUImageFilter
FILE: GPURenderKit/GPURenderKit/GPUImage/GLFilters/FaceFilters/GPUImageBeautifyFilter.h
function interface (line 18) | interface GPUImageBeautifyFilter : GPUImageFilterGroup {
FILE: GPURenderKit/GPURenderKit/GPUImage/Mac/GPUImageContext.h
type GPUImageRotationMode (line 10) | typedef enum { kGPUImageNoRotation, kGPUImageRotateLeft, kGPUImageRotate...
FILE: GPURenderKit/GPURenderKit/GPUImage/Mac/GPUImagePicture.h
function interface (line 4) | interface GPUImagePicture : GPUImageOutput
FILE: GPURenderKit/GPURenderKit/GPUImage/Mac/GPUImageView.h
type GPUImageFillModeType (line 4) | typedef enum {
function interface (line 13) | interface GPUImageView : NSOpenGLView <GPUImageInput>
FILE: GPURenderKit/GPURenderKit/GPUImage/iOS/GPUImageContext.h
type kGPUImageNoRotation (line 7) | typedef NS_ENUM(NSUInteger, GPUImageRotationMode) {
FILE: GPURenderKit/GPURenderKit/GPUImage/iOS/GPUImagePicture.h
function interface (line 5) | interface GPUImagePicture : GPUImageOutput
FILE: GPURenderKit/GPURenderKit/GPUImage/iOS/GPUImageView.h
type kGPUImageFillModeStretch (line 4) | typedef NS_ENUM(NSUInteger, GPUImageFillModeType) {
function interface (line 15) | interface GPUImageView : UIView <GPUImageInput>
FILE: GPURenderKitDemo/GPURenderKitDemo/DDShapeViewController/Rise/View/DDGLShapeView/DDGLShapeView.h
type DDGLNormValueRange (line 15) | struct DDGLNormValueRange{
type DDGLNormValueRange (line 21) | typedef struct DDGLNormValueRange DDGLNormValueRange;
FILE: GPURenderKitDemo/GPURenderKitDemo/Face/Face++/iOS_SDK/MGDetectRectInfo.h
type MGOrientationLeft (line 12) | typedef NS_ENUM(NSInteger, MGOrientation) {
FILE: GPURenderKitDemo/GPURenderKitDemo/Face/Face++/iOS_SDK/MGFaceppCommon.h
type MGDetectROI (line 42) | typedef struct {
function CG_INLINE (line 49) | CG_INLINE MGDetectROI MGDetectROIMake(int left, int top, int right,int b...
FILE: GPURenderKitDemo/GPURenderKitDemo/GLImageFilterEnumType.h
type GLIMAGE_LUT (line 16) | typedef NS_ENUM(NSInteger, GLIMAGE_FILTER_TYPE)
function NSString (line 23) | static inline NSString *GetFilterNameWithType(GLIMAGE_FILTER_TYPE type)
type GL_INPUT_SOURCE_CAMERA (line 38) | typedef NS_ENUM(NSInteger, GL_INPUT_SOURCE_TYPE)
FILE: GPURenderKitDemo/GPURenderKitDemo/ShapeChange/Rise/View/DDGLShapeView/DDGLShapeView.h
type DDGLNormValueRange (line 15) | struct DDGLNormValueRange{
type DDGLNormValueRange (line 21) | typedef struct DDGLNormValueRange DDGLNormValueRange;
FILE: GPURenderKitDemo/GPURenderKitDemo/Support/HUD/MBProgressHUD.h
type MBProgressHUDModeIndeterminate (line 39) | typedef NS_ENUM(NSInteger, MBProgressHUDMode) {
type MBProgressHUDAnimationFade (line 54) | typedef NS_ENUM(NSInteger, MBProgressHUDAnimation) {
type MBProgressHUDBackgroundStyleSolidColor (line 65) | typedef NS_ENUM(NSInteger, MBProgressHUDBackgroundStyle) {
Condensed preview — 566 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (2,228K chars).
[
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GLProgram.h",
"chars": 1528,
"preview": "// This is Jeff LaMarche's GLProgram OpenGL shader wrapper class from his OpenGL ES 2.0 book.\n// A description of this"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GLProgram.m",
"chars": 6426,
"preview": "// This is Jeff LaMarche's GLProgram OpenGL shader wrapper class from his OpenGL ES 2.0 book.\n// A description of this"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageBuffer.h",
"chars": 179,
"preview": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageBuffer : GPUImageFilter\n{\n NSMutableArray *bufferedFramebuffers;\n}\n\n@p"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageBuffer.m",
"chars": 3363,
"preview": "#import \"GPUImageBuffer.h\"\n\n@interface GPUImageBuffer()\n\n@end\n\n@implementation GPUImageBuffer\n\n@synthesize bufferSize = "
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFilter.h",
"chars": 5676,
"preview": "#import \"GPUImageOutput.h\"\n\n#define STRINGIZE(x) #x\n#define STRINGIZE2(x) STRINGIZE(x)\n#define SHADER_STRING(text) @ STR"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFilter.m",
"chars": 23191,
"preview": "#import \"GPUImageFilter.h\"\n#import \"GPUImagePicture.h\"\n#import <AVFoundation/AVFoundation.h>\n\n// Hardcode the vertex sha"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFilterGroup.h",
"chars": 625,
"preview": "#import \"GPUImageOutput.h\"\n#import \"GPUImageFilter.h\"\n\n@interface GPUImageFilterGroup : GPUImageOutput <GPUImageInput>\n{"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFilterGroup.m",
"chars": 5174,
"preview": "#import \"GPUImageFilterGroup.h\"\n#import \"GPUImagePicture.h\"\n\n@implementation GPUImageFilterGroup\n\n@synthesize terminalFi"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFilterPipeline.h",
"chars": 1253,
"preview": "#import <Foundation/Foundation.h>\n#import \"GPUImageOutput.h\"\n\n@interface GPUImageFilterPipeline : NSObject\n{\n NSStrin"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFilterPipeline.m",
"chars": 9463,
"preview": "#import \"GPUImageFilterPipeline.h\"\n\n@interface GPUImageFilterPipeline ()\n\n- (BOOL)_parseConfiguration:(NSDictionary *)co"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFourInputFilter.h",
"chars": 567,
"preview": "#import \"GPUImageThreeInputFilter.h\"\n\nextern NSString *const kGPUImageFourInputTextureVertexShaderString;\n\n@interface GP"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFourInputFilter.m",
"chars": 11127,
"preview": "#import \"GPUImageFourInputFilter.h\"\n\n\nNSString *const kGPUImageFourInputTextureVertexShaderString = SHADER_STRING\n(\n att"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFramebuffer.h",
"chars": 1442,
"preview": "#import <Foundation/Foundation.h>\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n#import <OpenGLES/EAGL.h>\n#import <Op"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFramebuffer.m",
"chars": 15211,
"preview": "#import \"GPUImageFramebuffer.h\"\n#import \"GPUImageOutput.h\"\n\n@interface GPUImageFramebuffer()\n{\n GLuint framebuffer;\n#"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFramebufferCache.h",
"chars": 718,
"preview": "#import <Foundation/Foundation.h>\n#import <QuartzCore/QuartzCore.h>\n#import \"GPUImageFramebuffer.h\"\n\n@interface GPUImage"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageFramebufferCache.m",
"chars": 7642,
"preview": "#import \"GPUImageFramebufferCache.h\"\n#import \"GPUImageContext.h\"\n#import \"GPUImageOutput.h\"\n\n#if TARGET_IPHONE_SIMULATOR"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageMovie.h",
"chars": 2380,
"preview": "#import <Foundation/Foundation.h>\n#import <AVFoundation/AVFoundation.h>\n#import \"GPUImageContext.h\"\n#import \"GPUImageOut"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageMovie.m",
"chars": 28753,
"preview": "#import \"GPUImageMovie.h\"\n#import \"GPUImageMovieWriter.h\"\n#import \"GPUImageFilter.h\"\n#import \"GPUImageVideoCamera.h\"\n\n@i"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageMovieComposition.h",
"chars": 527,
"preview": "//\n// GPUImageMovieComposition.h\n// Givit\n//\n// Created by Sean Meiners on 2013/01/25.\n//\n//\n\n#import \"GPUImageMovie."
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageMovieComposition.m",
"chars": 2651,
"preview": "//\n// GPUImageMovieComposition.m\n// Givit\n//\n// Created by Sean Meiners on 2013/01/25.\n//\n//\n\n#import \"GPUImageMovieC"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageOutput.h",
"chars": 4914,
"preview": "#import \"GPUImageContext.h\"\n#import \"GPUImageFramebuffer.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\n#import <UI"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageOutput.m",
"chars": 12720,
"preview": "#import \"GPUImageOutput.h\"\n#import \"GPUImageMovieWriter.h\"\n#import \"GPUImagePicture.h\"\n#import <mach/mach.h>\n\ndispatch_q"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTextureInput.h",
"chars": 293,
"preview": "#import \"GPUImageOutput.h\"\n\n@interface GPUImageTextureInput : GPUImageOutput\n{\n CGSize textureSize;\n}\n\n// Initializat"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTextureInput.m",
"chars": 1321,
"preview": "#import \"GPUImageTextureInput.h\"\n\n@implementation GPUImageTextureInput\n\n#pragma mark -\n#pragma mark Initialization and t"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTextureOutput.h",
"chars": 549,
"preview": "#import <Foundation/Foundation.h>\n#import \"GPUImageContext.h\"\n\n@protocol GPUImageTextureOutputDelegate;\n\n@interface GPUI"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTextureOutput.m",
"chars": 1442,
"preview": "#import \"GPUImageTextureOutput.h\"\n\n@implementation GPUImageTextureOutput\n\n@synthesize delegate = _delegate;\n@synthesize "
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageThreeInputFilter.h",
"chars": 559,
"preview": "#import \"GPUImageTwoInputFilter.h\"\n\nextern NSString *const kGPUImageThreeInputTextureVertexShaderString;\n\n@interface GPU"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageThreeInputFilter.m",
"chars": 9076,
"preview": "#import \"GPUImageThreeInputFilter.h\"\n\n\nNSString *const kGPUImageThreeInputTextureVertexShaderString = SHADER_STRING\n(\n a"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTwoInputFilter.h",
"chars": 628,
"preview": "#import \"GPUImageFilter.h\"\n\nextern NSString *const kGPUImageTwoInputTextureVertexShaderString;\n\n@interface GPUImageTwoIn"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTwoInputFilter.m",
"chars": 7332,
"preview": "#import \"GPUImageTwoInputFilter.h\"\n\nNSString *const kGPUImageTwoInputTextureVertexShaderString = SHADER_STRING\n(\n attrib"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTwoPassFilter.h",
"chars": 974,
"preview": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageTwoPassFilter : GPUImageFilter\n{\n GPUImageFramebuffer *secondOutputFra"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTwoPassFilter.m",
"chars": 7922,
"preview": "#import \"GPUImageTwoPassFilter.h\"\n\n@implementation GPUImageTwoPassFilter\n\n#pragma mark -\n#pragma mark Initialization and"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTwoPassTextureSamplingFilter.h",
"chars": 671,
"preview": "#import \"GPUImageTwoPassFilter.h\"\n\n@interface GPUImageTwoPassTextureSamplingFilter : GPUImageTwoPassFilter\n{\n GLint v"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageTwoPassTextureSamplingFilter.m",
"chars": 3254,
"preview": "#import \"GPUImageTwoPassTextureSamplingFilter.h\"\n\n@implementation GPUImageTwoPassTextureSamplingFilter\n\n@synthesize vert"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageVideoCamera.h",
"chars": 5111,
"preview": "#import <Foundation/Foundation.h>\n#import <AVFoundation/AVFoundation.h>\n#import <CoreMedia/CoreMedia.h>\n#import \"GPUImag"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/BaseClass/GPUImageVideoCamera.m",
"chars": 39984,
"preview": "#import \"GPUImageVideoCamera.h\"\n#import \"GPUImageMovieWriter.h\"\n#import \"GPUImageFilter.h\"\n\nvoid setColorConversion601( "
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImage3x3ConvolutionFilter.h",
"chars": 653,
"preview": "#import \"GPUImage3x3TextureSamplingFilter.h\"\n\n/** Runs a 3x3 convolution kernel against the image\n */\n@interface GPUImag"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImage3x3ConvolutionFilter.m",
"chars": 4826,
"preview": "#import \"GPUImage3x3ConvolutionFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImage3x3Co"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImage3x3TextureSamplingFilter.h",
"chars": 593,
"preview": "#import \"GPUImageFilter.h\"\n\nextern NSString *const kGPUImageNearbyTexelSamplingVertexShaderString;\n\n@interface GPUImage3"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImage3x3TextureSamplingFilter.m",
"chars": 3739,
"preview": "#import \"GPUImage3x3TextureSamplingFilter.h\"\n\n// Override vertex shader to remove dependent texture reads \nNSString *con"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAdaptiveThresholdFilter.h",
"chars": 258,
"preview": "#import \"GPUImageFilterGroup.h\"\n\n@interface GPUImageAdaptiveThresholdFilter : GPUImageFilterGroup\n\n/** A multiplier for "
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAdaptiveThresholdFilter.m",
"chars": 2870,
"preview": "#import \"GPUImageAdaptiveThresholdFilter.h\"\n#import \"GPUImageFilter.h\"\n#import \"GPUImageTwoInputFilter.h\"\n#import \"GPUIm"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAddBlendFilter.h",
"chars": 101,
"preview": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageAddBlendFilter : GPUImageTwoInputFilter\n\n@end\n"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAddBlendFilter.m",
"chars": 2675,
"preview": "#import \"GPUImageAddBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageAddBlendFra"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAlphaBlendFilter.h",
"chars": 266,
"preview": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageAlphaBlendFilter : GPUImageTwoInputFilter\n{\n GLint mixUniform;"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAlphaBlendFilter.m",
"chars": 1708,
"preview": "#import \"GPUImageAlphaBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageAlphaBlen"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAmatorkaFilter.h",
"chars": 429,
"preview": "#import \"GPUImageFilterGroup.h\"\n\n@class GPUImagePicture;\n\n/** A photo filter based on Photoshop action by Amatorka\n h"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAmatorkaFilter.m",
"chars": 1043,
"preview": "#import \"GPUImageAmatorkaFilter.h\"\n#import \"GPUImagePicture.h\"\n#import \"GPUImageLookupFilter.h\"\n\n@implementation GPUImag"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAverageColor.h",
"chars": 615,
"preview": "#import \"GPUImageFilter.h\"\n\nextern NSString *const kGPUImageColorAveragingVertexShaderString;\n\n@interface GPUImageAverag"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAverageColor.m",
"chars": 7857,
"preview": "#import \"GPUImageAverageColor.h\"\n\nNSString *const kGPUImageColorAveragingVertexShaderString = SHADER_STRING\n(\n attribute"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAverageLuminanceThresholdFilter.h",
"chars": 301,
"preview": "#import \"GPUImageFilterGroup.h\"\n\n@interface GPUImageAverageLuminanceThresholdFilter : GPUImageFilterGroup\n\n// This is mu"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageAverageLuminanceThresholdFilter.m",
"chars": 1383,
"preview": "#import \"GPUImageAverageLuminanceThresholdFilter.h\"\n#import \"GPUImageLuminosity.h\"\n#import \"GPUImageLuminanceThresholdFi"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageBilateralFilter.h",
"chars": 372,
"preview": "#import \"GPUImageGaussianBlurFilter.h\"\n\n@interface GPUImageBilateralFilter : GPUImageGaussianBlurFilter\n{\n CGFloat fi"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageBilateralFilter.m",
"chars": 8993,
"preview": "#import \"GPUImageBilateralFilter.h\"\n\nNSString *const kGPUImageBilateralBlurVertexShaderString = SHADER_STRING\n(\n attribu"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageBoxBlurFilter.h",
"chars": 160,
"preview": "#import \"GPUImageGaussianBlurFilter.h\"\n\n/** A hardware-accelerated box blur of an image\n */\n@interface GPUImageBoxBlurFi"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageBoxBlurFilter.m",
"chars": 7010,
"preview": "#import \"GPUImageBoxBlurFilter.h\"\n\n\n@implementation GPUImageBoxBlurFilter\n\n+ (NSString *)vertexShaderForOptimizedBlurOfR"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageBrightnessFilter.h",
"chars": 242,
"preview": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageBrightnessFilter : GPUImageFilter\n{\n GLint brightnessUniform;\n}\n\n// Br"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageBrightnessFilter.m",
"chars": 1446,
"preview": "#import \"GPUImageBrightnessFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageBrightnes"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageBulgeDistortionFilter.h",
"chars": 600,
"preview": "#import \"GPUImageFilter.h\"\n\n/// Creates a bulge distortion on the image\n@interface GPUImageBulgeDistortionFilter : GPUIm"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageBulgeDistortionFilter.m",
"chars": 4525,
"preview": "#import \"GPUImageBulgeDistortionFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageBulg"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageCGAColorspaceFilter.h",
"chars": 90,
"preview": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageCGAColorspaceFilter : GPUImageFilter\n\n@end\n"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageCGAColorspaceFilter.m",
"chars": 3505,
"preview": "//\n// GPUImageCGAColorspaceFilter.m\n//\n\n#import \"GPUImageCGAColorspaceFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageCannyEdgeDetectionFilter.h",
"chars": 2728,
"preview": "#import \"GPUImageFilterGroup.h\"\n\n@class GPUImageGrayscaleFilter;\n@class GPUImageSingleComponentGaussianBlurFilter;\n@clas"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageCannyEdgeDetectionFilter.m",
"chars": 3281,
"preview": "#import \"GPUImageCannyEdgeDetectionFilter.h\"\n\n#import \"GPUImageGrayscaleFilter.h\"\n#import \"GPUImageDirectionalSobelEdgeD"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageChromaKeyBlendFilter.h",
"chars": 1086,
"preview": "#import \"GPUImageTwoInputFilter.h\"\n\n/** Selectively replaces a color in the first image with the second image\n */\n@inter"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageChromaKeyBlendFilter.m",
"chars": 4199,
"preview": "#import \"GPUImageChromaKeyBlendFilter.h\"\n\n// Shader code based on Apple's CIChromaKeyFilter example: https://developer.a"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageChromaKeyFilter.h",
"chars": 987,
"preview": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageChromaKeyFilter : GPUImageFilter\n{\n GLint colorToReplaceUniform, thres"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageChromaKeyFilter.m",
"chars": 3959,
"preview": "#import \"GPUImageChromaKeyFilter.h\"\n\n// Shader code based on Apple's CIChromaKeyFilter example: https://developer.apple."
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageClosingFilter.h",
"chars": 546,
"preview": "#import \"GPUImageFilterGroup.h\"\n\n@class GPUImageErosionFilter;\n@class GPUImageDilationFilter;\n\n// A filter that first pe"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageClosingFilter.m",
"chars": 1367,
"preview": "#import \"GPUImageClosingFilter.h\"\n#import \"GPUImageErosionFilter.h\"\n#import \"GPUImageDilationFilter.h\"\n\n@implementation "
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorBlendFilter.h",
"chars": 103,
"preview": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageColorBlendFilter : GPUImageTwoInputFilter\n\n@end\n"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorBlendFilter.m",
"chars": 2928,
"preview": "#import \"GPUImageColorBlendFilter.h\"\n\n/**\n * Color blend mode based upon pseudo code from the PDF specification.\n */\n#if"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorBurnBlendFilter.h",
"chars": 160,
"preview": "#import \"GPUImageTwoInputFilter.h\"\n\n/** Applies a color burn blend of two images\n */\n@interface GPUImageColorBurnBlendFi"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorBurnBlendFilter.m",
"chars": 1378,
"preview": "#import \"GPUImageColorBurnBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageColor"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorConversion.h",
"chars": 475,
"preview": "#ifndef GPUImageColorConversion_h\n#define GPUImageColorConversion_h\n\nextern GLfloat *kColorConversion601;\nextern GLfloat"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorConversion.m",
"chars": 4351,
"preview": "#import \"GPUImageFilter.h\"\n\n// Color Conversion Constants (YUV to RGB) including adjustment from 16-235/16-240 (video ra"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorDodgeBlendFilter.h",
"chars": 162,
"preview": "#import \"GPUImageTwoInputFilter.h\"\n\n/** Applies a color dodge blend of two images\n */\n@interface GPUImageColorDodgeBlend"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorDodgeBlendFilter.m",
"chars": 2427,
"preview": "#import \"GPUImageColorDodgeBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageColo"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorInvertFilter.h",
"chars": 92,
"preview": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageColorInvertFilter : GPUImageFilter\n{\n}\n\n@end\n"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorInvertFilter.m",
"chars": 1018,
"preview": "#import \"GPUImageColorInvertFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageInvertFr"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorLocalBinaryPatternFilter.h",
"chars": 136,
"preview": "#import \"GPUImage3x3TextureSamplingFilter.h\"\n\n@interface GPUImageColorLocalBinaryPatternFilter : GPUImage3x3TextureSampl"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorLocalBinaryPatternFilter.m",
"chars": 7516,
"preview": "#import \"GPUImageColorLocalBinaryPatternFilter.h\"\n\n// This is based on \"Accelerating image recognition on mobile devices"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorMatrixFilter.h",
"chars": 491,
"preview": "#import \"GPUImageFilter.h\"\n\n/** Transforms the colors of an image by applying a matrix to them\n */\n@interface GPUImageCo"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorMatrixFilter.m",
"chars": 2097,
"preview": "#import \"GPUImageColorMatrixFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageColorMat"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorPackingFilter.h",
"chars": 184,
"preview": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageColorPackingFilter : GPUImageFilter\n{\n GLint texelWidthUniform, texelH"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColorPackingFilter.m",
"chars": 4395,
"preview": "#import \"GPUImageColorPackingFilter.h\"\n\nNSString *const kGPUImageColorPackingVertexShaderString = SHADER_STRING\n(\n attri"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColourFASTFeatureDetector.h",
"chars": 957,
"preview": "#import \"GPUImageFilterGroup.h\"\n\n// This generates image-wide feature descriptors using the ColourFAST process, as devel"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColourFASTFeatureDetector.m",
"chars": 1277,
"preview": "#import \"GPUImageColourFASTFeatureDetector.h\"\n#import \"GPUImageColourFASTSamplingOperation.h\"\n#import \"GPUImageBoxBlurFi"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColourFASTSamplingOperation.h",
"chars": 1056,
"preview": "#import \"GPUImageTwoInputFilter.h\"\n\n// This is the feature extraction phase of the ColourFAST feature detector, as descr"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageColourFASTSamplingOperation.m",
"chars": 8564,
"preview": "#import \"GPUImageColourFASTSamplingOperation.h\"\n\nNSString *const kGPUImageColourFASTSamplingVertexShaderString = SHADER_"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageContrastFilter.h",
"chars": 295,
"preview": "#import \"GPUImageFilter.h\"\n\n/** Adjusts the contrast of the image\n */\n@interface GPUImageContrastFilter : GPUImageFilter"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageContrastFilter.m",
"chars": 1438,
"preview": "#import \"GPUImageContrastFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageContrastFra"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageCropFilter.h",
"chars": 429,
"preview": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageCropFilter : GPUImageFilter\n{\n GLfloat cropTextureCoordinates[8];\n}\n\n/"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageCropFilter.m",
"chars": 7723,
"preview": "#import \"GPUImageCropFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageCropFragmentSha"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageCrosshairGenerator.h",
"chars": 734,
"preview": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageCrosshairGenerator : GPUImageFilter\n{\n GLint crosshairWidthUniform, cr"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageCrosshairGenerator.m",
"chars": 4276,
"preview": "#import \"GPUImageCrosshairGenerator.h\"\n\nNSString *const kGPUImageCrosshairVertexShaderString = SHADER_STRING\n(\n attribut"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageCrosshatchFilter.h",
"chars": 424,
"preview": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageCrosshatchFilter : GPUImageFilter\n{\n GLint crossHatchSpacingUniform, l"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageCrosshatchFilter.m",
"chars": 4242,
"preview": "#import \"GPUImageCrosshatchFilter.h\"\n\n// Shader code based on http://machinesdontcare.wordpress.com/\n\n#if TARGET_IPHONE_"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDarkenBlendFilter.h",
"chars": 108,
"preview": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageDarkenBlendFilter : GPUImageTwoInputFilter\n{\n}\n\n@end\n"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDarkenBlendFilter.m",
"chars": 1414,
"preview": "#import \"GPUImageDarkenBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageDarkenBl"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDifferenceBlendFilter.h",
"chars": 112,
"preview": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageDifferenceBlendFilter : GPUImageTwoInputFilter\n{\n}\n\n@end\n"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDifferenceBlendFilter.m",
"chars": 1325,
"preview": "#import \"GPUImageDifferenceBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageDiff"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDilationFilter.h",
"chars": 845,
"preview": "#import \"GPUImageTwoPassTextureSamplingFilter.h\"\n\n// For each pixel, this sets it to the maximum value of the red channe"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDilationFilter.m",
"chars": 17698,
"preview": "#import \"GPUImageDilationFilter.h\"\n\n@implementation GPUImageDilationFilter\n\nNSString *const kGPUImageDilationRadiusOneVe"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDirectionalNonMaximumSuppressionFilter.h",
"chars": 817,
"preview": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageDirectionalNonMaximumSuppressionFilter : GPUImageFilter\n{\n GLint texel"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDirectionalNonMaximumSuppressionFilter.m",
"chars": 4683,
"preview": "#import \"GPUImageDirectionalNonMaximumSuppressionFilter.h\"\n\n@implementation GPUImageDirectionalNonMaximumSuppressionFilt"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDirectionalSobelEdgeDetectionFilter.h",
"chars": 142,
"preview": "#import \"GPUImage3x3TextureSamplingFilter.h\"\n\n@interface GPUImageDirectionalSobelEdgeDetectionFilter : GPUImage3x3Textur"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDirectionalSobelEdgeDetectionFilter.m",
"chars": 4545,
"preview": "#import \"GPUImageDirectionalSobelEdgeDetectionFilter.h\"\n\n@implementation GPUImageDirectionalSobelEdgeDetectionFilter\n\n#i"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDissolveBlendFilter.h",
"chars": 286,
"preview": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageDissolveBlendFilter : GPUImageTwoInputFilter\n{\n GLint mixUnifo"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDissolveBlendFilter.m",
"chars": 1700,
"preview": "#import \"GPUImageDissolveBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageDissol"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDivideBlendFilter.h",
"chars": 104,
"preview": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageDivideBlendFilter : GPUImageTwoInputFilter\n\n@end\n"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageDivideBlendFilter.m",
"chars": 3199,
"preview": "#import \"GPUImageDivideBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageDivideBl"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageEmbossFilter.h",
"chars": 245,
"preview": "#import \"GPUImage3x3ConvolutionFilter.h\"\n\n@interface GPUImageEmbossFilter : GPUImage3x3ConvolutionFilter\n\n// The strengt"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageEmbossFilter.m",
"chars": 1095,
"preview": "#import \"GPUImageEmbossFilter.h\"\n\n@implementation GPUImageEmbossFilter\n\n@synthesize intensity = _intensity; \n\n- (id)init"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageErosionFilter.h",
"chars": 560,
"preview": "#import \"GPUImageTwoPassTextureSamplingFilter.h\"\n\n// For each pixel, this sets it to the minimum value of the red channe"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageErosionFilter.m",
"chars": 13402,
"preview": "#import \"GPUImageErosionFilter.h\"\n#import \"GPUImageDilationFilter.h\"\n\n@implementation GPUImageErosionFilter\n\n#if TARGET_"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageExclusionBlendFilter.h",
"chars": 111,
"preview": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageExclusionBlendFilter : GPUImageTwoInputFilter\n{\n}\n\n@end\n"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageExclusionBlendFilter.m",
"chars": 1651,
"preview": "#import \"GPUImageExclusionBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageExclu"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageExposureFilter.h",
"chars": 236,
"preview": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageExposureFilter : GPUImageFilter\n{\n GLint exposureUniform;\n}\n\n// Exposu"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageExposureFilter.m",
"chars": 1415,
"preview": "#import \"GPUImageExposureFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageExposureFra"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageFASTCornerDetectionFilter.h",
"chars": 1404,
"preview": "#import \"GPUImageFilterGroup.h\"\n\n@class GPUImageGrayscaleFilter;\n@class GPUImage3x3TextureSamplingFilter;\n@class GPUImag"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageFASTCornerDetectionFilter.m",
"chars": 3315,
"preview": "#import \"GPUImageFASTCornerDetectionFilter.h\"\n\n#import \"GPUImageGrayscaleFilter.h\"\n#import \"GPUImage3x3TextureSamplingFi"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageFalseColorFilter.h",
"chars": 632,
"preview": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageFalseColorFilter : GPUImageFilter\n{\n GLint firstColorUniform, secondCo"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageFalseColorFilter.m",
"chars": 2712,
"preview": "#import \"GPUImageFalseColorFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUFalseColorFrag"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGammaFilter.h",
"chars": 221,
"preview": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageGammaFilter : GPUImageFilter\n{\n GLint gammaUniform;\n}\n\n// Gamma ranges"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGammaFilter.m",
"chars": 1359,
"preview": "#import \"GPUImageGammaFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageGammaFragmentS"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGaussianBlurFilter.h",
"chars": 1843,
"preview": "#import \"GPUImageTwoPassTextureSamplingFilter.h\"\n\n/** A Gaussian blur filter\n Interpolated optimization based on Dani"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGaussianBlurFilter.m",
"chars": 23181,
"preview": "#import \"GPUImageGaussianBlurFilter.h\"\n\n@implementation GPUImageGaussianBlurFilter\n\n@synthesize texelSpacingMultiplier ="
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGaussianBlurPositionFilter.h",
"chars": 607,
"preview": "#import \"GPUImageTwoPassTextureSamplingFilter.h\"\n\n/** A more generalized 9x9 Gaussian blur filter\n */\n@interface GPUImag"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGaussianBlurPositionFilter.m",
"chars": 7717,
"preview": "#import \"GPUImageGaussianBlurPositionFilter.h\"\n\nNSString *const kGPUImageGaussianBlurPositionVertexShaderString = SHADER"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGaussianSelectiveBlurFilter.h",
"chars": 1171,
"preview": "#import \"GPUImageFilterGroup.h\"\n\n@class GPUImageGaussianBlurFilter;\n\n/** A Gaussian blur that preserves focus within a c"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGaussianSelectiveBlurFilter.m",
"chars": 4975,
"preview": "#import \"GPUImageGaussianSelectiveBlurFilter.h\"\n#import \"GPUImageGaussianBlurFilter.h\"\n#import \"GPUImageTwoInputFilter.h"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGlassSphereFilter.h",
"chars": 120,
"preview": "#import \"GPUImageSphereRefractionFilter.h\"\n\n@interface GPUImageGlassSphereFilter : GPUImageSphereRefractionFilter\n\n@end\n"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGlassSphereFilter.m",
"chars": 3894,
"preview": "#import \"GPUImageGlassSphereFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageGlassSph"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGrayscaleFilter.h",
"chars": 301,
"preview": "#import \"GPUImageFilter.h\"\n\nextern NSString *const kGPUImageLuminanceFragmentShaderString;\n\n/** Converts an image to gra"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageGrayscaleFilter.m",
"chars": 3802,
"preview": "#import \"GPUImageGrayscaleFilter.h\"\n\n@implementation GPUImageGrayscaleFilter\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_I"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHSBFilter.h",
"chars": 886,
"preview": "#import \"GPUImageColorMatrixFilter.h\"\n\n@interface GPUImageHSBFilter : GPUImageColorMatrixFilter\n\n/** Reset the filter to"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHSBFilter.m",
"chars": 8594,
"preview": "#import \"GPUImageHSBFilter.h\"\n\n@implementation GPUImageHSBFilter {\n float matrix[4][4];\n}\n\n- (id)init\n{\n self = [s"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHalftoneFilter.h",
"chars": 103,
"preview": "#import \"GPUImagePixellateFilter.h\"\n\n@interface GPUImageHalftoneFilter : GPUImagePixellateFilter\n\n@end\n"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHalftoneFilter.m",
"chars": 2748,
"preview": "#import \"GPUImageHalftoneFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageHalftoneFra"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHardLightBlendFilter.h",
"chars": 111,
"preview": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageHardLightBlendFilter : GPUImageTwoInputFilter\n{\n}\n\n@end\n"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHardLightBlendFilter.m",
"chars": 3278,
"preview": "#import \"GPUImageHardLightBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageHardL"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHarrisCornerDetectionFilter.h",
"chars": 2378,
"preview": "#import \"GPUImageFilterGroup.h\"\n\n@class GPUImageGaussianBlurFilter;\n@class GPUImageXYDerivativeFilter;\n@class GPUImageGr"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHarrisCornerDetectionFilter.m",
"chars": 9979,
"preview": "#import \"GPUImageHarrisCornerDetectionFilter.h\"\n#import \"GPUImageGaussianBlurFilter.h\"\n#import \"GPUImageXYDerivativeFilt"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHazeFilter.h",
"chars": 630,
"preview": "#import \"GPUImageFilter.h\"\n\n/*\n * The haze filter can be used to add or remove haze (similar to a UV filter)\n * \n * @aut"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHazeFilter.m",
"chars": 2103,
"preview": "#import \"GPUImageHazeFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageHazeFragmentSha"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHighPassFilter.h",
"chars": 507,
"preview": "#import \"GPUImageFilterGroup.h\"\n#import \"GPUImageLowPassFilter.h\"\n#import \"GPUImageDifferenceBlendFilter.h\"\n\n@interface "
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHighPassFilter.m",
"chars": 1159,
"preview": "#import \"GPUImageHighPassFilter.h\"\n\n@implementation GPUImageHighPassFilter\n\n@synthesize filterStrength;\n\n- (id)init;\n{\n "
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHighlightShadowFilter.h",
"chars": 368,
"preview": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageHighlightShadowFilter : GPUImageFilter\n{\n GLint shadowsUniform, highli"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHighlightShadowFilter.m",
"chars": 2756,
"preview": "#import \"GPUImageHighlightShadowFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageHigh"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHighlightShadowTintFilter.h",
"chars": 1032,
"preview": "//\n// GPUImageHighlightShadowTintFilter.h\n//\n//\n// Created by github.com/r3mus on 8/14/15.\n//\n//\n\n#import \"GPUImageFil"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHighlightShadowTintFilter.m",
"chars": 4834,
"preview": "//\n// GPUImageHighlightShadowTintFilter.m\n//\n// Created by github.com/r3mus on 8/14/15.\n//\n//\n\n#import \"GPUImageHighli"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHistogramEqualizationFilter.h",
"chars": 707,
"preview": "//\n// GPUImageHistogramEqualizationFilter.h\n// FilterShowcase\n//\n// Created by Adam Marcus on 19/08/2014.\n// Copyrig"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHistogramEqualizationFilter.m",
"chars": 10750,
"preview": "//\n// GPUImageHistogramEqualizationFilter.m\n// FilterShowcase\n//\n// Created by Adam Marcus on 19/08/2014.\n// Copyrig"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHistogramFilter.h",
"chars": 810,
"preview": "#import \"GPUImageFilter.h\"\n\ntypedef enum { kGPUImageHistogramRed, kGPUImageHistogramGreen, kGPUImageHistogramBlue, kGPUI"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHistogramFilter.m",
"chars": 11745,
"preview": "#import \"GPUImageHistogramFilter.h\"\n\n// Unlike other filters, this one uses a grid of GL_POINTs to sample the incoming i"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHistogramGenerator.h",
"chars": 127,
"preview": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageHistogramGenerator : GPUImageFilter\n{\n GLint backgroundColorUniform;\n}"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHistogramGenerator.m",
"chars": 2531,
"preview": "#import \"GPUImageHistogramGenerator.h\"\n\nNSString *const kGPUImageHistogramGeneratorVertexShaderString = SHADER_STRING\n(\n"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHoughTransformLineDetector.h",
"chars": 3053,
"preview": "#import \"GPUImageFilterGroup.h\"\n#import \"GPUImageThresholdEdgeDetectionFilter.h\"\n#import \"GPUImageParallelCoordinateLine"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHoughTransformLineDetector.m",
"chars": 9693,
"preview": "#import \"GPUImageHoughTransformLineDetector.h\"\n\n@interface GPUImageHoughTransformLineDetector()\n\n- (void)extractLinePara"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHueBlendFilter.h",
"chars": 101,
"preview": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageHueBlendFilter : GPUImageTwoInputFilter\n\n@end\n"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHueBlendFilter.m",
"chars": 5671,
"preview": "#import \"GPUImageHueBlendFilter.h\"\n\n/**\n * Hue blend mode based upon pseudo code from the PDF specification.\n */\n#if TAR"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHueFilter.h",
"chars": 164,
"preview": "\n#import \"GPUImageFilter.h\"\n\n@interface GPUImageHueFilter : GPUImageFilter\n{\n GLint hueAdjustUniform;\n \n}\n@propert"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageHueFilter.m",
"chars": 3642,
"preview": "\n#import \"GPUImageHueFilter.h\"\n\n// Adapted from http://stackoverflow.com/questions/9234724/how-to-change-hue-of-a-textur"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageJFAVoronoiFilter.h",
"chars": 312,
"preview": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageJFAVoronoiFilter : GPUImageFilter\n{\n GLuint secondFilterOutputTexture;"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageJFAVoronoiFilter.m",
"chars": 12683,
"preview": "// adapted from unitzeroone - http://unitzeroone.com/labs/jfavoronoi/\n\n#import \"GPUImageJFAVoronoiFilter.h\"\n\n// The sh"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageKuwaharaFilter.h",
"chars": 664,
"preview": "#import \"GPUImageFilter.h\"\n\n/** Kuwahara image abstraction, drawn from the work of Kyprianidis, et. al. in their publica"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageKuwaharaFilter.m",
"chars": 5534,
"preview": "#import \"GPUImageKuwaharaFilter.h\"\n\n// Sourced from Kyprianidis, J. E., Kang, H., and Doellner, J. \"Anisotropic Kuwahara"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageKuwaharaRadius3Filter.h",
"chars": 132,
"preview": "//\n// GPUImageKuwaharaRadius3Filter.h\n\n#import \"GPUImageFilter.h\"\n\n@interface GPUImageKuwaharaRadius3Filter : GPUImageF"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageKuwaharaRadius3Filter.m",
"chars": 14860,
"preview": "#import \"GPUImageKuwaharaRadius3Filter.h\"\n\n// Sourced from Kyprianidis, J. E., Kang, H., and Doellner, J. \"Anisotropic K"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLanczosResamplingFilter.h",
"chars": 197,
"preview": "#import \"GPUImageTwoPassTextureSamplingFilter.h\"\n\n@interface GPUImageLanczosResamplingFilter : GPUImageTwoPassTextureSam"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLanczosResamplingFilter.m",
"chars": 9255,
"preview": "#import \"GPUImageLanczosResamplingFilter.h\"\n\nNSString *const kGPUImageLanczosVertexShaderString = SHADER_STRING\n(\n attri"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLaplacianFilter.h",
"chars": 114,
"preview": "#import \"GPUImage3x3ConvolutionFilter.h\"\n\n@interface GPUImageLaplacianFilter : GPUImage3x3ConvolutionFilter\n\n@end\n"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLaplacianFilter.m",
"chars": 4710,
"preview": "#import \"GPUImageLaplacianFilter.h\"\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageLaplacianFr"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLevelsFilter.h",
"chars": 1670,
"preview": "#import \"GPUImageFilter.h\"\n\n/**\n * Levels like Photoshop.\n *\n * The min, max, minOut and maxOut parameters are floats in"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLevelsFilter.m",
"chars": 5111,
"preview": "#import \"GPUImageLevelsFilter.h\"\n\n/*\n ** Gamma correction\n ** Details: http://blog.mouaif.org/2009/01/22/photoshop-gamma"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLightenBlendFilter.h",
"chars": 202,
"preview": "#import \"GPUImageTwoInputFilter.h\"\n\n/// Blends two images by taking the maximum value of each color component between th"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLightenBlendFilter.m",
"chars": 1251,
"preview": "#import \"GPUImageLightenBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageLighten"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLineGenerator.h",
"chars": 674,
"preview": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageLineGenerator : GPUImageFilter\n{\n GLint lineWidthUniform, lineColorUni"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLineGenerator.m",
"chars": 4322,
"preview": "#import \"GPUImageLineGenerator.h\"\n\nNSString *const kGPUImageLineGeneratorVertexShaderString = SHADER_STRING\n(\n attribute"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLinearBurnBlendFilter.h",
"chars": 108,
"preview": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageLinearBurnBlendFilter : GPUImageTwoInputFilter\n\n@end\n"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLinearBurnBlendFilter.m",
"chars": 1409,
"preview": "#import \"GPUImageLinearBurnBlendFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageLine"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLocalBinaryPatternFilter.h",
"chars": 131,
"preview": "#import \"GPUImage3x3TextureSamplingFilter.h\"\n\n@interface GPUImageLocalBinaryPatternFilter : GPUImage3x3TextureSamplingFi"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLocalBinaryPatternFilter.m",
"chars": 5139,
"preview": "#import \"GPUImageLocalBinaryPatternFilter.h\"\n\n// This is based on \"Accelerating image recognition on mobile devices usin"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLookupFilter.h",
"chars": 1496,
"preview": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageLookupFilter : GPUImageTwoInputFilter\n{\n GLint intensityUnifor"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLookupFilter.m",
"chars": 3490,
"preview": "#import \"GPUImageLookupFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageLookupFragmen"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLowPassFilter.h",
"chars": 465,
"preview": "#import \"GPUImageFilterGroup.h\"\n#import \"GPUImageBuffer.h\"\n#import \"GPUImageDissolveBlendFilter.h\"\n\n@interface GPUImageL"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLowPassFilter.m",
"chars": 1791,
"preview": "#import \"GPUImageLowPassFilter.h\"\n\n@implementation GPUImageLowPassFilter\n\n@synthesize filterStrength;\n\n- (id)init;\n{\n "
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLuminanceRangeFilter.h",
"chars": 274,
"preview": "#import \"GPUImageFilter.h\"\n\n@interface GPUImageLuminanceRangeFilter : GPUImageFilter\n{\n GLint rangeReductionUniform;\n"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLuminanceRangeFilter.m",
"chars": 2125,
"preview": "#import \"GPUImageLuminanceRangeFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageLumin"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLuminanceThresholdFilter.h",
"chars": 408,
"preview": "#import \"GPUImageFilter.h\"\n\n/** Pixels with a luminance above the threshold will appear white, and those below will be b"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLuminanceThresholdFilter.m",
"chars": 1757,
"preview": "#import \"GPUImageLuminanceThresholdFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageL"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLuminosity.h",
"chars": 641,
"preview": "#import \"GPUImageAverageColor.h\"\n\n@interface GPUImageLuminosity : GPUImageAverageColor\n{\n GLProgram *secondFilterProg"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLuminosity.m",
"chars": 14350,
"preview": "#import \"GPUImageLuminosity.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageInitialLuminosi"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLuminosityBlendFilter.h",
"chars": 108,
"preview": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageLuminosityBlendFilter : GPUImageTwoInputFilter\n\n@end\n"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageLuminosityBlendFilter.m",
"chars": 2965,
"preview": "#import \"GPUImageLuminosityBlendFilter.h\"\n\n/**\n * Luminosity blend mode based upon pseudo code from the PDF specificatio"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageMaskFilter.h",
"chars": 97,
"preview": "#import \"GPUImageTwoInputFilter.h\"\n\n@interface GPUImageMaskFilter : GPUImageTwoInputFilter\n\n@end\n"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageMaskFilter.m",
"chars": 2261,
"preview": "#import \"GPUImageMaskFilter.h\"\n\n#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE\nNSString *const kGPUImageMaskShaderStrin"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageMedianFilter.h",
"chars": 119,
"preview": "#import \"GPUImage3x3TextureSamplingFilter.h\"\n\n@interface GPUImageMedianFilter : GPUImage3x3TextureSamplingFilter\n\n@end\n"
},
{
"path": "GPURenderKit/GPURenderKit/GPUImage/Filters/GPUImageMedianFilter.m",
"chars": 6449,
"preview": "#import \"GPUImageMedianFilter.h\"\n\n/*\n 3x3 median filter, adapted from \"A Fast, Small-Radius GPU Median Filter\" by Morgan"
}
]
// ... and 366 more files (download for full content)
About this extraction
This page contains the full source code of the Dongdong1991/GPURenderKitDemo GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 566 files (42.5 MB), approximately 556.0k tokens, and a symbol index with 165 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.
Extracted by GitExtract — free GitHub repo to text converter for AI. Built by Nikandr Surkov.