[
  {
    "path": ".gitignore",
    "content": ".DS_Store\n/.build\n/Packages\n/*.xcodeproj\nxcuserdata/\nDerivedData/\n.swiftpm/config/registries.json\n.swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata\n.netrc\n"
  },
  {
    "path": ".swiftpm/xcode/package.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n<plist version=\"1.0\">\n<dict>\n\t<key>IDEDidComputeMac32BitWarning</key>\n\t<true/>\n</dict>\n</plist>\n"
  },
  {
    "path": "GenerateSwiftUICoreImage/GenerateSwiftUICoreImage/ContentView.swift",
    "content": "//\n//  ContentView.swift\n//  GenerateSwiftUICoreImage\n//\n//  Created by Dan Wood on 6/25/24.\n//\n\nimport SwiftUI\n\nstruct ContentView: View {\n    var body: some View {\n        VStack {\n            Image(systemName: \"globe\")\n                .imageScale(.large)\n                .foregroundStyle(.tint)\n            Text(\"See console output\")\n        }\n        .padding()\n    }\n}\n\n#Preview {\n    ContentView()\n}\n"
  },
  {
    "path": "GenerateSwiftUICoreImage/GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.entitlements",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n<plist version=\"1.0\">\n<dict>\n    <key>com.apple.security.app-sandbox</key>\n    <true/>\n    <key>com.apple.security.files.user-selected.read-only</key>\n    <true/>\n</dict>\n</plist>\n"
  },
  {
    "path": "GenerateSwiftUICoreImage/GenerateSwiftUICoreImage/GenerateSwiftUICoreImageApp.swift",
    "content": "//\n//  GenerateSwiftUICoreImageApp.swift\n//  GenerateSwiftUICoreImage\n//\n//  Created by Dan Wood on 6/25/24.\n//\n\nimport SwiftUI\n\n@main\nstruct GenerateSwiftUICoreImageApp: App {\n    var body: some Scene {\n        let _ = dumpFilters()\n\t\tlet _ = dumpUnknownProperties()\n        WindowGroup {\n            ContentView()\n        }\n    }\n}\n"
  },
  {
    "path": "GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.xcodeproj/project.pbxproj",
    "content": "// !$*UTF8*$!\n{\n\tarchiveVersion = 1;\n\tclasses = {\n\t};\n\tobjectVersion = 56;\n\tobjects = {\n\n/* Begin PBXBuildFile section */\n\t\tCE67B0D12C2C82A4003E692B /* docLookup.json in Resources */ = {isa = PBXBuildFile; fileRef = CE67B0D02C2C82A4003E692B /* docLookup.json */; };\n\t\tCE9A3AC02C2B75090085C241 /* GenerateSwiftUICoreImageApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE9A3ABF2C2B75090085C241 /* GenerateSwiftUICoreImageApp.swift */; };\n\t\tCE9A3AC22C2B75090085C241 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE9A3AC12C2B75090085C241 /* ContentView.swift */; };\n\t\tCE9A3AE82C2B75260085C241 /* abstracts.json in Resources */ = {isa = PBXBuildFile; fileRef = CE9A3AD12C2B75260085C241 /* abstracts.json */; };\n\t\tCE9A3AE92C2B75260085C241 /* CIImage-Generation.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE9A3AD22C2B75260085C241 /* CIImage-Generation.swift */; };\n\t\tCE9A3AEB2C2B75260085C241 /* FunctionMinima.json in Resources */ = {isa = PBXBuildFile; fileRef = CE9A3AD42C2B75260085C241 /* FunctionMinima.json */; };\n\t\tCE9A3AEC2C2B75260085C241 /* MissingParameterDocumentation.json in Resources */ = {isa = PBXBuildFile; fileRef = CE9A3AD52C2B75260085C241 /* MissingParameterDocumentation.json */; };\n\t\tCE9A3AED2C2B75260085C241 /* CIImage-Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE9A3AD72C2B75260085C241 /* CIImage-Extensions.swift */; };\n\t\tCE9A3AEE2C2B75260085C241 /* CIImage-Filters.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE9A3AD82C2B75260085C241 /* CIImage-Filters.swift */; };\n\t\tCE9A3AEF2C2B75260085C241 /* Image-Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE9A3AD92C2B75260085C241 /* Image-Extensions.swift */; };\n/* End PBXBuildFile section */\n\n/* Begin PBXFileReference section */\n\t\tCE67B0D02C2C82A4003E692B /* docLookup.json */ = {isa = PBXFileReference; lastKnownFileType = text.json; path = docLookup.json; sourceTree = 
\"<group>\"; };\n\t\tCE9A3ABC2C2B75090085C241 /* GenerateSwiftUICoreImage.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = GenerateSwiftUICoreImage.app; sourceTree = BUILT_PRODUCTS_DIR; };\n\t\tCE9A3ABF2C2B75090085C241 /* GenerateSwiftUICoreImageApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = GenerateSwiftUICoreImageApp.swift; sourceTree = \"<group>\"; };\n\t\tCE9A3AC12C2B75090085C241 /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = \"<group>\"; };\n\t\tCE9A3AD12C2B75260085C241 /* abstracts.json */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.json; path = abstracts.json; sourceTree = \"<group>\"; };\n\t\tCE9A3AD22C2B75260085C241 /* CIImage-Generation.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = \"CIImage-Generation.swift\"; sourceTree = \"<group>\"; };\n\t\tCE9A3AD42C2B75260085C241 /* FunctionMinima.json */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.json; path = FunctionMinima.json; sourceTree = \"<group>\"; };\n\t\tCE9A3AD52C2B75260085C241 /* MissingParameterDocumentation.json */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.json; path = MissingParameterDocumentation.json; sourceTree = \"<group>\"; };\n\t\tCE9A3AD72C2B75260085C241 /* CIImage-Extensions.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = \"CIImage-Extensions.swift\"; sourceTree = \"<group>\"; };\n\t\tCE9A3AD82C2B75260085C241 /* CIImage-Filters.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = \"CIImage-Filters.swift\"; sourceTree = \"<group>\"; };\n\t\tCE9A3AD92C2B75260085C241 /* Image-Extensions.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = 
\"Image-Extensions.swift\"; sourceTree = \"<group>\"; };\n\t\tCE9A3AF42C2B7B6A0085C241 /* GenerateSwiftUICoreImage.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = GenerateSwiftUICoreImage.entitlements; sourceTree = \"<group>\"; };\n/* End PBXFileReference section */\n\n/* Begin PBXFrameworksBuildPhase section */\n\t\tCE9A3AB92C2B75090085C241 /* Frameworks */ = {\n\t\t\tisa = PBXFrameworksBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = (\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n/* End PBXFrameworksBuildPhase section */\n\n/* Begin PBXGroup section */\n\t\tCE9A3AB32C2B75090085C241 = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tCE9A3ABE2C2B75090085C241 /* GenerateSwiftUICoreImage */,\n\t\t\t\tCE9A3AD62C2B75260085C241 /* Generator */,\n\t\t\t\tCE9A3ADA2C2B75260085C241 /* Sources */,\n\t\t\t\tCE9A3ABD2C2B75090085C241 /* Products */,\n\t\t\t);\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tCE9A3ABD2C2B75090085C241 /* Products */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tCE9A3ABC2C2B75090085C241 /* GenerateSwiftUICoreImage.app */,\n\t\t\t);\n\t\t\tname = Products;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tCE9A3ABE2C2B75090085C241 /* GenerateSwiftUICoreImage */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tCE9A3ABF2C2B75090085C241 /* GenerateSwiftUICoreImageApp.swift */,\n\t\t\t\tCE9A3AC12C2B75090085C241 /* ContentView.swift */,\n\t\t\t\tCE9A3AF42C2B7B6A0085C241 /* GenerateSwiftUICoreImage.entitlements */,\n\t\t\t);\n\t\t\tpath = GenerateSwiftUICoreImage;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tCE9A3AD62C2B75260085C241 /* Generator */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tCE67B0D02C2C82A4003E692B /* docLookup.json */,\n\t\t\t\tCE9A3AD12C2B75260085C241 /* abstracts.json */,\n\t\t\t\tCE9A3AD22C2B75260085C241 /* CIImage-Generation.swift */,\n\t\t\t\tCE9A3AD42C2B75260085C241 /* FunctionMinima.json 
*/,\n\t\t\t\tCE9A3AD52C2B75260085C241 /* MissingParameterDocumentation.json */,\n\t\t\t);\n\t\t\tname = Generator;\n\t\t\tpath = ../Generator;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n\t\tCE9A3ADA2C2B75260085C241 /* Sources */ = {\n\t\t\tisa = PBXGroup;\n\t\t\tchildren = (\n\t\t\t\tCE9A3AD72C2B75260085C241 /* CIImage-Extensions.swift */,\n\t\t\t\tCE9A3AD82C2B75260085C241 /* CIImage-Filters.swift */,\n\t\t\t\tCE9A3AD92C2B75260085C241 /* Image-Extensions.swift */,\n\t\t\t);\n\t\t\tname = Sources;\n\t\t\tpath = ../Sources;\n\t\t\tsourceTree = \"<group>\";\n\t\t};\n/* End PBXGroup section */\n\n/* Begin PBXNativeTarget section */\n\t\tCE9A3ABB2C2B75090085C241 /* GenerateSwiftUICoreImage */ = {\n\t\t\tisa = PBXNativeTarget;\n\t\t\tbuildConfigurationList = CE9A3ACB2C2B750B0085C241 /* Build configuration list for PBXNativeTarget \"GenerateSwiftUICoreImage\" */;\n\t\t\tbuildPhases = (\n\t\t\t\tCE9A3AB82C2B75090085C241 /* Sources */,\n\t\t\t\tCE9A3AB92C2B75090085C241 /* Frameworks */,\n\t\t\t\tCE9A3ABA2C2B75090085C241 /* Resources */,\n\t\t\t);\n\t\t\tbuildRules = (\n\t\t\t);\n\t\t\tdependencies = (\n\t\t\t);\n\t\t\tname = GenerateSwiftUICoreImage;\n\t\t\tproductName = GenerateSwiftUICoreImage;\n\t\t\tproductReference = CE9A3ABC2C2B75090085C241 /* GenerateSwiftUICoreImage.app */;\n\t\t\tproductType = \"com.apple.product-type.application\";\n\t\t};\n/* End PBXNativeTarget section */\n\n/* Begin PBXProject section */\n\t\tCE9A3AB42C2B75090085C241 /* Project object */ = {\n\t\t\tisa = PBXProject;\n\t\t\tattributes = {\n\t\t\t\tBuildIndependentTargetsInParallel = 1;\n\t\t\t\tLastSwiftUpdateCheck = 1540;\n\t\t\t\tLastUpgradeCheck = 1540;\n\t\t\t\tTargetAttributes = {\n\t\t\t\t\tCE9A3ABB2C2B75090085C241 = {\n\t\t\t\t\t\tCreatedOnToolsVersion = 15.4;\n\t\t\t\t\t};\n\t\t\t\t};\n\t\t\t};\n\t\t\tbuildConfigurationList = CE9A3AB72C2B75090085C241 /* Build configuration list for PBXProject \"GenerateSwiftUICoreImage\" */;\n\t\t\tcompatibilityVersion = \"Xcode 
14.0\";\n\t\t\tdevelopmentRegion = en;\n\t\t\thasScannedForEncodings = 0;\n\t\t\tknownRegions = (\n\t\t\t\ten,\n\t\t\t\tBase,\n\t\t\t);\n\t\t\tmainGroup = CE9A3AB32C2B75090085C241;\n\t\t\tproductRefGroup = CE9A3ABD2C2B75090085C241 /* Products */;\n\t\t\tprojectDirPath = \"\";\n\t\t\tprojectRoot = \"\";\n\t\t\ttargets = (\n\t\t\t\tCE9A3ABB2C2B75090085C241 /* GenerateSwiftUICoreImage */,\n\t\t\t);\n\t\t};\n/* End PBXProject section */\n\n/* Begin PBXResourcesBuildPhase section */\n\t\tCE9A3ABA2C2B75090085C241 /* Resources */ = {\n\t\t\tisa = PBXResourcesBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = (\n\t\t\t\tCE9A3AE82C2B75260085C241 /* abstracts.json in Resources */,\n\t\t\t\tCE9A3AEC2C2B75260085C241 /* MissingParameterDocumentation.json in Resources */,\n\t\t\t\tCE9A3AEB2C2B75260085C241 /* FunctionMinima.json in Resources */,\n\t\t\t\tCE67B0D12C2C82A4003E692B /* docLookup.json in Resources */,\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n/* End PBXResourcesBuildPhase section */\n\n/* Begin PBXSourcesBuildPhase section */\n\t\tCE9A3AB82C2B75090085C241 /* Sources */ = {\n\t\t\tisa = PBXSourcesBuildPhase;\n\t\t\tbuildActionMask = 2147483647;\n\t\t\tfiles = (\n\t\t\t\tCE9A3AE92C2B75260085C241 /* CIImage-Generation.swift in Sources */,\n\t\t\t\tCE9A3AC22C2B75090085C241 /* ContentView.swift in Sources */,\n\t\t\t\tCE9A3AEE2C2B75260085C241 /* CIImage-Filters.swift in Sources */,\n\t\t\t\tCE9A3AEF2C2B75260085C241 /* Image-Extensions.swift in Sources */,\n\t\t\t\tCE9A3AC02C2B75090085C241 /* GenerateSwiftUICoreImageApp.swift in Sources */,\n\t\t\t\tCE9A3AED2C2B75260085C241 /* CIImage-Extensions.swift in Sources */,\n\t\t\t);\n\t\t\trunOnlyForDeploymentPostprocessing = 0;\n\t\t};\n/* End PBXSourcesBuildPhase section */\n\n/* Begin XCBuildConfiguration section */\n\t\tCE9A3AC92C2B750B0085C241 /* Debug */ = {\n\t\t\tisa = XCBuildConfiguration;\n\t\t\tbuildSettings = {\n\t\t\t\tALWAYS_SEARCH_USER_PATHS = 
NO;\n\t\t\t\tASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;\n\t\t\t\tCLANG_ANALYZER_NONNULL = YES;\n\t\t\t\tCLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;\n\t\t\t\tCLANG_CXX_LANGUAGE_STANDARD = \"gnu++20\";\n\t\t\t\tCLANG_ENABLE_MODULES = YES;\n\t\t\t\tCLANG_ENABLE_OBJC_ARC = YES;\n\t\t\t\tCLANG_ENABLE_OBJC_WEAK = YES;\n\t\t\t\tCLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;\n\t\t\t\tCLANG_WARN_BOOL_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_COMMA = YES;\n\t\t\t\tCLANG_WARN_CONSTANT_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;\n\t\t\t\tCLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;\n\t\t\t\tCLANG_WARN_DOCUMENTATION_COMMENTS = YES;\n\t\t\t\tCLANG_WARN_EMPTY_BODY = YES;\n\t\t\t\tCLANG_WARN_ENUM_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_INFINITE_RECURSION = YES;\n\t\t\t\tCLANG_WARN_INT_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;\n\t\t\t\tCLANG_WARN_OBJC_LITERAL_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;\n\t\t\t\tCLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;\n\t\t\t\tCLANG_WARN_RANGE_LOOP_ANALYSIS = YES;\n\t\t\t\tCLANG_WARN_STRICT_PROTOTYPES = YES;\n\t\t\t\tCLANG_WARN_SUSPICIOUS_MOVE = YES;\n\t\t\t\tCLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;\n\t\t\t\tCLANG_WARN_UNREACHABLE_CODE = YES;\n\t\t\t\tCLANG_WARN__DUPLICATE_METHOD_MATCH = YES;\n\t\t\t\tCOPY_PHASE_STRIP = NO;\n\t\t\t\tDEBUG_INFORMATION_FORMAT = dwarf;\n\t\t\t\tENABLE_STRICT_OBJC_MSGSEND = YES;\n\t\t\t\tENABLE_TESTABILITY = YES;\n\t\t\t\tENABLE_USER_SCRIPT_SANDBOXING = YES;\n\t\t\t\tGCC_C_LANGUAGE_STANDARD = gnu17;\n\t\t\t\tGCC_DYNAMIC_NO_PIC = NO;\n\t\t\t\tGCC_NO_COMMON_BLOCKS = YES;\n\t\t\t\tGCC_OPTIMIZATION_LEVEL = 0;\n\t\t\t\tGCC_PREPROCESSOR_DEFINITIONS = (\n\t\t\t\t\t\"DEBUG=1\",\n\t\t\t\t\t\"$(inherited)\",\n\t\t\t\t);\n\t\t\t\tGCC_WARN_64_TO_32_BIT_CONVERSION = YES;\n\t\t\t\tGCC_WARN_ABOUT_RETURN_TYPE = 
YES_ERROR;\n\t\t\t\tGCC_WARN_UNDECLARED_SELECTOR = YES;\n\t\t\t\tGCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;\n\t\t\t\tGCC_WARN_UNUSED_FUNCTION = YES;\n\t\t\t\tGCC_WARN_UNUSED_VARIABLE = YES;\n\t\t\t\tLOCALIZATION_PREFERS_STRING_CATALOGS = YES;\n\t\t\t\tMTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;\n\t\t\t\tMTL_FAST_MATH = YES;\n\t\t\t\tONLY_ACTIVE_ARCH = YES;\n\t\t\t\tSWIFT_ACTIVE_COMPILATION_CONDITIONS = \"DEBUG $(inherited)\";\n\t\t\t\tSWIFT_OPTIMIZATION_LEVEL = \"-Onone\";\n\t\t\t};\n\t\t\tname = Debug;\n\t\t};\n\t\tCE9A3ACA2C2B750B0085C241 /* Release */ = {\n\t\t\tisa = XCBuildConfiguration;\n\t\t\tbuildSettings = {\n\t\t\t\tALWAYS_SEARCH_USER_PATHS = NO;\n\t\t\t\tASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;\n\t\t\t\tCLANG_ANALYZER_NONNULL = YES;\n\t\t\t\tCLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;\n\t\t\t\tCLANG_CXX_LANGUAGE_STANDARD = \"gnu++20\";\n\t\t\t\tCLANG_ENABLE_MODULES = YES;\n\t\t\t\tCLANG_ENABLE_OBJC_ARC = YES;\n\t\t\t\tCLANG_ENABLE_OBJC_WEAK = YES;\n\t\t\t\tCLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;\n\t\t\t\tCLANG_WARN_BOOL_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_COMMA = YES;\n\t\t\t\tCLANG_WARN_CONSTANT_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;\n\t\t\t\tCLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;\n\t\t\t\tCLANG_WARN_DOCUMENTATION_COMMENTS = YES;\n\t\t\t\tCLANG_WARN_EMPTY_BODY = YES;\n\t\t\t\tCLANG_WARN_ENUM_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_INFINITE_RECURSION = YES;\n\t\t\t\tCLANG_WARN_INT_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;\n\t\t\t\tCLANG_WARN_OBJC_LITERAL_CONVERSION = YES;\n\t\t\t\tCLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;\n\t\t\t\tCLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;\n\t\t\t\tCLANG_WARN_RANGE_LOOP_ANALYSIS = YES;\n\t\t\t\tCLANG_WARN_STRICT_PROTOTYPES = YES;\n\t\t\t\tCLANG_WARN_SUSPICIOUS_MOVE = YES;\n\t\t\t\tCLANG_WARN_UNGUARDED_AVAILABILITY = 
YES_AGGRESSIVE;\n\t\t\t\tCLANG_WARN_UNREACHABLE_CODE = YES;\n\t\t\t\tCLANG_WARN__DUPLICATE_METHOD_MATCH = YES;\n\t\t\t\tCOPY_PHASE_STRIP = NO;\n\t\t\t\tDEBUG_INFORMATION_FORMAT = \"dwarf-with-dsym\";\n\t\t\t\tENABLE_NS_ASSERTIONS = NO;\n\t\t\t\tENABLE_STRICT_OBJC_MSGSEND = YES;\n\t\t\t\tENABLE_USER_SCRIPT_SANDBOXING = YES;\n\t\t\t\tGCC_C_LANGUAGE_STANDARD = gnu17;\n\t\t\t\tGCC_NO_COMMON_BLOCKS = YES;\n\t\t\t\tGCC_WARN_64_TO_32_BIT_CONVERSION = YES;\n\t\t\t\tGCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;\n\t\t\t\tGCC_WARN_UNDECLARED_SELECTOR = YES;\n\t\t\t\tGCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;\n\t\t\t\tGCC_WARN_UNUSED_FUNCTION = YES;\n\t\t\t\tGCC_WARN_UNUSED_VARIABLE = YES;\n\t\t\t\tLOCALIZATION_PREFERS_STRING_CATALOGS = YES;\n\t\t\t\tMTL_ENABLE_DEBUG_INFO = NO;\n\t\t\t\tMTL_FAST_MATH = YES;\n\t\t\t\tSWIFT_COMPILATION_MODE = wholemodule;\n\t\t\t};\n\t\t\tname = Release;\n\t\t};\n\t\tCE9A3ACC2C2B750B0085C241 /* Debug */ = {\n\t\t\tisa = XCBuildConfiguration;\n\t\t\tbuildSettings = {\n\t\t\t\tASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;\n\t\t\t\tASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;\n\t\t\t\tCODE_SIGN_ENTITLEMENTS = GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.entitlements;\n\t\t\t\t\"CODE_SIGN_IDENTITY[sdk=macosx*]\" = \"Apple Development\";\n\t\t\t\tCODE_SIGN_STYLE = Automatic;\n\t\t\t\tCURRENT_PROJECT_VERSION = 1;\n\t\t\t\tDEVELOPMENT_ASSET_PATHS = \"\\\"GenerateSwiftUICoreImage/Preview Content\\\"\";\n\t\t\t\tDEVELOPMENT_TEAM = 3SP7MRA6P9;\n\t\t\t\tENABLE_HARDENED_RUNTIME = YES;\n\t\t\t\tENABLE_PREVIEWS = YES;\n\t\t\t\tGENERATE_INFOPLIST_FILE = YES;\n\t\t\t\t\"INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]\" = YES;\n\t\t\t\t\"INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphonesimulator*]\" = YES;\n\t\t\t\t\"INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphoneos*]\" = YES;\n\t\t\t\t\"INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphonesimulator*]\" = 
YES;\n\t\t\t\t\"INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphoneos*]\" = YES;\n\t\t\t\t\"INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphonesimulator*]\" = YES;\n\t\t\t\t\"INFOPLIST_KEY_UIStatusBarStyle[sdk=iphoneos*]\" = UIStatusBarStyleDefault;\n\t\t\t\t\"INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]\" = UIStatusBarStyleDefault;\n\t\t\t\tINFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = \"UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight\";\n\t\t\t\tINFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = \"UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight\";\n\t\t\t\tIPHONEOS_DEPLOYMENT_TARGET = 17.5;\n\t\t\t\tLD_RUNPATH_SEARCH_PATHS = \"@executable_path/Frameworks\";\n\t\t\t\t\"LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]\" = \"@executable_path/../Frameworks\";\n\t\t\t\tMACOSX_DEPLOYMENT_TARGET = 14.5;\n\t\t\t\tMARKETING_VERSION = 1.0;\n\t\t\t\tPRODUCT_BUNDLE_IDENTIFIER = com.gigliwood.GenerateSwiftUICoreImage;\n\t\t\t\tPRODUCT_NAME = \"$(TARGET_NAME)\";\n\t\t\t\tSDKROOT = auto;\n\t\t\t\tSUPPORTED_PLATFORMS = \"iphoneos iphonesimulator macosx\";\n\t\t\t\tSWIFT_EMIT_LOC_STRINGS = YES;\n\t\t\t\tSWIFT_VERSION = 5.0;\n\t\t\t\tTARGETED_DEVICE_FAMILY = \"1,2\";\n\t\t\t};\n\t\t\tname = Debug;\n\t\t};\n\t\tCE9A3ACD2C2B750B0085C241 /* Release */ = {\n\t\t\tisa = XCBuildConfiguration;\n\t\t\tbuildSettings = {\n\t\t\t\tASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;\n\t\t\t\tASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;\n\t\t\t\tCODE_SIGN_ENTITLEMENTS = GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.entitlements;\n\t\t\t\t\"CODE_SIGN_IDENTITY[sdk=macosx*]\" = \"Apple Development\";\n\t\t\t\tCODE_SIGN_STYLE = Automatic;\n\t\t\t\tCURRENT_PROJECT_VERSION = 1;\n\t\t\t\tDEVELOPMENT_ASSET_PATHS = \"\\\"GenerateSwiftUICoreImage/Preview Content\\\"\";\n\t\t\t\tDEVELOPMENT_TEAM = 
3SP7MRA6P9;\n\t\t\t\tENABLE_HARDENED_RUNTIME = YES;\n\t\t\t\tENABLE_PREVIEWS = YES;\n\t\t\t\tGENERATE_INFOPLIST_FILE = YES;\n\t\t\t\t\"INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]\" = YES;\n\t\t\t\t\"INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphonesimulator*]\" = YES;\n\t\t\t\t\"INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphoneos*]\" = YES;\n\t\t\t\t\"INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphonesimulator*]\" = YES;\n\t\t\t\t\"INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphoneos*]\" = YES;\n\t\t\t\t\"INFOPLIST_KEY_UILaunchScreen_Generation[sdk=iphonesimulator*]\" = YES;\n\t\t\t\t\"INFOPLIST_KEY_UIStatusBarStyle[sdk=iphoneos*]\" = UIStatusBarStyleDefault;\n\t\t\t\t\"INFOPLIST_KEY_UIStatusBarStyle[sdk=iphonesimulator*]\" = UIStatusBarStyleDefault;\n\t\t\t\tINFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = \"UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight\";\n\t\t\t\tINFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = \"UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight\";\n\t\t\t\tIPHONEOS_DEPLOYMENT_TARGET = 17.5;\n\t\t\t\tLD_RUNPATH_SEARCH_PATHS = \"@executable_path/Frameworks\";\n\t\t\t\t\"LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]\" = \"@executable_path/../Frameworks\";\n\t\t\t\tMACOSX_DEPLOYMENT_TARGET = 14.5;\n\t\t\t\tMARKETING_VERSION = 1.0;\n\t\t\t\tPRODUCT_BUNDLE_IDENTIFIER = com.gigliwood.GenerateSwiftUICoreImage;\n\t\t\t\tPRODUCT_NAME = \"$(TARGET_NAME)\";\n\t\t\t\tSDKROOT = auto;\n\t\t\t\tSUPPORTED_PLATFORMS = \"iphoneos iphonesimulator macosx\";\n\t\t\t\tSWIFT_EMIT_LOC_STRINGS = YES;\n\t\t\t\tSWIFT_VERSION = 5.0;\n\t\t\t\tTARGETED_DEVICE_FAMILY = \"1,2\";\n\t\t\t};\n\t\t\tname = Release;\n\t\t};\n/* End XCBuildConfiguration section */\n\n/* Begin XCConfigurationList section */\n\t\tCE9A3AB72C2B75090085C241 /* Build configuration list 
for PBXProject \"GenerateSwiftUICoreImage\" */ = {\n\t\t\tisa = XCConfigurationList;\n\t\t\tbuildConfigurations = (\n\t\t\t\tCE9A3AC92C2B750B0085C241 /* Debug */,\n\t\t\t\tCE9A3ACA2C2B750B0085C241 /* Release */,\n\t\t\t);\n\t\t\tdefaultConfigurationIsVisible = 0;\n\t\t\tdefaultConfigurationName = Release;\n\t\t};\n\t\tCE9A3ACB2C2B750B0085C241 /* Build configuration list for PBXNativeTarget \"GenerateSwiftUICoreImage\" */ = {\n\t\t\tisa = XCConfigurationList;\n\t\t\tbuildConfigurations = (\n\t\t\t\tCE9A3ACC2C2B750B0085C241 /* Debug */,\n\t\t\t\tCE9A3ACD2C2B750B0085C241 /* Release */,\n\t\t\t);\n\t\t\tdefaultConfigurationIsVisible = 0;\n\t\t\tdefaultConfigurationName = Release;\n\t\t};\n/* End XCConfigurationList section */\n\t};\n\trootObject = CE9A3AB42C2B75090085C241 /* Project object */;\n}\n"
  },
  {
    "path": "GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.xcodeproj/project.xcworkspace/contents.xcworkspacedata",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<Workspace\n   version = \"1.0\">\n   <FileRef\n      location = \"self:\">\n   </FileRef>\n</Workspace>\n"
  },
  {
    "path": "GenerateSwiftUICoreImage/GenerateSwiftUICoreImage.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n<plist version=\"1.0\">\n<dict>\n\t<key>IDEDidComputeMac32BitWarning</key>\n\t<true/>\n</dict>\n</plist>\n"
  },
  {
    "path": "Generator/CIImage-Generation.swift",
    "content": "//\n//  CIImage-Generation.swift\n//  SwiftUI Core Image\n//\n//  Created by Dan Wood on 4/27/23.\n//\n// When executed, this outputs Swift code that can be pasted into the file \"CIImage+Generated.swift\".\n//\n// This will run under iOS or macOS and the resulting code is almost the same. Notably in affineClamp and affineTile the default values are not\n// the same. Also as noted in the documentation that we generate, the `cubeDimension` parameter has a different range between iOS and macOS.\n\nimport Foundation\nimport CoreImage\nimport CoreImage.CIFilterBuiltins\n\n#if canImport(UIKit)\nprivate typealias AffineTransform = CGAffineTransform\n#elseif canImport(AppKit)\nprivate typealias AffineTransform = NSAffineTransform\n#endif\n\nprivate var unknownProperties: [String: [String: String]] = [:]\n\nfunc dumpFilters() {\n\n\t/*\n\n\t New documentation base found at\n\t https://developer.apple.com/documentation/coreimage\n\t or\n\t https://developer.apple.com/documentation/coreimage/cifilter\n\n\t 15 categories. Open each in tab. Select all, copy, paste into rich text TextEdit doc. Save as HTML.\n\n\t Copy this source, then in terminal, grep out the lines I want:\n\n\t pbpaste | grep 'class func' | grep 'any CIFilter ' | sort | uniq > ~/Desktop/AllFunctions.html\n\n\t (There are a few duplicated functions; gonna not worry about right now)\n\n\t In BBEdit, remove the stuff before the\n\n\t From that, in BBEdit, grep replace all lines:\n\n\t ^.+<a href=\"https://developer.apple.com/documentation/coreimage/cifilter/([^\"]+)\">class func <span class=\"[^\"]+\">([^>]+)</span><span class=\"[^\"]+\">\\(\\) -&gt; any CIFilter &amp; ([^>]+)</span></a></span></p>\n\t to:\n\n\t \"\\2\": \"\\1\",\n\n\t and then…\n\n\t ^.+<a href=\"https://developer.apple.com/documentation/coreimage/cifilter/([^\"]+)\">class func ([^(]+).+? 
any CIFilter &amp; ([^<]+)<span class=\"[^\"]+\"></span></a></span></p>\n\n\t to:\n\n\t \"\\2\": \"\\1\",\n\n\n\t Save as RawLookup.json to Desktop\n\n\t cat ~/Desktop/RawLookup.json | sort | uniq > ~/Desktop/docLookup.json\n\n\t Now edit to include { and } and remove last comma\n\n\t This file lets us know the documentation URL fragment to append to https://developer.apple.com/documentation/coreimage/cifilter/\n\t */\n\n\tguard let url = Bundle.main.url(forResource: \"docLookup\", withExtension: \"json\"),\n\t\t  let data = try? Data(contentsOf: url),\n\t\t  let json = try? JSONSerialization.jsonObject(with: data, options: []),\n\t\t  let docLookup: [String: String] = json as? [String: String]\n\telse { print(\"// 🛑 can't load docLookup.json\"); return }\n\n\t\n\t/*\n\t Load abstracts for all functions that are documented on the OLD reference page. Still, some of these descriptions are a bit more descriptive than the built-in descriptions.\n\n\t Possible improvement, scrape the same pages that are used above to generate docLookup.json to get the most up-to-date abstracts from the web.\n\n\t Start with\n\t https://developer.apple.com/library/archive/documentation/GraphicsImaging/Reference/CoreImageFilterReference/\n\n\t auto-expand all symbols\n\n\t get HTML source\n\t in BBEdit change all instances (with Grep) of:\n\t +href=\"#//apple_ref/doc/filter/ci/([^\"]+)\"\\n +title=\"([^\"]+)\">\n\t to:\n\t •\"\\1\": \"\\2\",\n\n\t Sort, extract lines starting with •\n\t Paste and preserve formatting into abstracts.json; fix the last line.\n\t Look for any little tweaks that may be needed.\n\n\t */\n\tguard let abstractsURL = Bundle.main.url(forResource: \"abstracts\", withExtension: \"json\"),\n\t\t  let abstractsData = try? Data(contentsOf: abstractsURL),\n\t\t  let abstractsJSON = try? JSONSerialization.jsonObject(with: abstractsData, options: []),\n\t\t  let abstractLookup: [String: String] = abstractsJSON as? 
[String: String]\n\telse { print(\"// 🛑 can't load abstracts.json\"); return }\n\n\t/*\n\t A dictionary mapping filters (pretty function names) to override iOS versions when we have noted that the core image functions (or occasionally parameters of them) required newer OSs.\n\n\t Not sure where we got this originally! We may need to update some of these.\n\n\t */\n\tguard let minimaURL = Bundle.main.url(forResource: \"FunctionMinima\", withExtension: \"json\"),\n\t\t  let minimaData = try? Data(contentsOf: minimaURL),\n\t\t  let minimaJSON = try? JSONSerialization.jsonObject(with: minimaData, options: []),\n\t\t  let functionMinima: [String: String] = minimaJSON as? [String: String]\n\telse { print(\"// 🛑 can't load FunctionMinima.json\"); return }\n\n\t/* Generate this list by running the code; it finds inputs missing documentation replacing with \"_____TODO_____\". Update the MissingParameterDocumentation.json file as this is improved. Documentation can come from whatever sources can be scraped together; use \"_NOTE\" key just to notate how we found the information.\n\t */\n\tguard let missingURL = Bundle.main.url(forResource: \"MissingParameterDocumentation\", withExtension: \"json\"),\n\t\t  let missingData = try? Data(contentsOf: missingURL),\n\t\t  let missingJSON = try? JSONSerialization.jsonObject(with: missingData, options: []),\n\t\t  let forUnknownProperties = missingJSON as? 
[String: [String: String]]\n\telse { print(\"// 🛑 can't load MissingParameterDocumentation.json\"); return }\n\tunknownProperties = forUnknownProperties\n\n\tlet ciFilterList = CIFilter.filterNames(inCategories: nil)\n\n\tvar generators: [String: CIFilter] = [:]\n\tvar imageToImage: [String: CIFilter] = [:]\n\n\tfor filterName in ciFilterList {\n\n\t\tguard let filter = CIFilter(name: filterName) else { print(\"// 🛑 can't instantiate \\(filterName)\"); continue }\n\n\t\tif !filter.inputKeys.contains(kCIInputImageKey) {\n\t\t\tgenerators[filterName] = filter\n\t\t} else if filter.outputKeys.contains(kCIOutputImageKey) {\n\t\t\timageToImage[filterName] = filter\n\t\t} else {\n\t\t\tprint(\"// 🛑 Don't know what to do with \\(filterName) - outputKeys = \\(filter.outputKeys)\")\n\t\t}\n\t}\n\n\tprint(\"//\")\n\tprint(\"// Automatically generated by CIImage-Generation.swift - do not edit\")\n\tprint(\"//\")\n\tprint(\"\")\n\tprint(\"import Foundation\")\n\tprint(\"import CoreImage\")\n\tprint(\"import CoreImage.CIFilterBuiltins\")\n\tprint(\"import CoreML\")\n\tprint(\"import AVFoundation\")\n\tprint(\"\")\n\tprint(\"public extension CIImage {\")\n\tprint(\"\")\n\tprint(\"//\")\n\tprint(\"// MARK: IMAGE-TO-IMAGE FILTERS\")\n\tprint(\"//\")\n\tfor filterName in imageToImage.keys.sorted() {\n\t\tguard let filter: CIFilter = imageToImage[filterName] else { continue }\n\t\toutputImageToImage(filter, abstractLookup: abstractLookup, docLookup: docLookup, functionMinima: functionMinima)\n\t}\n\tprint(\"\")\n\tprint(\"//\")\n\tprint(\"// MARK: GENERATORS\")\n\tprint(\"//\")\n\tfor filterName in generators.keys.sorted() {\n\t\tguard let filter: CIFilter = generators[filterName] else { continue }\n\t\toutputGeneratorFilter(filter, abstractLookup: abstractLookup, docLookup: docLookup, functionMinima: functionMinima)\n\t}\n\n\t// End of class extension\n\tprint(\"}\")\n\tprint(\"\\n\\n\\n\\n\\n\\n\\n\")\n}\n\n// Use this to start collecting properties needing some documentation, to 
then put into MissingParameterDocumentation.json\nfunc dumpUnknownProperties() {\n\tdo {\n\t\tlet theJSONData = try JSONSerialization.data(\n\t\t\twithJSONObject: unknownProperties,\n\t\t\toptions: [.sortedKeys, .prettyPrinted]\n\t\t)\n\t\tif let theJSONText = String(data: theJSONData,\n\t\t\t\t\t\t\t\t\tencoding: String.Encoding.utf8) {\n\t\t\tprint(\"\\n\\n\\n_________________________\\n\\nDumped properties missing documentation = \\n\\n\\n\\(theJSONText)\")\n\t\t} else {\n\t\t\tprint(\"Unable to convert data to JSON\")\n\t\t}\n\t}\n\tcatch {\n\t\tprint(error)\n\t}\n}\n\nprivate func outputGeneratorFilter(_ filter: CIFilter, abstractLookup: [String: String], docLookup: [String: String], functionMinima: [String: String]) {\n\tlet filterName = filter.name\n\n\tlet filtersThatAlreadyHaveInitializer: [String: String] = [\"CIConstantColorGenerator\": \"init(color: CIColor)\"]\n\n\tif let existingFunction: String = filtersThatAlreadyHaveInitializer[filterName] {\n\t\tprint(\"// ℹ️ \\(filterName) already has a CIImage initializer: \\(existingFunction)\")\n\t\treturn\n\t}\n\n\toutputDocumentation(filter, isGenerator: true, abstractLookup: abstractLookup, docLookup: docLookup)\n\toutputOSVersion(filter, functionMinima: functionMinima)\n\toutputImageFunction(filter, isGenerator: true)\n}\n\nprivate func outputDocumentation(_ filter: CIFilter, isGenerator: Bool, abstractLookup: [String: String], docLookup: [String: String]) {\n\n\tlet filterName = filter.name\n\tlet description: String? = CIFilter.localizedDescription(forFilterName: filterName)\n\tlet categories: Array<String> = filter.attributes[kCIAttributeFilterCategories] as? Array<String> ?? []\n\tlet filterDisplayName: String = filter.attributes[kCIAttributeFilterDisplayName] as? String ?? \"\"\n\tlet documentationURL: URL? = filter.attributes[kCIAttributeReferenceDocumentation] as? 
URL\n\n\t// https://developer.apple.com/documentation/xcode/writing-symbol-documentation-in-your-source-files\n\tprint(\"\\n/// \\(filterDisplayName)\")\n\tprint(\"///\")\n\tif let description {\n\t\tif let abstract = abstractLookup[filterName], !abstract.hasPrefix(\"Returns \"), abstract.count > description.count {\n\t\t\t// Replace description with longer abstract scraped from the website, unless it starts with 'Returns ' since we use that for the output.\n\t\t\tprint(\"/// \\(abstract)\")\n\t\t} else {\n\t\t\tprint(\"/// \\(description)\")\n\t\t}\n\t\tprint(\"///\")\n\t}\n\n\t// Convert, for example, CIAccordionFoldTransition to accordionFoldTransition\n\tlet functionFilterNameCapitalized = filterName.dropFirst(2)\n\tvar functionFilterName = (functionFilterNameCapitalized.first?.lowercased() ?? \"\") + functionFilterNameCapitalized.dropFirst()\n\n\tlet manualNameLookup = [\"CICMYKHalftone\": \"cmykHalftone\", \"CIPDF417BarcodeGenerator\": \"pdf417BarcodeGenerator\", \"CIQRCodeGenerator\": \"qrCodeGenerator\"]\n\tif let foundManualLookup = manualNameLookup[filterName] {\n\t\tfunctionFilterName = foundManualLookup\n\t}\n\n\t// These are still in beta, so I'm not seeing them on the main category lists. 
https://developer.apple.com/documentation/coreimage/cifilter\n\tlet manualURLLookup = [\"CIAreaBoundsRed\": \"4401847-areaboundsred\",\n\t\t\t\t\t\t   \"CIMaximumScaleTransform\": \"4401870-maximumscaletransform\",\n\t\t\t\t\t\t   \"CIToneMapHeadroom\": \"4401878-tonemapheadroom\",\n\t\t\t\t\t\t   \"CIAreaAlphaWeightedHistogram\": \"4401846-areaalphaweightedhistogram\"\n\t]\n\n\tlet newDocURLFragment: String?\n\tif let manualURLFragment = manualURLLookup[filterName] {\n\t\tnewDocURLFragment = manualURLFragment\n\t} else {\n\t\tnewDocURLFragment = docLookup[functionFilterName]\n\t}\n\n\tif let newDocURLFragment {\n\t\tprint(\"/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/\\(newDocURLFragment))\")\n\t} else {\n\t\tlet withoutSuffix = functionFilterName.replacingOccurrences(of: \"Filter\", with: \"\", options: [.backwards, .anchored])\n\t\tif let newDocURLFragment = docLookup[withoutSuffix] {\n\t\t\tprint(\"/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/\\(newDocURLFragment))\")\n\t\t} else {\n\t\t\tprint(\"/// ⚠️ No documentation available for \\(filterName)\")\n\t\t}\n\t}\n\n\tif let documentationURL {\n\t\tif nil != abstractLookup[filterName] {\n\t\t\tlet urlFragment: String\n#if canImport(UIKit)\n\t\t\turlFragment = \"http://developer.apple.com/library/ios\"\n#elseif canImport(AppKit)\n\t\t\turlFragment = \"http://developer.apple.com/library/mac\"\n#endif\n\n\t\t\tvar urlString: String = documentationURL.absoluteString.replacingOccurrences(of: urlFragment,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t with: \"https://developer.apple.com/library/archive\",\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t options: .anchored)\n\t\t\turlString = urlString.replacingOccurrences(of: \"https://developer.apple.com/library/archive/documentation/GraphicsImaging/Reference/CoreImageFilterReference/index.html\", with: \"https://t.ly/Gyd6\")\n\n\n\n\t\t\tprint(\"/// [Classic 
Documentation](\\(urlString))\")\n\t\t}\n\n\t\t// Special cases for documentation\n\t\tif filterName == \"CIDepthBlurEffect\" {\n\t\t\t// Some helpful hints since this is otherwise undocumented\n\t\t\tprint(\"/// [WWDC Video](https://devstreaming-cdn.apple.com/videos/wwdc/2017/508wdyl5rm2jy9z8/508/508_hd_image_editing_with_depth.mp4)\")\n\t\t\tprint(\"/// [WWDC Slides](https://devstreaming-cdn.apple.com/videos/wwdc/2017/508wdyl5rm2jy9z8/508/508_image_editing_with_depth.pdf)\")\n\t\t} else if filterName == \"CICoreMLModelFilter\" {\n\t\t\tprint(\"/// [WWDC Video](https://developer.apple.com/videos/play/wwdc2018-719/?time=2378)\")\n\t\t}\n\t\tprint(\"///\")\n\t}\n\tif categories.count == 1, let category = categories.first {\n\t\tprint(\"/// Category: \\(CIFilter.localizedName(forCategory: category))\")\n\t\tprint(\"///\")\n\t} else if categories.count > 1 {\n\t\tlet prettyList: String = categories.map { CIFilter.localizedName(forCategory: $0) }.joined(separator: \", \")\n\t\tprint(\"/// Categories: \\(prettyList)\")\n\t\tprint(\"///\")\n\t}\n\tprint(\"///\")\n\tprint(\"/// - Parameters:\")\n\n\tvar adjustedInputKeys = filter.inputKeys.filter { $0 != kCIInputImageKey }\n\tif !isGenerator && filter.identityInputKeys.isEmpty && !filter.inputKeys.contains(\"inputBackgroundImage\") {\n\t\tadjustedInputKeys.append(\"active\")\n\t}\n\tfor inputKey in adjustedInputKeys {\n\t\tguard inputKey != \"active\" else {\n\t\t\tprint(\"///   - active: should this filter be applied\")\n\t\t\tcontinue\n\t\t}\n\t\tguard let attributes = filter.attributes[inputKey] as? [String: AnyObject],\n\t\t\t  let attributeClass = attributes[kCIAttributeClass] as? String\n\t\telse {\n\t\t\tprint(\"///   - \\(inputKey): 🛑 couldn't get input attributes\")\n\t\t\tcontinue\n\t\t}\n\n\t\tlet displayName: String = attributes[kCIAttributeDisplayName] as? String ?? 
\"\"\t// space-separated\n\t\tlet longerInput: String = parameterName(displayName: displayName, filterName: filterName)\n\t\tvar description:  String = attributes[kCIAttributeDescription] as? String ?? \"[unknown]\"\n\n\t\tif nil == attributes[kCIAttributeDescription] {\n\t\t\t\n\t\t\t// TEMPORARY CODE TO COLLECT UNKNOWN PROPERTIES\n\t\t\tvar foundUnknownPropertiesForFilter: [String: String] = unknownProperties[filterName] ?? [:]\n\t\t\tif nil == foundUnknownPropertiesForFilter[longerInput] {\n\t\t\t\tfoundUnknownPropertiesForFilter[longerInput] = \"_____TODO_____\"\n\t\t\t}\n\t\t\tunknownProperties[filterName] = foundUnknownPropertiesForFilter\n\t\t\t\n\t\t\tif let missingParameters: [String: String] = unknownProperties[filterName],\n\t\t\t   let replacementDocumentation: String = missingParameters[longerInput] {\n\t\t\t\tdescription = replacementDocumentation\n\t\t\t}\n\t\t}\n\t\t// Remove rounding information since we are passing in integers directly.\n\t\tdescription = description.replacing(\" The value will be rounded to the nearest odd integer.\", with: \"\")\n\t\tdescription = description.replacing(\" Set to nil for automatic.\", with: \"\")\n\t\t// Fix this weird ObjC style documentation\n\t\tdescription = description.replacing(\"Force a compact style Aztec code to @YES or @NO.\",\n\t\t\t\t\t\t\t\t\t\t\twith: \"A Boolean that specifies whether to force a compact style Aztec code.\")\n\t\tdescription = description.replacing(\"Force compaction style to @YES or @NO.\",\n\t\t\t\t\t\t\t\t\t\t\twith: \"A Boolean value specifying whether to force compaction style.\")\n\n\t\tprint(\"///   - \\(longerInput): \\(description)\", terminator: \"\")\n\n\t\t// For numbers, show the range on the same line\n\t\tswitch attributeClass {\n\t\tcase \"NSNumber\":\n\t\t\tguard attributes[kCIAttributeType] as? String != kCIAttributeTypeBoolean, longerInput != \"extrapolate\" else { break }\n\t\t\tguard longerInput != \"cubeDimension\" else {\n\t\t\t\t// Special case. 
MacOS and iOS report different values so show that here\n\t\t\t\tprint(\"(2...64 iOS; 2...128 macOS)\", terminator: \"\")\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tlet minimumValue: Float? = (attributes[kCIAttributeMin] as? NSNumber)?.floatValue\n\t\t\tlet maximumValue: Float? = (attributes[kCIAttributeMax] as? NSNumber)?.floatValue\n\t\t\t// Ignore very large maximum value since it's not practical\n\t\t\tif let minimumValue, let maximumValue, maximumValue < 0x0800_0000_00000_0000 {\n\t\t\t\tprint(\" (\\(minimumValue.format5)...\\(maximumValue.format5))\", terminator: \"\")\n\t\t\t} else if let minimumValue {\n\t\t\t\tprint(\" (\\(minimumValue.format5)...)\", terminator: \"\")\n\t\t\t} else if let maximumValue, maximumValue < 0x0800_0000_00000_0000 {\n\t\t\t\tprint(\" (...\\(maximumValue.format5))\", terminator: \"\")\n\t\t\t}\n\n\t\tdefault:\n\t\t\tbreak\n\t\t}\n\t\tprint(\"\")\t// finish up the line\n\n\t}\n\n\n\tif filter.outputKeys.contains(kCIOutputImageKey) {\n\t\tif isGenerator {\n\t\t\tif let abstract: String = abstractLookup[filterName],\n\t\t\t   let match = abstract.firstMatch(of: /^Generates*\\h/) {\n\t\t\t\tlet abstractWithoutReturnsPrefix = abstract[match.range.upperBound...]\n\t\t\t\tlet sentences = Array(abstractWithoutReturnsPrefix.split(separator: /\\./))\n\t\t\t\tlet firstSentence = sentences.first ?? abstractWithoutReturnsPrefix\n\t\t\t\tprint(\"/// - Returns: \\(firstSentence)\")\n\t\t\t} else if let description,\n\t\t\t   let match = description.firstMatch(of: /^Generates*\\h/) {\n\t\t\t\tlet descriptionWithoutReturnsPrefix = description[match.range.upperBound...]\n\t\t\t\tlet sentences = Array(descriptionWithoutReturnsPrefix.split(separator: /\\./))\n\t\t\t\tlet firstSentence = sentences.first ?? 
descriptionWithoutReturnsPrefix\n\t\t\t\tprint(\"/// - Returns: \\(firstSentence)\")\n\t\t\t} else {\n\t\t\t\tprint(\"/// - Returns: new `CIImage`\")\n\t\t\t}\n\t\t} else {\n\t\t\tvar returnInfo: String\n\t\t\tif var abstract = abstractLookup[filterName], abstract.hasPrefix(\"Returns \") {\n\t\t\t\tabstract = String(abstract.dropFirst(8))\n\t\t\t\tabstract = abstract.replacingOccurrences(of: \".\", with: \"\", options: [.anchored, .backwards])\t// remove any ending period\n\t\t\t\treturnInfo = abstract\n\t\t\t} else {\n\t\t\t\treturnInfo = \"processed new `CIImage`\"\n\t\t\t}\n\t\t\tif filter.identityInputKeys.isEmpty && filter.inputKeys.contains(\"inputBackgroundImage\") {\n\t\t\t\t// Append info about when active is false\n\t\t\t\treturnInfo += \", or identity if `backgroundImage` is nil\"\n\t\t\t} else if filter.identityInputKeys.isEmpty {\n\t\t\t\t\t// Append info about when active is false\n\t\t\t\t\treturnInfo += \", or identity if `active` is false\"\n\t\t\t} else {\n\t\t\t\t// Append info about identity parameters\n\t\t\t\treturnInfo += \" or identity if parameters result in no operation applied\"\n\n\t\t\t\t// TODO: colorCrossPolynomial broken\n\t\t\t}\n\t\t\tprint(\"/// - Returns: \\(returnInfo)\")\n\n\t\t}\n\t}\n}\n\nprivate func outputOSVersion(_ filter: CIFilter, functionMinima: [String: String]) {\n\n\tlet filterName = filter.name\n\tvar macOSVersion: String? = filter.attributes[kCIAttributeFilterAvailable_Mac]  as? String\n\tif nil == Float(macOSVersion ?? \"\") {\n\t\tif filterName == \"CIHistogramDisplayFilter\" {\n\t\t\tmacOSVersion = \"10.9\"\t\t// repair \"10.?\" with 10.9 from documentation\n\t\t}\n\t}\n\n\tif nil != macOSVersion?.firstMatch(of: /10\\.[0-9]+/) && macOSVersion != \"10.15\" {\n\t\tmacOSVersion = \"10.15\"\t\t// For minimum version of SwiftUI and most filter functions\n\t}\n\n\tvar iOSVersion: String? = filter.attributes[kCIAttributeFilterAvailable_iOS]  as? String\n\tif Float(iOSVersion ?? \"\") ?? 
0 < 13 {\n\t\tiOSVersion = \"13\"\t// minimum version for SwiftUI and most filter functions\n\t}\n\n\t// Override versions of our functions when we have noted that the core image functions (or occasionally parameters of them) required newer OSs\n\tif let functionMinimum = functionMinima[filter.name.prettyFunction] {\n\t\tmacOSVersion = functionMinimum\n\t\tif let convertedFromMacVersion = [\"11.0\": \"14\", \"12.0\": \"15\", \"13.0\": \"16\"][functionMinimum] {\n\t\t\tiOSVersion = convertedFromMacVersion\n\t\t}\n\t}\n\n\tif let macOSVersion, let iOSVersion {\n\t\tprint(\"@available(iOS \\(iOSVersion), macOS \\(macOSVersion), *)\")\n\t}\n}\n\nprivate func outputImageFunctionHeader(_ filter: CIFilter, isGenerator: Bool) {\n\tlet filterName: String = filter.name\n\tlet filterFunction: String = filterName.prettyFunction\n\n\tprint(\"\\(isGenerator ? \"static \" : \"\")func \\(filterFunction)(\", terminator: \"\")\n\n\tvar inputParams: [String] = filter.inputKeys\n\t\t.filter { $0 != kCIInputImageKey }\n\t\t.map { inputKey in\n\t\t\t(inputKey, (filter.attributes[inputKey] as? [String: AnyObject] ?? 
[:])) }\t// tuple of the inputKey and its attributes\n\t\t.compactMap { (inputKey: String, inputAttributes: [String: AnyObject]) in\n\t\t\tparameterStatement(inputKey: inputKey, inputAttributes: inputAttributes, filterName: filterName)\n\t\t}\n\n\tif !isGenerator && filter.identityInputKeys.isEmpty && !filter.inputKeys.contains(\"inputBackgroundImage\"),\n\t   let attributesForActiveParam: [String: AnyObject] = .some([kCIAttributeDisplayName: \"Active\" as NSString,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t   kCIAttributeClass: \"NSNumber\" as NSString,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tkCIAttributeType: kCIAttributeTypeBoolean  as NSString,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t kCIAttributeDefault: true as AnyObject,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tkCIAttributeIdentity: true as AnyObject]),\n\t\tlet activeParameterStatement: String = parameterStatement(inputKey: \"active\", inputAttributes: attributesForActiveParam, filterName: filterName) {\n\t\tinputParams.append(activeParameterStatement)\n\t}\n\tlet inputParamsOnOneLine = inputParams.joined(separator: \", \")\n\tlet forceMultiLines: Bool = inputParamsOnOneLine.contains(\"//\")\n\tif inputParamsOnOneLine.count + filterFunction.count >= 100 || forceMultiLines {\n\t\tprint(inputParams.joined(separator: \",\\n        \"), terminator: forceMultiLines ? 
\"\\n\" : \"\")\n\t} else {\n\t\tprint(inputParamsOnOneLine, terminator: \"\")\n\t}\n\tprint(\") -> CIImage {\")\n}\n\nprivate func outputImageDictionaryFunction(_ filter: CIFilter, isGenerator: Bool) {\n\n\tassert(!isGenerator)\t\t// not supported for generators; none known to be needed\n\tlet filterName: String = filter.name\n\n\toutputImageFunctionHeader(filter, isGenerator: isGenerator)\n\n\toutputIdentityGuards(filter)\n\n\tprint(\"    // Filter not included in CoreImage.CIFilterBuiltins; using dictionary-based method.\")\n\tprint(\"    guard let filter = CIFilter(name: \\\"\\(filter.name)\\\", parameters: [\", terminator: \"\")\n\t\n\tlet otherInputSettingStatements: [String] = filter.inputKeys\n\t\t.filter { $0 != kCIInputImageKey }\n\t\t.map { inputKey in\n\t\t\t(inputKey, (filter.attributes[inputKey] as? [String: AnyObject] ?? [:])) }\t// tuple of the inputKey and its attributes\n\t\t.compactMap { (inputKey: String, inputAttributes: [String: AnyObject]) in\n\t\t\tguard let displayName: String = inputAttributes[kCIAttributeDisplayName] as? String\n\t\t\telse { return nil }\n\t\t\tlet inputName: String = parameterName(displayName: displayName, filterName: filterName)\n\t\t\treturn \"    \\\"\\(inputKey)\\\": \\(inputName),\"\n\t\t}\n\n\tif !otherInputSettingStatements.isEmpty {\n\t\tprint(\"\\n\")\n\t\tprint(otherInputSettingStatements.joined(separator: \"\\n\"))\n\t\tprint(\"    \", terminator: \"\")\n\t} else {\n\t\tprint(\":\", terminator: \"\")\n\t}\n\n\tprint(\"]) else { return self }\")\n\tprint(\"    return filter.outputImage ?? 
CIImage.empty()\")\n\n\tprint(\"}\")\n\n}\n\nprivate func outputIdentityGuards(_ filter: CIFilter) {\n\tlet filterName: String = filter.name\n\t// doesn't make sense to have an identity function for generators\n\t// Guards for identity/inert values\n\tlet identityComparisons: String\n\n\tif filter.identityInputKeys.isEmpty {\n\t\tif filter.inputKeys.contains(\"inputBackgroundImage\") {\n\t\t\tidentityComparisons = \"let backgroundImage\"\n\t\t} else {\n\t\t\tidentityComparisons = \"active\"\n\t\t}\n\t} else {\n\t\tidentityComparisons = filter.inputKeys\n\t\t\t.filter { $0 != kCIInputImageKey }\n\t\t\t.map { inputKey in\n\t\t\t\t(inputKey, (filter.attributes[inputKey] as? [String: AnyObject] ?? [:])) }\t// tuple of the inputKey and its attributes\n\t\t\t.compactMap { (inputKey: String, inputAttributes: [String: AnyObject]) in\n\t\t\t\tguard let displayName: String = inputAttributes[kCIAttributeDisplayName] as? String,\n\t\t\t\t\t  let identityValue: Any = inputAttributes[kCIAttributeIdentity]\n\t\t\t\telse { return nil }\n\n\t\t\t\tlet attributeType: String? = inputAttributes[kCIAttributeType] as? 
String\n\t\t\t\tlet inputName: String = parameterName(displayName: displayName, filterName: filterName)\n\t\t\t\tguard hasReasonableDefaultValue(identityValue, attributeType: attributeType, inputName: inputName)\n\t\t\t\telse { return nil }\n\n\t\t\t\tlet identityValueFormatted: String = formatSmart(identityValue, attributeType: attributeType, inputName: inputName, filterName: filterName)\n\t\t\t\treturn \"\\(inputName) != \\(identityValueFormatted)\"\n\t\t\t}\n\t\t\t.joined(separator: \" || \")\n\t}\n\tif !identityComparisons.isEmpty {\n\t\tprint(\"    guard \\(identityComparisons) else { return self }\")\n\t\tprint(\"\")\n\t}\n}\n\nprivate func outputImageFunction(_ filter: CIFilter, isGenerator: Bool) {\n\tlet filterName: String = filter.name\n\tlet filterFunction: String = filterName.prettyFunction\n\n\toutputImageFunctionHeader(filter, isGenerator: isGenerator)\n\n\tif !isGenerator {\n\t\toutputIdentityGuards(filter)\n\t}\n\tprint(\"    let filter = CIFilter.\\(filterFunction)() // \\(filterName)\")\n\tif !isGenerator {\n\t\tprint(\"    filter.inputImage = self\")\n\t}\n\n\tlet otherInputSettingStatements: String = filter.inputKeys\n\t\t.filter { $0 != kCIInputImageKey }\n\t\t.map { inputKey in\n\t\t\t(inputKey, (filter.attributes[inputKey] as? [String: AnyObject] ?? [:])) }\t// tuple of the inputKey and its attributes\n\t\t.compactMap { (inputKey: String, inputAttributes: [String: AnyObject]) in\n\t\t\tguard let displayName: String = inputAttributes[kCIAttributeDisplayName] as? String\n\t\t\telse { return nil }\n\t\t\tlet inputName: String = parameterName(displayName: displayName, filterName: filterName)\n\t\t\tlet attributeType: String? = inputAttributes[kCIAttributeType] as? String\n\n\t\t\t// Special case - barcode generators, for some reason, want all their parameters as Float. Let's upgrade it here to keep the API simple.\n\t\t\tif nil != filterFunction.firstMatch(of: /(?i)codeGenerator$/),\n\t\t\t   let className = inputAttributes[kCIAttributeClass] as? 
String,\n\t\t\t   let attributeType = inputAttributes[kCIAttributeType] as? String,\n\t\t\t   className == \"NSNumber\" {\n\t\t\t\tif attributeType == kCIAttributeTypeBoolean {\n\t\t\t\t\treturn \"    filter.\\(inputName) = Float(\\(inputName) ? 1 : 0)\"\n\t\t\t\t} else {\n\t\t\t\t\treturn \"    filter.\\(inputName) = Float(\\(inputName))\"\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Annoying to have these negative cases, but the instances where\n\t\t\t// we need to wrap in a float are much more numerous!\n\t\t\tif !(filterFunction == \"kMeans\" && inputName == \"count\"),\t// this function's parameter wants an integer so leave alone\n\t\t\t   !(filterFunction == \"cannyEdgeDetector\" && inputName == \"hysteresisPasses\"),\n\t\t\t   !(filterFunction == \"personSegmentation\" && inputName == \"qualityLevel\"),\n\n\t\t\t\tattributeType == kCIAttributeTypeInteger || attributeType == kCIAttributeTypeCount {\n\t\t\t\treturn \"    filter.\\(inputName) = Float(\\(inputName))\"\t// We pass in Int, but function wants a Float\n\t\t\t}\n\t\t\t// fall through\n\t\t\treturn \"    filter.\\(inputName) = \\(inputName)\"\n\t\t}\n\t\t.joined(separator: \"\\n\")\n\n\tprint(otherInputSettingStatements)\n\tprint(\"    return filter.outputImage ?? 
CIImage.empty()\")\n\tprint(\"}\")\n}\n\nprivate func outputImageToImage(_ filter: CIFilter, abstractLookup: [String: String], docLookup: [String: String], functionMinima: [String: String]) {\n\n\tlet filterName = filter.name\n\n\tlet filtersWithoutSwiftAPI: Set<String> = [\"CICameraCalibrationLensCorrection\", \"CIGuidedFilter\"]\n\tlet filtersThatAlreadyHaveImageExtension: [String: String] = [\"CIAffineTransform\": \"transformed(by: CGAffineTransform)\",\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t  \"CICrop\": \"cropped(to: CGRect)\",\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t  \"CIClamp\": \"clamped(to: CGRect)\",\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t  \"CISampleNearest\": \"samplingNearest()\",\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t  // https://developer.apple.com/documentation/coreimage/ciimage/2867429-samplingnearest\n\t\"CIDepthBlurEffect\": \"depthBlurEffectFilter(for...)\"\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t  // https://developer.apple.com/documentation/coreimage/cicontext#4375374\n]\n\n\tlet filtersThatAlreadyHaveImageExtensionDoc: [String: String] = [\"CISampleNearest\": \"https://developer.apple.com/documentation/coreimage/ciimage/2867429-samplingnearest\",\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t  \"CIDepthBlurEffect\": \"https://developer.apple.com/documentation/coreimage/cicontext#4375374\"]\n\n\tif let existingFunction: String = filtersThatAlreadyHaveImageExtension[filterName] {\n\t\tprint(\"\")\n\t\tprint(\"// ℹ️ \\(filterName) already has a CIImage method: func \\(existingFunction) -> CIImage\")\n\t\tif let existingFunctionURL = filtersThatAlreadyHaveImageExtensionDoc[filterName] {\n\t\t\tprint(\"// \\(existingFunctionURL)\")\n\t\t}\n\t\tprint(\"\")\n\t\treturn\n\t}\n\toutputDocumentation(filter, isGenerator: false, abstractLookup: abstractLookup, docLookup: docLookup)\n\toutputOSVersion(filter, functionMinima: functionMinima)\n\n\tif filtersWithoutSwiftAPI.contains(filterName) {\n\t\toutputImageDictionaryFunction(filter, isGenerator: false)\n\t} else 
{\n\t\toutputImageFunction(filter, isGenerator: false)\n\t}\n}\n\n\n// convert long name like \"Gray Component Replacement\" to input name used in CoreImage.CIFilterBuiltins. And fix a bunch of inconsistencies.\nprivate func parameterName(displayName: String, filterName: String) -> String {\n\tlet words: [String] = displayName.components(separatedBy: \" \").map { $0.capitalized }\n\tlet removeSpaces: String = words.joined(separator: \"\")\n\tvar result: String = removeSpaces.prefix(1).lowercased() + removeSpaces.dropFirst()\n\tif result == \"texture\" {\n\t\tresult = \"textureImage\"\n\t} else if result == \"b\" {\n\t\tresult = \"parameterB\"\n\t} else if result == \"c\" {\n\t\tresult = \"parameterC\"\n\t} else if result == \"means\" {\n\t\tresult = \"inputMeans\"\n\t} else if result == \"redVector\" {\n\t\tresult = \"rVector\"\n\t} else if result == \"greenVector\" {\n\t\tresult = \"gVector\"\n\t} else if result == \"blueVector\" {\n\t\tresult = \"bVector\"\n\t} else if result == \"alphaVector\" {\n\t\tresult = \"aVector\"\n\t} else if result == \"maximumStriationRadius\" {\n\t\tresult = \"maxStriationRadius\"\n\t} else if result == \"color1\" {\n\t\tresult = \"color0\"\n\t} else if result == \"color2\" {\n\t\tresult = \"color1\"\n\t} else if result == \"radius1\" {\n\t\tresult = \"radius0\"\n\t} else if result == \"radius2\" {\n\t\tresult = \"radius1\"\n\t} else if result == \"image2\" && filterName == \"CIColorAbsoluteDifference\" {\t// only substitute for this function\n\t\tresult = \"inputImage2\"\n\t} else if result.hasSuffix(\".\") {\n\t\tresult = String(result.dropLast(1))\t// to deal with data anomoly where \".\" is at end of parameter\n\t}\n\treturn result\n}\n\nprivate func parameterStatement(inputKey: String, inputAttributes: [String: AnyObject], filterName: String) -> String? {\n\n\tguard let displayName: String = inputAttributes[kCIAttributeDisplayName] as? String,\n\t\t  let attributeClass: String = inputAttributes[kCIAttributeClass] as? 
String\n\telse { return nil }\n\n\tlet inputName: String = parameterName(displayName: displayName, filterName: filterName)\n\tlet attributeType: String? = inputAttributes[kCIAttributeType] as? String\n\tvar convertedClass: String\n\tswitch attributeClass {\n\tcase \"NSNumber\":\n\n\t\tif attributeType == kCIAttributeTypeBoolean\n\t\t\t|| inputName == \"extrapolate\" { // Hack - missing info\n\t\t\tconvertedClass = \"Bool\"\n\t\t} else if attributeType == kCIAttributeTypeInteger || attributeType == kCIAttributeTypeCount\n\t\t\t\t\t|| inputName == \"qualityLevel\" || inputName == \"count\" { \t// Hack - missing or misleading info\n\t\t\tconvertedClass = \"Int\"\n\t\t} else if [kCIAttributeTypeScalar, kCIAttributeTypeAngle, kCIAttributeTypeDistance, kCIAttributeTypeTime].contains(attributeType)\n\t\t\t|| inputName == \"preferredAspectRatio\"\t// missing info\n\t\t{\n\t\t\tconvertedClass = \"Float\"\n\t\t} else {\n\t\t\tprint(\"\\n// 🛑 unknown number type \\(inputName): \\(attributeType ?? \"\")\")\n\t\t\tconvertedClass = \"Float\"\t\t// seems to be when no type is specified\n\t\t}\n\tcase \"CIVector\":\n\t\tguard filterName != \"CITemperatureAndTint\" && filterName != \"CIDepthBlurEffect\" else {\t// special case, should remain a CIVector\n\t\t\tconvertedClass = \"CIVector\"\n\t\t\tbreak\n\t\t}\n\t\tconvertedClass = attributeType == kCIAttributeTypeRectangle\n\t\t? \"CGRect\"\n\t\t: attributeType == kCIAttributeTypePosition || attributeType == kCIAttributeTypeOffset\n\t\t? 
\"CGPoint\"\n\t\t: \"CIVector\"\t\t// CIVector tends to have no attribute type\n\tcase \"NSAffineTransform\":\n\t\tconvertedClass = \"CGAffineTransform\"\n\tcase \"NSData\":\n\t\tconvertedClass = \"Data\"\n\tcase \"NSString\":\n\t\tconvertedClass = \"String\"\n\tcase \"NSArray\":\n\t\tconvertedClass = \"[Any]\"\n\tcase \"CGImageMetadataRef\":\n\t\tconvertedClass = \"CGImageMetadata\"\n\tcase \"NSObject\":\n\t\tif inputName == \"colorSpace\" {\n\t\t\tconvertedClass = \"CGColorSpace\"\n\t\t} else {\n\t\t\tconvertedClass = attributeClass\t\t// Unexpected case\n\t\t\tprint(\"\\n// 🛑 unknown attributeClass \\(attributeClass) with \\(inputName), \\(attributeType ?? \"\")\")\n\t\t}\n\tcase \"NSValue\":\n\t\tif attributeType == kCIAttributeTypeTransform {\n\t\t\tconvertedClass = \"CGAffineTransform\"\n\t\t} else {\n\t\t\tconvertedClass = attributeClass\t// Unexpected case\n\t\t\tprint(\"\\n// 🛑 unknown attributeClass \\(attributeClass) with \\(inputName), \\(attributeType ?? \"\")\")\n\t\t}\n\tdefault:\n\t\t// Other cases where the class is the same: CIImage, CIColor, etc.\n\t\tconvertedClass = attributeClass\n\t}\n\tif inputName == \"backgroundImage\" && convertedClass == \"CIImage\" {\n\t\tconvertedClass = \"CIImage?\"\t\t// make optional, for our special identity handling\n\t}\n\tvar defaultStatement: String = \"\"\n\tif let defaultValue: AnyObject = inputAttributes[kCIAttributeDefault] {\n\n\t\tif hasReasonableDefaultValue(defaultValue, attributeType: attributeType, inputName: inputName) {\n\t\t\tlet defaultValueString = formatSmart(defaultValue, attributeType: attributeType, inputName: inputName, filterName: filterName)\n\t\t\tif !defaultValueString.isEmpty {\n\t\t\t\tdefaultStatement = \" = \\(defaultValueString)\"\n\t\t\t}\n\t\t}\n\t}\n\treturn \"\\(inputName): \\(convertedClass)\\(defaultStatement)\"\n}\n\n// Look at value and/or context.\nprivate func hasReasonableDefaultValue(_ value: Any, attributeType: String?, inputName: String) -> Bool {\n\tif nil != value 
as? Data {\n\t\treturn false\t// Not feasible to have data anyhow\n\t} else if let number = value as? NSNumber {\n\t\tif attributeType == kCIAttributeTypeDistance {\n\t\t\treturn number == 0\n\t\t} else if attributeType == kCIAttributeTypeInteger {\n\t\t\treturn false\n\t\t} else if attributeType == kCIAttributeTypeCount {\n\t\t\treturn false\n\t\t} else if attributeType == kCIAttributeTypeBoolean {\n\t\t\treturn true\n\t\t} else if attributeType == kCIAttributeTypeAngle {\n\t\t\treturn number.doubleValue <= Double.pi\t// avoid those weird angles that don't make any sense\n\t\t} else if attributeType == kCIAttributeTypeScalar {\n\t\t\treturn true\t// not sure\n\t\t}\n\t} else if let defaultVector = value as? CIVector {\n\n\t\tif defaultVector.count > 4 {\n\t\t\treturn false\n\t\t}\n\t\tif attributeType == kCIAttributeTypeRectangle {\n\t\t\treturn defaultVector == CIVector(x: 0, y: 0, z: 0, w: 0)\t// only keep zero rectangle\n\t\t} else if attributeType == kCIAttributeTypePosition3 {\n\t\t\treturn false\n\t\t} else if attributeType == kCIAttributeTypePosition {\n\t\t\treturn defaultVector.x < 50 && defaultVector.y < 50\t\t// seems like 50+ values are arbitrary coordinates\n\t\t} else if attributeType == kCIAttributeTypeOffset {\n\t\t\treturn defaultVector.x != 0 && defaultVector.y != 0\t\t// any non-zero points seem pretty arbitrary\n\t\t}\n\t} else if let color = value as? CIColor {\n\t\treturn color == CIColor.black\n\t\t|| color == CIColor.white\n\t\t|| color == CIColor.clear\n\t} else if nil != value as? AffineTransform {\n\t\treturn true\n\t} else if nil != value as? String {\n\t\treturn true\n\t} else if inputName == \"colorSpace\" {\t// it's a CFType so not so easy to compare\n\t\treturn true\n\t} else {\n\t\tprint(\"\\n🛑 \\(attributeType ?? \"\") \\(inputName) -> \\(value) \\((value as? 
AnyObject)?.className)\")\n\t\treturn true\t// not sure yet\n\t}\n\treturn false\n}\n\n\nprivate func formatSmart(_ value: Any, attributeType: String?, inputName: String, filterName: String?) -> String {\n\tvar result: String = \"\"\n\tif let number = value as? NSNumber {\n\t\tif attributeType == kCIAttributeTypeBoolean || inputName == \"extrapolate\" { // Hack - missing info\n\t\t\tresult = number.boolValue.description\n\t\t} else {\n\t\t\tresult = number.formatSmart\n\t\t}\n\t} else if let defaultVector = value as? CIVector {\n\n\t\tif attributeType == kCIAttributeTypeRectangle {\n\t\t\tresult = defaultVector.formatRectSmart\n\t\t} else if attributeType == kCIAttributeTypePosition {\n\t\t\tresult = defaultVector.formatPointSmart\n\t\t} else {\n\t\t\tresult = defaultVector.formatVectorSmart\n\t\t}\n\t} else if let color = value as? CIColor {\n\t\tresult = color.formatSmart\n\t} else if let string = value as? String {\n\t\tresult = \"\\\"\" + string.replacingOccurrences(of: \"\\\"\", with: \"\\\\\\\"\") + \"\\\"\"\n\t} else if inputName == \"colorSpace\" {\n\t\tif CFGetTypeID(value as AnyObject) == CGColorSpace.typeID {\n\t\t\tlet colorspace: CGColorSpace = value as! CGColorSpace\n\t\t\tif let name: String = colorspace.name as? String {\n\t\t\t\tvar newName = name.replacing(/^kCGColorSpace/, with: \"\")\n\t\t\t\tnewName = newName.prefix(1).lowercased() + newName.dropFirst()\n\t\t\t\tresult = \"CGColorSpace(name: CGColorSpace.\" + newName + \")!\"\n\t\t\t}\n\t\t}\n\t} else if let transform = value as? AffineTransform {\n\t\tlet transformIdentity: AffineTransform\n#if canImport(UIKit)\n\t\ttransformIdentity = CGAffineTransform.identity\n#elseif canImport(AppKit)\n\t\ttransformIdentity = NSAffineTransform()\n#endif\n\n\t\t// Special case these filters to default to identity. 
Their default values are weird!\n\t\tif transform == transformIdentity || filterName == \"CIAffineClamp\" || filterName == \"CIAffineTile\" {\n\t\t\tresult = \"CGAffineTransform.identity\"\n\t\t} else {\n#if canImport(UIKit)\n\t\t\tlet t: CGAffineTransform = transform\n\t\t\tresult = \"CGAffineTransform(a: \\(t.a.format5), b: \\(t.b.format5), c: \\(t.c.format5), d: \\(t.d.format5), tx: \\(t.tx.format5), ty: \\(t.ty.format5))\"\n#elseif canImport(AppKit)\n\t\t\tlet t: NSAffineTransformStruct = transform.transformStruct\n\t\t\tresult = \"CGAffineTransform(a: \\(t.m11.format5), b: \\(t.m12.format5), c: \\(t.m21.format5), d: \\(t.m22.format5), tx: \\(t.tX.format5), ty: \\(t.tY.format5))\"\n#endif\n\t\t}\n\t} else {\n\t\tprint(\"\\n🛑 \\(attributeType ?? \"\") \\(inputName) -> \\(value) \\((value as? AnyObject)?.className)\")\n\t\tresult = String(describing: value)\n\t}\n\treturn result\n}\n\n// https://unicode-org.github.io/icu/userguide/strings/regexp.html\n\nprivate extension String {\n\tvar prettyFunction: String {\n\t\tlet result: String = self.replacing(/^CI/, with: \"\").replacing(/Filter$/, with: \"\")\n\t\treturn result.fixingCamelCase\n\t}\n\n\t// AbcDef -> abcDef but ABcdef -> aBcdef, ABCDEF -> abcDef - keep the last\n\tvar fixingCamelCase: String {\n\t\tif nil != self.firstMatch(of: /^[A-Z][^A-Z]/)\n\t\t\t|| self.hasPrefix(\"SRGB\")\t// special case\n\t\t{\n\t\t\t\t// Just one uppercase character, so make it lowercase and append the rest\n\t\t\treturn self.prefix(1).lowercased() + self.dropFirst()\n\t\t} else if let foundUppercaseMatch: Regex<Regex<Substring>.RegexOutput>.Match = self.firstMatch(of: /^[A-Z]{2,}/) {\n\t\t\t// FIXME: Might need some tweaking to deal with complex characters. 
But since we are just modifying ASCII, this simple case is fine.\n\t\t\t// More than one, so make all but the last character lowercased, so that the last character there stays capitalized.\n\t\t\tlet lowercasedPrefix = self[foundUppercaseMatch.range].lowercased()\n\t\t\tlet remaining = self.dropFirst(lowercasedPrefix.count)\n\t\t\tif nil != remaining.firstMatch(of: /^[a-z]/) {\t// lowercase letter after uppercase, the usual. Keep last uppercase from prefix\n\t\t\t\treturn String(lowercasedPrefix.dropLast()) + self.dropFirst(lowercasedPrefix.count - 1)\n\t\t\t} else {\n\t\t\t\t// Unusual; characters after uppercase is not a lowercase character, e.g. a number. Keep all the uppercase characters.\n\t\t\t\treturn String(lowercasedPrefix) + self.dropFirst(lowercasedPrefix.count)\n\t\t\t}\n\t\t}\n\t\treturn self\n\n\t}\n}\n\n// Format numbers with UP TO five decimal places\n\nprivate extension Float {\n\tvar format5: String {\n\t\tlet formatter = NumberFormatter()\n\t\tformatter.numberStyle = .decimal\n#if canImport(UIKit)\n\t\tformatter.numberStyle = .none\n#elseif canImport(AppKit)\n\t\tformatter.hasThousandSeparators = false\n#endif\n\t\tformatter.maximumFractionDigits = 5\n\t\tlet number = NSNumber(value: self)\n\t\treturn formatter.string(from: number) ?? \"\"\n\t}\n}\nprivate extension Double {\n\tvar format5: String {\n\t\tlet formatter = NumberFormatter()\n\t\tformatter.numberStyle = .decimal\n#if canImport(UIKit)\n\t\tformatter.numberStyle = .none\n#elseif canImport(AppKit)\n\t\tformatter.hasThousandSeparators = false\n#endif\n\t\tformatter.maximumFractionDigits = 5\n\t\tlet number = NSNumber(value: self)\n\t\treturn formatter.string(from: number) ?? 
\"\"\n\t}\n}\nprivate extension CGFloat {\n\tvar format5: String {\n\t\tlet formatter = NumberFormatter()\n\t\tformatter.numberStyle = .decimal\n#if canImport(UIKit)\n\t\tformatter.numberStyle = .none\n#elseif canImport(AppKit)\n\t\tformatter.hasThousandSeparators = false\n#endif\n\t\tformatter.maximumFractionDigits = 5\n\t\tlet number = NSNumber(value: self)\n\t\treturn formatter.string(from: number) ?? \"\"\n\t}\n}\n\nprivate extension NSNumber {\n\n\tvar format5: String {\n\t\tlet formatter = NumberFormatter()\n\t\tformatter.numberStyle = .decimal\n#if canImport(UIKit)\n\t\tformatter.numberStyle = .none\n#elseif canImport(AppKit)\n\t\tformatter.hasThousandSeparators = false\n#endif\n\t\tformatter.maximumFractionDigits = 5\n\t\treturn formatter.string(from: self) ?? \"\"\n\t}\n\n\tvar formatSmart: String {\n\t\tlet result: String\n\t\tswitch self.doubleValue {\n\t\tcase Double.pi:\n\t\t\tresult = \".pi\"\n\t\tcase Double.pi/2:\n\t\t\tresult = \".pi/2\"\n\t\tcase Double.pi * 18:\n\t\t\tresult = \".pi*18\"\t// for vortexDistortion\n\n\t\t\t// What about triangleKaleidoscope 5.924285296593801\n\t\tdefault:\n\t\t\tresult = self.format5\n\t\t}\n\t\treturn result\n\t}\n}\nprivate extension CIVector {\n\tvar formatPointSmart: String {\n\t\tif x == 0 && y == 0 {\n\t\t\treturn \".zero\"\n\t\t} else {\n\t\t\treturn \".init(x: \\(x.format5), y: \\(y.format5))\"\n\t\t}\n\t}\n\n\t// The CGRect structure’s X, Y, height and width values are stored in the vector’s X, Y, Z and W properties.\n\tvar formatRectSmart: String {\n\t\tif x == 0 && y == 0 && z == 0 && w == 0 {\n\t\t\treturn \".zero\"\n\t\t} else {\n\t\t\treturn \".init(x: \\(x.format5), y: \\(y.format5), width: \\(w.format5), height: \\(z.format5))\"\n\t\t}\n\t}\n\tvar formatVectorSmart: String {\n\t\tswitch count {\n\t\tcase 0:\n\t\t\treturn \".init()\"\n\t\tcase 1:\n\t\t\treturn \".init(x: \\(x.format5))\"\n\t\tcase 2:\n\t\t\treturn \".init(x: \\(x.format5), y: \\(y.format5))\"\n\t\tcase 3:\n\t\t\treturn \".init(x: 
\\(x.format5), y: \\(y.format5), z: \\(z.format5))\"\n\t\tcase 4:\n\t\t\treturn \".init(x: \\(x.format5), y: \\(y.format5), z: \\(z.format5), w: \\(w.format5))\"\n\t\tdefault:\n\t\t\treturn \"🛑 no vector initializer for count > 4\"\n\t\t}\n\t}\n}\nprivate extension CIColor {\n\tvar formatSmart: String {\n\n\t\tswitch self {\n\t\tcase CIColor.black:    return \"CIColor.black\"\t// Include \"CIColor.\" so it's compatible with older OS\n\t\tcase CIColor.white:    return \"CIColor.white\"\n\t\tcase CIColor.gray:     return \"CIColor.gray\"\n\t\tcase CIColor.red:      return \"CIColor.red\"\n\t\tcase CIColor.green:    return \"CIColor.green\"\n\t\tcase CIColor.blue:     return \"CIColor.blue\"\n\t\tcase CIColor.cyan:     return \"CIColor.cyan\"\n\t\tcase CIColor.magenta:  return \"CIColor.magenta\"\n\t\tcase CIColor.yellow:   return \"CIColor.yellow\"\n\t\tcase CIColor.clear:    return \"CIColor.clear\"\n\t\tdefault:\n\t\t\tlet colorSpaceName: String = colorSpace.name as? String ?? \"\"\t// e.g. kCGColorSpaceDeviceRGB\n\t\t\tlet colorSpaceNameSuffix: String = colorSpaceName.replacing(/^kCGColorSpace/, with: \"\")\n\t\t\tlet colorSpaceNameFormatted = \"CGColorSpace.\" +  colorSpaceNameSuffix.prefix(1).lowercased() + colorSpaceNameSuffix.dropFirst()\n\t\t\tlet colorSpaceSRGB: String = CGColorSpace.sRGB as String\n\n\t\t\t// Some issues with kCGColorSpaceDeviceRGB since we would have to create that. 
Let's just ignore.\n\t\t\tif alpha != 1.0 && colorSpaceName != colorSpaceSRGB\n\t\t\t&& colorSpaceName != \"kCGColorSpaceDeviceRGB\" {\n\t\t\t\treturn \"CIColor(red: \\(red), green: \\(green), blue: \\(blue), alpha: \\(alpha), colorSpace: \\(colorSpaceNameFormatted))\"\n\t\t\t} else if alpha == 1.0 && colorSpaceName != colorSpaceSRGB\n\t\t\t\t\t\t&& colorSpaceName != \"kCGColorSpaceDeviceRGB\" {\n\t\t\t\treturn \"CIColor(red: \\(red), green: \\(green), blue: \\(blue), colorSpace: \\(colorSpaceNameFormatted))\"\n\t\t\t} else\n\t\t\tif alpha != 1.0 {\n\t\t\t\treturn \"CIColor(red: \\(red), green: \\(green), blue: \\(blue), alpha: \\(alpha))\"\n\t\t\t} else {\n\t\t\t\treturn \"CIColor(red: \\(red), green: \\(green), blue: \\(blue))\"\n\t\t\t}\n\t\t}\n\t}\n\n}\n\nprivate extension CIFilter {\n\tvar identityInputKeys: [String] {\n\t\tinputKeys\n\t\t\t.filter { $0 != kCIInputImageKey }\n\t\t\t.map { inputKey in\n\t\t\t\t(inputKey, (attributes[inputKey] as? [String: AnyObject] ?? [:])) }\t// tuple of the inputKey and its attributes\n\t\t\t.compactMap { (inputKey: String, inputAttributes: [String: AnyObject]) in\n\t\t\t\tguard let displayName: String = inputAttributes[kCIAttributeDisplayName] as? String,\n\t\t\t\t\t  let identityValue: Any = inputAttributes[kCIAttributeIdentity]\n\t\t\t\telse { return nil }\n\n\t\t\t\tlet attributeType: String? = inputAttributes[kCIAttributeType] as? String\n\t\t\t\tlet inputName: String = parameterName(displayName: displayName, filterName: self.name)\n\t\t\t\tguard hasReasonableDefaultValue(identityValue, attributeType: attributeType, inputName: inputName)\n\t\t\t\telse { return nil }\n\n\t\t\t\treturn inputKey\n\t\t\t}\n\t}\n}\n\n"
  },
  {
    "path": "Generator/FunctionMinima.json",
    "content": "{\n\t\"areaAverage\": \"11.0\",\n\t\"areaHistogram\": \"11.0\",\n\t\"areaMaximum\": \"11.0\",\n\t\"areaMaximumAlpha\": \"11.0\",\n\t\"areaMinimum\": \"11.0\",\n\t\"areaMinimumAlpha\": \"11.0\",\n\t\"areaMinMax\": \"11.0\",\n\t\"areaMinMaxRed\": \"11.0\",\n\t\"bumpDistortion\": \"11.0\",\n\t\"bumpDistortionLinear\": \"11.0\",\n\t\"circleSplashDistortion\": \"11.0\",\n\t\"circularWrap\": \"11.0\",\n\t\"colorAbsoluteDifference\": \"11.0\",\n\t\"colorThreshold\": \"11.0\",\n\t\"colorThresholdOtsu\": \"11.0\",\n\t\"columnAverage\": \"11.0\",\n\t\"displacementDistortion\": \"11.0\",\n\t\"droste\": \"11.0\",\n\t\"glassDistortion\": \"11.0\",\n\t\"glassLozenge\": \"11.0\",\n\t\"histogramDisplay\": \"11.0\",\n\t\"holeDistortion\": \"11.0\",\n\t\"kMeans\": \"11.0\",\n\t\"lightTunnel\": \"11.0\",\n\t\"ninePartStretched\": \"11.0\",\n\t\"ninePartTiled\": \"11.0\",\n\t\"pinchDistortion\": \"11.0\",\n\t\"rowAverage\": \"11.0\",\n\t\"stretchCrop\": \"11.0\",\n\t\"torusLensDistortion\": \"11.0\",\n\t\"twirlDistortion\": \"11.0\",\n\t\"vortexDistortion\": \"11.0\",\n\t\"convolutionRGB3X3\": \"12.0\",\n\t\"convolutionRGB5X5\": \"12.0\",\n\t\"convolutionRGB7X7\": \"12.0\",\n\t\"convolutionRGB9Horizontal\": \"12.0\",\n\t\"convolutionRGB9Vertical\": \"12.0\",\n\t\"linearLightBlendMode\": \"12.0\",\n\t\"personSegmentation\": \"12.0\",\n\t\"vividLightBlendMode\": \"12.0\",\n\t\"areaLogarithmicHistogram\": \"13.0\",\n\t\"convertLabToRGB\": \"13.0\",\n\t\"convertRGBtoLab\": \"13.0\",\n\n\t\"colorCubesMixedWithMask\": \"13.0\",\n\t\"colorCubeWithColorSpace\": \"13.0\",\n\t\"colorCube\": \"13.0\",\n\n\t\"attributedTextImageGenerator\": \"13.0\",\n\t\"textImageGenerator\": \"13.0\"\n}\n"
  },
  {
    "path": "Generator/MissingParameterDocumentation.json",
    "content": "{\n\t\"CIBokehBlur\" : {\n\t\t\"softness\" : \"The softness of the bokeh effect\"\n\t},\n\t\"CIDepthBlurEffect\" : {\n\t\t\"_NOTE\" : \"THIS WAS GLEANED FROM WWDC2017#508 VIDEO; NO OFFICIAL APPLE DOCUMENTATION FOUND. NOT ACTUALLY USED HERE BECAUSE OF SEVERAL BUILT-IN INITIALIZERS.\",\n\t\t\"aperture\" : \"Simulated lens aperture to adjust blur for unfocused elements\",\n\t\t\"auxDataMetadata\" : \"UNKNOWN\",\n\t\t\"calibrationData\" : \"UNKNOWN\",\n\t\t\"chinPositions\" : \"Vector of up to 4 x,y positions indicating where peoples' chins are\",\n\t\t\"disparityImage\" : \"Grayscale image indicating depth; lighter is nearer.\",\n\t\t\"focusRectangle\" : \"Part of the rectangle to make sure to put into focus\",\n\t\t\"gainMap\" : \"UNKNOWN\",\n\t\t\"leftEyePositions\" : \"Vector of up to 4 x,y positions indicating where peoples' left eyes are\",\n\t\t\"lumaNoiseScale\" : \"UNKNOWN\",\n\t\t\"nosePositions\" : \"Vector of up to 4 x,y positions indicating where peoples' noses are\",\n\t\t\"rightEyePositions\" : \"Vector of up to 4 x,y positions indicating where peoples' right eyes are\",\n\t\t\"scaleFactor\" : \"Integrated downsampling (rather than doing it later) since this is computationally expensive\",\n\t\t\"shape\" : \"UNKNOWN\"\n\t},\n\t\"CIDepthOfField\" : {\n\t\t\"point0\" : \"A set of coordinates marking the first point to be focused on\",\n\t\t\"point1\" : \"A set of coordinates marking the second point to be focused on\",\n\t\t\"unsharpMaskIntensity\" : \"The intensity of the unsharp mask effect\",\n\t\t\"unsharpMaskRadius\" : \"The radius of the unsharpened mask effect applied to the in-focus area of effect\"\n\t},\n\t\"CIDroste\" : {\n\t\t\"insetPoint0\" : \"The x and y position that defines the first inset point\",\n\t\t\"insetPoint1\" : \"The x and y position that defines the second inset point\",\n\t\t\"periodicity\" : \"The amount of intervals\",\n\t\t\"rotation\" : \"The angle of the rotation, in radians\",\n\t\t\"strands\" : \"The 
amount of strands\",\n\t\t\"zoom\" : \"The zoom of the effect\"\n\t},\n\t\"CIEdgePreserveUpsampleFilter\" : {\n\t\t\"lumaSigma\" : \"Influence of the input image’s luma information on the upsampling operation\",\n\t\t\"smallImage\" : \"An image representing the reference for scaling the input image with the type CIImage\",\n\t\t\"spatialSigma\" : \"The influence of the input image’s spatial information on the upsampling operation\"\n\t},\n\t\"CIGuidedFilter\" : {\n\t\t\"_NOTE\" : \"THIS WAS GLEANED FROM DESCRIPTIONS OF THIS FILTER IN GENERAL; NO OFFICIAL APPLE DOCUMENTATION FOUND\",\n\t\t\"epsilon\" : \"Smoothness. A higher value means more smoothing.\"\n\t},\n\t\"CIHueSaturationValueGradient\" : {\n\t\t\"dither\" : \"A boolean value specifying whether to dither the generated output\",\n\t\t\"softness\" : \"The softness of the generated color wheel\"\n\t},\n\t\"CILenticularHaloGenerator\" : {\n\t\t\"haloOverlap\" : \"The overlap of red, green, and blue halos. A value of 1 results in a full overlap.\"\n\t},\n\t\"CIPerspectiveCorrection\" : {\n\t\t\"crop\" : \"A rectangle that specifies the extent of the corrected image\"\n\t},\n\t\"CIToneCurve\" : {\n\t\t\"point0\" : \"A vector containing the position of the first point of the tone curve\",\n\t\t\"point1\" : \"A vector containing the position of the second point of the tone curve\",\n\t\t\"point2\" : \"A vector containing the position of the third point of the tone curve\",\n\t\t\"point3\" : \"A vector containing the position of the fourth point of the tone curve\",\n\t\t\"point4\" : \"A vector containing the position of the fifth point of the tone curve\"\n\t}\n}\n"
  },
  {
    "path": "Generator/abstracts.json",
    "content": "{\n\"CIAccordionFoldTransition\": \"Transitions from one image to another of differing dimensions by unfolding and crossfading.\",\n\"CIAdditionCompositing\": \"Adds color components to achieve a brightening effect.\",\n\"CIAffineClamp\": \"Performs an affine transform on a source image and then clamps the pixels at the edge of the transformed image, extending them outwards.\",\n\"CIAffineTile\": \"Applies an affine transform to an image and then tiles the transformed image.\",\n\"CIAffineTransform\": \"Applies an affine transform to an image.\",\n\"CIAreaAverage\": \"Returns a single-pixel image  that contains the average color for the region of interest.\",\n\"CIAreaHistogram\": \"Returns a 1D image (inputCount wide by one pixel high) that contains the component-wise histogram computed for the specified rectangular area.\",\n\"CIAreaMaximum\": \"Returns a single-pixel image that contains the maximum color components for the region of interest.\",\n\"CIAreaMaximumAlpha\": \"Returns a single-pixel image that contains the color vector with the maximum alpha value for the region of interest.\",\n\"CIAreaMinimum\": \"Returns a single-pixel image that contains the minimum color components for the region of interest.\",\n\"CIAreaMinimumAlpha\": \"Returns a single-pixel image that contains the color vector with the minimum alpha value for the region of interest.\",\n\"CIAztecCodeGenerator\": \"Generates an Aztec code (two-dimensional barcode) from input data.\",\n\"CIBarsSwipeTransition\": \"Transitions from one image to another by passing a bar over the source image.\",\n\"CIBlendWithAlphaMask\": \"Uses alpha values from a mask to interpolate between an image and the background.\",\n\"CIBlendWithMask\": \"Uses values from a grayscale mask to interpolate between an image and the background.\",\n\"CIBloom\": \"Softens edges and applies a pleasant glow to an image.\",\n\"CIBoxBlur\": \"Blurs an image using a box-shaped convolution 
kernel.\",\n\"CIBumpDistortion\": \"Creates a bump that originates at a specified point in the image.\",\n\"CIBumpDistortionLinear\": \"Creates a concave or convex distortion that originates from a line in the image.\",\n\"CICheckerboardGenerator\": \"Generates a checkerboard pattern.\",\n\"CICircleSplashDistortion\": \"Distorts the pixels starting at the circumference of a circle and emanating outward.\",\n\"CICircularScreen\": \"Simulates a circular-shaped halftone screen.\",\n\"CICircularWrap\": \"Wraps an image around a transparent circle.\",\n\"CICMYKHalftone\": \"Creates a color, halftoned rendition of the source image, using cyan, magenta, yellow, and black inks over a white page.\",\n\"CICode128BarcodeGenerator\": \"Generates a Code 128 one-dimensional barcode from input data.\",\n\"CIColorBlendMode\": \"Uses the luminance values of the background with the hue and saturation values of the source image.\",\n\"CIColorBurnBlendMode\": \"Darkens the background image samples to reflect the source image samples.\",\n\"CIColorClamp\": \"Modifies color values to keep them within a specified range.\",\n\"CIColorControls\": \"Adjusts saturation, brightness, and contrast values.\",\n\"CIColorCrossPolynomial\": \"Modifies the pixel values in an image by applying a set of polynomial cross-products.\",\n\"CIColorCube\": \"Uses a three-dimensional color table to transform the source image pixels.\",\n\"CIColorCubeWithColorSpace\": \"Uses a three-dimensional color table to transform the source image pixels and maps the result to a specified color space.\",\n\"CIColorDodgeBlendMode\": \"Brightens the background image samples to reflect the source image samples.\",\n\"CIColorInvert\": \"Inverts the colors in an image.\",\n\"CIColorMap\": \"Performs a nonlinear transformation of source color values using mapping values provided in a table.\",\n\"CIColorMatrix\": \"Multiplies source color values and adds a bias factor to each color component.\",\n\"CIColorMonochrome\": 
\"Remaps colors so they fall within shades of a single color.\",\n\"CIColorPolynomial\": \"Modifies the pixel values in an image by applying a set of cubic polynomials.\",\n\"CIColorPosterize\": \"Remaps red, green, and blue color components to the number of brightness values you specify for each color component.\",\n\"CIColumnAverage\": \"Returns a 1-pixel high image that contains the average color for each scan column.\",\n\"CIComicEffect\": \"Simulates a comic book drawing by outlining edges and applying a color halftone effect.\",\n\"CIConstantColorGenerator\": \"Generates a solid color.\",\n\"CIConvolution3X3\": \"Modifies pixel values by performing a 3x3 matrix convolution.\",\n\"CIConvolution5X5\": \"Modifies pixel values by performing a 5x5 matrix convolution.\",\n\"CIConvolution7X7\": \"Modifies pixel values by performing a 7x7 matrix convolution.\",\n\"CIConvolution9Horizontal\": \"Modifies pixel values by performing a 9-element horizontal convolution.\",\n\"CIConvolution9Vertical\": \"Modifies pixel values by performing a 9-element vertical convolution.\",\n\"CICopyMachineTransition\": \"Transitions from one image to another by simulating the effect of a copy machine.\",\n\"CICrop\": \"Applies a crop to an image.\",\n\"CICrystallize\": \"Creates polygon-shaped color blocks by aggregating source pixel-color values.\",\n\"CIDarkenBlendMode\": \"Creates composite image samples by choosing the darker samples (from either the source image or the background).\",\n\"CIDepthOfField\": \"Simulates a depth of field effect.\",\n\"CIDifferenceBlendMode\": \"Subtracts either the source image sample color from the background image sample color, or the reverse, depending on which sample has the greater brightness value.\",\n\"CIDiscBlur\": \"Blurs an image using a disc-shaped convolution kernel.\",\n\"CIDisintegrateWithMaskTransition\": \"Transitions from one image to another using the shape defined by a mask.\",\n\"CIDisplacementDistortion\": \"Applies the grayscale 
values of the second image to the first image.\",\n\"CIDissolveTransition\": \"Uses a dissolve to transition from one image to another.\",\n\"CIDivideBlendMode\": \"Divides the background image sample color from the source image sample color.\",\n\"CIDotScreen\": \"Simulates the dot patterns of a halftone screen.\",\n\"CIDroste\": \"Recursively draws a portion of an image in imitation of an M. C. Escher drawing.\",\n\"CIEdges\": \"Finds all edges in an image and displays them in color.\",\n\"CIEdgeWork\": \"Produces a stylized black-and-white rendition of an image that looks similar to a woodblock cutout.\",\n\"CIEightfoldReflectedTile\": \"Produces a tiled image from a source image by applying an 8-way reflected symmetry.\",\n\"CIExclusionBlendMode\": \"Produces an effect similar to that produced by the CIDifferenceBlendMode filter but with lower contrast.\",\n\"CIExposureAdjust\": \"Adjusts the exposure setting for an image similar to the way you control exposure for a camera when you change the F-stop.\",\n\"CIFalseColor\": \"Maps luminance to a color ramp of two colors.\",\n\"CIFlashTransition\": \"Transitions from one image to another by creating a flash.\",\n\"CIFourfoldReflectedTile\": \"Produces a tiled image from a source image by applying a 4-way reflected symmetry.\",\n\"CIFourfoldRotatedTile\": \"Produces a tiled image from a source image by rotating the source image at increments of 90 degrees.\",\n\"CIFourfoldTranslatedTile\": \"Produces a tiled image from a source image by applying 4 translation operations.\",\n\"CIGammaAdjust\": \"Adjusts midtone brightness.\",\n\"CIGaussianBlur\": \"Spreads source pixels by an amount specified by a Gaussian distribution.\",\n\"CIGaussianGradient\": \"Generates a gradient that varies from one color to another using a Gaussian distribution.\",\n\"CIGlassDistortion\": \"Distorts an image by applying a glass-like texture.\",\n\"CIGlassLozenge\": \"Creates a lozenge-shaped lens and distorts the portion of the image over 
which the lens is placed.\",\n\"CIGlideReflectedTile\": \"Produces a tiled image from a source image by translating and smearing the image.\",\n\"CIGloom\": \"Dulls the highlights of an image.\",\n\"CIHardLightBlendMode\": \"Either multiplies or screens colors, depending on the source image sample color.\",\n\"CIHatchedScreen\": \"Simulates the hatched pattern of a halftone screen.\",\n\"CIHeightFieldFromMask\": \"Produces a continuous three-dimensional, loft-shaped height field from a grayscale mask.\",\n\"CIHexagonalPixellate\": \"Maps an image to colored hexagons whose color is defined by the replaced pixels.\",\n\"CIHighlightShadowAdjust\": \"Adjust the tonal mapping of an image while preserving spatial detail.\",\n\"CIHistogramDisplayFilter\": \"Generates a histogram image from the output of the CIAreaHistogram filter.\",\n\"CIHoleDistortion\": \"Creates a circular area that pushes the image pixels outward, distorting those pixels closest to the circle the most.\",\n\"CIHueAdjust\": \"Changes the overall hue, or tint, of the source pixels.\",\n\"CIHueBlendMode\": \"Uses the luminance and saturation values of the background image with the hue of the input image.\",\n\"CIKaleidoscope\": \"Produces a kaleidoscopic image from a source image by applying 12-way symmetry.\",\n\"CILanczosScaleTransform\": \"Produces a high-quality, scaled version of a source image.\",\n\"CILenticularHaloGenerator\": \"Simulates a lens flare.\",\n\"CILightenBlendMode\": \"Creates composite image samples by choosing the lighter samples (either from the source image or the background).\",\n\"CILightTunnel\": \"Rotates a portion of the input image specified by the center and radius parameters to give a tunneling effect.\",\n\"CILinearBurnBlendMode\": \"Darkens the background image samples to reflect the source image samples while also increasing contrast.\",\n\"CILinearDodgeBlendMode\": \"Brightens the background image samples to reflect the source image samples while also increasing 
contrast.\",\n\"CILinearGradient\": \"Generates a gradient that varies along a linear axis between two defined endpoints.\",\n\"CILinearToSRGBToneCurve\": \"Maps color intensity from a linear gamma curve to the sRGB color space.\",\n\"CILineOverlay\": \"Creates a sketch that outlines the edges of an image in black.\",\n\"CILineScreen\": \"Simulates the line pattern of a halftone screen.\",\n\"CILuminosityBlendMode\": \"Uses the hue and saturation of the background image with the luminance of the input image.\",\n\"CIMaskedVariableBlur\": \"Blurs the source image according to the brightness levels in a mask image.\",\n\"CIMaskToAlpha\": \"Converts a grayscale image to a white image that is masked by alpha.\",\n\"CIMaximumComponent\": \"Returns a grayscale image from max(r,g,b).\",\n\"CIMaximumCompositing\": \"Computes the maximum value, by color component, of two input images and creates an output image using the maximum values.\",\n\"CIMedianFilter\": \"Computes the median value for a group of neighboring pixels and replaces each pixel value with the median.\",\n\"CIMinimumComponent\": \"Returns a grayscale image from min(r,g,b).\",\n\"CIMinimumCompositing\": \"Computes the minimum value, by color component, of two input images and creates an output image using the minimum values.\",\n\"CIModTransition\": \"Transitions from one image to another by revealing the target image through irregularly shaped holes.\",\n\"CIMotionBlur\": \"Blurs an image to simulate the effect of using a camera that moves a specified angle and distance while capturing the image.\",\n\"CIMultiplyBlendMode\": \"Multiplies the input image samples with the background image samples.\",\n\"CIMultiplyCompositing\": \"Multiplies the color component of two input images and creates an output image using the multiplied values.\",\n\"CINoiseReduction\": \"Reduces noise using a threshold value to define what is considered noise.\",\n\"CIOpTile\": \"Segments an image, applying any specified scaling and 
rotation, and then assembles the image again to give an op art appearance.\",\n\"CIOverlayBlendMode\": \"Either multiplies or screens the input image samples with the background image samples, depending on the background color.\",\n\"CIPageCurlTransition\": \"Transitions from one image to another by simulating a curling page, revealing the new image as the page curls.\",\n\"CIPageCurlWithShadowTransition\": \"Transitions from one image to another by simulating a curling page, revealing the new image as the page curls.\",\n\"CIParallelogramTile\": \"Warps an image by reflecting it in a parallelogram, and then tiles the result.\",\n\"CIPDF417BarcodeGenerator\": \"Generates a PDF417 code (two-dimensional barcode) from input data.\",\n\"CIPerspectiveCorrection\": \"Applies a perspective correction, transforming an arbitrary quadrilateral region in the source image to a rectangular output image.\",\n\"CIPerspectiveTile\": \"Applies a perspective transform to an image and then tiles the result.\",\n\"CIPerspectiveTransform\": \"Alters the geometry of an image to simulate the observer changing viewing position.\",\n\"CIPerspectiveTransformWithExtent\": \"Alters the geometry of a portion of an image to simulate the observer changing viewing position.\",\n\"CIPhotoEffectChrome\": \"Applies a preconfigured set of effects that imitate vintage photography film with exaggerated color.\",\n\"CIPhotoEffectFade\": \"Applies a preconfigured set of effects that imitate vintage photography film with diminished color.\",\n\"CIPhotoEffectInstant\": \"Applies a preconfigured set of effects that imitate vintage photography film with distorted colors.\",\n\"CIPhotoEffectMono\": \"Applies a preconfigured set of effects that imitate black-and-white photography film with low contrast.\",\n\"CIPhotoEffectNoir\": \"Applies a preconfigured set of effects that imitate black-and-white photography film with exaggerated contrast.\",\n\"CIPhotoEffectProcess\": \"Applies a preconfigured set of 
effects that imitate vintage photography film with emphasized cool colors.\",\n\"CIPhotoEffectTonal\": \"Applies a preconfigured set of effects that imitate black-and-white photography film without significantly altering contrast.\",\n\"CIPhotoEffectTransfer\": \"Applies a preconfigured set of effects that imitate vintage photography film with emphasized warm colors.\",\n\"CIPinchDistortion\": \"Creates a rectangular area that pinches source pixels inward, distorting those pixels closest to the rectangle the most.\",\n\"CIPinLightBlendMode\": \"Conditionally replaces background image samples with source image samples depending on the brightness of the source image samples.\",\n\"CIPixellate\": \"Makes an image blocky by mapping the image to colored squares whose color is defined by the replaced pixels.\",\n\"CIPointillize\": \"Renders the source image in a pointillistic style.\",\n\"CIQRCodeGenerator\": \"Generates a Quick Response code (two-dimensional barcode) from input data.\",\n\"CIRadialGradient\": \"Generates a gradient that varies radially between two circles having the same center.\",\n\"CIRandomGenerator\": \"Generates an image of infinite extent whose pixel values are made up of four independent, uniformly-distributed random numbers in the 0 to 1 range.\",\n\"CIRippleTransition\": \"Transitions from one image to another by creating a circular wave that expands from the center point, revealing the new image in the wake of the wave.\",\n\"CIRowAverage\": \"Returns a 1-pixel high image that contains the average color for each scan row.\",\n\"CISaturationBlendMode\": \"Uses the luminance and hue values of the background image with the saturation of the input image.\",\n\"CIScreenBlendMode\": \"Multiplies the inverse of the input image samples with the inverse of the background image samples.\",\n\"CISepiaTone\": \"Maps the colors of an image to various shades of brown.\",\n\"CIShadedMaterial\": \"Produces a shaded image from a height 
field.\",\n\"CISharpenLuminance\": \"Increases image detail by sharpening.\",\n\"CISixfoldReflectedTile\": \"Produces a tiled image from a source image by applying a 6-way reflected symmetry.\",\n\"CISixfoldRotatedTile\": \"Produces a tiled image from a source image by rotating the source image at increments of 60 degrees.\",\n\"CISmoothLinearGradient\": \"Generates a gradient that uses an S-curve function to blend colors along a linear axis between two defined endpoints.\",\n\"CISoftLightBlendMode\": \"Either darkens or lightens colors, depending on the input image sample color.\",\n\"CISourceAtopCompositing\": \"Places the input image over the background image, then uses the luminance of the background image to determine what to show.\",\n\"CISourceInCompositing\": \"Uses the background image to define what to leave in the input image, effectively cropping the input image.\",\n\"CISourceOutCompositing\": \"Uses the background image to define what to take out of the input image.\",\n\"CISourceOverCompositing\": \"Places the input image over the input background image.\",\n\"CISpotColor\": \"Replaces one or more color ranges with spot colors.\",\n\"CISpotLight\": \"Applies a directional spotlight effect to an image.\",\n\"CISRGBToneCurveToLinear\": \"Maps color intensity from the sRGB color space to a linear gamma curve.\",\n\"CIStarShineGenerator\": \"Generates a starburst pattern that is similar to a supernova; can be used to simulate a lens flare.\",\n\"CIStraightenFilter\": \"Rotates the source image by the specified angle in radians.\",\n\"CIStretchCrop\": \"Distorts an image by stretching and or cropping it to fit a target size.\",\n\"CIStripesGenerator\": \"Generates a stripe pattern.\",\n\"CISubtractBlendMode\": \"Subtracts the background image sample color from the source image sample color.\",\n\"CISunbeamsGenerator\": \"Generates a sun effect.\",\n\"CISwipeTransition\": \"Transitions from one image to another by simulating a swiping 
action.\",\n\"CITemperatureAndTint\": \"Adapts the reference white point for an image.\",\n\"CIToneCurve\": \"Adjusts tone response of the R, G, and B channels of an image.\",\n\"CITorusLensDistortion\": \"Creates a torus-shaped lens and distorts the portion of the image over which the lens is placed.\",\n\"CITriangleKaleidoscope\": \"Maps a triangular portion of an input image to create a kaleidoscope effect.\",\n\"CITriangleTile\": \"Maps a triangular portion of image to a triangular area and then tiles the result.\",\n\"CITwelvefoldReflectedTile\": \"Produces a tiled image from a source image by rotating the source image at increments of 30 degrees.\",\n\"CITwirlDistortion\": \"Rotates pixels around a point to give a twirling effect.\",\n\"CIUnsharpMask\": \"Increases the contrast of the edges between pixels of different colors in an image.\",\n\"CIVibrance\": \"Adjusts the saturation of an image while keeping pleasing skin tones.\",\n\"CIVignette\": \"Reduces the brightness of an image at the periphery.\",\n\"CIVignetteEffect\": \"Modifies the brightness of an image around the periphery of a specified region.\",\n\"CIVortexDistortion\": \"Rotates pixels around a point to simulate a vortex.\",\n\"CIWhitePointAdjust\": \"Adjusts the reference white point for an image and maps all colors in the source using the new reference.\",\n\"CIZoomBlur\": \"Simulates the effect of zooming the camera while capturing the image.\"\n}\n"
  },
  {
    "path": "Generator/docLookup.json",
    "content": "{\n\"accordionFoldTransition\": \"3228263-accordionfoldtransition\",\n\"additionCompositing\": \"3228264-additioncompositing\",\n\"affineClamp\": \"3228265-affineclamp\",\n\"affineTile\": \"3228266-affinetile\",\n\"areaAverage\": \"3547111-areaaverage\",\n\"areaHistogram\": \"3547112-areahistogram\",\n\"areaLogarithmicHistogram\": \"4401848-arealogarithmichistogram\",\n\"areaMaximum\": \"3547114-areamaximum\",\n\"areaMaximumAlpha\": \"3547113-areamaximumalpha\",\n\"areaMinMax\": \"3547115-areaminmax\",\n\"areaMinMaxRed\": \"3547116-areaminmaxred\",\n\"areaMinimum\": \"3547118-areaminimum\",\n\"areaMinimumAlpha\": \"3547117-areaminimumalpha\",\n\"attributedTextImageGenerator\": \"3228267-attributedtextimagegenerator\",\n\"aztecCodeGenerator\": \"3228268-azteccodegenerator\",\n\"barcodeGenerator\": \"3228269-barcodegenerator\",\n\"barsSwipeTransition\": \"3228270-barsswipetransition\",\n\"bicubicScaleTransform\": \"3228271-bicubicscaletransform\",\n\"blendWithAlphaMask\": \"3228272-blendwithalphamask\",\n\"blendWithBlueMask\": \"3228273-blendwithbluemask\",\n\"blendWithMask\": \"3228274-blendwithmask\",\n\"blendWithRedMask\": \"3228275-blendwithredmask\",\n\"bloom\": \"3228276-bloom\",\n\"blurredRectangleGenerator\": \"4401849-blurredrectanglegenerator\",\n\"bokehBlur\": \"3228277-bokehblur\",\n\"boxBlur\": \"3228278-boxblur\",\n\"bumpDistortion\": \"4401850-bumpdistortion\",\n\"bumpDistortionLinear\": \"4401851-bumpdistortionlinear\",\n\"cannyEdgeDetector\": \"4401852-cannyedgedetector\",\n\"checkerboardGenerator\": \"3228279-checkerboardgenerator\",\n\"circleSplashDistortion\": \"4401853-circlesplashdistortion\",\n\"circularScreen\": \"3228280-circularscreen\",\n\"circularWrap\": \"4401854-circularwrap\",\n\"cmykHalftone\": \"3228259-cmykhalftone\",\n\"code128BarcodeGenerator\": \"3228281-code128barcodegenerator\",\n\"colorAbsoluteDifference\": \"3547119-colorabsolutedifference\",\n\"colorBlendMode\": 
\"3228282-colorblendmode\",\n\"colorBurnBlendMode\": \"3228283-colorburnblendmode\",\n\"colorClamp\": \"3228284-colorclamp\",\n\"colorControls\": \"3228285-colorcontrols\",\n\"colorCrossPolynomial\": \"3228286-colorcrosspolynomial\",\n\"colorCube\": \"3228287-colorcube\",\n\"colorCubeWithColorSpace\": \"3228288-colorcubewithcolorspace\",\n\"colorCubesMixedWithMask\": \"3228289-colorcubesmixedwithmask\",\n\"colorCurves\": \"3228290-colorcurves\",\n\"colorDodgeBlendMode\": \"3228291-colordodgeblendmode\",\n\"colorInvert\": \"3228292-colorinvert\",\n\"colorMap\": \"3228293-colormap\",\n\"colorMatrix\": \"3228294-colormatrix\",\n\"colorMonochrome\": \"3228295-colormonochrome\",\n\"colorPolynomial\": \"3228296-colorpolynomial\",\n\"colorPosterize\": \"3228297-colorposterize\",\n\"colorThreshold\": \"3547120-colorthreshold\",\n\"colorThresholdOtsu\": \"4401855-colorthresholdotsu\",\n\"columnAverage\": \"3547121-columnaverage\",\n\"comicEffect\": \"3228298-comiceffect\",\n\"convertLabToRGB\": \"4401856-convertlabtorgb\",\n\"convertRGBtoLab\": \"4401857-convertrgbtolab\",\n\"convolution3X3\": \"3228299-convolution3x3\",\n\"convolution5X5\": \"3228300-convolution5x5\",\n\"convolution7X7\": \"3228301-convolution7x7\",\n\"convolution9Horizontal\": \"3228302-convolution9horizontal\",\n\"convolution9Vertical\": \"3228303-convolution9vertical\",\n\"convolutionRGB3X3\": \"4401858-convolutionrgb3x3\",\n\"convolutionRGB5X5\": \"4401859-convolutionrgb5x5\",\n\"convolutionRGB7X7\": \"4401860-convolutionrgb7x7\",\n\"convolutionRGB9Horizontal\": \"4401861-convolutionrgb9horizontal\",\n\"convolutionRGB9Vertical\": \"4401862-convolutionrgb9vertical\",\n\"copyMachineTransition\": \"3228304-copymachinetransition\",\n\"coreMLModel\": \"3228305-coremlmodel\",\n\"crystallize\": \"3228306-crystallize\",\n\"darkenBlendMode\": \"3228307-darkenblendmode\",\n\"depthOfField\": \"3228308-depthoffield\",\n\"depthToDisparity\": \"3228309-depthtodisparity\",\n\"differenceBlendMode\": 
\"3228310-differenceblendmode\",\n\"discBlur\": \"3228311-discblur\",\n\"disintegrateWithMaskTransition\": \"3228312-disintegratewithmasktransition\",\n\"disparityToDepth\": \"3228313-disparitytodepth\",\n\"displacementDistortion\": \"4401863-displacementdistortion\",\n\"dissolveTransition\": \"3228314-dissolvetransition\",\n\"dither\": \"3228315-dither\",\n\"divideBlendMode\": \"3228316-divideblendmode\",\n\"documentEnhancer\": \"3228317-documentenhancer\",\n\"dotScreen\": \"3228318-dotscreen\",\n\"droste\": \"4401864-droste\",\n\"edgePreserveUpsample\": \"3228319-edgepreserveupsample\",\n\"edgeWork\": \"3228320-edgework\",\n\"edges\": \"3228321-edges\",\n\"eightfoldReflectedTile\": \"3228322-eightfoldreflectedtile\",\n\"exclusionBlendMode\": \"3228323-exclusionblendmode\",\n\"exposureAdjust\": \"3228324-exposureadjust\",\n\"falseColor\": \"3228325-falsecolor\",\n\"flashTransition\": \"3228326-flashtransition\",\n\"fourfoldReflectedTile\": \"3228327-fourfoldreflectedtile\",\n\"fourfoldRotatedTile\": \"3228328-fourfoldrotatedtile\",\n\"fourfoldTranslatedTile\": \"3228329-fourfoldtranslatedtile\",\n\"gaborGradients\": \"3325508-gaborgradients\",\n\"gammaAdjust\": \"3228330-gammaadjust\",\n\"gaussianBlur\": \"3228331-gaussianblur\",\n\"gaussianGradient\": \"3228332-gaussiangradient\",\n\"glassDistortion\": \"4401865-glassdistortion\",\n\"glassLozenge\": \"4401866-glasslozenge\",\n\"glideReflectedTile\": \"3228333-glidereflectedtile\",\n\"gloom\": \"3228334-gloom\",\n\"hardLightBlendMode\": \"3228335-hardlightblendmode\",\n\"hatchedScreen\": \"3228336-hatchedscreen\",\n\"heightFieldFromMask\": \"3228337-heightfieldfrommask\",\n\"hexagonalPixellate\": \"3228338-hexagonalpixellate\",\n\"highlightShadowAdjust\": \"3228339-highlightshadowadjust\",\n\"histogramDisplay\": \"3547122-histogramdisplay\",\n\"holeDistortion\": \"4401867-holedistortion\",\n\"hueAdjust\": \"3228340-hueadjust\",\n\"hueBlendMode\": \"3228341-hueblendmode\",\n\"hueSaturationValueGradient\": 
\"3228342-huesaturationvaluegradient\",\n\"kMeans\": \"3547110-kmeans\",\n\"kaleidoscope\": \"3228343-kaleidoscope\",\n\"keystoneCorrectionCombined\": \"3325509-keystonecorrectioncombined\",\n\"keystoneCorrectionHorizontal\": \"3325510-keystonecorrectionhorizontal\",\n\"keystoneCorrectionVertical\": \"3325511-keystonecorrectionvertical\",\n\"labDeltaE\": \"3228260-labdeltae\",\n\"lanczosScaleTransform\": \"3228344-lanczosscaletransform\",\n\"lenticularHaloGenerator\": \"3228345-lenticularhalogenerator\",\n\"lightTunnel\": \"4401868-lighttunnel\",\n\"lightenBlendMode\": \"3228346-lightenblendmode\",\n\"lineOverlay\": \"3228347-lineoverlay\",\n\"lineScreen\": \"3228348-linescreen\",\n\"linearBurnBlendMode\": \"3228349-linearburnblendmode\",\n\"linearDodgeBlendMode\": \"3228350-lineardodgeblendmode\",\n\"linearGradient\": \"3228351-lineargradient\",\n\"linearLightBlendMode\": \"4401869-linearlightblendmode\",\n\"linearToSRGBToneCurve\": \"3228352-lineartosrgbtonecurve\",\n\"luminosityBlendMode\": \"3228353-luminosityblendmode\",\n\"maskToAlpha\": \"3228354-masktoalpha\",\n\"maskedVariableBlur\": \"3228355-maskedvariableblur\",\n\"maximumComponent\": \"3228356-maximumcomponent\",\n\"maximumCompositing\": \"3228357-maximumcompositing\",\n\"median\": \"3228358-median\",\n\"meshGenerator\": \"3228359-meshgenerator\",\n\"minimumComponent\": \"3228360-minimumcomponent\",\n\"minimumCompositing\": \"3228361-minimumcompositing\",\n\"mix\": \"3228362-mix\",\n\"modTransition\": \"3228363-modtransition\",\n\"morphologyGradient\": \"3228364-morphologygradient\",\n\"morphologyMaximum\": \"3228365-morphologymaximum\",\n\"morphologyMinimum\": \"3228366-morphologyminimum\",\n\"morphologyRectangleMaximum\": \"3228367-morphologyrectanglemaximum\",\n\"morphologyRectangleMinimum\": \"3228368-morphologyrectangleminimum\",\n\"motionBlur\": \"3228369-motionblur\",\n\"multiplyBlendMode\": \"3228370-multiplyblendmode\",\n\"multiplyCompositing\": 
\"3228371-multiplycompositing\",\n\"ninePartStretched\": \"4401871-ninepartstretched\",\n\"ninePartTiled\": \"4401872-nineparttiled\",\n\"noiseReduction\": \"3228372-noisereduction\",\n\"opTile\": \"3228373-optile\",\n\"overlayBlendMode\": \"3228374-overlayblendmode\",\n\"pageCurlTransition\": \"3228375-pagecurltransition\",\n\"pageCurlWithShadowTransition\": \"3228376-pagecurlwithshadowtransition\",\n\"paletteCentroid\": \"3228377-palettecentroid\",\n\"palettize\": \"3228378-palettize\",\n\"parallelogramTile\": \"3228379-parallelogramtile\",\n\"pdf417BarcodeGenerator\": \"3228261-pdf417barcodegenerator\",\n\"personSegmentation\": \"4401873-personsegmentation\",\n\"perspectiveCorrection\": \"3228380-perspectivecorrection\",\n\"perspectiveRotate\": \"3325512-perspectiverotate\",\n\"perspectiveTile\": \"3228381-perspectivetile\",\n\"perspectiveTransform\": \"3228382-perspectivetransform\",\n\"perspectiveTransformWithExtent\": \"3228383-perspectivetransformwithextent\",\n\"photoEffectChrome\": \"3228384-photoeffectchrome\",\n\"photoEffectFade\": \"3228385-photoeffectfade\",\n\"photoEffectInstant\": \"3228386-photoeffectinstant\",\n\"photoEffectMono\": \"3228387-photoeffectmono\",\n\"photoEffectNoir\": \"3228388-photoeffectnoir\",\n\"photoEffectProcess\": \"3228389-photoeffectprocess\",\n\"photoEffectTonal\": \"3228390-photoeffecttonal\",\n\"photoEffectTransfer\": \"3228391-photoeffecttransfer\",\n\"pinLightBlendMode\": \"3228392-pinlightblendmode\",\n\"pinchDistortion\": \"4401874-pinchdistortion\",\n\"pixellate\": \"3228393-pixellate\",\n\"pointillize\": \"3228394-pointillize\",\n\"qrCodeGenerator\": \"3228262-qrcodegenerator\",\n\"radialGradient\": \"3228395-radialgradient\",\n\"randomGenerator\": \"3228396-randomgenerator\",\n\"rippleTransition\": \"3228397-rippletransition\",\n\"roundedRectangleGenerator\": \"3335007-roundedrectanglegenerator\",\n\"roundedRectangleStrokeGenerator\": \"4401875-roundedrectanglestrokegenerator\",\n\"rowAverage\": 
\"3547123-rowaverage\",\n\"sRGBToneCurveToLinear\": \"3228398-srgbtonecurvetolinear\",\n\"saliencyMap\": \"3228399-saliencymap\",\n\"saturationBlendMode\": \"3228400-saturationblendmode\",\n\"screenBlendMode\": \"3228401-screenblendmode\",\n\"sepiaTone\": \"3228402-sepiatone\",\n\"shadedMaterial\": \"3228403-shadedmaterial\",\n\"sharpenLuminance\": \"3228404-sharpenluminance\",\n\"sixfoldReflectedTile\": \"3228405-sixfoldreflectedtile\",\n\"sixfoldRotatedTile\": \"3228406-sixfoldrotatedtile\",\n\"smoothLinearGradient\": \"3228407-smoothlineargradient\",\n\"sobelGradients\": \"4401876-sobelgradients\",\n\"softLightBlendMode\": \"3228408-softlightblendmode\",\n\"sourceAtopCompositing\": \"3228409-sourceatopcompositing\",\n\"sourceInCompositing\": \"3228410-sourceincompositing\",\n\"sourceOutCompositing\": \"3228411-sourceoutcompositing\",\n\"sourceOverCompositing\": \"3228412-sourceovercompositing\",\n\"spotColor\": \"3228413-spotcolor\",\n\"spotLight\": \"3228414-spotlight\",\n\"starShineGenerator\": \"3228415-starshinegenerator\",\n\"straighten\": \"3228416-straighten\",\n\"stretchCrop\": \"4401877-stretchcrop\",\n\"stripesGenerator\": \"3228417-stripesgenerator\",\n\"subtractBlendMode\": \"3228418-subtractblendmode\",\n\"sunbeamsGenerator\": \"3228419-sunbeamsgenerator\",\n\"swipeTransition\": \"3228420-swipetransition\",\n\"temperatureAndTint\": \"3228421-temperatureandtint\",\n\"textImageGenerator\": \"3228422-textimagegenerator\",\n\"thermal\": \"3228423-thermal\",\n\"toneCurve\": \"3228424-tonecurve\",\n\"torusLensDistortion\": \"4401879-toruslensdistortion\",\n\"triangleKaleidoscope\": \"3228425-trianglekaleidoscope\",\n\"triangleTile\": \"3228426-triangletile\",\n\"twelvefoldReflectedTile\": \"3228427-twelvefoldreflectedtile\",\n\"twirlDistortion\": \"4401880-twirldistortion\",\n\"unsharpMask\": \"3228428-unsharpmask\",\n\"vibrance\": \"3228429-vibrance\",\n\"vignette\": \"3228431-vignette\",\n\"vignetteEffect\": 
\"3228430-vignetteeffect\",\n\"vividLightBlendMode\": \"4401881-vividlightblendmode\",\n\"vortexDistortion\": \"4401882-vortexdistortion\",\n\"whitePointAdjust\": \"3228432-whitepointadjust\",\n\"xRay\": \"3228433-xray\",\n\"zoomBlur\": \"3228434-zoomblur\"\n}\n"
  },
  {
    "path": "LICENSE.txt",
    "content": "MIT License\n\nCopyright (c) 2023 Dan Wood\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE."
  },
  {
    "path": "Package.swift",
    "content": "// swift-tools-version: 5.7\n// The swift-tools-version declares the minimum version of Swift required to build this package.\n\nimport PackageDescription\n\nlet package = Package(\n    name: \"SwiftUICoreImage\",\n    platforms: [\n        .macOS(.v10_15),\n        .iOS(.v13)\n    ],\n    products: [\n        // Products define the executables and libraries a package produces, and make them visible to other packages.\n        .library(\n            name: \"SwiftUICoreImage\",\n            targets: [\"SwiftUICoreImage\"]),\n    ],\n    dependencies: [\n        // Dependencies declare other packages that this package depends on.\n        // .package(url: /* package url */, from: \"1.0.0\"),\n    ],\n    targets: [\n        // Targets are the basic building blocks of a package. A target can define a module or a test suite.\n        // Targets can depend on other targets in this package, and on products in packages this package depends on.\n        .target(\n            name: \"SwiftUICoreImage\",\n            dependencies: [],\n            path: \"Sources\")\n    ]\n)\n"
  },
  {
    "path": "README.md",
    "content": "# SwiftUICoreImage\n\nHelp for using Core Image within the context of SwiftUI. Also useful even without SwiftUI.\n\n## Introduction\n\nCore Image is a wonderful image-processsing toolkit in macOS and iOS, but it's a bit clunky to use. Even after Apple added Swift APIs to many of the filters ([CoreImage.CIFilterBuiltins](https://developer.apple.com/documentation/coreimage/methods_and_protocols_for_filter_creation)), it's still pretty tedious to chain filters to images.\n\nThe purpose of this package is to provide an easier way to chain multiple filters to CIImage instances and then render them into SwiftUI (or any other context — SwiftUI is not needed).\n\n```Swift\n    Image(ciImage: CIImage(\"Bernie.jpeg\")\n        .sepiaTone(intensity: sepia)\n        .recropping { image in\n            image\n                .clampedToExtent(active: clamped)\n                .gaussianBlur(radius: gaussianBlurRadius)\n        }\n    )\n        .resizable()\n        .aspectRatio(contentMode: .fit)\n```\n\n## Manifest\n\nIncluded in this package is:\n\n * CIImage-Filters.swift\n    * about 200 modifiers on `CIImage` that return a new modified `CIImage` (or the original if unmodified)\n    * 20 static functions that return a newly generated `CIImage`\n    * Includes filters up to iOS 18, macOS 15\n* CIImage-Extensions.swift\n    * Convenience initializers for `CIImage` from a resource name and from an `NSImage`/`UIImage`\n    * Modifiers for `CIImage` to return cropped, scaled, etc. 
to be easier to work with SwiftUI\n    * Overloads of several built-in `CIImage` modifier functions that take an `active` boolean parameter\n* Image-Extensions.swift\n    * Convenience initializer to create a SwiftUI `Image` from a `CIImage`\n\n## How This Works\n\nSimilarly to how SwiftUI view modifiers each return a modified `View` instance, these modifiers on `CIImage` take care of the core image chaining by creating a corresponding `CIFilter`, hooking up the `inputImage` for you, and returning the resulting `outputImage`. \n\nWhen creating SwiftUI code, I think it's important that you can use [Inert Modifiers](https://developer.apple.com/videos/play/wwdc2021/10022/?time=2303) in which you pass in some parameter that causes the modifier to have no effect. (For instance, specifying opacity of 1.0 or padding of 0.0 to a view.)  \n\nIn this code, I've made sure that each of our image modifiers come with inert modifiers: in some cases it's passing in a parameter that clearly has no effect (e.g. zero intensity, zero radius); or it's a nil background image when combining with another image; or a boolean `active` parameter. If the parameter(s) specified would cause no change in the image, then the identity (self) is returned forthwith.\n\nThe contents of CIImage-Filters.swift are generated source code, using code that I've included in this repository (`CIImage-Generation.swift`, not included in the package import). This loops through the core image metadata that Apple provides (`CIFilter.filterNames(inCategories: nil)`). Unfortunately this list is somewhat out of date and contains a number of inconsistencies that I've done my best to overcome.  There are some JSON files that provide additional metadata such as a list of the functions that actually do have online documentation — 56 functions aren't documented so some guesswork is needed — or repairs to missing or obsolete documentation. 
You probably won't need to run this code unless you have some special requirements or the list has been updated in a future OS release.\n\n## Using With SwiftUI\n\nRemember that Core Image operations are really just a \"recipe\" for the processing steps; the actual work is not performed until the image needs to be rendered to a bitmap. \n\nInstead of creating a SwiftUI `Image` using a [built-in initializer](https://developer.apple.com/documentation/swiftui/image) from a resource name or other image type (`CGImage`, `NSImage`, `UIImage`), this code provides a new initializer to create an `Image` from a `CIImage`. When SwiftUI needs to render the image, the Core Image is rendered to the screen.\n\nYour typical approach, then, will be to create an `Image`, passing in a `CIImage` created using one of the [built-in initializers](https://developer.apple.com/documentation/coreimage/ciimage) or the convenience methods included here to create from a resource name or another image type.\n\nThen, just chain modifiers to that `CIImage` to indicate what to modify.\n\nMany modifiers are simple. For instance:\n\n```Swift\n    Image(ciImage: CIImage(\"Halloween.jpeg\")\n        .xRay()\n    )\n```\n\nIf you wish to toggle whether the filter is applied, use the `active` parameter (default value of `true`):\n\n```Swift\n    Image(ciImage: CIImage(\"Halloween.jpeg\")\n        .xRay(active: isMachineOn)\n    )\n```\n\nChain any number of modifiers found in `CIImage-Filters.swift` to construct the desired result.\n\n### Image Scaling\n\nMany Core Image filters use pixel values for parameters. Therefore, it may be needed to get an image scaled to an appropriate size _before_ applying operations. 
For example, applying a 10-pixel-radius blur to a 6000⨉4000 image that is then scaled down to 300⨉200 might not yield what you want; perhaps you want to first scale the image to 300⨉200 and then apply the 10-pixel-radius blur.\n\nCore Image provides a scaling operation (`CILanczosScaleTransform` and `lanczosScaleTransform()`) but this package also includes more convenient alternatives: `scaledToFill()` and `scaledToFit()` where you pass in the dimensions you want.\n\nA typical use of this works well in conjunction with `GeometryReader`. For example:\n\n```Swift\n    GeometryReader { geo in\n        let geoSize: CGSize = geo.frame(in: .local).integral.size\n        // Resize image to double the frame size, assuming we are on a retina display\n        let newSize: CGSize = CGSize(width: geoSize.width * 2,\n                                    height: geoSize.height * 2)\n\n        Image(ciImage: CIImage(\"M83.jpeg\")\n            .scaledToFit(newSize)\n            .sharpenLuminance(sharpness: 1.0, radius: 5)\n        )\n            .resizable()    // Make sure retina image is scaled to fit\n            .aspectRatio(contentMode: .fit)\n    }\n``` \n![Compare original, sharpened without pre-scaling, sharpened after pre-scaled](./Resources/sharpening.jpeg)\n\n\n## Using Without SwiftUI\n\nSwiftUI is not needed at all. Just create a `CIImage` and perform operations. Then, render to a bitmap.\n\n```Swift\n    let tiledImage: CIImage = CIImage(\"HeyGoodMorning.png\")\n        .triangleTile(center: .zero, angle: 0.356, width: 2.0)\n\n    imageView.image = UIImage(ciImage: tiledImage)\n```\n\n## Other Notes\n\nIf you've used Core Image, you'll know that sometimes you need to play with the extent of an image, e.g. clamping an image to have infinite edges before applying a gaussian blur, then re-cropping to the image's original extent. To accomplish this, you can use the **`recropping`** modifier which is followed by a closure. 
The operation saves the extent of the image, applies whatever is in the closure, and then re-crops to that extent. In the example below, the image in `ciImage` is converted into an image with the pixel colors along its edges extending infinitely in all directions, then it is blurred, and then upon exit from the closure, the returned image is re-cropped. \n\n```Swift\n    ciImage\n        .recropping { image in\n            image\n                .clampedToExtent()\n                .gaussianBlur(radius: 10)\n        }\n```\n\n![Compare unblurred, improper blurring, and proper blurring](./Resources/blurring.jpeg)\n\nThe `recropping` modifier is also useful if you find that a filter (e.g. `comicEffect()`) has grown your image's extent slightly and you want to clamp it to its original size.\n\nAnother useful operation is **`replacing`**. Much like `recropping` except that it does not mess with the extent of the image. You pass in a closure, which starts with the image you were working with; your closure returns a new image. This can be useful when working with the compositing operations in Core Image, which require a *background* image to be passed in.  What if your chain of operations is on the background image, and you want to overlay something on top? 
Just wrap your operation in `.replacing` and return the composited image.\n\n```Swift\n    ciImage\n        .replacing { backgroundImage in\n            ciImage2\n                .sourceAtopCompositing(backgroundImage: backgroundImage)\n        }\n```\n\nIn this case, the image in `ciImage2` is the foreground image, placed atop the `backgroundImage`, then returned to the chain of operations.\n\n## Using Package\n\nIn Xcode, Choose File > Add Packages… then enter the URL of this repository into the search bar, and continue from there.\n\nIn your code:\n\n```Swift\nimport SwiftUICoreImage\n```\n\nThat's it!\n\n---\n\n## Future Improvements\n\nRather than generating repetitive code, it would be nice to define some macros that expand to the repetitive code!\nThe advantage of this is that one could just import the macro package and just define only the filters they want,\nrather than defining all 200+ mostly-unused filters.\n\n[Apparently](https://forums.swift.org/t/macros-attached-macros-to-methods-and-functions/65531/6) \nthis would require [Function Body Macros](https://github.com/swiftlang/swift-evolution/blob/main/proposals/0415-function-body-macros.md) \nwhich are not available in Swift 5.x but might make it into Swift 6.0.\n\nIdeally we would specify something like this:\n```\n \t@CoreImageExtension\n\tfunc pixellate(center: CGPoint, scale: Float, active: Bool = true) -> CIImage\n```\n\nAnd this would fill in the body with some code that does the following:\n * Guard statement to return self if active flag is false (or other inert modifier; maybe we'd have to indicate which parameter is this?)\n * Create built-in CIFilter object based off of name of the function being expanded\n * Set all the parameters as specified in the function parameters. 
Maybe need some way to indicate parameters that need to be cast to another type\n * return the outputImage from the filter\n\n---\n\nPlease file an issue or pull request if you can think of an improvement to the code or documentation of the generated filters, \nor find any other helpful utilities for manipulating Core Images in this toolkit!\n"
  },
  {
    "path": "Sources/CIImage-Extensions.swift",
    "content": "//\n//  CIImage-Extensions.swift\n//  SwiftUI Core Image\n//\n//  Created by Dan Wood on 5/9/23.\n//\n\nimport SwiftUI\nimport CoreImage\nimport CoreImage.CIFilterBuiltins\n\npublic extension CIImage {\n\n\t// Pretty fast. Subsequent invocations are cached.\n\tconvenience init(_ name: String, bundle: Bundle? = nil) {\n#if canImport(UIKit)\n\t\tif let uiImage = UIImage(named: name, in: bundle, with: nil) {\n\t\t\tself.init(uiImage: uiImage)\n\t\t} else {\n\t\t\tself.init()\n\t\t}\n#elseif canImport(AppKit)\n\t\tlet nsImage: NSImage?\n\t\tif let bundle {\n\t\t\tnsImage = bundle.image(forResource: name)\n\t\t} else {\n\t\t\tnsImage = NSImage(named: name)\n\t\t}\n\t\tif let nsImage {\n\t\t\tself.init(nsImage: nsImage)\n\t\t} else {\n\t\t\tself.init()\n\t\t}\n#endif\n\t}\n\n#if canImport(UIKit)\n\tconvenience init(uiImage: UIImage) {\n\t\tif let cgImage = uiImage.cgImage {\n\t\t\tself.init(cgImage: cgImage)\n\t\t} else {\n\t\t\tself.init()\n\t\t}\n\t}\n#elseif canImport(AppKit)\n\tconvenience init(nsImage: NSImage) {\n\t\tif let cgImage = nsImage.cgImage(forProposedRect: nil, context: nil, hints: nil) {\t// TODO: Maybe consider NSGraphicsContext\n\t\t\tself.init(cgImage: cgImage)\n\t\t} else {\n\t\t\tself.init()\n\t\t}\n\t}\n#endif\n\n\t/// Useful for debugging when chaining multiple CIImage modifiers together.\n\tfunc logExtent(file: String = #file, line: Int = #line) -> CIImage {\n\t\tNSLog(\"\\(file):\\(line) \\(self.extent)\")\n\t\treturn self\n\t}\n\n}\n\n// MARK: USEFUL EXTENSIONS FOR WORKING IN A SWIFTUI-LIKE FASHION\n\npublic extension CIImage {\n\n\t/// Save the extent and then re-crop to that extent after applying whatever is in the closure\n\tfunc recropping(apply: (CIImage) -> CIImage) -> CIImage {\n\t\tlet savedExtent: CGRect = extent\n\t\tlet newCIImage = apply(self)\n\t\tlet cropped = newCIImage.cropped(to: savedExtent)\n\t\treturn cropped\n\t}\n\n\t/// Apply to whatever is in the closure. 
Useful if the current image is used as a parameter to a new image process.\n\tfunc replacing(apply: (CIImage) -> CIImage) -> CIImage {\n\t\tlet newCIImage = apply(self)\n\t\treturn newCIImage\n\t}\n\n\t/// Resize an image down so it fully fills the container, cropping in the center as needed.\n\t@available(macOS 10.15, *)\n\tfunc scaledToFill(_ size: CGSize?) -> CIImage {\n\t\tguard let size else { return self }\n\t\tlet currentSize = extent.size\n\t\tlet largerRatio: CGFloat = max(size.width / currentSize.width, size.height / currentSize.height)\n\t\tlet newSize: CGSize = CGSize(width: currentSize.width * largerRatio, height: currentSize.height * largerRatio)\n\t\t// Scale to the larger of two ratios so it fills\n\t\tlet scaled = self.lanczosScaleTransform(scale: Float(largerRatio))\n\t\tlet clamped = scaled.clampedToExtent()\n\t\tlet cropped = clamped.cropped(to: CGRect(x: (newSize.width - size.width) / 2,\n\t\t\t\t\t\t\t\t\t\t\t\t y: (newSize.height - size.height) / 2,\n\t\t\t\t\t\t\t\t\t\t\t\t width: size.width, height: size.height))\n\t\treturn cropped\n\t}\n\n\t/// Resize an image down so it fully fits in container, centered as needed. No cropping.\n\t@available(macOS 10.15, *)\n\tfunc scaledToFit(_ size: CGSize?) 
-> CIImage {\n\t\tguard let size else { return self }\n\t\tlet currentSize = extent.size\n\t\tlet smallerRatio: CGFloat = min(size.width / currentSize.width, size.height / currentSize.height)\n\t\tlet newSize: CGSize = CGSize(width: currentSize.width * smallerRatio, height: currentSize.height * smallerRatio)\n\t\t// Scale to the smaller of two ratios so it fits\n\t\tlet scaled = self.lanczosScaleTransform(scale: Float(smallerRatio))\n\t\tlet clamped = scaled.clampedToExtent()\n\t\tlet cropped = clamped.cropped(to: CGRect(origin: .zero, size: newSize))\n\t\treturn cropped\n\t}\n\n\t/// convenience, to be similar to SwiftUI view offset\n\tfunc offset(by offset: CGSize) -> CIImage {\n\t\tguard offset != .zero else { return self }\n\t\treturn self.transformed(by: CGAffineTransform(translationX: offset.width, y: offset.height))\n\t}\n\n}\n\n// MARK: OVERLOADS OF EXISTING CIIMAGE OPERATIONS SO WE CAN PASS IN 'ACTIVE' BOOLEAN TO BE ABLE TO HAVE INERT MODIFIER\n\npublic extension CIImage {\n\n\t// Don't overload these; already a way to pass in arguments to get an inert modifier\n\t//open func transformed(by matrix: CGAffineTransform) -> CIImage // pass in CGAffineTransform.identity\n\t//open func transformed(by matrix: CGAffineTransform, highQualityDownsample: Bool) -> CIImage // pass in CGAffineTransform.identity\n\t//open func composited(over dest: CIImage) -> CIImage // pass in empty image\n\t//open func cropped(to rect: CGRect) -> CIImage // Pass in CGRect.infinite\n\t//open func clamped(to rect: CGRect) -> CIImage // Pass in CGRect.infinite\n\t//open func settingProperties(_ properties: [AnyHashable : Any]) -> CIImage // Pass in empty to add no properties\n\n\t// Maybe not worth dealing with.\n\t//open func oriented(forExifOrientation orientation: Int32) -> CIImage\n\t//open func oriented(_ orientation: CGImagePropertyOrientation) -> CIImage\n\t//open func matchedToWorkingSpace(from colorSpace: CGColorSpace) -> CIImage?\n\t//open func matchedFromWorkingSpace(to 
colorSpace: CGColorSpace) -> CIImage?\n\t//open func insertingIntermediate() -> CIImage\n\t//open func insertingIntermediate(cache: Bool) -> CIImage\n\t//open func convertingWorkingSpaceToLab() -> CIImage\n\t//open func convertingLabToWorkingSpace() -> CIImage\n\n\t// Doesn't really apply since the whole point is to have image modifiers for all the filters.\n\t//open func applyingFilter(_ filterName: String, parameters params: [String : Any]) -> CIImage\n\t//open func applyingFilter(_ filterName: String) -> CIImage\n\n\t// Don't implement because we have an equivalent operation already. Sigma is just the pixel radius.\n\t//open func applyingGaussianBlur(sigma: Double) -> CIImage\n\n\t// OK these get an active overload.\n\n\t/* Return a new infinite image by replicating the edge pixels of the receiver image. */\n\t@available(macOS 10.10, *)\n\tfunc clampedToExtent(active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\t\treturn clampedToExtent()\n\t}\n\n\t/* Return a new image by multiplying the receiver's RGB values by its alpha. */\n\t@available(macOS 10.12, *)\n\tfunc premultiplyingAlpha(active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\t\treturn premultiplyingAlpha()\n\t}\n\n\t/* Return a new image by dividing the receiver's RGB values by its alpha. */\n\t@available(macOS 10.12, *)\n\tfunc unpremultiplyingAlpha(active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\t\treturn unpremultiplyingAlpha()\n\t}\n\n\t/* Return a new image with alpha set to 1 within the rectangle and 0 outside. */\n\t@available(macOS 10.12, *)\n\tfunc settingAlphaOne(in extent: CGRect, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\t\treturn settingAlphaOne(in: extent)\n\t}\n\n\t/* Returns a new image by changing the receiver's sample mode to bilinear interpolation. 
*/\n\t@available(macOS 10.13, *)\n\tfunc samplingLinear(active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\t\treturn samplingLinear()\n\t}\n\n\t/* Returns a new image by changing the receiver's sample mode to nearest neighbor. */\n\t@available(macOS 10.13, *)\n\tfunc samplingNearest(active: Bool = true) -> CIImage {\t// equivalent to CISampleNearest filter\n\t\tguard active else { return self }\n\t\treturn samplingNearest()\n\t}\n}\n\n"
  },
  {
    "path": "Sources/CIImage-Filters.swift",
    "content": "//\n// Automatically generated by CIImage-Generation.swift - do not edit\n//\n\nimport Foundation\nimport CoreImage\nimport CoreImage.CIFilterBuiltins\nimport CoreML\nimport AVFoundation\n\npublic extension CIImage {\n\n\t//\n\t// MARK: IMAGE-TO-IMAGE FILTERS\n\t//\n\n\t/// Accordion Fold Transition\n\t///\n\t/// Transitions from one image to another of differing dimensions by unfolding and crossfading.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228263-accordionfoldtransition)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAccordionFoldTransition)\n\t///\n\t/// Categories: Transition, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - targetImage: The target image for a transition.\n\t///   - bottomHeight: The height in pixels from the bottom of the image to the bottom of the folded part of the transition. (0...)\n\t///   - numberOfFolds: The number of folds used in the transition. (1...50)\n\t///   - foldShadowAmount: A value that specifies the intensity of the shadow in the transition. (0...1)\n\t///   - time: The duration of the effect. 
(0...1)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc accordionFoldTransition(targetImage: CIImage,\n\t\t\t\t\t\t\t\t bottomHeight: Float = 0,\n\t\t\t\t\t\t\t\t numberOfFolds: Float = 3,\n\t\t\t\t\t\t\t\t foldShadowAmount: Float = 0.1,\n\t\t\t\t\t\t\t\t time: Float,\n\t\t\t\t\t\t\t\t active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.accordionFoldTransition() // CIAccordionFoldTransition\n\t\tfilter.inputImage = self\n\t\tfilter.targetImage = targetImage\n\t\tfilter.bottomHeight = bottomHeight\n\t\tfilter.numberOfFolds = numberOfFolds\n\t\tfilter.foldShadowAmount = foldShadowAmount\n\t\tfilter.time = time\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Addition\n\t///\n\t/// Adds color components to achieve a brightening effect. This filter is typically used to add highlights and lens flare effects.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228264-additioncompositing)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAdditionCompositing)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc additionCompositing(backgroundImage: CIImage?) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.additionCompositing() // CIAdditionCompositing\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Affine Clamp\n\t///\n\t/// Performs an affine transformation on a source image and then clamps the pixels at the edge of the transformed image, extending them outwards. This filter performs similarly to the “Affine Transform” filter except that it produces an image with infinite extent. You can use this filter when you need to blur an image but you want to avoid a soft, black fringe along the edges.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228265-affineclamp)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAffineClamp)\n\t///\n\t/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - transform: The transform to apply to the image.\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc affineClamp(transform: CGAffineTransform = CGAffineTransform.identity) -> CIImage {\n\t\tguard transform != CGAffineTransform.identity else { return self }\n\n\t\tlet filter = CIFilter.affineClamp() // CIAffineClamp\n\t\tfilter.inputImage = self\n\t\tfilter.transform = transform\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Affine Tile\n\t///\n\t/// Applies an affine transformation to an image and then tiles the transformed image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228266-affinetile)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAffineTile)\n\t///\n\t/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - transform: The transform to apply to the image.\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc affineTile(transform: CGAffineTransform = CGAffineTransform.identity) -> CIImage {\n\t\tguard transform != CGAffineTransform.identity else { return self }\n\n\t\tlet filter = CIFilter.affineTile() // CIAffineTile\n\t\tfilter.inputImage = self\n\t\tfilter.transform = transform\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t// ℹ️ CIAffineTransform already has a CIImage method: func transformed(by: CGAffineTransform) -> CIImage\n\n\n\t/// Area Alpha Weighted Histogram\n\t///\n\t/// Calculates alpha-weighted histograms of the unpremultiplied R, G, B channels for the specified area of an image. The output image is a one pixel tall image containing the histogram data for the RGB channels.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401846-areaalphaweightedhistogram)\n\t///\n\t/// Categories: Reduction, Video, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - extent: A rectangle that defines the extent of the effect.\n\t///   - scale: The scale value to use for the histogram values. If the scale is 1.0 and the image is opaque, then the bins in the resulting image will add up to 1.0. (0...)\n\t///   - count: The number of bins for the histogram. This value will determine the width of the output image. 
(1...2048)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 18, macOS 15.0, *)\n\tfunc areaAlphaWeightedHistogram(extent: CGRect, scale: Float = 1, count: Int = 64, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.areaAlphaWeightedHistogram() // CIAreaAlphaWeightedHistogram\n\t\tfilter.inputImage = self\n\t\tfilter.extent = extent\n\t\tfilter.scale = scale\n\t\tfilter.count = count\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Area Average\n\t///\n\t/// Calculates the average color for the specified area in an image, returning the result in a pixel.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547111-areaaverage)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAreaAverage)\n\t///\n\t/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - extent: A rectangle that specifies the subregion of the image that you want to process.\n\t///   - active: should this filter be applied\n\t/// - Returns: a single-pixel image  that contains the average color for the region of interest, or identity if `active` is false\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc areaAverage(extent: CGRect, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.areaAverage() // CIAreaAverage\n\t\tfilter.inputImage = self\n\t\tfilter.extent = extent\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Area Bounds Red\n\t///\n\t/// Calculates the approximate bounding box of pixels within the specified area of an image where the red component values are non-zero. 
The result is 1x1 pixel image where the RGBA values contain the normalized X,Y,W,H dimensions of the bounding box.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401847-areaboundsred)\n\t///\n\t/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - extent: A rectangle that specifies the subregion of the image that you want to process.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 18, macOS 15.0, *)\n\tfunc areaBoundsRed(extent: CGRect, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.areaBoundsRed() // CIAreaBoundsRed\n\t\tfilter.inputImage = self\n\t\tfilter.extent = extent\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Area Histogram\n\t///\n\t/// Calculates histograms of the R, G, B, and A channels of the specified area of an image. The output image is a one pixel tall image containing the histogram data for all four channels.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547112-areahistogram)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAreaHistogram)\n\t///\n\t/// Categories: Reduction, Video, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - extent: A rectangle that, after intersection with the image extent, specifies the subregion of the image that you want to process.\n\t///   - scale: The scale value to use for the histogram values. If the scale is 1.0, then the bins in the resulting image will add up to 1.0. (0...)\n\t///   - count: The number of bins for the histogram. This value will determine the width of the output image. 
(1...2048)\n\t///   - active: should this filter be applied\n\t/// - Returns: a 1D image (inputCount wide by one pixel high) that contains the component-wise histogram computed for the specified rectangular area, or identity if `active` is false\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc areaHistogram(extent: CGRect, scale: Float = 1, count: Int = 64, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.areaHistogram() // CIAreaHistogram\n\t\tfilter.inputImage = self\n\t\tfilter.extent = extent\n\t\tfilter.scale = scale\n\t\tfilter.count = count\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Area Logarithmic Histogram\n\t///\n\t/// Calculates histogram of the R, G, B, and A channels of the specified area of an image. Before binning, the R, G, and B channel values are transformed by the log base two function. The output image is a one pixel tall image containing the histogram data for all four channels.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401848-arealogarithmichistogram)\n\t///\n\t/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - extent: A rectangle that defines the extent of the effect.\n\t///   - scale: The amount of the effect. (0...)\n\t///   - count: The number of bins for the histogram. This value will determine the width of the output image. 
(1...2048)\n\t///   - minimumStop: The minimum of the range of color channel values to be in the logarithmic histogram image.\n\t///   - maximumStop: The maximum of the range of color channel values to be in the logarithmic histogram image.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 16, macOS 13.0, *)\n\tfunc areaLogarithmicHistogram(extent: CGRect,\n\t\t\t\t\t\t\t\t  scale: Float = 1,\n\t\t\t\t\t\t\t\t  count: Int = 64,\n\t\t\t\t\t\t\t\t  minimumStop: Float = -10,\n\t\t\t\t\t\t\t\t  maximumStop: Float = 4,\n\t\t\t\t\t\t\t\t  active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.areaLogarithmicHistogram() // CIAreaLogarithmicHistogram\n\t\tfilter.inputImage = self\n\t\tfilter.extent = extent\n\t\tfilter.scale = scale\n\t\tfilter.count = count\n\t\tfilter.minimumStop = minimumStop\n\t\tfilter.maximumStop = maximumStop\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Area Maximum\n\t///\n\t/// Calculates the maximum component values for the specified area in an image, returning the result in a pixel.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547114-areamaximum)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAreaMaximum)\n\t///\n\t/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - extent: A rectangle that specifies the subregion of the image that you want to process.\n\t///   - active: should this filter be applied\n\t/// - Returns: a single-pixel image that contains the maximum color components for the region of interest, or identity if `active` is false\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc areaMaximum(extent: CGRect, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.areaMaximum() // CIAreaMaximum\n\t\tfilter.inputImage = self\n\t\tfilter.extent = extent\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Area Maximum Alpha\n\t///\n\t/// Finds and returns the pixel with the maximum alpha value.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547113-areamaximumalpha)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAreaMaximumAlpha)\n\t///\n\t/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - extent: A rectangle that specifies the subregion of the image that you want to process.\n\t///   - active: should this filter be applied\n\t/// - Returns: a single-pixel image that contains the color vector with the maximum alpha value for the region of interest, or identity if `active` is false\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc areaMaximumAlpha(extent: CGRect, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.areaMaximumAlpha() // CIAreaMaximumAlpha\n\t\tfilter.inputImage = self\n\t\tfilter.extent = extent\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Area Min and Max\n\t///\n\t/// Calculates the per-component minimum and maximum value for the specified area in an image. 
The result is returned in a 2x1 image where the component minimum values are stored in the pixel on the left.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547115-areaminmax)\n\t///\n\t/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - extent: A rectangle that specifies the subregion of the image that you want to process.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc areaMinMax(extent: CGRect, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.areaMinMax() // CIAreaMinMax\n\t\tfilter.inputImage = self\n\t\tfilter.extent = extent\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Area Min and Max Red\n\t///\n\t/// Calculates the minimum and maximum red component value for the specified area in an image. The result is returned in the red and green channels of a one pixel image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547116-areaminmaxred)\n\t///\n\t/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - extent: A rectangle that specifies the subregion of the image that you want to process.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc areaMinMaxRed(extent: CGRect, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.areaMinMaxRed() // CIAreaMinMaxRed\n\t\tfilter.inputImage = self\n\t\tfilter.extent = extent\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Area Minimum\n\t///\n\t/// Calculates the minimum component values for the specified area in an image, returning the result in a pixel.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547118-areaminimum)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAreaMinimum)\n\t///\n\t/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - extent: A rectangle that specifies the subregion of the image that you want to process.\n\t///   - active: should this filter be applied\n\t/// - Returns: a single-pixel image that contains the minimum color components for the region of interest, or identity if `active` is false\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc areaMinimum(extent: CGRect, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.areaMinimum() // CIAreaMinimum\n\t\tfilter.inputImage = self\n\t\tfilter.extent = extent\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Area Minimum Alpha\n\t///\n\t/// Finds and returns the pixel with the minimum alpha value.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547117-areaminimumalpha)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAreaMinimumAlpha)\n\t///\n\t/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - extent: A rectangle that specifies the subregion of the image that you want to process.\n\t///   - active: should this filter be applied\n\t/// - Returns: a single-pixel image that contains the color vector with the minimum alpha value for the region of interest, or identity if `active` is false\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc areaMinimumAlpha(extent: CGRect, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.areaMinimumAlpha() // CIAreaMinimumAlpha\n\t\tfilter.inputImage = self\n\t\tfilter.extent = extent\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Bars Swipe Transition\n\t///\n\t/// Transitions from one image to another by swiping rectangular portions of the foreground image to disclose the target image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228270-barsswipetransition)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIBarsSwipeTransition)\n\t///\n\t/// Categories: Transition, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - targetImage: The target image for a transition.\n\t///   - angle: The angle in radians of the bars.\n\t///   - width: The width of each bar. (2...)\n\t///   - barOffset: The offset of one bar with respect to another. (1...)\n\t///   - time: The parametric time of the transition. This value drives the transition from start (at time 0) to end (at time 1). 
(0...1)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc barsSwipeTransition(targetImage: CIImage,\n\t\t\t\t\t\t\t angle: Float = .pi,\n\t\t\t\t\t\t\t width: Float,\n\t\t\t\t\t\t\t barOffset: Float = 10,\n\t\t\t\t\t\t\t time: Float,\n\t\t\t\t\t\t\t active: Bool = true) -> CIImage {\n\t\t// An angle of 0 still yields a valid transition (axis-aligned bars) driven by `time`,\n\t\t// so the filter must not be skipped when angle == 0; `active` controls application.\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.barsSwipeTransition() // CIBarsSwipeTransition\n\t\tfilter.inputImage = self\n\t\tfilter.targetImage = targetImage\n\t\tfilter.angle = angle\n\t\tfilter.width = width\n\t\tfilter.barOffset = barOffset\n\t\tfilter.time = time\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Bicubic Scale Transform\n\t///\n\t/// Produces a high-quality, scaled version of a source image. The parameters of B and C for this filter determine the sharpness or softness of the resampling. The most commonly used B and C values are 0.0 and 0.75, respectively.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228271-bicubicscaletransform)\n\t///\n\t/// Categories: Geometry Adjustment, Video, Still Image, Non-Square Pixels, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - scale: The scaling factor to use on the image. Values less than 1.0 scale down the images. Values greater than 1.0 scale up the image. (0...)\n\t///   - aspectRatio: The additional horizontal scaling factor to use on the image. (0...)\n\t///   - parameterB: Specifies the value of B to use for the cubic resampling function. (0...1)\n\t///   - parameterC: Specifies the value of C to use for the cubic resampling function. 
(0...1)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc bicubicScaleTransform(scale: Float = 1,\n\t\t\t\t\t\t\t   aspectRatio: Float = 1,\n\t\t\t\t\t\t\t   parameterB: Float = 0,\n\t\t\t\t\t\t\t   parameterC: Float = 0.75) -> CIImage {\n\t\tguard scale != 1 || aspectRatio != 1 else { return self }\n\n\t\tlet filter = CIFilter.bicubicScaleTransform() // CIBicubicScaleTransform\n\t\tfilter.inputImage = self\n\t\tfilter.scale = scale\n\t\tfilter.aspectRatio = aspectRatio\n\t\tfilter.parameterB = parameterB\n\t\tfilter.parameterC = parameterC\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Blend With Alpha Mask\n\t///\n\t/// Uses values from a mask image to interpolate between an image and the background. When a mask alpha value is 0.0, the result is the background. When the mask alpha value is 1.0, the result is the image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228272-blendwithalphamask)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIBlendWithAlphaMask)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t///   - maskImage: A masking image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc blendWithAlphaMask(backgroundImage: CIImage?, maskImage: CIImage) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.blendWithAlphaMask() // CIBlendWithAlphaMask\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\tfilter.maskImage = maskImage\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Blend With Blue Mask\n\t///\n\t/// Uses values from a mask image to interpolate between an image and the background. When a mask blue value is 0.0, the result is the background. When the mask blue value is 1.0, the result is the image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228273-blendwithbluemask)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t///   - maskImage: A masking image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc blendWithBlueMask(backgroundImage: CIImage?, maskImage: CIImage) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.blendWithBlueMask() // CIBlendWithBlueMask\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\tfilter.maskImage = maskImage\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Blend With Mask\n\t///\n\t/// Uses values from a grayscale mask to interpolate between an image and the background. When a mask green value is 0.0, the result is the background. When the mask green value is 1.0, the result is the image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228274-blendwithmask)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIBlendWithMask)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t///   - maskImage: A grayscale mask. When a mask value is 0.0, the result is the background. 
When the mask value is 1.0, the result is the image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc blendWithMask(backgroundImage: CIImage?, maskImage: CIImage) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.blendWithMask() // CIBlendWithMask\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\tfilter.maskImage = maskImage\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Blend With Red Mask\n\t///\n\t/// Uses values from a mask image to interpolate between an image and the background. When a mask red value is 0.0, the result is the background. When the mask red value is 1.0, the result is the image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228275-blendwithredmask)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t///   - maskImage: A masking image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc blendWithRedMask(backgroundImage: CIImage?, maskImage: CIImage) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.blendWithRedMask() // CIBlendWithRedMask\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\tfilter.maskImage = maskImage\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Bloom\n\t///\n\t/// Softens edges and applies a pleasant glow to an image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228276-bloom)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIBloom)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - radius: The radius determines how many pixels are used to create the effect. The larger the radius, the greater the effect. (0...)\n\t///   - intensity: The intensity of the effect. A value of 0.0 is no effect. A value of 1.0 is the maximum effect. (0...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc bloom(radius: Float, intensity: Float = 0.5) -> CIImage {\n\t\tguard radius != 0 || intensity != 0 else { return self }\n\n\t\tlet filter = CIFilter.bloom() // CIBloom\n\t\tfilter.inputImage = self\n\t\tfilter.radius = radius\n\t\tfilter.intensity = intensity\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Bokeh Blur\n\t///\n\t/// Smooths an image using a disc-shaped convolution kernel.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228277-bokehblur)\n\t///\n\t/// Categories: Blur, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - radius: The radius determines how many pixels are used to create the blur. The larger the radius, the blurrier the result. (0...500)\n\t///   - ringAmount: The amount of extra emphasis at the ring of the bokeh. (0...1)\n\t///   - ringSize: The size of extra emphasis at the ring of the bokeh. 
(0...0.2)\n\t///   - softness: The softness of the bokeh effect (0...10)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc bokehBlur(radius: Float,\n\t\t\t\t   ringAmount: Float = 0,\n\t\t\t\t   ringSize: Float = 0.1,\n\t\t\t\t   softness: Float = 1,\n\t\t\t\t   active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.bokehBlur() // CIBokehBlur\n\t\tfilter.inputImage = self\n\t\tfilter.radius = radius\n\t\tfilter.ringAmount = ringAmount\n\t\tfilter.ringSize = ringSize\n\t\tfilter.softness = softness\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Box Blur\n\t///\n\t/// Smooths or sharpens an image using a box-shaped convolution kernel.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228278-boxblur)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIBoxBlur)\n\t///\n\t/// Categories: Blur, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - radius: The radius determines how many pixels are used to create the blur. The larger the radius, the blurrier the result. (1...)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc boxBlur(radius: Float, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.boxBlur() // CIBoxBlur\n\t\tfilter.inputImage = self\n\t\tfilter.radius = radius\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Bump Distortion\n\t///\n\t/// Creates a concave or convex bump that originates at a specified point in the image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401850-bumpdistortion)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIBumpDistortion)\n\t///\n\t/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - radius: The radius determines how many pixels are used to create the distortion. The larger the radius, the wider the extent of the distortion. (0...)\n\t///   - scale: The scale of the effect determines the curvature of the bump. A value of 0.0 has no effect. Positive values create an outward bump; negative values create an inward bump.\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc bumpDistortion(center: CGPoint, radius: Float, scale: Float = 0.5) -> CIImage {\n\t\tguard scale != 0 else { return self }\n\n\t\tlet filter = CIFilter.bumpDistortion() // CIBumpDistortion\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.radius = radius\n\t\tfilter.scale = scale\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Bump Distortion Linear\n\t///\n\t/// Creates a concave or convex distortion that originates from a line in the image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401851-bumpdistortionlinear)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIBumpDistortionLinear)\n\t///\n\t/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - radius: The radius determines how many pixels are used to create the distortion. The larger the radius, the wider the extent of the distortion. (0...)\n\t///   - angle: The angle in radians of the line around which the distortion occurs.\n\t///   - scale: The scale of the effect. (-1...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc bumpDistortionLinear(center: CGPoint, radius: Float, angle: Float = 0, scale: Float = 0.5) -> CIImage {\n\t\t// The effect is an identity when the bump has no curvature (scale == 0) or no extent\n\t\t// (radius == 0); `scale != 1` was the wrong identity test for this filter (cf. bumpDistortion).\n\t\tguard scale != 0, radius != 0 else { return self }\n\n\t\tlet filter = CIFilter.bumpDistortionLinear() // CIBumpDistortionLinear\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.radius = radius\n\t\tfilter.angle = angle\n\t\tfilter.scale = scale\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// CMYK Halftone\n\t///\n\t/// Creates a color, halftoned rendition of the source image, using cyan, magenta, yellow, and black inks over a white page.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228259-cmykhalftone)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CICMYKHalftone)\n\t///\n\t/// Categories: Halftone Effect, Video, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - width: The distance between dots in the pattern. (-2...)\n\t///   - angle: The angle in radians of the pattern.\n\t///   - sharpness: The sharpness of the pattern. The larger the value, the sharper the pattern. (0...)\n\t///   - grayComponentReplacement: The gray component replacement value. The value can vary from 0.0 (none) to 1.0. (0...)\n\t///   - underColorRemoval: The under color removal value. The value can vary from 0.0 to 1.0.  (0...)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc cmykHalftone(center: CGPoint,\n\t\t\t\t\t  width: Float,\n\t\t\t\t\t  angle: Float = 0,\n\t\t\t\t\t  sharpness: Float,\n\t\t\t\t\t  grayComponentReplacement: Float = 1,\n\t\t\t\t\t  underColorRemoval: Float = 0.5,\n\t\t\t\t\t  active: Bool = true) -> CIImage {\n\t\t// Halftoning is never an identity operation for any parameter values (an angle of 0\n\t\t// still halftones), so the filter is skipped only when explicitly inactive.\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.cmykHalftone() // CICMYKHalftone\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.width = width\n\t\tfilter.angle = angle\n\t\tfilter.sharpness = sharpness\n\t\tfilter.grayComponentReplacement = grayComponentReplacement\n\t\tfilter.underColorRemoval = underColorRemoval\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Lens Correction for AVC\n\t///\n\t/// Geometrically distorts an image by altering the magnification based on the radial distance from the optical center to the farthest radius.\n\t///\n\t/// ⚠️ No documentation available for CICameraCalibrationLensCorrection\n\t///\n\t/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - avcameracalibrationdata: AVCameraCalibrationData for the correction. Will be set from the input image if available and can be overridden here.\n\t///   - useInverseLookupTable: Boolean value used to select the Look Up Table from the AVCameraCalibrationData.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc cameraCalibrationLensCorrection(avcameracalibrationdata: AVCameraCalibrationData,\n\t\t\t\t\t\t\t\t\t\t useInverseLookupTable: Bool = false,\n\t\t\t\t\t\t\t\t\t\t active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\t// Filter not included in CoreImage.CIFilterBuiltins; using dictionary-based method.\n\t\t// The source image must be supplied via kCIInputImageKey (\"inputImage\"); without it\n\t\t// the filter has no input and `outputImage` is nil.\n\t\tguard let filter = CIFilter(name: \"CICameraCalibrationLensCorrection\", parameters: [\n\t\t\t\"inputImage\": self,\n\t\t\t\"inputAVCameraCalibrationData\": avcameracalibrationdata,\n\t\t\t\"inputUseInverseLookUpTable\": useInverseLookupTable,\n\t\t]) else { return self }\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Canny Edge Detector\n\t///\n\t/// Applies the Canny Edge Detection algorithm to an image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401852-cannyedgedetector)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - gaussianSigma: The gaussian sigma of blur to apply to the image to reduce high-frequency noise. 
(0...)\n\t///   - perceptual: Specifies whether the edge thresholds should be computed in a perceptual color space.\n\t///   - thresholdHigh: The threshold that determines if gradient magnitude is a strong edge. (0...)\n\t///   - thresholdLow: The threshold that determines if gradient magnitude is a weak edge. (0...)\n\t///   - hysteresisPasses: The number of hysteresis passes to apply to promote weak edge pixels. (0...20)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 17, macOS 14.0, *)\n\tfunc cannyEdgeDetector(gaussianSigma: Float = 1.6,\n\t\t\t\t\t\t   perceptual: Bool = false,\n\t\t\t\t\t\t   thresholdHigh: Float = 0.05,\n\t\t\t\t\t\t   thresholdLow: Float = 0.02,\n\t\t\t\t\t\t   hysteresisPasses: Int,\n\t\t\t\t\t\t   active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.cannyEdgeDetector() // CICannyEdgeDetector\n\t\tfilter.inputImage = self\n\t\tfilter.gaussianSigma = gaussianSigma\n\t\tfilter.perceptual = perceptual\n\t\tfilter.thresholdHigh = thresholdHigh\n\t\tfilter.thresholdLow = thresholdLow\n\t\tfilter.hysteresisPasses = hysteresisPasses\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Circle Splash Distortion\n\t///\n\t/// Distorts the pixels starting at the circumference of a circle and emanating outward.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401853-circlesplashdistortion)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CICircleSplashDistortion)\n\t///\n\t/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - radius: The radius determines how many pixels are used to create the distortion. The larger the radius, the wider the extent of the distortion. 
(0...)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc circleSplashDistortion(center: CGPoint, radius: Float, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.circleSplashDistortion() // CICircleSplashDistortion\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.radius = radius\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Circular Screen\n\t///\n\t/// Simulates a circular-shaped halftone screen.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228280-circularscreen)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CICircularScreen)\n\t///\n\t/// Categories: Halftone Effect, Video, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - width: The distance between each circle in the pattern. (1...)\n\t///   - sharpness: The sharpness of the circles. The larger the value, the sharper the circles. (0...1)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc circularScreen(center: CGPoint, width: Float, sharpness: Float = 0.7, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.circularScreen() // CICircularScreen\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.width = width\n\t\tfilter.sharpness = sharpness\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Circular Wrap Distortion\n\t///\n\t/// Wraps an image around a transparent circle. 
The distortion of the image increases with the distance from the center of the circle.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401854-circularwrap)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CICircularWrap)\n\t///\n\t/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - radius: The radius determines how many pixels are used to create the distortion. The larger the radius, the wider the extent of the distortion. (0...)\n\t///   - angle: The angle in radians of the effect.\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc circularWrap(center: CGPoint, radius: Float, angle: Float = 0) -> CIImage {\n\t\tguard angle != 0 else { return self }\n\n\t\tlet filter = CIFilter.circularWrap() // CICircularWrap\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.radius = radius\n\t\tfilter.angle = angle\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t// ℹ️ CIClamp already has a CIImage method: func clamped(to: CGRect) -> CIImage\n\n\n\t/// Color Absolute Difference\n\t///\n\t/// Produces an image that is the absolute value of the color difference between two images. 
The alpha channel of the result will be the product of the two image alpha channels.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547119-colorabsolutedifference)\n\t///\n\t/// Categories: Color Adjustment, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - inputImage2: The second input image for differencing.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc colorAbsoluteDifference(inputImage2: CIImage, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.colorAbsoluteDifference() // CIColorAbsoluteDifference\n\t\tfilter.inputImage = self\n\t\tfilter.inputImage2 = inputImage2\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Color Blend Mode\n\t///\n\t/// Uses the luminance values of the background with the hue and saturation values of the source image. This mode preserves the gray levels in the image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228282-colorblendmode)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorBlendMode)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc colorBlendMode(backgroundImage: CIImage?) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.colorBlendMode() // CIColorBlendMode\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Color Burn Blend Mode\n\t///\n\t/// Darkens the background image samples to reflect the source image samples. Source image sample values that specify white do not produce a change.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228283-colorburnblendmode)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorBurnBlendMode)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc colorBurnBlendMode(backgroundImage: CIImage?) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.colorBurnBlendMode() // CIColorBurnBlendMode\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Color Clamp\n\t///\n\t/// Modifies color values to keep them within a specified range.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228284-colorclamp)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorClamp)\n\t///\n\t/// Categories: Color Adjustment, Video, Interlaced, Non-Square Pixels, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - minComponents: Lower clamping values.\n\t///   - maxComponents: Higher clamping values.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc colorClamp(minComponents: CIVector, maxComponents: CIVector, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.colorClamp() // CIColorClamp\n\t\tfilter.inputImage = self\n\t\tfilter.minComponents = minComponents\n\t\tfilter.maxComponents = maxComponents\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Color Controls\n\t///\n\t/// Adjusts saturation, brightness, and contrast values.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228285-colorcontrols)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorControls)\n\t///\n\t/// Categories: Color Adjustment, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - saturation: The amount of saturation to apply. The larger the value, the more saturated the result. (0...)\n\t///   - brightness: The amount of brightness to apply. The larger the value, the brighter the result. (-1...)\n\t///   - contrast: The amount of contrast to apply. The larger the value, the more contrast in the resulting image. 
(0...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc colorControls(saturation: Float = 1, brightness: Float = 0, contrast: Float = 1) -> CIImage {\n\t\tguard saturation != 1 || brightness != 0 || contrast != 1 else { return self }\n\n\t\tlet filter = CIFilter.colorControls() // CIColorControls\n\t\tfilter.inputImage = self\n\t\tfilter.saturation = saturation\n\t\tfilter.brightness = brightness\n\t\tfilter.contrast = contrast\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Color Cross Polynomial\n\t///\n\t/// Modifies the pixel values in an image by applying a set of polynomial cross-products.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228286-colorcrosspolynomial)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorCrossPolynomial)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - redCoefficients: Polynomial coefficients for red channel.\n\t///   - greenCoefficients: Polynomial coefficients for green channel.\n\t///   - blueCoefficients: Polynomial coefficients for blue channel.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc colorCrossPolynomial(redCoefficients: CIVector,\n\t\t\t\t\t\t\t  greenCoefficients: CIVector,\n\t\t\t\t\t\t\t  blueCoefficients: CIVector,\n\t\t\t\t\t\t\t  active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.colorCrossPolynomial() // CIColorCrossPolynomial\n\t\tfilter.inputImage = self\n\t\tfilter.redCoefficients = redCoefficients\n\t\tfilter.greenCoefficients = greenCoefficients\n\t\tfilter.blueCoefficients = blueCoefficients\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Color Cube\n\t///\n\t/// Uses a three-dimensional color table to transform the source image pixels.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228287-colorcube)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorCube)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - cubeDimension: The dimension of the color cube.(2...64 iOS; 2...128 macOS)\n\t///   - cubeData: Data containing a 3-dimensional color table of floating-point premultiplied RGBA values. The cells are organized in a standard ordering. The columns and rows of the data are indexed by red and green, respectively. Each data plane is followed by the next higher plane in the data, with planes indexed by blue.\n\t///   - extrapolate: If true, then the color cube will be extrapolated if the input image contains RGB component values outside the range 0.0 to 1.0.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 16, macOS 13.0, *)\n\tfunc colorCube(cubeDimension: Int, cubeData: Data, extrapolate: Bool = false, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.colorCube() // CIColorCube\n\t\tfilter.inputImage = self\n\t\tfilter.cubeDimension = Float(cubeDimension)\n\t\tfilter.cubeData = cubeData\n\t\tfilter.extrapolate = extrapolate\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Color Cube with ColorSpace\n\t///\n\t/// Uses a three-dimensional color table to transform the source image pixels and maps the result to a specified color space.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228288-colorcubewithcolorspace)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorCubeWithColorSpace)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - cubeDimension: The dimension of the color cube.(2...64 iOS; 2...128 macOS)\n\t///   - cubeData: Data containing a 3-dimensional color table of floating-point premultiplied RGBA values. The cells are organized in a standard ordering. The columns and rows of the data are indexed by red and green, respectively. Each data plane is followed by the next higher plane in the data, with planes indexed by blue.\n\t///   - extrapolate: If true, then the color cube will be extrapolated if the input image contains RGB component values outside the range 0.0 to 1.0.\n\t///   - colorSpace: The CGColorSpace that defines the RGB values in the color table.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 16, macOS 13.0, *)\n\tfunc colorCubeWithColorSpace(cubeDimension: Int,\n\t\t\t\t\t\t\t\t cubeData: Data,\n\t\t\t\t\t\t\t\t extrapolate: Bool,\n\t\t\t\t\t\t\t\t colorSpace: CGColorSpace,\n\t\t\t\t\t\t\t\t active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.colorCubeWithColorSpace() // CIColorCubeWithColorSpace\n\t\tfilter.inputImage = self\n\t\tfilter.cubeDimension = Float(cubeDimension)\n\t\tfilter.cubeData = cubeData\n\t\tfilter.extrapolate = extrapolate\n\t\tfilter.colorSpace = colorSpace\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Color Cubes Mixed With Mask\n\t///\n\t/// Uses two three-dimensional color tables in a specified colorspace to transform the source image pixels. The mask image is used as an interpolant to mix the output of the two cubes.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228289-colorcubesmixedwithmask)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - maskImage: A masking image.\n\t///   - cubeDimension: The dimension of the color cubes.(2...64 iOS; 2...128 macOS)\n\t///   - cube0Data: Data containing a 3-dimensional color table of floating-point premultiplied RGBA values. The cells are organized in a standard ordering. The columns and rows of the data are indexed by red and green, respectively. Each data plane is followed by the next higher plane in the data, with planes indexed by blue.\n\t///   - cube1Data: Data containing a 3-dimensional color table of floating-point premultiplied RGBA values. The cells are organized in a standard ordering. The columns and rows of the data are indexed by red and green, respectively. 
Each data plane is followed by the next higher plane in the data, with planes indexed by blue.\n\t///   - colorSpace: The CGColorSpace that defines the RGB values in the color table.\n\t///   - extrapolate: If true, then the color cube will be extrapolated if the input image contains RGB component values outside the range 0 to 1.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 16, macOS 13.0, *)\n\tfunc colorCubesMixedWithMask(maskImage: CIImage,\n\t\t\t\t\t\t\t\t cubeDimension: Int,\n\t\t\t\t\t\t\t\t cube0Data: Data,\n\t\t\t\t\t\t\t\t cube1Data: Data,\n\t\t\t\t\t\t\t\t colorSpace: CGColorSpace,\n\t\t\t\t\t\t\t\t extrapolate: Bool = false,\n\t\t\t\t\t\t\t\t active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.colorCubesMixedWithMask() // CIColorCubesMixedWithMask\n\t\tfilter.inputImage = self\n\t\tfilter.maskImage = maskImage\n\t\tfilter.cubeDimension = Float(cubeDimension)\n\t\tfilter.cube0Data = cube0Data\n\t\tfilter.cube1Data = cube1Data\n\t\tfilter.colorSpace = colorSpace\n\t\tfilter.extrapolate = extrapolate\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Color Curves\n\t///\n\t/// Uses a three-channel one-dimensional color table to transform the source image pixels.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228290-colorcurves)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - curvesData: Data containing a color table of floating-point RGB values.\n\t///   - curvesDomain: A two-element vector that defines the minimum and maximum RGB component values that are used to look up result values from the color table.\n\t///   - colorSpace: The CGColorSpace that defines the RGB values in the color table.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc colorCurves(curvesData: Data, curvesDomain: CIVector, colorSpace: CGColorSpace, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.colorCurves() // CIColorCurves\n\t\tfilter.inputImage = self\n\t\tfilter.curvesData = curvesData\n\t\tfilter.curvesDomain = curvesDomain\n\t\tfilter.colorSpace = colorSpace\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Color Dodge Blend Mode\n\t///\n\t/// Brightens the background image samples to reflect the source image samples. 
Source image sample values that specify black do not produce a change.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228291-colordodgeblendmode)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorDodgeBlendMode)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc colorDodgeBlendMode(backgroundImage: CIImage?) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.colorDodgeBlendMode() // CIColorDodgeBlendMode\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Color Invert\n\t///\n\t/// Inverts the colors in an image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228292-colorinvert)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorInvert)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc colorInvert(active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.colorInvert() // CIColorInvert\n\t\tfilter.inputImage = self\n\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Color Map\n\t///\n\t/// Performs a nonlinear transformation of source color values using mapping values provided in a table.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228293-colormap)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorMap)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - gradientImage: The image data from this image transforms the source image values.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc colorMap(gradientImage: CIImage, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.colorMap() // CIColorMap\n\t\tfilter.inputImage = self\n\t\tfilter.gradientImage = gradientImage\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Color Matrix\n\t///\n\t/// Multiplies source color values and adds a bias factor to each color component.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228294-colormatrix)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorMatrix)\n\t///\n\t/// Categories: Color Adjustment, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - rVector: The amount of red to multiply the source color values by.\n\t///   - gVector: The amount of green to multiply the source color values by.\n\t///   - bVector: The amount of blue to multiply the source color values by.\n\t///   - aVector: The amount of alpha to multiply the source color values by.\n\t///   - biasVector: A vector that’s added to each color component.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc colorMatrix(rVector: CIVector,\n\t\t\t\t\t gVector: CIVector,\n\t\t\t\t\t bVector: CIVector,\n\t\t\t\t\t aVector: CIVector,\n\t\t\t\t\t biasVector: CIVector,\n\t\t\t\t\t active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.colorMatrix() // CIColorMatrix\n\t\tfilter.inputImage = self\n\t\tfilter.rVector = rVector\n\t\tfilter.gVector = gVector\n\t\tfilter.bVector = bVector\n\t\tfilter.aVector = aVector\n\t\tfilter.biasVector = biasVector\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Color Monochrome\n\t///\n\t/// Remaps colors so they fall within shades of a single color.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228295-colormonochrome)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorMonochrome)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - color: The monochrome color to apply to the image.\n\t///   - intensity: The intensity of the monochrome effect. A value of 1.0 creates a monochrome image using the supplied color. A value of 0.0 has no effect on the image. (0...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc colorMonochrome(color: CIColor, intensity: Float = 1) -> CIImage {\n\t\tguard intensity != 0 else { return self }\n\n\t\tlet filter = CIFilter.colorMonochrome() // CIColorMonochrome\n\t\tfilter.inputImage = self\n\t\tfilter.color = color\n\t\tfilter.intensity = intensity\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Color Polynomial\n\t///\n\t/// Modifies the pixel values in an image by applying a set of cubic polynomials.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228296-colorpolynomial)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorPolynomial)\n\t///\n\t/// Categories: Color Adjustment, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - redCoefficients: Polynomial coefficients for red channel.\n\t///   - greenCoefficients: Polynomial coefficients for green channel.\n\t///   - blueCoefficients: Polynomial coefficients for blue channel.\n\t///   - alphaCoefficients: Polynomial coefficients for alpha channel.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc colorPolynomial(redCoefficients: CIVector,\n\t\t\t\t\t\t greenCoefficients: CIVector,\n\t\t\t\t\t\t blueCoefficients: CIVector,\n\t\t\t\t\t\t alphaCoefficients: CIVector,\n\t\t\t\t\t\t active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.colorPolynomial() // CIColorPolynomial\n\t\tfilter.inputImage = self\n\t\tfilter.redCoefficients = redCoefficients\n\t\tfilter.greenCoefficients = greenCoefficients\n\t\tfilter.blueCoefficients = blueCoefficients\n\t\tfilter.alphaCoefficients = alphaCoefficients\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Color Posterize\n\t///\n\t/// Remaps red, green, and blue color components to the number of brightness values you specify for each color component. 
This filter flattens colors to achieve a look similar to that of a silk-screened poster.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228297-colorposterize)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColorPosterize)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - levels: The number of brightness levels to use for each color component. Lower values result in a more extreme poster effect. (1...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc colorPosterize(levels: Float = 6) -> CIImage {\n\t\tguard levels != 300 else { return self }\n\n\t\tlet filter = CIFilter.colorPosterize() // CIColorPosterize\n\t\tfilter.inputImage = self\n\t\tfilter.levels = levels\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Color Threshold\n\t///\n\t/// Produces a binarized image from an image and a threshold value. 
The red, green and blue channels of the resulting image will be one if its value is greater than the threshold and zero otherwise.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547120-colorthreshold)\n\t///\n\t/// Categories: Color Adjustment, Video, Interlaced, Non-Square Pixels, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - threshold: The threshold value that governs if the RGB channels of the resulting image will be zero or one.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc colorThreshold(threshold: Float = 0.5, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.colorThreshold() // CIColorThreshold\n\t\tfilter.inputImage = self\n\t\tfilter.threshold = threshold\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Color Threshold Otsu\n\t///\n\t/// Produces a binarized image from an image with finite extent. The threshold is calculated from the image histogram using Otsu’s method. The red, green and blue channels of the resulting image will be one if its value is greater than the threshold and zero otherwise.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401855-colorthresholdotsu)\n\t///\n\t/// Categories: Color Adjustment, Video, Interlaced, Non-Square Pixels, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc colorThresholdOtsu(active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.colorThresholdOtsu() // CIColorThresholdOtsu\n\t\tfilter.inputImage = self\n\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Column Average\n\t///\n\t/// Calculates the average color for each column of the specified area in an image, returning the result in a 1D image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547121-columnaverage)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIColumnAverage)\n\t///\n\t/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - extent: A rectangle that specifies the subregion of the image that you want to process.\n\t///   - active: should this filter be applied\n\t/// - Returns: a 1-pixel high image that contains the average color for each scan column, or identity if `active` is false\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc columnAverage(extent: CGRect, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.columnAverage() // CIColumnAverage\n\t\tfilter.inputImage = self\n\t\tfilter.extent = extent\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Comic Effect\n\t///\n\t/// Simulates a comic book drawing by outlining edges and applying a color halftone effect.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228298-comiceffect)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIComicEffect)\n\t///\n\t/// Categories: Stylize, Video, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc comicEffect(active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.comicEffect() // CIComicEffect\n\t\tfilter.inputImage = self\n\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Convert Lab to RGB\n\t///\n\t/// Converts an image from La*b* color space to the Core Image RGB working space.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401856-convertlabtorgb)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - normalize: If normalize is false then the L channel is in the range 0 to 100 and the a*b* channels are in the range -128 to 128. If normalize is true then the La*b* channels are in the range 0 to 1.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 16, macOS 13.0, *)\n\tfunc convertLabToRGB(normalize: Bool = false, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.convertLabToRGB() // CIConvertLabToRGB\n\t\tfilter.inputImage = self\n\t\tfilter.normalize = normalize\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Convert RGB to Lab\n\t///\n\t/// Converts an image from the Core Image RGB working space to La*b* color space.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401857-convertrgbtolab)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - normalize: If normalize is false then the L channel is in the range 0 to 100 and the a*b* channels are in the range -128 to 128. 
If normalize is true then the La*b* channels are in the range 0 to 1.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 16, macOS 13.0, *)\n\tfunc convertRGBtoLab(normalize: Bool = false, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.convertRGBtoLab() // CIConvertRGBtoLab\n\t\tfilter.inputImage = self\n\t\tfilter.normalize = normalize\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// 3 by 3 Convolution\n\t///\n\t/// Modifies pixel values by performing a 3x3 matrix convolution.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228299-convolution3x3)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIConvolution3X3)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - weights: A vector containing the 9 weights of the convolution kernel.\n\t///   - bias: A value that is added to the RGBA components of the output pixel.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc convolution3X3(weights: CIVector, bias: Float = 0, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.convolution3X3() // CIConvolution3X3\n\t\tfilter.inputImage = self\n\t\tfilter.weights = weights\n\t\tfilter.bias = bias\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// 5 by 5 Convolution\n\t///\n\t/// Modifies pixel values by performing a 5x5 matrix convolution.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228300-convolution5x5)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIConvolution5X5)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - weights: A vector containing the 25 weights of the convolution kernel.\n\t///   - bias: A value that is added to the RGBA components of the output pixel.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc convolution5X5(weights: CIVector, bias: Float = 0, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.convolution5X5() // CIConvolution5X5\n\t\tfilter.inputImage = self\n\t\tfilter.weights = weights\n\t\tfilter.bias = bias\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// 7 by 7 Convolution\n\t///\n\t/// Modifies pixel values by performing a 7x7 matrix convolution.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228301-convolution7x7)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIConvolution7X7)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - weights: A vector containing the 49 weights of the convolution kernel.\n\t///   - bias: A value that is added to the RGBA components of the output pixel.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc convolution7X7(weights: CIVector, bias: Float = 0, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.convolution7X7() // CIConvolution7X7\n\t\tfilter.inputImage = self\n\t\tfilter.weights = weights\n\t\tfilter.bias = bias\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Horizontal 9 Convolution\n\t///\n\t/// Modifies pixel values by performing a 9-element horizontal convolution.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228302-convolution9horizontal)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIConvolution9Horizontal)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - weights: A vector containing the 9 weights of the convolution kernel.\n\t///   - bias: A value that is added to the RGBA components of the output pixel.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc convolution9Horizontal(weights: CIVector, bias: Float = 0, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.convolution9Horizontal() // CIConvolution9Horizontal\n\t\tfilter.inputImage = self\n\t\tfilter.weights = weights\n\t\tfilter.bias = bias\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Vertical 9 Convolution\n\t///\n\t/// Modifies pixel values by performing a 9-element vertical convolution.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228303-convolution9vertical)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIConvolution9Vertical)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - weights: A vector containing the 9 weights of the convolution kernel.\n\t///   - bias: A value that is added to the RGBA components of the output pixel.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc convolution9Vertical(weights: CIVector, bias: Float = 0, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.convolution9Vertical() // CIConvolution9Vertical\n\t\tfilter.inputImage = self\n\t\tfilter.weights = weights\n\t\tfilter.bias = bias\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// 3 by 3 RGB Convolution\n\t///\n\t/// Convolution of RGB channels with 3 by 3 matrix.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401858-convolutionrgb3x3)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - weights: A vector containing the 9 weights of the convolution kernel.\n\t///   - bias: A value that is added to the RGB components of the output pixel.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 15, macOS 12.0, *)\n\tfunc convolutionRGB3X3(weights: CIVector, bias: Float = 0, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.convolutionRGB3X3() // CIConvolutionRGB3X3\n\t\tfilter.inputImage = self\n\t\tfilter.weights = weights\n\t\tfilter.bias = bias\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// 5 by 5 RGB Convolution\n\t///\n\t/// Convolution of RGB channels with 5 by 5 matrix.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401859-convolutionrgb5x5)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - weights: A vector containing the 25 weights of the convolution kernel.\n\t///   - bias: A value that is added to the RGB components of the output pixel.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 15, macOS 12.0, *)\n\tfunc convolutionRGB5X5(weights: CIVector, bias: Float = 0, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.convolutionRGB5X5() // CIConvolutionRGB5X5\n\t\tfilter.inputImage = self\n\t\tfilter.weights = weights\n\t\tfilter.bias = bias\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// 7 by 7 RGB Convolution\n\t///\n\t/// Convolution of RGB channels with 7 by 7 matrix.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401860-convolutionrgb7x7)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - weights: A vector containing the 49 weights of the convolution kernel.\n\t///   - bias: A value that is added to the RGB components of the output pixel.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 15, macOS 12.0, *)\n\tfunc convolutionRGB7X7(weights: CIVector, bias: Float = 0, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.convolutionRGB7X7() // CIConvolutionRGB7X7\n\t\tfilter.inputImage = self\n\t\tfilter.weights = weights\n\t\tfilter.bias = bias\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Horizontal 9 RGB Convolution\n\t///\n\t/// Horizontal Convolution of RGB channels with 9 values.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401861-convolutionrgb9horizontal)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - weights: A vector containing the 9 weights of the convolution kernel.\n\t///   - bias: A value that is added to the RGB components of the output pixel.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 15, macOS 12.0, *)\n\tfunc convolutionRGB9Horizontal(weights: CIVector, bias: Float = 0, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.convolutionRGB9Horizontal() // CIConvolutionRGB9Horizontal\n\t\tfilter.inputImage = self\n\t\tfilter.weights = weights\n\t\tfilter.bias = bias\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Vertical 9 RGB Convolution\n\t///\n\t/// Vertical Convolution of RGB channels with 9 values.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401862-convolutionrgb9vertical)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - weights: A vector containing the 9 weights of the convolution kernel.\n\t///   - bias: A value that is added to the RGB components of the output pixel.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 15, macOS 12.0, *)\n\tfunc convolutionRGB9Vertical(weights: CIVector, bias: Float = 0, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.convolutionRGB9Vertical() // CIConvolutionRGB9Vertical\n\t\tfilter.inputImage = self\n\t\tfilter.weights = weights\n\t\tfilter.bias = bias\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Copy Machine\n\t///\n\t/// Transitions from one image to another by simulating the effect of a copy machine.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228304-copymachinetransition)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CICopyMachineTransition)\n\t///\n\t/// Categories: Transition, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - targetImage: The target image for a transition.\n\t///   - extent: A rectangle that defines the extent of the effect.\n\t///   - color: The color of the copier light.\n\t///   - time: The parametric time of the transition. This value drives the transition from start (at time 0) to end (at time 1). (0...1)\n\t///   - angle: The angle in radians of the copier light. (0...)\n\t///   - width: The width of the copier light.  (0.1...)\n\t///   - opacity: The opacity of the copier light. 
A value of 0.0 is transparent. A value of 1.0 is opaque. (0...)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc copyMachineTransition(targetImage: CIImage,\n\t\t\t\t\t\t\t   extent: CGRect,\n\t\t\t\t\t\t\t   color: CIColor,\n\t\t\t\t\t\t\t   time: Float,\n\t\t\t\t\t\t\t   angle: Float = 0,\n\t\t\t\t\t\t\t   width: Float,\n\t\t\t\t\t\t\t   opacity: Float = 1.3,\n\t\t\t\t\t\t\t   active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.copyMachineTransition() // CICopyMachineTransition\n\t\tfilter.inputImage = self\n\t\tfilter.targetImage = targetImage\n\t\tfilter.extent = extent\n\t\tfilter.color = color\n\t\tfilter.time = time\n\t\tfilter.angle = angle\n\t\tfilter.width = width\n\t\tfilter.opacity = opacity\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// CoreML Model Filter\n\t///\n\t/// Generates output image by applying input CoreML model to the input image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228305-coremlmodel)\n\t/// [WWDC Video](https://developer.apple.com/videos/play/wwdc2018-719/?time=2378)\n\t///\n\t/// Categories: Still Image, Built-In, Stylize\n\t///\n\t///\n\t/// - Parameters:\n\t///   - model: The CoreML model to be used for applying effect on the image.\n\t///   - headIndex: A number to specify which output of a multi-head CoreML model should be used for applying effect on the image. 
(0...10)\n\t///   - softmaxNormalization: A boolean value to specify that Softmax normalization should be applied to the output of the model.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc coreMLModel(model: MLModel, headIndex: Int, softmaxNormalization: Bool = false, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.coreMLModel() // CICoreMLModelFilter\n\t\tfilter.inputImage = self\n\t\tfilter.model = model\n\t\tfilter.headIndex = Float(headIndex)\n\t\tfilter.softmaxNormalization = softmaxNormalization\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t// ℹ️ CICrop already has a CIImage method: func cropped(to: CGRect) -> CIImage\n\n\n\t/// Crystallize\n\t///\n\t/// Creates polygon-shaped color blocks by aggregating source pixel-color values.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228306-crystallize)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CICrystallize)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - radius: The radius determines how many pixels are used to create the effect. The larger the radius, the larger the resulting crystals. (1...)\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc crystallize(radius: Float, center: CGPoint, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.crystallize() // CICrystallize\n\t\tfilter.inputImage = self\n\t\tfilter.radius = radius\n\t\tfilter.center = center\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Darken Blend Mode\n\t///\n\t/// Creates composite image samples by choosing the darker samples (from either the source image or the background). The result is that the background image samples are replaced by any source image samples that are darker. Otherwise, the background image samples are left unchanged.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228307-darkenblendmode)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIDarkenBlendMode)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc darkenBlendMode(backgroundImage: CIImage?) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.darkenBlendMode() // CIDarkenBlendMode\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t// ℹ️ CIDepthBlurEffect already has a CIImage method: func depthBlurEffectFilter(for...) 
-> CIImage\n\t// https://developer.apple.com/documentation/coreimage/cicontext#4375374\n\n\n\t/// Depth of Field\n\t///\n\t/// Simulates miniaturization effect created by Tilt & Shift lens by performing depth of field effects.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228308-depthoffield)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIDepthOfField)\n\t///\n\t/// Categories: Stylize, Video, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - point0: A set of coordinates marking the first point to be focused on\n\t///   - point1: A set of coordinates marking the second point to be focused on\n\t///   - saturation: The amount to adjust the saturation. (0...)\n\t///   - unsharpMaskRadius: The radius of the unsharpened mask effect applied to the in-focus area of effect (0...)\n\t///   - unsharpMaskIntensity: The intensity of the unsharp mask effect (0...)\n\t///   - radius: The distance from the center of the effect. (0...)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc depthOfField(point0: CGPoint,\n\t\t\t\t\t  point1: CGPoint,\n\t\t\t\t\t  saturation: Float = 1.5,\n\t\t\t\t\t  unsharpMaskRadius: Float = 2.5,\n\t\t\t\t\t  unsharpMaskIntensity: Float = 0.5,\n\t\t\t\t\t  radius: Float = 6,\n\t\t\t\t\t  active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.depthOfField() // CIDepthOfField\n\t\tfilter.inputImage = self\n\t\tfilter.point0 = point0\n\t\tfilter.point1 = point1\n\t\tfilter.saturation = saturation\n\t\tfilter.unsharpMaskRadius = unsharpMaskRadius\n\t\tfilter.unsharpMaskIntensity = unsharpMaskIntensity\n\t\tfilter.radius = radius\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Depth To Disparity\n\t///\n\t/// Convert a depth data image to disparity data.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228309-depthtodisparity)\n\t///\n\t/// Categories: Color Adjustment, Video, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc depthToDisparity(active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.depthToDisparity() // CIDepthToDisparity\n\t\tfilter.inputImage = self\n\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Difference Blend Mode\n\t///\n\t/// Subtracts either the source image sample color from the background image sample color, or the reverse, depending on which sample has the greater brightness value. Source image sample values that are black produce no change; white inverts the background color values.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228310-differenceblendmode)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIDifferenceBlendMode)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc differenceBlendMode(backgroundImage: CIImage?) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.differenceBlendMode() // CIDifferenceBlendMode\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Disc Blur\n\t///\n\t/// Smooths an image using a disc-shaped convolution kernel.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228311-discblur)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIDiscBlur)\n\t///\n\t/// Categories: Blur, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - radius: The radius determines how many pixels are used to create the blur. The larger the radius, the blurrier the result. (0...)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc discBlur(radius: Float, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.discBlur() // CIDiscBlur\n\t\tfilter.inputImage = self\n\t\tfilter.radius = radius\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Disintegrate With Mask\n\t///\n\t/// Transitions from one image to another using the shape defined by a mask.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228312-disintegratewithmasktransition)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIDisintegrateWithMaskTransition)\n\t///\n\t/// Categories: Transition, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - targetImage: The target image for a transition.\n\t///   - maskImage: An image that defines the shape to use when disintegrating from the source to the target image.\n\t///   - time: The parametric time of the transition. This value drives the transition from start (at time 0) to end (at time 1). (0...1)\n\t///   - shadowRadius: The radius of the shadow created by the mask. (0...)\n\t///   - shadowDensity: The density of the shadow created by the mask. 
(0...1)\n\t///   - shadowOffset: The offset of the shadow created by the mask.\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc disintegrateWithMaskTransition(targetImage: CIImage,\n\t\t\t\t\t\t\t\t\t\tmaskImage: CIImage,\n\t\t\t\t\t\t\t\t\t\ttime: Float,\n\t\t\t\t\t\t\t\t\t\tshadowRadius: Float,\n\t\t\t\t\t\t\t\t\t\tshadowDensity: Float = 0.65,\n\t\t\t\t\t\t\t\t\t\tshadowOffset: CGPoint) -> CIImage {\n\t\tguard shadowDensity != 0 else { return self }\n\n\t\tlet filter = CIFilter.disintegrateWithMaskTransition() // CIDisintegrateWithMaskTransition\n\t\tfilter.inputImage = self\n\t\tfilter.targetImage = targetImage\n\t\tfilter.maskImage = maskImage\n\t\tfilter.time = time\n\t\tfilter.shadowRadius = shadowRadius\n\t\tfilter.shadowDensity = shadowDensity\n\t\tfilter.shadowOffset = shadowOffset\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Disparity To Depth\n\t///\n\t/// Convert a disparity data image to depth data.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228313-disparitytodepth)\n\t///\n\t/// Categories: Color Adjustment, Video, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc disparityToDepth(active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.disparityToDepth() // CIDisparityToDepth\n\t\tfilter.inputImage = self\n\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Displacement Distortion\n\t///\n\t/// Applies the grayscale values of the second image to the first image. 
The output image has a texture defined by the grayscale values.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401863-displacementdistortion)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIDisplacementDistortion)\n\t///\n\t/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - displacementImage: An image whose grayscale values will be applied to the source image.\n\t///   - scale: The amount of texturing of the resulting image. The larger the value, the greater the texturing. (0...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc displacementDistortion(displacementImage: CIImage, scale: Float) -> CIImage {\n\t\tguard scale != 0 else { return self }\n\n\t\tlet filter = CIFilter.displacementDistortion() // CIDisplacementDistortion\n\t\tfilter.inputImage = self\n\t\tfilter.displacementImage = displacementImage\n\t\tfilter.scale = scale\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Dissolve\n\t///\n\t/// Uses a dissolve to transition from one image to another.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228314-dissolvetransition)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIDissolveTransition)\n\t///\n\t/// Categories: Transition, Video, Still Image, Interlaced, Non-Square Pixels, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - targetImage: The target image for a transition.\n\t///   - time: The parametric time of the transition. This value drives the transition from start (at time 0) to end (at time 1). 
(0...1)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc dissolveTransition(targetImage: CIImage, time: Float, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.dissolveTransition() // CIDissolveTransition\n\t\tfilter.inputImage = self\n\t\tfilter.targetImage = targetImage\n\t\tfilter.time = time\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Dither\n\t///\n\t/// Apply dithering to an image. This operation is usually performed in a perceptual color space.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228315-dither)\n\t///\n\t/// Categories: Color Effect, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - intensity: The intensity of the effect. (0...5)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc dither(intensity: Float = 0.1) -> CIImage {\n\t\tguard intensity != 0 else { return self }\n\n\t\tlet filter = CIFilter.dither() // CIDither\n\t\tfilter.inputImage = self\n\t\tfilter.intensity = intensity\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Divide Blend Mode\n\t///\n\t/// Divides the background image sample color from the source image sample color.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228316-divideblendmode)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIDivideBlendMode)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc divideBlendMode(backgroundImage: CIImage?) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.divideBlendMode() // CIDivideBlendMode\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Document Enhancer\n\t///\n\t/// Enhance a document image by removing unwanted shadows, whitening the background, and enhancing contrast.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228317-documentenhancer)\n\t///\n\t/// Categories: Color Effect, Non-Square Pixels, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - amount: The amount of enhancement. (0...10)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc documentEnhancer(amount: Float = 1, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.documentEnhancer() // CIDocumentEnhancer\n\t\tfilter.inputImage = self\n\t\tfilter.amount = amount\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Dot Screen\n\t///\n\t/// Simulates the dot patterns of a halftone screen.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228318-dotscreen)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIDotScreen)\n\t///\n\t/// Categories: Halftone Effect, Video, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - angle: The angle in radians of the pattern.\n\t///   - width: The distance between dots in the pattern. (1...)\n\t///   - sharpness: The sharpness of the pattern. The larger the value, the sharper the pattern. (0...1)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc dotScreen(center: CGPoint, angle: Float = 0, width: Float, sharpness: Float = 0.7, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.dotScreen() // CIDotScreen\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.angle = angle\n\t\tfilter.width = width\n\t\tfilter.sharpness = sharpness\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Droste\n\t///\n\t/// Recursively draws a portion of an image in imitation of an M. C. 
Escher drawing.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401864-droste)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIDroste)\n\t///\n\t/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - insetPoint0: The x and y position that defines the first inset point\n\t///   - insetPoint1: The x and y position that defines the second inset point\n\t///   - strands: The amount of strands (-10...10)\n\t///   - periodicity: The amount of intervals (1...)\n\t///   - rotation: The angle of the rotation, in radians\n\t///   - zoom: The zoom of the effect (0.01...)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc droste(insetPoint0: CGPoint,\n\t\t\t\tinsetPoint1: CGPoint,\n\t\t\t\tstrands: Float = 1,\n\t\t\t\tperiodicity: Float = 1,\n\t\t\t\trotation: Float = 0,\n\t\t\t\tzoom: Float = 1,\n\t\t\t\tactive: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.droste() // CIDroste\n\t\tfilter.inputImage = self\n\t\tfilter.insetPoint0 = insetPoint0\n\t\tfilter.insetPoint1 = insetPoint1\n\t\tfilter.strands = strands\n\t\tfilter.periodicity = periodicity\n\t\tfilter.rotation = rotation\n\t\tfilter.zoom = zoom\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Edge Preserve Upsample Filter\n\t///\n\t/// Upsamples a small image to the size of the input image using the luminance of the input image as a guide to preserve detail.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228319-edgepreserveupsample)\n\t///\n\t/// Categories: Geometry Adjustment, Video, Still Image, Interlaced, Non-Square Pixels, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - smallImage: An image representing the reference for scaling the input image with the type CIImage\n\t///   - spatialSigma: The influence of the input image’s spatial information on the upsampling operation (0...5)\n\t///   - lumaSigma: Influence of the input image’s luma information on the upsampling operation (0...1)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc edgePreserveUpsample(smallImage: CIImage,\n\t\t\t\t\t\t\t  spatialSigma: Float = 3,\n\t\t\t\t\t\t\t  lumaSigma: Float = 0.15,\n\t\t\t\t\t\t\t  active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.edgePreserveUpsample() // CIEdgePreserveUpsampleFilter\n\t\tfilter.inputImage = self\n\t\tfilter.smallImage = smallImage\n\t\tfilter.spatialSigma = spatialSigma\n\t\tfilter.lumaSigma = lumaSigma\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Edge Work\n\t///\n\t/// Produces a stylized black-and-white rendition of an image that looks similar to a woodblock cutout.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228320-edgework)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIEdgeWork)\n\t///\n\t/// Categories: Stylize, Video, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - radius: The thickness of the edges. The larger the value, the thicker the edges. 
(0...)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc edgeWork(radius: Float, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.edgeWork() // CIEdgeWork\n\t\tfilter.inputImage = self\n\t\tfilter.radius = radius\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Edges\n\t///\n\t/// Finds all edges in an image and displays them in color.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228321-edges)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIEdges)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - intensity: The intensity of the edges. The larger the value, the higher the intensity. (0...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc edges(intensity: Float = 1) -> CIImage {\n\t\tguard intensity != 0 else { return self }\n\n\t\tlet filter = CIFilter.edges() // CIEdges\n\t\tfilter.inputImage = self\n\t\tfilter.intensity = intensity\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Eightfold Reflected Tile\n\t///\n\t/// Produces a tiled image from a source image by applying an 8-way reflected symmetry.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228322-eightfoldreflectedtile)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIEightfoldReflectedTile)\n\t///\n\t/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - angle: The angle in radians of the tiled pattern.\n\t///   - width: The width of a tile. 
(0...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc eightfoldReflectedTile(center: CGPoint, angle: Float = 0, width: Float) -> CIImage {\n\t\tguard angle != 0 else { return self }\n\n\t\tlet filter = CIFilter.eightfoldReflectedTile() // CIEightfoldReflectedTile\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.angle = angle\n\t\tfilter.width = width\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Exclusion Blend Mode\n\t///\n\t/// Produces an effect similar to that produced by the “Difference Blend Mode” filter but with lower contrast. Source image sample values that are black do not produce a change; white inverts the background color values.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228323-exclusionblendmode)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIExclusionBlendMode)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc exclusionBlendMode(backgroundImage: CIImage?) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.exclusionBlendMode() // CIExclusionBlendMode\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Exposure Adjust\n\t///\n\t/// Adjusts the exposure setting for an image similar to the way you control exposure for a camera when you change the F-stop.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228324-exposureadjust)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIExposureAdjust)\n\t///\n\t/// Categories: Color Adjustment, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In, CICategoryXMPSerializable\n\t///\n\t///\n\t/// - Parameters:\n\t///   - ev: The amount to adjust the exposure of the image by. The larger the value, the brighter the exposure.\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc exposureAdjust(ev: Float = 0) -> CIImage {\n\t\tguard ev != 0 else { return self }\n\n\t\tlet filter = CIFilter.exposureAdjust() // CIExposureAdjust\n\t\tfilter.inputImage = self\n\t\tfilter.ev = ev\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// False Color\n\t///\n\t/// Maps luminance to a color ramp of two colors. 
False color is often used to process astronomical and other scientific data, such as ultraviolet and X-ray images.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228325-falsecolor)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIFalseColor)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - color0: The first color to use for the color ramp.\n\t///   - color1: The second color to use for the color ramp.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc falseColor(color0: CIColor, color1: CIColor, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.falseColor() // CIFalseColor\n\t\tfilter.inputImage = self\n\t\tfilter.color0 = color0\n\t\tfilter.color1 = color1\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Flash\n\t///\n\t/// Transitions from one image to another by creating a flash. The flash originates from a point you specify. Small at first, it rapidly expands until the image frame is completely filled with the flash color. 
As the color fades, the target image begins to appear.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228326-flashtransition)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIFlashTransition)\n\t///\n\t/// Categories: Transition, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - targetImage: The target image for a transition.\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - extent: The extent of the flash.\n\t///   - color: The color of the light rays emanating from the flash.\n\t///   - time: The parametric time of the transition. This value drives the transition from start (at time 0) to end (at time 1). (0...1)\n\t///   - maxStriationRadius: The radius of the light rays emanating from the flash. (0...)\n\t///   - striationStrength: The strength of the light rays emanating from the flash. (0...)\n\t///   - striationContrast: The contrast of the light rays emanating from the flash. (0...)\n\t///   - fadeThreshold: The amount of fade between the flash and the target image. The higher the value, the more flash time and the less fade time. 
(0...1)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc flashTransition(targetImage: CIImage,\n\t\t\t\t\t\t center: CGPoint,\n\t\t\t\t\t\t extent: CGRect,\n\t\t\t\t\t\t color: CIColor,\n\t\t\t\t\t\t time: Float,\n\t\t\t\t\t\t maxStriationRadius: Float = 2.58,\n\t\t\t\t\t\t striationStrength: Float = 0.5,\n\t\t\t\t\t\t striationContrast: Float = 1.375,\n\t\t\t\t\t\t fadeThreshold: Float = 0.85,\n\t\t\t\t\t\t active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.flashTransition() // CIFlashTransition\n\t\tfilter.inputImage = self\n\t\tfilter.targetImage = targetImage\n\t\tfilter.center = center\n\t\tfilter.extent = extent\n\t\tfilter.color = color\n\t\tfilter.time = time\n\t\tfilter.maxStriationRadius = maxStriationRadius\n\t\tfilter.striationStrength = striationStrength\n\t\tfilter.striationContrast = striationContrast\n\t\tfilter.fadeThreshold = fadeThreshold\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Fourfold Reflected Tile\n\t///\n\t/// Produces a tiled image from a source image by applying a 4-way reflected symmetry.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228327-fourfoldreflectedtile)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIFourfoldReflectedTile)\n\t///\n\t/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - angle: The angle in radians of the tiled pattern.\n\t///   - width: The width of a tile. (0...)\n\t///   - acuteAngle: The primary angle for the repeating reflected tile. 
Small values create thin diamond tiles, and higher values create fatter reflected tiles.\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc fourfoldReflectedTile(center: CGPoint, angle: Float = 0, width: Float, acuteAngle: Float = .pi/2) -> CIImage {\n\t\tguard angle != 0 || acuteAngle != .pi/2 else { return self }\n\n\t\tlet filter = CIFilter.fourfoldReflectedTile() // CIFourfoldReflectedTile\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.angle = angle\n\t\tfilter.width = width\n\t\tfilter.acuteAngle = acuteAngle\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Fourfold Rotated Tile\n\t///\n\t/// Produces a tiled image from a source image by rotating the source image at increments of 90 degrees.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228328-fourfoldrotatedtile)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIFourfoldRotatedTile)\n\t///\n\t/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - angle: The angle in radians of the tiled pattern.\n\t///   - width: The width of a tile. (0...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc fourfoldRotatedTile(center: CGPoint, angle: Float = 0, width: Float) -> CIImage {\n\t\tguard angle != 0 else { return self }\n\n\t\tlet filter = CIFilter.fourfoldRotatedTile() // CIFourfoldRotatedTile\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.angle = angle\n\t\tfilter.width = width\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Fourfold Translated Tile\n\t///\n\t/// Produces a tiled image from a source image by applying 4 translation operations.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228329-fourfoldtranslatedtile)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIFourfoldTranslatedTile)\n\t///\n\t/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - angle: The angle in radians of the tiled pattern.\n\t///   - width: The width of a tile. (0...)\n\t///   - acuteAngle: The primary angle for the repeating translated tile. Small values create thin diamond tiles, and higher values create fatter translated tiles.\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc fourfoldTranslatedTile(center: CGPoint, angle: Float = 0, width: Float, acuteAngle: Float = .pi/2) -> CIImage {\n\t\tguard angle != 0 || acuteAngle != .pi/2 else { return self }\n\n\t\tlet filter = CIFilter.fourfoldTranslatedTile() // CIFourfoldTranslatedTile\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.angle = angle\n\t\tfilter.width = width\n\t\tfilter.acuteAngle = acuteAngle\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Gabor Gradients\n\t///\n\t/// Applies multichannel 5 by 5 Gabor gradient filter to an image. The resulting image has maximum horizontal gradient in the red channel and the maximum vertical gradient in the green channel. 
The gradient values can be positive or negative.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3325508-gaborgradients)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc gaborGradients(active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.gaborGradients() // CIGaborGradients\n\t\tfilter.inputImage = self\n\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Gamma Adjust\n\t///\n\t/// Adjusts midtone brightness. This filter is typically used to compensate for nonlinear effects of displays. Adjusting the gamma effectively changes the slope of the transition between black and white.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228330-gammaadjust)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIGammaAdjust)\n\t///\n\t/// Categories: Color Adjustment, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - power: A gamma value to use to correct image brightness. The larger the value, the darker the result.\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc gammaAdjust(power: Float = 1) -> CIImage {\n\t\tguard power != 1 else { return self }\n\n\t\tlet filter = CIFilter.gammaAdjust() // CIGammaAdjust\n\t\tfilter.inputImage = self\n\t\tfilter.power = power\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Gaussian Blur\n\t///\n\t/// Spreads source pixels by an amount specified by a Gaussian distribution.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228331-gaussianblur)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIGaussianBlur)\n\t///\n\t/// Categories: Blur, Still Image, Video, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - radius: The radius determines how many pixels are used to create the blur. The larger the radius, the blurrier the result. (0...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc gaussianBlur(radius: Float = 10) -> CIImage {\n\t\tguard radius != 0 else { return self }\n\n\t\tlet filter = CIFilter.gaussianBlur() // CIGaussianBlur\n\t\tfilter.inputImage = self\n\t\tfilter.radius = radius\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Glass Distortion\n\t///\n\t/// Distorts an image by applying a glass-like texture. The raised portions of the output image are the result of applying a texture map.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401865-glassdistortion)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIGlassDistortion)\n\t///\n\t/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - textureImage: A texture to apply to the source image.\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - scale: The amount of texturing of the resulting image. The larger the value, the greater the texturing. 
(0...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc glassDistortion(textureImage: CIImage, center: CGPoint, scale: Float) -> CIImage {\n\t\tguard scale != 0 else { return self }\n\n\t\tlet filter = CIFilter.glassDistortion() // CIGlassDistortion\n\t\tfilter.inputImage = self\n\t\tfilter.textureImage = textureImage\n\t\tfilter.center = center\n\t\tfilter.scale = scale\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Glass Lozenge\n\t///\n\t/// Creates a lozenge-shaped lens and distorts the portion of the image over which the lens is placed.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401866-glasslozenge)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIGlassLozenge)\n\t///\n\t/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - point0: The x and y position that defines the center of the circle at one end of the lozenge.\n\t///   - point1: The x and y position that defines the center of the circle at the other end of the lozenge.\n\t///   - radius: The radius of the lozenge. The larger the radius, the wider the extent of the distortion. (0...)\n\t///   - refraction: The refraction of the glass. (0...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc glassLozenge(point0: CGPoint, point1: CGPoint, radius: Float, refraction: Float = 1.7) -> CIImage {\n\t\tguard refraction != 1 else { return self }\n\n\t\tlet filter = CIFilter.glassLozenge() // CIGlassLozenge\n\t\tfilter.inputImage = self\n\t\tfilter.point0 = point0\n\t\tfilter.point1 = point1\n\t\tfilter.radius = radius\n\t\tfilter.refraction = refraction\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Glide Reflected Tile\n\t///\n\t/// Produces a tiled image from a source image by translating and smearing the image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228333-glidereflectedtile)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIGlideReflectedTile)\n\t///\n\t/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - angle: The angle in radians of the tiled pattern.\n\t///   - width: The width of a tile. (0...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc glideReflectedTile(center: CGPoint, angle: Float = 0, width: Float) -> CIImage {\n\t\tguard angle != 0 else { return self }\n\n\t\tlet filter = CIFilter.glideReflectedTile() // CIGlideReflectedTile\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.angle = angle\n\t\tfilter.width = width\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Gloom\n\t///\n\t/// Dulls the highlights of an image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228334-gloom)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIGloom)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - radius: The radius determines how many pixels are used to create the effect. The larger the radius, the greater the effect. (0...)\n\t///   - intensity: The intensity of the effect. A value of 0.0 is no effect. A value of 1.0 is the maximum effect. 
(0...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc gloom(radius: Float, intensity: Float = 0.5) -> CIImage {\n\t\tguard radius != 0 || intensity != 0 else { return self }\n\n\t\tlet filter = CIFilter.gloom() // CIGloom\n\t\tfilter.inputImage = self\n\t\tfilter.radius = radius\n\t\tfilter.intensity = intensity\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Guided Filter\n\t///\n\t/// Upsamples a small image to the size of the guide image using the content of the guide to preserve detail.\n\t///\n\t/// ⚠️ No documentation available for CIGuidedFilter\n\t///\n\t/// Categories: Geometry Adjustment, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - aLargerImageToUseAsAGuide: A larger image to use as a guide.\n\t///   - radius: The distance from the center of the effect.\n\t///   - epsilon: Smoothness. A higher value means more smoothing.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc guided(aLargerImageToUseAsAGuide: CIImage,\n\t\t\t\tradius: Float = 1,\n\t\t\t\tepsilon: Float = 0.0001,\n\t\t\t\tactive: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\t// Filter not included in CoreImage.CIFilterBuiltins; using dictionary-based method.\n\t\t// The receiver must be supplied as \"inputImage\" (kCIInputImageKey); without it the\n\t\t// filter has no source image to upsample.\n\t\tguard let filter = CIFilter(name: \"CIGuidedFilter\", parameters: [\n\t\t\t\"inputImage\": self,\n\t\t\t\"inputGuideImage\": aLargerImageToUseAsAGuide,\n\t\t\t\"inputRadius\": radius,\n\t\t\t\"inputEpsilon\": epsilon,\n\t\t]) else { return self }\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Hard Light Blend Mode\n\t///\n\t/// Either multiplies or screens colors, depending on the source image sample color. If the source image sample color is lighter than 50% gray, the background is lightened, similar to screening. 
If the source image sample color is darker than 50% gray, the background is darkened, similar to multiplying. If the source image sample color is equal to 50% gray, the source image is not changed. Image samples that are equal to pure black or pure white result in pure black or white. The overall effect is similar to what you would achieve by shining a harsh spotlight on the source image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228335-hardlightblendmode)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIHardLightBlendMode)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc hardLightBlendMode(backgroundImage: CIImage?) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.hardLightBlendMode() // CIHardLightBlendMode\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Hatched Screen\n\t///\n\t/// Simulates the hatched pattern of a halftone screen.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228336-hatchedscreen)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIHatchedScreen)\n\t///\n\t/// Categories: Halftone Effect, Video, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - angle: The angle in radians of the pattern.\n\t///   - width: The distance between lines in the pattern. (1...)\n\t///   - sharpness: The amount of sharpening to apply. 
(0...1)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc hatchedScreen(center: CGPoint, angle: Float = 0, width: Float, sharpness: Float = 0.7) -> CIImage {\n\t\tguard angle != 0 else { return self }\n\n\t\tlet filter = CIFilter.hatchedScreen() // CIHatchedScreen\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.angle = angle\n\t\tfilter.width = width\n\t\tfilter.sharpness = sharpness\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Height Field From Mask\n\t///\n\t/// Produces a continuous three-dimensional, loft-shaped height field from a grayscale mask. The white values of the mask define those pixels that are inside the height field while the black values define those pixels that are outside. The field varies smoothly and continuously inside the mask, reaching the value 0 at the edge of the mask. You can use this filter with the Shaded Material filter to produce extremely realistic shaded objects.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228337-heightfieldfrommask)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIHeightFieldFromMask)\n\t///\n\t/// Categories: Stylize, Video, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - radius: The distance from the edge of the mask for the smooth transition is proportional to the input radius. Larger values make the transition smoother and more pronounced. Smaller values make the transition approximate a fillet radius. 
(0...)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc heightFieldFromMask(radius: Float, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.heightFieldFromMask() // CIHeightFieldFromMask\n\t\tfilter.inputImage = self\n\t\tfilter.radius = radius\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Hexagonal Pixelate\n\t///\n\t/// Displays an image as colored hexagons whose color is an average of the pixels they replace.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228338-hexagonalpixellate)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIHexagonalPixellate)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - scale: The scale determines the size of the hexagons. Larger values result in larger hexagons. (1...)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc hexagonalPixellate(center: CGPoint, scale: Float, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.hexagonalPixellate() // CIHexagonalPixellate\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.scale = scale\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Highlight and Shadow Adjust\n\t///\n\t/// Adjust the tonal mapping of an image while preserving spatial detail.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228339-highlightshadowadjust)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIHighlightShadowAdjust)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - radius: Shadow Highlight Radius. (0...)\n\t///   - shadowAmount: The amount of adjustment to the shadows of the image. (-1...1)\n\t///   - highlightAmount: The amount of adjustment to the highlights of the image. (0...1)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc highlightShadowAdjust(radius: Float = 0, shadowAmount: Float = 0, highlightAmount: Float = 1) -> CIImage {\n\t\tguard radius != 0 || shadowAmount != 0 || highlightAmount != 1 else { return self }\n\n\t\tlet filter = CIFilter.highlightShadowAdjust() // CIHighlightShadowAdjust\n\t\tfilter.inputImage = self\n\t\tfilter.radius = radius\n\t\tfilter.shadowAmount = shadowAmount\n\t\tfilter.highlightAmount = highlightAmount\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Histogram Display\n\t///\n\t/// Generates a displayable histogram image from the output of the “Area Histogram” filter.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547122-histogramdisplay)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIHistogramDisplayFilter)\n\t///\n\t/// Categories: Reduction, Video, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - height: The height of the displayable histogram image. (1...200)\n\t///   - highLimit: The fraction of the right portion of the histogram image to make lighter. 
(0...1)\n\t///   - lowLimit: The fraction of the left portion of the histogram image to make darker. (0...1)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc histogramDisplay(height: Float = 100, highLimit: Float = 1, lowLimit: Float = 0, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.histogramDisplay() // CIHistogramDisplayFilter\n\t\tfilter.inputImage = self\n\t\tfilter.height = height\n\t\tfilter.highLimit = highLimit\n\t\tfilter.lowLimit = lowLimit\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Hole Distortion\n\t///\n\t/// Creates a circular area that pushes the image pixels outward, distorting those pixels closest to the circle the most.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401867-holedistortion)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIHoleDistortion)\n\t///\n\t/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - radius: The radius determines how many pixels are used to create the distortion. The larger the radius, the wider the extent of the distortion. (0.01...)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc holeDistortion(center: CGPoint, radius: Float, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.holeDistortion() // CIHoleDistortion\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.radius = radius\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Hue Adjust\n\t///\n\t/// Changes the overall hue, or tint, of the source pixels.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228340-hueadjust)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIHueAdjust)\n\t///\n\t/// Categories: Color Adjustment, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - angle: An angle in radians to use to correct the hue of an image.\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc hueAdjust(angle: Float = 0) -> CIImage {\n\t\tguard angle != 0 else { return self }\n\n\t\tlet filter = CIFilter.hueAdjust() // CIHueAdjust\n\t\tfilter.inputImage = self\n\t\tfilter.angle = angle\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Hue Blend Mode\n\t///\n\t/// Uses the luminance and saturation values of the background image with the hue of the input image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228341-hueblendmode)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIHueBlendMode)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc hueBlendMode(backgroundImage: CIImage?) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.hueBlendMode() // CIHueBlendMode\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// KMeans\n\t///\n\t/// Create a palette of the most common colors found in the image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547110-kmeans)\n\t///\n\t/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - extent: A rectangle that defines the extent of the effect.\n\t///   - inputMeans: Specifies the color seeds to use for k-means clustering, either passed as an image or an array of colors.\n\t///   - count: Specifies how many k-means color clusters should be used. (0...128)\n\t///   - passes: Specifies how many k-means passes should be performed. (0...20)\n\t///   - perceptual: Specifies whether the k-means color palette should be computed in a perceptual color space.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc kMeans(extent: CGRect,\n\t\t\t\tinputMeans: CIImage,\n\t\t\t\tcount: Int,\n\t\t\t\tpasses: Int,\n\t\t\t\tperceptual: Bool = false,\n\t\t\t\tactive: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.kMeans() // CIKMeans\n\t\tfilter.inputImage = self\n\t\tfilter.extent = extent\n\t\tfilter.inputMeans = inputMeans\n\t\tfilter.count = count\n\t\tfilter.passes = Float(passes)\n\t\tfilter.perceptual = perceptual\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Kaleidoscope\n\t///\n\t/// Produces a kaleidoscopic image from a source image by applying 12-way symmetry.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228343-kaleidoscope)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIKaleidoscope)\n\t///\n\t/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - count: The number of reflections in the pattern. (1...)\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - angle: The angle in radians of reflection.\n\t/// - Returns: processed new `CIImage`\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc kaleidoscope(count: Int = 6, center: CGPoint, angle: Float = 0) -> CIImage {\n\t\tlet filter = CIFilter.kaleidoscope() // CIKaleidoscope\n\t\tfilter.inputImage = self\n\t\tfilter.count = count\n\t\tfilter.center = center\n\t\tfilter.angle = angle\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Combined Keystone Correction\n\t///\n\t/// Apply keystone correction to an image with combined horizontal and vertical guides.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3325509-keystonecorrectioncombined)\n\t///\n\t/// Categories: Geometry Adjustment, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - focalLength: 35mm equivalent focal length of the input image.\n\t///   - topLeft: The top left coordinate of the guide.\n\t///   - topRight: The top right coordinate of the guide.\n\t///   - bottomRight: The bottom right coordinate of the guide.\n\t///   - bottomLeft: The bottom left coordinate of the guide.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc keystoneCorrectionCombined(focalLength: Float = 28,\n\t\t\t\t\t\t\t\t\ttopLeft: CGPoint,\n\t\t\t\t\t\t\t\t\ttopRight: CGPoint,\n\t\t\t\t\t\t\t\t\tbottomRight: CGPoint,\n\t\t\t\t\t\t\t\t\tbottomLeft: CGPoint,\n\t\t\t\t\t\t\t\t\tactive: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.keystoneCorrectionCombined() // CIKeystoneCorrectionCombined\n\t\tfilter.inputImage = self\n\t\tfilter.focalLength = focalLength\n\t\tfilter.topLeft = topLeft\n\t\tfilter.topRight = topRight\n\t\tfilter.bottomRight = bottomRight\n\t\tfilter.bottomLeft = bottomLeft\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Horizontal Keystone Correction\n\t///\n\t/// Apply horizontal keystone correction to an image with guides.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3325510-keystonecorrectionhorizontal)\n\t///\n\t/// Categories: Geometry Adjustment, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - focalLength: 35mm equivalent focal length of the input image.\n\t///   - topLeft: The top left coordinate of the guide.\n\t///   - topRight: The top right coordinate of the guide.\n\t///   - bottomRight: The bottom right coordinate of the guide.\n\t///   - bottomLeft: The bottom left coordinate of the guide.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc keystoneCorrectionHorizontal(focalLength: Float = 28,\n\t\t\t\t\t\t\t\t\t  topLeft: CGPoint,\n\t\t\t\t\t\t\t\t\t  topRight: CGPoint,\n\t\t\t\t\t\t\t\t\t  bottomRight: CGPoint,\n\t\t\t\t\t\t\t\t\t  bottomLeft: CGPoint,\n\t\t\t\t\t\t\t\t\t  active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.keystoneCorrectionHorizontal() // CIKeystoneCorrectionHorizontal\n\t\tfilter.inputImage = self\n\t\tfilter.focalLength = focalLength\n\t\tfilter.topLeft = topLeft\n\t\tfilter.topRight = topRight\n\t\tfilter.bottomRight = bottomRight\n\t\tfilter.bottomLeft = bottomLeft\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Vertical Keystone Correction\n\t///\n\t/// Apply vertical keystone correction to an image with guides.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3325511-keystonecorrectionvertical)\n\t///\n\t/// Categories: Geometry Adjustment, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - focalLength: 35mm equivalent focal length of the input image.\n\t///   - topLeft: The top left coordinate of the guide.\n\t///   - topRight: The top right coordinate of the guide.\n\t///   - bottomRight: The bottom right coordinate of the guide.\n\t///   - bottomLeft: The bottom left coordinate of the guide.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc keystoneCorrectionVertical(focalLength: Float = 28,\n\t\t\t\t\t\t\t\t\ttopLeft: CGPoint,\n\t\t\t\t\t\t\t\t\ttopRight: CGPoint,\n\t\t\t\t\t\t\t\t\tbottomRight: CGPoint,\n\t\t\t\t\t\t\t\t\tbottomLeft: CGPoint,\n\t\t\t\t\t\t\t\t\tactive: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.keystoneCorrectionVertical() // CIKeystoneCorrectionVertical\n\t\tfilter.inputImage = self\n\t\tfilter.focalLength = focalLength\n\t\tfilter.topLeft = topLeft\n\t\tfilter.topRight = topRight\n\t\tfilter.bottomRight = bottomRight\n\t\tfilter.bottomLeft = bottomLeft\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Lab ∆E\n\t///\n\t/// Produces an image with the Lab ∆E difference values between two images. 
The result image will contain ∆E 1994 values between 0.0 and 100.0 where 2.0 is considered a just noticeable difference.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228260-labdeltae)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - image2: The second input image for comparison.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc labDeltaE(image2: CIImage, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.labDeltaE() // CILabDeltaE\n\t\tfilter.inputImage = self\n\t\tfilter.image2 = image2\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Lanczos Scale Transform\n\t///\n\t/// Produces a high-quality, scaled version of a source image. You typically use this filter to scale down an image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228344-lanczosscaletransform)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CILanczosScaleTransform)\n\t///\n\t/// Categories: Geometry Adjustment, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - scale: The scaling factor to use on the image. Values less than 1.0 scale down the images. Values greater than 1.0 scale up the image. (0...)\n\t///   - aspectRatio: The additional horizontal scaling factor to use on the image. 
(0...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc lanczosScaleTransform(scale: Float = 1, aspectRatio: Float = 1) -> CIImage {\n\t\tguard scale != 1 || aspectRatio != 1 else { return self }\n\n\t\tlet filter = CIFilter.lanczosScaleTransform() // CILanczosScaleTransform\n\t\tfilter.inputImage = self\n\t\tfilter.scale = scale\n\t\tfilter.aspectRatio = aspectRatio\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Light Tunnel Distortion\n\t///\n\t/// Rotates a portion of the input image specified by the center and radius parameters to give a tunneling effect.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401868-lighttunnel)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CILightTunnel)\n\t///\n\t/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - rotation: Rotation angle in radians of the light tunnel.\n\t///   - radius: Center radius of the light tunnel.\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc lightTunnel(center: CGPoint, rotation: Float = 0, radius: Float) -> CIImage {\n\t\tguard rotation != 0 || radius != 0 else { return self }\n\n\t\tlet filter = CIFilter.lightTunnel() // CILightTunnel\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.rotation = rotation\n\t\tfilter.radius = radius\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Lighten Blend Mode\n\t///\n\t/// Creates composite image samples by choosing the lighter samples (either from the source image or the background). The result is that the background image samples are replaced by any source image samples that are lighter. 
Otherwise, the background image samples are left unchanged.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228346-lightenblendmode)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CILightenBlendMode)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc lightenBlendMode(backgroundImage: CIImage?) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.lightenBlendMode() // CILightenBlendMode\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Line Overlay\n\t///\n\t/// Creates a sketch that outlines the edges of an image in black, leaving the non-outlined portions of the image transparent. The result has alpha and is rendered in black, so it won’t look like much until you render it over another image using source over compositing.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228347-lineoverlay)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CILineOverlay)\n\t///\n\t/// Categories: Built-In, Still Image, Video, Stylize\n\t///\n\t///\n\t/// - Parameters:\n\t///   - nrNoiseLevel: The noise level of the image (used with camera data) that gets removed before tracing the edges of the image. Increasing the noise level helps to clean up the traced edges of the image. (0...)\n\t///   - nrSharpness: The amount of sharpening done when removing noise in the image before tracing the edges of the image. This improves the edge acquisition. 
(0...)\n\t///   - edgeIntensity: The accentuation factor of the Sobel gradient information when tracing the edges of the image. Higher values find more edges, although typically a low value (such as 1.0) is used. (0...)\n\t///   - threshold: This value determines edge visibility. Larger values thin out the edges. (0...)\n\t///   - contrast: The amount of anti-aliasing to use on the edges produced by this filter. Higher values produce higher contrast edges (they are less anti-aliased). (0.25...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc lineOverlay(nrNoiseLevel: Float = 0.07,\n\t\t\t\t\t nrSharpness: Float = 0.71,\n\t\t\t\t\t edgeIntensity: Float = 1,\n\t\t\t\t\t threshold: Float = 0.1,\n\t\t\t\t\t contrast: Float = 50) -> CIImage {\n\t\tguard nrNoiseLevel != 0 || nrSharpness != 0 || edgeIntensity != 0 || threshold != 0 || contrast != 1 else { return self }\n\n\t\tlet filter = CIFilter.lineOverlay() // CILineOverlay\n\t\tfilter.inputImage = self\n\t\tfilter.nrNoiseLevel = nrNoiseLevel\n\t\tfilter.nrSharpness = nrSharpness\n\t\tfilter.edgeIntensity = edgeIntensity\n\t\tfilter.threshold = threshold\n\t\tfilter.contrast = contrast\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Line Screen\n\t///\n\t/// Simulates the line pattern of a halftone screen.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228348-linescreen)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CILineScreen)\n\t///\n\t/// Categories: Halftone Effect, Video, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - angle: The angle in radians of the pattern.\n\t///   - width: The distance between lines in the pattern. (1...)\n\t///   - sharpness: The sharpness of the pattern. The larger the value, the sharper the pattern. 
(0...1)\n\t/// - Returns: processed new `CIImage`\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc lineScreen(center: CGPoint, angle: Float = 0, width: Float, sharpness: Float = 0.7) -> CIImage {\n\t\tlet filter = CIFilter.lineScreen() // CILineScreen\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.angle = angle\n\t\tfilter.width = width\n\t\tfilter.sharpness = sharpness\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Linear Burn Blend Mode\n\t///\n\t/// Inverts the unpremultiplied source and background image sample color, inverts the sum, and then blends the result with the background according to the PDF basic compositing formula. Source image values that are white produce no change. Source image values that are black invert the background color values.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228349-linearburnblendmode)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CILinearBurnBlendMode)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc linearBurnBlendMode(backgroundImage: CIImage?) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.linearBurnBlendMode() // CILinearBurnBlendMode\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Linear Dodge Blend Mode\n\t///\n\t/// Unpremultiplies the source and background image sample colors, adds them, and then blends the result with the background according to the PDF basic compositing formula. 
Source image values that are black produces output that is the same as the background. Source image values that are non-black brighten the background color values.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228350-lineardodgeblendmode)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CILinearDodgeBlendMode)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc linearDodgeBlendMode(backgroundImage: CIImage?) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.linearDodgeBlendMode() // CILinearDodgeBlendMode\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Linear Light Blend Mode\n\t///\n\t/// A blend mode that is a combination of linear burn and linear dodge blend modes.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401869-linearlightblendmode)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 15, macOS 12.0, *)\n\tfunc linearLightBlendMode(backgroundImage: CIImage?) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.linearLightBlendMode() // CILinearLightBlendMode\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Linear to sRGB Tone Curve\n\t///\n\t/// Maps color intensity from a linear gamma curve to the sRGB color space.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228352-lineartosrgbtonecurve)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CILinearToSRGBToneCurve)\n\t///\n\t/// Categories: Color Adjustment, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc linearToSRGBToneCurve(active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.linearToSRGBToneCurve() // CILinearToSRGBToneCurve\n\t\tfilter.inputImage = self\n\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Luminosity Blend Mode\n\t///\n\t/// Uses the hue and saturation of the background with the luminance of the source image. This mode creates an effect that is inverse to the effect created by the “Color Blend Mode” filter.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228353-luminosityblendmode)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CILuminosityBlendMode)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc luminosityBlendMode(backgroundImage: CIImage?) 
-> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.luminosityBlendMode() // CILuminosityBlendMode\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Mask to Alpha\n\t///\n\t/// Converts a grayscale image to a white image that is masked by alpha. The white values from the source image produce the inside of the mask; the black values become completely transparent.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228354-masktoalpha)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIMaskToAlpha)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc maskToAlpha(active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.maskToAlpha() // CIMaskToAlpha\n\t\tfilter.inputImage = self\n\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Masked Variable Blur\n\t///\n\t/// Blurs the source image according to the brightness levels in a mask image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228355-maskedvariableblur)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIMaskedVariableBlur)\n\t///\n\t/// Categories: Blur, Still Image, Video, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - mask: The mask image that determines how much to blur the image. The mask’s green channel value from 0.0 to 1.0 determines if the image is not blurred or blurred by the full radius.\n\t///   - radius: A value that governs the maximum blur radius to apply. 
(0...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc maskedVariableBlur(mask: CIImage, radius: Float = 5) -> CIImage {\n\t\tguard radius != 0 else { return self }\n\n\t\tlet filter = CIFilter.maskedVariableBlur() // CIMaskedVariableBlur\n\t\tfilter.inputImage = self\n\t\tfilter.mask = mask\n\t\tfilter.radius = radius\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Maximum Component\n\t///\n\t/// Converts an image to grayscale using the maximum of the three color components.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228356-maximumcomponent)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIMaximumComponent)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - active: should this filter be applied\n\t/// - Returns: a grayscale image from max(r,g,b), or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc maximumComponent(active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.maximumComponent() // CIMaximumComponent\n\t\tfilter.inputImage = self\n\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Maximum\n\t///\n\t/// Computes the maximum value, by color component, of two input images and creates an output image using the maximum values. 
This is similar to dodging.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228357-maximumcompositing)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIMaximumCompositing)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc maximumCompositing(backgroundImage: CIImage?) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.maximumCompositing() // CIMaximumCompositing\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Maximum Scale Transform\n\t///\n\t/// Produces a scaled version of a source image that uses the maximum of neighboring pixels instead of linear averaging.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401870-maximumscaletransform)\n\t///\n\t/// Categories: Geometry Adjustment, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - scale: The scaling factor to use on the image. Values less than 1.0 scale down the images. Values greater than 1.0 scale up the image. (0...)\n\t///   - aspectRatio: The additional horizontal scaling factor to use on the image. 
(0...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 18, macOS 15.0, *)\n\tfunc maximumScaleTransform(scale: Float = 1, aspectRatio: Float = 1) -> CIImage {\n\t\tguard scale != 1 || aspectRatio != 1 else { return self }\n\n\t\tlet filter = CIFilter.maximumScaleTransform() // CIMaximumScaleTransform\n\t\tfilter.inputImage = self\n\t\tfilter.scale = scale\n\t\tfilter.aspectRatio = aspectRatio\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Median\n\t///\n\t/// Computes the median value for a group of neighboring pixels and replaces each pixel value with the median. The effect is to reduce noise.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228358-median)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIMedianFilter)\n\t///\n\t/// Categories: Blur, Still Image, Video, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc median(active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.median() // CIMedianFilter\n\t\tfilter.inputImage = self\n\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Minimum Component\n\t///\n\t/// Converts an image to grayscale using the minimum of the three color components.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228360-minimumcomponent)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIMinimumComponent)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - active: should this filter be applied\n\t/// - Returns: a grayscale image from min(r,g,b), or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc minimumComponent(active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.minimumComponent() // CIMinimumComponent\n\t\tfilter.inputImage = self\n\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Minimum\n\t///\n\t/// Computes the minimum value, by color component, of two input images and creates an output image using the minimum values. This is similar to burning.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228361-minimumcompositing)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIMinimumCompositing)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc minimumCompositing(backgroundImage: CIImage?) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.minimumCompositing() // CIMinimumCompositing\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Mix\n\t///\n\t/// Uses an amount parameter to interpolate between an image and a background image. When value is 0.0 or less, the result is the background image. When the value is 1.0 or more, the result is the image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228362-mix)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t///   - amount: The amount of the effect.\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc mix(backgroundImage: CIImage?, amount: Float = 1) -> CIImage {\n\t\tguard amount != 1 else { return self }\n\n\t\tlet filter = CIFilter.mix() // CIMix\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\tfilter.amount = amount\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Mod\n\t///\n\t/// Transitions from one image to another by revealing the target image through irregularly shaped holes.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228363-modtransition)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIModTransition)\n\t///\n\t/// Categories: Transition, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - targetImage: The target image for a transition.\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - time: The parametric time of the transition. This value drives the transition from start (at time 0) to end (at time 1). (0...1)\n\t///   - angle: The angle in radians of the mod hole pattern.\n\t///   - radius: The radius of the undistorted holes in the pattern. (1...)\n\t///   - compression: The amount of stretching applied to the mod hole pattern. 
Holes in the center are not distorted as much as those at the edge of the image. (1...)\n\t/// - Returns: processed new `CIImage`\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc modTransition(targetImage: CIImage,\n\t\t\t\t\t   center: CGPoint,\n\t\t\t\t\t   time: Float,\n\t\t\t\t\t   angle: Float = 2,\n\t\t\t\t\t   radius: Float,\n\t\t\t\t\t   compression: Float) -> CIImage {\n\t\tlet filter = CIFilter.modTransition() // CIModTransition\n\t\tfilter.inputImage = self\n\t\tfilter.targetImage = targetImage\n\t\tfilter.center = center\n\t\tfilter.time = time\n\t\tfilter.angle = angle\n\t\tfilter.radius = radius\n\t\tfilter.compression = compression\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Morphology Gradient\n\t///\n\t/// Finds the edges of an image by returning the difference between the morphological minimum and maximum operations to the image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228364-morphologygradient)\n\t///\n\t/// Categories: Blur, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - radius: The desired radius of the circular morphological operation to the image. (0...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc morphologyGradient(radius: Float) -> CIImage {\n\t\tguard radius != 0 else { return self }\n\n\t\tlet filter = CIFilter.morphologyGradient() // CIMorphologyGradient\n\t\tfilter.inputImage = self\n\t\tfilter.radius = radius\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Morphology Maximum\n\t///\n\t/// Lightens areas of an image by applying a circular morphological maximum operation to the image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228365-morphologymaximum)\n\t///\n\t/// Categories: Blur, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - radius: The desired radius of the circular morphological operation to the image.\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc morphologyMaximum(radius: Float = 0) -> CIImage {\n\t\tguard radius != 0 else { return self }\n\n\t\tlet filter = CIFilter.morphologyMaximum() // CIMorphologyMaximum\n\t\tfilter.inputImage = self\n\t\tfilter.radius = radius\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Morphology Minimum\n\t///\n\t/// Darkens areas of an image by applying a circular morphological maximum operation to the image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228366-morphologyminimum)\n\t///\n\t/// Categories: Blur, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - radius: The desired radius of the circular morphological operation to the image.\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc morphologyMinimum(radius: Float = 0) -> CIImage {\n\t\tguard radius != 0 else { return self }\n\n\t\tlet filter = CIFilter.morphologyMinimum() // CIMorphologyMinimum\n\t\tfilter.inputImage = self\n\t\tfilter.radius = radius\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Morphology Rectangle Maximum\n\t///\n\t/// Lightens areas of an image by applying a rectangular morphological maximum operation to the image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228367-morphologyrectanglemaximum)\n\t///\n\t/// Categories: Blur, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - width: The width in pixels of the morphological operation. (1...)\n\t///   - height: The height in pixels of the morphological operation. (1...)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc morphologyRectangleMaximum(width: Int, height: Int, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.morphologyRectangleMaximum() // CIMorphologyRectangleMaximum\n\t\tfilter.inputImage = self\n\t\tfilter.width = Float(width)\n\t\tfilter.height = Float(height)\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Morphology Rectangle Minimum\n\t///\n\t/// Darkens areas of an image by applying a rectangular morphological maximum operation to the image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228368-morphologyrectangleminimum)\n\t///\n\t/// Categories: Blur, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - width: The width in pixels of the morphological operation. (1...)\n\t///   - height: The height in pixels of the morphological operation. 
(1...)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc morphologyRectangleMinimum(width: Int, height: Int, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.morphologyRectangleMinimum() // CIMorphologyRectangleMinimum\n\t\tfilter.inputImage = self\n\t\tfilter.width = Float(width)\n\t\tfilter.height = Float(height)\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Motion Blur\n\t///\n\t/// Blurs an image to simulate the effect of using a camera that moves a specified angle and distance while capturing the image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228369-motionblur)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIMotionBlur)\n\t///\n\t/// Categories: Blur, Still Image, Video, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - radius: The radius determines how many pixels are used to create the blur. The larger the radius, the blurrier the result. (0...)\n\t///   - angle: The angle in radians of the motion determines which direction the blur smears.\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc motionBlur(radius: Float, angle: Float = 0) -> CIImage {\n\t\tguard radius != 0 || angle != 0 else { return self }\n\n\t\tlet filter = CIFilter.motionBlur() // CIMotionBlur\n\t\tfilter.inputImage = self\n\t\tfilter.radius = radius\n\t\tfilter.angle = angle\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Multiply Blend Mode\n\t///\n\t/// Multiplies the source image samples with the background image samples. 
This results in colors that are at least as dark as either of the two contributing sample colors.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228370-multiplyblendmode)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIMultiplyBlendMode)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc multiplyBlendMode(backgroundImage: CIImage?) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.multiplyBlendMode() // CIMultiplyBlendMode\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Multiply\n\t///\n\t/// Multiplies the color component of two input images and creates an output image using the multiplied values. This filter is typically used to add a spotlight or similar lighting effect to an image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228371-multiplycompositing)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIMultiplyCompositing)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc multiplyCompositing(backgroundImage: CIImage?) 
-> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.multiplyCompositing() // CIMultiplyCompositing\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Nine Part Stretched\n\t///\n\t/// Distorts an image by stretching an image based on two input breakpoints.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401871-ninepartstretched)\n\t///\n\t/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - breakpoint0: Lower left corner of image to retain before stretching begins.\n\t///   - breakpoint1: Upper right corner of image to retain after stretching ends.\n\t///   - growAmount: Vector indicating how much image should grow in pixels in both dimensions.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc ninePartStretched(breakpoint0: CGPoint,\n\t\t\t\t\t\t   breakpoint1: CGPoint,\n\t\t\t\t\t\t   growAmount: CGPoint = .init(x: 100, y: 100),\n\t\t\t\t\t\t   active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.ninePartStretched() // CINinePartStretched\n\t\tfilter.inputImage = self\n\t\tfilter.breakpoint0 = breakpoint0\n\t\tfilter.breakpoint1 = breakpoint1\n\t\tfilter.growAmount = growAmount\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Nine Part Tiled\n\t///\n\t/// Distorts an image by tiling an image based on two input breakpoints.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401872-nineparttiled)\n\t///\n\t/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - breakpoint0: Lower left corner of image to retain before tiling begins.\n\t///   - breakpoint1: Upper right corner of image to retain after tiling ends.\n\t///   - growAmount: Vector indicating how much image should grow in pixels in both dimensions.\n\t///   - flipYTiles: Indicates that Y-Axis flip should occur.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc ninePartTiled(breakpoint0: CGPoint,\n\t\t\t\t\t   breakpoint1: CGPoint,\n\t\t\t\t\t   growAmount: CGPoint = .init(x: 100, y: 100),\n\t\t\t\t\t   flipYTiles: Bool = true,\n\t\t\t\t\t   active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.ninePartTiled() // CINinePartTiled\n\t\tfilter.inputImage = self\n\t\tfilter.breakpoint0 = breakpoint0\n\t\tfilter.breakpoint1 = breakpoint1\n\t\tfilter.growAmount = growAmount\n\t\tfilter.flipYTiles = flipYTiles\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Noise Reduction\n\t///\n\t/// Reduces noise using a threshold value to define what is considered noise. Small changes in luminance below that value are considered noise and get a noise reduction treatment, which is a local blur. 
Changes above the threshold value are considered edges, so they are sharpened.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228372-noisereduction)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CINoiseReduction)\n\t///\n\t/// Categories: Blur, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - noiseLevel: The amount of noise reduction. The larger the value, the more noise reduction. (0...)\n\t///   - sharpness: The sharpness of the final image. The larger the value, the sharper the result. (0...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc noiseReduction(noiseLevel: Float = 0.02, sharpness: Float = 0.4) -> CIImage {\n\t\tguard noiseLevel != 0 || sharpness != 0 else { return self }\n\n\t\tlet filter = CIFilter.noiseReduction() // CINoiseReduction\n\t\tfilter.inputImage = self\n\t\tfilter.noiseLevel = noiseLevel\n\t\tfilter.sharpness = sharpness\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Op Tile\n\t///\n\t/// Segments an image, applying any specified scaling and rotation, and then assembles the image again to give an op art appearance.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228373-optile)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIOpTile)\n\t///\n\t/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - scale: The scale determines the number of tiles in the effect. (0...)\n\t///   - angle: The angle in radians of a tile.\n\t///   - width: The width of a tile. 
(0...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc opTile(center: CGPoint, scale: Float = 2.8, angle: Float = 0, width: Float) -> CIImage {\n\t\tguard scale != 1 || angle != 0 else { return self }\n\n\t\tlet filter = CIFilter.opTile() // CIOpTile\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.scale = scale\n\t\tfilter.angle = angle\n\t\tfilter.width = width\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Overlay Blend Mode\n\t///\n\t/// Either multiplies or screens the source image samples with the background image samples, depending on the background color. The result is to overlay the existing image samples while preserving the highlights and shadows of the background. The background color mixes with the source image to reflect the lightness or darkness of the background.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228374-overlayblendmode)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIOverlayBlendMode)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc overlayBlendMode(backgroundImage: CIImage?) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.overlayBlendMode() // CIOverlayBlendMode\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Page Curl\n\t///\n\t/// Transitions from one image to another by simulating a curling page, revealing the new image as the page curls.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228375-pagecurltransition)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPageCurlTransition)\n\t///\n\t/// Categories: Transition, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - targetImage: The target image for a transition.\n\t///   - backsideImage: The image that appears on the back of the source image, as the page curls to reveal the target image.\n\t///   - shadingImage: An image that looks like a shaded sphere enclosed in a square image.\n\t///   - extent: The extent of the effect.\n\t///   - time: The parametric time of the transition. This value drives the transition from start (at time 0) to end (at time 1). (0...1)\n\t///   - angle: The angle in radians of the curling page.\n\t///   - radius: The radius of the curl. (0.01...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc pageCurlTransition(targetImage: CIImage,\n\t\t\t\t\t\t\tbacksideImage: CIImage,\n\t\t\t\t\t\t\tshadingImage: CIImage,\n\t\t\t\t\t\t\textent: CGRect,\n\t\t\t\t\t\t\ttime: Float,\n\t\t\t\t\t\t\tangle: Float = 0,\n\t\t\t\t\t\t\tradius: Float) -> CIImage {\n\t\tguard angle != 0 else { return self }\n\n\t\tlet filter = CIFilter.pageCurlTransition() // CIPageCurlTransition\n\t\tfilter.inputImage = self\n\t\tfilter.targetImage = targetImage\n\t\tfilter.backsideImage = backsideImage\n\t\tfilter.shadingImage = shadingImage\n\t\tfilter.extent = extent\n\t\tfilter.time = time\n\t\tfilter.angle = angle\n\t\tfilter.radius = radius\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Page Curl With Shadow\n\t///\n\t/// Transitions from one image to another by simulating a curling page, revealing the new image as the page curls.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228376-pagecurlwithshadowtransition)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPageCurlWithShadowTransition)\n\t///\n\t/// Categories: Transition, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - targetImage: The target image for a transition.\n\t///   - backsideImage: The image that appears on the back of the source image, as the page curls to reveal the target image.\n\t///   - extent: The extent of the effect.\n\t///   - time: The parametric time of the transition. This value drives the transition from start (at time 0) to end (at time 1). (0...1)\n\t///   - angle: The angle in radians of the curling page.\n\t///   - radius: The radius of the curl. (0.01...)\n\t///   - shadowSize: The maximum size in pixels of the shadow. (0...1)\n\t///   - shadowAmount: The strength of the shadow. 
(0...1)\n\t///   - shadowExtent: The rectagular portion of input image that will cast a shadow.\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc pageCurlWithShadowTransition(targetImage: CIImage,\n\t\t\t\t\t\t\t\t\t  backsideImage: CIImage,\n\t\t\t\t\t\t\t\t\t  extent: CGRect = .zero,\n\t\t\t\t\t\t\t\t\t  time: Float,\n\t\t\t\t\t\t\t\t\t  angle: Float = 0,\n\t\t\t\t\t\t\t\t\t  radius: Float,\n\t\t\t\t\t\t\t\t\t  shadowSize: Float,\n\t\t\t\t\t\t\t\t\t  shadowAmount: Float,\n\t\t\t\t\t\t\t\t\t  shadowExtent: CGRect = .zero) -> CIImage {\n\t\tguard angle != 0 else { return self }\n\n\t\tlet filter = CIFilter.pageCurlWithShadowTransition() // CIPageCurlWithShadowTransition\n\t\tfilter.inputImage = self\n\t\tfilter.targetImage = targetImage\n\t\tfilter.backsideImage = backsideImage\n\t\tfilter.extent = extent\n\t\tfilter.time = time\n\t\tfilter.angle = angle\n\t\tfilter.radius = radius\n\t\tfilter.shadowSize = shadowSize\n\t\tfilter.shadowAmount = shadowAmount\n\t\tfilter.shadowExtent = shadowExtent\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Palette Centroid\n\t///\n\t/// Calculate the mean (x,y) image coordinates of a color palette.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228377-palettecentroid)\n\t///\n\t/// Categories: Color Effect, Video, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - paletteImage: The input color palette, obtained using “CIKMeans“ filter.\n\t///   - perceptual: Specifies whether the color palette should be applied in a perceptual color space.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc paletteCentroid(paletteImage: CIImage, perceptual: Bool = false, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.paletteCentroid() // CIPaletteCentroid\n\t\tfilter.inputImage = self\n\t\tfilter.paletteImage = paletteImage\n\t\tfilter.perceptual = perceptual\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Palettize\n\t///\n\t/// Paint an image from a color palette obtained using “CIKMeans“.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228378-palettize)\n\t///\n\t/// Categories: Color Effect, Video, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - paletteImage: The input color palette, obtained using “CIKMeans“ filter.\n\t///   - perceptual: Specifies whether the color palette should be applied in a perceptual color space.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc palettize(paletteImage: CIImage, perceptual: Bool = false, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.palettize() // CIPalettize\n\t\tfilter.inputImage = self\n\t\tfilter.paletteImage = paletteImage\n\t\tfilter.perceptual = perceptual\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Parallelogram Tile\n\t///\n\t/// Warps an image by reflecting it in a parallelogram, and then tiles the result.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228379-parallelogramtile)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIParallelogramTile)\n\t///\n\t/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - angle: The angle in radians of the tiled pattern.\n\t///   - acuteAngle: The primary angle for the repeating parallelogram tile. Small values create thin diamond tiles, and higher values create fatter parallelogram tiles.\n\t///   - width: The width of a tile. 
(0...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc parallelogramTile(center: CGPoint, angle: Float = 0, acuteAngle: Float = .pi/2, width: Float) -> CIImage {\n\t\tguard angle != 0 || acuteAngle != .pi/2 else { return self }\n\n\t\tlet filter = CIFilter.parallelogramTile() // CIParallelogramTile\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.angle = angle\n\t\tfilter.acuteAngle = acuteAngle\n\t\tfilter.width = width\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Person Segmentation\n\t///\n\t/// Returns a segmentation mask that is red in the portions of an image that are likely to be persons. The returned image may have a different size and aspect ratio from the input image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401873-personsegmentation)\n\t///\n\t/// Categories: Video, Still Image, Built-In, Stylize\n\t///\n\t///\n\t/// - Parameters:\n\t///   - qualityLevel: Determines the size and quality of the resulting segmentation mask. The value can be a number where 0 is accurate, 1 is balanced, and 2 is fast. (0...2)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 15, macOS 12.0, *)\n\tfunc personSegmentation(qualityLevel: Int, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.personSegmentation() // CIPersonSegmentation\n\t\tfilter.inputImage = self\n\t\tfilter.qualityLevel = qualityLevel\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Perspective Correction\n\t///\n\t/// Applies a perspective correction, transforming an arbitrary quadrilateral region in the source image to a rectangular output image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228380-perspectivecorrection)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPerspectiveCorrection)\n\t///\n\t/// Categories: Geometry Adjustment, Still Image, Video, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - topLeft: The top left coordinate to be perspective corrected.\n\t///   - topRight: The top right coordinate to be perspective corrected.\n\t///   - bottomRight: The bottom right coordinate to be perspective corrected.\n\t///   - bottomLeft: The bottom left coordinate to be perspective corrected.\n\t///   - crop: A rectangle that specifies the extent of the corrected image\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc perspectiveCorrection(topLeft: CGPoint,\n\t\t\t\t\t\t\t   topRight: CGPoint,\n\t\t\t\t\t\t\t   bottomRight: CGPoint,\n\t\t\t\t\t\t\t   bottomLeft: CGPoint,\n\t\t\t\t\t\t\t   crop: Bool = true,\n\t\t\t\t\t\t\t   active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.perspectiveCorrection() // CIPerspectiveCorrection\n\t\tfilter.inputImage = self\n\t\tfilter.topLeft = topLeft\n\t\tfilter.topRight = topRight\n\t\tfilter.bottomRight = bottomRight\n\t\tfilter.bottomLeft = bottomLeft\n\t\tfilter.crop = crop\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Perspective Rotate\n\t///\n\t/// Apply a homogenous rotation transform to an image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3325512-perspectiverotate)\n\t///\n\t/// Categories: Geometry Adjustment, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - focalLength: 35mm equivalent focal length of the input image.\n\t///   - pitch: Pitch angle in radians.\n\t///   - yaw: Yaw angle in radians.\n\t///   - roll: Roll angle in radians.\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc perspectiveRotate(focalLength: Float = 28, pitch: Float = 0, yaw: Float = 0, roll: Float = 0) -> CIImage {\n\t\tguard pitch != 0 || yaw != 0 || roll != 0 else { return self }\n\n\t\tlet filter = CIFilter.perspectiveRotate() // CIPerspectiveRotate\n\t\tfilter.inputImage = self\n\t\tfilter.focalLength = focalLength\n\t\tfilter.pitch = pitch\n\t\tfilter.yaw = yaw\n\t\tfilter.roll = roll\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Perspective Tile\n\t///\n\t/// Applies a perspective transform to an image and then tiles the result.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228381-perspectivetile)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPerspectiveTile)\n\t///\n\t/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - topLeft: The top left coordinate of a tile.\n\t///   - topRight: The top right coordinate of a tile.\n\t///   - bottomRight: The bottom right coordinate of a tile.\n\t///   - bottomLeft: The bottom left coordinate of a tile.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc perspectiveTile(topLeft: CGPoint,\n\t\t\t\t\t\t topRight: CGPoint,\n\t\t\t\t\t\t bottomRight: CGPoint,\n\t\t\t\t\t\t bottomLeft: CGPoint,\n\t\t\t\t\t\t active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.perspectiveTile() // CIPerspectiveTile\n\t\tfilter.inputImage = self\n\t\tfilter.topLeft = topLeft\n\t\tfilter.topRight = topRight\n\t\tfilter.bottomRight = bottomRight\n\t\tfilter.bottomLeft = bottomLeft\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Perspective Transform\n\t///\n\t/// Alters the geometry of an image to simulate the observer changing viewing position. 
You can use the perspective filter to skew an image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228382-perspectivetransform)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPerspectiveTransform)\n\t///\n\t/// Categories: Geometry Adjustment, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - topLeft: The top left coordinate to map the image to.\n\t///   - topRight: The top right coordinate to map the image to.\n\t///   - bottomRight: The bottom right coordinate to map the image to.\n\t///   - bottomLeft: The bottom left coordinate to map the image to.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc perspectiveTransform(topLeft: CGPoint,\n\t\t\t\t\t\t\t  topRight: CGPoint,\n\t\t\t\t\t\t\t  bottomRight: CGPoint,\n\t\t\t\t\t\t\t  bottomLeft: CGPoint,\n\t\t\t\t\t\t\t  active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.perspectiveTransform() // CIPerspectiveTransform\n\t\tfilter.inputImage = self\n\t\tfilter.topLeft = topLeft\n\t\tfilter.topRight = topRight\n\t\tfilter.bottomRight = bottomRight\n\t\tfilter.bottomLeft = bottomLeft\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Perspective Transform with Extent\n\t///\n\t/// Alters the geometry of an image to simulate the observer changing viewing position. 
You can use the perspective filter to skew an image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228383-perspectivetransformwithextent)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPerspectiveTransformWithExtent)\n\t///\n\t/// Categories: Geometry Adjustment, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - extent: A rectangle that defines the extent of the effect.\n\t///   - topLeft: The top left coordinate to map the image to.\n\t///   - topRight: The top right coordinate to map the image to.\n\t///   - bottomRight: The bottom right coordinate to map the image to.\n\t///   - bottomLeft: The bottom left coordinate to map the image to.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc perspectiveTransformWithExtent(extent: CGRect,\n\t\t\t\t\t\t\t\t\t\ttopLeft: CGPoint,\n\t\t\t\t\t\t\t\t\t\ttopRight: CGPoint,\n\t\t\t\t\t\t\t\t\t\tbottomRight: CGPoint,\n\t\t\t\t\t\t\t\t\t\tbottomLeft: CGPoint,\n\t\t\t\t\t\t\t\t\t\tactive: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.perspectiveTransformWithExtent() // CIPerspectiveTransformWithExtent\n\t\tfilter.inputImage = self\n\t\tfilter.extent = extent\n\t\tfilter.topLeft = topLeft\n\t\tfilter.topRight = topRight\n\t\tfilter.bottomRight = bottomRight\n\t\tfilter.bottomLeft = bottomLeft\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Photo Effect Chrome\n\t///\n\t/// Applies a preconfigured set of effects that imitate vintage photography film with exaggerated color.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228384-photoeffectchrome)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPhotoEffectChrome)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In, CICategoryXMPSerializable\n\t///\n\t///\n\t/// - Parameters:\n\t///   - extrapolate: If true, then the color effect will be extrapolated if the input image contains RGB component values outside the range 0.0 to 1.0.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc photoEffectChrome(extrapolate: Bool = false, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.photoEffectChrome() // CIPhotoEffectChrome\n\t\tfilter.inputImage = self\n\t\tfilter.extrapolate = extrapolate\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Photo Effect Fade\n\t///\n\t/// Applies a preconfigured set of effects that imitate vintage photography film with diminished color.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228385-photoeffectfade)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPhotoEffectFade)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In, CICategoryXMPSerializable\n\t///\n\t///\n\t/// - Parameters:\n\t///   - extrapolate: If true, then the color effect will be extrapolated if the input image contains RGB component values outside the range 0.0 to 1.0.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc photoEffectFade(extrapolate: Bool = false, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.photoEffectFade() // CIPhotoEffectFade\n\t\tfilter.inputImage = self\n\t\tfilter.extrapolate = extrapolate\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Photo Effect Instant\n\t///\n\t/// Applies a preconfigured set of effects that imitate vintage photography film with distorted colors.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228386-photoeffectinstant)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPhotoEffectInstant)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In, CICategoryXMPSerializable\n\t///\n\t///\n\t/// - Parameters:\n\t///   - extrapolate: If true, then the color effect will be extrapolated if the input image contains RGB component values outside the range 0.0 to 1.0.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc photoEffectInstant(extrapolate: Bool = false, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.photoEffectInstant() // CIPhotoEffectInstant\n\t\tfilter.inputImage = self\n\t\tfilter.extrapolate = extrapolate\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Photo Effect Mono\n\t///\n\t/// Applies a preconfigured set of effects that imitate black-and-white photography film with low contrast.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228387-photoeffectmono)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPhotoEffectMono)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In, CICategoryXMPSerializable\n\t///\n\t///\n\t/// - Parameters:\n\t///   - extrapolate: If true, then the color effect will be extrapolated if the input image contains RGB component values outside the range 0.0 to 1.0.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc photoEffectMono(extrapolate: Bool = false, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.photoEffectMono() // CIPhotoEffectMono\n\t\tfilter.inputImage = self\n\t\tfilter.extrapolate = extrapolate\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Photo Effect Noir\n\t///\n\t/// Applies a preconfigured set of effects that imitate black-and-white photography film with exaggerated contrast.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228388-photoeffectnoir)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPhotoEffectNoir)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In, CICategoryXMPSerializable\n\t///\n\t///\n\t/// - Parameters:\n\t///   - extrapolate: If true, then the color effect will be extrapolated if the input image contains RGB component values outside the range 0.0 to 1.0.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc photoEffectNoir(extrapolate: Bool = false, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.photoEffectNoir() // CIPhotoEffectNoir\n\t\tfilter.inputImage = self\n\t\tfilter.extrapolate = extrapolate\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Photo Effect Process\n\t///\n\t/// Applies a preconfigured set of effects that imitate vintage photography film with emphasized cool colors.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228389-photoeffectprocess)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPhotoEffectProcess)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In, CICategoryXMPSerializable\n\t///\n\t///\n\t/// - Parameters:\n\t///   - extrapolate: If true, then the color effect will be extrapolated if the input image contains RGB component values outside the range 0.0 to 1.0.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc photoEffectProcess(extrapolate: Bool = false, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.photoEffectProcess() // CIPhotoEffectProcess\n\t\tfilter.inputImage = self\n\t\tfilter.extrapolate = extrapolate\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Photo Effect Tonal\n\t///\n\t/// Applies a preconfigured set of effects that imitate black-and-white photography film without significantly altering contrast.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228390-photoeffecttonal)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPhotoEffectTonal)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In, CICategoryXMPSerializable\n\t///\n\t///\n\t/// - Parameters:\n\t///   - extrapolate: If true, then the color effect will be extrapolated if the input image contains RGB component values outside the range 0.0 to 1.0.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc photoEffectTonal(extrapolate: Bool = false, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.photoEffectTonal() // CIPhotoEffectTonal\n\t\tfilter.inputImage = self\n\t\tfilter.extrapolate = extrapolate\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Photo Effect Transfer\n\t///\n\t/// Applies a preconfigured set of effects that imitate vintage photography film with emphasized warm colors.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228391-photoeffecttransfer)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPhotoEffectTransfer)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In, CICategoryXMPSerializable\n\t///\n\t///\n\t/// - Parameters:\n\t///   - extrapolate: If true, then the color effect will be extrapolated if the input image contains RGB component values outside the range 0.0 to 1.0.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc photoEffectTransfer(extrapolate: Bool = false, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.photoEffectTransfer() // CIPhotoEffectTransfer\n\t\tfilter.inputImage = self\n\t\tfilter.extrapolate = extrapolate\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Pin Light Blend Mode\n\t///\n\t/// Unpremultiplies the source and background image sample color, combines them according to the relative difference, and then blends the result with the background according to the PDF basic compositing formula. Source image values that are brighter than the destination will produce an output that is lighter than the destination. 
Source image values that are darker than the destination will produce an output that is darker than the destination.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228392-pinlightblendmode)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPinLightBlendMode)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc pinLightBlendMode(backgroundImage: CIImage?) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.pinLightBlendMode() // CIPinLightBlendMode\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Pinch Distortion\n\t///\n\t/// Creates a rectangular-shaped area that pinches source pixels inward, distorting those pixels closest to the rectangle the most.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401874-pinchdistortion)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPinchDistortion)\n\t///\n\t/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - radius: The radius determines how many pixels are used to create the distortion. The larger the radius, the wider the extent of the distortion. (0...)\n\t///   - scale: The amount of pinching. A value of 0.0 has no effect. A value of 1.0 is the maximum pinch. 
(0...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc pinchDistortion(center: CGPoint, radius: Float, scale: Float = 0.5) -> CIImage {\n\t\tguard radius != 0 || scale != 0 else { return self }\n\n\t\tlet filter = CIFilter.pinchDistortion() // CIPinchDistortion\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.radius = radius\n\t\tfilter.scale = scale\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Pixelate\n\t///\n\t/// Makes an image blocky by mapping the image to colored squares whose color is defined by the replaced pixels.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228393-pixellate)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPixellate)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - scale: The scale determines the size of the squares. Larger values result in larger squares. (1...)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc pixellate(center: CGPoint, scale: Float, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.pixellate() // CIPixellate\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.scale = scale\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Pointillize\n\t///\n\t/// Renders the source image in a pointillistic style.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228394-pointillize)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPointillize)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - radius: The radius of the circles in the resulting pattern. (1...)\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc pointillize(radius: Float, center: CGPoint, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.pointillize() // CIPointillize\n\t\tfilter.inputImage = self\n\t\tfilter.radius = radius\n\t\tfilter.center = center\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Ripple\n\t///\n\t/// Transitions from one image to another by creating a circular wave that expands from the center point, revealing the new image in the wake of the wave.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228397-rippletransition)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIRippleTransition)\n\t///\n\t/// Categories: Transition, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - targetImage: The target image for a transition.\n\t///   - shadingImage: An image that looks like a shaded sphere enclosed in a square image.\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - extent: A rectangle that defines the extent of the effect.\n\t///   - time: The parametric time of the transition. 
This value drives the transition from start (at time 0) to end (at time 1). (0...1)\n\t///   - width: The width of the ripple. (1...)\n\t///   - scale: A value that determines whether the ripple starts as a bulge (higher value) or a dimple (lower value). (-50...)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc rippleTransition(targetImage: CIImage,\n\t\t\t\t\t\t  shadingImage: CIImage,\n\t\t\t\t\t\t  center: CGPoint,\n\t\t\t\t\t\t  extent: CGRect,\n\t\t\t\t\t\t  time: Float,\n\t\t\t\t\t\t  width: Float,\n\t\t\t\t\t\t  scale: Float = 50,\n\t\t\t\t\t\t  active: Bool = true) -> CIImage {\n\t\t// A scale of 0 only flattens the ripple; the time-driven transition still runs,\n\t\t// so it is not an identity value. `active` is the explicit bypass instead.\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.rippleTransition() // CIRippleTransition\n\t\tfilter.inputImage = self\n\t\tfilter.targetImage = targetImage\n\t\tfilter.shadingImage = shadingImage\n\t\tfilter.center = center\n\t\tfilter.extent = extent\n\t\tfilter.time = time\n\t\tfilter.width = width\n\t\tfilter.scale = scale\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Row Average\n\t///\n\t/// Calculates the average color for each row of the specified area in an image, returning the result in a 1D image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3547123-rowaverage)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIRowAverage)\n\t///\n\t/// Categories: Reduction, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - extent: A rectangle that specifies the subregion of the image that you want to process.\n\t///   - active: should this filter be applied\n\t/// - Returns: a 1-pixel high image that contains the average color for each scan row, or identity if `active` is false\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc rowAverage(extent: CGRect, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.rowAverage() // CIRowAverage\n\t\tfilter.inputImage = self\n\t\tfilter.extent = extent\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// sRGB Tone Curve to Linear\n\t///\n\t/// Maps color intensity from the sRGB color space to a linear gamma curve.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228398-srgbtonecurvetolinear)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISRGBToneCurveToLinear)\n\t///\n\t/// Categories: Color Adjustment, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc sRGBToneCurveToLinear(active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.sRGBToneCurveToLinear() // CISRGBToneCurveToLinear\n\t\tfilter.inputImage = self\n\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Saliency Map Filter\n\t///\n\t/// Generates output image as a saliency map of the input image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228399-saliencymap)\n\t///\n\t/// Categories: Video, Still Image, Built-In, Stylize\n\t///\n\t///\n\t/// - Parameters:\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc saliencyMap(active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.saliencyMap() // CISaliencyMapFilter\n\t\tfilter.inputImage = self\n\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t// ℹ️ CISampleNearest already has a CIImage method: func samplingNearest() -> CIImage\n\t// https://developer.apple.com/documentation/coreimage/ciimage/2867429-samplingnearest\n\n\n\t/// Saturation Blend Mode\n\t///\n\t/// Uses the luminance and hue values of the background with the saturation of the source image. Areas of the background that have no saturation (that is, pure gray areas) do not produce a change.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228400-saturationblendmode)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISaturationBlendMode)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc saturationBlendMode(backgroundImage: CIImage?) 
-> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.saturationBlendMode() // CISaturationBlendMode\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Screen Blend Mode\n\t///\n\t/// Multiplies the inverse of the source image samples with the inverse of the background image samples. This results in colors that are at least as light as either of the two contributing sample colors.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228401-screenblendmode)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIScreenBlendMode)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc screenBlendMode(backgroundImage: CIImage?) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.screenBlendMode() // CIScreenBlendMode\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Sepia Tone\n\t///\n\t/// Maps the colors of an image to various shades of brown.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228402-sepiatone)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISepiaTone)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, High Dynamic Range, Built-In, CICategoryXMPSerializable\n\t///\n\t///\n\t/// - Parameters:\n\t///   - intensity: The intensity of the sepia effect. A value of 1.0 creates a monochrome sepia image. A value of 0.0 has no effect on the image. 
(0...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc sepiaTone(intensity: Float = 1) -> CIImage {\n\t\tguard intensity != 0 else { return self }\n\n\t\tlet filter = CIFilter.sepiaTone() // CISepiaTone\n\t\tfilter.inputImage = self\n\t\tfilter.intensity = intensity\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Shaded Material\n\t///\n\t/// Produces a shaded image from a height field. The height field is defined to have greater heights with lighter shades, and lesser heights (lower areas) with darker shades. You can combine this filter with the “Height Field From Mask” filter to produce quick shadings of masks, such as text.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228403-shadedmaterial)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIShadedMaterial)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - shadingImage: The image to use as the height field. The resulting image has greater heights with lighter shades, and lesser heights (lower areas) with darker shades.\n\t///   - scale: The scale of the effect. The higher the value, the more dramatic the effect. (0...)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc shadedMaterial(shadingImage: CIImage, scale: Float, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.shadedMaterial() // CIShadedMaterial\n\t\tfilter.inputImage = self\n\t\tfilter.shadingImage = shadingImage\n\t\tfilter.scale = scale\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Sharpen Luminance\n\t///\n\t/// Increases image detail by sharpening. 
It operates on the luminance of the image; the chrominance of the pixels remains unaffected.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228404-sharpenluminance)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISharpenLuminance)\n\t///\n\t/// Categories: Sharpen, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - sharpness: The amount of sharpening to apply. Larger values are sharper.\n\t///   - radius: The distance from the center of the effect.\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc sharpenLuminance(sharpness: Float = 0.4, radius: Float = 1.69) -> CIImage {\n\t\tguard sharpness != 0 || radius != 0 else { return self }\n\n\t\tlet filter = CIFilter.sharpenLuminance() // CISharpenLuminance\n\t\tfilter.inputImage = self\n\t\tfilter.sharpness = sharpness\n\t\tfilter.radius = radius\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Sixfold Reflected Tile\n\t///\n\t/// Produces a tiled image from a source image by applying a 6-way reflected symmetry.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228405-sixfoldreflectedtile)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISixfoldReflectedTile)\n\t///\n\t/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - angle: The angle in radians of the tiled pattern.\n\t///   - width: The width of a tile. 
(0...)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc sixfoldReflectedTile(center: CGPoint, angle: Float = 0, width: Float, active: Bool = true) -> CIImage {\n\t\t// An angle of 0 is a valid tiling orientation, not an identity; `active` is the explicit bypass.\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.sixfoldReflectedTile() // CISixfoldReflectedTile\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.angle = angle\n\t\tfilter.width = width\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Sixfold Rotated Tile\n\t///\n\t/// Produces a tiled image from a source image by rotating the source image at increments of 60 degrees.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228406-sixfoldrotatedtile)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISixfoldRotatedTile)\n\t///\n\t/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - angle: The angle in radians of the tiled pattern.\n\t///   - width: The width of a tile. (0...)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc sixfoldRotatedTile(center: CGPoint, angle: Float = 0, width: Float, active: Bool = true) -> CIImage {\n\t\t// An angle of 0 is a valid tiling orientation, not an identity; `active` is the explicit bypass.\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.sixfoldRotatedTile() // CISixfoldRotatedTile\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.angle = angle\n\t\tfilter.width = width\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Sobel Gradients\n\t///\n\t/// Applies multichannel 3 by 3 Sobel gradient filter to an image. The resulting image has maximum horizontal gradient in the red channel and the maximum vertical gradient in the green channel. 
The gradient values can be positive or negative.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401876-sobelgradients)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 17, macOS 14.0, *)\n\tfunc sobelGradients(active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.sobelGradients() // CISobelGradients\n\t\tfilter.inputImage = self\n\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Soft Light Blend Mode\n\t///\n\t/// Either darkens or lightens colors, depending on the source image sample color. If the source image sample color is lighter than 50% gray, the background is lightened, similar to dodging. If the source image sample color is darker than 50% gray, the background is darkened, similar to burning. If the source image sample color is equal to 50% gray, the background is not changed. Image samples that are equal to pure black or pure white produce darker or lighter areas, but do not result in pure black or white. The overall effect is similar to what you would achieve by shining a diffuse spotlight on the source image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228408-softlightblendmode)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISoftLightBlendMode)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc softLightBlendMode(backgroundImage: CIImage?) 
-> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.softLightBlendMode() // CISoftLightBlendMode\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Source Atop\n\t///\n\t/// Places the source image over the background image, then uses the luminance of the background image to determine what to show. The composite shows the background image and only those portions of the source image that are over visible parts of the background.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228409-sourceatopcompositing)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISourceAtopCompositing)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc sourceAtopCompositing(backgroundImage: CIImage?) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.sourceAtopCompositing() // CISourceAtopCompositing\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Source In\n\t///\n\t/// Uses the background image to define what to leave in the input image, effectively cropping the input image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228410-sourceincompositing)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISourceInCompositing)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc sourceInCompositing(backgroundImage: CIImage?) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.sourceInCompositing() // CISourceInCompositing\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Source Out\n\t///\n\t/// Uses the background image to define what to take out of the input image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228411-sourceoutcompositing)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISourceOutCompositing)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc sourceOutCompositing(backgroundImage: CIImage?) 
-> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.sourceOutCompositing() // CISourceOutCompositing\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Source Over\n\t///\n\t/// Places the input image over the input background image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228412-sourceovercompositing)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISourceOverCompositing)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc sourceOverCompositing(backgroundImage: CIImage?) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.sourceOverCompositing() // CISourceOverCompositing\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Spot Color\n\t///\n\t/// Replaces one or more color ranges with spot colors.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228413-spotcolor)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISpotColor)\n\t///\n\t/// Categories: Built-In, Still Image, Video, High Dynamic Range, Stylize\n\t///\n\t///\n\t/// - Parameters:\n\t///   - centerColor1: The center value of the first color range to replace.\n\t///   - replacementColor1: A replacement color for the first color range.\n\t///   - closeness1: A value that indicates how close the first color must match before it is replaced. 
(0...)\n\t///   - contrast1: The contrast of the first replacement color. (0...)\n\t///   - centerColor2: The center value of the second color range to replace.\n\t///   - replacementColor2: A replacement color for the second color range.\n\t///   - closeness2: A value that indicates how close the second color must match before it is replaced. (0...)\n\t///   - contrast2: The contrast of the second replacement color. (0...)\n\t///   - centerColor3: The center value of the third color range to replace.\n\t///   - replacementColor3: A replacement color for the third color range.\n\t///   - closeness3: A value that indicates how close the third color must match before it is replaced. (0...)\n\t///   - contrast3: The contrast of the third replacement color. (0...)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc spotColor(centerColor1: CIColor,\n\t\t\t\t   replacementColor1: CIColor,\n\t\t\t\t   closeness1: Float = 0.22,\n\t\t\t\t   contrast1: Float = 0.98,\n\t\t\t\t   centerColor2: CIColor,\n\t\t\t\t   replacementColor2: CIColor,\n\t\t\t\t   closeness2: Float = 0.15,\n\t\t\t\t   contrast2: Float = 0.98,\n\t\t\t\t   centerColor3: CIColor,\n\t\t\t\t   replacementColor3: CIColor,\n\t\t\t\t   closeness3: Float = 0.5,\n\t\t\t\t   contrast3: Float = 0.99,\n\t\t\t\t   active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.spotColor() // CISpotColor\n\t\tfilter.inputImage = self\n\t\tfilter.centerColor1 = centerColor1\n\t\tfilter.replacementColor1 = replacementColor1\n\t\tfilter.closeness1 = closeness1\n\t\tfilter.contrast1 = contrast1\n\t\tfilter.centerColor2 = centerColor2\n\t\tfilter.replacementColor2 = replacementColor2\n\t\tfilter.closeness2 = closeness2\n\t\tfilter.contrast2 = contrast2\n\t\tfilter.centerColor3 = centerColor3\n\t\tfilter.replacementColor3 = replacementColor3\n\t\tfilter.closeness3 = 
closeness3\n\t\tfilter.contrast3 = contrast3\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Spot Light\n\t///\n\t/// Applies a directional spotlight effect to an image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228414-spotlight)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISpotLight)\n\t///\n\t/// Categories: Stylize, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - lightPosition: The x and y position of the spotlight.\n\t///   - lightPointsAt: The x and y position that the spotlight points at.\n\t///   - brightness: The brightness of the spotlight. (0...)\n\t///   - concentration: The spotlight size. The smaller the value, the more tightly focused the light beam. (0.001...)\n\t///   - color: The color of the spotlight.\n\t/// - Returns: processed new `CIImage`\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc spotLight(lightPosition: CIVector,\n\t\t\t\t   lightPointsAt: CIVector,\n\t\t\t\t   brightness: Float,\n\t\t\t\t   concentration: Float = 0.1,\n\t\t\t\t   color: CIColor = CIColor.white) -> CIImage {\n\t\t// No concentration value makes a spotlight an identity operation, so the filter is always applied.\n\t\t// (A previous guard compared against 20, silently skipping the filter for that valid setting.)\n\t\tlet filter = CIFilter.spotLight() // CISpotLight\n\t\tfilter.inputImage = self\n\t\tfilter.lightPosition = lightPosition\n\t\tfilter.lightPointsAt = lightPointsAt\n\t\tfilter.brightness = brightness\n\t\tfilter.concentration = concentration\n\t\tfilter.color = color\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Straighten\n\t///\n\t/// Rotates a source image by the specified angle in radians. 
The image is then scaled and cropped so that the rotated image fits the extent of the input image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228416-straighten)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIStraightenFilter)\n\t///\n\t/// Categories: Geometry Adjustment, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - angle: The angle in radians of the effect.\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc straighten(angle: Float = 0) -> CIImage {\n\t\tguard angle != 0 else { return self }\n\n\t\tlet filter = CIFilter.straighten() // CIStraightenFilter\n\t\tfilter.inputImage = self\n\t\tfilter.angle = angle\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Stretch Crop\n\t///\n\t/// Distorts an image by stretching and or cropping it to fit a target size.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401877-stretchcrop)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIStretchCrop)\n\t///\n\t/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - size: The size in pixels of the output image.\n\t///   - cropAmount: Determines if and how much cropping should be used to achieve the target size. If value is 0 then only stretching is used. If 1 then only cropping is used. (0...1)\n\t///   - centerStretchAmount: Determine how much the center of the image is stretched if stretching is used. If value is 0 then the center of the image maintains the original aspect ratio. If 1 then the image is stretched uniformly. 
(0...1)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc stretchCrop(size: CGPoint,\n\t\t\t\t\t cropAmount: Float = 0.25,\n\t\t\t\t\t centerStretchAmount: Float = 0.25,\n\t\t\t\t\t active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.stretchCrop() // CIStretchCrop\n\t\tfilter.inputImage = self\n\t\tfilter.size = size\n\t\tfilter.cropAmount = cropAmount\n\t\tfilter.centerStretchAmount = centerStretchAmount\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Subtract Blend Mode\n\t///\n\t/// Unpremultiplies the source and background image sample colors, subtracts the source from the background, and then blends the result with the background according to the PDF basic compositing formula. Source image values that are black produces output that is the same as the background. Source image values that are non-black darken the background color values.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228418-subtractblendmode)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISubtractBlendMode)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc subtractBlendMode(backgroundImage: CIImage?) -> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.subtractBlendMode() // CISubtractBlendMode\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Swipe\n\t///\n\t/// Transitions from one image to another by simulating a swiping action.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228420-swipetransition)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISwipeTransition)\n\t///\n\t/// Categories: Transition, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - targetImage: The target image for a transition.\n\t///   - extent: The extent of the effect.\n\t///   - color: The color of the swipe.\n\t///   - time: The parametric time of the transition. This value drives the transition from start (at time 0) to end (at time 1). (0...1)\n\t///   - angle: The angle in radians of the swipe.\n\t///   - width: The width of the swipe. (0.1...)\n\t///   - opacity: The opacity of the swipe. (0...)\n\t/// - Returns: processed new `CIImage`\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc swipeTransition(targetImage: CIImage,\n\t\t\t\t\t\t extent: CGRect,\n\t\t\t\t\t\t color: CIColor = CIColor.white,\n\t\t\t\t\t\t time: Float,\n\t\t\t\t\t\t angle: Float = 0,\n\t\t\t\t\t\t width: Float,\n\t\t\t\t\t\t opacity: Float = 0) -> CIImage {\n\t\t// The transition is driven by `time`, so it must be applied even when `angle` and `opacity`\n\t\t// remain at their defaults of 0. (A previous guard on those two parameters skipped the\n\t\t// filter entirely for the default configuration.)\n\t\tlet filter = CIFilter.swipeTransition() // CISwipeTransition\n\t\tfilter.inputImage = self\n\t\tfilter.targetImage = targetImage\n\t\tfilter.extent = extent\n\t\tfilter.color = color\n\t\tfilter.time = time\n\t\tfilter.angle = angle\n\t\tfilter.width = width\n\t\tfilter.opacity = opacity\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Temperature and Tint\n\t///\n\t/// Adapts the reference white point for an image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228421-temperatureandtint)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CITemperatureAndTint)\n\t///\n\t/// Categories: Color Adjustment, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - neutral: A vector containing the source white point defined by color temperature and tint or chromaticity (x,y).\n\t///   - targetNeutral: A vector containing the desired white point defined by color temperature and tint or chromaticity (x,y).\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc temperatureAndTint(neutral: CIVector, targetNeutral: CIVector, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.temperatureAndTint() // CITemperatureAndTint\n\t\tfilter.inputImage = self\n\t\tfilter.neutral = neutral\n\t\tfilter.targetNeutral = targetNeutral\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Thermal\n\t///\n\t/// Apply a “Thermal” style effect to an image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228423-thermal)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc thermal(active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.thermal() // CIThermal\n\t\tfilter.inputImage = self\n\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Tone Curve\n\t///\n\t/// Adjusts tone response of the R, G, and B channels of an image. The input points are five x,y values that are interpolated using a spline curve. The curve is applied in a perceptual (gamma 2) version of the working space.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228424-tonecurve)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIToneCurve)\n\t///\n\t/// Categories: Color Adjustment, Video, Still Image, Interlaced, Non-Square Pixels, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - point0: A vector containing the position of the first point of the tone curve\n\t///   - point1: A vector containing the position of the second point of the tone curve\n\t///   - point2: A vector containing the position of the third point of the tone curve\n\t///   - point3: A vector containing the position of the fourth point of the tone curve\n\t///   - point4: A vector containing the position of the fifth point of the tone curve\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc toneCurve(point0: CGPoint,\n\t\t\t\t   point1: CGPoint = .init(x: 0.25, y: 0.25),\n\t\t\t\t   point2: CGPoint = .init(x: 0.5, y: 0.5),\n\t\t\t\t   point3: CGPoint = .init(x: 0.75, y: 0.75),\n\t\t\t\t   point4: CGPoint = .init(x: 1, y: 1)) -> CIImage {\n\t\t// Identity only when ALL five points sit on the diagonal, including point0 at (0,0);\n\t\t// the previous guard ignored point0 and could wrongly skip the filter.\n\t\tguard point0 != .init(x: 0, y: 0) || point1 != .init(x: 0.25, y: 0.25) || point2 != .init(x: 0.5, y: 0.5) || point3 != .init(x: 0.75, y: 0.75) || point4 != .init(x: 1, y: 1) else { return self }\n\n\t\tlet filter = CIFilter.toneCurve() // CIToneCurve\n\t\tfilter.inputImage = self\n\t\tfilter.point0 = point0\n\t\tfilter.point1 = point1\n\t\tfilter.point2 = point2\n\t\tfilter.point3 = point3\n\t\tfilter.point4 = point4\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Tone Map Headroom\n\t///\n\t/// Apply a global tone curve to an image that reduces colors from a source headroom value to a target headroom value.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401878-tonemapheadroom)\n\t///\n\t/// Categories: Color Adjustment, Video, Interlaced, High Dynamic Range, Non-Square Pixels, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - sourceHeadroom: Specifies the headroom of the input image. (1...32)\n\t///   - targetHeadroom: Specifies the target headroom of the output image. (1...32)\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 18, macOS 15.0, *)\n\tfunc toneMapHeadroom(sourceHeadroom: Float, targetHeadroom: Float = 1, active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.toneMapHeadroom() // CIToneMapHeadroom\n\t\tfilter.inputImage = self\n\t\tfilter.sourceHeadroom = sourceHeadroom\n\t\tfilter.targetHeadroom = targetHeadroom\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Torus Lens Distortion\n\t///\n\t/// Creates a torus-shaped lens and distorts the portion of the image over which the lens is placed.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401879-toruslensdistortion)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CITorusLensDistortion)\n\t///\n\t/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - radius: The outer radius of the torus. (0...)\n\t///   - width: The width of the ring. (0...)\n\t///   - refraction: The refraction of the glass. 
(0...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc torusLensDistortion(center: CGPoint, radius: Float, width: Float, refraction: Float = 1.7) -> CIImage {\n\t\tguard radius != 0 || width != 0 || refraction != 1 else { return self }\n\n\t\tlet filter = CIFilter.torusLensDistortion() // CITorusLensDistortion\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.radius = radius\n\t\tfilter.width = width\n\t\tfilter.refraction = refraction\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Triangle Kaleidoscope\n\t///\n\t/// Maps a triangular portion of image to a triangular area and then generates a kaleidoscope effect.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228425-trianglekaleidoscope)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CITriangleKaleidoscope)\n\t///\n\t/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - point: The x and y position to use as the center of the triangular area in the input image.\n\t///   - size: The size in pixels of the triangle.\n\t///   - rotation: Rotation angle in radians of the triangle.\n\t///   - decay: The decay determines how fast the color fades from the center triangle.\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc triangleKaleidoscope(point: CGPoint,\n\t\t\t\t\t\t\t  size: Float = 700,\n\t\t\t\t\t\t\t  rotation: Float,\n\t\t\t\t\t\t\t  decay: Float = 0.85,\n\t\t\t\t\t\t\t  active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.triangleKaleidoscope() // CITriangleKaleidoscope\n\t\tfilter.inputImage = self\n\t\tfilter.point = point\n\t\tfilter.size = size\n\t\tfilter.rotation = 
rotation\n\t\tfilter.decay = decay\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Triangle Tile\n\t///\n\t/// Maps a triangular portion of image to a triangular area and then tiles the result.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228426-triangletile)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CITriangleTile)\n\t///\n\t/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - angle: The angle in radians of the tiled pattern.\n\t///   - width: The width of a tile. (0...)\n\t/// - Returns: processed new `CIImage`\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc triangleTile(center: CGPoint, angle: Float = 0, width: Float) -> CIImage {\n\t\t// Tiling alters the image even at angle 0, so the filter is always applied.\n\t\t// (A previous `guard angle != 0` made this method a no-op for its own default angle.)\n\t\tlet filter = CIFilter.triangleTile() // CITriangleTile\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.angle = angle\n\t\tfilter.width = width\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Twelvefold Reflected Tile\n\t///\n\t/// Produces a tiled image from a source image by rotating the source image at increments of 30 degrees.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228427-twelvefoldreflectedtile)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CITwelvefoldReflectedTile)\n\t///\n\t/// Categories: Tile Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - angle: The angle in radians of the tiled pattern.\n\t///   - width: The width of a tile. 
(0...)\n\t/// - Returns: processed new `CIImage`\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc twelvefoldReflectedTile(center: CGPoint, angle: Float = 0, width: Float) -> CIImage {\n\t\t// Tiling alters the image even at angle 0, so the filter is always applied.\n\t\t// (A previous `guard angle != 0` made this method a no-op for its own default angle.)\n\t\tlet filter = CIFilter.twelvefoldReflectedTile() // CITwelvefoldReflectedTile\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.angle = angle\n\t\tfilter.width = width\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Twirl Distortion\n\t///\n\t/// Rotates pixels around a point to give a twirling effect. You can specify the number of rotations as well as the center and radius of the effect.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401880-twirldistortion)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CITwirlDistortion)\n\t///\n\t/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - radius: The radius determines how many pixels are used to create the distortion. The larger the radius, the wider the extent of the distortion. (0...)\n\t///   - angle: The angle in radians of the twirl. Values can be positive or negative.\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc twirlDistortion(center: CGPoint, radius: Float, angle: Float = .pi) -> CIImage {\n\t\tguard angle != 0 else { return self }\n\n\t\tlet filter = CIFilter.twirlDistortion() // CITwirlDistortion\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.radius = radius\n\t\tfilter.angle = angle\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Unsharp Mask\n\t///\n\t/// Increases the contrast of the edges between pixels of different colors in an image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228428-unsharpmask)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIUnsharpMask)\n\t///\n\t/// Categories: Sharpen, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - radius: The radius around a given pixel to apply the unsharp mask. The larger the radius, the more of the image is affected. (0...)\n\t///   - intensity: The intensity of the effect. The larger the value, the more contrast in the affected area. (0...)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc unsharpMask(radius: Float, intensity: Float = 0.5) -> CIImage {\n\t\tguard radius != 0 || intensity != 0 else { return self }\n\n\t\tlet filter = CIFilter.unsharpMask() // CIUnsharpMask\n\t\tfilter.inputImage = self\n\t\tfilter.radius = radius\n\t\tfilter.intensity = intensity\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Vibrance\n\t///\n\t/// Adjusts the saturation of an image while keeping pleasing skin tones.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228429-vibrance)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIVibrance)\n\t///\n\t/// Categories: Color Adjustment, Video, Still Image, Interlaced, Non-Square Pixels, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - amount: The amount to adjust the saturation. 
(-1...1)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc vibrance(amount: Float = 0) -> CIImage {\n\t\tguard amount != 0 else { return self }\n\n\t\tlet filter = CIFilter.vibrance() // CIVibrance\n\t\tfilter.inputImage = self\n\t\tfilter.amount = amount\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Vignette\n\t///\n\t/// Applies a vignette shading to the corners of an image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228431-vignette)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIVignette)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - intensity: The intensity of the effect. (-1...1)\n\t///   - radius: The distance from the center of the effect. (0...2)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc vignette(intensity: Float = 0, radius: Float = 1) -> CIImage {\n\t\tguard intensity != 0 else { return self }\n\n\t\tlet filter = CIFilter.vignette() // CIVignette\n\t\tfilter.inputImage = self\n\t\tfilter.intensity = intensity\n\t\tfilter.radius = radius\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Vignette Effect\n\t///\n\t/// Modifies the brightness of an image around the periphery of a specified region.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228430-vignetteeffect)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIVignetteEffect)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - radius: The distance from the center of the effect. (0...)\n\t///   - intensity: The intensity of the effect. (-1...1)\n\t///   - falloff: The falloff of the effect. (0...1)\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc vignetteEffect(center: CGPoint, radius: Float, intensity: Float = 1, falloff: Float = 0.5) -> CIImage {\n\t\tguard intensity != 0 else { return self }\n\n\t\tlet filter = CIFilter.vignetteEffect() // CIVignetteEffect\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.radius = radius\n\t\tfilter.intensity = intensity\n\t\tfilter.falloff = falloff\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Vivid Light Blend Mode\n\t///\n\t/// A blend mode that is a combination of color burn and color dodge blend modes.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401881-vividlightblendmode)\n\t///\n\t/// Categories: Composite Operation, Video, Still Image, Interlaced, Non-Square Pixels, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - backgroundImage: The image to use as a background image.\n\t/// - Returns: processed new `CIImage`, or identity if `backgroundImage` is nil\n\t@available(iOS 15, macOS 12.0, *)\n\tfunc vividLightBlendMode(backgroundImage: CIImage?) 
-> CIImage {\n\t\tguard let backgroundImage else { return self }\n\n\t\tlet filter = CIFilter.vividLightBlendMode() // CIVividLightBlendMode\n\t\tfilter.inputImage = self\n\t\tfilter.backgroundImage = backgroundImage\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Vortex Distortion\n\t///\n\t/// Rotates pixels around a point to simulate a vortex. You can specify the number of rotations as well the center and radius of the effect.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401882-vortexdistortion)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIVortexDistortion)\n\t///\n\t/// Categories: Distortion Effect, Video, Still Image, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - radius: The radius determines how many pixels are used to create the distortion. The larger the radius, the wider the extent of the distortion. (0...)\n\t///   - angle: The angle in radians of the effect.\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 14, macOS 11.0, *)\n\tfunc vortexDistortion(center: CGPoint, radius: Float, angle: Float) -> CIImage {\n\t\tguard radius != 0 || angle != 0 else { return self }\n\n\t\tlet filter = CIFilter.vortexDistortion() // CIVortexDistortion\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.radius = radius\n\t\tfilter.angle = angle\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// White Point Adjust\n\t///\n\t/// Adjusts the reference white point for an image and maps all colors in the source using the new reference.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228432-whitepointadjust)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIWhitePointAdjust)\n\t///\n\t/// Categories: Color Adjustment, Video, Still Image, Interlaced, Non-Square Pixels, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - color: A color to use as the white point.\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc whitePointAdjust(color: CIColor = CIColor.white) -> CIImage {\n\t\tguard color != CIColor.white else { return self }\n\n\t\tlet filter = CIFilter.whitePointAdjust() // CIWhitePointAdjust\n\t\tfilter.inputImage = self\n\t\tfilter.color = color\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// X-Ray\n\t///\n\t/// Apply an “XRay” style effect to an image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228433-xray)\n\t///\n\t/// Categories: Color Effect, Video, Interlaced, Non-Square Pixels, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - active: should this filter be applied\n\t/// - Returns: processed new `CIImage`, or identity if `active` is false\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc xRay(active: Bool = true) -> CIImage {\n\t\tguard active else { return self }\n\n\t\tlet filter = CIFilter.xRay() // CIXRay\n\t\tfilter.inputImage = self\n\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Zoom Blur\n\t///\n\t/// Simulates the effect of zooming the camera while capturing the image.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228434-zoomblur)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIZoomBlur)\n\t///\n\t/// Categories: Blur, Still Image, Video, Built-In, High Dynamic Range\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - amount: The zoom-in amount. Larger values result in more zooming in.\n\t/// - Returns: processed new `CIImage` or identity if parameters result in no operation applied\n\t@available(iOS 13, macOS 10.15, *)\n\tfunc zoomBlur(center: CGPoint, amount: Float) -> CIImage {\n\t\tguard amount != 0 else { return self }\n\n\t\tlet filter = CIFilter.zoomBlur() // CIZoomBlur\n\t\tfilter.inputImage = self\n\t\tfilter.center = center\n\t\tfilter.amount = amount\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t//\n\t// MARK: GENERATORS\n\t//\n\n\t/// Attributed Text Image Generator\n\t///\n\t/// Generate an image attributed string.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228267-attributedtextimagegenerator)\n\t///\n\t/// Categories: Generator, Video, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - text: The attributed text to render.\n\t///   - scaleFactor: The scale of the font to use for the generated text. (0...)\n\t///   - padding: A value for an additional number of pixels to pad around the text’s bounding box. 
(0...200)\n\t/// - Returns: an image attributed string\n\t@available(iOS 16, macOS 13.0, *)\n\tstatic func attributedTextImageGenerator(text: NSAttributedString, scaleFactor: Float = 1, padding: Int) -> CIImage {\n\t\tlet filter = CIFilter.attributedTextImageGenerator() // CIAttributedTextImageGenerator\n\t\tfilter.text = text\n\t\tfilter.scaleFactor = scaleFactor\n\t\tfilter.padding = Float(padding)\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Aztec Code Generator\n\t///\n\t/// Generates an Aztec code (two-dimensional barcode) from input data.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228268-azteccodegenerator)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIAztecCodeGenerator)\n\t///\n\t/// Categories: Generator, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - message: The message to encode in the Aztec Barcode\n\t///   - correctionLevel: Aztec error correction value between 5 and 95 (5...95)\n\t///   - layers: Aztec layers value between 1 and 32. (1...32)\n\t///   - compactStyle: A Boolean that specifies whether to force a compact style Aztec code.\n\t/// - Returns: an Aztec code (two-dimensional barcode) from input data\n\t@available(iOS 13, macOS 10.15, *)\n\tstatic func aztecCodeGenerator(message: Data, correctionLevel: Int, layers: Int, compactStyle: Bool) -> CIImage {\n\t\tlet filter = CIFilter.aztecCodeGenerator() // CIAztecCodeGenerator\n\t\tfilter.message = message\n\t\tfilter.correctionLevel = Float(correctionLevel)\n\t\tfilter.layers = Float(layers)\n\t\tfilter.compactStyle = Float(compactStyle ? 1 : 0)\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Barcode Generator\n\t///\n\t/// Generate a barcode image from a CIBarcodeDescriptor.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228269-barcodegenerator)\n\t///\n\t/// Categories: Generator, Video, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - barcodeDescriptor: The CIBarcodeDescription object to generate an image for.\n\t/// - Returns: a barcode image from a CIBarcodeDescriptor\n\t@available(iOS 13, macOS 10.15, *)\n\tstatic func barcodeGenerator(barcodeDescriptor: CIBarcodeDescriptor) -> CIImage {\n\t\tlet filter = CIFilter.barcodeGenerator() // CIBarcodeGenerator\n\t\tfilter.barcodeDescriptor = barcodeDescriptor\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Blurred Rectangle Generator\n\t///\n\t/// Generates a blurred rectangle image with the specified extent, blur sigma, and color.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401849-blurredrectanglegenerator)\n\t///\n\t/// Categories: Generator, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - extent: A rectangle that defines the extent of the effect.\n\t///   - sigma: The sigma for a gaussian blur. (0...)\n\t///   - color: A color.\n\t/// - Returns: a blurred rectangle image with the specified extent, blur sigma, and color\n\t@available(iOS 17, macOS 14.0, *)\n\tstatic func blurredRectangleGenerator(extent: CGRect, sigma: Float, color: CIColor = CIColor.white) -> CIImage {\n\t\tlet filter = CIFilter.blurredRectangleGenerator() // CIBlurredRectangleGenerator\n\t\tfilter.extent = extent\n\t\tfilter.sigma = sigma\n\t\tfilter.color = color\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Checkerboard\n\t///\n\t/// Generates a pattern of squares of alternating colors. 
You can specify the size, colors, and the sharpness of the pattern.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228279-checkerboardgenerator)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CICheckerboardGenerator)\n\t///\n\t/// Categories: Generator, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - color0: A color to use for the first set of squares.\n\t///   - color1: A color to use for the second set of squares.\n\t///   - width: The width of the squares in the pattern.\n\t///   - sharpness: The sharpness of the edges in pattern. The smaller the value, the more blurry the pattern. Values range from 0.0 to 1.0. (0...1)\n\t/// - Returns: a checkerboard pattern\n\t@available(iOS 13, macOS 10.15, *)\n\tstatic func checkerboardGenerator(center: CGPoint,\n\t\t\t\t\t\t\t\t\t  color0: CIColor = CIColor.white,\n\t\t\t\t\t\t\t\t\t  color1: CIColor = CIColor.black,\n\t\t\t\t\t\t\t\t\t  width: Float,\n\t\t\t\t\t\t\t\t\t  sharpness: Float = 1) -> CIImage {\n\t\tlet filter = CIFilter.checkerboardGenerator() // CICheckerboardGenerator\n\t\tfilter.center = center\n\t\tfilter.color0 = color0\n\t\tfilter.color1 = color1\n\t\tfilter.width = width\n\t\tfilter.sharpness = sharpness\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Code 128 Barcode Generator\n\t///\n\t/// Generates a Code 128 one-dimensional barcode from input data.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228281-code128barcodegenerator)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CICode128BarcodeGenerator)\n\t///\n\t/// Categories: Generator, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - message: The message to encode in the Code 128 Barcode\n\t///   - quietSpace: The number of empty white pixels that should surround the barcode. (0...100)\n\t///   - barcodeHeight: The height of the generated barcode in pixels. (1...500)\n\t/// - Returns: a Code 128 one-dimensional barcode from input data\n\t@available(iOS 13, macOS 10.15, *)\n\tstatic func code128BarcodeGenerator(message: Data, quietSpace: Int, barcodeHeight: Int) -> CIImage {\n\t\tlet filter = CIFilter.code128BarcodeGenerator() // CICode128BarcodeGenerator\n\t\tfilter.message = message\n\t\tfilter.quietSpace = Float(quietSpace)\n\t\tfilter.barcodeHeight = Float(barcodeHeight)\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\t// ℹ️ CIConstantColorGenerator already has a CIImage initializer: init(color: CIColor)\n\n\t/// Gaussian Gradient\n\t///\n\t/// Generates a gradient that varies from one color to another using a Gaussian distribution.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228332-gaussiangradient)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIGaussianGradient)\n\t///\n\t/// Categories: Gradient, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - color0: The first color to use in the gradient.\n\t///   - color1: The second color to use in the gradient.\n\t///   - radius: The radius of the Gaussian distribution. 
(0...)\n\t/// - Returns: a gradient that varies from one color to another using a Gaussian distribution\n\t@available(iOS 13, macOS 10.15, *)\n\tstatic func gaussianGradient(center: CGPoint,\n\t\t\t\t\t\t\t\t color0: CIColor = CIColor.white,\n\t\t\t\t\t\t\t\t color1: CIColor = CIColor.clear,\n\t\t\t\t\t\t\t\t radius: Float) -> CIImage {\n\t\tlet filter = CIFilter.gaussianGradient() // CIGaussianGradient\n\t\tfilter.center = center\n\t\tfilter.color0 = color0\n\t\tfilter.color1 = color1\n\t\tfilter.radius = radius\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Hue/Saturation/Value Gradient\n\t///\n\t/// Generates a color wheel that shows hues and saturations for a specified value.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228342-huesaturationvaluegradient)\n\t///\n\t/// Categories: Gradient, Video, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - value: The color value used to generate the color wheel. (0...)\n\t///   - radius: The distance from the center of the effect. (0...)\n\t///   - softness: The softness of the generated color wheel (0...)\n\t///   - dither: A boolean value specifying whether the distort the generated output (0...)\n\t///   - colorSpace: The CGColorSpaceRef that the color wheel should be generated in.\n\t/// - Returns: a color wheel that shows hues and saturations for a specified value\n\t@available(iOS 13, macOS 10.15, *)\n\tstatic func hueSaturationValueGradient(value: Float = 1,\n\t\t\t\t\t\t\t\t\t\t   radius: Float,\n\t\t\t\t\t\t\t\t\t\t   softness: Float = 1,\n\t\t\t\t\t\t\t\t\t\t   dither: Float = 1,\n\t\t\t\t\t\t\t\t\t\t   colorSpace: CGColorSpace = CGColorSpace(name: CGColorSpace.sRGB)!) 
-> CIImage {\n\t\tlet filter = CIFilter.hueSaturationValueGradient() // CIHueSaturationValueGradient\n\t\tfilter.value = value\n\t\tfilter.radius = radius\n\t\tfilter.softness = softness\n\t\tfilter.dither = dither\n\t\tfilter.colorSpace = colorSpace\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Lenticular Halo\n\t///\n\t/// Simulates a halo that is generated by the diffraction associated with the spread of a lens. This filter is typically applied to another image to simulate lens flares and similar effects.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228345-lenticularhalogenerator)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CILenticularHaloGenerator)\n\t///\n\t/// Categories: Generator, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - color: A color.\n\t///   - haloRadius: The radius of the halo. (0...)\n\t///   - haloWidth: The width of the halo, from its inner radius to its outer radius. (0...)\n\t///   - haloOverlap: The overlap of red, green, and blue halos. A value of 1 results in a full overlap. (0...)\n\t///   - striationStrength: The intensity of the halo colors. Larger values are more intense. (0...)\n\t///   - striationContrast: The contrast of the halo colors. Larger values are higher contrast. (0...)\n\t///   - time: The duration of the effect. 
(0...1)\n\t/// - Returns: new `CIImage`\n\t@available(iOS 13, macOS 10.15, *)\n\tstatic func lenticularHaloGenerator(center: CGPoint,\n\t\t\t\t\t\t\t\t\t\tcolor: CIColor,\n\t\t\t\t\t\t\t\t\t\thaloRadius: Float,\n\t\t\t\t\t\t\t\t\t\thaloWidth: Float,\n\t\t\t\t\t\t\t\t\t\thaloOverlap: Float = 0.77,\n\t\t\t\t\t\t\t\t\t\tstriationStrength: Float = 0.5,\n\t\t\t\t\t\t\t\t\t\tstriationContrast: Float = 1,\n\t\t\t\t\t\t\t\t\t\ttime: Float = 0) -> CIImage {\n\t\tlet filter = CIFilter.lenticularHaloGenerator() // CILenticularHaloGenerator\n\t\tfilter.center = center\n\t\tfilter.color = color\n\t\tfilter.haloRadius = haloRadius\n\t\tfilter.haloWidth = haloWidth\n\t\tfilter.haloOverlap = haloOverlap\n\t\tfilter.striationStrength = striationStrength\n\t\tfilter.striationContrast = striationContrast\n\t\tfilter.time = time\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Linear Gradient\n\t///\n\t/// Generates a gradient that varies along a linear axis between two defined endpoints.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228351-lineargradient)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CILinearGradient)\n\t///\n\t/// Categories: Gradient, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - point0: The starting position of the gradient -- where the first color begins.\n\t///   - point1: The ending position of the gradient -- where the second color begins.\n\t///   - color0: The first color to use in the gradient.\n\t///   - color1: The second color to use in the gradient.\n\t/// - Returns: a gradient that varies along a linear axis between two defined endpoints\n\t@available(iOS 13, macOS 10.15, *)\n\tstatic func linearGradient(point0: CGPoint = .zero,\n\t\t\t\t\t\t\t   point1: CGPoint,\n\t\t\t\t\t\t\t   color0: CIColor = CIColor.white,\n\t\t\t\t\t\t\t   color1: CIColor = CIColor.black) -> CIImage {\n\t\tlet filter = 
CIFilter.linearGradient() // CILinearGradient\n\t\tfilter.point0 = point0\n\t\tfilter.point1 = point1\n\t\tfilter.color0 = color0\n\t\tfilter.color1 = color1\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Mesh Generator\n\t///\n\t/// Generates a mesh from an array of line segments.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228359-meshgenerator)\n\t///\n\t/// Categories: Generator, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - width: The width in pixels of the effect. (0...)\n\t///   - color: A color.\n\t///   - mesh: An array of line segments stored as an array of CIVectors each containing a start point and end point.\n\t/// - Returns: a mesh from an array of line segments\n\t@available(iOS 13, macOS 10.15, *)\n\tstatic func meshGenerator(width: Float, color: CIColor = CIColor.white, mesh: [Any]) -> CIImage {\n\t\tlet filter = CIFilter.meshGenerator() // CIMeshGenerator\n\t\tfilter.width = width\n\t\tfilter.color = color\n\t\tfilter.mesh = mesh\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// PDF417 Barcode Generator\n\t///\n\t/// Generates a PDF417 code (two-dimensional barcode) from input data.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228261-pdf417barcodegenerator)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIPDF417BarcodeGenerator)\n\t///\n\t/// Categories: Generator, Video, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - message: The message to encode in the PDF417 Barcode\n\t///   - minWidth: The minimum width of the generated barcode in pixels. (56...583)\n\t///   - maxWidth: The maximum width of the generated barcode in pixels. (56...583)\n\t///   - minHeight: The minimum height of the generated barcode in pixels. (13...283)\n\t///   - maxHeight: The maximum height of the generated barcode in pixels. 
(13...283)\n\t///   - dataColumns: The number of data columns in the generated barcode (1...30)\n\t///   - rows: The number of rows in the generated barcode (3...90)\n\t///   - preferredAspectRatio: The preferred aspect ratio of the generated barcode (0...)\n\t///   - compactionMode: The compaction mode of the generated barcode. (0...3)\n\t///   - compactStyle: A Boolean that specifies whether to force a compact style Aztec code.\n\t///   - correctionLevel: The correction level ratio of the generated barcode (0...8)\n\t///   - alwaysSpecifyCompaction: A Boolean value specifying whether to force compaction style.\n\t/// - Returns: a PDF417 code (two-dimensional barcode) from input data\n\t@available(iOS 13, macOS 10.15, *)\n\tstatic func pdf417BarcodeGenerator(message: Data,\n\t\t\t\t\t\t\t\t\t   minWidth: Int,\n\t\t\t\t\t\t\t\t\t   maxWidth: Int,\n\t\t\t\t\t\t\t\t\t   minHeight: Int,\n\t\t\t\t\t\t\t\t\t   maxHeight: Int,\n\t\t\t\t\t\t\t\t\t   dataColumns: Int,\n\t\t\t\t\t\t\t\t\t   rows: Int,\n\t\t\t\t\t\t\t\t\t   preferredAspectRatio: Float,\n\t\t\t\t\t\t\t\t\t   compactionMode: Int,\n\t\t\t\t\t\t\t\t\t   compactStyle: Bool,\n\t\t\t\t\t\t\t\t\t   correctionLevel: Int,\n\t\t\t\t\t\t\t\t\t   alwaysSpecifyCompaction: Bool) -> CIImage {\n\t\tlet filter = CIFilter.pdf417BarcodeGenerator() // CIPDF417BarcodeGenerator\n\t\tfilter.message = message\n\t\tfilter.minWidth = Float(minWidth)\n\t\tfilter.maxWidth = Float(maxWidth)\n\t\tfilter.minHeight = Float(minHeight)\n\t\tfilter.maxHeight = Float(maxHeight)\n\t\tfilter.dataColumns = Float(dataColumns)\n\t\tfilter.rows = Float(rows)\n\t\tfilter.preferredAspectRatio = preferredAspectRatio\n\t\tfilter.compactionMode = Float(compactionMode)\n\t\tfilter.compactStyle = Float(compactStyle ? 1 : 0)\n\t\tfilter.correctionLevel = Float(correctionLevel)\n\t\tfilter.alwaysSpecifyCompaction = Float(alwaysSpecifyCompaction ? 1 : 0)\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// QR Code Generator\n\t///\n\t/// Generates a Quick Response code (two-dimensional barcode) from input data.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228262-qrcodegenerator)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIQRCodeGenerator)\n\t///\n\t/// Categories: Generator, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - message: The message to encode in the QR Code\n\t///   - correctionLevel: QR Code correction level L, M, Q, or H.\n\t/// - Returns: a Quick Response code (two-dimensional barcode) from input data\n\t@available(iOS 13, macOS 10.15, *)\n\tstatic func qrCodeGenerator(message: Data, correctionLevel: String = \"M\") -> CIImage {\n\t\tlet filter = CIFilter.qrCodeGenerator() // CIQRCodeGenerator\n\t\tfilter.message = message\n\t\tfilter.correctionLevel = correctionLevel\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Radial Gradient\n\t///\n\t/// Generates a gradient that varies radially between two circles having the same center. It is valid for one of the two circles to have a radius of 0.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228395-radialgradient)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIRadialGradient)\n\t///\n\t/// Categories: Gradient, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - radius0: The radius of the starting circle to use in the gradient. (0...)\n\t///   - radius1: The radius of the ending circle to use in the gradient. 
(0...)\n\t///   - color0: The first color to use in the gradient.\n\t///   - color1: The second color to use in the gradient.\n\t/// - Returns: a gradient that varies radially between two circles having the same center\n\t@available(iOS 13, macOS 10.15, *)\n\tstatic func radialGradient(center: CGPoint,\n\t\t\t\t\t\t\t   radius0: Float,\n\t\t\t\t\t\t\t   radius1: Float,\n\t\t\t\t\t\t\t   color0: CIColor = CIColor.white,\n\t\t\t\t\t\t\t   color1: CIColor = CIColor.black) -> CIImage {\n\t\tlet filter = CIFilter.radialGradient() // CIRadialGradient\n\t\tfilter.center = center\n\t\tfilter.radius0 = radius0\n\t\tfilter.radius1 = radius1\n\t\tfilter.color0 = color0\n\t\tfilter.color1 = color1\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Random Generator\n\t///\n\t/// Generates an image of infinite extent whose pixel values are made up of four independent, uniformly-distributed random numbers in the 0 to 1 range.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228396-randomgenerator)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIRandomGenerator)\n\t///\n\t/// Categories: Generator, Video, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t/// - Returns: an image of infinite extent whose pixel values are made up of four independent, uniformly-distributed random numbers in the 0 to 1 range\n\t@available(iOS 13, macOS 10.15, *)\n\tstatic func randomGenerator() -> CIImage {\n\t\tlet filter = CIFilter.randomGenerator() // CIRandomGenerator\n\n\t\treturn filter.outputImage ?? 
CIImage.empty()\n\t}\n\n\t/// Rounded Rectangle Generator\n\t///\n\t/// Generates a rounded rectangle image with the specified extent, corner radius, and color.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3335007-roundedrectanglegenerator)\n\t///\n\t/// Categories: Generator, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - extent: A rectangle that defines the extent of the effect.\n\t///   - radius: The distance from the center of the effect. (0...)\n\t///   - color: A color.\n\t/// - Returns: a rounded rectangle image with the specified extent, corner radius, and color\n\t@available(iOS 13, macOS 10.15, *)\n\tstatic func roundedRectangleGenerator(extent: CGRect, radius: Float, color: CIColor = CIColor.white) -> CIImage {\n\t\tlet filter = CIFilter.roundedRectangleGenerator() // CIRoundedRectangleGenerator\n\t\tfilter.extent = extent\n\t\tfilter.radius = radius\n\t\tfilter.color = color\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Rounded Rectangle Stroke Generator\n\t///\n\t/// Generates a rounded rectangle stroke image with the specified extent, corner radius, stroke width, and color.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/4401875-roundedrectanglestrokegenerator)\n\t///\n\t/// Categories: Generator, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - extent: A rectangle that defines the extent of the effect.\n\t///   - radius: The distance from the center of the effect. (0...)\n\t///   - color: A color.\n\t///   - width: The width in pixels of the effect. 
(0...)\n\t/// - Returns: a rounded rectangle stroke image with the specified extent, corner radius, stroke width, and color\n\t@available(iOS 17, macOS 14.0, *)\n\tstatic func roundedRectangleStrokeGenerator(extent: CGRect,\n\t\t\t\t\t\t\t\t\t\t\t\tradius: Float,\n\t\t\t\t\t\t\t\t\t\t\t\tcolor: CIColor = CIColor.white,\n\t\t\t\t\t\t\t\t\t\t\t\twidth: Float) -> CIImage {\n\t\tlet filter = CIFilter.roundedRectangleStrokeGenerator() // CIRoundedRectangleStrokeGenerator\n\t\tfilter.extent = extent\n\t\tfilter.radius = radius\n\t\tfilter.color = color\n\t\tfilter.width = width\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Smooth Linear Gradient\n\t///\n\t/// Generates a gradient that uses an S-curve function to blend colors along a linear axis between two defined endpoints.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228407-smoothlineargradient)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISmoothLinearGradient)\n\t///\n\t/// Categories: Gradient, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - point0: The starting position of the gradient -- where the first color begins.\n\t///   - point1: The ending position of the gradient -- where the second color begins.\n\t///   - color0: The first color to use in the gradient.\n\t///   - color1: The second color to use in the gradient.\n\t/// - Returns: a gradient that uses an S-curve function to blend colors along a linear axis between two defined endpoints\n\t@available(iOS 13, macOS 10.15, *)\n\tstatic func smoothLinearGradient(point0: CGPoint = .zero,\n\t\t\t\t\t\t\t\t\t point1: CGPoint,\n\t\t\t\t\t\t\t\t\t color0: CIColor = CIColor.white,\n\t\t\t\t\t\t\t\t\t color1: CIColor = CIColor.black) -> CIImage {\n\t\tlet filter = CIFilter.smoothLinearGradient() // CISmoothLinearGradient\n\t\tfilter.point0 = point0\n\t\tfilter.point1 = point1\n\t\tfilter.color0 = color0\n\t\tfilter.color1 = 
color1\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Star Shine\n\t///\n\t/// Generates a starburst pattern that is similar to a supernova; can be used to simulate a lens flare.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228415-starshinegenerator)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIStarShineGenerator)\n\t///\n\t/// Categories: Generator, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - color: The color to use for the outer shell of the circular star.\n\t///   - radius: The radius of the star. (0...)\n\t///   - crossScale: The size of the cross pattern. (0...)\n\t///   - crossAngle: The angle in radians of the cross pattern.\n\t///   - crossOpacity: The opacity of the cross pattern. (-8...)\n\t///   - crossWidth: The width of the cross pattern. (0...)\n\t///   - epsilon: The length of the cross spikes. (-8...)\n\t/// - Returns: a starburst pattern that is similar to a supernova; can be used to simulate a lens flare\n\t@available(iOS 13, macOS 10.15, *)\n\tstatic func starShineGenerator(center: CGPoint,\n\t\t\t\t\t\t\t\t   color: CIColor,\n\t\t\t\t\t\t\t\t   radius: Float,\n\t\t\t\t\t\t\t\t   crossScale: Float = 15,\n\t\t\t\t\t\t\t\t   crossAngle: Float = 0.6,\n\t\t\t\t\t\t\t\t   crossOpacity: Float = -2,\n\t\t\t\t\t\t\t\t   crossWidth: Float,\n\t\t\t\t\t\t\t\t   epsilon: Float = -2) -> CIImage {\n\t\tlet filter = CIFilter.starShineGenerator() // CIStarShineGenerator\n\t\tfilter.center = center\n\t\tfilter.color = color\n\t\tfilter.radius = radius\n\t\tfilter.crossScale = crossScale\n\t\tfilter.crossAngle = crossAngle\n\t\tfilter.crossOpacity = crossOpacity\n\t\tfilter.crossWidth = crossWidth\n\t\tfilter.epsilon = epsilon\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Stripes\n\t///\n\t/// Generates a stripe pattern. 
You can control the color of the stripes, the spacing, and the contrast.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228417-stripesgenerator)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CIStripesGenerator)\n\t///\n\t/// Categories: Generator, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - color0: A color to use for the odd stripes.\n\t///   - color1: A color to use for the even stripes.\n\t///   - width: The width of a stripe.\n\t///   - sharpness: The sharpness of the stripe pattern. The smaller the value, the more blurry the pattern. Values range from 0.0 to 1.0. (0...1)\n\t/// - Returns: a stripe pattern\n\t@available(iOS 13, macOS 10.15, *)\n\tstatic func stripesGenerator(center: CGPoint,\n\t\t\t\t\t\t\t\t color0: CIColor = CIColor.white,\n\t\t\t\t\t\t\t\t color1: CIColor = CIColor.black,\n\t\t\t\t\t\t\t\t width: Float,\n\t\t\t\t\t\t\t\t sharpness: Float = 1) -> CIImage {\n\t\tlet filter = CIFilter.stripesGenerator() // CIStripesGenerator\n\t\tfilter.center = center\n\t\tfilter.color0 = color0\n\t\tfilter.color1 = color1\n\t\tfilter.width = width\n\t\tfilter.sharpness = sharpness\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Sunbeams\n\t///\n\t/// Generates a sun effect. You typically use the output of the sunbeams filter as input to a composite filter.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228419-sunbeamsgenerator)\n\t/// [Classic Documentation](https://t.ly/Gyd6#//apple_ref/doc/filter/ci/CISunbeamsGenerator)\n\t///\n\t/// Categories: Generator, Video, Still Image, High Dynamic Range, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - center: The center of the effect as x and y pixel coordinates.\n\t///   - color: The color of the sun.\n\t///   - sunRadius: The radius of the sun. 
(0...)\n\t///   - maxStriationRadius: The radius of the sunbeams. (0...)\n\t///   - striationStrength: The intensity of the sunbeams. Higher values result in more intensity. (0...)\n\t///   - striationContrast: The contrast of the sunbeams. Higher values result in more contrast. (0...)\n\t///   - time: The duration of the effect. (0...1)\n\t/// - Returns: a sun effect\n\t@available(iOS 13, macOS 10.15, *)\n\tstatic func sunbeamsGenerator(center: CGPoint,\n\t\t\t\t\t\t\t\t  color: CIColor,\n\t\t\t\t\t\t\t\t  sunRadius: Float,\n\t\t\t\t\t\t\t\t  maxStriationRadius: Float = 2.58,\n\t\t\t\t\t\t\t\t  striationStrength: Float = 0.5,\n\t\t\t\t\t\t\t\t  striationContrast: Float = 1.375,\n\t\t\t\t\t\t\t\t  time: Float = 0) -> CIImage {\n\t\tlet filter = CIFilter.sunbeamsGenerator() // CISunbeamsGenerator\n\t\tfilter.center = center\n\t\tfilter.color = color\n\t\tfilter.sunRadius = sunRadius\n\t\tfilter.maxStriationRadius = maxStriationRadius\n\t\tfilter.striationStrength = striationStrength\n\t\tfilter.striationContrast = striationContrast\n\t\tfilter.time = time\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n\n\t/// Text Image Generator\n\t///\n\t/// Generate an image from a string and font information.\n\t///\n\t/// [Documentation](https://developer.apple.com/documentation/coreimage/cifilter/3228422-textimagegenerator)\n\t///\n\t/// Categories: Generator, Video, Still Image, Built-In\n\t///\n\t///\n\t/// - Parameters:\n\t///   - text: The text to render.\n\t///   - fontName: The name of the font to use for the generated text.\n\t///   - fontSize: The size of the font to use for the generated text. (0...)\n\t///   - scaleFactor: The scale of the font to use for the generated text. (0...)\n\t///   - padding: The number of additional pixels to pad around the text’s bounding box. 
(0...200)\n\t/// - Returns: an image from a string and font information\n\t@available(iOS 16, macOS 13.0, *)\n\tstatic func textImageGenerator(text: String,\n\t\t\t\t\t\t\t\t   fontName: String = \"HelveticaNeue\",\n\t\t\t\t\t\t\t\t   fontSize: Float = 12,\n\t\t\t\t\t\t\t\t   scaleFactor: Float = 1,\n\t\t\t\t\t\t\t\t   padding: Int) -> CIImage {\n\t\tlet filter = CIFilter.textImageGenerator() // CITextImageGenerator\n\t\tfilter.text = text\n\t\tfilter.fontName = fontName\n\t\tfilter.fontSize = fontSize\n\t\tfilter.scaleFactor = scaleFactor\n\t\tfilter.padding = Float(padding)\n\t\treturn filter.outputImage ?? CIImage.empty()\n\t}\n}\n\n\n\n\n\n\n\n"
  },
  {
    "path": "Sources/Image-Extensions.swift",
    "content": "//\n//  Image-Extensions.swift\n//  SwiftUI Core Image\n//\n//  Created by Dan Wood on 5/9/23.\n//\n\nimport Foundation\nimport CoreGraphics\nimport CoreImage\nimport SwiftUI\n\npublic extension Image {\n    private static let context = CIContext(options: nil)\n\n\tinit(ciImage: CIImage) {\n\n#if canImport(UIKit)\n\t\t// Note that making a UIImage and then using that to initialize the Image doesn't seem to work, but CGImage is fine.\n\t\tif let cgImage = Self.context.createCGImage(ciImage, from: ciImage.extent) {\n\t\t\tself.init(cgImage, scale: 1.0, orientation: .up, label: Text(\"\"))\n\t\t} else {\n\t\t\tself.init(systemName: \"questionmark\")\n\t\t}\n#elseif canImport(AppKit)\n\t\t// Looks like the NSCIImageRep is slightly better optimized for repeated runs,\n\t\t// I'm guessing that it doesn't actually render the bitmap unless it needs to.\n\t\tlet rep = NSCIImageRep(ciImage: ciImage)\n\t\tguard rep.size.width <= 10000, rep.size.height <= 10000 else {\t\t// simple test to make sure we don't have overflow extent\n\t\t\tself.init(nsImage: NSImage())\n\t\t\treturn\n\t\t}\n\t\tlet nsImage = NSImage(size: rep.size)\t// size affects aspect ratio but not resolution\n\t\tnsImage.addRepresentation(rep)\n\t\tself.init(nsImage: nsImage)\n#endif\n\t}\n}\n"
  }
]